_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
0aadb9afb8f01f7725cf62092a177dbad7df2c85df18ab0b9d4abb6761f0794a | takikawa/sweet-racket | paren-shape.rkt | #lang sweet-exp racket/base
require rackunit
for-syntax racket/base
racket/match
define-syntax get-paren-shape
lambda (stx)
#`#,(or (syntax-property stx 'paren-shape) #\( )
check-equal? (get-paren-shape) #\(
check-equal? [get-paren-shape] #\[
check-equal? get-paren-shape() #\(
check-equal? get-paren-shape[] #\[
| null | https://raw.githubusercontent.com/takikawa/sweet-racket/a3c1ae74c2e75e8d6164a3a9d8eb34335a7ba4de/sweet-exp-test/sweet-exp/tests/paren-shape.rkt | racket | #lang sweet-exp racket/base
require rackunit
for-syntax racket/base
racket/match
define-syntax get-paren-shape
lambda (stx)
#`#,(or (syntax-property stx 'paren-shape) #\( )
check-equal? (get-paren-shape) #\(
check-equal? [get-paren-shape] #\[
check-equal? get-paren-shape() #\(
check-equal? get-paren-shape[] #\[
| |
5103e59d7c225e2e994d8bce72e80936fdef106fbb5b6b6fe214a3944b635161 | kupl/LearnML | patch.ml | let rec iter ((n : int), (f : int -> int)) : int -> int =
let f2 (x : int) : int =
match n with 0 -> f x | 1 -> f x | _ -> f (iter (n - 1, f) x)
in
match n with
| 0 -> fun (__s5 : int) -> __s5
| __s6 -> fun (__s7 : int) -> iter (n - 1, f) (f __s7)
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/iter/sub44/patch.ml | ocaml | let rec iter ((n : int), (f : int -> int)) : int -> int =
let f2 (x : int) : int =
match n with 0 -> f x | 1 -> f x | _ -> f (iter (n - 1, f) x)
in
match n with
| 0 -> fun (__s5 : int) -> __s5
| __s6 -> fun (__s7 : int) -> iter (n - 1, f) (f __s7)
| |
e4ffe0ba8ca90438c8b9518a75be0f83b7a7edd25557e3b5efc3b61d362259d1 | umd-cmsc330/fall2022 | nfa.ml | open List
open Sets
(*********)
(* Types *)
(*********)
type ('q, 's) transition = 'q * 's option * 'q
type ('q, 's) nfa_t = {
sigma: 's list;
qs: 'q list;
q0: 'q;
fs: 'q list;
delta: ('q, 's) transition list;
}
(***********)
(* Utility *)
(***********)
(* explode converts a string to a character list *)
let explode (s: string) : char list =
let rec exp i l =
if i < 0 then l else exp (i - 1) (s.[i] :: l)
in
exp (String.length s - 1) []
(****************)
Part 1 : NFAs
(****************)
let move (nfa: ('q,'s) nfa_t) (qs: 'q list) (s: 's option) : 'q list =
failwith "unimplemented"
let e_closure (nfa: ('q,'s) nfa_t) (qs: 'q list) : 'q list =
failwith "unimplemented"
let accept (nfa: ('q,char) nfa_t) (s: string) : bool =
failwith "unimplemented"
(*******************************)
Part 2 : Subset Construction
(*******************************)
let new_states (nfa: ('q,'s) nfa_t) (qs: 'q list) : 'q list list =
failwith "unimplemented"
let new_trans (nfa: ('q,'s) nfa_t) (qs: 'q list) : ('q list, 's) transition list =
failwith "unimplemented"
let new_finals (nfa: ('q,'s) nfa_t) (qs: 'q list) : 'q list list =
failwith "unimplemented"
let rec nfa_to_dfa_step (nfa: ('q,'s) nfa_t) (dfa: ('q list, 's) nfa_t)
(work: 'q list list) : ('q list, 's) nfa_t =
failwith "unimplemented"
let nfa_to_dfa (nfa: ('q,'s) nfa_t) : ('q list, 's) nfa_t =
failwith "unimplemented" | null | https://raw.githubusercontent.com/umd-cmsc330/fall2022/7106b342a8fc46c4c17744152880f59dc28ea7ca/project3/src/nfa.ml | ocaml | *******
Types
*******
*********
Utility
*********
explode converts a string to a character list
**************
**************
*****************************
***************************** | open List
open Sets
type ('q, 's) transition = 'q * 's option * 'q
type ('q, 's) nfa_t = {
sigma: 's list;
qs: 'q list;
q0: 'q;
fs: 'q list;
delta: ('q, 's) transition list;
}
let explode (s: string) : char list =
let rec exp i l =
if i < 0 then l else exp (i - 1) (s.[i] :: l)
in
exp (String.length s - 1) []
Part 1 : NFAs
let move (nfa: ('q,'s) nfa_t) (qs: 'q list) (s: 's option) : 'q list =
failwith "unimplemented"
let e_closure (nfa: ('q,'s) nfa_t) (qs: 'q list) : 'q list =
failwith "unimplemented"
let accept (nfa: ('q,char) nfa_t) (s: string) : bool =
failwith "unimplemented"
Part 2 : Subset Construction
let new_states (nfa: ('q,'s) nfa_t) (qs: 'q list) : 'q list list =
failwith "unimplemented"
let new_trans (nfa: ('q,'s) nfa_t) (qs: 'q list) : ('q list, 's) transition list =
failwith "unimplemented"
let new_finals (nfa: ('q,'s) nfa_t) (qs: 'q list) : 'q list list =
failwith "unimplemented"
let rec nfa_to_dfa_step (nfa: ('q,'s) nfa_t) (dfa: ('q list, 's) nfa_t)
(work: 'q list list) : ('q list, 's) nfa_t =
failwith "unimplemented"
let nfa_to_dfa (nfa: ('q,'s) nfa_t) : ('q list, 's) nfa_t =
failwith "unimplemented" |
5cc4329cff14d50b772cb5bd43c39279a616196173d0b3d1020cfd69b40dc679 | rongarret/ergolib | bootstrap.lisp | (define-class panel name content)
(define-method (label (p panel name)) name)
(define-method (html-render (p panel content)) (html-render content))
(defmacro panel (name &rest content)
`(make-panel :name ,name :content (html-string (whos ,@content))))
(define-class panelgroup name panels)
(define-method (html-render (group panelgroup panels))
(who ((:div :class "panelGroup" :id (id group))
(for panel in panels do
(unless (stringp panel)
(htm ((:div :class "panel" :id (id panel) :style "display:none")
(hro panel))))))))
(defun panelgroup (panels)
(make-instance 'panelgroup :name (gensym "PanelGroup") :panels panels))
(defv showpanel-code "dswi.ajax.showPanel(this)")
(define-method (menu-selector (group panelgroup name panels))
(menu name (for p in panels collect (if (stringp p) p (list (label p) (id p))))
:class "panelSelector" :onchange showpanel-code))
(define-method (button-selector (group panelgroup name panels))
(html-items
(for panel in panels collect
(whobj ((:button :onclick showpanel-code :name name :value (id panel))
(str (label panel)))))))
(define-method (radio-selector (group panelgroup name panels))
(whobj
((:div :class "design-tabs toggle-group")
(for panel in panels do
(bb id (gensym)
(htm (:input :type "radio"
; ID links to label, name links to panelgroup, value links to panel
:id id :name name :value (id panel)
:onchange showpanel-code)
((:label :for id) (str (label panel)))))))))
(defv $panels-js "
function init_panels() {
$('input[type=radio]').eq(0).prop('checked',true);
$('input[type=radio]').eq(0).change();
$('select.panelSelector').change();
$('div.btn-group').each(function(n, bg) { $(bg).find('button').eq(0).click(); });
}
$(init_panels)
")
(defun bootstrap-header (title)
(who
((:META :CHARSET "utf-8"))
((:META :HTTP-EQUIV "X-UA-Compatible" :CONTENT "IE=edge"))
((:META :NAME "viewport" :CONTENT "width=device-width, initial-scale=1.0"))
(:TITLE (esc title))
(:COMMENT "Bootstrap core CSS")
(style "//maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css")
(script "//maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js")
((:A :CLASS "sr-only" :HREF "#content") "Skip navigation") ; For accessibility
))
(defun bootstrap-footer ()
(who
((:FOOTER :CLASS "cd-footer" :ROLE "contentinfo")
((:DIV :CLASS "container")
((:DIV :CLASS "cd-social")
#+nil((:UL :CLASS "cd-social-buttons")
(:LI
((:A :HREF "" :CLASS "facebook-button" :DATA-LINK-COLOR "#0069D6" :DATA-SHOW-COUNT
"true") "Facebook"))
(:LI
((:A :HREF "-innovations-inc-" :CLASS "linkedin-button" :DATA-LINK-COLOR "#0069D6" :DATA-SHOW-COUNT
"true") "LinkedIn"))
((:LI :CLASS "follow-btn")
((:A :HREF "" :CLASS "twitter-follow-button" :DATA-LINK-COLOR "#0069D6"
:DATA-SHOW-COUNT "true")
"Twitter"))))
(:P "Designed and built in the Silicon Valley by"
((:A :HREF "about.html" :TARGET "_blank") "Spark") ".")
(:P "Copyright © 2013, Spark Innovations Inc.")
((:UL :CLASS "footer-links")
(: ( (: A : HREF " / " ) " Feedback " ) )
( (: : CLASS " muted " ) " & middot ; " ) (: ( (: A : HREF " # " ) " Blog " ) )
( (: : CLASS " muted " ) " & middot ; " )
(:LI ((:A :HREF "/terms-of-use.html") "Terms of Use"))
((:LI :CLASS "muted") "·")
(:LI ((:A :HREF "/privacy.html") "Privacy Policy")))))))
(defun bootstrap-js ()
(who
(:script :src "jquery.js")
(:script :src "dswi.js")
(:script "dswi.subscribe(dswi)")
(:script :src "ajax.js")
(:script :src "bootstrap.min.js")
(:script (str $panels-js))))
(defmacro bootstrap-page (title &rest body)
`(progn
(bootstrap-header ',title)
(bootstrap-js)
(who ,@body)
(bootstrap-footer)))
(defindent "bootstrap-page" 1)
(define-method (bootstrap-render (pg panelgroup name panels))
(who
((:UL :CLASS "nav nav-tabs" :ID name)
((:LI :CLASS "active")
((:A :DATA-TOGGLE "tab" :HREF (strcat "#" (id (1st panels))))
(esc (label (1st panels)))))
(for panel in (rst panels) do
(who
(:LI ((:A :DATA-TOGGLE "tab" :HREF (strcat "#" (id panel)))
(esc (label panel)))))))
((:DIV :CLASS "tab-content" :STYLE "padding:5px;")
((:DIV :CLASS "tab-pane active" :ID (id (1st panels)))
(hro (1st panels)))
(for panel in (rst panels) do
(who
((:DIV :CLASS "tab-pane" :ID (id panel))
(hro panel)))))))
(define-method (html-render (pg panelgroup)) (bootstrap-render pg))
(defmacro bs-modal-dialog (label title &body content)
`(bb label ,label
title ,title
id (gensym "MODAL-")
aria-id (gensym "MODAL-ARIA-")
(who
((:A :DATA-TOGGLE "modal" :HREF (strcat "#" id) :CLASS
"btn btn-lg btn-primary") (esc label))
((:DIV :CLASS "modal fade" :ID id :TABINDEX "-1" :ROLE "dialog"
:ARIA-LABELLEDBY aria-id :ARIA-HIDDEN "true")
((:DIV :CLASS "modal-dialog")
((:DIV :CLASS "modal-content")
((:DIV :CLASS "modal-header")
((:BUTTON :TYPE "button" :CLASS "close" :DATA-DISMISS "modal" :ARIA-HIDDEN
"true") "×")
((:H4 :ID aria-id :CLASS "modal-title")
(esc title)))
((:DIV :CLASS "modal-body") ,@content)
((:DIV :CLASS "modal-footer")
((:BUTTON :TYPE "button" :CLASS "btn btn-default" :DATA-DISMISS "modal")
"Dismiss"))))))))
(defindent "bs-modal-dialog" 2)
(defmacro bs-modal-form (method action label title &body content)
`(bb label ,label
title ,title
id (gensym "MODAL-")
aria-id (gensym "MODAL-ARIA-")
(who
((:button :DATA-TOGGLE "modal" :HREF (strcat "#" id) :CLASS "btn btn-sm")
(esc label))
((:DIV :CLASS "modal fade" :ID id :TABINDEX "-1" :ROLE "dialog"
:ARIA-LABELLEDBY aria-id :ARIA-HIDDEN "true")
((:DIV :CLASS "modal-dialog")
((:DIV :CLASS "modal-content")
((:form :method ,method :action ,action)
((:DIV :CLASS "modal-header")
((:BUTTON :TYPE "button" :CLASS "close" :DATA-DISMISS "modal" :ARIA-HIDDEN
"true") "×")
((:H4 :ID aria-id :CLASS "modal-title")
(esc title)))
((:DIV :CLASS "modal-body") ,@content)
((:DIV :CLASS "modal-footer")
((:BUTTON :TYPE "button" :CLASS "btn btn-default" :DATA-DISMISS "modal")
"Cancel")
((:INPUT :TYPE "SUBMIT" :CLASS "btn btn-primary"))))))))))
(defindent "bs-modal-form" 4)
(defun bs-menu (id items)
(who
((:SELECT :CLASS "form-control" :NAME id)
(for item in items do
(who ((:option :value (id item))
(esc (label item))))))))
(defun bs-radio-buttons (id items)
(for item in items do
(who
((:DIV :CLASS "input-group")
((:SPAN :CLASS "input-group-addon")
((:INPUT :TYPE "radio" :NAME id :VALUE (id item) :ID (id item))))
((:LABEL :CLASS "form-control" :FOR (id item)) (esc (label item)))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Experiments
(defpage "/bs-test"
(bootstrap-page "test"
(bootstrap-render
(panelgroup
(list (panel "Panel 1"
(bs-menu "m1" '("item1" "item2" "item3"))
(bs-menu "m2" '(one two three))
)
(panel "Panel 2"
(bs-menu "m3" '("item1" "item2" "item3"))
(bs-radio-buttons "rb1" '(foo baz bar)))
(panel "Panel 3"
(bootstrap-render
(panelgroup
(list (panel "Panel 3a" "Panel 3a content")
(panel "Panel 3b" "Panel 3b content")
(panel "Panel 3c" "Panel 3c content"))))))))
(bs-modal-form :post "/show-form-params" "Dialog1" "Test Dialog One"
(bootstrap-render
(panelgroup
(list (panel "Panel 1x"
(bs-menu "m1" '("item1" "item2" "item3"))
(bs-menu "m2" '(one two three))
)
(panel "Panel 2x"
(bs-menu "m3" '("item1" "item2" "item3"))
(bs-radio-buttons "rb1" '(foo baz bar)))
(panel "Panel 3x"
(bootstrap-render
(panelgroup
(list (panel "Panel 3xa" "Panel 3a content")
(panel "Panel 3xb" "Panel 3b content")
(panel "Panel 3xc" "Panel 3c content")))))))))))
(defpage "/panel-test"
(bb pg (panelgroup (list (panel "p1" "content1") (panel "p2" "content2")))
(who (hro (button-selector pg))
(hro pg)
(:script :src "jquery.js")
(:script :src "dswi.js")
(:script "dswi.subscribe(dswi)")
(:script :src "ajax.js")
(:script (str $panels-js)))))
(defpage "/show-form-params" (str (get-form-parameters)))
| null | https://raw.githubusercontent.com/rongarret/ergolib/757e67471251ed1329e5c35c008fb69964567994/web/bootstrap.lisp | lisp | ID links to label, name links to panelgroup, value links to panel
});
For accessibility
Experiments | (define-class panel name content)
(define-method (label (p panel name)) name)
(define-method (html-render (p panel content)) (html-render content))
(defmacro panel (name &rest content)
`(make-panel :name ,name :content (html-string (whos ,@content))))
(define-class panelgroup name panels)
(define-method (html-render (group panelgroup panels))
(who ((:div :class "panelGroup" :id (id group))
(for panel in panels do
(unless (stringp panel)
(htm ((:div :class "panel" :id (id panel) :style "display:none")
(hro panel))))))))
(defun panelgroup (panels)
(make-instance 'panelgroup :name (gensym "PanelGroup") :panels panels))
(defv showpanel-code "dswi.ajax.showPanel(this)")
(define-method (menu-selector (group panelgroup name panels))
(menu name (for p in panels collect (if (stringp p) p (list (label p) (id p))))
:class "panelSelector" :onchange showpanel-code))
(define-method (button-selector (group panelgroup name panels))
(html-items
(for panel in panels collect
(whobj ((:button :onclick showpanel-code :name name :value (id panel))
(str (label panel)))))))
(define-method (radio-selector (group panelgroup name panels))
(whobj
((:div :class "design-tabs toggle-group")
(for panel in panels do
(bb id (gensym)
(htm (:input :type "radio"
:id id :name name :value (id panel)
:onchange showpanel-code)
((:label :for id) (str (label panel)))))))))
(defv $panels-js "
function init_panels() {
}
$(init_panels)
")
(defun bootstrap-header (title)
(who
((:META :CHARSET "utf-8"))
((:META :HTTP-EQUIV "X-UA-Compatible" :CONTENT "IE=edge"))
((:META :NAME "viewport" :CONTENT "width=device-width, initial-scale=1.0"))
(:TITLE (esc title))
(:COMMENT "Bootstrap core CSS")
(style "//maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css")
(script "//maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js")
))
(defun bootstrap-footer ()
(who
((:FOOTER :CLASS "cd-footer" :ROLE "contentinfo")
((:DIV :CLASS "container")
((:DIV :CLASS "cd-social")
#+nil((:UL :CLASS "cd-social-buttons")
(:LI
((:A :HREF "" :CLASS "facebook-button" :DATA-LINK-COLOR "#0069D6" :DATA-SHOW-COUNT
"true") "Facebook"))
(:LI
((:A :HREF "-innovations-inc-" :CLASS "linkedin-button" :DATA-LINK-COLOR "#0069D6" :DATA-SHOW-COUNT
"true") "LinkedIn"))
((:LI :CLASS "follow-btn")
((:A :HREF "" :CLASS "twitter-follow-button" :DATA-LINK-COLOR "#0069D6"
:DATA-SHOW-COUNT "true")
"Twitter"))))
(:P "Designed and built in the Silicon Valley by"
((:A :HREF "about.html" :TARGET "_blank") "Spark") ".")
(:P "Copyright © 2013, Spark Innovations Inc.")
((:UL :CLASS "footer-links")
(: ( (: A : HREF " / " ) " Feedback " ) )
( (: : CLASS " muted " ) " & middot ; " ) (: ( (: A : HREF " # " ) " Blog " ) )
( (: : CLASS " muted " ) " & middot ; " )
(:LI ((:A :HREF "/terms-of-use.html") "Terms of Use"))
((:LI :CLASS "muted") "·")
(:LI ((:A :HREF "/privacy.html") "Privacy Policy")))))))
(defun bootstrap-js ()
(who
(:script :src "jquery.js")
(:script :src "dswi.js")
(:script "dswi.subscribe(dswi)")
(:script :src "ajax.js")
(:script :src "bootstrap.min.js")
(:script (str $panels-js))))
(defmacro bootstrap-page (title &rest body)
`(progn
(bootstrap-header ',title)
(bootstrap-js)
(who ,@body)
(bootstrap-footer)))
(defindent "bootstrap-page" 1)
(define-method (bootstrap-render (pg panelgroup name panels))
(who
((:UL :CLASS "nav nav-tabs" :ID name)
((:LI :CLASS "active")
((:A :DATA-TOGGLE "tab" :HREF (strcat "#" (id (1st panels))))
(esc (label (1st panels)))))
(for panel in (rst panels) do
(who
(:LI ((:A :DATA-TOGGLE "tab" :HREF (strcat "#" (id panel)))
(esc (label panel)))))))
((:DIV :CLASS "tab-content" :STYLE "padding:5px;")
((:DIV :CLASS "tab-pane active" :ID (id (1st panels)))
(hro (1st panels)))
(for panel in (rst panels) do
(who
((:DIV :CLASS "tab-pane" :ID (id panel))
(hro panel)))))))
(define-method (html-render (pg panelgroup)) (bootstrap-render pg))
(defmacro bs-modal-dialog (label title &body content)
`(bb label ,label
title ,title
id (gensym "MODAL-")
aria-id (gensym "MODAL-ARIA-")
(who
((:A :DATA-TOGGLE "modal" :HREF (strcat "#" id) :CLASS
"btn btn-lg btn-primary") (esc label))
((:DIV :CLASS "modal fade" :ID id :TABINDEX "-1" :ROLE "dialog"
:ARIA-LABELLEDBY aria-id :ARIA-HIDDEN "true")
((:DIV :CLASS "modal-dialog")
((:DIV :CLASS "modal-content")
((:DIV :CLASS "modal-header")
((:BUTTON :TYPE "button" :CLASS "close" :DATA-DISMISS "modal" :ARIA-HIDDEN
"true") "×")
((:H4 :ID aria-id :CLASS "modal-title")
(esc title)))
((:DIV :CLASS "modal-body") ,@content)
((:DIV :CLASS "modal-footer")
((:BUTTON :TYPE "button" :CLASS "btn btn-default" :DATA-DISMISS "modal")
"Dismiss"))))))))
(defindent "bs-modal-dialog" 2)
(defmacro bs-modal-form (method action label title &body content)
`(bb label ,label
title ,title
id (gensym "MODAL-")
aria-id (gensym "MODAL-ARIA-")
(who
((:button :DATA-TOGGLE "modal" :HREF (strcat "#" id) :CLASS "btn btn-sm")
(esc label))
((:DIV :CLASS "modal fade" :ID id :TABINDEX "-1" :ROLE "dialog"
:ARIA-LABELLEDBY aria-id :ARIA-HIDDEN "true")
((:DIV :CLASS "modal-dialog")
((:DIV :CLASS "modal-content")
((:form :method ,method :action ,action)
((:DIV :CLASS "modal-header")
((:BUTTON :TYPE "button" :CLASS "close" :DATA-DISMISS "modal" :ARIA-HIDDEN
"true") "×")
((:H4 :ID aria-id :CLASS "modal-title")
(esc title)))
((:DIV :CLASS "modal-body") ,@content)
((:DIV :CLASS "modal-footer")
((:BUTTON :TYPE "button" :CLASS "btn btn-default" :DATA-DISMISS "modal")
"Cancel")
((:INPUT :TYPE "SUBMIT" :CLASS "btn btn-primary"))))))))))
(defindent "bs-modal-form" 4)
(defun bs-menu (id items)
(who
((:SELECT :CLASS "form-control" :NAME id)
(for item in items do
(who ((:option :value (id item))
(esc (label item))))))))
(defun bs-radio-buttons (id items)
(for item in items do
(who
((:DIV :CLASS "input-group")
((:SPAN :CLASS "input-group-addon")
((:INPUT :TYPE "radio" :NAME id :VALUE (id item) :ID (id item))))
((:LABEL :CLASS "form-control" :FOR (id item)) (esc (label item)))))))
(defpage "/bs-test"
(bootstrap-page "test"
(bootstrap-render
(panelgroup
(list (panel "Panel 1"
(bs-menu "m1" '("item1" "item2" "item3"))
(bs-menu "m2" '(one two three))
)
(panel "Panel 2"
(bs-menu "m3" '("item1" "item2" "item3"))
(bs-radio-buttons "rb1" '(foo baz bar)))
(panel "Panel 3"
(bootstrap-render
(panelgroup
(list (panel "Panel 3a" "Panel 3a content")
(panel "Panel 3b" "Panel 3b content")
(panel "Panel 3c" "Panel 3c content"))))))))
(bs-modal-form :post "/show-form-params" "Dialog1" "Test Dialog One"
(bootstrap-render
(panelgroup
(list (panel "Panel 1x"
(bs-menu "m1" '("item1" "item2" "item3"))
(bs-menu "m2" '(one two three))
)
(panel "Panel 2x"
(bs-menu "m3" '("item1" "item2" "item3"))
(bs-radio-buttons "rb1" '(foo baz bar)))
(panel "Panel 3x"
(bootstrap-render
(panelgroup
(list (panel "Panel 3xa" "Panel 3a content")
(panel "Panel 3xb" "Panel 3b content")
(panel "Panel 3xc" "Panel 3c content")))))))))))
(defpage "/panel-test"
(bb pg (panelgroup (list (panel "p1" "content1") (panel "p2" "content2")))
(who (hro (button-selector pg))
(hro pg)
(:script :src "jquery.js")
(:script :src "dswi.js")
(:script "dswi.subscribe(dswi)")
(:script :src "ajax.js")
(:script (str $panels-js)))))
(defpage "/show-form-params" (str (get-form-parameters)))
|
3ab5d23335c2bbfee33a63418508c27c9b6d22c98ce4d77a22b362b51e35f170 | cjlarose/de-jong | points_calculator.cljs | (ns de-jong.points-calculator)
(defn- vertex-array [length]
(js/Float32Array. (* 3 length)))
(defn random-vals [minimum maximum]
(repeatedly #(+ (rand (- maximum minimum)) minimum)))
(defn- write-random-values! [minimum maximum vertices]
(let [length (.-length vertices)]
(loop [i 0
values (random-vals minimum maximum)]
(if (< i length)
(do
(aset vertices i (first values))
(recur (inc i) (rest values)))))
vertices))
(defn random-vertex-array [length minimum maximum]
(let [arr (vertex-array length)]
(write-random-values! minimum maximum arr)))
| null | https://raw.githubusercontent.com/cjlarose/de-jong/b627e46e3e3c42ca3fa3cf786218086211a1e6d7/src/de_jong/points_calculator.cljs | clojure | (ns de-jong.points-calculator)
(defn- vertex-array [length]
(js/Float32Array. (* 3 length)))
(defn random-vals [minimum maximum]
(repeatedly #(+ (rand (- maximum minimum)) minimum)))
(defn- write-random-values! [minimum maximum vertices]
(let [length (.-length vertices)]
(loop [i 0
values (random-vals minimum maximum)]
(if (< i length)
(do
(aset vertices i (first values))
(recur (inc i) (rest values)))))
vertices))
(defn random-vertex-array [length minimum maximum]
(let [arr (vertex-array length)]
(write-random-values! minimum maximum arr)))
| |
a3e8efff16ca4cd5728d157a05d9804e873c0cf399ffe2ec3331409ec4893c5d | nikita-volkov/graph-db | Demo.hs |
import BasicPrelude
import GHC.Generics (Generic)
import qualified GraphDB as G
import qualified Data.Text as Text
-- * Model
-------------------------
data Catalogue = Catalogue deriving (Show, Eq, Generic)
data Artist = Artist Name deriving (Show, Eq, Generic)
data Genre = Genre Name deriving (Show, Eq, Generic)
type Name = Text
-- * Relations
-------------------------
instance G.Edge Catalogue Artist where
data Index Catalogue Artist =
Catalogue_Artist |
Catalogue_Artist_SearchTerm Text
deriving (Show, Eq, Generic)
indexes (Artist name) =
Catalogue_Artist :
searchTerms
where
searchTerms = map Catalogue_Artist_SearchTerm $ Text.words name
instance G.Edge Catalogue Genre where
data Index Catalogue Genre =
Catalogue_Genre |
Catalogue_Genre_Name Text
deriving (Show, Eq, Generic)
indexes (Genre name) =
Catalogue_Genre :
Catalogue_Genre_Name name :
[]
instance G.Edge Genre Artist where
data Index Genre Artist =
Genre_Artist
deriving (Show, Eq, Generic)
indexes (Artist name) =
[Genre_Artist]
G.deriveSetup ''Catalogue
main = do
putStrLn "Restoring the graph from the storage."
G.runPersistentSession (Catalogue, "./dist/demo/db", 1) $ do
do
G.read G.getStats >>= \case
(1, 0, 0) -> do
liftIO $ putStrLn "Graph is empty. Populating."
G.write $ populate
_ -> return ()
do
G.read G.getStats >>= \(nodes, edges, indexes) ->
liftIO $ putStrLn $
"There's " <> show nodes <> " nodes, " <> show edges <> " edges " <>
"and " <> show indexes <> " indexes in the graph."
do
liftIO $ putStrLn "Artists by the search term \"The\":"
liftIO . print =<< do
G.read $
G.getRoot >>=
flip G.getTargets (Catalogue_Artist_SearchTerm "The") >>=
mapM G.getValue
do
liftIO $ putStrLn "Artists by the genre named \"Rock\":"
liftIO . print =<< do
G.read $
G.getRoot >>=
flip G.getTargets (Catalogue_Genre_Name "Rock") >>=
fmap join . mapM (flip G.getTargets (Genre_Artist)) >>=
mapM G.getValue
populate :: G.Write s Catalogue t ()
populate = do
root <- G.getRoot
rollingStones <- G.newNode $ Artist "The Rolling Stones"
beatles <- G.newNode $ Artist "The Beatles"
metallica <- G.newNode $ Artist "Metallica"
nirvana <- G.newNode $ Artist "Nirvana"
rock <- G.newNode $ Genre "Rock"
grunge <- G.newNode $ Genre "Grunge"
metal <- G.newNode $ Genre "Metal"
G.addTarget root rollingStones
G.addTarget root beatles
G.addTarget root metallica
G.addTarget root nirvana
G.addTarget root rock
G.addTarget root grunge
G.addTarget root metal
G.addTarget rock rollingStones
G.addTarget rock beatles
G.addTarget rock metallica
G.addTarget rock nirvana
G.addTarget grunge nirvana
G.addTarget metal metallica
| null | https://raw.githubusercontent.com/nikita-volkov/graph-db/3e886f6b298d2b2b09eb94c2818a7b648f42cb0a/executables/Demo.hs | haskell | * Model
-----------------------
* Relations
----------------------- |
import BasicPrelude
import GHC.Generics (Generic)
import qualified GraphDB as G
import qualified Data.Text as Text
data Catalogue = Catalogue deriving (Show, Eq, Generic)
data Artist = Artist Name deriving (Show, Eq, Generic)
data Genre = Genre Name deriving (Show, Eq, Generic)
type Name = Text
instance G.Edge Catalogue Artist where
data Index Catalogue Artist =
Catalogue_Artist |
Catalogue_Artist_SearchTerm Text
deriving (Show, Eq, Generic)
indexes (Artist name) =
Catalogue_Artist :
searchTerms
where
searchTerms = map Catalogue_Artist_SearchTerm $ Text.words name
instance G.Edge Catalogue Genre where
data Index Catalogue Genre =
Catalogue_Genre |
Catalogue_Genre_Name Text
deriving (Show, Eq, Generic)
indexes (Genre name) =
Catalogue_Genre :
Catalogue_Genre_Name name :
[]
instance G.Edge Genre Artist where
data Index Genre Artist =
Genre_Artist
deriving (Show, Eq, Generic)
indexes (Artist name) =
[Genre_Artist]
G.deriveSetup ''Catalogue
main = do
putStrLn "Restoring the graph from the storage."
G.runPersistentSession (Catalogue, "./dist/demo/db", 1) $ do
do
G.read G.getStats >>= \case
(1, 0, 0) -> do
liftIO $ putStrLn "Graph is empty. Populating."
G.write $ populate
_ -> return ()
do
G.read G.getStats >>= \(nodes, edges, indexes) ->
liftIO $ putStrLn $
"There's " <> show nodes <> " nodes, " <> show edges <> " edges " <>
"and " <> show indexes <> " indexes in the graph."
do
liftIO $ putStrLn "Artists by the search term \"The\":"
liftIO . print =<< do
G.read $
G.getRoot >>=
flip G.getTargets (Catalogue_Artist_SearchTerm "The") >>=
mapM G.getValue
do
liftIO $ putStrLn "Artists by the genre named \"Rock\":"
liftIO . print =<< do
G.read $
G.getRoot >>=
flip G.getTargets (Catalogue_Genre_Name "Rock") >>=
fmap join . mapM (flip G.getTargets (Genre_Artist)) >>=
mapM G.getValue
populate :: G.Write s Catalogue t ()
populate = do
root <- G.getRoot
rollingStones <- G.newNode $ Artist "The Rolling Stones"
beatles <- G.newNode $ Artist "The Beatles"
metallica <- G.newNode $ Artist "Metallica"
nirvana <- G.newNode $ Artist "Nirvana"
rock <- G.newNode $ Genre "Rock"
grunge <- G.newNode $ Genre "Grunge"
metal <- G.newNode $ Genre "Metal"
G.addTarget root rollingStones
G.addTarget root beatles
G.addTarget root metallica
G.addTarget root nirvana
G.addTarget root rock
G.addTarget root grunge
G.addTarget root metal
G.addTarget rock rollingStones
G.addTarget rock beatles
G.addTarget rock metallica
G.addTarget rock nirvana
G.addTarget grunge nirvana
G.addTarget metal metallica
|
70275951b87e6bb2822bb3b26ea298830f21c4d7ee01ff43aaa47d0c18c89ccb | karlhof26/gimp-scheme | FU_artist_palette-knife.scm | ; FU_artist_palette-knife.scm
version 2.9 [ gimphelp.org ]
last modified / tested by
02/15/2014 on GIMP-2.8.10
;
; 02/15/2014 - work with non-rgb, merge option and install info added
;==============================================================
;
; Installation:
; This script should be placed in the user or system-wide script folder.
;
; Windows 10
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
; or
C:\Users\YOUR - NAME\.gimp-2.10\scripts
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
; C:\Documents and Settings\yourname\.gimp-2.10\scripts
;
; Linux
/home / yourname/.gimp-2.8 / scripts
;
; Linux system-wide
; /usr/share/gimp/2.0/scripts
;
; ALSO NEED TO COPY:
; ev_paletknife2.txt
;
; Windows 10
C:\Program Files\GIMP 2\share\gimp\2.0\gimpressionist\presets
; or
C:\Users\YOUR - NAME\.gimp-2.10\gimpressionist\presets
; or
; C:\Program Files\GIMP 2\share\gimp\2.0\gimpressionist\preset
; or
; C:\Documents and Settings\yourname\.gimp-2.10\gimpressionist\presets
;
; Linux
/home / yourname/.gimp-2.10 / gimpressionist / presets
; or
; Linux - system-wide
; /usr/share/gimp/2.0/gimpressionist/Presets
;==============================================================
;
; LICENSE
;
; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
You should have received a copy of the GNU General Public License
; along with this program. If not, see </>.
;
;==============================================================
; Original information
Palette Knife image script for GIMP 2.2
Copyright ( C ) 2007 < >
;
Update by karlhof26
; The orginal presets are around on the web but I have used the best default values to ensure no dependencies.
;==============================================================
(define (FU-paletteknife
img
drawable
gimpressOption
gimpressName
inMerge
)
(gimp-image-undo-group-start img)
(define indexed (car (gimp-drawable-is-indexed drawable)))
(if (= indexed TRUE)(gimp-image-convert-rgb img))
(let* (
(width (car (gimp-drawable-width drawable)))
(height (car (gimp-drawable-height drawable)))
(old-selection (car (gimp-selection-save img)))
(image-type (car (gimp-image-base-type img)))
(layer-type (car (gimp-drawable-type drawable)))
(layer-temp1 (car (gimp-layer-new img width height layer-type "temp1" 100 LAYER-MODE-NORMAL)))
)
(if (eqv? (car (gimp-selection-is-empty img)) TRUE)
(gimp-drawable-fill old-selection FILL-WHITE)) ; so Empty and All are the same.
(gimp-selection-none img)
(gimp-drawable-fill layer-temp1 FILL-TRANSPARENT)
(gimp-image-insert-layer img layer-temp1 0 -1)
(gimp-layer-add-alpha layer-temp1)
(gimp-edit-copy drawable)
(gimp-floating-sel-anchor (car (gimp-edit-paste layer-temp1 0)))
(cond
(( = gimpressOption 0)
(plug-in-gimpressionist 1 img layer-temp1 "Wormcan")
)
(( = gimpressOption 1)
(plug-in-gimpressionist 1 img layer-temp1 "Felt-marker")
)
(( = gimpressOption 2)
(plug-in-gimpressionist 1 img layer-temp1 "Line-art-2")
)
(( = gimpressOption 3)
(plug-in-gimpressionist 1 img layer-temp1 "ev_paletknife2")
)
(( = gimpressOption 4)
(plug-in-gimpressionist 1 img layer-temp1 gimpressName)
)
)
( gimp - levels layer - temp1 0 0 255 0.5 0 255 )
(gimp-drawable-levels layer-temp1 HISTOGRAM-VALUE 0.0 1.0 TRUE 0.5 0.0 1.0 TRUE)
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-selection-invert img)
(if (eqv? (car (gimp-selection-is-empty img)) FALSE) ; both Empty and All are denied
(begin
(gimp-edit-clear layer-temp1)
)
)
(gimp-item-set-name layer-temp1 "Palette knife")
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-image-remove-channel img old-selection)
(gimp-drawable-levels-stretch layer-temp1)
(if (= inMerge TRUE)(gimp-image-merge-visible-layers img EXPAND-AS-NECESSARY))
(gimp-image-undo-group-end img)
(gimp-displays-flush)
)
)
(script-fu-register "FU-paletteknife"
"<Toolbox>/Script-Fu/Artist/Palette Knife"
"Creates a drawing effect like made with a palette knife, based on the Gimpressionist. Gimpressionist option can now be selected. The ev_paletknife2 option needs to be fetched or created. \n file:FU_artist_palette-knife"
"Eddy Verlinden <>"
"Eddy Verlinden"
"2007, juli"
"*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-OPTION "Gimpressionist option" '("Wormcan" "Felt-marker" "Line-art-2" "ev_paletknife2" "Setting below")
SF-STRING "Gimpressionist setting name (use with option)" "Dotify"
SF-TOGGLE "Merge layers when complete?" FALSE
)
; end of script | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/50d5917de653ad15747da554f58884174c4bb652/FU_artist_palette-knife.scm | scheme | FU_artist_palette-knife.scm
02/15/2014 - work with non-rgb, merge option and install info added
==============================================================
Installation:
This script should be placed in the user or system-wide script folder.
Windows 10
or
C:\Documents and Settings\yourname\.gimp-2.10\scripts
Linux
Linux system-wide
/usr/share/gimp/2.0/scripts
ALSO NEED TO COPY:
ev_paletknife2.txt
Windows 10
or
or
C:\Program Files\GIMP 2\share\gimp\2.0\gimpressionist\preset
or
C:\Documents and Settings\yourname\.gimp-2.10\gimpressionist\presets
Linux
or
Linux - system-wide
/usr/share/gimp/2.0/gimpressionist/Presets
==============================================================
LICENSE
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
==============================================================
Original information
The orginal presets are around on the web but I have used the best default values to ensure no dependencies.
==============================================================
so Empty and All are the same.
both Empty and All are denied
end of script | version 2.9 [ gimphelp.org ]
last modified / tested by
02/15/2014 on GIMP-2.8.10
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
C:\Users\YOUR - NAME\.gimp-2.10\scripts
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
/home / yourname/.gimp-2.8 / scripts
C:\Program Files\GIMP 2\share\gimp\2.0\gimpressionist\presets
C:\Users\YOUR - NAME\.gimp-2.10\gimpressionist\presets
/home / yourname/.gimp-2.10 / gimpressionist / presets
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Palette Knife image script for GIMP 2.2
Copyright ( C ) 2007 < >
Update by karlhof26
(define (FU-paletteknife
img
drawable
gimpressOption
gimpressName
inMerge
)
(gimp-image-undo-group-start img)
(define indexed (car (gimp-drawable-is-indexed drawable)))
(if (= indexed TRUE)(gimp-image-convert-rgb img))
(let* (
(width (car (gimp-drawable-width drawable)))
(height (car (gimp-drawable-height drawable)))
(old-selection (car (gimp-selection-save img)))
(image-type (car (gimp-image-base-type img)))
(layer-type (car (gimp-drawable-type drawable)))
(layer-temp1 (car (gimp-layer-new img width height layer-type "temp1" 100 LAYER-MODE-NORMAL)))
)
(if (eqv? (car (gimp-selection-is-empty img)) TRUE)
(gimp-selection-none img)
(gimp-drawable-fill layer-temp1 FILL-TRANSPARENT)
(gimp-image-insert-layer img layer-temp1 0 -1)
(gimp-layer-add-alpha layer-temp1)
(gimp-edit-copy drawable)
(gimp-floating-sel-anchor (car (gimp-edit-paste layer-temp1 0)))
(cond
(( = gimpressOption 0)
(plug-in-gimpressionist 1 img layer-temp1 "Wormcan")
)
(( = gimpressOption 1)
(plug-in-gimpressionist 1 img layer-temp1 "Felt-marker")
)
(( = gimpressOption 2)
(plug-in-gimpressionist 1 img layer-temp1 "Line-art-2")
)
(( = gimpressOption 3)
(plug-in-gimpressionist 1 img layer-temp1 "ev_paletknife2")
)
(( = gimpressOption 4)
(plug-in-gimpressionist 1 img layer-temp1 gimpressName)
)
)
( gimp - levels layer - temp1 0 0 255 0.5 0 255 )
(gimp-drawable-levels layer-temp1 HISTOGRAM-VALUE 0.0 1.0 TRUE 0.5 0.0 1.0 TRUE)
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-selection-invert img)
(begin
(gimp-edit-clear layer-temp1)
)
)
(gimp-item-set-name layer-temp1 "Palette knife")
(gimp-image-select-item img CHANNEL-OP-REPLACE old-selection)
(gimp-image-remove-channel img old-selection)
(gimp-drawable-levels-stretch layer-temp1)
(if (= inMerge TRUE)(gimp-image-merge-visible-layers img EXPAND-AS-NECESSARY))
(gimp-image-undo-group-end img)
(gimp-displays-flush)
)
)
(script-fu-register "FU-paletteknife"
"<Toolbox>/Script-Fu/Artist/Palette Knife"
"Creates a drawing effect like made with a palette knife, based on the Gimpressionist. Gimpressionist option can now be selected. The ev_paletknife2 option needs to be fetched or created. \n file:FU_artist_palette-knife"
"Eddy Verlinden <>"
"Eddy Verlinden"
"2007, juli"
"*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-OPTION "Gimpressionist option" '("Wormcan" "Felt-marker" "Line-art-2" "ev_paletknife2" "Setting below")
SF-STRING "Gimpressionist setting name (use with option)" "Dotify"
SF-TOGGLE "Merge layers when complete?" FALSE
)
|
0c058d311b74fd51bdeca970e60c5600c9e56610d38131fe1d182368e62ed8f4 | cnuernber/dtype-next | double_ops.clj | (ns tech.v3.datatype.double-ops
(:require [clj-commons.primitive-math :as pmath]))
(defmacro SIGNIFICAND-BITS
[]
`52)
(defmacro SIGNIFICAND-MASK [] `0x000fffffffffffff)
(defmacro IMPLICIT-BIT [] `(pmath/+ (SIGNIFICAND-MASK) 1))
(defmacro is-finite?
[x]
`(<= (Math/getExponent ~x)
Double/MAX_EXPONENT))
(defn get-significand
^long [^double d]
(when-not (is-finite? d)
(throw (Exception. "not a normal value")))
(let [exponent (Math/getExponent d)
bits (Double/doubleToRawLongBits d)
bits (bit-and bits (SIGNIFICAND-MASK))]
(if (== exponent (- Double/MIN_EXPONENT 1))
(bit-shift-left bits 1)
(bit-or bits (IMPLICIT-BIT)))))
#L279-L290
(defmacro is-mathematical-integer?
[x]
`(let [x# ~x]
(boolean
(and (is-finite? x#)
(or (pmath/== x# 0.0)
(pmath/<= (- (SIGNIFICAND-BITS)
(Long/numberOfTrailingZeros (get-significand x#)))
(Math/getExponent x#)))))))
| null | https://raw.githubusercontent.com/cnuernber/dtype-next/7f3d85d159c3a74d5fca53c8c50243812e0da4e2/src/tech/v3/datatype/double_ops.clj | clojure | (ns tech.v3.datatype.double-ops
(:require [clj-commons.primitive-math :as pmath]))
(defmacro SIGNIFICAND-BITS
[]
`52)
(defmacro SIGNIFICAND-MASK [] `0x000fffffffffffff)
(defmacro IMPLICIT-BIT [] `(pmath/+ (SIGNIFICAND-MASK) 1))
(defmacro is-finite?
[x]
`(<= (Math/getExponent ~x)
Double/MAX_EXPONENT))
(defn get-significand
^long [^double d]
(when-not (is-finite? d)
(throw (Exception. "not a normal value")))
(let [exponent (Math/getExponent d)
bits (Double/doubleToRawLongBits d)
bits (bit-and bits (SIGNIFICAND-MASK))]
(if (== exponent (- Double/MIN_EXPONENT 1))
(bit-shift-left bits 1)
(bit-or bits (IMPLICIT-BIT)))))
#L279-L290
(defmacro is-mathematical-integer?
[x]
`(let [x# ~x]
(boolean
(and (is-finite? x#)
(or (pmath/== x# 0.0)
(pmath/<= (- (SIGNIFICAND-BITS)
(Long/numberOfTrailingZeros (get-significand x#)))
(Math/getExponent x#)))))))
| |
5a120e7b570ae38ea39e58513c92d1cc2e3501cd41eb80108297a9b7a51f6261 | fizruk/snakes-demo | Snakes.hs | module Snakes (
module Snakes.Bot,
module Snakes.Config,
module Snakes.Control,
module Snakes.Model,
module Snakes.Render,
) where
import Snakes.Bot
import Snakes.Config
import Snakes.Control
import Snakes.Model
import Snakes.Render
| null | https://raw.githubusercontent.com/fizruk/snakes-demo/1844a047ccebfb2d9753530ccb3d9f51aa1b42b2/src/Snakes.hs | haskell | module Snakes (
module Snakes.Bot,
module Snakes.Config,
module Snakes.Control,
module Snakes.Model,
module Snakes.Render,
) where
import Snakes.Bot
import Snakes.Config
import Snakes.Control
import Snakes.Model
import Snakes.Render
| |
c5097122dea1831a8005d88098249a30da9ec799d790dbf16240c0c942f77a2b | fukamachi/docker-cl-example | application.lisp | (defpackage #:docker-cl-example/config/application
(:use #:cl
#:utopian)
(:import-from #:lack.component
#:to-app
#:call)
(:import-from #:lack
#:builder)
(:import-from #:cl-ppcre)
(:export #:docker-cl-example-app))
(in-package #:docker-cl-example/config/application)
(defapp docker-cl-example-app ()
()
(:config #P"environments/")
(: content - type " text / html ; charset = utf-8 " )
)
(defmethod to-app ((app docker-cl-example-app))
(builder
(:static
:path (lambda (path)
(if (ppcre:scan "^(?:/assets/|/robot\\.txt$|/favicon\\.ico$)" path)
path
nil))
:root (asdf:system-relative-pathname :docker-cl-example #P"public/"))
:accesslog
(:mito (db-settings :maindb))
:session
(call-next-method)))
| null | https://raw.githubusercontent.com/fukamachi/docker-cl-example/dccaf44ea71dab730ce74097990c36ec275ff69c/standard/config/application.lisp | lisp | (defpackage #:docker-cl-example/config/application
(:use #:cl
#:utopian)
(:import-from #:lack.component
#:to-app
#:call)
(:import-from #:lack
#:builder)
(:import-from #:cl-ppcre)
(:export #:docker-cl-example-app))
(in-package #:docker-cl-example/config/application)
(defapp docker-cl-example-app ()
()
(:config #P"environments/")
(: content - type " text / html ; charset = utf-8 " )
)
(defmethod to-app ((app docker-cl-example-app))
(builder
(:static
:path (lambda (path)
(if (ppcre:scan "^(?:/assets/|/robot\\.txt$|/favicon\\.ico$)" path)
path
nil))
:root (asdf:system-relative-pathname :docker-cl-example #P"public/"))
:accesslog
(:mito (db-settings :maindb))
:session
(call-next-method)))
| |
34e959e5881c9869bab9ccbab1eace0105d2a59ae0cf3599010cad77c6f7354d | LaurentRDC/pandoc-plot | Logging.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : $header$
Copyright : ( c ) , 2019 - present
License : GNU GPL , version 2 or above
-- Maintainer :
-- Stability : internal
-- Portability : portable
--
-- Logging primitives.
module Text.Pandoc.Filter.Plot.Monad.Logging
( MonadLogger (..),
Verbosity (..),
LogSink (..),
Logger (..),
withLogger,
terminateLogging,
-- * Logging messages
debug,
err,
warning,
info,
strict,
)
where
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, takeMVar)
import Control.Monad (forM_, forever, void, when)
import Control.Monad.IO.Class (MonadIO (..))
import Data.Char (toLower)
import Data.List (intercalate)
import Data.String (IsString (..))
import Data.Text (Text, unpack)
import qualified Data.Text as T
import Data.Text.IO as TIO (appendFile, hPutStr)
import Data.Yaml (FromJSON (parseJSON), Value (String))
import System.IO (stderr)
import Prelude hiding (log)
-- | Verbosity of the logger.
data Verbosity
= -- | Log all messages, including debug messages.
Debug
| -- | Log information, warning, and error messages.
Info
| -- | Log warning and error messages.
Warning
| -- | Only log errors.
Error
| -- | Don't log anything.
Silent
deriving (Eq, Ord, Show, Enum, Bounded)
-- | Description of the possible ways to sink log messages.
data LogSink
= -- | Standard error stream.
StdErr
| -- | Appended to file.
LogFile FilePath
deriving (Eq, Show)
| The logging implementation is very similar to Hakyll 's .
data Logger = Logger
{ lVerbosity :: Verbosity, -- Verbosity level below which to ignore messages
lChannel :: Chan Command, -- Queue of logging commands
lSink :: Text -> IO (), -- Action to perform with log messages
lSync :: MVar () -- Synchronization variable
}
data Command
= LogMessage Text
| EndLogging
class Monad m => MonadLogger m where
askLogger :: m Logger
-- | Ensure that all log messages are flushed, and stop logging
terminateLogging :: Logger -> IO ()
terminateLogging logger = do
-- Flushing the logger
-- To signal to the logger that logging duties are over,
-- we append Nothing to the channel, and wait for it to finish
-- dealing with all items in the channel.
writeChan (lChannel logger) EndLogging
void $ takeMVar (lSync logger)
-- | Perform an IO action with a logger. Using this function
-- ensures that logging will be gracefully shut down.
withLogger :: Verbosity -> LogSink -> (Logger -> IO a) -> IO a
withLogger v s f = do
logger <-
Logger v
<$> newChan
<*> pure (sink s)
<*> newEmptyMVar
-- The logger either logs messages (if Just "message"),
-- or stops working on Nothing.
_ <-
forkIO $
forever $
readChan (lChannel logger)
>>= \case
EndLogging -> putMVar (lSync logger) ()
LogMessage t -> lSink logger t
result <- f logger
terminateLogging logger
return result
where
sink :: LogSink -> Text -> IO ()
sink StdErr = TIO.hPutStr stderr
sink (LogFile fp) = TIO.appendFile fp
-- | General purpose logging function.
log ::
(MonadLogger m, MonadIO m) =>
Text -> -- Header
Verbosity ->
Text ->
m ()
log h v t = do
logger <- askLogger
when (v >= lVerbosity logger) $
liftIO $ do
forM_ (T.lines t) $ \l -> writeChan (lChannel logger) (LogMessage (h <> l <> "\n"))
debug, err, strict, warning, info :: (MonadLogger m, MonadIO m) => Text -> m ()
debug = log "[pandoc-plot] DEBUG | " Debug
err = log "[pandoc-plot] ERROR | " Error
strict = log "[pandoc-plot] STRICT MODE | " Error
warning = log "[pandoc-plot] WARN | " Warning
info = log "[pandoc-plot] INFO | " Info
instance IsString Verbosity where
fromString s
| ls == "silent" = Silent
| ls == "info" = Info
| ls == "warning" = Warning
| ls == "error" = Error
| ls == "debug" = Debug
| otherwise = errorWithoutStackTrace $ mconcat ["Unrecognized verbosity '", s, "'. Valid choices are: "] <> choices
where
ls = toLower <$> s
choices =
intercalate
", "
( fmap toLower . show
<$> enumFromTo minBound (maxBound :: Verbosity)
)
instance FromJSON Verbosity where
parseJSON (String t) = pure $ fromString . unpack $ t
parseJSON _ = fail "Could not parse the logging verbosity."
| null | https://raw.githubusercontent.com/LaurentRDC/pandoc-plot/af88b7e8a330a6964ba5979e71ff84af2da9944a/src/Text/Pandoc/Filter/Plot/Monad/Logging.hs | haskell | # LANGUAGE OverloadedStrings #
|
Module : $header$
Maintainer :
Stability : internal
Portability : portable
Logging primitives.
* Logging messages
| Verbosity of the logger.
| Log all messages, including debug messages.
| Log information, warning, and error messages.
| Log warning and error messages.
| Only log errors.
| Don't log anything.
| Description of the possible ways to sink log messages.
| Standard error stream.
| Appended to file.
Verbosity level below which to ignore messages
Queue of logging commands
Action to perform with log messages
Synchronization variable
| Ensure that all log messages are flushed, and stop logging
Flushing the logger
To signal to the logger that logging duties are over,
we append Nothing to the channel, and wait for it to finish
dealing with all items in the channel.
| Perform an IO action with a logger. Using this function
ensures that logging will be gracefully shut down.
The logger either logs messages (if Just "message"),
or stops working on Nothing.
| General purpose logging function.
Header | # LANGUAGE LambdaCase #
Copyright : ( c ) , 2019 - present
License : GNU GPL , version 2 or above
module Text.Pandoc.Filter.Plot.Monad.Logging
( MonadLogger (..),
Verbosity (..),
LogSink (..),
Logger (..),
withLogger,
terminateLogging,
debug,
err,
warning,
info,
strict,
)
where
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, takeMVar)
import Control.Monad (forM_, forever, void, when)
import Control.Monad.IO.Class (MonadIO (..))
import Data.Char (toLower)
import Data.List (intercalate)
import Data.String (IsString (..))
import Data.Text (Text, unpack)
import qualified Data.Text as T
import Data.Text.IO as TIO (appendFile, hPutStr)
import Data.Yaml (FromJSON (parseJSON), Value (String))
import System.IO (stderr)
import Prelude hiding (log)
data Verbosity
Debug
Info
Warning
Error
Silent
deriving (Eq, Ord, Show, Enum, Bounded)
data LogSink
StdErr
LogFile FilePath
deriving (Eq, Show)
| The logging implementation is very similar to Hakyll 's .
data Logger = Logger
}
data Command
= LogMessage Text
| EndLogging
class Monad m => MonadLogger m where
askLogger :: m Logger
terminateLogging :: Logger -> IO ()
terminateLogging logger = do
writeChan (lChannel logger) EndLogging
void $ takeMVar (lSync logger)
withLogger :: Verbosity -> LogSink -> (Logger -> IO a) -> IO a
withLogger v s f = do
logger <-
Logger v
<$> newChan
<*> pure (sink s)
<*> newEmptyMVar
_ <-
forkIO $
forever $
readChan (lChannel logger)
>>= \case
EndLogging -> putMVar (lSync logger) ()
LogMessage t -> lSink logger t
result <- f logger
terminateLogging logger
return result
where
sink :: LogSink -> Text -> IO ()
sink StdErr = TIO.hPutStr stderr
sink (LogFile fp) = TIO.appendFile fp
log ::
(MonadLogger m, MonadIO m) =>
Verbosity ->
Text ->
m ()
log h v t = do
logger <- askLogger
when (v >= lVerbosity logger) $
liftIO $ do
forM_ (T.lines t) $ \l -> writeChan (lChannel logger) (LogMessage (h <> l <> "\n"))
debug, err, strict, warning, info :: (MonadLogger m, MonadIO m) => Text -> m ()
debug = log "[pandoc-plot] DEBUG | " Debug
err = log "[pandoc-plot] ERROR | " Error
strict = log "[pandoc-plot] STRICT MODE | " Error
warning = log "[pandoc-plot] WARN | " Warning
info = log "[pandoc-plot] INFO | " Info
instance IsString Verbosity where
fromString s
| ls == "silent" = Silent
| ls == "info" = Info
| ls == "warning" = Warning
| ls == "error" = Error
| ls == "debug" = Debug
| otherwise = errorWithoutStackTrace $ mconcat ["Unrecognized verbosity '", s, "'. Valid choices are: "] <> choices
where
ls = toLower <$> s
choices =
intercalate
", "
( fmap toLower . show
<$> enumFromTo minBound (maxBound :: Verbosity)
)
instance FromJSON Verbosity where
parseJSON (String t) = pure $ fromString . unpack $ t
parseJSON _ = fail "Could not parse the logging verbosity."
|
5f2d39bfea4ffe0039060bda1eb12d40c781669ab53f8ee8318fd4f395bf457b | m-2k/erlach | db_user.erl | -module(db_user).
% -include_lib("kvs/include/kvs.hrl").
-include_lib("kvs/include/metainfo.hrl").
% -include_lib("kvs/include/feed.hrl").
% -include_lib("db/include/db.hrl").
-include_lib("db/include/user.hrl").
-compile(export_all).
%% rr(kvs), rr("apps/db/include/thread.hrl").
{ ok , P } = kvs : get(post , 1 ) , kvs : put(P#post{message="AAA " } ) .
metainfo() ->
#schema{name=kvs,tables=[
#table{name=user3,container=feed,fields=record_info(fields,user3),keys=[]},
#table{name=name,container=feed,fields=record_info(fields,name),keys=[]}
]}. | null | https://raw.githubusercontent.com/m-2k/erlach/ce0a19a0550c3457a1fc1d7c40e4f1cb577d7924/apps/db/src/db_user.erl | erlang | -include_lib("kvs/include/kvs.hrl").
-include_lib("kvs/include/feed.hrl").
-include_lib("db/include/db.hrl").
rr(kvs), rr("apps/db/include/thread.hrl"). | -module(db_user).
-include_lib("kvs/include/metainfo.hrl").
-include_lib("db/include/user.hrl").
-compile(export_all).
{ ok , P } = kvs : get(post , 1 ) , kvs : put(P#post{message="AAA " } ) .
metainfo() ->
#schema{name=kvs,tables=[
#table{name=user3,container=feed,fields=record_info(fields,user3),keys=[]},
#table{name=name,container=feed,fields=record_info(fields,name),keys=[]}
]}. |
215798bbcb8b4058412b093b1c32061224b6200de66517de62f7b687568a1701 | startling/partly | MBR.hs | | Types for dealing with the old - fasioned and modern Master Boot Records .
This does not cover things like the GUID partition table or any of the
weird variations like AAP or NEWLDR .
module System.Disk.Partitions.MBR where
-- base:
import Prelude hiding (head)
import Control.Applicative
import Control.Monad
import Data.Word
import Data.Bits (shiftL, shiftR, (.|.), (.&.))
-- bytestring:
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
-- binary:
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
| The so - called mystery bytes on Windows 95B , 98 , 98SE , and Me --
-- in fact, they're a timestamp and a drive number.
-- See <>.
data Timestamp = Timestamp
{ physicalDrive :: Word8
, seconds :: Word8
, minutes :: Word8
, hours :: Word8 }
deriving (Eq, Show)
instance Binary Timestamp where
get = Timestamp <$> get <*> get <*> get <*> get
put = (sequence_ .) . sequence $ [put . physicalDrive
, put . seconds, put . minutes, put . hours]
-- | A representation of the cylinder\/head\/sector addresses in MBRs.
data CHS = CHS
{ -- | The head number.
head :: Word8
| The sector number ; this is actually a six - bit number , but
-- Haskell doesn't have a convenient way to deal with those.
, sector :: Word8
| The cylinder number ; likewise , this is actually a 10 - bit number .
, cylinder :: Word16 }
deriving (Eq, Show)
instance Binary CHS where
get = do
(h, s, c) <- (,,) <$> getWord8 <*> getWord8 <*> getWord8
return . CHS h ((s `shiftL` 2) `shiftR` 2) $
Mask away everything but top two bits , convert to , and then
OR it with c converted to Word16 .
fromIntegral c .|. ((fromIntegral s .&. 0xc0) `shiftL` 2)
put (CHS h s c) = do
putWord8 h
Mask away the high two bits of s and use the high two bits of c.
putWord8 $ (s .&. 0x3f) .|. fromIntegral (shiftR c 2 .&. 0xc0)
-- Mask away the high byte of c.
putWord8 . fromIntegral $ 0x00ff .&. c
-- | Partition entries themselves are somewhat intricate.
data PartitionEntry = PartitionEntry
{ -- | A bitfield describing this partition. An 0x00 here means it's inactive;
having bit 7 set ( e.g. 0x80 ) means bootable ; anything else is invalid .
status :: Word8
| The CHS address of the first absolute sector of the partition .
, chsFirst :: CHS
-- | A partition type; for specifics, see the following document:
-- </~aeb/partitions/partition_types-1.html>
, partitionType :: Word8
| The CHS address of the last absolute sector of the partition .
, chsLast :: CHS
| The logical block address of the first absolute sector .
, lbaFirst :: Word32
-- | The number of sectors in the partition.
, sectors :: Word32 }
deriving (Eq, Show)
instance Binary PartitionEntry where
get = PartitionEntry <$> get <*> get <*> get <*> get
<*> getWord32le <*> getWord32le
put = (sequence_ .) . sequence $ [put . status, put . chsFirst
, put . partitionType, put . chsLast
, putWord32le . lbaFirst, putWord32le . sectors]
-- | The empty partition table entry.
nullPartition :: PartitionEntry
nullPartition = PartitionEntry 0 (CHS 0 0 0) 0 (CHS 0 0 0) 0 0
-- | Whether this partition entry is marked bootable.
bootable :: PartitionEntry -> Bool
bootable = ((== 1) . (`shiftR` 7)) . status
| An MBR partition table consists of four partition entries .
data PartitionTable = PartitionTable
{ first, second, third, fourth :: PartitionEntry }
deriving (Eq, Show)
instance Binary PartitionTable where
get = PartitionTable <$> get <*> get <*> get <*> get
put = (sequence_ .) . sequence $ [put . first, put . second
, put . third, put . fourth]
-- | The empty partition table.
nullPartitionTable :: PartitionTable
nullPartitionTable = PartitionTable n n n n
where n = nullPartition
| The structure of a Master Boot Record is as follows ...
data BootRecord = BootRecord
| The first piece of data on a disk with a Master Boot Record is some
bootloader code that gets loaded to address 0x7c00 in memory .
this may include the data for the Windows timestamp , if it exists . It
-- will also include the optional disk signature, if it exists -- thus this
field is always 446 bytes long .
bootloader :: ByteString
-- | Four partition table entries.
, partitions :: PartitionTable
-- | Finally, the boot signature.
, bootSig :: Word16 }
deriving (Eq, Show)
instance Binary BootRecord where
get = BootRecord <$> getByteString 446 <*> get <*> getWord16le
put = (sequence_ .) . sequence $ [ putByteString . B.take 446 . bootloader
, put . partitions , putWord16le . bootSig ]
| The empty bootloader -- 446 empty bytes .
emptyBootloader :: B.ByteString
emptyBootloader = B.replicate 446 0
-- | The empty boot record.
nullBootRecord :: BootRecord
nullBootRecord = BootRecord emptyBootloader nullPartitionTable 0xaa55
-- | Get the completely-optional, obsolete disk timestamp used by some old
versions of Windows .
getTimestamp :: BootRecord -> Maybe Timestamp
getTimestamp (BootRecord b _ _) = do
-- Check that it's long enough.
guard $ B.length b > 0x0df
Check that it has the two zero bytes at 0x0da .
guard $ B.index b 0x0da == 0 && B.index b 0x0db == 0
Get the four bytes at 0x0dc .
let _1 : _2 : _3 : _4 : _ = B.unpack . B.take 4 . B.drop 0x0dc $ b
return $ Timestamp _1 _2 _3 _4
| Get the optional disk signature from a Bootrecord 's bootloader .
getDiskSignature :: BootRecord -> Maybe Word32
getDiskSignature (BootRecord b _ _) = do
-- check that it's long enough.
guard $ B.length b > 0x1bd
Check that it has the two zero bytes at 0x1bc
guard $ B.index b 0x1bc == 0 && B.index b 0x1bd == 0
Get the four bytes at 0x1b8 ...
let [_1, _2, _3, _4] = map fromIntegral . B.unpack . B.take 4. B.drop 0x1b8 $ b
-- And construct a word32 out of them, little-endian style.
return $ (_4 << 24) .|. (_3 << 16) .|. (_2 << 8) .|. _1
where (<<) = shiftL
| null | https://raw.githubusercontent.com/startling/partly/d23b27910084eede0828bc421d7c1923660826b2/System/Disk/Partitions/MBR.hs | haskell | base:
bytestring:
binary:
in fact, they're a timestamp and a drive number.
See <>.
| A representation of the cylinder\/head\/sector addresses in MBRs.
| The head number.
Haskell doesn't have a convenient way to deal with those.
Mask away the high byte of c.
| Partition entries themselves are somewhat intricate.
| A bitfield describing this partition. An 0x00 here means it's inactive;
| A partition type; for specifics, see the following document:
</~aeb/partitions/partition_types-1.html>
| The number of sectors in the partition.
| The empty partition table entry.
| Whether this partition entry is marked bootable.
| The empty partition table.
will also include the optional disk signature, if it exists -- thus this
| Four partition table entries.
| Finally, the boot signature.
446 empty bytes .
| The empty boot record.
| Get the completely-optional, obsolete disk timestamp used by some old
Check that it's long enough.
check that it's long enough.
And construct a word32 out of them, little-endian style. | | Types for dealing with the old - fasioned and modern Master Boot Records .
This does not cover things like the GUID partition table or any of the
weird variations like AAP or NEWLDR .
module System.Disk.Partitions.MBR where
import Prelude hiding (head)
import Control.Applicative
import Control.Monad
import Data.Word
import Data.Bits (shiftL, shiftR, (.|.), (.&.))
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
data Timestamp = Timestamp
{ physicalDrive :: Word8
, seconds :: Word8
, minutes :: Word8
, hours :: Word8 }
deriving (Eq, Show)
instance Binary Timestamp where
get = Timestamp <$> get <*> get <*> get <*> get
put = (sequence_ .) . sequence $ [put . physicalDrive
, put . seconds, put . minutes, put . hours]
data CHS = CHS
head :: Word8
| The sector number ; this is actually a six - bit number , but
, sector :: Word8
| The cylinder number ; likewise , this is actually a 10 - bit number .
, cylinder :: Word16 }
deriving (Eq, Show)
instance Binary CHS where
get = do
(h, s, c) <- (,,) <$> getWord8 <*> getWord8 <*> getWord8
return . CHS h ((s `shiftL` 2) `shiftR` 2) $
Mask away everything but top two bits , convert to , and then
OR it with c converted to Word16 .
fromIntegral c .|. ((fromIntegral s .&. 0xc0) `shiftL` 2)
put (CHS h s c) = do
putWord8 h
Mask away the high two bits of s and use the high two bits of c.
putWord8 $ (s .&. 0x3f) .|. fromIntegral (shiftR c 2 .&. 0xc0)
putWord8 . fromIntegral $ 0x00ff .&. c
data PartitionEntry = PartitionEntry
having bit 7 set ( e.g. 0x80 ) means bootable ; anything else is invalid .
status :: Word8
| The CHS address of the first absolute sector of the partition .
, chsFirst :: CHS
, partitionType :: Word8
| The CHS address of the last absolute sector of the partition .
, chsLast :: CHS
| The logical block address of the first absolute sector .
, lbaFirst :: Word32
, sectors :: Word32 }
deriving (Eq, Show)
instance Binary PartitionEntry where
get = PartitionEntry <$> get <*> get <*> get <*> get
<*> getWord32le <*> getWord32le
put = (sequence_ .) . sequence $ [put . status, put . chsFirst
, put . partitionType, put . chsLast
, putWord32le . lbaFirst, putWord32le . sectors]
nullPartition :: PartitionEntry
nullPartition = PartitionEntry 0 (CHS 0 0 0) 0 (CHS 0 0 0) 0 0
bootable :: PartitionEntry -> Bool
bootable = ((== 1) . (`shiftR` 7)) . status
| An MBR partition table consists of four partition entries .
data PartitionTable = PartitionTable
{ first, second, third, fourth :: PartitionEntry }
deriving (Eq, Show)
instance Binary PartitionTable where
get = PartitionTable <$> get <*> get <*> get <*> get
put = (sequence_ .) . sequence $ [put . first, put . second
, put . third, put . fourth]
nullPartitionTable :: PartitionTable
nullPartitionTable = PartitionTable n n n n
where n = nullPartition
| The structure of a Master Boot Record is as follows ...
data BootRecord = BootRecord
| The first piece of data on a disk with a Master Boot Record is some
bootloader code that gets loaded to address 0x7c00 in memory .
this may include the data for the Windows timestamp , if it exists . It
field is always 446 bytes long .
bootloader :: ByteString
, partitions :: PartitionTable
, bootSig :: Word16 }
deriving (Eq, Show)
instance Binary BootRecord where
get = BootRecord <$> getByteString 446 <*> get <*> getWord16le
put = (sequence_ .) . sequence $ [ putByteString . B.take 446 . bootloader
, put . partitions , putWord16le . bootSig ]
emptyBootloader :: B.ByteString
emptyBootloader = B.replicate 446 0
nullBootRecord :: BootRecord
nullBootRecord = BootRecord emptyBootloader nullPartitionTable 0xaa55
versions of Windows .
getTimestamp :: BootRecord -> Maybe Timestamp
getTimestamp (BootRecord b _ _) = do
guard $ B.length b > 0x0df
Check that it has the two zero bytes at 0x0da .
guard $ B.index b 0x0da == 0 && B.index b 0x0db == 0
Get the four bytes at 0x0dc .
let _1 : _2 : _3 : _4 : _ = B.unpack . B.take 4 . B.drop 0x0dc $ b
return $ Timestamp _1 _2 _3 _4
| Get the optional disk signature from a Bootrecord 's bootloader .
getDiskSignature :: BootRecord -> Maybe Word32
getDiskSignature (BootRecord b _ _) = do
guard $ B.length b > 0x1bd
Check that it has the two zero bytes at 0x1bc
guard $ B.index b 0x1bc == 0 && B.index b 0x1bd == 0
Get the four bytes at 0x1b8 ...
let [_1, _2, _3, _4] = map fromIntegral . B.unpack . B.take 4. B.drop 0x1b8 $ b
return $ (_4 << 24) .|. (_3 << 16) .|. (_2 << 8) .|. _1
where (<<) = shiftL
|
df59438fca320db2951e9401fcaa6d8615f33d264bf07bbbb3b1bd5a9843045f | dyzsr/ocaml-selectml | printclambda_primitives.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Format
open Asttypes
let boxed_integer_name = function
| Lambda.Pnativeint -> "nativeint"
| Lambda.Pint32 -> "int32"
| Lambda.Pint64 -> "int64"
let boxed_integer_mark name = function
| Lambda.Pnativeint -> Printf.sprintf "Nativeint.%s" name
| Lambda.Pint32 -> Printf.sprintf "Int32.%s" name
| Lambda.Pint64 -> Printf.sprintf "Int64.%s" name
let print_boxed_integer name ppf bi =
fprintf ppf "%s" (boxed_integer_mark name bi);;
let array_kind array_kind =
let open Lambda in
match array_kind with
| Pgenarray -> "gen"
| Paddrarray -> "addr"
| Pintarray -> "int"
| Pfloatarray -> "float"
let access_size size =
let open Clambda_primitives in
match size with
| Sixteen -> "16"
| Thirty_two -> "32"
| Sixty_four -> "64"
let access_safety safety =
let open Lambda in
match safety with
| Safe -> ""
| Unsafe -> "unsafe_"
let primitive ppf (prim:Clambda_primitives.primitive) =
let open Lambda in
let open Clambda_primitives in
match prim with
| Pread_symbol sym ->
fprintf ppf "read_symbol %s" sym
| Pmakeblock(tag, Immutable, shape) ->
fprintf ppf "makeblock %i%a" tag Printlambda.block_shape shape
| Pmakeblock(tag, Mutable, shape) ->
fprintf ppf "makemutable %i%a" tag Printlambda.block_shape shape
| Pfield n -> fprintf ppf "field %i" n
| Pfield_computed -> fprintf ppf "field_computed"
| Psetfield(n, ptr, init) ->
let instr =
match ptr with
| Pointer -> "ptr"
| Immediate -> "imm"
in
let init =
match init with
| Heap_initialization -> "(heap-init)"
| Root_initialization -> "(root-init)"
| Assignment -> ""
in
fprintf ppf "setfield_%s%s %i" instr init n
| Psetfield_computed (ptr, init) ->
let instr =
match ptr with
| Pointer -> "ptr"
| Immediate -> "imm"
in
let init =
match init with
| Heap_initialization -> "(heap-init)"
| Root_initialization -> "(root-init)"
| Assignment -> ""
in
fprintf ppf "setfield_%s%s_computed" instr init
| Pfloatfield n -> fprintf ppf "floatfield %i" n
| Psetfloatfield (n, init) ->
let init =
match init with
| Heap_initialization -> "(heap-init)"
| Root_initialization -> "(root-init)"
| Assignment -> ""
in
fprintf ppf "setfloatfield%s %i" init n
| Pduprecord (rep, size) ->
fprintf ppf "duprecord %a %i" Printlambda.record_rep rep size
| Pccall p -> fprintf ppf "%s" p.Primitive.prim_name
| Praise k -> fprintf ppf "%s" (Lambda.raise_kind k)
| Psequand -> fprintf ppf "&&"
| Psequor -> fprintf ppf "||"
| Pnot -> fprintf ppf "not"
| Pnegint -> fprintf ppf "~"
| Paddint -> fprintf ppf "+"
| Psubint -> fprintf ppf "-"
| Pmulint -> fprintf ppf "*"
| Pdivint Safe -> fprintf ppf "/"
| Pdivint Unsafe -> fprintf ppf "/u"
| Pmodint Safe -> fprintf ppf "mod"
| Pmodint Unsafe -> fprintf ppf "mod_unsafe"
| Pandint -> fprintf ppf "and"
| Porint -> fprintf ppf "or"
| Pxorint -> fprintf ppf "xor"
| Plslint -> fprintf ppf "lsl"
| Plsrint -> fprintf ppf "lsr"
| Pasrint -> fprintf ppf "asr"
| Pintcomp(cmp) -> Printlambda.integer_comparison ppf cmp
| Pcompare_ints -> fprintf ppf "compare_ints"
| Pcompare_floats -> fprintf ppf "compare_floats"
| Pcompare_bints bi -> fprintf ppf "compare_bints %s" (boxed_integer_name bi)
| Poffsetint n -> fprintf ppf "%i+" n
| Poffsetref n -> fprintf ppf "+:=%i"n
| Pintoffloat -> fprintf ppf "int_of_float"
| Pfloatofint -> fprintf ppf "float_of_int"
| Pnegfloat -> fprintf ppf "~."
| Pabsfloat -> fprintf ppf "abs."
| Paddfloat -> fprintf ppf "+."
| Psubfloat -> fprintf ppf "-."
| Pmulfloat -> fprintf ppf "*."
| Pdivfloat -> fprintf ppf "/."
| Pfloatcomp(cmp) -> Printlambda.float_comparison ppf cmp
| Pstringlength -> fprintf ppf "string.length"
| Pstringrefu -> fprintf ppf "string.unsafe_get"
| Pstringrefs -> fprintf ppf "string.get"
| Pbyteslength -> fprintf ppf "bytes.length"
| Pbytesrefu -> fprintf ppf "bytes.unsafe_get"
| Pbytessetu -> fprintf ppf "bytes.unsafe_set"
| Pbytesrefs -> fprintf ppf "bytes.get"
| Pbytessets -> fprintf ppf "bytes.set"
| Parraylength k -> fprintf ppf "array.length[%s]" (array_kind k)
| Pmakearray (k, Mutable) -> fprintf ppf "makearray[%s]" (array_kind k)
| Pmakearray (k, Immutable) -> fprintf ppf "makearray_imm[%s]" (array_kind k)
| Pduparray (k, Mutable) -> fprintf ppf "duparray[%s]" (array_kind k)
| Pduparray (k, Immutable) -> fprintf ppf "duparray_imm[%s]" (array_kind k)
| Parrayrefu k -> fprintf ppf "array.unsafe_get[%s]" (array_kind k)
| Parraysetu k -> fprintf ppf "array.unsafe_set[%s]" (array_kind k)
| Parrayrefs k -> fprintf ppf "array.get[%s]" (array_kind k)
| Parraysets k -> fprintf ppf "array.set[%s]" (array_kind k)
| Pisint -> fprintf ppf "isint"
| Pisout -> fprintf ppf "isout"
| Pbintofint bi -> print_boxed_integer "of_int" ppf bi
| Pintofbint bi -> print_boxed_integer "to_int" ppf bi
| Pcvtbint (bi1, bi2) ->
fprintf ppf "%s_of_%s" (boxed_integer_name bi2) (boxed_integer_name bi1)
| Pnegbint bi -> print_boxed_integer "neg" ppf bi
| Paddbint bi -> print_boxed_integer "add" ppf bi
| Psubbint bi -> print_boxed_integer "sub" ppf bi
| Pmulbint bi -> print_boxed_integer "mul" ppf bi
| Pdivbint { size = bi; is_safe = Safe } ->
print_boxed_integer "div" ppf bi
| Pdivbint { size = bi; is_safe = Unsafe } ->
print_boxed_integer "div_unsafe" ppf bi
| Pmodbint { size = bi; is_safe = Safe } ->
print_boxed_integer "mod" ppf bi
| Pmodbint { size = bi; is_safe = Unsafe } ->
print_boxed_integer "mod_unsafe" ppf bi
| Pandbint bi -> print_boxed_integer "and" ppf bi
| Porbint bi -> print_boxed_integer "or" ppf bi
| Pxorbint bi -> print_boxed_integer "xor" ppf bi
| Plslbint bi -> print_boxed_integer "lsl" ppf bi
| Plsrbint bi -> print_boxed_integer "lsr" ppf bi
| Pasrbint bi -> print_boxed_integer "asr" ppf bi
| Pbintcomp(bi, Ceq) -> print_boxed_integer "==" ppf bi
| Pbintcomp(bi, Cne) -> print_boxed_integer "!=" ppf bi
| Pbintcomp(bi, Clt) -> print_boxed_integer "<" ppf bi
| Pbintcomp(bi, Cgt) -> print_boxed_integer ">" ppf bi
| Pbintcomp(bi, Cle) -> print_boxed_integer "<=" ppf bi
| Pbintcomp(bi, Cge) -> print_boxed_integer ">=" ppf bi
| Pbigarrayref(unsafe, _n, kind, layout) ->
Printlambda.print_bigarray "get" unsafe kind ppf layout
| Pbigarrayset(unsafe, _n, kind, layout) ->
Printlambda.print_bigarray "set" unsafe kind ppf layout
| Pbigarraydim(n) -> fprintf ppf "Bigarray.dim_%i" n
| Pstring_load(size, safety) ->
fprintf ppf "string.%sget%s" (access_safety safety) (access_size size)
| Pbytes_load(size, safety) ->
fprintf ppf "bytes.%sget%s" (access_safety safety) (access_size size)
| Pbytes_set(size, safety) ->
fprintf ppf "bytes.%sset%s" (access_safety safety) (access_size size)
| Pbigstring_load(size, safety) ->
fprintf ppf "bigarray.array1.%sget%s"
(access_safety safety) (access_size size)
| Pbigstring_set(size, safety) ->
fprintf ppf "bigarray.array1.%sset%s"
(access_safety safety) (access_size size)
| Pbswap16 -> fprintf ppf "bswap16"
| Pbbswap(bi) -> print_boxed_integer "bswap" ppf bi
| Pint_as_pointer -> fprintf ppf "int_as_pointer"
| Popaque -> fprintf ppf "opaque"
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/middle_end/printclambda_primitives.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************ | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Format
open Asttypes
let boxed_integer_name = function
| Lambda.Pnativeint -> "nativeint"
| Lambda.Pint32 -> "int32"
| Lambda.Pint64 -> "int64"
let boxed_integer_mark name = function
| Lambda.Pnativeint -> Printf.sprintf "Nativeint.%s" name
| Lambda.Pint32 -> Printf.sprintf "Int32.%s" name
| Lambda.Pint64 -> Printf.sprintf "Int64.%s" name
let print_boxed_integer name ppf bi =
fprintf ppf "%s" (boxed_integer_mark name bi);;
let array_kind array_kind =
let open Lambda in
match array_kind with
| Pgenarray -> "gen"
| Paddrarray -> "addr"
| Pintarray -> "int"
| Pfloatarray -> "float"
let access_size size =
let open Clambda_primitives in
match size with
| Sixteen -> "16"
| Thirty_two -> "32"
| Sixty_four -> "64"
let access_safety safety =
let open Lambda in
match safety with
| Safe -> ""
| Unsafe -> "unsafe_"
let primitive ppf (prim:Clambda_primitives.primitive) =
let open Lambda in
let open Clambda_primitives in
match prim with
| Pread_symbol sym ->
fprintf ppf "read_symbol %s" sym
| Pmakeblock(tag, Immutable, shape) ->
fprintf ppf "makeblock %i%a" tag Printlambda.block_shape shape
| Pmakeblock(tag, Mutable, shape) ->
fprintf ppf "makemutable %i%a" tag Printlambda.block_shape shape
| Pfield n -> fprintf ppf "field %i" n
| Pfield_computed -> fprintf ppf "field_computed"
| Psetfield(n, ptr, init) ->
let instr =
match ptr with
| Pointer -> "ptr"
| Immediate -> "imm"
in
let init =
match init with
| Heap_initialization -> "(heap-init)"
| Root_initialization -> "(root-init)"
| Assignment -> ""
in
fprintf ppf "setfield_%s%s %i" instr init n
| Psetfield_computed (ptr, init) ->
let instr =
match ptr with
| Pointer -> "ptr"
| Immediate -> "imm"
in
let init =
match init with
| Heap_initialization -> "(heap-init)"
| Root_initialization -> "(root-init)"
| Assignment -> ""
in
fprintf ppf "setfield_%s%s_computed" instr init
| Pfloatfield n -> fprintf ppf "floatfield %i" n
| Psetfloatfield (n, init) ->
let init =
match init with
| Heap_initialization -> "(heap-init)"
| Root_initialization -> "(root-init)"
| Assignment -> ""
in
fprintf ppf "setfloatfield%s %i" init n
| Pduprecord (rep, size) ->
fprintf ppf "duprecord %a %i" Printlambda.record_rep rep size
| Pccall p -> fprintf ppf "%s" p.Primitive.prim_name
| Praise k -> fprintf ppf "%s" (Lambda.raise_kind k)
| Psequand -> fprintf ppf "&&"
| Psequor -> fprintf ppf "||"
| Pnot -> fprintf ppf "not"
| Pnegint -> fprintf ppf "~"
| Paddint -> fprintf ppf "+"
| Psubint -> fprintf ppf "-"
| Pmulint -> fprintf ppf "*"
| Pdivint Safe -> fprintf ppf "/"
| Pdivint Unsafe -> fprintf ppf "/u"
| Pmodint Safe -> fprintf ppf "mod"
| Pmodint Unsafe -> fprintf ppf "mod_unsafe"
| Pandint -> fprintf ppf "and"
| Porint -> fprintf ppf "or"
| Pxorint -> fprintf ppf "xor"
| Plslint -> fprintf ppf "lsl"
| Plsrint -> fprintf ppf "lsr"
| Pasrint -> fprintf ppf "asr"
| Pintcomp(cmp) -> Printlambda.integer_comparison ppf cmp
| Pcompare_ints -> fprintf ppf "compare_ints"
| Pcompare_floats -> fprintf ppf "compare_floats"
| Pcompare_bints bi -> fprintf ppf "compare_bints %s" (boxed_integer_name bi)
| Poffsetint n -> fprintf ppf "%i+" n
| Poffsetref n -> fprintf ppf "+:=%i"n
| Pintoffloat -> fprintf ppf "int_of_float"
| Pfloatofint -> fprintf ppf "float_of_int"
| Pnegfloat -> fprintf ppf "~."
| Pabsfloat -> fprintf ppf "abs."
| Paddfloat -> fprintf ppf "+."
| Psubfloat -> fprintf ppf "-."
| Pmulfloat -> fprintf ppf "*."
| Pdivfloat -> fprintf ppf "/."
| Pfloatcomp(cmp) -> Printlambda.float_comparison ppf cmp
| Pstringlength -> fprintf ppf "string.length"
| Pstringrefu -> fprintf ppf "string.unsafe_get"
| Pstringrefs -> fprintf ppf "string.get"
| Pbyteslength -> fprintf ppf "bytes.length"
| Pbytesrefu -> fprintf ppf "bytes.unsafe_get"
| Pbytessetu -> fprintf ppf "bytes.unsafe_set"
| Pbytesrefs -> fprintf ppf "bytes.get"
| Pbytessets -> fprintf ppf "bytes.set"
| Parraylength k -> fprintf ppf "array.length[%s]" (array_kind k)
| Pmakearray (k, Mutable) -> fprintf ppf "makearray[%s]" (array_kind k)
| Pmakearray (k, Immutable) -> fprintf ppf "makearray_imm[%s]" (array_kind k)
| Pduparray (k, Mutable) -> fprintf ppf "duparray[%s]" (array_kind k)
| Pduparray (k, Immutable) -> fprintf ppf "duparray_imm[%s]" (array_kind k)
| Parrayrefu k -> fprintf ppf "array.unsafe_get[%s]" (array_kind k)
| Parraysetu k -> fprintf ppf "array.unsafe_set[%s]" (array_kind k)
| Parrayrefs k -> fprintf ppf "array.get[%s]" (array_kind k)
| Parraysets k -> fprintf ppf "array.set[%s]" (array_kind k)
| Pisint -> fprintf ppf "isint"
| Pisout -> fprintf ppf "isout"
| Pbintofint bi -> print_boxed_integer "of_int" ppf bi
| Pintofbint bi -> print_boxed_integer "to_int" ppf bi
| Pcvtbint (bi1, bi2) ->
fprintf ppf "%s_of_%s" (boxed_integer_name bi2) (boxed_integer_name bi1)
| Pnegbint bi -> print_boxed_integer "neg" ppf bi
| Paddbint bi -> print_boxed_integer "add" ppf bi
| Psubbint bi -> print_boxed_integer "sub" ppf bi
| Pmulbint bi -> print_boxed_integer "mul" ppf bi
| Pdivbint { size = bi; is_safe = Safe } ->
print_boxed_integer "div" ppf bi
| Pdivbint { size = bi; is_safe = Unsafe } ->
print_boxed_integer "div_unsafe" ppf bi
| Pmodbint { size = bi; is_safe = Safe } ->
print_boxed_integer "mod" ppf bi
| Pmodbint { size = bi; is_safe = Unsafe } ->
print_boxed_integer "mod_unsafe" ppf bi
| Pandbint bi -> print_boxed_integer "and" ppf bi
| Porbint bi -> print_boxed_integer "or" ppf bi
| Pxorbint bi -> print_boxed_integer "xor" ppf bi
| Plslbint bi -> print_boxed_integer "lsl" ppf bi
| Plsrbint bi -> print_boxed_integer "lsr" ppf bi
| Pasrbint bi -> print_boxed_integer "asr" ppf bi
| Pbintcomp(bi, Ceq) -> print_boxed_integer "==" ppf bi
| Pbintcomp(bi, Cne) -> print_boxed_integer "!=" ppf bi
| Pbintcomp(bi, Clt) -> print_boxed_integer "<" ppf bi
| Pbintcomp(bi, Cgt) -> print_boxed_integer ">" ppf bi
| Pbintcomp(bi, Cle) -> print_boxed_integer "<=" ppf bi
| Pbintcomp(bi, Cge) -> print_boxed_integer ">=" ppf bi
| Pbigarrayref(unsafe, _n, kind, layout) ->
Printlambda.print_bigarray "get" unsafe kind ppf layout
| Pbigarrayset(unsafe, _n, kind, layout) ->
Printlambda.print_bigarray "set" unsafe kind ppf layout
| Pbigarraydim(n) -> fprintf ppf "Bigarray.dim_%i" n
| Pstring_load(size, safety) ->
fprintf ppf "string.%sget%s" (access_safety safety) (access_size size)
| Pbytes_load(size, safety) ->
fprintf ppf "bytes.%sget%s" (access_safety safety) (access_size size)
| Pbytes_set(size, safety) ->
fprintf ppf "bytes.%sset%s" (access_safety safety) (access_size size)
| Pbigstring_load(size, safety) ->
fprintf ppf "bigarray.array1.%sget%s"
(access_safety safety) (access_size size)
| Pbigstring_set(size, safety) ->
fprintf ppf "bigarray.array1.%sset%s"
(access_safety safety) (access_size size)
| Pbswap16 -> fprintf ppf "bswap16"
| Pbbswap(bi) -> print_boxed_integer "bswap" ppf bi
| Pint_as_pointer -> fprintf ppf "int_as_pointer"
| Popaque -> fprintf ppf "opaque"
|
a206a9a0a37391c081aef53cf8b77fc2fe65ee278e1629f35708e9cf26c48a55 | bsless/clj-fast | concurrent_map.clj | (ns clj-fast.collections.concurrent-map
(:refer-clojure :exclude [get memoize])
(:require
[clj-fast
[util :as u]
[lens :as lens]])
(:import
[java.util Map]
[java.util.concurrent
ConcurrentMap ;; interface
ConcurrentHashMap
ConcurrentSkipListMap]))
(def ^:const t {:tag 'java.util.concurrent.ConcurrentMap})
(defn ->concurrent-hash-map
([] (ConcurrentHashMap.))
([^Map m] (new ConcurrentHashMap m)))
(defn ->concurrent-skip-list-map
([] (ConcurrentSkipListMap.))
([^Map m] (new ConcurrentSkipListMap m)))
(defn put!?
"Puts v in k if k is absent from m."
{:inline
(fn [m k v]
`(do (.putIfAbsent ~(with-meta m t) ~k ~v)
~m))}
[^java.util.concurrent.ConcurrentMap m k v]
(.putIfAbsent m k v) m)
(defn concurrent-map?
"Checks if m is an instance of a ConcurrentMap"
{:inline
(fn [m] `(instance? ConcurrentMap ~m))}
[chm]
(instance? ConcurrentMap chm))
(defn get
"Returns the value mapped to key or nil if key not present."
{:inline-arities #{2 3}
:inline
(fn [m k & nf]
(if nf
`(.getOrDefault ~(with-meta m t) ~k ~@nf)
`(.get ~(with-meta m t) ~k)))}
([^java.util.concurrent.ConcurrentMap m k]
(.get m k))
([^java.util.concurrent.ConcurrentMap m k nf]
(.getOrDefault m k nf)))
(defn get?
"Returns the value mapped to key or nil if key not present if m is a
ConcurrentMap, otherwise returns m."
[m k]
{:inline
(fn [m k]
`(when (concurrent-map? ~m)
(.get ~(with-meta m t) ~k))
m)}
[m k]
(when (concurrent-map? m)
(.get ^java.util.concurrent.ConcurrentMap m k)))
(defmacro get-in?
"Like core/get-in but for nested ConcurrentMaps."
[m ks]
(lens/get-some
(fn [m k] `(get? ~m ~k))
m ks))
(defmacro put-in!
"Like core/assoc-in but for nested ConcurrentMaps."
[m ks v]
(lens/put
(fn [m k v] `(put!? ~m ~k ~v))
(fn [m k] `(or (get? ~m ~k) (->concurrent-hash-map)))
m (u/simple-seq ks) v))
(defn memoize
[f]
(let [mem (->concurrent-hash-map)
sentinel (new Object)]
(fn [& args]
(if-let [e (get mem args)]
(if (u/eq? sentinel e) nil e)
(let [ret (apply f args)
ret (if (nil? ret) sentinel ret)]
(put!? mem args ret)
ret)))))
(defmacro memoize*
[n f]
(if (zero? n)
`(u/memoize0 ~f)
(let [args (repeatedly n #(gensym))]
`(let [mem# (->concurrent-hash-map)
sentinel# (new Object)]
(fn [~@args]
(if-let [e# (get-in? mem# ~args)]
(if (u/eq? sentinel# e#) nil e#)
(let [ret# (~f ~@args)
ret# (if (nil? ret#) sentinel# ret#)]
(put-in! mem# [~@args] ret#)
ret#)))))))
| null | https://raw.githubusercontent.com/bsless/clj-fast/2e9ea92428a4f3e75c23c767c43f0079b6a1a1ce/src/clj_fast/collections/concurrent_map.clj | clojure | interface | (ns clj-fast.collections.concurrent-map
(:refer-clojure :exclude [get memoize])
(:require
[clj-fast
[util :as u]
[lens :as lens]])
(:import
[java.util Map]
[java.util.concurrent
ConcurrentHashMap
ConcurrentSkipListMap]))
(def ^:const t {:tag 'java.util.concurrent.ConcurrentMap})
(defn ->concurrent-hash-map
([] (ConcurrentHashMap.))
([^Map m] (new ConcurrentHashMap m)))
(defn ->concurrent-skip-list-map
([] (ConcurrentSkipListMap.))
([^Map m] (new ConcurrentSkipListMap m)))
(defn put!?
"Puts v in k if k is absent from m."
{:inline
(fn [m k v]
`(do (.putIfAbsent ~(with-meta m t) ~k ~v)
~m))}
[^java.util.concurrent.ConcurrentMap m k v]
(.putIfAbsent m k v) m)
(defn concurrent-map?
"Checks if m is an instance of a ConcurrentMap"
{:inline
(fn [m] `(instance? ConcurrentMap ~m))}
[chm]
(instance? ConcurrentMap chm))
(defn get
"Returns the value mapped to key or nil if key not present."
{:inline-arities #{2 3}
:inline
(fn [m k & nf]
(if nf
`(.getOrDefault ~(with-meta m t) ~k ~@nf)
`(.get ~(with-meta m t) ~k)))}
([^java.util.concurrent.ConcurrentMap m k]
(.get m k))
([^java.util.concurrent.ConcurrentMap m k nf]
(.getOrDefault m k nf)))
(defn get?
"Returns the value mapped to key or nil if key not present if m is a
ConcurrentMap, otherwise returns m."
[m k]
{:inline
(fn [m k]
`(when (concurrent-map? ~m)
(.get ~(with-meta m t) ~k))
m)}
[m k]
(when (concurrent-map? m)
(.get ^java.util.concurrent.ConcurrentMap m k)))
(defmacro get-in?
"Like core/get-in but for nested ConcurrentMaps."
[m ks]
(lens/get-some
(fn [m k] `(get? ~m ~k))
m ks))
(defmacro put-in!
"Like core/assoc-in but for nested ConcurrentMaps."
[m ks v]
(lens/put
(fn [m k v] `(put!? ~m ~k ~v))
(fn [m k] `(or (get? ~m ~k) (->concurrent-hash-map)))
m (u/simple-seq ks) v))
(defn memoize
[f]
(let [mem (->concurrent-hash-map)
sentinel (new Object)]
(fn [& args]
(if-let [e (get mem args)]
(if (u/eq? sentinel e) nil e)
(let [ret (apply f args)
ret (if (nil? ret) sentinel ret)]
(put!? mem args ret)
ret)))))
(defmacro memoize*
[n f]
(if (zero? n)
`(u/memoize0 ~f)
(let [args (repeatedly n #(gensym))]
`(let [mem# (->concurrent-hash-map)
sentinel# (new Object)]
(fn [~@args]
(if-let [e# (get-in? mem# ~args)]
(if (u/eq? sentinel# e#) nil e#)
(let [ret# (~f ~@args)
ret# (if (nil? ret#) sentinel# ret#)]
(put-in! mem# [~@args] ret#)
ret#)))))))
|
1d58be55b06d3c8670360b7049cd872e0ce3caaa061d8fc45f49b036b690748c | EFanZh/EOPL-Exercises | exercise-5.15-test.rkt | #lang racket/base
(require rackunit)
(require "../solutions/exercise-5.15.rkt")
(check-equal? (run "2") (num-val 2))
(check-equal? (run "-(3, 3)") (num-val 0))
(check-equal? (run "-(3, 4)") (num-val -1))
(check-equal? (run "-(4, 3)") (num-val 1))
(check-equal? (run "zero?(0)") (bool-val #t))
(check-equal? (run "zero?(4)") (bool-val #f))
(check-equal? (run "if zero?(0) then 7 else 11") (num-val 7))
(check-equal? (run "if zero?(2) then 7 else 11") (num-val 11))
(check-equal? (run "let x = 5 in x") (num-val 5))
(check-equal? (run "let x = 5 in let x = 3 in x") (num-val 3))
(check-equal? (run "let f = proc (x)
-(x, 11)
in (f (f 77))")
(num-val 55))
(check-equal? (run "(proc (f)
(f (f 77))
proc (x)
-(x, 11))")
(num-val 55))
(check-equal? (run "let x = 200
in let f = proc (z)
-(z, x)
in let x = 100
in let g = proc (z)
-(z, x)
in -((f 1), (g 1))")
(num-val -100))
(check-equal? (run "letrec double(x) = if zero?(x)
then 0
else -((double -(x, 1)), -2)
in (double 6)")
(num-val 12))
| null | https://raw.githubusercontent.com/EFanZh/EOPL-Exercises/11667f1e84a1a3e300c2182630b56db3e3d9246a/tests/exercise-5.15-test.rkt | racket | #lang racket/base
(require rackunit)
(require "../solutions/exercise-5.15.rkt")
(check-equal? (run "2") (num-val 2))
(check-equal? (run "-(3, 3)") (num-val 0))
(check-equal? (run "-(3, 4)") (num-val -1))
(check-equal? (run "-(4, 3)") (num-val 1))
(check-equal? (run "zero?(0)") (bool-val #t))
(check-equal? (run "zero?(4)") (bool-val #f))
(check-equal? (run "if zero?(0) then 7 else 11") (num-val 7))
(check-equal? (run "if zero?(2) then 7 else 11") (num-val 11))
(check-equal? (run "let x = 5 in x") (num-val 5))
(check-equal? (run "let x = 5 in let x = 3 in x") (num-val 3))
(check-equal? (run "let f = proc (x)
-(x, 11)
in (f (f 77))")
(num-val 55))
(check-equal? (run "(proc (f)
(f (f 77))
proc (x)
-(x, 11))")
(num-val 55))
(check-equal? (run "let x = 200
in let f = proc (z)
-(z, x)
in let x = 100
in let g = proc (z)
-(z, x)
in -((f 1), (g 1))")
(num-val -100))
(check-equal? (run "letrec double(x) = if zero?(x)
then 0
else -((double -(x, 1)), -2)
in (double 6)")
(num-val 12))
| |
d47a40a089bf88776f13ef9e23ce0fea8ed73c227a6a40d0a16ca15112ae7d9d | clckwrks/clckwrks | URL.hs | # LANGUAGE DeriveDataTypeable , TemplateHaskell #
module Clckwrks.ProfileData.URL where
import Data.Data (Data, Typeable)
import Data.SafeCopy (SafeCopy(..), base, deriveSafeCopy)
import Data.UserId (UserId)
import Web.Routes.TH (derivePathInfo)
data ProfileDataURL
= CreateNewProfileData
| EditProfileData
| EditNewProfileData
| EditProfileDataFor UserId
deriving (Eq, Ord, Read, Show, Data, Typeable)
$(derivePathInfo ''ProfileDataURL)
$(deriveSafeCopy 1 'base ''ProfileDataURL)
| null | https://raw.githubusercontent.com/clckwrks/clckwrks/dd4ea1e2f41066aa5779f1cc22f3b7a0ca8a0bed/Clckwrks/ProfileData/URL.hs | haskell | # LANGUAGE DeriveDataTypeable , TemplateHaskell #
module Clckwrks.ProfileData.URL where
import Data.Data (Data, Typeable)
import Data.SafeCopy (SafeCopy(..), base, deriveSafeCopy)
import Data.UserId (UserId)
import Web.Routes.TH (derivePathInfo)
data ProfileDataURL
= CreateNewProfileData
| EditProfileData
| EditNewProfileData
| EditProfileDataFor UserId
deriving (Eq, Ord, Read, Show, Data, Typeable)
$(derivePathInfo ''ProfileDataURL)
$(deriveSafeCopy 1 'base ''ProfileDataURL)
| |
48de77f3df9d1849c85604ef3ecd280a5b984b3438c9618de29bb7fa9c6e4e05 | haskus/haskus-system | MicroArch.hs | -- | X86 Archtiectures and micro-architectures
module Haskus.Arch.X86_64.ISA.MicroArch
( X86Arch(..)
)
where
-- | X86 micro-architecture
data X86Arch
= Intel486
| IntelPentium
| IntelP6
deriving (Show,Eq)
| null | https://raw.githubusercontent.com/haskus/haskus-system/38b3a363c26bc4d82e3493d8638d46bc35678616/haskus-system/src/lib/Haskus/Arch/X86_64/ISA/MicroArch.hs | haskell | | X86 Archtiectures and micro-architectures
| X86 micro-architecture | module Haskus.Arch.X86_64.ISA.MicroArch
( X86Arch(..)
)
where
data X86Arch
= Intel486
| IntelPentium
| IntelP6
deriving (Show,Eq)
|
f35f47acdb44497176a466497ec40e6e56ba53ca3b1c7f6943fd46ca91b674bf | rmloveland/scheme48-0.53 | current-port.scm | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
; Current input, output, error, and noise ports.
These two ports are needed by the VM for the READ - CHAR and WRITE - CHAR
; opcodes.
(define $current-input-port (enum current-port-marker current-input-port))
(define $current-output-port (enum current-port-marker current-output-port))
(define $current-error-port (make-fluid #f))
(define $current-noise-port (make-fluid #f)) ; defaults to the error port
(define (current-input-port)
(fluid $current-input-port))
(define (current-output-port)
(fluid $current-output-port))
(define (current-error-port)
(fluid $current-error-port))
(define (current-noise-port)
(fluid $current-noise-port))
(define (initialize-i/o input output error thunk)
(with-current-ports input output error thunk))
(define (with-current-ports in out error thunk)
(let-fluids $current-input-port in
$current-output-port out
$current-error-port error
$current-noise-port error
thunk))
(define (call-with-current-input-port port thunk)
(let-fluid $current-input-port port thunk))
(define (call-with-current-output-port port thunk)
(let-fluid $current-output-port port thunk))
(define (call-with-current-noise-port port thunk)
(let-fluid $current-noise-port port thunk))
(define (silently thunk)
(call-with-current-noise-port (make-null-output-port) thunk))
;----------------
; Procedures with default port arguments.
(define (newline . port-option)
(write-char #\newline (output-port-option port-option)))
(define (char-ready? . port-option)
(real-char-ready? (input-port-option port-option)))
(define (output-port-option port-option)
(cond ((null? port-option) (current-output-port))
((null? (cdr port-option)) (car port-option))
(else (error "write-mumble: too many arguments" port-option))))
(define (input-port-option port-option)
(cond ((null? port-option) (current-input-port))
((null? (cdr port-option)) (car port-option))
(else (error "read-mumble: too many arguments" port-option))))
| null | https://raw.githubusercontent.com/rmloveland/scheme48-0.53/1ae4531fac7150bd2af42d124da9b50dd1b89ec1/scheme/rts/current-port.scm | scheme | Current input, output, error, and noise ports.
opcodes.
defaults to the error port
----------------
Procedures with default port arguments. | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
These two ports are needed by the VM for the READ - CHAR and WRITE - CHAR
(define $current-input-port (enum current-port-marker current-input-port))
(define $current-output-port (enum current-port-marker current-output-port))
(define $current-error-port (make-fluid #f))
(define (current-input-port)
(fluid $current-input-port))
(define (current-output-port)
(fluid $current-output-port))
(define (current-error-port)
(fluid $current-error-port))
(define (current-noise-port)
(fluid $current-noise-port))
(define (initialize-i/o input output error thunk)
(with-current-ports input output error thunk))
(define (with-current-ports in out error thunk)
(let-fluids $current-input-port in
$current-output-port out
$current-error-port error
$current-noise-port error
thunk))
(define (call-with-current-input-port port thunk)
(let-fluid $current-input-port port thunk))
(define (call-with-current-output-port port thunk)
(let-fluid $current-output-port port thunk))
(define (call-with-current-noise-port port thunk)
(let-fluid $current-noise-port port thunk))
(define (silently thunk)
(call-with-current-noise-port (make-null-output-port) thunk))
(define (newline . port-option)
(write-char #\newline (output-port-option port-option)))
(define (char-ready? . port-option)
(real-char-ready? (input-port-option port-option)))
(define (output-port-option port-option)
(cond ((null? port-option) (current-output-port))
((null? (cdr port-option)) (car port-option))
(else (error "write-mumble: too many arguments" port-option))))
(define (input-port-option port-option)
(cond ((null? port-option) (current-input-port))
((null? (cdr port-option)) (car port-option))
(else (error "read-mumble: too many arguments" port-option))))
|
94ef8fd00ca46aab226ba57c06b9989fc19508679f3562b90b086975157b59ca | bobzhang/fan | ast_inject.ml | open Astf
open Util
type key = string
let inject_exp_tbl: (key,exp) Hashtbl.t = Hashtbl.create 40
let inject_stru_tbl: (key,stru) Hashtbl.t = Hashtbl.create 40
let inject_clfield_tbl: (key,clfield)Hashtbl.t = Hashtbl.create 40
let register_inject_exp (k,f)=
Hashtbl.replace inject_exp_tbl k f
let register_inject_stru (k,f)=
Hashtbl.replace inject_stru_tbl k f
let register_inject_clfield (k,f) =
Hashtbl.replace inject_clfield_tbl k f
;;
%create{inject_exp inject_stru inject_clfield};;
%extend{
inject_exp:
[ Lid x %{
try Hashtbl.find inject_exp_tbl x
with Not_found -> failwithf "inject.exp %s not found" x } ]
inject_stru:
[Lid x %{
try Hashtbl.find inject_stru_tbl x
with Not_found -> failwithf "inject.exp %s not found" x }]
inject_clfield:
[Lid x %{
try Hashtbl.find inject_clfield_tbl x
with Not_found -> failwithf "inject.exp %s not found" x }]
};;
let open Ast_quotation in
let domain = Ns.inject in
begin
of_exp ~name:{domain; name = "exp"} ~entry:inject_exp ();
of_stru ~name:{domain; name = "stru"} ~entry:inject_stru ();
of_clfield ~name:{domain; name = "clfield"} ~entry:inject_clfield ();
end
(* local variables: *)
compile - command : " cd .. & & pmake main_annot / ast_inject.cmo "
(* end: *)
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/main/ast_inject.ml | ocaml | local variables:
end: | open Astf
open Util
type key = string
let inject_exp_tbl: (key,exp) Hashtbl.t = Hashtbl.create 40
let inject_stru_tbl: (key,stru) Hashtbl.t = Hashtbl.create 40
let inject_clfield_tbl: (key,clfield)Hashtbl.t = Hashtbl.create 40
let register_inject_exp (k,f)=
Hashtbl.replace inject_exp_tbl k f
let register_inject_stru (k,f)=
Hashtbl.replace inject_stru_tbl k f
let register_inject_clfield (k,f) =
Hashtbl.replace inject_clfield_tbl k f
;;
%create{inject_exp inject_stru inject_clfield};;
%extend{
inject_exp:
[ Lid x %{
try Hashtbl.find inject_exp_tbl x
with Not_found -> failwithf "inject.exp %s not found" x } ]
inject_stru:
[Lid x %{
try Hashtbl.find inject_stru_tbl x
with Not_found -> failwithf "inject.exp %s not found" x }]
inject_clfield:
[Lid x %{
try Hashtbl.find inject_clfield_tbl x
with Not_found -> failwithf "inject.exp %s not found" x }]
};;
let open Ast_quotation in
let domain = Ns.inject in
begin
of_exp ~name:{domain; name = "exp"} ~entry:inject_exp ();
of_stru ~name:{domain; name = "stru"} ~entry:inject_stru ();
of_clfield ~name:{domain; name = "clfield"} ~entry:inject_clfield ();
end
compile - command : " cd .. & & pmake main_annot / ast_inject.cmo "
|
3d06efafb66075c01f0d52a1e7a9d5c270aa6105498075407761e3398b25e2f8 | prathyvsh/the-little-schemer | 07-friends-and-relations.rkt | #lang racket
(define (atom? x) (or (symbol? x) (number? x) (boolean? x)))
(define (eqan? a1 a2)
(cond
((and (number? a1) (number? a2)) (= a1 a2))
((or (number? a1) (number? a2)) #f)
(else (eq? a1 a2))))
(define (eqlist? l1 l2)
(cond
((and (null? l1) (null? l2)) #t)
((or (null? l1) (null? l2)) #f)
((and (atom? l1) (null? l2)) #f)
((and (null? l1) (atom? l2)) #f)
((and (atom? l1) (atom? l2)) (eqan? l1 l2))
((and (atom? (first l1)) (atom? (first l2)))
(and (eq? (first l1) (first l2)) (eqlist? (rest l1) (rest l2))))
(else (and (eqlist? (first l1) (first l2))
(eqlist? (rest l1)
(rest l2))))))
(define (equal? s1 s2)
(cond
((and (atom? s1)
(atom? s2))
(eqan? s1 s2))
((or (atom? s1)
(atom? s2)) #f)
(else (eqlist? s1 s2))))
(define member?
(lambda (a lat)
(cond
((null? lat) #f)
(else (or (equal? a (first lat))
(member? a (rest lat)))))))
(define (set? s)
(cond
((null? s) #t)
(else (and (not (member? (first s) (rest s))) (set? (rest s))))))
(eq? (set? '(apple peaches apple plum)) #f)
(eq? (set? '(apple peaches pears plums)) #t)
(eq? (set? '(apple 3 pear 4 9 apple 3 4)) #f)
(define (makeset lat)
(cond
((null? lat) '())
((member? (first lat) (rest lat)) (makeset (rest lat)))
(else (cons (first lat) (makeset (rest lat))))))
(equal? (makeset '(apple peach pear peach plum apple lemon peach)) '(pear plum apple lemon peach))
(define (multirember a lat)
(cond
((null? lat) '())
((equal? a (first lat)) (multirember a (rest lat)))
(else (cons (first lat) (multirember a (rest lat))))))
(define (makeset2 lat)
(cond
((null? lat) '())
(else (cons (first lat) (makeset2 (multirember (first lat) (rest lat)))))))
(equal? (makeset2 '(apple peach pear peach plum apple lemon peach)) '(apple peach pear plum lemon))
(equal? (makeset2 '(apple 3 pear 4 9 apple 3 4)) '(apple 3 pear 4 9))
(define (subset? set1 set2)
(cond
((and (null? set1) (null? set2)) #t)
((null? set1) #t)
((null? set2) #f)
(else (and (member? (first set1) set2) (subset? (rest set1) set2)))))
(eq? (subset? '(5 chicken wings) '(5 hamburgers 2 pieces fried chicken and light duckling wings)) #t)
(eq? (subset? '(4 pounds of horseradish) '(four pounds chicken and 5 ounces horseradish)) #f)
(define (eqset? set1 set2) (and (subset? set1 set2) (subset? set2 set1)))
(define (intersect? set1 set2)
(cond
((or (null? set1) (null? set2)) '())
(else (or (member? (first set1) set2) (intersect? (rest set1) set2)))))
(eq? (intersect? '(stewed tomatoes and macaroni) '(macaroni and cheese)) #t)
(define (intersect set1 set2)
(cond
((or (null? set1) (null? set2)) '())
((member? (first set1) set2) (cons (first set1) (intersect (rest set1) set2)))
(else (intersect (rest set1) set2))))
(equal? (intersect '(stewed tomatoes and macaroni) '(macaroni and cheese)) '(and macaroni))
(define (union set1 set2)
(cond
((null? set1) set2)
((member? (first set1) set2) (union (rest set1) set2))
(else (cons (first set1) (union (rest set1) set2)))))
(equal? (union '(stewed tomatoes and macaroni casserole) '(macaroni and cheese)) '(stewed tomatoes casserole macaroni and cheese))
(define (difference set1 set2)
(cond
((null? set1) '())
((member? (first set1) set2) (union (rest set1) set2))
(else (cons (first set1) (union (rest set1) set2)))))
(define (intersectall l-set)
(cond
((null? (rest l-set)) (first l-set))
(else (intersect (first l-set) (intersectall (rest l-set))))))
(equal? (intersectall '((6 pears and) (3 peaches and 6 peppers) (8 pears and 6 plums) (and 6 prunes with some apples))) '(6 and))
(define (a-pair? a)
(cond
((atom? a) false)
((null? a) false)
((null? (rest a)) false)
((null? (rest (rest a))) true)
(else false)))
(eq? (a-pair? '(pear pear)) true)
(eq? (a-pair? '(3 7)) true)
(eq? (a-pair? '((2) (pair))) true)
(eq? (a-pair? '(full (house))) true)
(define (second p) (car (cdr p)))
(define (build s1 s2) (cons s1 (cons s2 '())))
(define (third p) (car (cdr (cdr p))))
(define (rel? s)
(and (set? s)
(cond
((null? s) true)
(else (and (a-pair? (first s)) (rel? (rest s)))))))
(eq? (rel? '(apple peaches pumpkin pie)) false)
(eq? (rel? '((apples peaches) (pumpkin pie) (apples peaches))) false)
;; (eq? (rel? '((apples peaches) (pumpkin pie))) false)
( eq ? ( rel ? ' ( ( 4 3 ) ( 4 2 ) ( 7 6 ) ( 6 2 ) ( 3 4 ) ) ) true )
(define (get-first l)
(cond
((null? l) '())
(else (first l))))
(define (firsts ls)
(cond
((null? ls) '())
(else (cons (get-first (first ls)) (firsts (rest ls))))))
(define (fun? s) (set? (firsts s)))
(equal? (fun? '((4 3) (4 2) (7 6) (6 2) (3 4))) false)
(equal? (fun? '((8 3) (4 2) (7 6) (6 2) (3 4))) true)
(equal? (fun? '((d 4) (b 0) (b 9) (e 5) (g 4))) false)
(define (revrel rel)
(cond
((null? rel) '())
(else (cons (build (second (first rel)) (first (first rel)))
(revrel (rest rel))))))
(equal? (revrel '((8 a) (pumpkin pie) (got sick))) '((a 8) (pie pumpkin) (sick got)))
(define (revpair pair) (build (second pair) (first pair)))
(define (revrel2 rel)
(cond
((null? rel) '())
(else (cons (revpair (first rel))
(revrel (rest rel))))))
(equal? (revrel2 '((8 a) (pumpkin pie) (got sick))) '((a 8) (pie pumpkin) (sick got)))
(define (seconds fun)
(cond
((null? fun) '())
(else (cons (second (first fun)) (seconds (rest fun))))))
(define (fullfun? fun) (and (set? (firsts fun)) (set? (seconds fun))))
(equal? (fullfun? '((8 3) (4 2) (7 6) (6 2) (3 4))) false)
(equal? (fullfun? '((8 3) (4 8) (7 6) (6 2) (3 4))) true)
(equal? (fullfun? '((grape raisin) (plum prune) (stewed prune))) false)
(define (one-to-one? fun) (and (fun? fun) (fun? (revrel fun))))
(equal? (one-to-one? '((chocolate chip) (doughy cookie))) true) | null | https://raw.githubusercontent.com/prathyvsh/the-little-schemer/02a6bdb0ef51969471811dd6cbe310dea09bcf64/07-friends-and-relations.rkt | racket | (eq? (rel? '((apples peaches) (pumpkin pie))) false) | #lang racket
(define (atom? x) (or (symbol? x) (number? x) (boolean? x)))
(define (eqan? a1 a2)
(cond
((and (number? a1) (number? a2)) (= a1 a2))
((or (number? a1) (number? a2)) #f)
(else (eq? a1 a2))))
(define (eqlist? l1 l2)
(cond
((and (null? l1) (null? l2)) #t)
((or (null? l1) (null? l2)) #f)
((and (atom? l1) (null? l2)) #f)
((and (null? l1) (atom? l2)) #f)
((and (atom? l1) (atom? l2)) (eqan? l1 l2))
((and (atom? (first l1)) (atom? (first l2)))
(and (eq? (first l1) (first l2)) (eqlist? (rest l1) (rest l2))))
(else (and (eqlist? (first l1) (first l2))
(eqlist? (rest l1)
(rest l2))))))
(define (equal? s1 s2)
(cond
((and (atom? s1)
(atom? s2))
(eqan? s1 s2))
((or (atom? s1)
(atom? s2)) #f)
(else (eqlist? s1 s2))))
(define member?
(lambda (a lat)
(cond
((null? lat) #f)
(else (or (equal? a (first lat))
(member? a (rest lat)))))))
(define (set? s)
(cond
((null? s) #t)
(else (and (not (member? (first s) (rest s))) (set? (rest s))))))
(eq? (set? '(apple peaches apple plum)) #f)
(eq? (set? '(apple peaches pears plums)) #t)
(eq? (set? '(apple 3 pear 4 9 apple 3 4)) #f)
(define (makeset lat)
(cond
((null? lat) '())
((member? (first lat) (rest lat)) (makeset (rest lat)))
(else (cons (first lat) (makeset (rest lat))))))
(equal? (makeset '(apple peach pear peach plum apple lemon peach)) '(pear plum apple lemon peach))
(define (multirember a lat)
(cond
((null? lat) '())
((equal? a (first lat)) (multirember a (rest lat)))
(else (cons (first lat) (multirember a (rest lat))))))
(define (makeset2 lat)
(cond
((null? lat) '())
(else (cons (first lat) (makeset2 (multirember (first lat) (rest lat)))))))
(equal? (makeset2 '(apple peach pear peach plum apple lemon peach)) '(apple peach pear plum lemon))
(equal? (makeset2 '(apple 3 pear 4 9 apple 3 4)) '(apple 3 pear 4 9))
(define (subset? set1 set2)
(cond
((and (null? set1) (null? set2)) #t)
((null? set1) #t)
((null? set2) #f)
(else (and (member? (first set1) set2) (subset? (rest set1) set2)))))
(eq? (subset? '(5 chicken wings) '(5 hamburgers 2 pieces fried chicken and light duckling wings)) #t)
(eq? (subset? '(4 pounds of horseradish) '(four pounds chicken and 5 ounces horseradish)) #f)
(define (eqset? set1 set2) (and (subset? set1 set2) (subset? set2 set1)))
(define (intersect? set1 set2)
(cond
((or (null? set1) (null? set2)) '())
(else (or (member? (first set1) set2) (intersect? (rest set1) set2)))))
(eq? (intersect? '(stewed tomatoes and macaroni) '(macaroni and cheese)) #t)
(define (intersect set1 set2)
(cond
((or (null? set1) (null? set2)) '())
((member? (first set1) set2) (cons (first set1) (intersect (rest set1) set2)))
(else (intersect (rest set1) set2))))
(equal? (intersect '(stewed tomatoes and macaroni) '(macaroni and cheese)) '(and macaroni))
(define (union set1 set2)
(cond
((null? set1) set2)
((member? (first set1) set2) (union (rest set1) set2))
(else (cons (first set1) (union (rest set1) set2)))))
(equal? (union '(stewed tomatoes and macaroni casserole) '(macaroni and cheese)) '(stewed tomatoes casserole macaroni and cheese))
(define (difference set1 set2)
(cond
((null? set1) '())
((member? (first set1) set2) (union (rest set1) set2))
(else (cons (first set1) (union (rest set1) set2)))))
(define (intersectall l-set)
(cond
((null? (rest l-set)) (first l-set))
(else (intersect (first l-set) (intersectall (rest l-set))))))
(equal? (intersectall '((6 pears and) (3 peaches and 6 peppers) (8 pears and 6 plums) (and 6 prunes with some apples))) '(6 and))
(define (a-pair? a)
(cond
((atom? a) false)
((null? a) false)
((null? (rest a)) false)
((null? (rest (rest a))) true)
(else false)))
(eq? (a-pair? '(pear pear)) true)
(eq? (a-pair? '(3 7)) true)
(eq? (a-pair? '((2) (pair))) true)
(eq? (a-pair? '(full (house))) true)
(define (second p) (car (cdr p)))
(define (build s1 s2) (cons s1 (cons s2 '())))
(define (third p) (car (cdr (cdr p))))
(define (rel? s)
(and (set? s)
(cond
((null? s) true)
(else (and (a-pair? (first s)) (rel? (rest s)))))))
(eq? (rel? '(apple peaches pumpkin pie)) false)
(eq? (rel? '((apples peaches) (pumpkin pie) (apples peaches))) false)
( eq ? ( rel ? ' ( ( 4 3 ) ( 4 2 ) ( 7 6 ) ( 6 2 ) ( 3 4 ) ) ) true )
(define (get-first l)
(cond
((null? l) '())
(else (first l))))
(define (firsts ls)
(cond
((null? ls) '())
(else (cons (get-first (first ls)) (firsts (rest ls))))))
(define (fun? s) (set? (firsts s)))
(equal? (fun? '((4 3) (4 2) (7 6) (6 2) (3 4))) false)
(equal? (fun? '((8 3) (4 2) (7 6) (6 2) (3 4))) true)
(equal? (fun? '((d 4) (b 0) (b 9) (e 5) (g 4))) false)
(define (revrel rel)
(cond
((null? rel) '())
(else (cons (build (second (first rel)) (first (first rel)))
(revrel (rest rel))))))
(equal? (revrel '((8 a) (pumpkin pie) (got sick))) '((a 8) (pie pumpkin) (sick got)))
(define (revpair pair) (build (second pair) (first pair)))
(define (revrel2 rel)
(cond
((null? rel) '())
(else (cons (revpair (first rel))
(revrel (rest rel))))))
(equal? (revrel2 '((8 a) (pumpkin pie) (got sick))) '((a 8) (pie pumpkin) (sick got)))
(define (seconds fun)
(cond
((null? fun) '())
(else (cons (second (first fun)) (seconds (rest fun))))))
(define (fullfun? fun) (and (set? (firsts fun)) (set? (seconds fun))))
(equal? (fullfun? '((8 3) (4 2) (7 6) (6 2) (3 4))) false)
(equal? (fullfun? '((8 3) (4 8) (7 6) (6 2) (3 4))) true)
(equal? (fullfun? '((grape raisin) (plum prune) (stewed prune))) false)
(define (one-to-one? fun) (and (fun? fun) (fun? (revrel fun))))
(equal? (one-to-one? '((chocolate chip) (doughy cookie))) true) |
a48397d32b0fb3614836280ab8a7fb8b9961a6b3e54b8527c29d06a4b54c62b6 | facebook/flow | proc_test.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Asserter
1 ( systemd ) S 0 1 1
118813 ( java ) R 1 420553 420553
527674 ( python3.6 ) R 118813 420553 420553
527663 ( python3.6 ) R 527663 420553 420553
527674 ( proc_test.opt ) R 527663 420553 420553
1 (systemd) S 0 1 1
118813 (java) R 1 420553 420553
527674 (python3.6) R 118813 420553 420553
527663 (python3.6) R 527663 420553 420553
527674 (proc_test.opt) R 527663 420553 420553
*)
let systemd_pid = 1
let java_pid = 118813
let python_pid1 = 527111
let python_pid2 = 527663
let proc_test_pid = ref 527674
let systemd_cmdline =
"/usr/lib/systemd/systemd\x00--switched-root\x00--system\x00--deserialize\x0030\x00"
This is a small subset of a Java tool 's cmdline contents
let java_cmdline =
"toold\x00-Xmx1000m\x00-Djava.awt.headless=true\x00-Djna.nosys=true\x00"
^ "-Djava.util.logging.config.class=com.company.tool.abc.bootstrapper.SomeConfig\x00-Dtool.test_util_no_tests_dir=true"
let python_cmdline1 =
"[xarexec]\x00/usr/local/bin/threshold-monitor\x00-tt\x00/mnt/xarfuse/uid-0/123/__run_xar_main__.py"
let python_cmdline2 =
"[xarexec]\x00/usr/local/bin/threshold-monitor\x00-tt\x00/mnt/xarfuse/uid-0/456/__run_xar_main__.py"
let proc_test_cmdline = "/data/users/unixname/proc_test/proc_test.opt\x00"
let proc_path_format = format_of_string "/proc/%d/"
let cmdline_path_format = format_of_string "%s/cmdline"
let stat_path_format = format_of_string "%s/stat"
let stat_format = format_of_string "%d (%s) R %d 123 456"
let create_proc_dir (pid : int) : string =
let proc_dir = Printf.sprintf proc_path_format pid in
Disk.mkdir_p proc_dir;
proc_dir
let create_cmdline (pid : int) (cmdline : string) : unit =
let proc_dir = create_proc_dir pid in
Disk.write_file ~file:(Printf.sprintf cmdline_path_format proc_dir) ~contents:cmdline
let create_stat (name : string) (pid : int) (ppid : int) : unit =
let proc_dir = create_proc_dir pid in
Disk.write_file
~file:(Printf.sprintf stat_path_format proc_dir)
~contents:(Printf.sprintf stat_format pid name ppid)
let setup ~(use_test_stubbing : bool) : unit =
if use_test_stubbing then (
create_stat "systemd" systemd_pid 0;
create_cmdline systemd_pid systemd_cmdline;
create_stat "java" java_pid systemd_pid;
create_cmdline java_pid java_cmdline;
create_stat "python3.6" python_pid1 java_pid;
create_cmdline python_pid1 python_cmdline1;
create_stat "python3.6" python_pid2 python_pid1;
create_cmdline python_pid2 python_cmdline2;
create_stat "proc_test.opt" proc_test_pid.contents python_pid2;
create_cmdline proc_test_pid.contents proc_test_cmdline
) else
proc_test_pid := Unix.getpid ()
let ok_or_assert (r : ('a, string) result) : 'a =
match r with
| Error e ->
Printf.eprintf "%s\n" e;
assert false
| Ok r -> r
let test_get_cmdline_self () : bool =
let cmdline = Proc.get_cmdline proc_test_pid.contents in
match cmdline with
| Error e ->
Printf.eprintf "%s" e;
assert false
| Ok c ->
String_asserter.assert_equals
"/data/users/unixname/proc_test/proc_test.opt "
c
"The process name should be correct!";
true
let test_get_proc_stat_systemd () : bool =
let proc_stat = ok_or_assert (Proc.get_proc_stat 1) in
String_asserter.assert_equals
"/usr/lib/systemd/systemd --switched-root --system --deserialize 30 "
proc_stat.Proc.cmdline
"The process cmdline should be correct!";
Int_asserter.assert_equals 0 proc_stat.Proc.ppid "The process's parent PID should be correct!";
true
let test_get_proc_stat_self () : bool =
let proc_stat = ok_or_assert (Proc.get_proc_stat proc_test_pid.contents) in
String_asserter.assert_equals
"/data/users/unixname/proc_test/proc_test.opt "
proc_stat.Proc.cmdline
"The process cmdline should be correct!";
true
let test_get_proc_stack_systemd () : bool =
let proc_stack = ok_or_assert (Proc.get_proc_stack 1) in
String_asserter.assert_list_equals
["/usr/lib/systemd/systemd --switched-root --system --deserialize 30"]
proc_stack
"The process cmdline stack should be correct!";
true
let test_get_proc_stack_self_max_depth () =
let proc_stack = ok_or_assert (Proc.get_proc_stack ~max_depth:2 proc_test_pid.contents) in
String_asserter.assert_list_equals
[
"[xarexec] /usr/local/bin/threshold-monitor -tt /mnt/xarfuse/uid-0/456/__run_xar_main__.py";
"/data/users/unixname/proc_test/proc_test.opt";
]
proc_stack
"The process name should be correct!";
true
let test_get_proc_stack_self_max_length () =
let proc_stack = ok_or_assert (Proc.get_proc_stack ~max_length:50 proc_test_pid.contents) in
String_asserter.assert_list_equals
[
"/usr/lib/systemd/systemd --switched-root --system...";
"toold -Xmx1000m -Djava.awt.headless=true -Djna.nos...";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mn...";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mn...";
"/data/users/unixname/proc_test/proc_test.opt";
]
proc_stack
"The process name should be correct!";
true
let test_get_proc_stack_self () =
let proc_stack = ok_or_assert (Proc.get_proc_stack proc_test_pid.contents) in
String_asserter.assert_list_equals
[
"/usr/lib/systemd/systemd --switched-root --system --deserialize 30";
"toold -Xmx1000m -Djava.awt.headless=true -Djna.nosys=true -Djava.util.logging.config"
^ ".class=com.company.tool.abc.bootstrapper.SomeConfig -Dtool.test_util_no_tests_dir=true";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mnt/xarfuse/uid-0/123/__run_xar_main__.py";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mnt/xarfuse/uid-0/456/__run_xar_main__.py";
"/data/users/unixname/proc_test/proc_test.opt";
]
proc_stack
"The process name should be correct!";
true
let test_get_proc_stack_non_existent_PID () : bool =
match Proc.get_proc_stack 9999999 with
| Ok _ -> false
| Error _ -> true
let tests =
[
("Test get_cmdline on self", test_get_cmdline_self);
("Test get_proc_stat on systemd", test_get_proc_stat_systemd);
("Test get_proc_stat on self", test_get_proc_stat_self);
("Test get_proc_stack on self", test_get_proc_stack_self);
("Test get_proc_stack on systemd", test_get_proc_stack_systemd);
("Test get_proc_stack on self with max depth", test_get_proc_stack_self_max_depth);
("Test get_proc_stack on self with max length", test_get_proc_stack_self_max_length);
("Test get_proc_stack for a non-existent PID", test_get_proc_stack_non_existent_PID);
]
let () =
(* Note: if you're running the tests with the default dependency injector,
you need to remember to pass false to setup() because it's not
possible to write to files in procfs. *)
setup ~use_test_stubbing:true;
Unit_test.run_all tests
| null | https://raw.githubusercontent.com/facebook/flow/789a748ce801e7a25a852c6079395281d9343573/src/hack_forked/test/utils/sys/proc_test.ml | ocaml | Note: if you're running the tests with the default dependency injector,
you need to remember to pass false to setup() because it's not
possible to write to files in procfs. |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Asserter
1 ( systemd ) S 0 1 1
118813 ( java ) R 1 420553 420553
527674 ( python3.6 ) R 118813 420553 420553
527663 ( python3.6 ) R 527663 420553 420553
527674 ( proc_test.opt ) R 527663 420553 420553
1 (systemd) S 0 1 1
118813 (java) R 1 420553 420553
527674 (python3.6) R 118813 420553 420553
527663 (python3.6) R 527663 420553 420553
527674 (proc_test.opt) R 527663 420553 420553
*)
let systemd_pid = 1
let java_pid = 118813
let python_pid1 = 527111
let python_pid2 = 527663
let proc_test_pid = ref 527674
let systemd_cmdline =
"/usr/lib/systemd/systemd\x00--switched-root\x00--system\x00--deserialize\x0030\x00"
This is a small subset of a Java tool 's cmdline contents
let java_cmdline =
"toold\x00-Xmx1000m\x00-Djava.awt.headless=true\x00-Djna.nosys=true\x00"
^ "-Djava.util.logging.config.class=com.company.tool.abc.bootstrapper.SomeConfig\x00-Dtool.test_util_no_tests_dir=true"
let python_cmdline1 =
"[xarexec]\x00/usr/local/bin/threshold-monitor\x00-tt\x00/mnt/xarfuse/uid-0/123/__run_xar_main__.py"
let python_cmdline2 =
"[xarexec]\x00/usr/local/bin/threshold-monitor\x00-tt\x00/mnt/xarfuse/uid-0/456/__run_xar_main__.py"
let proc_test_cmdline = "/data/users/unixname/proc_test/proc_test.opt\x00"
let proc_path_format = format_of_string "/proc/%d/"
let cmdline_path_format = format_of_string "%s/cmdline"
let stat_path_format = format_of_string "%s/stat"
let stat_format = format_of_string "%d (%s) R %d 123 456"
let create_proc_dir (pid : int) : string =
let proc_dir = Printf.sprintf proc_path_format pid in
Disk.mkdir_p proc_dir;
proc_dir
let create_cmdline (pid : int) (cmdline : string) : unit =
let proc_dir = create_proc_dir pid in
Disk.write_file ~file:(Printf.sprintf cmdline_path_format proc_dir) ~contents:cmdline
let create_stat (name : string) (pid : int) (ppid : int) : unit =
let proc_dir = create_proc_dir pid in
Disk.write_file
~file:(Printf.sprintf stat_path_format proc_dir)
~contents:(Printf.sprintf stat_format pid name ppid)
let setup ~(use_test_stubbing : bool) : unit =
if use_test_stubbing then (
create_stat "systemd" systemd_pid 0;
create_cmdline systemd_pid systemd_cmdline;
create_stat "java" java_pid systemd_pid;
create_cmdline java_pid java_cmdline;
create_stat "python3.6" python_pid1 java_pid;
create_cmdline python_pid1 python_cmdline1;
create_stat "python3.6" python_pid2 python_pid1;
create_cmdline python_pid2 python_cmdline2;
create_stat "proc_test.opt" proc_test_pid.contents python_pid2;
create_cmdline proc_test_pid.contents proc_test_cmdline
) else
proc_test_pid := Unix.getpid ()
let ok_or_assert (r : ('a, string) result) : 'a =
match r with
| Error e ->
Printf.eprintf "%s\n" e;
assert false
| Ok r -> r
let test_get_cmdline_self () : bool =
let cmdline = Proc.get_cmdline proc_test_pid.contents in
match cmdline with
| Error e ->
Printf.eprintf "%s" e;
assert false
| Ok c ->
String_asserter.assert_equals
"/data/users/unixname/proc_test/proc_test.opt "
c
"The process name should be correct!";
true
let test_get_proc_stat_systemd () : bool =
let proc_stat = ok_or_assert (Proc.get_proc_stat 1) in
String_asserter.assert_equals
"/usr/lib/systemd/systemd --switched-root --system --deserialize 30 "
proc_stat.Proc.cmdline
"The process cmdline should be correct!";
Int_asserter.assert_equals 0 proc_stat.Proc.ppid "The process's parent PID should be correct!";
true
let test_get_proc_stat_self () : bool =
let proc_stat = ok_or_assert (Proc.get_proc_stat proc_test_pid.contents) in
String_asserter.assert_equals
"/data/users/unixname/proc_test/proc_test.opt "
proc_stat.Proc.cmdline
"The process cmdline should be correct!";
true
let test_get_proc_stack_systemd () : bool =
let proc_stack = ok_or_assert (Proc.get_proc_stack 1) in
String_asserter.assert_list_equals
["/usr/lib/systemd/systemd --switched-root --system --deserialize 30"]
proc_stack
"The process cmdline stack should be correct!";
true
let test_get_proc_stack_self_max_depth () =
let proc_stack = ok_or_assert (Proc.get_proc_stack ~max_depth:2 proc_test_pid.contents) in
String_asserter.assert_list_equals
[
"[xarexec] /usr/local/bin/threshold-monitor -tt /mnt/xarfuse/uid-0/456/__run_xar_main__.py";
"/data/users/unixname/proc_test/proc_test.opt";
]
proc_stack
"The process name should be correct!";
true
let test_get_proc_stack_self_max_length () =
let proc_stack = ok_or_assert (Proc.get_proc_stack ~max_length:50 proc_test_pid.contents) in
String_asserter.assert_list_equals
[
"/usr/lib/systemd/systemd --switched-root --system...";
"toold -Xmx1000m -Djava.awt.headless=true -Djna.nos...";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mn...";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mn...";
"/data/users/unixname/proc_test/proc_test.opt";
]
proc_stack
"The process name should be correct!";
true
let test_get_proc_stack_self () =
let proc_stack = ok_or_assert (Proc.get_proc_stack proc_test_pid.contents) in
String_asserter.assert_list_equals
[
"/usr/lib/systemd/systemd --switched-root --system --deserialize 30";
"toold -Xmx1000m -Djava.awt.headless=true -Djna.nosys=true -Djava.util.logging.config"
^ ".class=com.company.tool.abc.bootstrapper.SomeConfig -Dtool.test_util_no_tests_dir=true";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mnt/xarfuse/uid-0/123/__run_xar_main__.py";
"[xarexec] /usr/local/bin/threshold-monitor -tt /mnt/xarfuse/uid-0/456/__run_xar_main__.py";
"/data/users/unixname/proc_test/proc_test.opt";
]
proc_stack
"The process name should be correct!";
true
let test_get_proc_stack_non_existent_PID () : bool =
match Proc.get_proc_stack 9999999 with
| Ok _ -> false
| Error _ -> true
let tests =
[
("Test get_cmdline on self", test_get_cmdline_self);
("Test get_proc_stat on systemd", test_get_proc_stat_systemd);
("Test get_proc_stat on self", test_get_proc_stat_self);
("Test get_proc_stack on self", test_get_proc_stack_self);
("Test get_proc_stack on systemd", test_get_proc_stack_systemd);
("Test get_proc_stack on self with max depth", test_get_proc_stack_self_max_depth);
("Test get_proc_stack on self with max length", test_get_proc_stack_self_max_length);
("Test get_proc_stack for a non-existent PID", test_get_proc_stack_non_existent_PID);
]
let () =
setup ~use_test_stubbing:true;
Unit_test.run_all tests
|
aff763ab55293a88dd5096c565fa974c66b4db1914b052d25f33abeea97e7ca9 | larcenists/larceny | if1.scm | (text
(seq (nop)
(if (alt z! a!)
(nop)
(inv (nop)))
(nop)))
00000000 90 nop
00000001 7402 jz 0x5
00000003 7603 jna 0x8
00000005 90 nop
00000006 EB03 short 0xb
00000008 90 nop
00000009 EB01 short 0xc
0000000B 90 nop
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/src/Lib/Sassy/tests/prims/if1.scm | scheme | (text
(seq (nop)
(if (alt z! a!)
(nop)
(inv (nop)))
(nop)))
00000000 90 nop
00000001 7402 jz 0x5
00000003 7603 jna 0x8
00000005 90 nop
00000006 EB03 short 0xb
00000008 90 nop
00000009 EB01 short 0xc
0000000B 90 nop
| |
9905c7f5bcfdcdebc6932ff7970e794134107eb31819ef78d5bcbd7e27cb16a6 | cldwalker/logseq-query | util.cljs | (ns cldwalker.logseq-query.util
(:require [clojure.pprint :as pprint]
[clojure.edn :as edn]
[clojure.string :as str]
["fs" :as node-fs]
["child_process" :as child-process]
[nbb.classpath :as classpath]
[cldwalker.logseq-query.logseq-rules :as rules]
[cldwalker.logseq-query.fs :as fs]))
;; Misc utils
(defn print-table
[rows & {:keys [fields]}]
(if fields (pprint/print-table fields rows) (pprint/print-table rows))
(println "Total:" (count rows)))
(defn slurp
"Like clojure.core/slurp"
[file]
(str (node-fs/readFileSync file)))
(defn- resource
[file-name]
(some #(when (node-fs/existsSync (str % "/" file-name))
(str % "/" file-name))
(str/split (classpath/get-classpath) #":")))
(defn update-keys
"Not in cljs yet. One day this can ripped out. Maps function `f` over the keys
of map `m` to produce a new map."
[m f]
(reduce-kv
(fn [m_ k v]
(assoc m_ (f k) v)) {} m))
(defn sh
"Run shell cmd synchronously and print to inherited streams by default. Aims
to be similar to babashka.tasks/shell"
[cmd opts]
(child-process/spawnSync (first cmd)
(clj->js (rest cmd))
(clj->js (merge {:stdio "inherit"} opts))))
;; Config fns
(defn read-config-file
[file]
(if (fs/exists? file)
(-> file slurp edn/read-string)
{}))
(defn- get-logseq-rules
[]
(let [descs {:block-content "Blocks that have given string in :block/content"
:has-property "Blocks that have given property"
:has-page-property "Pages that have given property"
:page-property "Pages that have property equal to value or that contain the value"
:page-ref "Blocks associated to given page/tag"
:task "Tasks that contain one of markers"}]
;; TODO: Debug issues with upstream property
(->> (dissoc rules/query-dsl-rules :property)
(map (fn [[k v]]
[(keyword "logseq" (name k))
{:rule v :desc (descs k)}]))
(into {}))))
(defn get-all-rules
[]
(merge (get-logseq-rules)
(-> "rules.edn" resource slurp edn/read-string)
(when-not js/process.env.LQ_DISABLE_GLOBAL
(read-config-file (str (fs/expand-home "~/.lq/rules.edn"))))))
(defn get-all-queries
[]
(merge (-> "queries.edn" resource slurp edn/read-string)
(when-not js/process.env.LQ_DISABLE_GLOBAL
(read-config-file (str (fs/expand-home "~/.lq/queries.edn"))))))
(defn get-config
[]
(read-config-file (str (fs/expand-home "~/.lq/config.edn"))))
Graph fns
(defn get-graph-paths
[]
(let [dir (fs/expand-home "~/.logseq/graphs")]
(when (fs/directory? dir)
(fs/glob dir "*.transit"))))
(defn full-path->graph
[path]
(second (re-find #"\+\+([^\+]+).transit$" path)))
(defn get-graph-path
[graph]
(some #(when (= graph (full-path->graph %)) %)
(get-graph-paths)))
| null | https://raw.githubusercontent.com/cldwalker/logseq-query/064700384723b9c4934e0e73cc75d35f5fd255c3/src/cldwalker/logseq_query/util.cljs | clojure | Misc utils
Config fns
TODO: Debug issues with upstream property | (ns cldwalker.logseq-query.util
(:require [clojure.pprint :as pprint]
[clojure.edn :as edn]
[clojure.string :as str]
["fs" :as node-fs]
["child_process" :as child-process]
[nbb.classpath :as classpath]
[cldwalker.logseq-query.logseq-rules :as rules]
[cldwalker.logseq-query.fs :as fs]))
(defn print-table
[rows & {:keys [fields]}]
(if fields (pprint/print-table fields rows) (pprint/print-table rows))
(println "Total:" (count rows)))
(defn slurp
"Like clojure.core/slurp"
[file]
(str (node-fs/readFileSync file)))
(defn- resource
[file-name]
(some #(when (node-fs/existsSync (str % "/" file-name))
(str % "/" file-name))
(str/split (classpath/get-classpath) #":")))
(defn update-keys
"Not in cljs yet. One day this can ripped out. Maps function `f` over the keys
of map `m` to produce a new map."
[m f]
(reduce-kv
(fn [m_ k v]
(assoc m_ (f k) v)) {} m))
(defn sh
"Run shell cmd synchronously and print to inherited streams by default. Aims
to be similar to babashka.tasks/shell"
[cmd opts]
(child-process/spawnSync (first cmd)
(clj->js (rest cmd))
(clj->js (merge {:stdio "inherit"} opts))))
(defn read-config-file
[file]
(if (fs/exists? file)
(-> file slurp edn/read-string)
{}))
(defn- get-logseq-rules
[]
(let [descs {:block-content "Blocks that have given string in :block/content"
:has-property "Blocks that have given property"
:has-page-property "Pages that have given property"
:page-property "Pages that have property equal to value or that contain the value"
:page-ref "Blocks associated to given page/tag"
:task "Tasks that contain one of markers"}]
(->> (dissoc rules/query-dsl-rules :property)
(map (fn [[k v]]
[(keyword "logseq" (name k))
{:rule v :desc (descs k)}]))
(into {}))))
(defn get-all-rules
[]
(merge (get-logseq-rules)
(-> "rules.edn" resource slurp edn/read-string)
(when-not js/process.env.LQ_DISABLE_GLOBAL
(read-config-file (str (fs/expand-home "~/.lq/rules.edn"))))))
(defn get-all-queries
[]
(merge (-> "queries.edn" resource slurp edn/read-string)
(when-not js/process.env.LQ_DISABLE_GLOBAL
(read-config-file (str (fs/expand-home "~/.lq/queries.edn"))))))
(defn get-config
[]
(read-config-file (str (fs/expand-home "~/.lq/config.edn"))))
Graph fns
(defn get-graph-paths
[]
(let [dir (fs/expand-home "~/.logseq/graphs")]
(when (fs/directory? dir)
(fs/glob dir "*.transit"))))
(defn full-path->graph
[path]
(second (re-find #"\+\+([^\+]+).transit$" path)))
(defn get-graph-path
[graph]
(some #(when (= graph (full-path->graph %)) %)
(get-graph-paths)))
|
5d96b998b74972fc91d5dc4d0b4616c92fd55f33551fd150e838e0a8a4b0c809 | eholk/harlan | expand-primitives.scm | (library
(harlan front expand-primitives)
(export expand-primitives)
(import
(rnrs)
(elegant-weapons helpers)
(elegant-weapons compat))
;; This pass macro-expands primitives. It also inserts fresh region
;; variables.
(define-match expand-primitives
((module ,[expand-prim-decl -> decl*] ...)
`(module ,(apply append decl*) ...)))
(define-match expand-prim-decl
((fn ,name ,args ,t ,[expand-prim-stmt -> stmt])
`((fn ,name ,args ,t ,stmt)))
((define-datatype ,t (,c ,t* ...) ...)
;; For now we guard for simple enum types. The region calling
;; convention doesn't work very well with print the way it
;; currently works.
(guard (symbol? t))
(let ((adt (gensym 'adt))
(out (gensym 'out))
(type (match t
((,n ,r) `(adt ,n ,r))
(,n `(adt ,n)))))
`((define-datatype ,t (,c ,t* ...) ...)
(fn print (,adt ,out) (fn (,type (ptr ofstream)) -> void)
(begin
(do (match int (var ,type ,adt)
,@(map (lambda (c t*)
(let ((x* (map (lambda (_) (gensym 'x)) t*))
)
`((,c ,x* ...)
(begin
(print (str ,(string-append
"("
(symbol->string c)))
(var (ptr ofstream) ,out))
,@(let ((out (map (lambda (_)
`(var (ptr ofstream)
,out))
t*)))
`((begin (print (str " ")
,out)
(print (var ,t* ,x*)
,out)) ...))
(print (str ")") (var (ptr ofstream) ,out))
(int 0)))))
c t*)))
(return)))
(fn print (,adt) (fn (,type) -> void)
(begin
(do (match int (var ,type ,adt)
,@(map (lambda (c t*)
(let ((x* (map (lambda (_) (gensym 'x)) t*))
)
`((,c ,x* ...)
(begin
(print (str ,(string-append
"("
(symbol->string c))))
(begin (print (str " "))
(print (var ,t* ,x*))) ...
(print (str ")"))
(int 0)))))
c t*)))
(return))))))
Fallback for datatypes we do n't generate print for .
((define-datatype ,t (,c ,t* ...) ...)
`((define-datatype ,t (,c ,t* ...) ...)))
((extern ,name ,args -> ,rtype)
`((extern ,name ,args -> ,rtype))))
(define-match expand-prim-stmt
((let ((,x* ,t* ,[expand-prim-expr -> e*]) ...) ,[body])
`(let ((,x* ,t* ,e*) ...) ,body))
((let-region (,r ...) ,[body]) `(let-region (,r ...) ,body))
((set! ,[expand-prim-expr -> lhs] ,[expand-prim-expr -> rhs])
`(set! ,lhs ,rhs))
((if ,[expand-prim-expr -> test] ,[conseq] ,[altern])
`(if ,test ,conseq ,altern))
((if ,[expand-prim-expr -> test] ,[conseq])
`(if ,test ,conseq))
((while ,[expand-prim-expr -> test] ,[body])
`(while ,test ,body))
((begin ,[stmt*] ...)
`(begin . ,stmt*))
((print (vec ,r ,t) ,[expand-prim-expr -> e]
,[expand-prim-expr -> stream])
(expand-print r t e stream))
((print (vec ,r ,t) ,[expand-prim-expr -> e])
(expand-print r t e))
((print ,t ,[expand-prim-expr -> e] ...)
`(print . ,e))
((println ,t . ,expr)
`(begin
,(expand-prim-stmt `(print ,t . ,expr))
(print (str "\n"))))
((assert ,[expand-prim-expr -> e])
`(assert ,e))
((return) `(return))
((return ,[expand-prim-expr -> e])
`(return ,e))
((do ,[expand-prim-expr -> e])
`(do ,e)))
(define-match expand-prim-expr
((,t ,v) (guard (scalar-type? t)) `(,t ,v))
((var ,t ,x) `(var ,t ,x))
((int->float ,[e]) `(int->float ,e))
((float->int ,[e]) `(float->int ,e))
((cast ,t ,[e]) `(cast ,t ,e))
((iota ,[e])
`(iota ,e))
((iota-r ,r ,[e])
`(iota-r ,r ,e))
((vector (vec ,r ,t) ,[e*] ...)
`(vector (vec ,r ,t) ,r . ,e*))
((vector-r (vec ,r ,t) ,r ,[e*] ...)
`(vector (vec ,r ,t) ,r . ,e*))
((make-vector (vec ,r ,t) ,[size] ,[init])
(let ((i (gensym 'i))
(len (gensym 'len))
(v (gensym 'v)))
`(let ((,len int ,size))
(let ((,v (vec ,r ,t) (make-vector ,t ,r (var int ,len))))
(begin
(let ((,i int (int 0)))
(while (< (var int ,i) (var int ,len))
(begin
(set! (vector-ref ,t
(var (vec ,r ,t) ,v)
(var int ,i))
,init)
(set! (var int ,i) (+ (var int ,i) (int 1))))))
(var (vec ,r ,t) ,v))))))
((vector-ref ,t ,[v] ,[i])
`(vector-ref ,t ,v ,i))
((unsafe-vector-ref ,t ,[v] ,[i])
`(unsafe-vector-ref ,t ,v ,i))
((unsafe-vec-ptr ,t ,[v])
`(unsafe-vec-ptr ,t ,v))
((length ,[e])
`(length ,e))
((call ,[f] ,[args] ...)
`(call ,f . ,args))
((invoke ,[f] ,[args] ...)
`(invoke ,f . ,args))
((lambda ,t0 ((,x ,t) ...) ,[e])
`(lambda ,t0 ((,x ,t) ...) ,e))
((if ,[test] ,[conseq] ,[altern])
`(if ,test ,conseq ,altern))
((if ,[test] ,[conseq])
`(if ,test ,conseq))
((kernel ,ktype (((,x ,t) (,[xs] ,ts)) ...) ,[body])
`(kernel ,ktype ,(gensym 'region) (((,x ,t) (,xs ,ts)) ...) ,body))
((kernel-r ,ktype ,r (((,x ,t) (,[xs] ,ts)) ...) ,[body])
`(kernel ,ktype ,r (((,x ,t) (,xs ,ts)) ...) ,body))
((let ((,x* ,t* ,[e*]) ...) ,[e])
`(let ((,x* ,t* ,e*) ...) ,e))
((begin ,[expand-prim-stmt -> s*] ... ,[e])
`(begin ,s* ... ,e))
((+ (vec ,t) ,[lhs] ,[rhs])
(expand-vec-addition t lhs rhs))
((= (vec ,r ,t) ,[lhs] ,[rhs])
(expand-vec-comparison t r lhs rhs))
((match ,t ,[e] (,p ,[e*]) ...)
`(match ,t ,e (,p ,e*) ...))
((error! ,s)
`(call (var (fn () -> void) harlan_error) (str ,s)))
((,op ,t ,[lhs] ,[rhs])
(guard (or (relop? op) (binop? op)))
`(,op ,lhs ,rhs)))
(define (expand-print r t e . stream)
(let ((v (gensym 'v))
(i (gensym 'i))
(len (gensym 'len)))
`(let ((,v (vec ,r ,t) ,e))
(begin
(print (str "[") . ,stream)
(let ((,i int (int 0))
(,len int (length (var (vec ,r ,t) ,v))))
(while (< (var int ,i) (var int ,len))
(begin
,(if (scalar-type? t)
`(if (> (var int ,i) (int 0))
(print (str " ") . ,stream))
`(if (> (var int ,i) (int 0))
(print (str " \n ") . ,stream)))
,(expand-prim-stmt
`(print ,t
(vector-ref ,t
(var (vec ,r ,t) ,v) (var int ,i))
. ,stream))
(set! (var int ,i) (+ (var int ,i) (int 1))))))
(print (str "]") . ,stream)))))
(define (expand-vec-addition t lhs rhs)
(let ((l (gensym 'lhs))
(r (gensym 'rhs))
(len (gensym 'len))
(i (gensym 'i))
(res (gensym 'res))
(lhsi (gensym 'lhsi))
(rhsi (gensym 'rhsi)))
`(let ((,l (vec ,t) ,lhs)
(,r (vec ,t) ,rhs))
(let ((,len int (length (var (vec ,t) ,l))))
(let ((,res (vec ,t) (make-vector ,t ,(gensym 'region) (var int ,len))))
(begin
(for (,i (int 0) (var int ,len) (int 1))
(let ((,lhsi
,t
(vector-ref ,t (var (vec ,t) ,l)
(var int ,i)))
(,rhsi
,t
(vector-ref ,t (var (vec ,t) ,r)
(var int ,i))))
(set! (vector-ref ,t
(var (vec ,t) ,res)
(var int ,i))
,(expand-prim-expr
`(+ ,t (var ,t ,lhsi) (var ,t ,rhsi))))))
(var (vec ,t) ,res)))))))
(define (expand-vec-comparison t r lhs rhs)
(let ((lv (gensym 'lhs))
(rv (gensym 'rhs))
(len (gensym 'len))
(i (gensym 'i))
(res (gensym 'res))
(lhsi (gensym 'lhsi))
(rhsi (gensym 'rhsi)))
`(let ((,lv (vec ,r ,t) ,lhs)
(,rv (vec ,r ,t) ,rhs))
(let ((,len int (length (var (vec ,r ,t) ,lv)))
(,res bool (bool #t)))
(begin
(if (= (var int ,len)
(length (var (vec ,r ,t) ,rv)))
(for (,i (int 0) (var int ,len) (int 1))
(let ((,lhsi ,t
(vector-ref ,t (var (vec ,r ,t) ,lv)
(var int ,i)))
(,rhsi ,t
(vector-ref ,t (var (vec ,r ,t) ,rv)
(var int ,i))))
(if (= ,(expand-prim-expr
`(= ,t (var ,t ,lhsi) (var ,t ,rhsi)))
(bool #f))
(begin (set! (var bool ,res) (bool #f))
(set! (var int ,i) (var int ,len))))))
(set! (var bool ,res) (bool #f)))
(var bool ,res))))))
;; end library
)
| null | https://raw.githubusercontent.com/eholk/harlan/3afd95b1c3ad02a354481774585e866857a687b8/harlan/front/expand-primitives.scm | scheme | This pass macro-expands primitives. It also inserts fresh region
variables.
For now we guard for simple enum types. The region calling
convention doesn't work very well with print the way it
currently works.
end library | (library
(harlan front expand-primitives)
(export expand-primitives)
(import
(rnrs)
(elegant-weapons helpers)
(elegant-weapons compat))
(define-match expand-primitives
((module ,[expand-prim-decl -> decl*] ...)
`(module ,(apply append decl*) ...)))
(define-match expand-prim-decl
((fn ,name ,args ,t ,[expand-prim-stmt -> stmt])
`((fn ,name ,args ,t ,stmt)))
((define-datatype ,t (,c ,t* ...) ...)
(guard (symbol? t))
(let ((adt (gensym 'adt))
(out (gensym 'out))
(type (match t
((,n ,r) `(adt ,n ,r))
(,n `(adt ,n)))))
`((define-datatype ,t (,c ,t* ...) ...)
(fn print (,adt ,out) (fn (,type (ptr ofstream)) -> void)
(begin
(do (match int (var ,type ,adt)
,@(map (lambda (c t*)
(let ((x* (map (lambda (_) (gensym 'x)) t*))
)
`((,c ,x* ...)
(begin
(print (str ,(string-append
"("
(symbol->string c)))
(var (ptr ofstream) ,out))
,@(let ((out (map (lambda (_)
`(var (ptr ofstream)
,out))
t*)))
`((begin (print (str " ")
,out)
(print (var ,t* ,x*)
,out)) ...))
(print (str ")") (var (ptr ofstream) ,out))
(int 0)))))
c t*)))
(return)))
(fn print (,adt) (fn (,type) -> void)
(begin
(do (match int (var ,type ,adt)
,@(map (lambda (c t*)
(let ((x* (map (lambda (_) (gensym 'x)) t*))
)
`((,c ,x* ...)
(begin
(print (str ,(string-append
"("
(symbol->string c))))
(begin (print (str " "))
(print (var ,t* ,x*))) ...
(print (str ")"))
(int 0)))))
c t*)))
(return))))))
Fallback for datatypes we do n't generate print for .
((define-datatype ,t (,c ,t* ...) ...)
`((define-datatype ,t (,c ,t* ...) ...)))
((extern ,name ,args -> ,rtype)
`((extern ,name ,args -> ,rtype))))
(define-match expand-prim-stmt
((let ((,x* ,t* ,[expand-prim-expr -> e*]) ...) ,[body])
`(let ((,x* ,t* ,e*) ...) ,body))
((let-region (,r ...) ,[body]) `(let-region (,r ...) ,body))
((set! ,[expand-prim-expr -> lhs] ,[expand-prim-expr -> rhs])
`(set! ,lhs ,rhs))
((if ,[expand-prim-expr -> test] ,[conseq] ,[altern])
`(if ,test ,conseq ,altern))
((if ,[expand-prim-expr -> test] ,[conseq])
`(if ,test ,conseq))
((while ,[expand-prim-expr -> test] ,[body])
`(while ,test ,body))
((begin ,[stmt*] ...)
`(begin . ,stmt*))
((print (vec ,r ,t) ,[expand-prim-expr -> e]
,[expand-prim-expr -> stream])
(expand-print r t e stream))
((print (vec ,r ,t) ,[expand-prim-expr -> e])
(expand-print r t e))
((print ,t ,[expand-prim-expr -> e] ...)
`(print . ,e))
((println ,t . ,expr)
`(begin
,(expand-prim-stmt `(print ,t . ,expr))
(print (str "\n"))))
((assert ,[expand-prim-expr -> e])
`(assert ,e))
((return) `(return))
((return ,[expand-prim-expr -> e])
`(return ,e))
((do ,[expand-prim-expr -> e])
`(do ,e)))
(define-match expand-prim-expr
((,t ,v) (guard (scalar-type? t)) `(,t ,v))
((var ,t ,x) `(var ,t ,x))
((int->float ,[e]) `(int->float ,e))
((float->int ,[e]) `(float->int ,e))
((cast ,t ,[e]) `(cast ,t ,e))
((iota ,[e])
`(iota ,e))
((iota-r ,r ,[e])
`(iota-r ,r ,e))
((vector (vec ,r ,t) ,[e*] ...)
`(vector (vec ,r ,t) ,r . ,e*))
((vector-r (vec ,r ,t) ,r ,[e*] ...)
`(vector (vec ,r ,t) ,r . ,e*))
((make-vector (vec ,r ,t) ,[size] ,[init])
(let ((i (gensym 'i))
(len (gensym 'len))
(v (gensym 'v)))
`(let ((,len int ,size))
(let ((,v (vec ,r ,t) (make-vector ,t ,r (var int ,len))))
(begin
(let ((,i int (int 0)))
(while (< (var int ,i) (var int ,len))
(begin
(set! (vector-ref ,t
(var (vec ,r ,t) ,v)
(var int ,i))
,init)
(set! (var int ,i) (+ (var int ,i) (int 1))))))
(var (vec ,r ,t) ,v))))))
((vector-ref ,t ,[v] ,[i])
`(vector-ref ,t ,v ,i))
((unsafe-vector-ref ,t ,[v] ,[i])
`(unsafe-vector-ref ,t ,v ,i))
((unsafe-vec-ptr ,t ,[v])
`(unsafe-vec-ptr ,t ,v))
((length ,[e])
`(length ,e))
((call ,[f] ,[args] ...)
`(call ,f . ,args))
((invoke ,[f] ,[args] ...)
`(invoke ,f . ,args))
((lambda ,t0 ((,x ,t) ...) ,[e])
`(lambda ,t0 ((,x ,t) ...) ,e))
((if ,[test] ,[conseq] ,[altern])
`(if ,test ,conseq ,altern))
((if ,[test] ,[conseq])
`(if ,test ,conseq))
((kernel ,ktype (((,x ,t) (,[xs] ,ts)) ...) ,[body])
`(kernel ,ktype ,(gensym 'region) (((,x ,t) (,xs ,ts)) ...) ,body))
((kernel-r ,ktype ,r (((,x ,t) (,[xs] ,ts)) ...) ,[body])
`(kernel ,ktype ,r (((,x ,t) (,xs ,ts)) ...) ,body))
((let ((,x* ,t* ,[e*]) ...) ,[e])
`(let ((,x* ,t* ,e*) ...) ,e))
((begin ,[expand-prim-stmt -> s*] ... ,[e])
`(begin ,s* ... ,e))
((+ (vec ,t) ,[lhs] ,[rhs])
(expand-vec-addition t lhs rhs))
((= (vec ,r ,t) ,[lhs] ,[rhs])
(expand-vec-comparison t r lhs rhs))
((match ,t ,[e] (,p ,[e*]) ...)
`(match ,t ,e (,p ,e*) ...))
((error! ,s)
`(call (var (fn () -> void) harlan_error) (str ,s)))
((,op ,t ,[lhs] ,[rhs])
(guard (or (relop? op) (binop? op)))
`(,op ,lhs ,rhs)))
(define (expand-print r t e . stream)
(let ((v (gensym 'v))
(i (gensym 'i))
(len (gensym 'len)))
`(let ((,v (vec ,r ,t) ,e))
(begin
(print (str "[") . ,stream)
(let ((,i int (int 0))
(,len int (length (var (vec ,r ,t) ,v))))
(while (< (var int ,i) (var int ,len))
(begin
,(if (scalar-type? t)
`(if (> (var int ,i) (int 0))
(print (str " ") . ,stream))
`(if (> (var int ,i) (int 0))
(print (str " \n ") . ,stream)))
,(expand-prim-stmt
`(print ,t
(vector-ref ,t
(var (vec ,r ,t) ,v) (var int ,i))
. ,stream))
(set! (var int ,i) (+ (var int ,i) (int 1))))))
(print (str "]") . ,stream)))))
(define (expand-vec-addition t lhs rhs)
(let ((l (gensym 'lhs))
(r (gensym 'rhs))
(len (gensym 'len))
(i (gensym 'i))
(res (gensym 'res))
(lhsi (gensym 'lhsi))
(rhsi (gensym 'rhsi)))
`(let ((,l (vec ,t) ,lhs)
(,r (vec ,t) ,rhs))
(let ((,len int (length (var (vec ,t) ,l))))
(let ((,res (vec ,t) (make-vector ,t ,(gensym 'region) (var int ,len))))
(begin
(for (,i (int 0) (var int ,len) (int 1))
(let ((,lhsi
,t
(vector-ref ,t (var (vec ,t) ,l)
(var int ,i)))
(,rhsi
,t
(vector-ref ,t (var (vec ,t) ,r)
(var int ,i))))
(set! (vector-ref ,t
(var (vec ,t) ,res)
(var int ,i))
,(expand-prim-expr
`(+ ,t (var ,t ,lhsi) (var ,t ,rhsi))))))
(var (vec ,t) ,res)))))))
(define (expand-vec-comparison t r lhs rhs)
(let ((lv (gensym 'lhs))
(rv (gensym 'rhs))
(len (gensym 'len))
(i (gensym 'i))
(res (gensym 'res))
(lhsi (gensym 'lhsi))
(rhsi (gensym 'rhsi)))
`(let ((,lv (vec ,r ,t) ,lhs)
(,rv (vec ,r ,t) ,rhs))
(let ((,len int (length (var (vec ,r ,t) ,lv)))
(,res bool (bool #t)))
(begin
(if (= (var int ,len)
(length (var (vec ,r ,t) ,rv)))
(for (,i (int 0) (var int ,len) (int 1))
(let ((,lhsi ,t
(vector-ref ,t (var (vec ,r ,t) ,lv)
(var int ,i)))
(,rhsi ,t
(vector-ref ,t (var (vec ,r ,t) ,rv)
(var int ,i))))
(if (= ,(expand-prim-expr
`(= ,t (var ,t ,lhsi) (var ,t ,rhsi)))
(bool #f))
(begin (set! (var bool ,res) (bool #f))
(set! (var int ,i) (var int ,len))))))
(set! (var bool ,res) (bool #f)))
(var bool ,res))))))
)
|
76bf540ece94bde4b6cefea413562ff98184665198ea2980f0470c8604a9d4da | eholk/elegant-weapons | graphviz.scm | (library (elegant-weapons graphviz)
(export dot->string write-dot)
(import
(rnrs)
(elegant-weapons helpers))
(define dot->string
(case-lambda
((g) (dot->string g '()))
((g c)
(string-append
"digraph G {\n"
(join ""
(map (lambda (n)
(let ((s (car n)))
(join ""
(map (lambda (n)
(string-append
" \""
(symbol->string s)
"\" -> \""
(symbol->string n)
"\";\n"))
(cdr n)))))
g))
(join
"\n"
(map (let ((index 0))
(lambda (c)
(set! index (+ 1 index))
(string-append "subgraph cluster" (number->string index)
" {\n"
(join " "
(map (lambda (n)
(string-append
"\"" (symbol->string n) "\""
"; "))
c))
"\n}\n")))
c))
"}\n"))))
(define write-dot
(case-lambda
((g) (write-dot g (current-output-port)))
((g c p) (display (dot->string g c) p))
((g p) (display (dot->string g) p)))))
| null | https://raw.githubusercontent.com/eholk/elegant-weapons/ce51432c614cdba5d2f12c7b5451af01095257a4/lib/elegant-weapons/graphviz.scm | scheme | \n")) | (library (elegant-weapons graphviz)
(export dot->string write-dot)
(import
(rnrs)
(elegant-weapons helpers))
(define dot->string
(case-lambda
((g) (dot->string g '()))
((g c)
(string-append
"digraph G {\n"
(join ""
(map (lambda (n)
(let ((s (car n)))
(join ""
(map (lambda (n)
(string-append
" \""
(symbol->string s)
"\" -> \""
(symbol->string n)
(cdr n)))))
g))
(join
"\n"
(map (let ((index 0))
(lambda (c)
(set! index (+ 1 index))
(string-append "subgraph cluster" (number->string index)
" {\n"
(join " "
(map (lambda (n)
(string-append
"\"" (symbol->string n) "\""
"; "))
c))
"\n}\n")))
c))
"}\n"))))
(define write-dot
(case-lambda
((g) (write-dot g (current-output-port)))
((g c p) (display (dot->string g c) p))
((g p) (display (dot->string g) p)))))
|
c75dc27e920ff0a960438a0c8d5d154c22d7f190995c3fa7ce96f149488ad123 | disco-lang/disco | Atoms.hs | module Atoms where
| null | https://raw.githubusercontent.com/disco-lang/disco/68b96b233b04f26229fe6277678eeb8710422523/explore/sub2/Atoms.hs | haskell | module Atoms where
| |
543b857354cf711440c089af969a6493bb42b6c4faa9d6a9aa1b24a28842c1c6 | clyfe/clara-eav | store.cljc | (ns ^:no-doc clara-eav.store
"A store keeps track of max-eid and maintains an EAV index."
(:require [clara-eav.eav :as eav]
[medley.core :as medley]
#?(:clj [clojure.spec.alpha :as s]
:cljs [cljs.spec.alpha :as s])))
(def ^:dynamic *store*
"Dynamic atom of store to be used in rule productions, similar to other
mechanisms from Clara."
nil)
(s/def ::e
(s/or :string string?
:keyword keyword?
:uuid uuid?
:int int?))
(s/fdef tempid?
:args (s/cat :e ::e)
:ret boolean?)
(defn- tempid?
"True if `e` is a tempid. Strings and negative ints are tempids; keywords,
positive ints and uuids are not."
[e]
(or (string? e)
(neg-int? e)))
(s/def ::max-eid integer?)
(s/def ::eav-index map?)
(s/def ::insertables ::eav/record-seq)
(s/def ::retractables ::eav/record-seq)
(s/def ::tempids (s/map-of tempid? integer?))
(s/def ::store (s/keys :req-un [::max-eid ::eav-index]))
(s/def ::store-tx
(s/keys :req-un [::max-eid ::eav-index]
:opt-un [::insertables ::retractables ::tempids]))
(def init
{:max-eid 0
:eav-index {}})
(s/fdef state
:args (s/cat :store ::store-tx)
:ret ::store)
(defn state
"Remove extra keys from intermediary steps of computations and returns just
the store state."
[store]
(select-keys store [:max-eid :eav-index]))
(s/fdef -eav
:args (s/cat :store ::store-tx
:eav ::eav/record)
:ret ::store-tx)
(defn- -eav
"Subtracts `eav` from `store` updating it's `:eav-index`. Returns the updated
`store` including `:retractables` eavs."
[store eav]
(let [{:keys [e a]} eav]
(if (tempid? e)
(throw (ex-info "Tempids not allowed in retractions" {:e e}))
(-> store
(update :retractables conj eav)
(medley/dissoc-in [:eav-index e a])))))
(s/fdef -eavs
:args (s/cat :store ::store
:eavs ::eav/record-seq)
:ret ::store-tx)
(defn -eavs
"Called in retractions to obtain retractables. Throws if tempids are present
in `eavs`, otherwise updates `store`'s `:eav-index`. Returns the updated store
including `:retractables` eavs."
[store eavs]
(reduce -eav
(assoc store :retractables [])
eavs))
(s/fdef +eav
:args (s/cat :store ::store-tx
:eav ::eav/record)
:ret ::store-tx)
(defn- +eav
"Adds `eav` to `store` updating it's `:max-eid` and `:eav-index`. Returns the
updated `store` including `:insertables` eavs, `:retractables` eavs and
resolved `:tempids` map of {tempid -> eid}."
[store eav]
(let [{:keys [tempids max-eid eav-index]} store
{:keys [e a v]} eav
transient? (= :eav/transient a)]
(if (tempid? e)
(if-some [eid (get tempids e)]
(-> store
(update :insertables conj (assoc eav :e eid))
(cond-> (not transient?) (assoc-in [:eav-index eid a] v)))
(let [new-eid (inc max-eid)]
(-> store
(update :insertables conj (assoc eav :e new-eid))
(assoc-in [:tempids e] new-eid)
(assoc :max-eid new-eid)
(cond-> (not transient?) (assoc-in [:eav-index new-eid a] v)))))
(if transient?
(update store :insertables conj eav)
(if-some [v' (get-in eav-index [e a])]
(cond-> store
(not= v v') (-> (update :insertables conj eav)
(update :retractables conj (assoc eav :v v'))
(assoc-in [:eav-index e a] v)))
(-> store
(update :insertables conj eav)
(assoc-in [:eav-index e a] v)))))))
(s/fdef +eavs
:args (s/cat :store ::store
:eavs ::eav/record-seq)
:ret ::store-tx)
(defn +eavs
"Called in upserts to obtain insertables and retractables. Resolves tempids in
`eavs` and updates `store`'s `:max-id` and `:eav-index`. Returns the updated
store including `insertables` and `retractables` eavs and resolved tempids map
{tempid -> eid}."
[store eavs]
(reduce +eav
(assoc store :insertables []
:retractables []
:tempids {})
eavs))
| null | https://raw.githubusercontent.com/clyfe/clara-eav/1a2255a3aaac303f71a2867998453853673bc6ce/src/clara_eav/store.cljc | clojure | keywords, | (ns ^:no-doc clara-eav.store
"A store keeps track of max-eid and maintains an EAV index."
(:require [clara-eav.eav :as eav]
[medley.core :as medley]
#?(:clj [clojure.spec.alpha :as s]
:cljs [cljs.spec.alpha :as s])))
(def ^:dynamic *store*
"Dynamic atom of store to be used in rule productions, similar to other
mechanisms from Clara."
nil)
(s/def ::e
(s/or :string string?
:keyword keyword?
:uuid uuid?
:int int?))
(s/fdef tempid?
:args (s/cat :e ::e)
:ret boolean?)
(defn- tempid?
positive ints and uuids are not."
[e]
(or (string? e)
(neg-int? e)))
(s/def ::max-eid integer?)
(s/def ::eav-index map?)
(s/def ::insertables ::eav/record-seq)
(s/def ::retractables ::eav/record-seq)
(s/def ::tempids (s/map-of tempid? integer?))
(s/def ::store (s/keys :req-un [::max-eid ::eav-index]))
(s/def ::store-tx
(s/keys :req-un [::max-eid ::eav-index]
:opt-un [::insertables ::retractables ::tempids]))
(def init
{:max-eid 0
:eav-index {}})
(s/fdef state
:args (s/cat :store ::store-tx)
:ret ::store)
(defn state
"Remove extra keys from intermediary steps of computations and returns just
the store state."
[store]
(select-keys store [:max-eid :eav-index]))
(s/fdef -eav
:args (s/cat :store ::store-tx
:eav ::eav/record)
:ret ::store-tx)
(defn- -eav
"Subtracts `eav` from `store` updating it's `:eav-index`. Returns the updated
`store` including `:retractables` eavs."
[store eav]
(let [{:keys [e a]} eav]
(if (tempid? e)
(throw (ex-info "Tempids not allowed in retractions" {:e e}))
(-> store
(update :retractables conj eav)
(medley/dissoc-in [:eav-index e a])))))
(s/fdef -eavs
:args (s/cat :store ::store
:eavs ::eav/record-seq)
:ret ::store-tx)
(defn -eavs
"Called in retractions to obtain retractables. Throws if tempids are present
in `eavs`, otherwise updates `store`'s `:eav-index`. Returns the updated store
including `:retractables` eavs."
[store eavs]
(reduce -eav
(assoc store :retractables [])
eavs))
(s/fdef +eav
:args (s/cat :store ::store-tx
:eav ::eav/record)
:ret ::store-tx)
(defn- +eav
"Adds `eav` to `store` updating it's `:max-eid` and `:eav-index`. Returns the
updated `store` including `:insertables` eavs, `:retractables` eavs and
resolved `:tempids` map of {tempid -> eid}."
[store eav]
(let [{:keys [tempids max-eid eav-index]} store
{:keys [e a v]} eav
transient? (= :eav/transient a)]
(if (tempid? e)
(if-some [eid (get tempids e)]
(-> store
(update :insertables conj (assoc eav :e eid))
(cond-> (not transient?) (assoc-in [:eav-index eid a] v)))
(let [new-eid (inc max-eid)]
(-> store
(update :insertables conj (assoc eav :e new-eid))
(assoc-in [:tempids e] new-eid)
(assoc :max-eid new-eid)
(cond-> (not transient?) (assoc-in [:eav-index new-eid a] v)))))
(if transient?
(update store :insertables conj eav)
(if-some [v' (get-in eav-index [e a])]
(cond-> store
(not= v v') (-> (update :insertables conj eav)
(update :retractables conj (assoc eav :v v'))
(assoc-in [:eav-index e a] v)))
(-> store
(update :insertables conj eav)
(assoc-in [:eav-index e a] v)))))))
(s/fdef +eavs
:args (s/cat :store ::store
:eavs ::eav/record-seq)
:ret ::store-tx)
(defn +eavs
"Called in upserts to obtain insertables and retractables. Resolves tempids in
`eavs` and updates `store`'s `:max-id` and `:eav-index`. Returns the updated
store including `insertables` and `retractables` eavs and resolved tempids map
{tempid -> eid}."
[store eavs]
(reduce +eav
(assoc store :insertables []
:retractables []
:tempids {})
eavs))
|
93c373ad05f94ae2d1fd831751dbb61c0605d8db595a8565cd59076cdc425aab | oakes/Nightlight | ajax.cljs | (ns nightlight.ajax
(:require [cljs.reader :refer [read-string]]
[nightlight.state :as s]
[nightlight.constants :as c])
(:import goog.net.XhrIo))
(defn download-tree [cb]
(.send XhrIo
"tree"
(fn [e]
(when (.isSuccess (.-target e))
(cb (read-string (.. e -target getResponseText)))))
"GET"))
(defn download-state [cb]
(.send XhrIo
"read-state"
(fn [e]
(when (.isSuccess (.-target e))
(reset! s/pref-state (read-string (.. e -target getResponseText))))
(download-tree cb))
"GET"))
(defn download-completions [info completions]
(.send XhrIo
"completions"
(fn [e]
(reset! completions (read-string (.. e -target getResponseText))))
"POST"
(pr-str info)))
(defn write-file [editor]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo
"write-file"
(fn [e]
(c/mark-clean editor))
"POST"
(pr-str {:path (c/get-path editor) :content (c/get-content editor)}))))
(defn rename-file [from to cb]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo "rename-file" cb "POST" (pr-str {:from from :to to}))))
(defn delete-file [path cb]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo "delete-file" cb "POST" path)))
(defn new-file [path cb]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo "new-file" cb "POST" path)))
(defn new-file-upload [form cb]
(when-not (-> @s/runtime-state :options :read-only?)
(let [form-data (js/FormData.)]
(doseq [file (array-seq (.-files form))]
(.append form-data "files" file (.-name file)))
(.send XhrIo "new-file-upload" cb "POST" form-data))))
| null | https://raw.githubusercontent.com/oakes/Nightlight/51ed9bcd7286c2833bb48daf9cb0624e4e7b0e14/src/nightlight/ajax.cljs | clojure | (ns nightlight.ajax
(:require [cljs.reader :refer [read-string]]
[nightlight.state :as s]
[nightlight.constants :as c])
(:import goog.net.XhrIo))
(defn download-tree [cb]
(.send XhrIo
"tree"
(fn [e]
(when (.isSuccess (.-target e))
(cb (read-string (.. e -target getResponseText)))))
"GET"))
(defn download-state [cb]
(.send XhrIo
"read-state"
(fn [e]
(when (.isSuccess (.-target e))
(reset! s/pref-state (read-string (.. e -target getResponseText))))
(download-tree cb))
"GET"))
(defn download-completions [info completions]
(.send XhrIo
"completions"
(fn [e]
(reset! completions (read-string (.. e -target getResponseText))))
"POST"
(pr-str info)))
(defn write-file [editor]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo
"write-file"
(fn [e]
(c/mark-clean editor))
"POST"
(pr-str {:path (c/get-path editor) :content (c/get-content editor)}))))
(defn rename-file [from to cb]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo "rename-file" cb "POST" (pr-str {:from from :to to}))))
(defn delete-file [path cb]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo "delete-file" cb "POST" path)))
(defn new-file [path cb]
(when-not (-> @s/runtime-state :options :read-only?)
(.send XhrIo "new-file" cb "POST" path)))
(defn new-file-upload [form cb]
(when-not (-> @s/runtime-state :options :read-only?)
(let [form-data (js/FormData.)]
(doseq [file (array-seq (.-files form))]
(.append form-data "files" file (.-name file)))
(.send XhrIo "new-file-upload" cb "POST" form-data))))
| |
aa8d451b96cdca043682822b2b1a083134869fd0cf6b4c8d2257f4cbd17f646a | DSLsofMath/DSLsofMath | P1.hs | module P1 where
import Prelude
-- a)
class Field f where
mul :: f -> f -> f
add :: f -> f -> f
zer :: f
one :: f
neg :: f -> f
rec :: f -> f
-- b)
data F v = Mul (F v) (F v) | Rec (F v) | One
| Add (F v) (F v) | Neg (F v) | Zer
| V v deriving Show
{- Or, with {-# LANGUAGE GADTs #-} at the top:
data F v where
Mul :: F v -> F v -> F v
Add :: F v -> F v -> F v
Zer :: F v
One :: F v
Neg :: F v -> F v
Rec :: F v -> F v
V :: v -> F v
deriving Show
-}
instance Field (F v) where
mul = Mul; add = Add; zer = Zer; one = One; neg = Neg; rec = Rec
-- c)
-- instance Fractional a => Field a where
instance Field Double where
mul = (*); add = (+); zer = 0; one = 1; neg = negate; rec = recip
instance Field Bool where
mul = (&&); add = (/=); zer = False; one = True; neg = id; rec = id
-- d)
eval :: Field f => (v->f) -> F v -> f
eval f (Mul x y) = mul (eval f x) (eval f y)
eval f (Add x y) = add (eval f x) (eval f y)
eval f (Zer) = zer
eval f (One) = one
eval f (Neg x) = neg (eval f x)
eval f (Rec x) = rec (eval f x)
eval f (V v) = f v
-- e)
evalD :: (v->Double) -> F v -> Double
evalD = eval
evalB :: (v->Bool) -> F v -> Bool
evalB = eval
ex1, ex2, ex3, x :: F String
ex1 = add x (rec x)
ex2 = mul x (neg x)
ex3 = mul ex1 ex2
x = V "x"
assD :: String -> Double
assD "x" = 1
assB :: String -> Bool
assB "x" = True
testD = map (evalD assD) [ex1, ex2, ex3] == [2, -1, -2]
testB = map (evalB assB) [ex1, ex2, ex3] == [False, False, False]
main = print (testD && testB)
| null | https://raw.githubusercontent.com/DSLsofMath/DSLsofMath/216464afda03c54709fae39e626ca19e8053444e/Exam/2018-08/P1.hs | haskell | a)
b)
Or, with {-# LANGUAGE GADTs #
c)
instance Fractional a => Field a where
d)
e) | module P1 where
import Prelude
class Field f where
mul :: f -> f -> f
add :: f -> f -> f
zer :: f
one :: f
neg :: f -> f
rec :: f -> f
data F v = Mul (F v) (F v) | Rec (F v) | One
| Add (F v) (F v) | Neg (F v) | Zer
| V v deriving Show
data F v where
Mul :: F v -> F v -> F v
Add :: F v -> F v -> F v
Zer :: F v
One :: F v
Neg :: F v -> F v
Rec :: F v -> F v
V :: v -> F v
deriving Show
-}
instance Field (F v) where
mul = Mul; add = Add; zer = Zer; one = One; neg = Neg; rec = Rec
instance Field Double where
mul = (*); add = (+); zer = 0; one = 1; neg = negate; rec = recip
instance Field Bool where
mul = (&&); add = (/=); zer = False; one = True; neg = id; rec = id
eval :: Field f => (v->f) -> F v -> f
eval f (Mul x y) = mul (eval f x) (eval f y)
eval f (Add x y) = add (eval f x) (eval f y)
eval f (Zer) = zer
eval f (One) = one
eval f (Neg x) = neg (eval f x)
eval f (Rec x) = rec (eval f x)
eval f (V v) = f v
evalD :: (v->Double) -> F v -> Double
evalD = eval
evalB :: (v->Bool) -> F v -> Bool
evalB = eval
ex1, ex2, ex3, x :: F String
ex1 = add x (rec x)
ex2 = mul x (neg x)
ex3 = mul ex1 ex2
x = V "x"
assD :: String -> Double
assD "x" = 1
assB :: String -> Bool
assB "x" = True
testD = map (evalD assD) [ex1, ex2, ex3] == [2, -1, -2]
testB = map (evalB assB) [ex1, ex2, ex3] == [False, False, False]
main = print (testD && testB)
|
1935eafe324b54134b65694962f137d10f59023ca5438597a8ed1a8e2ac4f2cb | rpav/cl-freetype2 | bitmap.lisp | (in-package :freetype2)
;; Basic bitmap functions
(defun bitmap-new (&optional (library *library*))
"=> BITMAP
Create a new FT_Bitmap."
(make-wrapper (bitmap &bitmap ft-bitmap (:struct foreign-ft-bitmap))
(progn (ft-bitmap-new &bitmap) :ok)
(ft-bitmap-done library &bitmap)))
(export 'bitmap-new)
(defun bitmap-convert (bitmap alignment &optional (library *library*))
"=> NEW-BITMAP
Convert `BITMAP` to an 8bpp bitmap with `ALIGNMENT`-byte alignment."
(let ((target (bitmap-new library)))
(ft-bitmap-convert library
(fw-ptr bitmap)
(fw-ptr target) alignment)
target))
(export 'bitmap-convert)
;; String utility
(defun string-pixel-width (face string &optional (load-flags '(:default)))
"Get the pixel width of STRING in FACE given LOAD-FLAGS."
(let ((flags-value (convert-to-foreign load-flags 'ft-load-flags))
(vert-flag (convert-to-foreign '(:vertical-layout) 'ft-load-flags)))
(if (= 0 (logand flags-value vert-flag))
(if (fixed-face-p face)
(* (length string)
(ft-26dot6-to-int
(ft-size-metrics-max-advance (ft-size-metrics (ft-face-size face)))))
(+ (reduce #'+ (get-string-advances face string load-flags))
(reduce #'+ (get-string-kerning face string))))
(ft-size-metrics-x-ppem (ft-size-metrics (ft-face-size face))))))
(export 'string-pixel-width)
(defun face-ascender-pixels (face)
"Return the max ascender for FACE, in pixels."
(ft-26dot6-to-float
(ft-size-metrics-ascender (ft-size-metrics (ft-face-size face)))))
(export 'face-ascender-pixels)
(defun face-descender-pixels (face)
"Return the max descender for FACE, in pixels."
(ft-26dot6-to-float
(- (ft-size-metrics-descender (ft-size-metrics (ft-face-size face))))))
(export 'face-descender-pixels)
(defun string-pixel-height (face string &optional (load-flags '(:default)))
"Get the pixel height of STRING in FACE given LOAD-FLAGS."
(let ((flags-value (convert-to-foreign load-flags 'ft-load-flags))
(vert-flag (convert-to-foreign '(:vertical-layout) 'ft-load-flags)))
(if (/= 0 (logand flags-value vert-flag))
(if (fixed-face-p face)
(* (length string)
(ft-size-metrics-y-ppem (ft-size-metrics (ft-face-size face))))
(reduce #'+ (get-string-advances face string flags-value)))
(+ (face-ascender-pixels face) (face-descender-pixels face)))))
(export 'string-pixel-height)
;; Bitmap
(defun nth-mono-pixel (row n)
(multiple-value-bind (q offset) (truncate n 8)
(let ((byte (mem-ref row :unsigned-char q)))
(if (logbitp (- 7 offset) byte) 1 0))))
(defun nth-gray-pixel (row n)
(mem-ref row :unsigned-char n))
(defun bitmap-to-array (bitmap)
"=> ARRAY
Convert `BITMAP` from internal `FT_Bitmap`'s internal representation to
a native array. This is specified for a `FT-BITMAP-PIXEL-FORMAT` of `:MONO`,
`:GRAY`, `:LCD`, and `:LCD-V`.
Note that for :LCD and :LCD-V, the result is a either 3\\*width or
3\\*height, respectively. This may change in the future."
(let ((buffer (ft-bitmap-buffer bitmap))
(rows (ft-bitmap-rows bitmap))
(width (ft-bitmap-width bitmap))
(pitch (ft-bitmap-pitch bitmap))
(format (ft-bitmap-pixel-mode bitmap)))
(let ((pixel-fn (ecase format
(:mono #'nth-mono-pixel)
(:gray #'nth-gray-pixel)
(:lcd #'nth-gray-pixel)
(:lcd-v #'nth-gray-pixel)))
(array (make-array (list rows width) :element-type 'unsigned-byte)))
(declare (function pixel-fn))
#+-(format t "buffer: ~A rows: ~A width: ~A pitch: ~A format: ~A~%"
buffer rows width pitch format)
(loop for i from 0 below rows
as ptr = (inc-pointer buffer (* i pitch))
do (loop for j from 0 below width
do (setf (aref array i j) (funcall pixel-fn ptr j)))
finally (return (values array format))))))
(export 'bitmap-to-array)
ABLIT
(defun flat-array (arr)
(make-array (apply #'* (array-dimensions arr))
:displaced-to arr))
(defun row-width (arr)
(let ((dim (array-dimensions arr)))
(if (> (array-rank arr) 2)
(* (car dim) (caddr dim))
(car dim))))
(defun ablit (arr1 arr2 &key (x 0) (y 0))
"Destructivly copy arr2 into arr1 for 2- and 3-dimensional (Y:X, Y:X:RGB(A))
arrays. X and Y may be specified as a 2D offset into ARR1."
(assert (= (array-rank arr1) (array-rank arr2)))
(let ((flat1 (flat-array arr1))
(flat2 (flat-array arr2))
(height1 (row-width arr1))
(height2 (row-width arr2))
(width1 (array-dimension arr1 1))
(width2 (array-dimension arr2 1))
(xoff (* x (if (= (array-rank arr1) 3)
(array-dimension arr1 2)
1))))
(loop for y2 from 0 below height2
for y1 from y below height1
do (let ((x1 (+ (* y1 width1) xoff))
(x2 (* y2 width2)))
(replace flat1 flat2
:start1 x1
:end1 (* (1+ y1) width1)
:start2 x2
:end2 (+ x2 width2)))))
arr1)
(defun ablit-from-nonzero (arr1 arr2 &key (x 0) (y 0))
"Destructivly copy arr2 into arr1 for 2- and 3-dimensional (Y:X,
Y:X:RGB(A)) arrays. X and Y may be specified as a 2D offset into
ARR1. Copying is started from the first nonzero element in each row.
This is a hack to make kerned fonts render properly with the toy
interface."
(assert (= (array-rank arr1) (array-rank arr2)))
(let ((flat1 (flat-array arr1))
(flat2 (flat-array arr2))
(height1 (row-width arr1))
(height2 (row-width arr2))
(width1 (array-dimension arr1 1))
(width2 (array-dimension arr2 1))
(xoff (* x (if (= (array-rank arr1) 3)
(array-dimension arr1 2)
1))))
(loop for y2 from 0 below height2
for y1 from y below height1
as start2 = (* y2 width2)
as end2 = (+ start2 width2)
do (let ((x1 (+ (* y1 width1) xoff))
(x2 (position-if-not #'zerop flat2 :start start2
:end end2)))
(when x2
(replace flat1 flat2
:start1 (+ x1 (- x2 start2))
:end1 (* (1+ y1) width1)
:start2 x2
:end2 end2)))))
arr1)
| null | https://raw.githubusercontent.com/rpav/cl-freetype2/96058da730b4812df916c1f4ee18c99b3b15a3de/src/bitmap.lisp | lisp | Basic bitmap functions
String utility
Bitmap | (in-package :freetype2)
(defun bitmap-new (&optional (library *library*))
"=> BITMAP
Create a new FT_Bitmap."
(make-wrapper (bitmap &bitmap ft-bitmap (:struct foreign-ft-bitmap))
(progn (ft-bitmap-new &bitmap) :ok)
(ft-bitmap-done library &bitmap)))
(export 'bitmap-new)
(defun bitmap-convert (bitmap alignment &optional (library *library*))
"=> NEW-BITMAP
Convert `BITMAP` to an 8bpp bitmap with `ALIGNMENT`-byte alignment."
(let ((target (bitmap-new library)))
(ft-bitmap-convert library
(fw-ptr bitmap)
(fw-ptr target) alignment)
target))
(export 'bitmap-convert)
(defun string-pixel-width (face string &optional (load-flags '(:default)))
"Get the pixel width of STRING in FACE given LOAD-FLAGS."
(let ((flags-value (convert-to-foreign load-flags 'ft-load-flags))
(vert-flag (convert-to-foreign '(:vertical-layout) 'ft-load-flags)))
(if (= 0 (logand flags-value vert-flag))
(if (fixed-face-p face)
(* (length string)
(ft-26dot6-to-int
(ft-size-metrics-max-advance (ft-size-metrics (ft-face-size face)))))
(+ (reduce #'+ (get-string-advances face string load-flags))
(reduce #'+ (get-string-kerning face string))))
(ft-size-metrics-x-ppem (ft-size-metrics (ft-face-size face))))))
(export 'string-pixel-width)
(defun face-ascender-pixels (face)
"Return the max ascender for FACE, in pixels."
(ft-26dot6-to-float
(ft-size-metrics-ascender (ft-size-metrics (ft-face-size face)))))
(export 'face-ascender-pixels)
(defun face-descender-pixels (face)
"Return the max descender for FACE, in pixels."
(ft-26dot6-to-float
(- (ft-size-metrics-descender (ft-size-metrics (ft-face-size face))))))
(export 'face-descender-pixels)
(defun string-pixel-height (face string &optional (load-flags '(:default)))
"Get the pixel height of STRING in FACE given LOAD-FLAGS."
(let ((flags-value (convert-to-foreign load-flags 'ft-load-flags))
(vert-flag (convert-to-foreign '(:vertical-layout) 'ft-load-flags)))
(if (/= 0 (logand flags-value vert-flag))
(if (fixed-face-p face)
(* (length string)
(ft-size-metrics-y-ppem (ft-size-metrics (ft-face-size face))))
(reduce #'+ (get-string-advances face string flags-value)))
(+ (face-ascender-pixels face) (face-descender-pixels face)))))
(export 'string-pixel-height)
(defun nth-mono-pixel (row n)
(multiple-value-bind (q offset) (truncate n 8)
(let ((byte (mem-ref row :unsigned-char q)))
(if (logbitp (- 7 offset) byte) 1 0))))
(defun nth-gray-pixel (row n)
(mem-ref row :unsigned-char n))
(defun bitmap-to-array (bitmap)
"=> ARRAY
Convert `BITMAP` from internal `FT_Bitmap`'s internal representation to
a native array. This is specified for a `FT-BITMAP-PIXEL-FORMAT` of `:MONO`,
`:GRAY`, `:LCD`, and `:LCD-V`.
Note that for :LCD and :LCD-V, the result is a either 3\\*width or
3\\*height, respectively. This may change in the future."
(let ((buffer (ft-bitmap-buffer bitmap))
(rows (ft-bitmap-rows bitmap))
(width (ft-bitmap-width bitmap))
(pitch (ft-bitmap-pitch bitmap))
(format (ft-bitmap-pixel-mode bitmap)))
(let ((pixel-fn (ecase format
(:mono #'nth-mono-pixel)
(:gray #'nth-gray-pixel)
(:lcd #'nth-gray-pixel)
(:lcd-v #'nth-gray-pixel)))
(array (make-array (list rows width) :element-type 'unsigned-byte)))
(declare (function pixel-fn))
#+-(format t "buffer: ~A rows: ~A width: ~A pitch: ~A format: ~A~%"
buffer rows width pitch format)
(loop for i from 0 below rows
as ptr = (inc-pointer buffer (* i pitch))
do (loop for j from 0 below width
do (setf (aref array i j) (funcall pixel-fn ptr j)))
finally (return (values array format))))))
(export 'bitmap-to-array)
ABLIT
(defun flat-array (arr)
(make-array (apply #'* (array-dimensions arr))
:displaced-to arr))
(defun row-width (arr)
(let ((dim (array-dimensions arr)))
(if (> (array-rank arr) 2)
(* (car dim) (caddr dim))
(car dim))))
(defun ablit (arr1 arr2 &key (x 0) (y 0))
"Destructivly copy arr2 into arr1 for 2- and 3-dimensional (Y:X, Y:X:RGB(A))
arrays. X and Y may be specified as a 2D offset into ARR1."
(assert (= (array-rank arr1) (array-rank arr2)))
(let ((flat1 (flat-array arr1))
(flat2 (flat-array arr2))
(height1 (row-width arr1))
(height2 (row-width arr2))
(width1 (array-dimension arr1 1))
(width2 (array-dimension arr2 1))
(xoff (* x (if (= (array-rank arr1) 3)
(array-dimension arr1 2)
1))))
(loop for y2 from 0 below height2
for y1 from y below height1
do (let ((x1 (+ (* y1 width1) xoff))
(x2 (* y2 width2)))
(replace flat1 flat2
:start1 x1
:end1 (* (1+ y1) width1)
:start2 x2
:end2 (+ x2 width2)))))
arr1)
(defun ablit-from-nonzero (arr1 arr2 &key (x 0) (y 0))
"Destructivly copy arr2 into arr1 for 2- and 3-dimensional (Y:X,
Y:X:RGB(A)) arrays. X and Y may be specified as a 2D offset into
ARR1. Copying is started from the first nonzero element in each row.
This is a hack to make kerned fonts render properly with the toy
interface."
(assert (= (array-rank arr1) (array-rank arr2)))
(let ((flat1 (flat-array arr1))
(flat2 (flat-array arr2))
(height1 (row-width arr1))
(height2 (row-width arr2))
(width1 (array-dimension arr1 1))
(width2 (array-dimension arr2 1))
(xoff (* x (if (= (array-rank arr1) 3)
(array-dimension arr1 2)
1))))
(loop for y2 from 0 below height2
for y1 from y below height1
as start2 = (* y2 width2)
as end2 = (+ start2 width2)
do (let ((x1 (+ (* y1 width1) xoff))
(x2 (position-if-not #'zerop flat2 :start start2
:end end2)))
(when x2
(replace flat1 flat2
:start1 (+ x1 (- x2 start2))
:end1 (* (1+ y1) width1)
:start2 x2
:end2 end2)))))
arr1)
|
53acd316554ce114a1e1d3b6864a160a5630552e52cf4539fffce911f0c0a01b | igorhvr/bedlam | wttest.scm | ;;; "wttest.scm" Test Weight balanced trees -*-Scheme-*-
Copyright ( c ) 1993 - 1994
;;;
Copyright ( c ) 1993 - 94 Massachusetts Institute of Technology
;;;
;;; This material was developed by the Scheme project at the
Massachusetts Institute of Technology , Department of Electrical
Engineering and Computer Science . Permission to copy and modify
;;; this software, to redistribute either the original software or a
;;; modified version, and to use this software for any purpose is
;;; granted, subject to the following restrictions and understandings.
;;;
1 . Any copy made of this software must include this copyright
;;; notice in full.
;;;
2 . Users of this software agree to make their best efforts ( a ) to
return to the MIT Scheme project any improvements or extensions
;;; that they make, so that these may be included in future releases;
and ( b ) to inform MIT of noteworthy uses of this software .
;;;
3 . All materials developed as a consequence of the use of this
;;; software shall duly acknowledge such use, in accordance with the
;;; usual standards of acknowledging credit in academic research.
;;;
4 . MIT has made no warranty or representation that the operation
of this software will be error - free , and MIT is under no
;;; obligation to provide any services, by way of maintenance, update,
;;; or otherwise.
;;;
5 . In conjunction with products arising from the use of this
material , there shall be no use of the name of the Massachusetts
Institute of Technology nor of any adaptation thereof in any
;;; advertising, promotional, or sales literature without prior
written consent from MIT in each case .
(require 'wt-tree)
;; Test code, using maps from digit strings to the numbers they represent.
(define (wt-test)
(define (make-map lo hi step)
(let loop ((i lo) (map (make-wt-tree string-wt-type)))
(if (> i hi)
map
(loop (+ i step) (wt-tree/add map (number->string i) i)))))
(define (wt-tree->alist t)
(wt-tree/fold (lambda (key datum rest) (cons (cons key datum) rest)) '() t))
(define (try-all operation trees)
(map (lambda (t1)
(map (lambda (t2)
(operation t1 t2))
trees))
trees))
(define (chunk tree)
(let ((size (wt-tree/size tree)))
(if (< size 8)
size
(let* ((midpoint (if (even? size)
(/ size 2)
(/ (+ size 1) 2)))
(fulcrum (wt-tree/index tree midpoint)))
(list (chunk (wt-tree/split< tree fulcrum))
(list fulcrum)
(chunk (wt-tree/split> tree fulcrum)))))))
(define (verify name result expected)
(newline)
(display "Test ") (display name)
(if (equal? result expected)
(begin
(display " passed"))
(begin
(display " unexpected result")
(newline)
(display "Expected: " expected)
(newline)
(display "Got: " result))))
(let ((t1 (make-map 0 99 2)) ; 0,2,4,...,98
(t2 (make-map 1 100 2)) ; 1,3,5,...,99
(t3 (make-map 0 100 3))) ; 0,3,6,...,99
(verify 'alist (wt-tree->alist t3) ;
'(("0" . 0) ("12" . 12) ("15" . 15) ("18" . 18) ("21" . 21)
("24" . 24) ("27" . 27) ("3" . 3) ("30" . 30) ("33" . 33)
("36" . 36) ("39" . 39) ("42" . 42) ("45" . 45) ("48" . 48)
("51" . 51) ("54" . 54) ("57" . 57) ("6" . 6) ("60" . 60)
("63" . 63) ("66" . 66) ("69" . 69) ("72" . 72) ("75" . 75)
("78" . 78) ("81" . 81) ("84" . 84) ("87" . 87) ("9" . 9)
("90" . 90) ("93" . 93) ("96" . 96) ("99" . 99)))
(verify 'union-sizes
(try-all (lambda (t1 t2) (wt-tree/size (wt-tree/union t1 t2)))
(list t1 t2 t3))
'((50 100 67) (100 50 67) (67 67 34)))
(verify 'difference-sizes
(try-all (lambda (t1 t2)
(wt-tree/size (wt-tree/difference t1 t2)))
(list t1 t2 t3))
'((0 50 33) (50 0 33) (17 17 0)))
(verify 'intersection-sizes
(try-all (lambda (t1 t2)
(wt-tree/size (wt-tree/intersection t1 t2)))
(list t1 t2 t3))
'((50 0 17) (0 50 17) (17 17 34)))
(verify 'equalities
(try-all (lambda (t1 t2)
(wt-tree/set-equal? (wt-tree/difference t1 t2)
(wt-tree/difference t2 t1)))
(list t1 t2 t3))
'((#t #f #f) (#f #t #f) (#f #f #t)))
(verify 'indexing
(chunk (make-map 0 99 1))
'((((7 ("15") 5) ("20") (6 ("27") 4)) ("31")
((6 ("38") 5) ("43") (6 ("5") 4)))
("54")
(((7 ("61") 5) ("67") (6 ("73") 4)) ("78")
((6 ("84") 5) ("9") (5 ("95") 4)))))
(newline)))
(wt-test)
;;; Local Variables:
eval : ( put ' with - n - node ' scheme - indent - function 1 )
eval : ( put ' with - n - node ' scheme - indent - hook 1 )
;;; End:
| null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/iasylum/slib/3b2/wttest.scm | scheme | "wttest.scm" Test Weight balanced trees -*-Scheme-*-
This material was developed by the Scheme project at the
this software, to redistribute either the original software or a
modified version, and to use this software for any purpose is
granted, subject to the following restrictions and understandings.
notice in full.
that they make, so that these may be included in future releases;
software shall duly acknowledge such use, in accordance with the
usual standards of acknowledging credit in academic research.
obligation to provide any services, by way of maintenance, update,
or otherwise.
advertising, promotional, or sales literature without prior
Test code, using maps from digit strings to the numbers they represent.
0,2,4,...,98
1,3,5,...,99
0,3,6,...,99
Local Variables:
End: | Copyright ( c ) 1993 - 1994
Copyright ( c ) 1993 - 94 Massachusetts Institute of Technology
Massachusetts Institute of Technology , Department of Electrical
Engineering and Computer Science . Permission to copy and modify
1 . Any copy made of this software must include this copyright
2 . Users of this software agree to make their best efforts ( a ) to
return to the MIT Scheme project any improvements or extensions
and ( b ) to inform MIT of noteworthy uses of this software .
3 . All materials developed as a consequence of the use of this
4 . MIT has made no warranty or representation that the operation
of this software will be error - free , and MIT is under no
5 . In conjunction with products arising from the use of this
material , there shall be no use of the name of the Massachusetts
Institute of Technology nor of any adaptation thereof in any
written consent from MIT in each case .
(require 'wt-tree)
(define (wt-test)
(define (make-map lo hi step)
(let loop ((i lo) (map (make-wt-tree string-wt-type)))
(if (> i hi)
map
(loop (+ i step) (wt-tree/add map (number->string i) i)))))
(define (wt-tree->alist t)
(wt-tree/fold (lambda (key datum rest) (cons (cons key datum) rest)) '() t))
(define (try-all operation trees)
(map (lambda (t1)
(map (lambda (t2)
(operation t1 t2))
trees))
trees))
(define (chunk tree)
(let ((size (wt-tree/size tree)))
(if (< size 8)
size
(let* ((midpoint (if (even? size)
(/ size 2)
(/ (+ size 1) 2)))
(fulcrum (wt-tree/index tree midpoint)))
(list (chunk (wt-tree/split< tree fulcrum))
(list fulcrum)
(chunk (wt-tree/split> tree fulcrum)))))))
(define (verify name result expected)
(newline)
(display "Test ") (display name)
(if (equal? result expected)
(begin
(display " passed"))
(begin
(display " unexpected result")
(newline)
(display "Expected: " expected)
(newline)
(display "Got: " result))))
'(("0" . 0) ("12" . 12) ("15" . 15) ("18" . 18) ("21" . 21)
("24" . 24) ("27" . 27) ("3" . 3) ("30" . 30) ("33" . 33)
("36" . 36) ("39" . 39) ("42" . 42) ("45" . 45) ("48" . 48)
("51" . 51) ("54" . 54) ("57" . 57) ("6" . 6) ("60" . 60)
("63" . 63) ("66" . 66) ("69" . 69) ("72" . 72) ("75" . 75)
("78" . 78) ("81" . 81) ("84" . 84) ("87" . 87) ("9" . 9)
("90" . 90) ("93" . 93) ("96" . 96) ("99" . 99)))
(verify 'union-sizes
(try-all (lambda (t1 t2) (wt-tree/size (wt-tree/union t1 t2)))
(list t1 t2 t3))
'((50 100 67) (100 50 67) (67 67 34)))
(verify 'difference-sizes
(try-all (lambda (t1 t2)
(wt-tree/size (wt-tree/difference t1 t2)))
(list t1 t2 t3))
'((0 50 33) (50 0 33) (17 17 0)))
(verify 'intersection-sizes
(try-all (lambda (t1 t2)
(wt-tree/size (wt-tree/intersection t1 t2)))
(list t1 t2 t3))
'((50 0 17) (0 50 17) (17 17 34)))
(verify 'equalities
(try-all (lambda (t1 t2)
(wt-tree/set-equal? (wt-tree/difference t1 t2)
(wt-tree/difference t2 t1)))
(list t1 t2 t3))
'((#t #f #f) (#f #t #f) (#f #f #t)))
(verify 'indexing
(chunk (make-map 0 99 1))
'((((7 ("15") 5) ("20") (6 ("27") 4)) ("31")
((6 ("38") 5) ("43") (6 ("5") 4)))
("54")
(((7 ("61") 5) ("67") (6 ("73") 4)) ("78")
((6 ("84") 5) ("9") (5 ("95") 4)))))
(newline)))
(wt-test)
eval : ( put ' with - n - node ' scheme - indent - function 1 )
eval : ( put ' with - n - node ' scheme - indent - hook 1 )
|
097f3c82adf72bc58f9d123e4657bceb65b98c74837d1db1df76788c7aef944d | haskell-github/github | PullRequestsSpec.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
{-# LANGUAGE TemplateHaskell #-}
module GitHub.PullRequestsSpec where
import qualified GitHub as GH
import Prelude ()
import Prelude.Compat
import Data.Aeson
(FromJSON (..), eitherDecodeStrict, withObject, (.:))
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy.Char8 as LBS8
import Data.Either.Compat (isRight)
import Data.FileEmbed (embedFile)
import Data.Foldable (for_)
import Data.String (fromString)
import Data.Tagged (Tagged (..))
import Data.Text (Text)
import qualified Data.Vector as V
import System.Environment (lookupEnv)
import Test.Hspec
(Spec, describe, it, pendingWith, shouldBe, shouldSatisfy)
fromRightS :: Show a => Either a b -> b
fromRightS (Right b) = b
fromRightS (Left a) = error $ "Expected a Right and got a Left" ++ show a
withAuth :: (GH.Auth -> IO ()) -> IO ()
withAuth action = do
mtoken <- lookupEnv "GITHUB_TOKEN"
case mtoken of
Nothing -> pendingWith "no GITHUB_TOKEN"
Just token -> action (GH.OAuth $ fromString token)
spec :: Spec
spec = do
describe "pullRequestsForR" $ do
it "works" $ withAuth $ \auth -> for_ repos $ \(owner, repo) -> do
cs <- GH.executeRequest auth $
GH.pullRequestsForR owner repo opts GH.FetchAll
cs `shouldSatisfy` isRight
describe "pullRequestPatchR" $
it "works" $ withAuth $ \auth -> do
Right patch <- GH.executeRequest auth $
GH.pullRequestPatchR "haskell-github" "github" (GH.IssueNumber 349)
head (LBS8.lines patch) `shouldBe` "From c0e4ad33811be82e1f72ee76116345c681703103 Mon Sep 17 00:00:00 2001"
describe "decoding pull request payloads" $ do
it "decodes a pull request 'opened' payload" $ do
V.length (GH.simplePullRequestRequestedReviewers simplePullRequestOpened)
`shouldBe` 0
V.length (GH.pullRequestRequestedReviewers pullRequestOpened)
`shouldBe` 0
it "decodes a pull request 'review_requested' payload" $ do
V.length (GH.simplePullRequestRequestedReviewers simplePullRequestReviewRequested)
`shouldBe` 1
V.length (GH.pullRequestRequestedReviewers pullRequestReviewRequested)
`shouldBe` 1
it "decodes a pull request 'team_requested' payload" $ do
V.length (GH.simplePullRequestRequestedTeamReviewers simplePullRequestTeamReviewRequested)
`shouldBe` 1
V.length (GH.pullRequestRequestedTeamReviewers pullRequestTeamReviewRequested)
`shouldBe` 1
describe "checking if a pull request is merged" $ do
it "works" $ withAuth $ \auth -> do
b <- GH.executeRequest auth $ GH.isPullRequestMergedR "haskell-github" "github" (GH.IssueNumber 14)
b `shouldSatisfy` isRight
fromRightS b `shouldBe` True
describe "Draft Pull Request" $ do
it "works" $ withAuth $ \auth -> do
cs <- GH.executeRequest auth $
draftPullRequestsForR "haskell-github" "github" opts GH.FetchAll
cs `shouldSatisfy` isRight
where
repos =
[ ("thoughtbot", "paperclip")
, ("haskell-github", "github")
]
opts = GH.stateClosed
simplePullRequestOpened :: GH.SimplePullRequest
simplePullRequestOpened =
fromRightS (eitherDecodeStrict prOpenedPayload)
pullRequestOpened :: GH.PullRequest
pullRequestOpened =
fromRightS (eitherDecodeStrict prOpenedPayload)
simplePullRequestReviewRequested :: GH.SimplePullRequest
simplePullRequestReviewRequested =
fromRightS (eitherDecodeStrict prReviewRequestedPayload)
simplePullRequestTeamReviewRequested :: GH.SimplePullRequest
simplePullRequestTeamReviewRequested =
fromRightS (eitherDecodeStrict prTeamReviewRequestedPayload)
pullRequestReviewRequested :: GH.PullRequest
pullRequestReviewRequested =
fromRightS (eitherDecodeStrict prReviewRequestedPayload)
pullRequestTeamReviewRequested :: GH.PullRequest
pullRequestTeamReviewRequested =
fromRightS (eitherDecodeStrict prTeamReviewRequestedPayload)
prOpenedPayload :: ByteString
prOpenedPayload = $(embedFile "fixtures/pull-request-opened.json")
prReviewRequestedPayload :: ByteString
prReviewRequestedPayload = $(embedFile "fixtures/pull-request-review-requested.json")
prTeamReviewRequestedPayload :: ByteString
prTeamReviewRequestedPayload = $(embedFile "fixtures/pull-request-team-review-requested.json")
-------------------------------------------------------------------------------
-- Draft Pull Requests
-------------------------------------------------------------------------------
draftPullRequestsForR
:: GH.Name GH.Owner
-> GH.Name GH.Repo
-> GH.PullRequestMod
-> GH.FetchCount
-> GH.GenRequest ('GH.MtPreview ShadowCat) k (V.Vector DraftPR)
draftPullRequestsForR user repo opts = GH.PagedQuery
["repos", GH.toPathPart user, GH.toPathPart repo, "pulls"]
(GH.prModToQueryString opts)
data DraftPR = DraftPR
{ dprId :: !(GH.Id GH.PullRequest)
, dprNumber :: !GH.IssueNumber
, dprTitle :: !Text
, dprDraft :: !Bool
}
deriving (Show)
instance FromJSON DraftPR where
parseJSON = withObject "DraftPR" $ \obj -> DraftPR
<$> obj .: "id"
<*> obj .: "number"
<*> obj .: "title"
<*> obj .: "draft"
-- | @application/vnd.github.shadow-cat-preview+json@ </#draft-pull-requests>
data ShadowCat
instance GH.PreviewAccept ShadowCat where
previewContentType = Tagged "application/vnd.github.shadow-cat-preview+json"
instance FromJSON a => GH.PreviewParseResponse ShadowCat a where
previewParseResponse _ res = Tagged (GH.parseResponseJSON res)
| null | https://raw.githubusercontent.com/haskell-github/github/d9ac0c7ffbcc720a24d06f0a96ea4e3891316d1a/spec/GitHub/PullRequestsSpec.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE TemplateHaskell #
-----------------------------------------------------------------------------
Draft Pull Requests
-----------------------------------------------------------------------------
| @application/vnd.github.shadow-cat-preview+json@ </#draft-pull-requests> | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
module GitHub.PullRequestsSpec where
import qualified GitHub as GH
import Prelude ()
import Prelude.Compat
import Data.Aeson
(FromJSON (..), eitherDecodeStrict, withObject, (.:))
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy.Char8 as LBS8
import Data.Either.Compat (isRight)
import Data.FileEmbed (embedFile)
import Data.Foldable (for_)
import Data.String (fromString)
import Data.Tagged (Tagged (..))
import Data.Text (Text)
import qualified Data.Vector as V
import System.Environment (lookupEnv)
import Test.Hspec
(Spec, describe, it, pendingWith, shouldBe, shouldSatisfy)
fromRightS :: Show a => Either a b -> b
fromRightS (Right b) = b
fromRightS (Left a) = error $ "Expected a Right and got a Left" ++ show a
withAuth :: (GH.Auth -> IO ()) -> IO ()
withAuth action = do
mtoken <- lookupEnv "GITHUB_TOKEN"
case mtoken of
Nothing -> pendingWith "no GITHUB_TOKEN"
Just token -> action (GH.OAuth $ fromString token)
spec :: Spec
spec = do
describe "pullRequestsForR" $ do
it "works" $ withAuth $ \auth -> for_ repos $ \(owner, repo) -> do
cs <- GH.executeRequest auth $
GH.pullRequestsForR owner repo opts GH.FetchAll
cs `shouldSatisfy` isRight
describe "pullRequestPatchR" $
it "works" $ withAuth $ \auth -> do
Right patch <- GH.executeRequest auth $
GH.pullRequestPatchR "haskell-github" "github" (GH.IssueNumber 349)
head (LBS8.lines patch) `shouldBe` "From c0e4ad33811be82e1f72ee76116345c681703103 Mon Sep 17 00:00:00 2001"
describe "decoding pull request payloads" $ do
it "decodes a pull request 'opened' payload" $ do
V.length (GH.simplePullRequestRequestedReviewers simplePullRequestOpened)
`shouldBe` 0
V.length (GH.pullRequestRequestedReviewers pullRequestOpened)
`shouldBe` 0
it "decodes a pull request 'review_requested' payload" $ do
V.length (GH.simplePullRequestRequestedReviewers simplePullRequestReviewRequested)
`shouldBe` 1
V.length (GH.pullRequestRequestedReviewers pullRequestReviewRequested)
`shouldBe` 1
it "decodes a pull request 'team_requested' payload" $ do
V.length (GH.simplePullRequestRequestedTeamReviewers simplePullRequestTeamReviewRequested)
`shouldBe` 1
V.length (GH.pullRequestRequestedTeamReviewers pullRequestTeamReviewRequested)
`shouldBe` 1
describe "checking if a pull request is merged" $ do
it "works" $ withAuth $ \auth -> do
b <- GH.executeRequest auth $ GH.isPullRequestMergedR "haskell-github" "github" (GH.IssueNumber 14)
b `shouldSatisfy` isRight
fromRightS b `shouldBe` True
describe "Draft Pull Request" $ do
it "works" $ withAuth $ \auth -> do
cs <- GH.executeRequest auth $
draftPullRequestsForR "haskell-github" "github" opts GH.FetchAll
cs `shouldSatisfy` isRight
where
repos =
[ ("thoughtbot", "paperclip")
, ("haskell-github", "github")
]
opts = GH.stateClosed
simplePullRequestOpened :: GH.SimplePullRequest
simplePullRequestOpened =
fromRightS (eitherDecodeStrict prOpenedPayload)
pullRequestOpened :: GH.PullRequest
pullRequestOpened =
fromRightS (eitherDecodeStrict prOpenedPayload)
simplePullRequestReviewRequested :: GH.SimplePullRequest
simplePullRequestReviewRequested =
fromRightS (eitherDecodeStrict prReviewRequestedPayload)
simplePullRequestTeamReviewRequested :: GH.SimplePullRequest
simplePullRequestTeamReviewRequested =
fromRightS (eitherDecodeStrict prTeamReviewRequestedPayload)
pullRequestReviewRequested :: GH.PullRequest
pullRequestReviewRequested =
fromRightS (eitherDecodeStrict prReviewRequestedPayload)
pullRequestTeamReviewRequested :: GH.PullRequest
pullRequestTeamReviewRequested =
fromRightS (eitherDecodeStrict prTeamReviewRequestedPayload)
prOpenedPayload :: ByteString
prOpenedPayload = $(embedFile "fixtures/pull-request-opened.json")
prReviewRequestedPayload :: ByteString
prReviewRequestedPayload = $(embedFile "fixtures/pull-request-review-requested.json")
prTeamReviewRequestedPayload :: ByteString
prTeamReviewRequestedPayload = $(embedFile "fixtures/pull-request-team-review-requested.json")
draftPullRequestsForR
:: GH.Name GH.Owner
-> GH.Name GH.Repo
-> GH.PullRequestMod
-> GH.FetchCount
-> GH.GenRequest ('GH.MtPreview ShadowCat) k (V.Vector DraftPR)
draftPullRequestsForR user repo opts = GH.PagedQuery
["repos", GH.toPathPart user, GH.toPathPart repo, "pulls"]
(GH.prModToQueryString opts)
data DraftPR = DraftPR
{ dprId :: !(GH.Id GH.PullRequest)
, dprNumber :: !GH.IssueNumber
, dprTitle :: !Text
, dprDraft :: !Bool
}
deriving (Show)
instance FromJSON DraftPR where
parseJSON = withObject "DraftPR" $ \obj -> DraftPR
<$> obj .: "id"
<*> obj .: "number"
<*> obj .: "title"
<*> obj .: "draft"
data ShadowCat
instance GH.PreviewAccept ShadowCat where
previewContentType = Tagged "application/vnd.github.shadow-cat-preview+json"
instance FromJSON a => GH.PreviewParseResponse ShadowCat a where
previewParseResponse _ res = Tagged (GH.parseResponseJSON res)
|
1d7f79116faeac5636e80bbe82f55d7b91d105ef4a21936046dcc2f740eb8e16 | lojic/RacketChess | global.rkt | #lang racket
#;(begin
(require racket/fixnum)
(provide (all-from-out racket/fixnum)
get-square
set-square!
vecref
vecset!)
(define-syntax-rule (get-square squares idx) (bytes-ref squares idx))
(define-syntax-rule (set-square! squares idx piece) (bytes-set! squares idx piece))
(define-syntax-rule (vecref vec i) (vector-ref vec i))
(define-syntax-rule (vecset! vec i v) (vector-set! vec i v))
)
(begin
(require (only-in racket/fixnum make-fxvector)
racket/unsafe/ops)
(provide fx*
fx+
fx-
fx<
fx<=
fx=
fx>
fx>=
fxvector-length
fxvector-ref
fxvector-set!
fxand
fxior
fxlshift
fxrshift
fxxor
get-square
make-fxvector
set-square!
vecref
vecset!)
(define-syntax-rule (fx* a ...) (unsafe-fx* a ...))
(define-syntax-rule (fx+ a ...) (unsafe-fx+ a ...))
(define-syntax-rule (fx- a ...) (unsafe-fx- a ...))
(define-syntax-rule (fx< a ...) (unsafe-fx< a ...))
(define-syntax-rule (fx<= a ...) (unsafe-fx<= a ...))
(define-syntax-rule (fx= a ...) (unsafe-fx= a ...))
(define-syntax-rule (fx> a ...) (unsafe-fx> a ...))
(define-syntax-rule (fx>= a ...) (unsafe-fx>= a ...))
(define-syntax-rule (fxvector-length v) (unsafe-fxvector-length v))
(define-syntax-rule (fxvector-ref v i) (unsafe-fxvector-ref v i))
(define-syntax-rule (fxvector-set! vec i val) (unsafe-fxvector-set! vec i val))
(define-syntax-rule (fxand a ...) (unsafe-fxand a ...))
(define-syntax-rule (fxior a ...) (unsafe-fxior a ...))
(define-syntax-rule (fxlshift a b) (unsafe-fxlshift a b))
(define-syntax-rule (fxrshift a b) (unsafe-fxrshift a b))
(define-syntax-rule (fxxor a ...) (unsafe-fxxor a ...))
(define-syntax-rule (get-square squares idx) (unsafe-bytes-ref squares idx))
(define-syntax-rule (set-square! squares idx piece) (unsafe-bytes-set! squares idx piece))
(define-syntax-rule (vecref vec i) (unsafe-vector-ref vec i))
(define-syntax-rule (vecset! vec i v) (unsafe-vector-set! vec i v))
)
| null | https://raw.githubusercontent.com/lojic/RacketChess/115ca7fecca9eaf31f9b2f4ef59935372c9920c8/src/global.rkt | racket | (begin | #lang racket
(require racket/fixnum)
(provide (all-from-out racket/fixnum)
get-square
set-square!
vecref
vecset!)
(define-syntax-rule (get-square squares idx) (bytes-ref squares idx))
(define-syntax-rule (set-square! squares idx piece) (bytes-set! squares idx piece))
(define-syntax-rule (vecref vec i) (vector-ref vec i))
(define-syntax-rule (vecset! vec i v) (vector-set! vec i v))
)
(begin
(require (only-in racket/fixnum make-fxvector)
racket/unsafe/ops)
(provide fx*
fx+
fx-
fx<
fx<=
fx=
fx>
fx>=
fxvector-length
fxvector-ref
fxvector-set!
fxand
fxior
fxlshift
fxrshift
fxxor
get-square
make-fxvector
set-square!
vecref
vecset!)
(define-syntax-rule (fx* a ...) (unsafe-fx* a ...))
(define-syntax-rule (fx+ a ...) (unsafe-fx+ a ...))
(define-syntax-rule (fx- a ...) (unsafe-fx- a ...))
(define-syntax-rule (fx< a ...) (unsafe-fx< a ...))
(define-syntax-rule (fx<= a ...) (unsafe-fx<= a ...))
(define-syntax-rule (fx= a ...) (unsafe-fx= a ...))
(define-syntax-rule (fx> a ...) (unsafe-fx> a ...))
(define-syntax-rule (fx>= a ...) (unsafe-fx>= a ...))
(define-syntax-rule (fxvector-length v) (unsafe-fxvector-length v))
(define-syntax-rule (fxvector-ref v i) (unsafe-fxvector-ref v i))
(define-syntax-rule (fxvector-set! vec i val) (unsafe-fxvector-set! vec i val))
(define-syntax-rule (fxand a ...) (unsafe-fxand a ...))
(define-syntax-rule (fxior a ...) (unsafe-fxior a ...))
(define-syntax-rule (fxlshift a b) (unsafe-fxlshift a b))
(define-syntax-rule (fxrshift a b) (unsafe-fxrshift a b))
(define-syntax-rule (fxxor a ...) (unsafe-fxxor a ...))
(define-syntax-rule (get-square squares idx) (unsafe-bytes-ref squares idx))
(define-syntax-rule (set-square! squares idx piece) (unsafe-bytes-set! squares idx piece))
(define-syntax-rule (vecref vec i) (unsafe-vector-ref vec i))
(define-syntax-rule (vecset! vec i v) (unsafe-vector-set! vec i v))
)
|
b179e7956035acf01a39acf76dda24e185d3e00944fa73827e0a9f69a908d9d1 | jazzytomato/hnlookup | core.cljs | (ns hnlookup.popup.core
(:import [goog.dom query])
(:require-macros [cljs.core.async.macros :refer [go go-loop]])
(:require [cljs.core.async :refer [<!]]
[cljs-http.client :as http]
[chromex.logging :refer-macros [log info warn error group group-end]]
[chromex.protocols :refer [post-message!]]
[chromex.ext.runtime :as runtime :refer-macros [connect]]
[chromex.ext.tabs :as tabs]
[goog.dom :as gdom]
[reagent.core :as r]
[re-com.core :as rc]
[hnlookup.popup.components :as cpts]
[cljsjs.moment]))
(defonce app-state
(r/atom {:items []
:loading false
:error nil
:url nil
:title nil
:search-terms []}))
(def items-cursor (r/cursor app-state [:items]))
(defn results? [] (seq @items-cursor))
(def no-results? (complement results?))
(defn error? [] (some? (:error @app-state)))
(defn loading? [] (:loading @app-state))
(defn loading! [] (swap! app-state assoc :loading true :error nil))
(defn finished-loading! [] (swap! app-state assoc :loading false))
(def hn-api-search-url "")
(def hn-submit-link "")
(def hn-item-url "=")
(defn repost-allowed? [stories]
(let [last-post-date (js/moment (:created_at (apply max-key :created_at_i stories)))
total-points (apply + (map :points stories))]
(and (> (.. (js/moment) (diff last-post-date "months")) 8)
(< total-points 250))))
(defn is-story? [item] (nil? (:story_id item)))
(def is-comment? (complement is-story?))
(defn build-hn-url [item]
(str hn-item-url (item (if (is-story? item)
:objectID
:story_id))))
(defn transform-response [m]
"Maps the relevant URL for each item of the reponse and returns the array of items"
(let [hits (get-in m [:body :hits])]
(map #(assoc % :hn-url (build-hn-url %)) hits)))
(defn build-hn-submit-link
"Build a submit link based on the current tab url and title"
[]
(let [url (:url @app-state)
title (:title @app-state)]
(str hn-submit-link "?u=" url "&t=" title)))
(defn build-search-terms
"Sanitize url and returns an array of search terms. i.e.
the url would return the vector
'www.domain.com/abcd/1234' 'www.domain.com' '/abcd/1234'"
[s]
(drop 1 (re-find #"^https?\://(([^/]+)([^\r\n\#]*)?)" s)))
(defn hn-api-search
"Queries the HN Api and update the state with results."
[s]
(loading!)
(go (let [response (<! (http/get hn-api-search-url {:query-params {"query" s}}))]
(if (= (:status response) 200)
(swap! app-state assoc :items (transform-response response))
(swap! app-state assoc :error (:status response)))
(finished-loading!))))
(defn search-tab-url
"Get the current tab url and update the state"
[]
(go
(if-let [[tabs] (<! (tabs/query #js {"active" true "currentWindow" true}))]
(let [tab (first tabs)
tab-url (.-url tab)
title (.-title tab)]
(if-let [search-term (first (build-search-terms tab-url))]
((swap! app-state assoc :url tab-url :title title)
(hn-api-search search-term)))))))
(defn list-stories
"Return the list of stories ordered by points desc"
[]
(sort-by :points > (filter is-story? @items-cursor)))
(defn list-related-stories
"Return the list of items that matched a comment, distinct by story id"
[]
(map first (vals (group-by :story_id (filter is-comment? @items-cursor)))))
;; React components
(defn main-cpt []
(let [submit-link (build-hn-submit-link)
s (list-stories)
rs (list-related-stories)]
[rc/v-box
:size "auto"
:children
[(if (error?)
[cpts/error-cpt (:error @app-state)]
(if (loading?)
[cpts/loading-cpt]
(if (no-results?)
[cpts/blank-cpt submit-link]
[rc/v-box
:size "auto"
:gap "10px"
:children
[[cpts/hn-cpt s rs]
[rc/line]
(when (repost-allowed? s)
[cpts/repost-cpt submit-link])]])))]]))
(defn frame-cpt []
[rc/scroller
:v-scroll :auto
:height "600px"
:width "500px"
:padding "10px"
:style {:background-color "#f6f6ef"}
:child [main-cpt]])
(defn mountit []
(r/render [frame-cpt] (aget (query "#main") 0)))
; -- main entry point -------------------------------------------------------------------------------------------------------
(defn init! []
(mountit)
(if (no-results?)
(search-tab-url)))
| null | https://raw.githubusercontent.com/jazzytomato/hnlookup/c29c5a417bdf8756a2a747d89dbc41ab4b012c75/src/popup/hnlookup/popup/core.cljs | clojure | React components
-- main entry point ------------------------------------------------------------------------------------------------------- | (ns hnlookup.popup.core
(:import [goog.dom query])
(:require-macros [cljs.core.async.macros :refer [go go-loop]])
(:require [cljs.core.async :refer [<!]]
[cljs-http.client :as http]
[chromex.logging :refer-macros [log info warn error group group-end]]
[chromex.protocols :refer [post-message!]]
[chromex.ext.runtime :as runtime :refer-macros [connect]]
[chromex.ext.tabs :as tabs]
[goog.dom :as gdom]
[reagent.core :as r]
[re-com.core :as rc]
[hnlookup.popup.components :as cpts]
[cljsjs.moment]))
(defonce app-state
(r/atom {:items []
:loading false
:error nil
:url nil
:title nil
:search-terms []}))
(def items-cursor (r/cursor app-state [:items]))
(defn results? [] (seq @items-cursor))
(def no-results? (complement results?))
(defn error? [] (some? (:error @app-state)))
(defn loading? [] (:loading @app-state))
(defn loading! [] (swap! app-state assoc :loading true :error nil))
(defn finished-loading! [] (swap! app-state assoc :loading false))
(def hn-api-search-url "")
(def hn-submit-link "")
(def hn-item-url "=")
(defn repost-allowed? [stories]
(let [last-post-date (js/moment (:created_at (apply max-key :created_at_i stories)))
total-points (apply + (map :points stories))]
(and (> (.. (js/moment) (diff last-post-date "months")) 8)
(< total-points 250))))
(defn is-story? [item] (nil? (:story_id item)))
(def is-comment? (complement is-story?))
(defn build-hn-url [item]
(str hn-item-url (item (if (is-story? item)
:objectID
:story_id))))
(defn transform-response [m]
"Maps the relevant URL for each item of the reponse and returns the array of items"
(let [hits (get-in m [:body :hits])]
(map #(assoc % :hn-url (build-hn-url %)) hits)))
(defn build-hn-submit-link
"Build a submit link based on the current tab url and title"
[]
(let [url (:url @app-state)
title (:title @app-state)]
(str hn-submit-link "?u=" url "&t=" title)))
(defn build-search-terms
"Sanitize url and returns an array of search terms. i.e.
the url would return the vector
'www.domain.com/abcd/1234' 'www.domain.com' '/abcd/1234'"
[s]
(drop 1 (re-find #"^https?\://(([^/]+)([^\r\n\#]*)?)" s)))
(defn hn-api-search
"Queries the HN Api and update the state with results."
[s]
(loading!)
(go (let [response (<! (http/get hn-api-search-url {:query-params {"query" s}}))]
(if (= (:status response) 200)
(swap! app-state assoc :items (transform-response response))
(swap! app-state assoc :error (:status response)))
(finished-loading!))))
(defn search-tab-url
"Get the current tab url and update the state"
[]
(go
(if-let [[tabs] (<! (tabs/query #js {"active" true "currentWindow" true}))]
(let [tab (first tabs)
tab-url (.-url tab)
title (.-title tab)]
(if-let [search-term (first (build-search-terms tab-url))]
((swap! app-state assoc :url tab-url :title title)
(hn-api-search search-term)))))))
(defn list-stories
"Return the list of stories ordered by points desc"
[]
(sort-by :points > (filter is-story? @items-cursor)))
(defn list-related-stories
"Return the list of items that matched a comment, distinct by story id"
[]
(map first (vals (group-by :story_id (filter is-comment? @items-cursor)))))
(defn main-cpt []
(let [submit-link (build-hn-submit-link)
s (list-stories)
rs (list-related-stories)]
[rc/v-box
:size "auto"
:children
[(if (error?)
[cpts/error-cpt (:error @app-state)]
(if (loading?)
[cpts/loading-cpt]
(if (no-results?)
[cpts/blank-cpt submit-link]
[rc/v-box
:size "auto"
:gap "10px"
:children
[[cpts/hn-cpt s rs]
[rc/line]
(when (repost-allowed? s)
[cpts/repost-cpt submit-link])]])))]]))
(defn frame-cpt []
[rc/scroller
:v-scroll :auto
:height "600px"
:width "500px"
:padding "10px"
:style {:background-color "#f6f6ef"}
:child [main-cpt]])
(defn mountit []
(r/render [frame-cpt] (aget (query "#main") 0)))
(defn init! []
(mountit)
(if (no-results?)
(search-tab-url)))
|
e688d636e3053b9efdac294310b368fff0addc5dfb639d23ebd1c6ba0e5ac146 | ghc/nofib | RC.hs | #include "unboxery.h"
module RC(rC,rCs) where
import Types
rC
= Nuc
(Tfo FL_LIT(-0.0359) FL_LIT(-0.8071) FL_LIT(0.5894) -- dgf_base_tfo
FL_LIT(-0.2669) FL_LIT(0.5761) FL_LIT(0.7726)
FL_LIT(-0.9631) FL_LIT(-0.1296) FL_LIT(-0.2361)
FL_LIT(0.1584) FL_LIT(8.3434) FL_LIT(0.5434))
(Tfo FL_LIT(-0.8313) FL_LIT(-0.4738) FL_LIT(-0.2906) -- p_o3'_275_tfo
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
(Tfo FL_LIT(0.3445) FL_LIT(-0.7630) FL_LIT(0.5470) -- p_o3'_180_tfo
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
(Tfo FL_LIT(0.5855) FL_LIT(0.7931) FL_LIT(-0.1682) -- p_o3'_60_tfo
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
(Pt FL_LIT(2.6760) FL_LIT(-8.4960) FL_LIT(3.2880)) -- P
(Pt FL_LIT(1.4950) FL_LIT(-7.6230) FL_LIT(3.4770)) -- O1P
(Pt FL_LIT(2.9490) FL_LIT(-9.4640) FL_LIT(4.3740)) -- O2P
'
C5 '
(Pt FL_LIT(5.1974) FL_LIT(-8.8497) FL_LIT(1.9223)) -- H5'
(Pt FL_LIT(5.5548) FL_LIT(-8.7348) FL_LIT(3.7469)) -- H5''
(Pt FL_LIT(6.3140) FL_LIT(-7.2060) FL_LIT(2.5510)) -- C4'
(Pt FL_LIT(7.2954) FL_LIT(-7.6762) FL_LIT(2.4898)) -- H4'
O4 '
(Pt FL_LIT(6.4190) FL_LIT(-5.1840) FL_LIT(1.3620)) -- C1'
(Pt FL_LIT(7.1608) FL_LIT(-5.0495) FL_LIT(0.5747)) -- H1'
C2 '
(Pt FL_LIT(6.7770) FL_LIT(-3.9803) FL_LIT(3.1099)) -- H2''
O2 '
(Pt FL_LIT(8.8309) FL_LIT(-4.8755) FL_LIT(1.7590)) -- H2'
(Pt FL_LIT(6.4060) FL_LIT(-6.0590) FL_LIT(3.5580)) -- C3'
(Pt FL_LIT(5.4021) FL_LIT(-5.7313) FL_LIT(3.8281)) -- H3'
(Pt FL_LIT(7.1570) FL_LIT(-6.4240) FL_LIT(4.7070)) -- O3'
(Pt FL_LIT(5.2170) FL_LIT(-4.3260) FL_LIT(1.1690)) -- N1
N3
C2
(Pt FL_LIT(2.9930) FL_LIT(-2.6780) FL_LIT(0.7940)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(6.5470) FL_LIT(-2.5560) FL_LIT(0.6290)) -- O2
(Pt FL_LIT(1.0684) FL_LIT(-2.1236) FL_LIT(0.7109)) -- H41
(Pt FL_LIT(2.2344) FL_LIT(-0.8560) FL_LIT(0.3162)) -- H42
(Pt FL_LIT(1.8797) FL_LIT(-4.4972) FL_LIT(1.3404)) -- H5
H6
)
rC01
= Nuc
(Tfo FL_LIT(-0.0137) FL_LIT(-0.8012) FL_LIT(0.5983) -- dgf_base_tfo
FL_LIT(-0.2523) FL_LIT(0.5817) FL_LIT(0.7733)
FL_LIT(-0.9675) FL_LIT(-0.1404) FL_LIT(-0.2101)
FL_LIT(0.2031) FL_LIT(8.3874) FL_LIT(0.4228))
(Tfo FL_LIT(-0.8313) FL_LIT(-0.4738) FL_LIT(-0.2906) -- p_o3'_275_tfo
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
(Tfo FL_LIT(0.3445) FL_LIT(-0.7630) FL_LIT(0.5470) -- p_o3'_180_tfo
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
(Tfo FL_LIT(0.5855) FL_LIT(0.7931) FL_LIT(-0.1682) -- p_o3'_60_tfo
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
(Pt FL_LIT(2.6760) FL_LIT(-8.4960) FL_LIT(3.2880)) -- P
(Pt FL_LIT(1.4950) FL_LIT(-7.6230) FL_LIT(3.4770)) -- O1P
(Pt FL_LIT(2.9490) FL_LIT(-9.4640) FL_LIT(4.3740)) -- O2P
'
C5 '
(Pt FL_LIT(5.2050) FL_LIT(-8.8128) FL_LIT(1.8901)) -- H5'
(Pt FL_LIT(5.5368) FL_LIT(-8.7738) FL_LIT(3.7227)) -- H5''
(Pt FL_LIT(6.3232) FL_LIT(-7.2037) FL_LIT(2.6002)) -- C4'
(Pt FL_LIT(7.3048) FL_LIT(-7.6757) FL_LIT(2.5577)) -- H4'
O4 '
(Pt FL_LIT(6.4697) FL_LIT(-5.1547) FL_LIT(1.4629)) -- C1'
(Pt FL_LIT(7.2354) FL_LIT(-5.0043) FL_LIT(0.7018)) -- H1'
C2 '
(Pt FL_LIT(6.7777) FL_LIT(-3.9935) FL_LIT(3.2487)) -- H2''
O2 '
(Pt FL_LIT(8.8693) FL_LIT(-4.8638) FL_LIT(1.9399)) -- H2'
(Pt FL_LIT(6.3877) FL_LIT(-6.0809) FL_LIT(3.6362)) -- C3'
(Pt FL_LIT(5.3770) FL_LIT(-5.7562) FL_LIT(3.8834)) -- H3'
(Pt FL_LIT(7.1024) FL_LIT(-6.4754) FL_LIT(4.7985)) -- O3'
(Pt FL_LIT(5.2764) FL_LIT(-4.2883) FL_LIT(1.2538)) -- N1
N3
C2
(Pt FL_LIT(3.0693) FL_LIT(-2.6246) FL_LIT(0.8500)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(6.6267) FL_LIT(-2.5166) FL_LIT(0.7728)) -- O2
(Pt FL_LIT(1.1496) FL_LIT(-2.0600) FL_LIT(0.7287)) -- H41
(Pt FL_LIT(2.3303) FL_LIT(-0.7921) FL_LIT(0.3815)) -- H42
(Pt FL_LIT(1.9353) FL_LIT(-4.4465) FL_LIT(1.3419)) -- H5
H6
)
rC02
= Nuc
(Tfo FL_LIT(0.5141) FL_LIT(0.0246) FL_LIT(0.8574) -- dgf_base_tfo
FL_LIT(-0.5547) FL_LIT(-0.7529) FL_LIT(0.3542)
FL_LIT(0.6542) FL_LIT(-0.6577) FL_LIT(-0.3734)
FL_LIT(-9.1111) FL_LIT(-3.4598) FL_LIT(-3.2939))
(Tfo FL_LIT(-0.8313) FL_LIT(-0.4738) FL_LIT(-0.2906) -- p_o3'_275_tfo
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
(Tfo FL_LIT(0.3445) FL_LIT(-0.7630) FL_LIT(0.5470) -- p_o3'_180_tfo
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
(Tfo FL_LIT(0.5855) FL_LIT(0.7931) FL_LIT(-0.1682) -- p_o3'_60_tfo
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
(Pt FL_LIT(2.6760) FL_LIT(-8.4960) FL_LIT(3.2880)) -- P
(Pt FL_LIT(1.4950) FL_LIT(-7.6230) FL_LIT(3.4770)) -- O1P
(Pt FL_LIT(2.9490) FL_LIT(-9.4640) FL_LIT(4.3740)) -- O2P
'
C5 '
(Pt FL_LIT(4.6841) FL_LIT(-7.2019) FL_LIT(4.9443)) -- H5'
(Pt FL_LIT(3.6189) FL_LIT(-5.8889) FL_LIT(4.1625)) -- H5''
(Pt FL_LIT(5.6255) FL_LIT(-5.9175) FL_LIT(3.5998)) -- C4'
(Pt FL_LIT(5.8732) FL_LIT(-5.1228) FL_LIT(4.3034)) -- H4'
O4 '
(Pt FL_LIT(7.5932) FL_LIT(-6.4923) FL_LIT(2.4548)) -- C1'
(Pt FL_LIT(8.5661) FL_LIT(-6.2983) FL_LIT(2.9064)) -- H1'
C2 '
(Pt FL_LIT(7.1627) FL_LIT(-5.2525) FL_LIT(0.7490)) -- H2''
O2 '
(Pt FL_LIT(8.5944) FL_LIT(-4.2543) FL_LIT(2.6981)) -- H2'
(Pt FL_LIT(5.5661) FL_LIT(-5.3029) FL_LIT(2.2009)) -- C3'
(Pt FL_LIT(5.0841) FL_LIT(-6.0018) FL_LIT(1.5172)) -- H3'
(Pt FL_LIT(4.9062) FL_LIT(-4.0452) FL_LIT(2.2042)) -- O3'
(Pt FL_LIT(7.6298) FL_LIT(-7.6136) FL_LIT(1.4752)) -- N1
N3
C2
(Pt FL_LIT(7.7426) FL_LIT(-9.6987) FL_LIT(-0.3801)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(9.5840) FL_LIT(-6.8186) FL_LIT(0.6136)) -- O2
(Pt FL_LIT(7.2009) FL_LIT(-11.3604) FL_LIT(-1.3619)) -- H41
(Pt FL_LIT(8.7058) FL_LIT(-10.6168) FL_LIT(-1.9140)) -- H42
(Pt FL_LIT(5.8585) FL_LIT(-10.3083) FL_LIT(0.5822)) -- H5
H6
)
rC03
= Nuc
(Tfo FL_LIT(-0.4993) FL_LIT(0.0476) FL_LIT(0.8651) -- dgf_base_tfo
FL_LIT(0.8078) FL_LIT(-0.3353) FL_LIT(0.4847)
FL_LIT(0.3132) FL_LIT(0.9409) FL_LIT(0.1290)
FL_LIT(6.2989) FL_LIT(-5.2303) FL_LIT(-3.8577))
(Tfo FL_LIT(-0.8313) FL_LIT(-0.4738) FL_LIT(-0.2906) -- p_o3'_275_tfo
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
(Tfo FL_LIT(0.3445) FL_LIT(-0.7630) FL_LIT(0.5470) -- p_o3'_180_tfo
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
(Tfo FL_LIT(0.5855) FL_LIT(0.7931) FL_LIT(-0.1682) -- p_o3'_60_tfo
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
(Pt FL_LIT(2.6760) FL_LIT(-8.4960) FL_LIT(3.2880)) -- P
(Pt FL_LIT(1.4950) FL_LIT(-7.6230) FL_LIT(3.4770)) -- O1P
(Pt FL_LIT(2.9490) FL_LIT(-9.4640) FL_LIT(4.3740)) -- O2P
'
C5 '
(Pt FL_LIT(3.2332) FL_LIT(-5.9343) FL_LIT(2.0319)) -- H5'
(Pt FL_LIT(3.9666) FL_LIT(-7.2863) FL_LIT(0.9812)) -- H5''
(Pt FL_LIT(5.3098) FL_LIT(-5.9546) FL_LIT(1.8564)) -- C4'
(Pt FL_LIT(5.3863) FL_LIT(-5.3702) FL_LIT(0.9395)) -- H4'
O4 '
(Pt FL_LIT(6.7315) FL_LIT(-4.9724) FL_LIT(3.4462)) -- C1'
(Pt FL_LIT(7.0033) FL_LIT(-3.9202) FL_LIT(3.3619)) -- H1'
C2 '
(Pt FL_LIT(8.3627) FL_LIT(-6.3254) FL_LIT(3.0707)) -- H2''
O2 '
(Pt FL_LIT(8.2781) FL_LIT(-4.0644) FL_LIT(1.7570)) -- H2'
(Pt FL_LIT(6.5701) FL_LIT(-6.8129) FL_LIT(1.9714)) -- C3'
(Pt FL_LIT(6.4186) FL_LIT(-7.5809) FL_LIT(2.7299)) -- H3'
(Pt FL_LIT(6.9357) FL_LIT(-7.3841) FL_LIT(0.7235)) -- O3'
(Pt FL_LIT(6.8024) FL_LIT(-5.4718) FL_LIT(4.8475)) -- N1
N3
C2
(Pt FL_LIT(6.9789) FL_LIT(-6.3827) FL_LIT(7.4823)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(8.7747) FL_LIT(-4.3728) FL_LIT(5.1568)) -- O2
(Pt FL_LIT(6.4741) FL_LIT(-7.3461) FL_LIT(9.1662)) -- H41
(Pt FL_LIT(7.9889) FL_LIT(-6.4396) FL_LIT(9.2429)) -- H42
(Pt FL_LIT(5.0736) FL_LIT(-7.3713) FL_LIT(6.9922)) -- H5
H6
)
rC04
= Nuc
(Tfo FL_LIT(-0.5669) FL_LIT(-0.8012) FL_LIT(0.1918) -- dgf_base_tfo
FL_LIT(-0.8129) FL_LIT(0.5817) FL_LIT(0.0273)
FL_LIT(-0.1334) FL_LIT(-0.1404) FL_LIT(-0.9811)
FL_LIT(-0.3279) FL_LIT(8.3874) FL_LIT(0.3355))
(Tfo FL_LIT(-0.8313) FL_LIT(-0.4738) FL_LIT(-0.2906) -- p_o3'_275_tfo
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
(Tfo FL_LIT(0.3445) FL_LIT(-0.7630) FL_LIT(0.5470) -- p_o3'_180_tfo
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
(Tfo FL_LIT(0.5855) FL_LIT(0.7931) FL_LIT(-0.1682) -- p_o3'_60_tfo
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
(Pt FL_LIT(2.6760) FL_LIT(-8.4960) FL_LIT(3.2880)) -- P
(Pt FL_LIT(1.4950) FL_LIT(-7.6230) FL_LIT(3.4770)) -- O1P
(Pt FL_LIT(2.9490) FL_LIT(-9.4640) FL_LIT(4.3740)) -- O2P
'
C5 '
(Pt FL_LIT(5.2050) FL_LIT(-8.8128) FL_LIT(1.8901)) -- H5'
(Pt FL_LIT(5.5368) FL_LIT(-8.7738) FL_LIT(3.7227)) -- H5''
(Pt FL_LIT(6.3232) FL_LIT(-7.2037) FL_LIT(2.6002)) -- C4'
(Pt FL_LIT(7.3048) FL_LIT(-7.6757) FL_LIT(2.5577)) -- H4'
O4 '
(Pt FL_LIT(6.4697) FL_LIT(-5.1547) FL_LIT(1.4629)) -- C1'
(Pt FL_LIT(7.2354) FL_LIT(-5.0043) FL_LIT(0.7018)) -- H1'
C2 '
(Pt FL_LIT(6.7777) FL_LIT(-3.9935) FL_LIT(3.2487)) -- H2''
O2 '
(Pt FL_LIT(8.8693) FL_LIT(-4.8638) FL_LIT(1.9399)) -- H2'
(Pt FL_LIT(6.3877) FL_LIT(-6.0809) FL_LIT(3.6362)) -- C3'
(Pt FL_LIT(5.3770) FL_LIT(-5.7562) FL_LIT(3.8834)) -- H3'
(Pt FL_LIT(7.1024) FL_LIT(-6.4754) FL_LIT(4.7985)) -- O3'
(Pt FL_LIT(5.2764) FL_LIT(-4.2883) FL_LIT(1.2538)) -- N1
N3
C2
(Pt FL_LIT(3.0480) FL_LIT(-2.6632) FL_LIT(0.8116)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(5.7005) FL_LIT(-4.2164) FL_LIT(-0.9842)) -- O2
(Pt FL_LIT(1.4067) FL_LIT(-1.5873) FL_LIT(1.2205)) -- H41
(Pt FL_LIT(1.8721) FL_LIT(-1.6319) FL_LIT(-0.4835)) -- H42
(Pt FL_LIT(2.8048) FL_LIT(-2.8507) FL_LIT(2.9918)) -- H5
H6
)
rC05
= Nuc
(Tfo FL_LIT(-0.6298) FL_LIT(0.0246) FL_LIT(0.7763) -- dgf_base_tfo
FL_LIT(-0.5226) FL_LIT(-0.7529) FL_LIT(-0.4001)
FL_LIT(0.5746) FL_LIT(-0.6577) FL_LIT(0.4870)
FL_LIT(-0.0208) FL_LIT(-3.4598) FL_LIT(-9.6882))
(Tfo FL_LIT(-0.8313) FL_LIT(-0.4738) FL_LIT(-0.2906) -- p_o3'_275_tfo
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
(Tfo FL_LIT(0.3445) FL_LIT(-0.7630) FL_LIT(0.5470) -- p_o3'_180_tfo
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
(Tfo FL_LIT(0.5855) FL_LIT(0.7931) FL_LIT(-0.1682) -- p_o3'_60_tfo
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
(Pt FL_LIT(2.6760) FL_LIT(-8.4960) FL_LIT(3.2880)) -- P
(Pt FL_LIT(1.4950) FL_LIT(-7.6230) FL_LIT(3.4770)) -- O1P
(Pt FL_LIT(2.9490) FL_LIT(-9.4640) FL_LIT(4.3740)) -- O2P
'
C5 '
(Pt FL_LIT(4.6841) FL_LIT(-7.2019) FL_LIT(4.9443)) -- H5'
(Pt FL_LIT(3.6189) FL_LIT(-5.8889) FL_LIT(4.1625)) -- H5''
(Pt FL_LIT(5.6255) FL_LIT(-5.9175) FL_LIT(3.5998)) -- C4'
(Pt FL_LIT(5.8732) FL_LIT(-5.1228) FL_LIT(4.3034)) -- H4'
O4 '
(Pt FL_LIT(7.5932) FL_LIT(-6.4923) FL_LIT(2.4548)) -- C1'
(Pt FL_LIT(8.5661) FL_LIT(-6.2983) FL_LIT(2.9064)) -- H1'
C2 '
(Pt FL_LIT(7.1627) FL_LIT(-5.2525) FL_LIT(0.7490)) -- H2''
O2 '
(Pt FL_LIT(8.5944) FL_LIT(-4.2543) FL_LIT(2.6981)) -- H2'
(Pt FL_LIT(5.5661) FL_LIT(-5.3029) FL_LIT(2.2009)) -- C3'
(Pt FL_LIT(5.0841) FL_LIT(-6.0018) FL_LIT(1.5172)) -- H3'
(Pt FL_LIT(4.9062) FL_LIT(-4.0452) FL_LIT(2.2042)) -- O3'
(Pt FL_LIT(7.6298) FL_LIT(-7.6136) FL_LIT(1.4752)) -- N1
N3
C2
(Pt FL_LIT(7.7372) FL_LIT(-9.7371) FL_LIT(-0.3364)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(9.3993) FL_LIT(-8.5377) FL_LIT(2.5743)) -- O2
(Pt FL_LIT(7.2499) FL_LIT(-10.8809) FL_LIT(-1.9088)) -- H41
(Pt FL_LIT(8.6122) FL_LIT(-11.4649) FL_LIT(-0.9468)) -- H42
(Pt FL_LIT(6.0317) FL_LIT(-8.6941) FL_LIT(-1.2588)) -- H5
H6
)
rC06
= Nuc
(Tfo FL_LIT(-0.9837) FL_LIT(0.0476) FL_LIT(-0.1733) -- dgf_base_tfo
FL_LIT(-0.1792) FL_LIT(-0.3353) FL_LIT(0.9249)
FL_LIT(-0.0141) FL_LIT(0.9409) FL_LIT(0.3384)
FL_LIT(5.7793) FL_LIT(-5.2303) FL_LIT(4.5997))
(Tfo FL_LIT(-0.8313) FL_LIT(-0.4738) FL_LIT(-0.2906) -- p_o3'_275_tfo
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
(Tfo FL_LIT(0.3445) FL_LIT(-0.7630) FL_LIT(0.5470) -- p_o3'_180_tfo
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
(Tfo FL_LIT(0.5855) FL_LIT(0.7931) FL_LIT(-0.1682) -- p_o3'_60_tfo
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
(Pt FL_LIT(2.6760) FL_LIT(-8.4960) FL_LIT(3.2880)) -- P
(Pt FL_LIT(1.4950) FL_LIT(-7.6230) FL_LIT(3.4770)) -- O1P
(Pt FL_LIT(2.9490) FL_LIT(-9.4640) FL_LIT(4.3740)) -- O2P
'
C5 '
(Pt FL_LIT(3.2332) FL_LIT(-5.9343) FL_LIT(2.0319)) -- H5'
(Pt FL_LIT(3.9666) FL_LIT(-7.2863) FL_LIT(0.9812)) -- H5''
(Pt FL_LIT(5.3098) FL_LIT(-5.9546) FL_LIT(1.8564)) -- C4'
(Pt FL_LIT(5.3863) FL_LIT(-5.3702) FL_LIT(0.9395)) -- H4'
O4 '
(Pt FL_LIT(6.7315) FL_LIT(-4.9724) FL_LIT(3.4462)) -- C1'
(Pt FL_LIT(7.0033) FL_LIT(-3.9202) FL_LIT(3.3619)) -- H1'
C2 '
(Pt FL_LIT(8.3627) FL_LIT(-6.3254) FL_LIT(3.0707)) -- H2''
O2 '
(Pt FL_LIT(8.2781) FL_LIT(-4.0644) FL_LIT(1.7570)) -- H2'
(Pt FL_LIT(6.5701) FL_LIT(-6.8129) FL_LIT(1.9714)) -- C3'
(Pt FL_LIT(6.4186) FL_LIT(-7.5809) FL_LIT(2.7299)) -- H3'
(Pt FL_LIT(6.9357) FL_LIT(-7.3841) FL_LIT(0.7235)) -- O3'
(Pt FL_LIT(6.8024) FL_LIT(-5.4718) FL_LIT(4.8475)) -- N1
N3
C2
(Pt FL_LIT(6.9254) FL_LIT(-6.3614) FL_LIT(7.4926)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(6.4083) FL_LIT(-3.3696) FL_LIT(5.6340)) -- O2
(Pt FL_LIT(7.1329) FL_LIT(-7.6280) FL_LIT(9.0324)) -- H41
(Pt FL_LIT(6.8204) FL_LIT(-5.9469) FL_LIT(9.4777)) -- H42
(Pt FL_LIT(7.2954) FL_LIT(-8.3135) FL_LIT(6.5440)) -- H5
H6
)
rC07
= Nuc
(Tfo FL_LIT(0.0033) FL_LIT(0.2720) FL_LIT(-0.9623) -- dgf_base_tfo
FL_LIT(0.3013) FL_LIT(-0.9179) FL_LIT(-0.2584)
FL_LIT(-0.9535) FL_LIT(-0.2891) FL_LIT(-0.0850)
FL_LIT(43.0403) FL_LIT(13.7233) FL_LIT(34.5710))
(Tfo FL_LIT(0.9187) FL_LIT(0.2887) FL_LIT(0.2694) -- p_o3'_275_tfo
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
(Tfo FL_LIT(-0.1504) FL_LIT(0.7744) FL_LIT(-0.6145) -- p_o3'_180_tfo
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
(Tfo FL_LIT(-0.6236) FL_LIT(-0.7810) FL_LIT(-0.0337) -- p_o3'_60_tfo
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
(Pt FL_LIT(33.3400) FL_LIT(11.0980) FL_LIT(46.1750)) -- P
(Pt FL_LIT(34.5130) FL_LIT(10.2320) FL_LIT(46.4660)) -- O1P
(Pt FL_LIT(33.4130) FL_LIT(12.3960) FL_LIT(46.9340)) -- O2P
'
C5 '
(Pt FL_LIT(30.4519) FL_LIT(10.9454) FL_LIT(45.1957)) -- H5'
(Pt FL_LIT(31.0379) FL_LIT(12.2016) FL_LIT(46.4400)) -- H5''
(Pt FL_LIT(29.7081) FL_LIT(10.7448) FL_LIT(47.1428)) -- C4'
(Pt FL_LIT(28.8710) FL_LIT(11.4416) FL_LIT(47.0982)) -- H4'
O4 '
(Pt FL_LIT(29.3907) FL_LIT(8.5625) FL_LIT(47.9460)) -- C1'
(Pt FL_LIT(28.4416) FL_LIT(8.5669) FL_LIT(48.4819)) -- H1'
C2 '
(Pt FL_LIT(31.4222) FL_LIT(8.9651) FL_LIT(48.3709)) -- H2''
O2 '
(Pt FL_LIT(30.0652) FL_LIT(8.0304) FL_LIT(50.3740)) -- H2'
(Pt FL_LIT(30.1622) FL_LIT(10.6879) FL_LIT(48.6120)) -- C3'
(Pt FL_LIT(31.0952) FL_LIT(11.2399) FL_LIT(48.7254)) -- H3'
(Pt FL_LIT(29.1076) FL_LIT(11.1535) FL_LIT(49.4702)) -- O3'
(Pt FL_LIT(29.7883) FL_LIT(7.2209) FL_LIT(47.5235)) -- N1
N3
C2
(Pt FL_LIT(30.4888) FL_LIT(4.6890) FL_LIT(46.7186)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(27.6171) FL_LIT(6.5989) FL_LIT(47.3189)) -- O2
(Pt FL_LIT(31.7923) FL_LIT(3.2301) FL_LIT(46.2638)) -- H41
(Pt FL_LIT(30.0880) FL_LIT(2.7857) FL_LIT(46.1215)) -- H42
(Pt FL_LIT(32.5542) FL_LIT(5.3634) FL_LIT(46.9395)) -- H5
H6
)
rC08
= Nuc
(Tfo FL_LIT(0.0797) FL_LIT(-0.6026) FL_LIT(-0.7941) -- dgf_base_tfo
FL_LIT(0.7939) FL_LIT(0.5201) FL_LIT(-0.3150)
FL_LIT(0.6028) FL_LIT(-0.6054) FL_LIT(0.5198)
FL_LIT(-36.8341) FL_LIT(41.5293) FL_LIT(1.6628))
(Tfo FL_LIT(0.9187) FL_LIT(0.2887) FL_LIT(0.2694) -- p_o3'_275_tfo
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
(Tfo FL_LIT(-0.1504) FL_LIT(0.7744) FL_LIT(-0.6145) -- p_o3'_180_tfo
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
(Tfo FL_LIT(-0.6236) FL_LIT(-0.7810) FL_LIT(-0.0337) -- p_o3'_60_tfo
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
(Pt FL_LIT(33.3400) FL_LIT(11.0980) FL_LIT(46.1750)) -- P
(Pt FL_LIT(34.5130) FL_LIT(10.2320) FL_LIT(46.4660)) -- O1P
(Pt FL_LIT(33.4130) FL_LIT(12.3960) FL_LIT(46.9340)) -- O2P
'
C5 '
(Pt FL_LIT(31.3239) FL_LIT(10.6931) FL_LIT(48.4322)) -- H5'
(Pt FL_LIT(32.8647) FL_LIT(9.6624) FL_LIT(48.2489)) -- H5''
(Pt FL_LIT(31.0429) FL_LIT(8.6773) FL_LIT(47.9401)) -- C4'
(Pt FL_LIT(31.0779) FL_LIT(8.2331) FL_LIT(48.9349)) -- H4'
O4 '
(Pt FL_LIT(29.2784) FL_LIT(8.1700) FL_LIT(46.4782)) -- C1'
(Pt FL_LIT(28.8006) FL_LIT(7.2731) FL_LIT(46.8722)) -- H1'
C2 '
(Pt FL_LIT(30.8837) FL_LIT(8.6410) FL_LIT(45.1856)) -- H2''
O2 '
(Pt FL_LIT(29.6694) FL_LIT(6.4168) FL_LIT(44.6326)) -- H2'
(Pt FL_LIT(31.5146) FL_LIT(7.5954) FL_LIT(46.9527)) -- C3'
(Pt FL_LIT(32.5255) FL_LIT(7.8261) FL_LIT(46.6166)) -- H3'
(Pt FL_LIT(31.3876) FL_LIT(6.2951) FL_LIT(47.5516)) -- O3'
(Pt FL_LIT(28.3976) FL_LIT(8.9302) FL_LIT(45.5933)) -- N1
N3
C2
(Pt FL_LIT(26.7044) FL_LIT(10.3489) FL_LIT(43.9595)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(26.5733) FL_LIT(8.2371) FL_LIT(46.7484)) -- O2
(Pt FL_LIT(26.2707) FL_LIT(11.5609) FL_LIT(42.4177)) -- H41
(Pt FL_LIT(24.8760) FL_LIT(10.9939) FL_LIT(43.3427)) -- H42
(Pt FL_LIT(28.5089) FL_LIT(10.9722) FL_LIT(42.8990)) -- H5
H6
)
rC09
= Nuc
(Tfo FL_LIT(0.8727) FL_LIT(0.4760) FL_LIT(-0.1091) -- dgf_base_tfo
FL_LIT(-0.4188) FL_LIT(0.6148) FL_LIT(-0.6682)
FL_LIT(-0.2510) FL_LIT(0.6289) FL_LIT(0.7359)
FL_LIT(-8.1687) FL_LIT(-52.0761) FL_LIT(-25.0726))
(Tfo FL_LIT(0.9187) FL_LIT(0.2887) FL_LIT(0.2694) -- p_o3'_275_tfo
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
(Tfo FL_LIT(-0.1504) FL_LIT(0.7744) FL_LIT(-0.6145) -- p_o3'_180_tfo
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
(Tfo FL_LIT(-0.6236) FL_LIT(-0.7810) FL_LIT(-0.0337) -- p_o3'_60_tfo
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
(Pt FL_LIT(33.3400) FL_LIT(11.0980) FL_LIT(46.1750)) -- P
(Pt FL_LIT(34.5130) FL_LIT(10.2320) FL_LIT(46.4660)) -- O1P
(Pt FL_LIT(33.4130) FL_LIT(12.3960) FL_LIT(46.9340)) -- O2P
'
C5 '
(Pt FL_LIT(30.4519) FL_LIT(10.9454) FL_LIT(45.1957)) -- H5'
(Pt FL_LIT(31.0379) FL_LIT(12.2016) FL_LIT(46.4400)) -- H5''
(Pt FL_LIT(29.7081) FL_LIT(10.7448) FL_LIT(47.1428)) -- C4'
(Pt FL_LIT(29.4506) FL_LIT(9.6945) FL_LIT(47.0059)) -- H4'
O4 '
(Pt FL_LIT(29.1794) FL_LIT(11.8418) FL_LIT(49.1490)) -- C1'
(Pt FL_LIT(28.4388) FL_LIT(11.2210) FL_LIT(49.6533)) -- H1'
C2 '
(Pt FL_LIT(29.1947) FL_LIT(13.3949) FL_LIT(47.7147)) -- H2''
O2 '
(Pt FL_LIT(27.0851) FL_LIT(13.3391) FL_LIT(49.2227)) -- H2'
(Pt FL_LIT(28.4131) FL_LIT(11.5507) FL_LIT(46.9391)) -- C3'
(Pt FL_LIT(28.4451) FL_LIT(12.0512) FL_LIT(45.9713)) -- H3'
(Pt FL_LIT(27.2707) FL_LIT(10.6955) FL_LIT(47.1097)) -- O3'
(Pt FL_LIT(29.8751) FL_LIT(12.7405) FL_LIT(50.0682)) -- N1
N3
C2
(Pt FL_LIT(31.1834) FL_LIT(14.3941) FL_LIT(51.8297)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(29.6470) FL_LIT(11.2494) FL_LIT(51.7616)) -- O2
(Pt FL_LIT(32.1422) FL_LIT(16.0774) FL_LIT(52.3606)) -- H41
(Pt FL_LIT(31.9392) FL_LIT(14.8893) FL_LIT(53.6527)) -- H42
(Pt FL_LIT(31.3632) FL_LIT(15.7771) FL_LIT(50.1491)) -- H5
H6
)
rC10
= Nuc
(Tfo FL_LIT(0.1549) FL_LIT(0.8710) FL_LIT(-0.4663) -- dgf_base_tfo
FL_LIT(0.6768) FL_LIT(-0.4374) FL_LIT(-0.5921)
FL_LIT(-0.7197) FL_LIT(-0.2239) FL_LIT(-0.6572)
FL_LIT(25.2447) FL_LIT(-14.1920) FL_LIT(50.3201))
(Tfo FL_LIT(0.9187) FL_LIT(0.2887) FL_LIT(0.2694) -- p_o3'_275_tfo
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
(Tfo FL_LIT(-0.1504) FL_LIT(0.7744) FL_LIT(-0.6145) -- p_o3'_180_tfo
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
(Tfo FL_LIT(-0.6236) FL_LIT(-0.7810) FL_LIT(-0.0337) -- p_o3'_60_tfo
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
(Pt FL_LIT(33.3400) FL_LIT(11.0980) FL_LIT(46.1750)) -- P
(Pt FL_LIT(34.5130) FL_LIT(10.2320) FL_LIT(46.4660)) -- O1P
(Pt FL_LIT(33.4130) FL_LIT(12.3960) FL_LIT(46.9340)) -- O2P
'
C5 '
(Pt FL_LIT(31.3239) FL_LIT(10.6931) FL_LIT(48.4322)) -- H5'
(Pt FL_LIT(32.8647) FL_LIT(9.6624) FL_LIT(48.2489)) -- H5''
(Pt FL_LIT(31.0429) FL_LIT(8.6773) FL_LIT(47.9401)) -- C4'
(Pt FL_LIT(30.0440) FL_LIT(8.8473) FL_LIT(47.5383)) -- H4'
O4 '
(Pt FL_LIT(31.9159) FL_LIT(6.5022) FL_LIT(48.0616)) -- C1'
(Pt FL_LIT(31.0691) FL_LIT(5.8243) FL_LIT(47.9544)) -- H1'
C2 '
(Pt FL_LIT(32.9024) FL_LIT(7.5288) FL_LIT(49.6245)) -- H2''
O2 '
(Pt FL_LIT(31.8416) FL_LIT(5.2663) FL_LIT(50.3200)) -- H2'
(Pt FL_LIT(30.8618) FL_LIT(8.1514) FL_LIT(49.3749)) -- C3'
(Pt FL_LIT(31.1122) FL_LIT(8.9396) FL_LIT(50.0850)) -- H3'
(Pt FL_LIT(29.5351) FL_LIT(7.6245) FL_LIT(49.5409)) -- O3'
(Pt FL_LIT(33.1890) FL_LIT(5.8629) FL_LIT(47.7343)) -- N1
N3
C2
(Pt FL_LIT(35.5600) FL_LIT(4.6374) FL_LIT(47.0822)) -- C4
C5
C6
(C
N4
(Pt FL_LIT(32.1661) FL_LIT(4.5034) FL_LIT(46.2348)) -- O2
(Pt FL_LIT(37.5405) FL_LIT(4.3347) FL_LIT(47.2259)) -- H41
(Pt FL_LIT(36.7033) FL_LIT(3.2923) FL_LIT(46.0706)) -- H42
(Pt FL_LIT(36.4713) FL_LIT(5.9811) FL_LIT(48.5428)) -- H5
H6
)
rCs = [rC01,rC02,rC03,rC04,rC05,rC06,rC07,rC08,rC09,rC10]
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/spectral/hartel/nucleic2/RC.hs | haskell | dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5
dgf_base_tfo
p_o3'_275_tfo
p_o3'_180_tfo
p_o3'_60_tfo
P
O1P
O2P
H5'
H5''
C4'
H4'
C1'
H1'
H2''
H2'
C3'
H3'
O3'
N1
C4
O2
H41
H42
H5 | #include "unboxery.h"
module RC(rC,rCs) where
import Types
rC
= Nuc
FL_LIT(-0.2669) FL_LIT(0.5761) FL_LIT(0.7726)
FL_LIT(-0.9631) FL_LIT(-0.1296) FL_LIT(-0.2361)
FL_LIT(0.1584) FL_LIT(8.3434) FL_LIT(0.5434))
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC01
= Nuc
FL_LIT(-0.2523) FL_LIT(0.5817) FL_LIT(0.7733)
FL_LIT(-0.9675) FL_LIT(-0.1404) FL_LIT(-0.2101)
FL_LIT(0.2031) FL_LIT(8.3874) FL_LIT(0.4228))
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC02
= Nuc
FL_LIT(-0.5547) FL_LIT(-0.7529) FL_LIT(0.3542)
FL_LIT(0.6542) FL_LIT(-0.6577) FL_LIT(-0.3734)
FL_LIT(-9.1111) FL_LIT(-3.4598) FL_LIT(-3.2939))
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC03
= Nuc
FL_LIT(0.8078) FL_LIT(-0.3353) FL_LIT(0.4847)
FL_LIT(0.3132) FL_LIT(0.9409) FL_LIT(0.1290)
FL_LIT(6.2989) FL_LIT(-5.2303) FL_LIT(-3.8577))
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC04
= Nuc
FL_LIT(-0.8129) FL_LIT(0.5817) FL_LIT(0.0273)
FL_LIT(-0.1334) FL_LIT(-0.1404) FL_LIT(-0.9811)
FL_LIT(-0.3279) FL_LIT(8.3874) FL_LIT(0.3355))
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC05
= Nuc
FL_LIT(-0.5226) FL_LIT(-0.7529) FL_LIT(-0.4001)
FL_LIT(0.5746) FL_LIT(-0.6577) FL_LIT(0.4870)
FL_LIT(-0.0208) FL_LIT(-3.4598) FL_LIT(-9.6882))
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC06
= Nuc
FL_LIT(-0.1792) FL_LIT(-0.3353) FL_LIT(0.9249)
FL_LIT(-0.0141) FL_LIT(0.9409) FL_LIT(0.3384)
FL_LIT(5.7793) FL_LIT(-5.2303) FL_LIT(4.5997))
FL_LIT(0.0649) FL_LIT(0.4366) FL_LIT(-0.8973)
FL_LIT(0.5521) FL_LIT(-0.7648) FL_LIT(-0.3322)
FL_LIT(1.6833) FL_LIT(6.8060) FL_LIT(-7.0011))
FL_LIT(-0.4628) FL_LIT(-0.6450) FL_LIT(-0.6082)
FL_LIT(0.8168) FL_LIT(-0.0436) FL_LIT(-0.5753)
FL_LIT(-6.8179) FL_LIT(-3.9778) FL_LIT(-5.9887))
FL_LIT(0.8103) FL_LIT(-0.5790) FL_LIT(0.0906)
FL_LIT(-0.0255) FL_LIT(-0.1894) FL_LIT(-0.9816)
FL_LIT(6.1203) FL_LIT(-7.1051) FL_LIT(3.1984))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC07
= Nuc
FL_LIT(0.3013) FL_LIT(-0.9179) FL_LIT(-0.2584)
FL_LIT(-0.9535) FL_LIT(-0.2891) FL_LIT(-0.0850)
FL_LIT(43.0403) FL_LIT(13.7233) FL_LIT(34.5710))
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC08
= Nuc
FL_LIT(0.7939) FL_LIT(0.5201) FL_LIT(-0.3150)
FL_LIT(0.6028) FL_LIT(-0.6054) FL_LIT(0.5198)
FL_LIT(-36.8341) FL_LIT(41.5293) FL_LIT(1.6628))
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC09
= Nuc
FL_LIT(-0.4188) FL_LIT(0.6148) FL_LIT(-0.6682)
FL_LIT(-0.2510) FL_LIT(0.6289) FL_LIT(0.7359)
FL_LIT(-8.1687) FL_LIT(-52.0761) FL_LIT(-25.0726))
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rC10
= Nuc
FL_LIT(0.6768) FL_LIT(-0.4374) FL_LIT(-0.5921)
FL_LIT(-0.7197) FL_LIT(-0.2239) FL_LIT(-0.6572)
FL_LIT(25.2447) FL_LIT(-14.1920) FL_LIT(50.3201))
FL_LIT(0.0302) FL_LIT(-0.7316) FL_LIT(0.6811)
FL_LIT(0.3938) FL_LIT(-0.6176) FL_LIT(-0.6808)
FL_LIT(-48.4330) FL_LIT(26.3254) FL_LIT(13.6383))
FL_LIT(0.7581) FL_LIT(0.4893) FL_LIT(0.4311)
FL_LIT(0.6345) FL_LIT(-0.4010) FL_LIT(-0.6607)
FL_LIT(-31.9784) FL_LIT(-13.4285) FL_LIT(44.9650))
FL_LIT(-0.6890) FL_LIT(0.5694) FL_LIT(-0.4484)
FL_LIT(0.3694) FL_LIT(-0.2564) FL_LIT(-0.8932)
FL_LIT(12.1105) FL_LIT(30.8774) FL_LIT(46.0946))
'
C5 '
O4 '
C2 '
O2 '
N3
C2
C5
C6
(C
N4
H6
)
rCs = [rC01,rC02,rC03,rC04,rC05,rC06,rC07,rC08,rC09,rC10]
|
9ceba7c7fac1a3297aeb0d269fc8fcb3420d8792e79b1f2d48dbe1347d7e7fce | helium/blockchain-core | blockchain_sync_SUITE.erl | -module(blockchain_sync_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("blockchain.hrl").
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2
]).
-export([
basic/1
]).
%%--------------------------------------------------------------------
%% COMMON TEST CALLBACK FUNCTIONS
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
@public
%% @doc
%% Running tests for this suite
%% @end
%%--------------------------------------------------------------------
all() ->
%%% thaere are no tests to run because the basic test doesn't
%%% actually work at all.
[
basic
].
%%--------------------------------------------------------------------
%% TEST SUITE SETUP
%%--------------------------------------------------------------------
init_per_suite(Config) ->
blockchain_ct_utils:init_per_suite(Config).
%%--------------------------------------------------------------------
%% TEST SUITE TEARDOWN
%%--------------------------------------------------------------------
end_per_suite(Config) ->
Config.
%%--------------------------------------------------------------------
%% TEST CASE SETUP
%%--------------------------------------------------------------------
init_per_testcase(TestCase, Config) ->
{ok, SimSwarm} = libp2p_swarm:start(sync_SUITE_sim, [{libp2p_nat, [{enabled, false}]}]),
ok = libp2p_swarm:listen(SimSwarm, "/ip4/0.0.0.0/tcp/0"),
blockchain_ct_utils:init_base_dir_config(?MODULE, TestCase, [{swarm, SimSwarm}|Config]).
%%--------------------------------------------------------------------
%% TEST CASE TEARDOWN
%%--------------------------------------------------------------------
end_per_testcase(_, Config) ->
SimSwarm = ?config(swarm, Config),
libp2p_swarm:stop(SimSwarm),
ok.
%%--------------------------------------------------------------------
%% TEST CASES
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
@public
%% @doc
%% @end
%%--------------------------------------------------------------------
basic(Config) ->
BaseDir = ?config(base_dir, Config),
SimDir = ?config(sim_dir, Config),
SimSwarm = ?config(swarm, Config),
Balance = 5000,
BlocksN = 100,
{ok, Sup, {PrivKey, PubKey}, _Opts} = test_utils:init(BaseDir),
{ok, _GenesisMembers, _GenesisBlock, ConsensusMembers, _} = test_utils:init_chain(Balance, {PrivKey, PubKey}),
Chain0 = blockchain_worker:blockchain(),
{ok, Genesis} = blockchain:genesis_block(Chain0),
% Simulate other chain with sync handler only
_Chain = blockchain:new(SimDir, Genesis, undefined, undefined),
% Add some blocks
Blocks = lists:reverse(lists:foldl(
fun(_, Acc) ->
{ok, Block} = test_utils:create_block(ConsensusMembers, []),
_ = blockchain_gossip_handler:add_block(Block, Chain0, blockchain_swarm:pubkey_bin(), blockchain_swarm:tid()),
[Block|Acc]
end,
[],
lists:seq(1, BlocksN)
)),
LastBlock = lists:last(Blocks),
: add_stream_handler (
SimSwarm
%% ,?SYNC_PROTOCOL_V1
%% ,{libp2p_framed_stream, server, [c, ?MODULE, ?SYNC_PROTOCOL_V1, Chain]}
%% ),
This is just to connect the 2 swarms
[ListenAddr|_] = libp2p_swarm:listen_addrs(blockchain_swarm:swarm()),
{ok, _} = libp2p_swarm:connect(SimSwarm, ListenAddr),
ok = test_utils:wait_until(fun() -> erlang:length(libp2p_peerbook:values(libp2p_swarm:peerbook(blockchain_swarm:swarm()))) > 1 end),
% Simulate add block from other chain
_ = blockchain_gossip_handler:add_block(LastBlock, Chain0, libp2p_swarm:pubkey_bin(SimSwarm), blockchain_swarm:tid()),
ok = test_utils:wait_until(fun() ->{ok, BlocksN + 1} =:= blockchain:height(Chain0) end),
?assertEqual({ok, LastBlock}, blockchain:head_block(blockchain_worker:blockchain())),
true = erlang:exit(Sup, normal),
ok.
| null | https://raw.githubusercontent.com/helium/blockchain-core/2e5a2d1f7d7baa79500b09c70c2a9af9b9577eab/test/blockchain_sync_SUITE.erl | erlang | --------------------------------------------------------------------
COMMON TEST CALLBACK FUNCTIONS
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Running tests for this suite
@end
--------------------------------------------------------------------
thaere are no tests to run because the basic test doesn't
actually work at all.
--------------------------------------------------------------------
TEST SUITE SETUP
--------------------------------------------------------------------
--------------------------------------------------------------------
TEST SUITE TEARDOWN
--------------------------------------------------------------------
--------------------------------------------------------------------
TEST CASE SETUP
--------------------------------------------------------------------
--------------------------------------------------------------------
TEST CASE TEARDOWN
--------------------------------------------------------------------
--------------------------------------------------------------------
TEST CASES
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
Simulate other chain with sync handler only
Add some blocks
,?SYNC_PROTOCOL_V1
,{libp2p_framed_stream, server, [c, ?MODULE, ?SYNC_PROTOCOL_V1, Chain]}
),
Simulate add block from other chain | -module(blockchain_sync_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("blockchain.hrl").
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2
]).
-export([
basic/1
]).
@public
all() ->
[
basic
].
init_per_suite(Config) ->
blockchain_ct_utils:init_per_suite(Config).
end_per_suite(Config) ->
Config.
init_per_testcase(TestCase, Config) ->
{ok, SimSwarm} = libp2p_swarm:start(sync_SUITE_sim, [{libp2p_nat, [{enabled, false}]}]),
ok = libp2p_swarm:listen(SimSwarm, "/ip4/0.0.0.0/tcp/0"),
blockchain_ct_utils:init_base_dir_config(?MODULE, TestCase, [{swarm, SimSwarm}|Config]).
end_per_testcase(_, Config) ->
SimSwarm = ?config(swarm, Config),
libp2p_swarm:stop(SimSwarm),
ok.
@public
basic(Config) ->
BaseDir = ?config(base_dir, Config),
SimDir = ?config(sim_dir, Config),
SimSwarm = ?config(swarm, Config),
Balance = 5000,
BlocksN = 100,
{ok, Sup, {PrivKey, PubKey}, _Opts} = test_utils:init(BaseDir),
{ok, _GenesisMembers, _GenesisBlock, ConsensusMembers, _} = test_utils:init_chain(Balance, {PrivKey, PubKey}),
Chain0 = blockchain_worker:blockchain(),
{ok, Genesis} = blockchain:genesis_block(Chain0),
_Chain = blockchain:new(SimDir, Genesis, undefined, undefined),
Blocks = lists:reverse(lists:foldl(
fun(_, Acc) ->
{ok, Block} = test_utils:create_block(ConsensusMembers, []),
_ = blockchain_gossip_handler:add_block(Block, Chain0, blockchain_swarm:pubkey_bin(), blockchain_swarm:tid()),
[Block|Acc]
end,
[],
lists:seq(1, BlocksN)
)),
LastBlock = lists:last(Blocks),
: add_stream_handler (
SimSwarm
This is just to connect the 2 swarms
[ListenAddr|_] = libp2p_swarm:listen_addrs(blockchain_swarm:swarm()),
{ok, _} = libp2p_swarm:connect(SimSwarm, ListenAddr),
ok = test_utils:wait_until(fun() -> erlang:length(libp2p_peerbook:values(libp2p_swarm:peerbook(blockchain_swarm:swarm()))) > 1 end),
_ = blockchain_gossip_handler:add_block(LastBlock, Chain0, libp2p_swarm:pubkey_bin(SimSwarm), blockchain_swarm:tid()),
ok = test_utils:wait_until(fun() ->{ok, BlocksN + 1} =:= blockchain:height(Chain0) end),
?assertEqual({ok, LastBlock}, blockchain:head_block(blockchain_worker:blockchain())),
true = erlang:exit(Sup, normal),
ok.
|
65f3df0d2eb33b144f676c89db0d1d773c76aae98f4cb2d334d2104058466902 | mfoemmel/erlang-otp | ex_popupMenu.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
-module(ex_popupMenu).
-behaviour(wx_object).
%% Client API
-export([start/1]).
%% wx_object callbacks
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config,
menu
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Root = proplists:get_value(parent, Config),
Parent = wxPanel:new(Root,[]),
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Box = wxStaticBox:new(Parent, ?wxID_ANY, "Popup Menu"),
Sz = wxStaticBoxSizer:new(Box, ?wxVERTICAL),
Text = wxStaticText:new(Parent, ?wxID_ANY, "Right click to open popup menu", []),
Panel = wxPanel:new(Parent),
wxPanel:connect(Panel, right_up),
Sizer = wxBoxSizer:new(?wxVERTICAL),
wxSizer:add(Sizer, Text, [{border, 20}, {flag, ?wxALL}]),
wxPanel:setSizer(Panel, Sizer),
wxSizer:add(Sz, Panel, [{proportion,1}, {flag, ?wxEXPAND}]),
wxSizer:layout(Sz),
PopupMenu = create_menu(),
wxSizer:add(MainSizer, Sz, [{proportion,1}, {flag, ?wxEXPAND}]),
wxWindow:setSizer(Parent, MainSizer),
{Parent, #state{parent=Parent, config=Config, menu=PopupMenu}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Async Events are handled in handle_event as in handle_info
handle_event(#wx{obj = Panel,
event = #wxMouse{type = right_up}},
State = #state{menu = Menu}) ->
%% Open the popup menu
wxWindow:popupMenu(Panel, Menu),
{noreply, State};
handle_event(#wx{obj = Menu, id = Id,
event = #wxCommand{type = command_menu_selected}},
State = #state{}) ->
%% Get the selected item label
Label = wxMenu:getLabel(Menu, Id),
demo:format(State#state.config, "wxMenu clicked ~p\n", [Label]),
{noreply, State};
handle_event(Ev, State) ->
demo:format(State#state.config, "Unexpected Event ~p\n", [Ev]),
{noreply, State}.
%% Callbacks handled as normal gen_server callbacks
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, #state{menu=Popup}) ->
wxMenu:destroy(Popup),
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Local functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
create_menu() ->
Menu = wxMenu:new([]),
SubMenu = wxMenu:new([]),
SubMenu2 = wxMenu:new([]),
wxMenu:append(Menu, ?wxID_UNDO, "Undo", []),
wxMenu:append(Menu, ?wxID_REDO, "Redo", []),
wxMenu:append(Menu, ?wxID_HELP, "Help", []),
wxMenu:appendSeparator(Menu),
wxMenu:appendCheckItem(Menu, ?wxID_ANY, "Check item", []),
wxMenu:appendSeparator(Menu),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 1", []),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 2", []),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 3", []),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 4", []),
wxMenu:appendSeparator(Menu),
wxMenuItem:enable(wxMenu:append(Menu, ?wxID_ANY, "Disabled", []), [{enable,false}]),
wxMenu:appendSeparator(Menu),
wxMenu:append(SubMenu, ?wxID_ABOUT, "About", []),
wxMenu:append(SubMenu, ?wxID_ANY, "Sub Item2", []),
wxMenu:append(SubMenu, ?wxID_SAVE, "Save", []),
wxMenu:break(SubMenu),
wxMenu:append(SubMenu, ?wxID_EXIT, "Exit", []),
wxMenu:append(SubMenu, ?wxID_OPEN, "Open", []),
wxMenu:append(SubMenu, ?wxID_NEW, "New", []),
wxMenu:append(Menu, ?wxID_ANY, "Sub menu", SubMenu, []),
wxMenu:appendCheckItem(SubMenu2, ?wxID_ANY, "Check Item", []),
wxMenu:appendSeparator(SubMenu2),
wxMenu:append(SubMenu2, ?wxID_CLEAR, "Clear", []),
wxMenu:append(SubMenu2, ?wxID_ANY, "Sub Item", []),
Bitmap = wxArtProvider:getBitmap("wxART_NEW"),
AnotherSubMenu = wxMenuItem:new([{parentMenu, Menu},
{id, ?wxID_ANY},
{text, "Another sub menu"},
{subMenu, SubMenu2},
{kind, ?wxITEM_NORMAL}]),
wxMenuItem:setBitmap(AnotherSubMenu, Bitmap),
wxMenu:append(Menu, AnotherSubMenu),
wxMenu:connect(Menu, command_menu_selected),
wxMenu:connect(SubMenu, command_menu_selected),
wxMenu:connect(SubMenu2, command_menu_selected),
Menu.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/examples/demo/ex_popupMenu.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Client API
wx_object callbacks
Async Events are handled in handle_event as in handle_info
Open the popup menu
Get the selected item label
Callbacks handled as normal gen_server callbacks
Local functions
| Copyright Ericsson AB 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(ex_popupMenu).
-behaviour(wx_object).
-export([start/1]).
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config,
menu
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Root = proplists:get_value(parent, Config),
Parent = wxPanel:new(Root,[]),
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Box = wxStaticBox:new(Parent, ?wxID_ANY, "Popup Menu"),
Sz = wxStaticBoxSizer:new(Box, ?wxVERTICAL),
Text = wxStaticText:new(Parent, ?wxID_ANY, "Right click to open popup menu", []),
Panel = wxPanel:new(Parent),
wxPanel:connect(Panel, right_up),
Sizer = wxBoxSizer:new(?wxVERTICAL),
wxSizer:add(Sizer, Text, [{border, 20}, {flag, ?wxALL}]),
wxPanel:setSizer(Panel, Sizer),
wxSizer:add(Sz, Panel, [{proportion,1}, {flag, ?wxEXPAND}]),
wxSizer:layout(Sz),
PopupMenu = create_menu(),
wxSizer:add(MainSizer, Sz, [{proportion,1}, {flag, ?wxEXPAND}]),
wxWindow:setSizer(Parent, MainSizer),
{Parent, #state{parent=Parent, config=Config, menu=PopupMenu}}.
handle_event(#wx{obj = Panel,
event = #wxMouse{type = right_up}},
State = #state{menu = Menu}) ->
wxWindow:popupMenu(Panel, Menu),
{noreply, State};
handle_event(#wx{obj = Menu, id = Id,
event = #wxCommand{type = command_menu_selected}},
State = #state{}) ->
Label = wxMenu:getLabel(Menu, Id),
demo:format(State#state.config, "wxMenu clicked ~p\n", [Label]),
{noreply, State};
handle_event(Ev, State) ->
demo:format(State#state.config, "Unexpected Event ~p\n", [Ev]),
{noreply, State}.
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, #state{menu=Popup}) ->
wxMenu:destroy(Popup),
ok.
create_menu() ->
Menu = wxMenu:new([]),
SubMenu = wxMenu:new([]),
SubMenu2 = wxMenu:new([]),
wxMenu:append(Menu, ?wxID_UNDO, "Undo", []),
wxMenu:append(Menu, ?wxID_REDO, "Redo", []),
wxMenu:append(Menu, ?wxID_HELP, "Help", []),
wxMenu:appendSeparator(Menu),
wxMenu:appendCheckItem(Menu, ?wxID_ANY, "Check item", []),
wxMenu:appendSeparator(Menu),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 1", []),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 2", []),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 3", []),
wxMenu:appendRadioItem(Menu, ?wxID_ANY, "Radio item 4", []),
wxMenu:appendSeparator(Menu),
wxMenuItem:enable(wxMenu:append(Menu, ?wxID_ANY, "Disabled", []), [{enable,false}]),
wxMenu:appendSeparator(Menu),
wxMenu:append(SubMenu, ?wxID_ABOUT, "About", []),
wxMenu:append(SubMenu, ?wxID_ANY, "Sub Item2", []),
wxMenu:append(SubMenu, ?wxID_SAVE, "Save", []),
wxMenu:break(SubMenu),
wxMenu:append(SubMenu, ?wxID_EXIT, "Exit", []),
wxMenu:append(SubMenu, ?wxID_OPEN, "Open", []),
wxMenu:append(SubMenu, ?wxID_NEW, "New", []),
wxMenu:append(Menu, ?wxID_ANY, "Sub menu", SubMenu, []),
wxMenu:appendCheckItem(SubMenu2, ?wxID_ANY, "Check Item", []),
wxMenu:appendSeparator(SubMenu2),
wxMenu:append(SubMenu2, ?wxID_CLEAR, "Clear", []),
wxMenu:append(SubMenu2, ?wxID_ANY, "Sub Item", []),
Bitmap = wxArtProvider:getBitmap("wxART_NEW"),
AnotherSubMenu = wxMenuItem:new([{parentMenu, Menu},
{id, ?wxID_ANY},
{text, "Another sub menu"},
{subMenu, SubMenu2},
{kind, ?wxITEM_NORMAL}]),
wxMenuItem:setBitmap(AnotherSubMenu, Bitmap),
wxMenu:append(Menu, AnotherSubMenu),
wxMenu:connect(Menu, command_menu_selected),
wxMenu:connect(SubMenu, command_menu_selected),
wxMenu:connect(SubMenu2, command_menu_selected),
Menu.
|
b92faac0526ddc0dc67a4598d3aa2cb08b5f6c0fef9e70d946ec2bfe29b50a88 | osstotalsoft/functional-guy | 01.Map.hs | incAll' [] = []
incAll' (x : xs) = x + 1 : incAll' xs
doubleAll' [] = []
doubleAll' (x : xs) = x * 2 : doubleAll' xs
map' _ [] = []
map' f (x : xs) = f x : map' f xs
incAll = map (+ 1)
doubleAll = map (* 2) | null | https://raw.githubusercontent.com/osstotalsoft/functional-guy/c02a8b22026c261a9722551f3641228dc02619ba/Chapter2.%20The%20foundation/Exercises/02.Hofs/01.Map.hs | haskell | incAll' [] = []
incAll' (x : xs) = x + 1 : incAll' xs
doubleAll' [] = []
doubleAll' (x : xs) = x * 2 : doubleAll' xs
map' _ [] = []
map' f (x : xs) = f x : map' f xs
incAll = map (+ 1)
doubleAll = map (* 2) | |
91ea77d75301773f1a692e696d4041403f8ac0d1cf637cdde070bba149993e87 | redbadger/karma-tracker | project.clj | (defproject karma-tracker-ui "0.1.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.9.229"]
[com.andrewmcveigh/cljs-time "0.4.0"]
[kibu/pushy "0.3.6"]
[reagent "0.6.0"]
[re-frame "0.9.2"]
[day8.re-frame/http-fx "0.1.3"]
[secretary "1.2.3"]]
:plugins [[lein-cljsbuild "1.1.4"]]
:min-lein-version "2.5.3"
:source-paths ["src/clj"]
:profiles {:dev {:dependencies [[binaryage/devtools "0.8.2"]]}}
:cljsbuild {:builds [{:id "development"
:source-paths ["src/cljs"]
:compiler {:main karma-tracker-ui.core
:output-to "target/js/app.js"
:output-dir "target/js/out"
:asset-path ""
:source-map-timestamp true
:preloads [devtools.preload]
:external-config {:devtools/config {:features-to-install :all}}}}
{:id "production"
:source-paths ["src/cljs"]
:compiler {:main karma-tracker-ui.core
:output-to "target/js/app.js"
:optimizations :advanced
:closure-defines {goog.DEBUG false
karma-tracker-ui.config.api-url "-badger.com/api"}
:pretty-print false}}]})
| null | https://raw.githubusercontent.com/redbadger/karma-tracker/c5375f32f4cd0386f6bb1560d979b79bceea19e2/ui/project.clj | clojure | (defproject karma-tracker-ui "0.1.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.9.229"]
[com.andrewmcveigh/cljs-time "0.4.0"]
[kibu/pushy "0.3.6"]
[reagent "0.6.0"]
[re-frame "0.9.2"]
[day8.re-frame/http-fx "0.1.3"]
[secretary "1.2.3"]]
:plugins [[lein-cljsbuild "1.1.4"]]
:min-lein-version "2.5.3"
:source-paths ["src/clj"]
:profiles {:dev {:dependencies [[binaryage/devtools "0.8.2"]]}}
:cljsbuild {:builds [{:id "development"
:source-paths ["src/cljs"]
:compiler {:main karma-tracker-ui.core
:output-to "target/js/app.js"
:output-dir "target/js/out"
:asset-path ""
:source-map-timestamp true
:preloads [devtools.preload]
:external-config {:devtools/config {:features-to-install :all}}}}
{:id "production"
:source-paths ["src/cljs"]
:compiler {:main karma-tracker-ui.core
:output-to "target/js/app.js"
:optimizations :advanced
:closure-defines {goog.DEBUG false
karma-tracker-ui.config.api-url "-badger.com/api"}
:pretty-print false}}]})
| |
221c2fbf4fd3ce9e8aab0bdc5f1cd905d886b60be930f4e51dec224f825501c2 | wellposed/numerical | Phased.hs |
module Numerical.Array.Phased where
An array storage type + world pair is said to have Phased instance
when there are both Array and MArray instances for that storage + world pair .
it is only when we have both mutable and immutable array variants
with the same storage rep , in the same world , that we can support
doing freeze and thaw on arrays .
the reason why this is separate from the the Array and MArray instances
is that we ca n't ( in general ) assume that every immutable array type
has a corresponding mutable one .
NB , however , could we always assume that if we have a mutable array type ,
that there s always a corresponding immutable type ?
An array storage type + world pair is said to have Phased instance
when there are both Array and MArray instances for that storage +world pair.
it is only when we have both mutable and immutable array variants
with the same storage rep, in the same world, that we can support
doing freeze and thaw on arrays.
the reason why this is separate from the the Array and MArray instances
is that we can't (in general) assume that every immutable array type
has a corresponding mutable one.
NB, however, could we always assume that if we have a mutable array type,
that theres always a corresponding immutable type?
-}
| null | https://raw.githubusercontent.com/wellposed/numerical/6b458232760b20674487bd9f8442b0991ce59423/src/Numerical/Array/Phased.hs | haskell |
module Numerical.Array.Phased where
An array storage type + world pair is said to have Phased instance
when there are both Array and MArray instances for that storage + world pair .
it is only when we have both mutable and immutable array variants
with the same storage rep , in the same world , that we can support
doing freeze and thaw on arrays .
the reason why this is separate from the the Array and MArray instances
is that we ca n't ( in general ) assume that every immutable array type
has a corresponding mutable one .
NB , however , could we always assume that if we have a mutable array type ,
that there s always a corresponding immutable type ?
An array storage type + world pair is said to have Phased instance
when there are both Array and MArray instances for that storage +world pair.
it is only when we have both mutable and immutable array variants
with the same storage rep, in the same world, that we can support
doing freeze and thaw on arrays.
the reason why this is separate from the the Array and MArray instances
is that we can't (in general) assume that every immutable array type
has a corresponding mutable one.
NB, however, could we always assume that if we have a mutable array type,
that theres always a corresponding immutable type?
-}
| |
f15faa59bcbc286d69edc022adc642fc458bdb33dfb3e5083d36920d597d79fc | ulricha/dsh | Lang.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
-- | Definition of the Segment Language (SL): The Segment Language defines
-- operations over flat segment vectors.
module Database.DSH.SL.Lang where
import Data.Aeson.TH
import Database.Algebra.Dag.Common
import qualified Database.DSH.Common.Lang as L
import Database.DSH.Common.VectorLang
--------------------------------------------------------------------------------
Vector Language operators . Documentation can be found in module
VectorAlgebra .
data NullOp = Lit (PType, VecSegs)
| TableRef (String, L.BaseTableSchema)
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''NullOp)
data UnOp r e = Segment
| Unsegment
| R1
| R2
| R3
| Project r
| Select e
| GroupAggr (r, L.NE (AggrFun e))
| Number
| Unique
| Reverse
| Sort r
| Group r
| WinFun (WinFun e, FrameSpec)
| Fold (AggrFun e)
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''UnOp)
data BinOp e = ReplicateNest
| ReplicateScalar
| ReplicateVector
| AppKey
| AppSort
| AppFilter
| AppRep
| MergeSeg
| UnboxSng
| UnboxDefault e
| Align
| Append
| Zip
| CartProduct
| ThetaJoin (L.JoinPredicate e)
| SemiJoin (L.JoinPredicate e)
| AntiJoin (L.JoinPredicate e)
| NestJoin (L.JoinPredicate e)
| GroupJoin (L.JoinPredicate e, L.NE (AggrFun e))
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''BinOp)
( DBV , RenameVector , RenameVector )
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''TerOp)
--------------------------------------------------------------------------------
type SLOp r e = Algebra TerOp (BinOp e) (UnOp r e) NullOp AlgNode
newtype SL r e = SL
{ unSL :: SLOp r e
} deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''SL)
instance Ordish r e => Operator (SL r e) where
opChildren = opChildren . unSL
replaceOpChild (SL a) n1 n2 = SL (replaceOpChild a n1 n2)
type TSL = SL TExpr TExpr
type RSL = SL VRow RExpr
| null | https://raw.githubusercontent.com/ulricha/dsh/e6cd5c6bea575e62a381e89bfc4cc7cb97485106/src/Database/DSH/SL/Lang.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeSynonymInstances #
| Definition of the Segment Language (SL): The Segment Language defines
operations over flat segment vectors.
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | # LANGUAGE FlexibleInstances #
module Database.DSH.SL.Lang where
import Data.Aeson.TH
import Database.Algebra.Dag.Common
import qualified Database.DSH.Common.Lang as L
import Database.DSH.Common.VectorLang
Vector Language operators . Documentation can be found in module
VectorAlgebra .
data NullOp = Lit (PType, VecSegs)
| TableRef (String, L.BaseTableSchema)
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''NullOp)
data UnOp r e = Segment
| Unsegment
| R1
| R2
| R3
| Project r
| Select e
| GroupAggr (r, L.NE (AggrFun e))
| Number
| Unique
| Reverse
| Sort r
| Group r
| WinFun (WinFun e, FrameSpec)
| Fold (AggrFun e)
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''UnOp)
data BinOp e = ReplicateNest
| ReplicateScalar
| ReplicateVector
| AppKey
| AppSort
| AppFilter
| AppRep
| MergeSeg
| UnboxSng
| UnboxDefault e
| Align
| Append
| Zip
| CartProduct
| ThetaJoin (L.JoinPredicate e)
| SemiJoin (L.JoinPredicate e)
| AntiJoin (L.JoinPredicate e)
| NestJoin (L.JoinPredicate e)
| GroupJoin (L.JoinPredicate e, L.NE (AggrFun e))
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''BinOp)
( DBV , RenameVector , RenameVector )
deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''TerOp)
type SLOp r e = Algebra TerOp (BinOp e) (UnOp r e) NullOp AlgNode
newtype SL r e = SL
{ unSL :: SLOp r e
} deriving (Eq, Ord, Show)
$(deriveJSON defaultOptions ''SL)
instance Ordish r e => Operator (SL r e) where
opChildren = opChildren . unSL
replaceOpChild (SL a) n1 n2 = SL (replaceOpChild a n1 n2)
type TSL = SL TExpr TExpr
type RSL = SL VRow RExpr
|
162c5e0ff8f01e38178ecd9d5c1cff3124ecab793284049d18d857f58f88188a | ml4tp/tcoq | index.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Printf
open Cdglobals
type loc = int
type entry_type =
| Library
| Module
| Definition
| Inductive
| Constructor
| Lemma
| Record
| Projection
| Instance
| Class
| Method
| Variable
| Axiom
| TacticDefinition
| Abbreviation
| Notation
| Section
type index_entry =
| Def of string * entry_type
| Ref of coq_module * string * entry_type
let current_library = ref ""
(** refers to the file being parsed *)
(** [deftable] stores only definitions and is used to build the index *)
let deftable = Hashtbl.create 97
(** [byidtable] is used to interpolate idents inside comments, which are not
globalized otherwise. *)
let byidtable = Hashtbl.create 97
(** [reftable] stores references and definitions *)
let reftable = Hashtbl.create 97
let full_ident sp id =
if sp <> "<>" then
if id <> "<>" then
sp ^ "." ^ id
else sp
else if id <> "<>"
then id
else ""
let add_def loc1 loc2 ty sp id =
let fullid = full_ident sp id in
let def = Def (fullid, ty) in
for loc = loc1 to loc2 do
Hashtbl.add reftable (!current_library, loc) def
done;
Hashtbl.add deftable !current_library (fullid, ty);
Hashtbl.add byidtable id (!current_library, fullid, ty)
let add_ref m loc m' sp id ty =
let fullid = full_ident sp id in
if Hashtbl.mem reftable (m, loc) then ()
else Hashtbl.add reftable (m, loc) (Ref (m', fullid, ty));
let idx = if id = "<>" then m' else id in
if Hashtbl.mem byidtable idx then ()
else Hashtbl.add byidtable idx (m', fullid, ty)
let find m l = Hashtbl.find reftable (m, l)
let find_string m s = let (m,s,t) = Hashtbl.find byidtable s in Ref (m,s,t)
Coq modules
let split_sp s =
try
let i = String.rindex s '.' in
String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1)
with
Not_found -> "", s
let modules = Hashtbl.create 97
let local_modules = Hashtbl.create 97
let add_module m =
let _,id = split_sp m in
Hashtbl.add modules id m;
Hashtbl.add local_modules m ()
type module_kind = Local | External of string | Unknown
let external_libraries = ref []
let add_external_library logicalpath url =
external_libraries := (logicalpath,url) :: !external_libraries
let find_external_library logicalpath =
let rec aux = function
| [] -> raise Not_found
| (l,u)::rest ->
if String.length logicalpath > String.length l &&
String.sub logicalpath 0 (String.length l + 1) = l ^"."
then u
else aux rest
in aux !external_libraries
let init_coqlib_library () = add_external_library "Coq" !coqlib
let find_module m =
if Hashtbl.mem local_modules m then
Local
else
try External (Filename.concat (find_external_library m) m)
with Not_found -> Unknown
(* Building indexes *)
type 'a index = {
idx_name : string;
idx_entries : (char * (string * 'a) list) list;
idx_size : int }
let map f i =
{ i with idx_entries =
List.map
(fun (c,l) -> (c, List.map (fun (s,x) -> (s,f s x)) l))
i.idx_entries }
let compare_entries (s1,_) (s2,_) = Alpha.compare_string s1 s2
let sort_entries el =
let t = Hashtbl.create 97 in
List.iter
(fun c -> Hashtbl.add t c [])
['A'; 'B'; 'C'; 'D'; 'E'; 'F'; 'G'; 'H'; 'I'; 'J'; 'K'; 'L'; 'M'; 'N';
'O'; 'P'; 'Q'; 'R'; 'S'; 'T'; 'U'; 'V'; 'W'; 'X'; 'Y'; 'Z'; '_'; '*'];
List.iter
(fun ((s,_) as e) ->
let c = Alpha.norm_char s.[0] in
let c,l =
try c,Hashtbl.find t c with Not_found -> '*',Hashtbl.find t '*' in
Hashtbl.replace t c (e :: l))
el;
let res = ref [] in
Hashtbl.iter (fun c l -> res := (c, List.sort compare_entries l) :: !res) t;
List.sort (fun (c1,_) (c2,_) -> Alpha.compare_char c1 c2) !res
let display_letter c = if c = '*' then "other" else String.make 1 c
let type_name = function
| Library ->
let ln = !lib_name in
if ln <> "" then String.lowercase ln else "library"
| Module -> "module"
| Definition -> "definition"
| Inductive -> "inductive"
| Constructor -> "constructor"
| Lemma -> "lemma"
| Record -> "record"
| Projection -> "projection"
| Instance -> "instance"
| Class -> "class"
| Method -> "method"
| Variable -> "variable"
| Axiom -> "axiom"
| TacticDefinition -> "tactic"
| Abbreviation -> "abbreviation"
| Notation -> "notation"
| Section -> "section"
let prepare_entry s = function
| Notation ->
(* We decode the encoding done in Dumpglob.cook_notation of coqtop *)
(* Encoded notations have the form section:sc:x_'++'_x where: *)
(* - the section, if any, ends with a "." *)
(* - the scope can be empty *)
(* - tokens are separated with "_" *)
(* - non-terminal symbols are conventionally represented by "x" *)
(* - terminals are enclosed within simple quotes *)
(* - existing simple quotes (that necessarily are parts of *)
(* terminals) are doubled *)
(* (as a consequence, when a terminal contains "_" or "x", these *)
(* necessarily appear enclosed within non-doubled simple quotes) *)
- non - printable characters < 32 are left encoded so that they
(* are human-readable in index files *)
(* Example: "x ' %x _% y %'x %'_' z" is encoded as *)
(* "x_''''_'%x'_'_%'_x_'%''x'_'%''_'''_x" *)
let err () = eprintf "Invalid notation in globalization file\n"; exit 1 in
let h = try String.index_from s 0 ':' with _ -> err () in
let i = try String.index_from s (h+1) ':' with _ -> err () in
let sc = String.sub s (h+1) (i-h-1) in
let ntn = String.make (String.length s - i) ' ' in
let k = ref 0 in
let j = ref (i+1) in
let quoted = ref false in
let l = String.length s - 1 in
while !j <= l do
if not !quoted then begin
(match s.[!j] with
| '_' -> ntn.[!k] <- ' '; incr k
| 'x' -> ntn.[!k] <- '_'; incr k
| '\'' -> quoted := true
| _ -> assert false)
end
else
if s.[!j] = '\'' then
if (!j = l || s.[!j+1] = '_') then quoted := false
else (incr j; ntn.[!k] <- s.[!j]; incr k)
else begin
ntn.[!k] <- s.[!j];
incr k
end;
incr j
done;
let ntn = String.sub ntn 0 !k in
if sc = "" then ntn else ntn ^ " (" ^ sc ^ ")"
| _ ->
s
let all_entries () =
let gl = ref [] in
let add_g s m t = gl := (s,(m,t)) :: !gl in
let bt = Hashtbl.create 11 in
let add_bt t s m =
let l = try Hashtbl.find bt t with Not_found -> [] in
Hashtbl.replace bt t ((s,m) :: l)
in
let classify m (s,t) = (add_g s m t; add_bt t s m) in
Hashtbl.iter classify deftable;
Hashtbl.iter (fun id m -> add_g id m Library; add_bt Library id m) modules;
{ idx_name = "global";
idx_entries = sort_entries !gl;
idx_size = List.length !gl },
Hashtbl.fold (fun t e l -> (t, { idx_name = type_name t;
idx_entries = sort_entries e;
idx_size = List.length e }) :: l) bt []
let type_of_string = function
| "def" | "coe" | "subclass" | "canonstruc" | "fix" | "cofix"
| "ex" | "scheme" -> Definition
| "prf" | "thm" -> Lemma
| "ind" | "variant" | "coind" -> Inductive
| "constr" -> Constructor
| "indrec" | "rec" | "corec" -> Record
| "proj" -> Projection
| "class" -> Class
| "meth" -> Method
| "inst" -> Instance
| "var" -> Variable
| "defax" | "prfax" | "ax" -> Axiom
| "syndef" -> Abbreviation
| "not" -> Notation
| "lib" -> Library
| "mod" | "modtype" -> Module
| "tac" -> TacticDefinition
| "sec" -> Section
| s -> invalid_arg ("type_of_string:" ^ s)
let ill_formed_glob_file f =
eprintf "Warning: ill-formed file %s (links will not be available)\n" f
let outdated_glob_file f =
eprintf "Warning: %s not consistent with corresponding .v file (links will not be available)\n" f
let correct_file vfile f c =
let s = input_line c in
if String.length s < 7 || String.sub s 0 7 <> "DIGEST " then
(ill_formed_glob_file f; false)
else
let s = String.sub s 7 (String.length s - 7) in
match vfile, s with
| None, "NO" -> true
| Some _, "NO" -> ill_formed_glob_file f; false
| None, _ -> ill_formed_glob_file f; false
| Some vfile, s ->
s = Digest.to_hex (Digest.file vfile) || (outdated_glob_file f; false)
let read_glob vfile f =
let c = open_in f in
if correct_file vfile f c then
let cur_mod = ref "" in
try
while true do
let s = input_line c in
let n = String.length s in
if n > 0 then begin
match s.[0] with
| 'F' ->
cur_mod := String.sub s 1 (n - 1);
current_library := !cur_mod
| 'R' ->
(try
Scanf.sscanf s "R%d:%d %s %s %s %s"
(fun loc1 loc2 lib_dp sp id ty ->
for loc=loc1 to loc2 do
add_ref !cur_mod loc lib_dp sp id (type_of_string ty);
(* Also add an entry for each module mentioned in [lib_dp],
* to use in interpolation. *)
ignore (List.fold_right (fun thisPiece priorPieces ->
let newPieces = match priorPieces with
| "" -> thisPiece
| _ -> thisPiece ^ "." ^ priorPieces in
add_ref !cur_mod loc "" "" newPieces Library;
newPieces) (Str.split (Str.regexp_string ".") lib_dp) "")
done)
with _ -> ())
| _ ->
try Scanf.sscanf s "%s %d:%d %s %s"
(fun ty loc1 loc2 sp id ->
add_def loc1 loc2 (type_of_string ty) sp id)
with Scanf.Scan_failure _ -> ()
end
done; assert false
with End_of_file ->
close_in c
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/tools/coqdoc/index.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* refers to the file being parsed
* [deftable] stores only definitions and is used to build the index
* [byidtable] is used to interpolate idents inside comments, which are not
globalized otherwise.
* [reftable] stores references and definitions
Building indexes
We decode the encoding done in Dumpglob.cook_notation of coqtop
Encoded notations have the form section:sc:x_'++'_x where:
- the section, if any, ends with a "."
- the scope can be empty
- tokens are separated with "_"
- non-terminal symbols are conventionally represented by "x"
- terminals are enclosed within simple quotes
- existing simple quotes (that necessarily are parts of
terminals) are doubled
(as a consequence, when a terminal contains "_" or "x", these
necessarily appear enclosed within non-doubled simple quotes)
are human-readable in index files
Example: "x ' %x _% y %'x %'_' z" is encoded as
"x_''''_'%x'_'_%'_x_'%''x'_'%''_'''_x"
Also add an entry for each module mentioned in [lib_dp],
* to use in interpolation. | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Printf
open Cdglobals
type loc = int
type entry_type =
| Library
| Module
| Definition
| Inductive
| Constructor
| Lemma
| Record
| Projection
| Instance
| Class
| Method
| Variable
| Axiom
| TacticDefinition
| Abbreviation
| Notation
| Section
type index_entry =
| Def of string * entry_type
| Ref of coq_module * string * entry_type
let current_library = ref ""
let deftable = Hashtbl.create 97
let byidtable = Hashtbl.create 97
let reftable = Hashtbl.create 97
let full_ident sp id =
if sp <> "<>" then
if id <> "<>" then
sp ^ "." ^ id
else sp
else if id <> "<>"
then id
else ""
let add_def loc1 loc2 ty sp id =
let fullid = full_ident sp id in
let def = Def (fullid, ty) in
for loc = loc1 to loc2 do
Hashtbl.add reftable (!current_library, loc) def
done;
Hashtbl.add deftable !current_library (fullid, ty);
Hashtbl.add byidtable id (!current_library, fullid, ty)
let add_ref m loc m' sp id ty =
let fullid = full_ident sp id in
if Hashtbl.mem reftable (m, loc) then ()
else Hashtbl.add reftable (m, loc) (Ref (m', fullid, ty));
let idx = if id = "<>" then m' else id in
if Hashtbl.mem byidtable idx then ()
else Hashtbl.add byidtable idx (m', fullid, ty)
let find m l = Hashtbl.find reftable (m, l)
let find_string m s = let (m,s,t) = Hashtbl.find byidtable s in Ref (m,s,t)
Coq modules
let split_sp s =
try
let i = String.rindex s '.' in
String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1)
with
Not_found -> "", s
let modules = Hashtbl.create 97
let local_modules = Hashtbl.create 97
let add_module m =
let _,id = split_sp m in
Hashtbl.add modules id m;
Hashtbl.add local_modules m ()
type module_kind = Local | External of string | Unknown
let external_libraries = ref []
let add_external_library logicalpath url =
external_libraries := (logicalpath,url) :: !external_libraries
let find_external_library logicalpath =
let rec aux = function
| [] -> raise Not_found
| (l,u)::rest ->
if String.length logicalpath > String.length l &&
String.sub logicalpath 0 (String.length l + 1) = l ^"."
then u
else aux rest
in aux !external_libraries
let init_coqlib_library () = add_external_library "Coq" !coqlib
let find_module m =
if Hashtbl.mem local_modules m then
Local
else
try External (Filename.concat (find_external_library m) m)
with Not_found -> Unknown
type 'a index = {
idx_name : string;
idx_entries : (char * (string * 'a) list) list;
idx_size : int }
let map f i =
{ i with idx_entries =
List.map
(fun (c,l) -> (c, List.map (fun (s,x) -> (s,f s x)) l))
i.idx_entries }
let compare_entries (s1,_) (s2,_) = Alpha.compare_string s1 s2
let sort_entries el =
let t = Hashtbl.create 97 in
List.iter
(fun c -> Hashtbl.add t c [])
['A'; 'B'; 'C'; 'D'; 'E'; 'F'; 'G'; 'H'; 'I'; 'J'; 'K'; 'L'; 'M'; 'N';
'O'; 'P'; 'Q'; 'R'; 'S'; 'T'; 'U'; 'V'; 'W'; 'X'; 'Y'; 'Z'; '_'; '*'];
List.iter
(fun ((s,_) as e) ->
let c = Alpha.norm_char s.[0] in
let c,l =
try c,Hashtbl.find t c with Not_found -> '*',Hashtbl.find t '*' in
Hashtbl.replace t c (e :: l))
el;
let res = ref [] in
Hashtbl.iter (fun c l -> res := (c, List.sort compare_entries l) :: !res) t;
List.sort (fun (c1,_) (c2,_) -> Alpha.compare_char c1 c2) !res
let display_letter c = if c = '*' then "other" else String.make 1 c
let type_name = function
| Library ->
let ln = !lib_name in
if ln <> "" then String.lowercase ln else "library"
| Module -> "module"
| Definition -> "definition"
| Inductive -> "inductive"
| Constructor -> "constructor"
| Lemma -> "lemma"
| Record -> "record"
| Projection -> "projection"
| Instance -> "instance"
| Class -> "class"
| Method -> "method"
| Variable -> "variable"
| Axiom -> "axiom"
| TacticDefinition -> "tactic"
| Abbreviation -> "abbreviation"
| Notation -> "notation"
| Section -> "section"
let prepare_entry s = function
| Notation ->
- non - printable characters < 32 are left encoded so that they
let err () = eprintf "Invalid notation in globalization file\n"; exit 1 in
let h = try String.index_from s 0 ':' with _ -> err () in
let i = try String.index_from s (h+1) ':' with _ -> err () in
let sc = String.sub s (h+1) (i-h-1) in
let ntn = String.make (String.length s - i) ' ' in
let k = ref 0 in
let j = ref (i+1) in
let quoted = ref false in
let l = String.length s - 1 in
while !j <= l do
if not !quoted then begin
(match s.[!j] with
| '_' -> ntn.[!k] <- ' '; incr k
| 'x' -> ntn.[!k] <- '_'; incr k
| '\'' -> quoted := true
| _ -> assert false)
end
else
if s.[!j] = '\'' then
if (!j = l || s.[!j+1] = '_') then quoted := false
else (incr j; ntn.[!k] <- s.[!j]; incr k)
else begin
ntn.[!k] <- s.[!j];
incr k
end;
incr j
done;
let ntn = String.sub ntn 0 !k in
if sc = "" then ntn else ntn ^ " (" ^ sc ^ ")"
| _ ->
s
let all_entries () =
let gl = ref [] in
let add_g s m t = gl := (s,(m,t)) :: !gl in
let bt = Hashtbl.create 11 in
let add_bt t s m =
let l = try Hashtbl.find bt t with Not_found -> [] in
Hashtbl.replace bt t ((s,m) :: l)
in
let classify m (s,t) = (add_g s m t; add_bt t s m) in
Hashtbl.iter classify deftable;
Hashtbl.iter (fun id m -> add_g id m Library; add_bt Library id m) modules;
{ idx_name = "global";
idx_entries = sort_entries !gl;
idx_size = List.length !gl },
Hashtbl.fold (fun t e l -> (t, { idx_name = type_name t;
idx_entries = sort_entries e;
idx_size = List.length e }) :: l) bt []
let type_of_string = function
| "def" | "coe" | "subclass" | "canonstruc" | "fix" | "cofix"
| "ex" | "scheme" -> Definition
| "prf" | "thm" -> Lemma
| "ind" | "variant" | "coind" -> Inductive
| "constr" -> Constructor
| "indrec" | "rec" | "corec" -> Record
| "proj" -> Projection
| "class" -> Class
| "meth" -> Method
| "inst" -> Instance
| "var" -> Variable
| "defax" | "prfax" | "ax" -> Axiom
| "syndef" -> Abbreviation
| "not" -> Notation
| "lib" -> Library
| "mod" | "modtype" -> Module
| "tac" -> TacticDefinition
| "sec" -> Section
| s -> invalid_arg ("type_of_string:" ^ s)
let ill_formed_glob_file f =
eprintf "Warning: ill-formed file %s (links will not be available)\n" f
let outdated_glob_file f =
eprintf "Warning: %s not consistent with corresponding .v file (links will not be available)\n" f
let correct_file vfile f c =
let s = input_line c in
if String.length s < 7 || String.sub s 0 7 <> "DIGEST " then
(ill_formed_glob_file f; false)
else
let s = String.sub s 7 (String.length s - 7) in
match vfile, s with
| None, "NO" -> true
| Some _, "NO" -> ill_formed_glob_file f; false
| None, _ -> ill_formed_glob_file f; false
| Some vfile, s ->
s = Digest.to_hex (Digest.file vfile) || (outdated_glob_file f; false)
let read_glob vfile f =
let c = open_in f in
if correct_file vfile f c then
let cur_mod = ref "" in
try
while true do
let s = input_line c in
let n = String.length s in
if n > 0 then begin
match s.[0] with
| 'F' ->
cur_mod := String.sub s 1 (n - 1);
current_library := !cur_mod
| 'R' ->
(try
Scanf.sscanf s "R%d:%d %s %s %s %s"
(fun loc1 loc2 lib_dp sp id ty ->
for loc=loc1 to loc2 do
add_ref !cur_mod loc lib_dp sp id (type_of_string ty);
ignore (List.fold_right (fun thisPiece priorPieces ->
let newPieces = match priorPieces with
| "" -> thisPiece
| _ -> thisPiece ^ "." ^ priorPieces in
add_ref !cur_mod loc "" "" newPieces Library;
newPieces) (Str.split (Str.regexp_string ".") lib_dp) "")
done)
with _ -> ())
| _ ->
try Scanf.sscanf s "%s %d:%d %s %s"
(fun ty loc1 loc2 sp id ->
add_def loc1 loc2 (type_of_string ty) sp id)
with Scanf.Scan_failure _ -> ()
end
done; assert false
with End_of_file ->
close_in c
|
019a801ded3aa2ace3c90f22dca9749fa0bb25d05129f06d86785106f209ff33 | clojure/core.rrb-vector | long_test.cljs | (ns clojure.core.rrb-vector.long-test
(:require [clojure.test :as test :refer [deftest testing is are]]
[clojure.core.rrb-vector.test-utils :as u]
[clojure.core.rrb-vector :as fv]
[clojure.core.rrb-vector.debug :as dv]
[clojure.core.rrb-vector.debug-platform-dependent :as dpd]))
;; The intent is to keep this file as close to
;; src/test/clojure/clojure/core/rrb_vector/long_test.clj as possible,
;; so that when we start requiring Clojure 1.7.0 and later for this
;; library, this file and that one can be replaced with a common test
file with the suffix .cljc
;; Note that the namespace of this file _intentionally_ does not match
;; the pattern of namespaces that are run for ClojureScript tests by
;; default. That is because of how long the tests in this file take
;; to run. It seems best to include them in the set of tests in such
;; a way that it is only run when a developer explicitly wants to run
;; longer tests. It should not be run by default when running on
;; build.clojure.org.
Currently the Clojure / JVM versions of these tests _ are _ run by
;; default, and on build.clojure.org, but at least the ones in here
now run significantly faster on Clojure / JVM than they do in any of
the JavaScript runtimes I have tested with .
(dv/set-debug-opts! dv/full-debug-opts)
(def generative-test-length :short)
(def check-subvec-params (case generative-test-length
:short [125 100000 10]
:medium [250 200000 20]
:long [250 200000 20]))
(deftest test-slicing-generative
(testing "slicing (generative)"
(is (try
(apply dv/generative-check-subvec u/extra-checks? check-subvec-params)
(catch js/Error e
(throw (ex-info (dpd/format "%s: %s %s"
(u/ex-message-copy e)
(:init-cnt (ex-data e))
(:s&es (ex-data e)))
{}
(u/ex-cause-copy e))))))))
short : 2 to 3 sec
medium : 50 to 60 sec
(def check-catvec-params (case generative-test-length
:short [ 10 30 10 60000]
:medium [250 30 10 60000]
:long [250 30 10 60000]))
(deftest test-splicing-generative
(testing "splicing (generative)"
(is (try
(apply dv/generative-check-catvec u/extra-checks? check-catvec-params)
(catch js/Error e
(throw (ex-info (dpd/format "%s: %s"
(u/ex-message-copy e)
(:cnts (ex-data e)))
{}
(u/ex-cause-copy e))))))))
This problem reproduction code is from CRRBV-17 ticket :
;; -17
(def benchmark-size 100000)
;; This small variation of the program in the ticket simply does
;; progress debug printing occasionally, as well as extra debug
;; checking of the results occasionally.
;; If you enable the printing of the message that begins
;; with "splice-rrbts result had shift" in function
;; fallback-to-slow-splice-if-needed, then run this test, you will see
it called hundreds or perhaps thousands of times . The fallback
;; approach is effective at avoiding a crash for this scenario, but at
;; a dramatic extra run-time cost.
(defn vector-push-f [v my-catvec extra-checks-catvec]
(loop [v v
i 0]
(let [check? (or (zero? (mod i 10000))
(and (> i 99000) (zero? (mod i 100)))
(and (> i 99900) (zero? (mod i 10))))]
(when check?
(print "i=" i " ")
(u/print-optimizer-counts))
(if (< i benchmark-size)
(recur (if check?
(extra-checks-catvec (fv/vector i) v)
(my-catvec (fv/vector i) v))
(inc i))
v))))
Approximate run times for this test on a 2015 MacBook Pro
36 sec - clj 1.10.1 , OpenJDK 11.0.4
465 sec - cljs 1.10.439 , OpenJDK 11.0.4 , Nashorn JS runtime
138 sec - cljs 1.10.238 , OpenJDK 11.0.4 , nodejs 8.10.0
137 sec - cljs 1.10.238 , OpenJDK 11.0.4 ,
(deftest test-crrbv-17
(u/reset-optimizer-counts!)
(is (= (reverse (range benchmark-size))
(vector-push-f (fv/vector) fv/catvec dv/checking-catvec))))
| null | https://raw.githubusercontent.com/clojure/core.rrb-vector/88c2f814b47c0bbc4092dad82be2ec783ed2961f/src/test/cljs/clojure/core/rrb_vector/long_test.cljs | clojure | The intent is to keep this file as close to
src/test/clojure/clojure/core/rrb_vector/long_test.clj as possible,
so that when we start requiring Clojure 1.7.0 and later for this
library, this file and that one can be replaced with a common test
Note that the namespace of this file _intentionally_ does not match
the pattern of namespaces that are run for ClojureScript tests by
default. That is because of how long the tests in this file take
to run. It seems best to include them in the set of tests in such
a way that it is only run when a developer explicitly wants to run
longer tests. It should not be run by default when running on
build.clojure.org.
default, and on build.clojure.org, but at least the ones in here
-17
This small variation of the program in the ticket simply does
progress debug printing occasionally, as well as extra debug
checking of the results occasionally.
If you enable the printing of the message that begins
with "splice-rrbts result had shift" in function
fallback-to-slow-splice-if-needed, then run this test, you will see
approach is effective at avoiding a crash for this scenario, but at
a dramatic extra run-time cost. | (ns clojure.core.rrb-vector.long-test
(:require [clojure.test :as test :refer [deftest testing is are]]
[clojure.core.rrb-vector.test-utils :as u]
[clojure.core.rrb-vector :as fv]
[clojure.core.rrb-vector.debug :as dv]
[clojure.core.rrb-vector.debug-platform-dependent :as dpd]))
file with the suffix .cljc
Currently the Clojure / JVM versions of these tests _ are _ run by
now run significantly faster on Clojure / JVM than they do in any of
the JavaScript runtimes I have tested with .
(dv/set-debug-opts! dv/full-debug-opts)
(def generative-test-length :short)
(def check-subvec-params (case generative-test-length
:short [125 100000 10]
:medium [250 200000 20]
:long [250 200000 20]))
(deftest test-slicing-generative
(testing "slicing (generative)"
(is (try
(apply dv/generative-check-subvec u/extra-checks? check-subvec-params)
(catch js/Error e
(throw (ex-info (dpd/format "%s: %s %s"
(u/ex-message-copy e)
(:init-cnt (ex-data e))
(:s&es (ex-data e)))
{}
(u/ex-cause-copy e))))))))
short : 2 to 3 sec
medium : 50 to 60 sec
(def check-catvec-params (case generative-test-length
:short [ 10 30 10 60000]
:medium [250 30 10 60000]
:long [250 30 10 60000]))
(deftest test-splicing-generative
(testing "splicing (generative)"
(is (try
(apply dv/generative-check-catvec u/extra-checks? check-catvec-params)
(catch js/Error e
(throw (ex-info (dpd/format "%s: %s"
(u/ex-message-copy e)
(:cnts (ex-data e)))
{}
(u/ex-cause-copy e))))))))
This problem reproduction code is from CRRBV-17 ticket :
(def benchmark-size 100000)
it called hundreds or perhaps thousands of times . The fallback
(defn vector-push-f [v my-catvec extra-checks-catvec]
(loop [v v
i 0]
(let [check? (or (zero? (mod i 10000))
(and (> i 99000) (zero? (mod i 100)))
(and (> i 99900) (zero? (mod i 10))))]
(when check?
(print "i=" i " ")
(u/print-optimizer-counts))
(if (< i benchmark-size)
(recur (if check?
(extra-checks-catvec (fv/vector i) v)
(my-catvec (fv/vector i) v))
(inc i))
v))))
Approximate run times for this test on a 2015 MacBook Pro
36 sec - clj 1.10.1 , OpenJDK 11.0.4
465 sec - cljs 1.10.439 , OpenJDK 11.0.4 , Nashorn JS runtime
138 sec - cljs 1.10.238 , OpenJDK 11.0.4 , nodejs 8.10.0
137 sec - cljs 1.10.238 , OpenJDK 11.0.4 ,
(deftest test-crrbv-17
(u/reset-optimizer-counts!)
(is (= (reverse (range benchmark-size))
(vector-push-f (fv/vector) fv/catvec dv/checking-catvec))))
|
31332e6b848eeb79343c288672705287cdb2f815670f981ca0cb30d4448a756c | na4zagin3/satyrographos | mode.ml | open Core
(** SATySFi typesetting mode. *)
type t =
| Pdf
| Text of string
| Generic
[@@deriving sexp, compare, hash, equal]
let of_string_opt = function
| "pdf" -> Some Pdf
| "generic" -> Some Generic
| s ->
String.chop_prefix ~prefix:"text-" s
|> Option.map ~f:(fun m -> Text m)
let of_string_exn str =
of_string_opt str
|> Option.value_exn ~message:(sprintf "Unknown mode: %s" str)
let to_string = function
| Pdf -> "pdf"
| Generic -> "generic"
| Text s ->
"text-" ^ s
let%test_unit "of_string: roundtrip" =
let test a =
match of_string_opt (to_string a) with
| Some b when equal a b ->
()
| b ->
failwithf !"of_string_opt (to_string %{sexp: t}) gets %{sexp: t option}"
a b ()
in
let values = [Pdf; Text "md"; Text "html"; Generic;] in
List.iter values ~f:(fun a ->
test a)
let of_extension_opt = function
| ".satyh" -> Some Pdf
| ".satyg" -> Some Generic
| s ->
String.chop_prefix ~prefix:".satyh-" s
|> Option.map ~f:(fun m -> Text m)
let of_basename_opt basename =
try
"." ^ FilePath.get_extension basename
|> of_extension_opt
with
| _ ->
None
let to_extension = function
| Pdf ->
".satyh"
| Text mode ->
sprintf ".satyh-%s" mode
| Generic ->
".satyg"
let to_output_extension_opt = function
| Pdf ->
Some ".pdf"
| Text mode ->
Some (sprintf ".%s" mode)
| Generic ->
None
let ( <=: ) a b = match a, b with
| Pdf, Pdf -> true
| Text a, Text b when String.equal a b -> true
| Generic, _ -> true
| _, _ -> false
let%test "pdf <=: pdf" =
Pdf <=: Pdf
let%test "pdf <=/: text" =
not (Pdf <=: Text "md")
let%test "pdf <=/: generic" =
not (Pdf <=: Text "md")
let%test "text <=/: pdf" =
not (Text "md" <=: Pdf)
let%test "text md <=/: text text" =
not (Text "md" <=: Text "text")
let%test "text md <=: text md" =
Text "md" <=: Text "md"
let%test "text <=/: generic" =
not (Text "md" <=: Generic)
let%test "generic <=: pdf" =
Generic <=: Pdf
let%test "generic <=: text md" =
Generic <=: Text "md"
let%test "generic <=: generic" =
Generic <=: Generic
let%test_unit "(a <=: b) implies (a <= b)" =
let test a b =
match (a <=: b), compare a b with
| true, n when n >= 0 ->
()
| false, _ ->
()
| t, n ->
let da = sprintf !"%{sexp: t}" a in
let db = sprintf !"%{sexp: t}" b in
failwithf !"(%s <=: %s) is %{sexp:bool} but (%s <=> %s) is %d"
da db t
da db n ()
in
let values = [Pdf; Text "md"; Text "html"; Generic;] in
List.iter values ~f:(fun a ->
List.iter values ~f:(fun b ->
test a b))
| null | https://raw.githubusercontent.com/na4zagin3/satyrographos/9dbccf05138510c977a67c859bbbb48755470c7f/src/satysfi/mode.ml | ocaml | * SATySFi typesetting mode. | open Core
type t =
| Pdf
| Text of string
| Generic
[@@deriving sexp, compare, hash, equal]
let of_string_opt = function
| "pdf" -> Some Pdf
| "generic" -> Some Generic
| s ->
String.chop_prefix ~prefix:"text-" s
|> Option.map ~f:(fun m -> Text m)
let of_string_exn str =
of_string_opt str
|> Option.value_exn ~message:(sprintf "Unknown mode: %s" str)
let to_string = function
| Pdf -> "pdf"
| Generic -> "generic"
| Text s ->
"text-" ^ s
let%test_unit "of_string: roundtrip" =
let test a =
match of_string_opt (to_string a) with
| Some b when equal a b ->
()
| b ->
failwithf !"of_string_opt (to_string %{sexp: t}) gets %{sexp: t option}"
a b ()
in
let values = [Pdf; Text "md"; Text "html"; Generic;] in
List.iter values ~f:(fun a ->
test a)
let of_extension_opt = function
| ".satyh" -> Some Pdf
| ".satyg" -> Some Generic
| s ->
String.chop_prefix ~prefix:".satyh-" s
|> Option.map ~f:(fun m -> Text m)
let of_basename_opt basename =
try
"." ^ FilePath.get_extension basename
|> of_extension_opt
with
| _ ->
None
let to_extension = function
| Pdf ->
".satyh"
| Text mode ->
sprintf ".satyh-%s" mode
| Generic ->
".satyg"
let to_output_extension_opt = function
| Pdf ->
Some ".pdf"
| Text mode ->
Some (sprintf ".%s" mode)
| Generic ->
None
let ( <=: ) a b = match a, b with
| Pdf, Pdf -> true
| Text a, Text b when String.equal a b -> true
| Generic, _ -> true
| _, _ -> false
let%test "pdf <=: pdf" =
Pdf <=: Pdf
let%test "pdf <=/: text" =
not (Pdf <=: Text "md")
let%test "pdf <=/: generic" =
not (Pdf <=: Text "md")
let%test "text <=/: pdf" =
not (Text "md" <=: Pdf)
let%test "text md <=/: text text" =
not (Text "md" <=: Text "text")
let%test "text md <=: text md" =
Text "md" <=: Text "md"
let%test "text <=/: generic" =
not (Text "md" <=: Generic)
let%test "generic <=: pdf" =
Generic <=: Pdf
let%test "generic <=: text md" =
Generic <=: Text "md"
let%test "generic <=: generic" =
Generic <=: Generic
let%test_unit "(a <=: b) implies (a <= b)" =
let test a b =
match (a <=: b), compare a b with
| true, n when n >= 0 ->
()
| false, _ ->
()
| t, n ->
let da = sprintf !"%{sexp: t}" a in
let db = sprintf !"%{sexp: t}" b in
failwithf !"(%s <=: %s) is %{sexp:bool} but (%s <=> %s) is %d"
da db t
da db n ()
in
let values = [Pdf; Text "md"; Text "html"; Generic;] in
List.iter values ~f:(fun a ->
List.iter values ~f:(fun b ->
test a b))
|
7b20c37d238bccd52bad258d0798a7563710b69ac3b75c3bf34541b00f708ece | tdrhq/fiveam-matchers | has-length.lisp | (defpackage :fiveam-matchers/has-length
(:use #:cl
#:fiveam-matchers/core)
(:local-nicknames (#:a #:alexandria))
(:export
#:has-length))
(in-package :fiveam-matchers/has-length)
(defclass has-length (matcher)
((expected :initarg :expected
:accessor expected)))
(defun has-length (expected)
(make-instance 'has-length :expected expected))
(defmethod matchesp ((self has-length) actual)
(= (expected self) (length actual)))
(defmethod describe-self ((self has-length))
`("a sequence that has length " ,(expected self )))
(defmethod describe-mismatch ((self has-length) actual)
`("got a "
,(ecase (type-of actual)
(null
'list)
(cons
'list)
(t (type-of actual)))
" of length " ,(length actual)))
| null | https://raw.githubusercontent.com/tdrhq/fiveam-matchers/79ba2144eee821f7be084d6ba7b90c83994d8ca8/has-length.lisp | lisp | (defpackage :fiveam-matchers/has-length
(:use #:cl
#:fiveam-matchers/core)
(:local-nicknames (#:a #:alexandria))
(:export
#:has-length))
(in-package :fiveam-matchers/has-length)
(defclass has-length (matcher)
((expected :initarg :expected
:accessor expected)))
(defun has-length (expected)
(make-instance 'has-length :expected expected))
(defmethod matchesp ((self has-length) actual)
(= (expected self) (length actual)))
(defmethod describe-self ((self has-length))
`("a sequence that has length " ,(expected self )))
(defmethod describe-mismatch ((self has-length) actual)
`("got a "
,(ecase (type-of actual)
(null
'list)
(cons
'list)
(t (type-of actual)))
" of length " ,(length actual)))
| |
e1cea8df79aa972059446894ed464d600be3ef366bd0554461f88a8bc10e52c8 | exoscale/clojure-kubernetes-client | v2beta1_pods_metric_source.clj | (ns clojure-kubernetes-client.specs.v2beta1-pods-metric-source
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-label-selector :refer :all]
)
(:import (java.io File)))
(declare v2beta1-pods-metric-source-data v2beta1-pods-metric-source)
(def v2beta1-pods-metric-source-data
{
(ds/req :metricName) string?
(ds/opt :selector) v1-label-selector
(ds/req :targetAverageValue) string?
})
(def v2beta1-pods-metric-source
(ds/spec
{:name ::v2beta1-pods-metric-source
:spec v2beta1-pods-metric-source-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v2beta1_pods_metric_source.clj | clojure | (ns clojure-kubernetes-client.specs.v2beta1-pods-metric-source
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-label-selector :refer :all]
)
(:import (java.io File)))
(declare v2beta1-pods-metric-source-data v2beta1-pods-metric-source)
(def v2beta1-pods-metric-source-data
{
(ds/req :metricName) string?
(ds/opt :selector) v1-label-selector
(ds/req :targetAverageValue) string?
})
(def v2beta1-pods-metric-source
(ds/spec
{:name ::v2beta1-pods-metric-source
:spec v2beta1-pods-metric-source-data}))
| |
89cf971760c009c7934f4b60977816223c992b6299996657cda4fcd5e1e4706a | soegaard/metapict | gradient.rkt | #lang racket/base
(require "def.rkt" "structs.rkt" "color.rkt" "pt-vec.rkt"
racket/format racket/match racket/class racket/draw)
;;;
;;; Color Gradients
;;;
;; A color gradient consists of a list of colors and a list of numbers
from 0 to 1 .
(provide gradient ; create a color transisition
linear-gradient ; create a color transition in a direction
create a color transition between two circles
to-linear-gradient ; give color transition a direction
to-radial-gradient ; give color transition a center
convert-gradient ; convert to racket/draw class gradients
color-stops
ball-gradient ; transition from: white to color to black
)
(define (color-stops colors [stops #f])
(when stops
(unless (andmap (λ (x) (and (number? x) (<= 0 x 1))) stops)
(error 'new-gradient (~a "stops must be numbers between 0 and 1, got: "
stops))))
(when stops
(unless (= (length colors) (length stops))
(error
'new-gradient
(~a "the color list and the stop list are not of the same length, got: "
colors stops))))
(def 1/n (/ (max 1 (sub1 (length colors)))))
(def the-stops (for/list ([s (or stops (in-range 0 (+ 1 1/n) 1/n))]) s))
(raw-color-stops colors the-stops))
(define (linear-gradient p0 p1 colors #:stops [ss #f] #:height-factor [hf 1])
(def stops (cond [(raw-gradient? colors) (raw-gradient-color-stops colors)]
[else (color-stops colors ss)]))
(raw-linear-gradient stops p0 p1 hf))
(define (radial-gradient p0 r0 p1 r1 colors #:stops [ss #f] #:height-factor [hf 1])
(def stops (cond [(raw-gradient? colors) (raw-gradient-color-stops colors)]
[else (color-stops colors ss)]))
(raw-radial-gradient stops p0 r0 p1 r1 hf))
(define (convert-gradient g P)
; convert a gradient into a linear-gradient% or a radial-gradient%
; P is a trans from logical coordinates to pattern coordinates
(define (assemble-stops col-stops)
(match col-stops
[(raw-color-stops colors stops)
(for/list ([c colors] [s stops])
(list s (make-color* c)))]))
(match g
[(raw-linear-gradient stops p0 p1 hf)
(defm (pt x0 y0) (P p0))
(defm (pt x1 y1) (P p1))
(def stops* (assemble-stops stops))
(new linear-gradient% [x0 x0] [y0 y0] [x1 x1] [y1 y1]
[stops stops*])]
[(raw-radial-gradient stops p0 r0 p1 r1 hf)
(defm (pt x0 y0) (P p0))
(defm (pt x1 y1) (P p1))
(def pr0 (dist (P (pt r0 0)) (P origo)))
(def pr1 (dist (P (pt r1 0)) (P origo)))
(def stops* (assemble-stops stops))
(new radial-gradient%
[x0 x0] [y0 y0] [r0 pr0]
[x1 x1] [y1 y1] [r1 pr1]
[stops stops*])]
[_ (error 'convert-gradient)]))
(define (gradient colors [stops #f])
(raw-gradient (color-stops colors stops)))
(define (to-linear-gradient g p0 p1 [hf 1])
(defm (raw-gradient cs) g)
(raw-linear-gradient cs p0 p1 hf))
(define (to-radial-gradient g p0 r0 p1 r1 [hf 1])
(defm (raw-gradient cs) g)
(raw-radial-gradient cs p0 r0 p1 r1 hf))
(define (ball-gradient c)
this is the ball gradient from TikZ
; fades from white to c to black
(def stops (map (λ(x) (/ x 50.)) (list 0 9 18 25 50)))
(def colors (list (color-med 0.15 "white" c)
(color-med 0.75 "white" c)
(color-med 0.70 "black" c)
(color-med 0.50 "black" c)
"black"))
(gradient colors stops))
| null | https://raw.githubusercontent.com/soegaard/metapict/47ae265f73cbb92ff3e7bdd61e49f4af17597fdf/metapict/gradient.rkt | racket |
Color Gradients
A color gradient consists of a list of colors and a list of numbers
create a color transisition
create a color transition in a direction
give color transition a direction
give color transition a center
convert to racket/draw class gradients
transition from: white to color to black
convert a gradient into a linear-gradient% or a radial-gradient%
P is a trans from logical coordinates to pattern coordinates
fades from white to c to black | #lang racket/base
(require "def.rkt" "structs.rkt" "color.rkt" "pt-vec.rkt"
racket/format racket/match racket/class racket/draw)
from 0 to 1 .
create a color transition between two circles
color-stops
)
(define (color-stops colors [stops #f])
(when stops
(unless (andmap (λ (x) (and (number? x) (<= 0 x 1))) stops)
(error 'new-gradient (~a "stops must be numbers between 0 and 1, got: "
stops))))
(when stops
(unless (= (length colors) (length stops))
(error
'new-gradient
(~a "the color list and the stop list are not of the same length, got: "
colors stops))))
(def 1/n (/ (max 1 (sub1 (length colors)))))
(def the-stops (for/list ([s (or stops (in-range 0 (+ 1 1/n) 1/n))]) s))
(raw-color-stops colors the-stops))
(define (linear-gradient p0 p1 colors #:stops [ss #f] #:height-factor [hf 1])
(def stops (cond [(raw-gradient? colors) (raw-gradient-color-stops colors)]
[else (color-stops colors ss)]))
(raw-linear-gradient stops p0 p1 hf))
(define (radial-gradient p0 r0 p1 r1 colors #:stops [ss #f] #:height-factor [hf 1])
(def stops (cond [(raw-gradient? colors) (raw-gradient-color-stops colors)]
[else (color-stops colors ss)]))
(raw-radial-gradient stops p0 r0 p1 r1 hf))
(define (convert-gradient g P)
(define (assemble-stops col-stops)
(match col-stops
[(raw-color-stops colors stops)
(for/list ([c colors] [s stops])
(list s (make-color* c)))]))
(match g
[(raw-linear-gradient stops p0 p1 hf)
(defm (pt x0 y0) (P p0))
(defm (pt x1 y1) (P p1))
(def stops* (assemble-stops stops))
(new linear-gradient% [x0 x0] [y0 y0] [x1 x1] [y1 y1]
[stops stops*])]
[(raw-radial-gradient stops p0 r0 p1 r1 hf)
(defm (pt x0 y0) (P p0))
(defm (pt x1 y1) (P p1))
(def pr0 (dist (P (pt r0 0)) (P origo)))
(def pr1 (dist (P (pt r1 0)) (P origo)))
(def stops* (assemble-stops stops))
(new radial-gradient%
[x0 x0] [y0 y0] [r0 pr0]
[x1 x1] [y1 y1] [r1 pr1]
[stops stops*])]
[_ (error 'convert-gradient)]))
(define (gradient colors [stops #f])
(raw-gradient (color-stops colors stops)))
(define (to-linear-gradient g p0 p1 [hf 1])
(defm (raw-gradient cs) g)
(raw-linear-gradient cs p0 p1 hf))
(define (to-radial-gradient g p0 r0 p1 r1 [hf 1])
(defm (raw-gradient cs) g)
(raw-radial-gradient cs p0 r0 p1 r1 hf))
(define (ball-gradient c)
this is the ball gradient from TikZ
(def stops (map (λ(x) (/ x 50.)) (list 0 9 18 25 50)))
(def colors (list (color-med 0.15 "white" c)
(color-med 0.75 "white" c)
(color-med 0.70 "black" c)
(color-med 0.50 "black" c)
"black"))
(gradient colors stops))
|
dbbc3196b987c53c56b27f92d8365cdcdcbde0249f77fb4028d6c6cf971dc2cf | basho/riak_kv | riak_kv_vnode_status_mgr.erl | %% -------------------------------------------------------------------
%%
%% riak_kv_vnode_status_mgr: Manages persistence of vnode status data
like vnodeid , vnode op counter etc
%%
Copyright ( c ) 2007 - 2015 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(riak_kv_vnode_status_mgr).
-behaviour(gen_server).
-ifdef(TEST).
-compile([export_all, nowarn_export_all]).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
%% API
-export([start_link/3,
get_vnodeid_and_counter/2,
lease_counter/2,
clear_vnodeid/1,
status/1,
stop/1]).
-ifdef(EQC).
-export([test_link/4]).
-endif.
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
only 32 bits per counter , when you hit that , get a new vnode i d
-define(MAX_CNTR, 4294967295).
version 2 includes epoch counter , version 1 does not
-define(VNODE_STATUS_VERSION, 2).
-record(state, {
%% vnode status directory
status_file :: undefined | file:filename(),
%% vnode index
index :: undefined | non_neg_integer(),
%% The vnode pid this mgr belongs to
vnode_pid :: undefined | pid(),
%% killswitch for counter
version = ?VNODE_STATUS_VERSION :: 1 | 2
}).
-type status() :: orddict:orddict().
-type init_args() :: {VnodePid :: pid(),
LeaseSize :: non_neg_integer(),
UseEpochCounter :: boolean(),
Path :: string()|undefined}.
-type blocking_req() :: clear | {vnodeid, LeaseSize :: non_neg_integer()}.
longer than the call default of 5 seconds , shorter than infinity .
20 seconds
-define(FLUSH_TIMEOUT_MILLIS, 20000).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% test_link/4 to be used only in tests in order to override the path
%%
%% @end
%%--------------------------------------------------------------------
-spec start_link(pid(), non_neg_integer(), boolean())
-> {ok, pid()} | {error, term()}.
start_link(VnodePid, Index, UseEpochCounter) ->
gen_server:start_link(?MODULE, {VnodePid, Index, UseEpochCounter, undefined}, []).
-ifdef(EQC).
-spec test_link(pid(), non_neg_integer(), boolean(), string())
-> {ok, pid()} | {error, term()}.
test_link(VnodePid, Index, UseEpochCounter, Path) ->
gen_server:start_link(?MODULE, {VnodePid, Index, UseEpochCounter, Path}, []).
-endif.
%%--------------------------------------------------------------------
@doc You ca n't ever have a ` LeaseSize ' greater than the maximum 32
%% bit unsigned integer, since that would involve breaking the
%% invariant of an vnodeid+cntr pair being used more than once to
start a key epoch , the counter is encoded in a 32 bit binary , and
going over 4billion+etc would wrap around and re - use integers .
%%
%% @end
%%--------------------------------------------------------------------
-spec get_vnodeid_and_counter(pid(), non_neg_integer()) ->
{ok, {VnodeId :: binary(),
Counter :: non_neg_integer(),
LeaseSize :: non_neg_integer()}}.
get_vnodeid_and_counter(Pid, LeaseSize) when is_integer(LeaseSize),
LeaseSize > 0 ->
gen_server:call(Pid, {vnodeid, LeaseSize}, ?FLUSH_TIMEOUT_MILLIS).
%%--------------------------------------------------------------------
@doc Asynchronously lease increments for a counter . ` Pid ' is the
server pid , and ` ' is the number of increments to lease . A
` ' of 10,000 means that a vnode can handle 10,000 new key
%% epochs before asking for a new counter. The trade-off here is
%% between the frequency of flushing and the speed with which a
frequently crashing vnode burns through the 32bit integer space ,
%% thus requiring a new vnodeid. The calling vnode should handle the
response message of ` { counter_lease , { From : : pid ( ) , VnodeId : :
binary ( ) , NewLease : : non_neg_integer ( ) } } '
%%
%% @end
%%--------------------------------------------------------------------
-spec lease_counter(pid(), non_neg_integer()) -> ok.
lease_counter(Pid, LeaseSize) when is_integer(LeaseSize),
LeaseSize > 0 ->
gen_server:cast(Pid, {lease, LeaseSize}).
%%--------------------------------------------------------------------
%% @doc Blocking call to remove the vnode id and counter and from
%% disk. Used when a vnode has finished and will not act again.
%%
%% @end
%%--------------------------------------------------------------------
-spec clear_vnodeid(pid()) -> {ok, cleared}.
clear_vnodeid(Pid) ->
gen_server:call(Pid, clear, ?FLUSH_TIMEOUT_MILLIS).
status(Pid) ->
gen_server:call(Pid, status).
stop(Pid) ->
gen_server:call(Pid, stop).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private @doc Initializes the server , Init Args must be ` { VnodePid
%% :: pid(), Index :: non_neg_integer(), UseEpochCounter ::
boolean ( ) } ' where the first element is the pid of the vnode this
manager works for , and the second is the vnode 's index / partition
number ( used for locating the status file . ) The third is a kill
%% switch for the counter.
%%
%% @end
%%--------------------------------------------------------------------
-spec init(Args :: init_args()) -> {ok, #state{}}.
init({VnodePid, Index, UseEpochCounter, Path}) ->
Version = version(UseEpochCounter),
StatusFilename = vnode_status_filename(Index, Path),
{ok, #state{status_file=StatusFilename,
index=Index,
vnode_pid=VnodePid,
version=Version
}
}.
@private determine if we use a per epcoch counter / lease scheme or
%% not
-spec version(boolean()) -> 1 | 2.
version(_UseEpochCounter=true) ->
?VNODE_STATUS_VERSION;
version(_UseEpochCounter=false) ->
1.
%%--------------------------------------------------------------------
@private handle calls
%%--------------------------------------------------------------------
-spec handle_call(blocking_req(), {pid(), term()}, #state{}) ->
{reply, {ok, {VnodeId :: binary(),
Counter :: non_neg_integer(),
LeaseTo :: non_neg_integer()}},
#state{}}.
handle_call({vnodeid, LeaseSize}, _From, State) ->
#state{status_file=File, version=Version} = State,
{ok, Status} = read_vnode_status(File),
%% Note: this is subtle change to this function, now it will
%% _always_ trigger a store of the new status, since the lease
%% will always be moving upwards. A vnode that starts, and
%% crashes, and starts, and crashes over and over will burn
%% through a lot of counter (or vnode ids (if the leases are very
%% large.))
{Counter, LeaseTo, VnodeId, Status2} = get_counter_lease(LeaseSize, Status, Version),
ok = write_vnode_status(Status2, File, Version),
Res = {ok, {VnodeId, Counter, LeaseTo}},
{reply, Res, State};
handle_call(clear, _From, State) ->
#state{status_file=File, version=Version} = State,
{ok, Status} = read_vnode_status(File),
Status2 = orddict:erase(counter, orddict:erase(vnodeid, Status)),
ok = write_vnode_status(Status2, File, Version),
{reply, {ok, cleared}, State};
handle_call(status, _From, State) ->
#state{status_file=File} = State,
{ok, Status} = read_vnode_status(File),
{reply, {ok, Status}, State};
handle_call(stop, _From, State) ->
{stop, normal, ok, State}.
%%--------------------------------------------------------------------
@private
%%--------------------------------------------------------------------
-spec handle_cast({lease, non_neg_integer()}, #state{}) ->
{noreply, #state{}}.
handle_cast({lease, LeaseSize}, State) ->
#state{status_file=File, vnode_pid=Pid} = State,
{ok, Status} = read_vnode_status(File),
{_Counter, LeaseTo, VnodeId, UpdStatus} = get_counter_lease(LeaseSize, Status, ?VNODE_STATUS_VERSION),
ok = write_vnode_status(UpdStatus, File, ?VNODE_STATUS_VERSION),
Pid ! {counter_lease, {self(), VnodeId, LeaseTo}},
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
@private monotonically advance the counter lease . Guarded at
%% interface to server.
-spec get_counter_lease(non_neg_integer(), status(), Version :: 1 | 2) ->
{PreviousLease :: non_neg_integer(),
NewLease :: non_neg_integer(),
VnodeId :: binary(),
Status :: status()}.
get_counter_lease(_LeaseSize, Status, 1) ->
case get_status_item(vnodeid, Status, undefined) of
undefined ->
{VnodeId, Status2} = assign_vnodeid(os:timestamp(),
riak_core_nodeid:get(),
Status),
{0, 0, VnodeId, Status2};
ID ->
{0, 0, ID, Status}
end;
get_counter_lease(LeaseSize0, Status, ?VNODE_STATUS_VERSION) ->
PrevLease = get_status_item(counter, Status, undefined),
VnodeId0 = get_status_item(vnodeid, Status, undefined),
Version = get_status_item(version, Status, 1),
%% A lease of ?MAX_CNTR essentially means a new vnodeid every time
%% you start the vnode. This caps the lease size (silently.)
LeaseSize = min(LeaseSize0, ?MAX_CNTR),
case {Version, PrevLease, VnodeId0} of
{_, _, undefined} ->
new_id_and_counter(Status, LeaseSize);
{1, undefined, ID} ->
Upgrade , no counter existed , do n't force a new vnodeid
%% Is there still some edge here, with UP->DOWN->UP grade?
%% We think not. Downgrade would keep the same vnode file,
and the pre - epochal vnodeid would be used . Upgrade
%% again picks up the same counter. Or, if the file is
%% re-written while downgraded, it can only be for a new
%% ID, so still safe.
{0, LeaseSize, ID, orddict:store(counter, LeaseSize, Status)};
{?VNODE_STATUS_VERSION, undefined, _ID} ->
%% Lost counter? Wha? New ID
new_id_and_counter(Status, LeaseSize);
{_AnyVersion, Leased, _ID} when Leased + LeaseSize > ?MAX_CNTR ->
Since ` ' must be > 0 , there is no edge here
%% where last lease size was ?MAX_CNTR and new lease size
%% is 0.
new_id_and_counter(Status, LeaseSize);
{_AnyVersion, Leased, ID} ->
NewLease = Leased + LeaseSize,
{PrevLease, NewLease, ID, orddict:store(counter, NewLease, Status)}
end.
@private generate a new ID and assign a new counter , and lease up
to ` ' .
-spec new_id_and_counter(status(), non_neg_integer()) ->
{non_neg_integer(), non_neg_integer(), binary(), status()}.
new_id_and_counter(Status, LeaseSize) ->
{VnodeId, Status2} = assign_vnodeid(os:timestamp(),
riak_core_nodeid:get(),
Status),
{0, LeaseSize, VnodeId, orddict:store(counter, LeaseSize, Status2)}.
@private Provide a ` proplists : get_value/3 ' like function for status
%% orddict.
-spec get_status_item(term(), status(), term()) -> term().
get_status_item(Item, Status, Default) ->
case orddict:find(Item, Status) of
{ok, Val} ->
Val;
error ->
Default
end.
@private generate a file name for the vnode status , and ensure the
%% path to exists.
-spec vnode_status_filename(non_neg_integer(), string()|undefined) -> file:filename().
vnode_status_filename(Index, Path) ->
P_DataDir =
case Path of
undefined ->
app_helper:get_env(riak_core, platform_data_dir);
Path ->
Path
end,
VnodeStatusDir = app_helper:get_env(riak_kv, vnode_status,
filename:join(P_DataDir, "kv_vnode")),
Filename = filename:join(VnodeStatusDir, integer_to_list(Index)),
ok = filelib:ensure_dir(Filename),
Filename.
@private Assign a unique vnodeid , making sure the timestamp is
%% unique by incrementing into the future if necessary.
-spec assign_vnodeid(erlang:timestamp(), binary(), status()) ->
{binary(), status()}.
assign_vnodeid(Now, NodeId, Status) ->
{_Mega, Sec, Micro} = Now,
NowEpoch = 1000000*Sec + Micro,
LastVnodeEpoch = get_status_item(last_epoch, Status, 0),
VnodeEpoch = erlang:max(NowEpoch, LastVnodeEpoch+1),
VnodeId = <<NodeId/binary, VnodeEpoch:32/integer>>,
UpdStatus = orddict:store(vnodeid, VnodeId,
orddict:store(last_epoch, VnodeEpoch, Status)),
{VnodeId, UpdStatus}.
@private read the vnode status from ` File ' . Returns ` { ok ,
%% status()}' or `{error, Reason}'. If the file does not exist, an
%% empty status is returned.
-spec read_vnode_status(file:filename()) -> {ok, status()}.
read_vnode_status(File) ->
try file:consult(File) of
{ok, [Status]} when is_list(Status) ->
{ok, orddict:from_list(Status)};
{error, enoent} ->
%% doesn't exist? same as empty
{ok, orddict:new()};
{error, {_Offset, file_io_server, invalid_unicode}} ->
case override_consult(File) of
{ok, [Status]} when is_list(Status) ->
{ok, orddict:from_list(Status)};
Er ->
%% "corruption" error, some other posix error, unreadable:
%% Log, and start anew
lager:error("Failed to override_consult vnode-status file ~p ~p", [File, Er]),
{ok, orddict:new()}
end;
Er ->
%% "corruption" error, some other posix error, unreadable:
%% Log, and start anew
lager:error("Failed to consult vnode-status file ~p ~p", [File, Er]),
{ok, orddict:new()}
catch C:T ->
%% consult threw
lager:error("Failed to consult vnode-status file ~p ~p ~p", [File, C, T]),
{ok, orddict:new()}
end.
-spec override_consult(file:filename()) -> {ok, list()}.
@private In OTP 20 file will not read unicode written in OTP16 unless we
%% read it as latin-1
override_consult(File) ->
case file:open(File, [read]) of
{ok, Fd} ->
R = consult_stream(Fd),
_ = file:close(Fd),
R;
Error ->
Error
end.
-spec consult_stream(file:io_device()) -> {ok, list()}|{error, any()}.
@private read unicode stream as latin-1
%% -consult-error-1-file-io-server-invalid-unicode-with-pre-r17-files-in-r17-td4712042.html
consult_stream(Fd) ->
_ = epp:set_encoding(Fd, latin1),
consult_stream(Fd, 1, []).
consult_stream(Fd, Line, Acc) ->
case io:read(Fd, '', Line) of
{ok,Term,EndLine} ->
consult_stream(Fd, EndLine, [Term|Acc]);
{error,Error,_Line} ->
{error,Error};
{eof,_Line} ->
{ok,lists:reverse(Acc)}
end.
-ifdef(TEST).
@private do n't make testers suffer through the fsync time
-spec write_vnode_status(status(), file:filename(), Version :: 1 | 2) -> ok.
write_vnode_status(Status, File, Version) ->
VersionedStatus = orddict:store(version, Version, Status),
ok = file:write_file(File, io_lib:format("~w.", [orddict:to_list(VersionedStatus)])).
-else.
@private write the vnode status . This is why the file is guarded by
%% the process. This file should have no concurrent access, and MUST
%% not be written at any other place/time in the system.
-spec write_vnode_status(status(), file:filename(), Version :: 1 | 2) -> ok.
write_vnode_status(Status, File, Version) ->
VersionedStatus = orddict:store(version, Version, Status),
ok = riak_core_util:replace_file(File, io_lib:format("~w.", [orddict:to_list(VersionedStatus)])).
-endif.
-ifdef(TEST).
%% What if we go v2->v1->v2? kv1142 suggests there is an error
v2_v1_v2_test() ->
Res = get_counter_lease(10000, [{counter, 10000}, {vnodeid, <<"hi!">>}, {version, 1}], 2),
?assertMatch({10000, 20000, <<"hi!">>, _Stat2}, Res).
Check assigning a vnodeid twice in the same second
assign_vnodeid_restart_same_ts_test() ->
TS=(224520 * 100000 ) + 343446
as unsigned net - order int < < 70,116,143,150 > >
NodeId = <<1, 2, 3, 4>>,
{Vid1, Status1} = assign_vnodeid(Now1, NodeId, []),
?assertEqual(<<1, 2, 3, 4, 70, 116, 143, 150>>, Vid1),
%% Simulate clear
Status2 = orddict:erase(vnodeid, Status1),
Reassign
{Vid2, _Status3} = assign_vnodeid(Now2, NodeId, Status2),
?assertEqual(<<1, 2, 3, 4, 70, 116, 143, 151>>, Vid2).
Check assigning a vnodeid with a later date , but less than 11.57
%% days later!
assign_vnodeid_restart_later_ts_test() ->
< < 70,116,143,150 > >
Now2 = {1000,224520,343546}, %% <<70,116,143,250>>
NodeId = <<1, 2, 3, 4>>,
{Vid1, Status1} = assign_vnodeid(Now1, NodeId, []),
?assertEqual(<<1, 2, 3, 4, 70,116,143,150>>, Vid1),
%% Simulate clear
Status2 = orddict:erase(vnodeid, Status1),
Reassign
{Vid2, _Status3} = assign_vnodeid(Now2, NodeId, Status2),
?assertEqual(<<1, 2, 3, 4, 70,116,143,250>>, Vid2).
%% Check assigning a vnodeid with a earlier date - just in case of clock skew
assign_vnodeid_restart_earlier_ts_test() ->
Now1 = {1000,224520,343546}, %% <<70,116,143,250>>
< < 70,116,143,150 > >
NodeId = <<1, 2, 3, 4>>,
{Vid1, Status1} = assign_vnodeid(Now1, NodeId, []),
?assertEqual(<<1, 2, 3, 4, 70,116,143,250>>, Vid1),
%% Simulate clear
Status2 = orddict:erase(vnodeid, Status1),
Reassign
Should be greater than last offered - which is the 2mil timestamp
{Vid2, _Status3} = assign_vnodeid(Now2, NodeId, Status2),
?assertEqual(<<1, 2, 3, 4, 70,116,143,251>>, Vid2).
-ifndef(GITHUBEXCLUDE).
%% Test
vnode_status_test_() ->
{setup,
fun() ->
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
filelib:ensure_dir(TestPath ++ "/.test"),
?cmd("chmod u+rwx " ++ TestPath),
?cmd("rm -rf " ++ TestPath),
application:set_env(riak_kv, vnode_status, TestPath),
ok
end,
fun(_) ->
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
application:unset_env(riak_kv, vnode_status),
?cmd("chmod u+rwx " ++ TestPath),
?cmd("rm -rf " ++ TestPath),
ok
end,
[?_test(begin % initial create failure
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
?cmd("rm -rf " ++ TestPath ++ " || true"),
?cmd("mkdir " ++ TestPath),
?cmd("chmod -w " ++ TestPath),
Index = 0,
File = vnode_status_filename(Index, TestPath),
R =
try
write_vnode_status(orddict:new(), File, ?VNODE_STATUS_VERSION)
catch _Err:{badmatch, Reason} ->
Reason
end,
?assertEqual({error, eacces}, R)
end),
?_test(begin % create successfully
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
?cmd("chmod +w " ++ TestPath),
Index = 0,
File = vnode_status_filename(Index, TestPath),
?assertEqual(ok, write_vnode_status([{created, true}], File, ?VNODE_STATUS_VERSION))
end),
?_test(begin % update successfully
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
Index = 0,
File = vnode_status_filename(Index, TestPath),
{ok, [{created, true}, {version, 2}]} = read_vnode_status(File),
?assertEqual(ok, write_vnode_status([{updated, true}], File, ?VNODE_STATUS_VERSION))
end),
?_test(begin % update failure
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
?cmd("chmod -r " ++ TestPath ++ "/0"),
Index = 0,
File = vnode_status_filename(Index, TestPath),
?assertEqual({ok, []}, read_vnode_status(File))
end
)
]}.
-endif.
-ifdef(EQC).
-define(NUMTESTS, 1000).
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) ->
io:format(user, Str, Args) end, P)).
-define(TEST_FILE, "kv_vnode_status_eqc/vnode_status_test.file").
-define(VALID_STATUS, [{vnodeid, <<"vnodeid123">>}]).
%% note this was generated by a r16, and will be written in the r16
%% style of io_lib:format("~p.", [?R16_STATUS]).
-define(R16_STATUS, [{vnodeid,<<"'êͧL÷=d">>}]).
%% Properties
@private any binary we write , we can read . ( Try changing ~w . to
%% ~p. in `write_vnode_status/3' for an example of _why_ this test).
prop_any_bin_consult() ->
?SETUP(fun() ->
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
file:delete(TestFile),
fun() -> file:delete(TestFile) end
end,
?FORALL(Bin, binary(),
begin
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
Status = [{version, 1}, {vnodeid, Bin}],
ok = write_vnode_status(Status, TestFile, 1),
equals({ok, Status}, read_vnode_status(TestFile))
end)).
@private regardless of the contents of the vnode status file , we
%% always get a status result. If the file is valid, we get its
%% contents, if not, we get a blank status, if there is no file we get
%% a blank status.
prop_any_file_status() ->
?SETUP(fun() ->
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
file:delete(TestFile),
fun() -> file:delete(TestFile) end
end,
?FORALL({Type, _StatusFile},
?LET(Type, oneof([r16, valid, absent, corrupt]), {Type, gen_status_file(Type)}),
begin
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
{ok, Status} = read_vnode_status(TestFile),
case Type of
valid ->
There is a vnodeid
is_binary(orddict:fetch(vnodeid, Status));
r16 ->
There is a vnodeid
is_binary(orddict:fetch(vnodeid, Status));
corrupt ->
%% empty
is_list(Status) andalso equals(error, orddict:find(vnodeid, Status));
absent ->
%% empty
is_list(Status) andalso equals(error, orddict:find(vnodeid, Status))
end
end)).
gen_status_file(Type) ->
gen_status_file(riak_kv_test_util:get_test_dir(?TEST_FILE), Type).
@private generate the file on disk TBQH , this might be fine as a
%% straight up eunit tests, given how little random there really is
%% here for quickcheck
gen_status_file(TestFile, r16) ->
ok = riak_core_util:replace_file(TestFile, io_lib:format("~p.", [?R16_STATUS])),
TestFile;
gen_status_file(TestFile, absent) ->
file:delete(TestFile),
TestFile;
gen_status_file(TestFile, corrupt) ->
?LET(Bin, binary(),
begin
file:write_file(TestFile, Bin),
TestFile
end);
gen_status_file(TestFile, valid) ->
?LET(VnodeId, binary(),
begin
ok = write_vnode_status([{vnodeid, VnodeId}], TestFile, 1),
TestFile
end).
-endif.
-endif.
| null | https://raw.githubusercontent.com/basho/riak_kv/aeef1591704d32230b773d952a2f1543cbfa1889/src/riak_kv_vnode_status_mgr.erl | erlang | -------------------------------------------------------------------
riak_kv_vnode_status_mgr: Manages persistence of vnode status data
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
API
gen_server callbacks
vnode status directory
vnode index
The vnode pid this mgr belongs to
killswitch for counter
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
test_link/4 to be used only in tests in order to override the path
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
bit unsigned integer, since that would involve breaking the
invariant of an vnodeid+cntr pair being used more than once to
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
epochs before asking for a new counter. The trade-off here is
between the frequency of flushing and the speed with which a
thus requiring a new vnodeid. The calling vnode should handle the
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc Blocking call to remove the vnode id and counter and from
disk. Used when a vnode has finished and will not act again.
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
:: pid(), Index :: non_neg_integer(), UseEpochCounter ::
switch for the counter.
@end
--------------------------------------------------------------------
not
--------------------------------------------------------------------
--------------------------------------------------------------------
Note: this is subtle change to this function, now it will
_always_ trigger a store of the new status, since the lease
will always be moving upwards. A vnode that starts, and
crashes, and starts, and crashes over and over will burn
through a lot of counter (or vnode ids (if the leases are very
large.))
--------------------------------------------------------------------
--------------------------------------------------------------------
===================================================================
===================================================================
interface to server.
A lease of ?MAX_CNTR essentially means a new vnodeid every time
you start the vnode. This caps the lease size (silently.)
Is there still some edge here, with UP->DOWN->UP grade?
We think not. Downgrade would keep the same vnode file,
again picks up the same counter. Or, if the file is
re-written while downgraded, it can only be for a new
ID, so still safe.
Lost counter? Wha? New ID
where last lease size was ?MAX_CNTR and new lease size
is 0.
orddict.
path to exists.
unique by incrementing into the future if necessary.
status()}' or `{error, Reason}'. If the file does not exist, an
empty status is returned.
doesn't exist? same as empty
"corruption" error, some other posix error, unreadable:
Log, and start anew
"corruption" error, some other posix error, unreadable:
Log, and start anew
consult threw
read it as latin-1
-consult-error-1-file-io-server-invalid-unicode-with-pre-r17-files-in-r17-td4712042.html
the process. This file should have no concurrent access, and MUST
not be written at any other place/time in the system.
What if we go v2->v1->v2? kv1142 suggests there is an error
Simulate clear
days later!
<<70,116,143,250>>
Simulate clear
Check assigning a vnodeid with a earlier date - just in case of clock skew
<<70,116,143,250>>
Simulate clear
Test
initial create failure
create successfully
update successfully
update failure
note this was generated by a r16, and will be written in the r16
style of io_lib:format("~p.", [?R16_STATUS]).
Properties
~p. in `write_vnode_status/3' for an example of _why_ this test).
always get a status result. If the file is valid, we get its
contents, if not, we get a blank status, if there is no file we get
a blank status.
empty
empty
straight up eunit tests, given how little random there really is
here for quickcheck | like vnodeid , vnode op counter etc
Copyright ( c ) 2007 - 2015 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_kv_vnode_status_mgr).
-behaviour(gen_server).
-ifdef(TEST).
-compile([export_all, nowarn_export_all]).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([start_link/3,
get_vnodeid_and_counter/2,
lease_counter/2,
clear_vnodeid/1,
status/1,
stop/1]).
-ifdef(EQC).
-export([test_link/4]).
-endif.
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
only 32 bits per counter , when you hit that , get a new vnode i d
-define(MAX_CNTR, 4294967295).
version 2 includes epoch counter , version 1 does not
-define(VNODE_STATUS_VERSION, 2).
-record(state, {
status_file :: undefined | file:filename(),
index :: undefined | non_neg_integer(),
vnode_pid :: undefined | pid(),
version = ?VNODE_STATUS_VERSION :: 1 | 2
}).
-type status() :: orddict:orddict().
-type init_args() :: {VnodePid :: pid(),
LeaseSize :: non_neg_integer(),
UseEpochCounter :: boolean(),
Path :: string()|undefined}.
-type blocking_req() :: clear | {vnodeid, LeaseSize :: non_neg_integer()}.
longer than the call default of 5 seconds , shorter than infinity .
20 seconds
-define(FLUSH_TIMEOUT_MILLIS, 20000).
-spec start_link(pid(), non_neg_integer(), boolean())
-> {ok, pid()} | {error, term()}.
start_link(VnodePid, Index, UseEpochCounter) ->
gen_server:start_link(?MODULE, {VnodePid, Index, UseEpochCounter, undefined}, []).
-ifdef(EQC).
-spec test_link(pid(), non_neg_integer(), boolean(), string())
-> {ok, pid()} | {error, term()}.
test_link(VnodePid, Index, UseEpochCounter, Path) ->
gen_server:start_link(?MODULE, {VnodePid, Index, UseEpochCounter, Path}, []).
-endif.
@doc You ca n't ever have a ` LeaseSize ' greater than the maximum 32
start a key epoch , the counter is encoded in a 32 bit binary , and
going over 4billion+etc would wrap around and re - use integers .
-spec get_vnodeid_and_counter(pid(), non_neg_integer()) ->
{ok, {VnodeId :: binary(),
Counter :: non_neg_integer(),
LeaseSize :: non_neg_integer()}}.
get_vnodeid_and_counter(Pid, LeaseSize) when is_integer(LeaseSize),
LeaseSize > 0 ->
gen_server:call(Pid, {vnodeid, LeaseSize}, ?FLUSH_TIMEOUT_MILLIS).
@doc Asynchronously lease increments for a counter . ` Pid ' is the
server pid , and ` ' is the number of increments to lease . A
` ' of 10,000 means that a vnode can handle 10,000 new key
frequently crashing vnode burns through the 32bit integer space ,
response message of ` { counter_lease , { From : : pid ( ) , VnodeId : :
binary ( ) , NewLease : : non_neg_integer ( ) } } '
-spec lease_counter(pid(), non_neg_integer()) -> ok.
lease_counter(Pid, LeaseSize) when is_integer(LeaseSize),
LeaseSize > 0 ->
gen_server:cast(Pid, {lease, LeaseSize}).
-spec clear_vnodeid(pid()) -> {ok, cleared}.
clear_vnodeid(Pid) ->
gen_server:call(Pid, clear, ?FLUSH_TIMEOUT_MILLIS).
status(Pid) ->
gen_server:call(Pid, status).
stop(Pid) ->
gen_server:call(Pid, stop).
@private @doc Initializes the server , Init Args must be ` { VnodePid
boolean ( ) } ' where the first element is the pid of the vnode this
manager works for , and the second is the vnode 's index / partition
number ( used for locating the status file . ) The third is a kill
-spec init(Args :: init_args()) -> {ok, #state{}}.
init({VnodePid, Index, UseEpochCounter, Path}) ->
Version = version(UseEpochCounter),
StatusFilename = vnode_status_filename(Index, Path),
{ok, #state{status_file=StatusFilename,
index=Index,
vnode_pid=VnodePid,
version=Version
}
}.
@private determine if we use a per epcoch counter / lease scheme or
-spec version(boolean()) -> 1 | 2.
version(_UseEpochCounter=true) ->
?VNODE_STATUS_VERSION;
version(_UseEpochCounter=false) ->
1.
@private handle calls
-spec handle_call(blocking_req(), {pid(), term()}, #state{}) ->
{reply, {ok, {VnodeId :: binary(),
Counter :: non_neg_integer(),
LeaseTo :: non_neg_integer()}},
#state{}}.
handle_call({vnodeid, LeaseSize}, _From, State) ->
#state{status_file=File, version=Version} = State,
{ok, Status} = read_vnode_status(File),
{Counter, LeaseTo, VnodeId, Status2} = get_counter_lease(LeaseSize, Status, Version),
ok = write_vnode_status(Status2, File, Version),
Res = {ok, {VnodeId, Counter, LeaseTo}},
{reply, Res, State};
handle_call(clear, _From, State) ->
#state{status_file=File, version=Version} = State,
{ok, Status} = read_vnode_status(File),
Status2 = orddict:erase(counter, orddict:erase(vnodeid, Status)),
ok = write_vnode_status(Status2, File, Version),
{reply, {ok, cleared}, State};
handle_call(status, _From, State) ->
#state{status_file=File} = State,
{ok, Status} = read_vnode_status(File),
{reply, {ok, Status}, State};
handle_call(stop, _From, State) ->
{stop, normal, ok, State}.
@private
-spec handle_cast({lease, non_neg_integer()}, #state{}) ->
{noreply, #state{}}.
handle_cast({lease, LeaseSize}, State) ->
#state{status_file=File, vnode_pid=Pid} = State,
{ok, Status} = read_vnode_status(File),
{_Counter, LeaseTo, VnodeId, UpdStatus} = get_counter_lease(LeaseSize, Status, ?VNODE_STATUS_VERSION),
ok = write_vnode_status(UpdStatus, File, ?VNODE_STATUS_VERSION),
Pid ! {counter_lease, {self(), VnodeId, LeaseTo}},
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
@private monotonically advance the counter lease . Guarded at
-spec get_counter_lease(non_neg_integer(), status(), Version :: 1 | 2) ->
{PreviousLease :: non_neg_integer(),
NewLease :: non_neg_integer(),
VnodeId :: binary(),
Status :: status()}.
get_counter_lease(_LeaseSize, Status, 1) ->
case get_status_item(vnodeid, Status, undefined) of
undefined ->
{VnodeId, Status2} = assign_vnodeid(os:timestamp(),
riak_core_nodeid:get(),
Status),
{0, 0, VnodeId, Status2};
ID ->
{0, 0, ID, Status}
end;
get_counter_lease(LeaseSize0, Status, ?VNODE_STATUS_VERSION) ->
PrevLease = get_status_item(counter, Status, undefined),
VnodeId0 = get_status_item(vnodeid, Status, undefined),
Version = get_status_item(version, Status, 1),
LeaseSize = min(LeaseSize0, ?MAX_CNTR),
case {Version, PrevLease, VnodeId0} of
{_, _, undefined} ->
new_id_and_counter(Status, LeaseSize);
{1, undefined, ID} ->
Upgrade , no counter existed , do n't force a new vnodeid
and the pre - epochal vnodeid would be used . Upgrade
{0, LeaseSize, ID, orddict:store(counter, LeaseSize, Status)};
{?VNODE_STATUS_VERSION, undefined, _ID} ->
new_id_and_counter(Status, LeaseSize);
{_AnyVersion, Leased, _ID} when Leased + LeaseSize > ?MAX_CNTR ->
Since ` ' must be > 0 , there is no edge here
new_id_and_counter(Status, LeaseSize);
{_AnyVersion, Leased, ID} ->
NewLease = Leased + LeaseSize,
{PrevLease, NewLease, ID, orddict:store(counter, NewLease, Status)}
end.
@private generate a new ID and assign a new counter , and lease up
to ` ' .
-spec new_id_and_counter(status(), non_neg_integer()) ->
{non_neg_integer(), non_neg_integer(), binary(), status()}.
new_id_and_counter(Status, LeaseSize) ->
{VnodeId, Status2} = assign_vnodeid(os:timestamp(),
riak_core_nodeid:get(),
Status),
{0, LeaseSize, VnodeId, orddict:store(counter, LeaseSize, Status2)}.
@private Provide a ` proplists : get_value/3 ' like function for status
-spec get_status_item(term(), status(), term()) -> term().
get_status_item(Item, Status, Default) ->
case orddict:find(Item, Status) of
{ok, Val} ->
Val;
error ->
Default
end.
@private generate a file name for the vnode status , and ensure the
-spec vnode_status_filename(non_neg_integer(), string()|undefined) -> file:filename().
vnode_status_filename(Index, Path) ->
P_DataDir =
case Path of
undefined ->
app_helper:get_env(riak_core, platform_data_dir);
Path ->
Path
end,
VnodeStatusDir = app_helper:get_env(riak_kv, vnode_status,
filename:join(P_DataDir, "kv_vnode")),
Filename = filename:join(VnodeStatusDir, integer_to_list(Index)),
ok = filelib:ensure_dir(Filename),
Filename.
@private Assign a unique vnodeid , making sure the timestamp is
-spec assign_vnodeid(erlang:timestamp(), binary(), status()) ->
{binary(), status()}.
assign_vnodeid(Now, NodeId, Status) ->
{_Mega, Sec, Micro} = Now,
NowEpoch = 1000000*Sec + Micro,
LastVnodeEpoch = get_status_item(last_epoch, Status, 0),
VnodeEpoch = erlang:max(NowEpoch, LastVnodeEpoch+1),
VnodeId = <<NodeId/binary, VnodeEpoch:32/integer>>,
UpdStatus = orddict:store(vnodeid, VnodeId,
orddict:store(last_epoch, VnodeEpoch, Status)),
{VnodeId, UpdStatus}.
@private read the vnode status from ` File ' . Returns ` { ok ,
-spec read_vnode_status(file:filename()) -> {ok, status()}.
read_vnode_status(File) ->
try file:consult(File) of
{ok, [Status]} when is_list(Status) ->
{ok, orddict:from_list(Status)};
{error, enoent} ->
{ok, orddict:new()};
{error, {_Offset, file_io_server, invalid_unicode}} ->
case override_consult(File) of
{ok, [Status]} when is_list(Status) ->
{ok, orddict:from_list(Status)};
Er ->
lager:error("Failed to override_consult vnode-status file ~p ~p", [File, Er]),
{ok, orddict:new()}
end;
Er ->
lager:error("Failed to consult vnode-status file ~p ~p", [File, Er]),
{ok, orddict:new()}
catch C:T ->
lager:error("Failed to consult vnode-status file ~p ~p ~p", [File, C, T]),
{ok, orddict:new()}
end.
-spec override_consult(file:filename()) -> {ok, list()}.
@private In OTP 20 file will not read unicode written in OTP16 unless we
override_consult(File) ->
case file:open(File, [read]) of
{ok, Fd} ->
R = consult_stream(Fd),
_ = file:close(Fd),
R;
Error ->
Error
end.
-spec consult_stream(file:io_device()) -> {ok, list()}|{error, any()}.
@private read unicode stream as latin-1
consult_stream(Fd) ->
_ = epp:set_encoding(Fd, latin1),
consult_stream(Fd, 1, []).
consult_stream(Fd, Line, Acc) ->
case io:read(Fd, '', Line) of
{ok,Term,EndLine} ->
consult_stream(Fd, EndLine, [Term|Acc]);
{error,Error,_Line} ->
{error,Error};
{eof,_Line} ->
{ok,lists:reverse(Acc)}
end.
-ifdef(TEST).
@private do n't make testers suffer through the fsync time
-spec write_vnode_status(status(), file:filename(), Version :: 1 | 2) -> ok.
write_vnode_status(Status, File, Version) ->
VersionedStatus = orddict:store(version, Version, Status),
ok = file:write_file(File, io_lib:format("~w.", [orddict:to_list(VersionedStatus)])).
-else.
@private write the vnode status . This is why the file is guarded by
-spec write_vnode_status(status(), file:filename(), Version :: 1 | 2) -> ok.
write_vnode_status(Status, File, Version) ->
VersionedStatus = orddict:store(version, Version, Status),
ok = riak_core_util:replace_file(File, io_lib:format("~w.", [orddict:to_list(VersionedStatus)])).
-endif.
-ifdef(TEST).
v2_v1_v2_test() ->
Res = get_counter_lease(10000, [{counter, 10000}, {vnodeid, <<"hi!">>}, {version, 1}], 2),
?assertMatch({10000, 20000, <<"hi!">>, _Stat2}, Res).
Check assigning a vnodeid twice in the same second
assign_vnodeid_restart_same_ts_test() ->
TS=(224520 * 100000 ) + 343446
as unsigned net - order int < < 70,116,143,150 > >
NodeId = <<1, 2, 3, 4>>,
{Vid1, Status1} = assign_vnodeid(Now1, NodeId, []),
?assertEqual(<<1, 2, 3, 4, 70, 116, 143, 150>>, Vid1),
Status2 = orddict:erase(vnodeid, Status1),
Reassign
{Vid2, _Status3} = assign_vnodeid(Now2, NodeId, Status2),
?assertEqual(<<1, 2, 3, 4, 70, 116, 143, 151>>, Vid2).
Check assigning a vnodeid with a later date , but less than 11.57
assign_vnodeid_restart_later_ts_test() ->
< < 70,116,143,150 > >
NodeId = <<1, 2, 3, 4>>,
{Vid1, Status1} = assign_vnodeid(Now1, NodeId, []),
?assertEqual(<<1, 2, 3, 4, 70,116,143,150>>, Vid1),
Status2 = orddict:erase(vnodeid, Status1),
Reassign
{Vid2, _Status3} = assign_vnodeid(Now2, NodeId, Status2),
?assertEqual(<<1, 2, 3, 4, 70,116,143,250>>, Vid2).
assign_vnodeid_restart_earlier_ts_test() ->
< < 70,116,143,150 > >
NodeId = <<1, 2, 3, 4>>,
{Vid1, Status1} = assign_vnodeid(Now1, NodeId, []),
?assertEqual(<<1, 2, 3, 4, 70,116,143,250>>, Vid1),
Status2 = orddict:erase(vnodeid, Status1),
Reassign
Should be greater than last offered - which is the 2mil timestamp
{Vid2, _Status3} = assign_vnodeid(Now2, NodeId, Status2),
?assertEqual(<<1, 2, 3, 4, 70,116,143,251>>, Vid2).
-ifndef(GITHUBEXCLUDE).
vnode_status_test_() ->
{setup,
fun() ->
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
filelib:ensure_dir(TestPath ++ "/.test"),
?cmd("chmod u+rwx " ++ TestPath),
?cmd("rm -rf " ++ TestPath),
application:set_env(riak_kv, vnode_status, TestPath),
ok
end,
fun(_) ->
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
application:unset_env(riak_kv, vnode_status),
?cmd("chmod u+rwx " ++ TestPath),
?cmd("rm -rf " ++ TestPath),
ok
end,
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
?cmd("rm -rf " ++ TestPath ++ " || true"),
?cmd("mkdir " ++ TestPath),
?cmd("chmod -w " ++ TestPath),
Index = 0,
File = vnode_status_filename(Index, TestPath),
R =
try
write_vnode_status(orddict:new(), File, ?VNODE_STATUS_VERSION)
catch _Err:{badmatch, Reason} ->
Reason
end,
?assertEqual({error, eacces}, R)
end),
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
?cmd("chmod +w " ++ TestPath),
Index = 0,
File = vnode_status_filename(Index, TestPath),
?assertEqual(ok, write_vnode_status([{created, true}], File, ?VNODE_STATUS_VERSION))
end),
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
Index = 0,
File = vnode_status_filename(Index, TestPath),
{ok, [{created, true}, {version, 2}]} = read_vnode_status(File),
?assertEqual(ok, write_vnode_status([{updated, true}], File, ?VNODE_STATUS_VERSION))
end),
TestPath = riak_kv_test_util:get_test_dir("kv_vnode_status_test"),
?cmd("chmod -r " ++ TestPath ++ "/0"),
Index = 0,
File = vnode_status_filename(Index, TestPath),
?assertEqual({ok, []}, read_vnode_status(File))
end
)
]}.
-endif.
-ifdef(EQC).
-define(NUMTESTS, 1000).
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) ->
io:format(user, Str, Args) end, P)).
-define(TEST_FILE, "kv_vnode_status_eqc/vnode_status_test.file").
-define(VALID_STATUS, [{vnodeid, <<"vnodeid123">>}]).
-define(R16_STATUS, [{vnodeid,<<"'êͧL÷=d">>}]).
@private any binary we write , we can read . ( Try changing ~w . to
prop_any_bin_consult() ->
?SETUP(fun() ->
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
file:delete(TestFile),
fun() -> file:delete(TestFile) end
end,
?FORALL(Bin, binary(),
begin
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
Status = [{version, 1}, {vnodeid, Bin}],
ok = write_vnode_status(Status, TestFile, 1),
equals({ok, Status}, read_vnode_status(TestFile))
end)).
@private regardless of the contents of the vnode status file , we
prop_any_file_status() ->
?SETUP(fun() ->
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
file:delete(TestFile),
fun() -> file:delete(TestFile) end
end,
?FORALL({Type, _StatusFile},
?LET(Type, oneof([r16, valid, absent, corrupt]), {Type, gen_status_file(Type)}),
begin
TestFile = riak_kv_test_util:get_test_dir(?TEST_FILE),
{ok, Status} = read_vnode_status(TestFile),
case Type of
valid ->
There is a vnodeid
is_binary(orddict:fetch(vnodeid, Status));
r16 ->
There is a vnodeid
is_binary(orddict:fetch(vnodeid, Status));
corrupt ->
is_list(Status) andalso equals(error, orddict:find(vnodeid, Status));
absent ->
is_list(Status) andalso equals(error, orddict:find(vnodeid, Status))
end
end)).
gen_status_file(Type) ->
gen_status_file(riak_kv_test_util:get_test_dir(?TEST_FILE), Type).
@private generate the file on disk TBQH , this might be fine as a
gen_status_file(TestFile, r16) ->
ok = riak_core_util:replace_file(TestFile, io_lib:format("~p.", [?R16_STATUS])),
TestFile;
gen_status_file(TestFile, absent) ->
file:delete(TestFile),
TestFile;
gen_status_file(TestFile, corrupt) ->
?LET(Bin, binary(),
begin
file:write_file(TestFile, Bin),
TestFile
end);
gen_status_file(TestFile, valid) ->
?LET(VnodeId, binary(),
begin
ok = write_vnode_status([{vnodeid, VnodeId}], TestFile, 1),
TestFile
end).
-endif.
-endif.
|
6003ffb311499268a0b44b61150189935f6a619313d202a649bc82756a51b784 | thattommyhall/offline-4clojure | p107.clj | ;; Simple closures - Easy
< p > Lexical scope and first - class functions are two of the most basic building blocks of a functional language like Clojure . When you combine the two together , you get something very powerful called < strong > lexical closures</strong > . With these , you can exercise a great deal of control over the lifetime of your local bindings , saving their values for use later , long after the code you 're running now has finished.</p >
;;
;;<p>It can be hard to follow in the abstract, so let's build a simple closure. Given a positive integer <i>n</i>, return a function <code>(f x)</code> which computes <i>x<sup>n</sup></i>. Observe that the effect of this is to preserve the value of <i>n</i> for use outside the scope in which it is defined.</p>
;; tags - higher-order-functions:math
;; restricted -
(ns offline-4clojure.p107
(:use clojure.test))
(def __
;; your solution here
)
(defn -main []
(are [soln] soln
(= 256 ((__ 2) 16),
((__ 8) 2))
(= [1 8 27 64] (map (__ 3) [1 2 3 4]))
(= [1 2 4 8 16] (map #((__ %) 2) [0 1 2 3 4]))
))
| null | https://raw.githubusercontent.com/thattommyhall/offline-4clojure/73e32fc6687816aea3c514767cef3916176589ab/src/offline_4clojure/p107.clj | clojure | Simple closures - Easy
<p>It can be hard to follow in the abstract, so let's build a simple closure. Given a positive integer <i>n</i>, return a function <code>(f x)</code> which computes <i>x<sup>n</sup></i>. Observe that the effect of this is to preserve the value of <i>n</i> for use outside the scope in which it is defined.</p>
tags - higher-order-functions:math
restricted -
your solution here | < p > Lexical scope and first - class functions are two of the most basic building blocks of a functional language like Clojure . When you combine the two together , you get something very powerful called < strong > lexical closures</strong > . With these , you can exercise a great deal of control over the lifetime of your local bindings , saving their values for use later , long after the code you 're running now has finished.</p >
(ns offline-4clojure.p107
(:use clojure.test))
(def __
)
(defn -main []
(are [soln] soln
(= 256 ((__ 2) 16),
((__ 8) 2))
(= [1 8 27 64] (map (__ 3) [1 2 3 4]))
(= [1 2 4 8 16] (map #((__ %) 2) [0 1 2 3 4]))
))
|
1a0bdab741849e376cd01bdc8dae9070a1050746d670bf2e5ed6a48cc46b2ac3 | mzp/coq-ruby | dumpglob.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
$ I d : dumpglob.ml 11582 2008 - 11 - 12 19:49:57Z notin $
(* Dump of globalization (to be used by coqdoc) *)
let glob_file = ref Pervasives.stdout
let open_glob_file f =
glob_file := Pervasives.open_out f
let close_glob_file () =
Pervasives.close_out !glob_file
type glob_output_t =
| NoGlob
| StdOut
| MultFiles
| File of string
let glob_output = ref NoGlob
let dump () = !glob_output != NoGlob
let noglob () = glob_output := NoGlob
let dump_to_stdout () = glob_output := StdOut; glob_file := Pervasives.stdout
let multi_dump () = !glob_output = MultFiles
let dump_to_dotglob f = glob_output := MultFiles
let dump_into_file f = glob_output := File f; open_glob_file f
let dump_string s =
if dump () then Pervasives.output_string !glob_file s
let previous_state = ref MultFiles
let pause () = previous_state := !glob_output; glob_output := NoGlob
let continue () = glob_output := !previous_state
let token_number = ref 0
let last_pos = ref 0
type coqdoc_state = Lexer.location_table * int * int
let coqdoc_freeze () =
let lt = Lexer.location_table() in
let state = (lt,!token_number,!last_pos) in
token_number := 0;
last_pos := 0;
state
let coqdoc_unfreeze (lt,tn,lp) =
Lexer.restore_location_table lt;
token_number := tn;
last_pos := lp
open Decl_kinds
let type_of_logical_kind = function
| IsDefinition def ->
(match def with
| Definition -> "def"
| Coercion -> "coe"
| SubClass -> "subclass"
| CanonicalStructure -> "canonstruc"
| Example -> "ex"
| Fixpoint -> "def"
| CoFixpoint -> "def"
| Scheme -> "scheme"
| StructureComponent -> "proj"
| IdentityCoercion -> "coe"
| Instance -> "inst"
| Method -> "meth")
| IsAssumption a ->
(match a with
| Definitional -> "defax"
| Logical -> "prfax"
| Conjectural -> "prfax")
| IsProof th ->
(match th with
| Theorem
| Lemma
| Fact
| Remark
| Property
| Proposition
| Corollary -> "thm")
let type_of_global_ref gr =
if Typeclasses.is_class gr then
"class"
else
match gr with
| Libnames.ConstRef cst ->
type_of_logical_kind (Decls.constant_kind cst)
| Libnames.VarRef v ->
"var" ^ type_of_logical_kind (Decls.variable_kind v)
| Libnames.IndRef ind ->
let (mib,oib) = Inductive.lookup_mind_specif (Global.env ()) ind in
if mib.Declarations.mind_record then
if mib.Declarations.mind_finite then "rec"
else "corec"
else if mib.Declarations.mind_finite then "ind"
else "coind"
| Libnames.ConstructRef _ -> "constr"
let remove_sections dir =
if Libnames.is_dirpath_prefix_of dir (Lib.cwd ()) then
(* Not yet (fully) discharged *)
Libnames.extract_dirpath_prefix (Lib.sections_depth ()) (Lib.cwd ())
else
(* Theorem/Lemma outside its outer section of definition *)
dir
let dump_ref loc filepath modpath ident ty =
dump_string (Printf.sprintf "R%d %s %s %s %s\n"
(fst (Util.unloc loc)) filepath modpath ident ty)
let add_glob_gen loc sp lib_dp ty =
if dump () then
let mod_dp,id = Libnames.repr_path sp in
let mod_dp = remove_sections mod_dp in
let mod_dp_trunc = Libnames.drop_dirpath_prefix lib_dp mod_dp in
let filepath = Names.string_of_dirpath lib_dp in
let modpath = Names.string_of_dirpath mod_dp_trunc in
let ident = Names.string_of_id id in
dump_ref loc filepath modpath ident ty
let add_glob loc ref =
if dump () && loc <> Util.dummy_loc then
let sp = Nametab.sp_of_global ref in
let lib_dp = Lib.library_part ref in
let ty = type_of_global_ref ref in
add_glob_gen loc sp lib_dp ty
let mp_of_kn kn =
let mp,sec,l = Names.repr_kn kn in
Names.MPdot (mp,l)
let add_glob_kn loc kn =
if dump () && loc <> Util.dummy_loc then
let sp = Nametab.sp_of_syntactic_definition kn in
let lib_dp = Lib.dp_of_mp (mp_of_kn kn) in
add_glob_gen loc sp lib_dp "syndef"
let add_local loc id = ()
(* let mod_dp,id = repr_path sp in *)
(* let mod_dp = remove_sections mod_dp in *)
(* let mod_dp_trunc = drop_dirpath_prefix lib_dp mod_dp in *)
(* let filepath = string_of_dirpath lib_dp in *)
(* let modpath = string_of_dirpath mod_dp_trunc in *)
(* let ident = string_of_id id in *)
dump_string ( Printf.sprintf " R%d % s % s % s % s\n "
( fst ( unloc loc ) ) filepath modpath ident ty )
let dump_binding loc id = ()
let dump_definition (loc, id) sec s =
dump_string (Printf.sprintf "%s %d %s %s\n" s (fst (Util.unloc loc))
(Names.string_of_dirpath (Lib.current_dirpath sec)) (Names.string_of_id id))
let dump_reference loc modpath ident ty =
dump_string (Printf.sprintf "R%d %s %s %s %s\n"
(fst (Util.unloc loc)) (Names.string_of_dirpath (Lib.library_dp ())) modpath ident ty)
let dump_constraint ((loc, n), _, _) sec ty =
match n with
| Names.Name id -> dump_definition (loc, id) sec ty
| Names.Anonymous -> ()
let dump_name (loc, n) sec ty =
match n with
| Names.Name id -> dump_definition (loc, id) sec ty
| Names.Anonymous -> ()
let dump_local_binder b sec ty =
if dump () then
match b with
| Topconstr.LocalRawAssum (nl, _, _) ->
List.iter (fun x -> dump_name x sec ty) nl
| Topconstr.LocalRawDef _ -> ()
let dump_modref loc mp ty =
if dump () then
let (dp, l) = Lib.split_modpath mp in
let l = if l = [] then l else Util.list_drop_last l in
let fp = Names.string_of_dirpath dp in
let mp = Names.string_of_dirpath (Names.make_dirpath l) in
dump_string (Printf.sprintf "R%d %s %s %s %s\n"
(fst (Util.unloc loc)) fp mp "<>" ty)
let dump_moddef loc mp ty =
if dump () then
let (dp, l) = Lib.split_modpath mp in
let mp = Names.string_of_dirpath (Names.make_dirpath l) in
dump_string (Printf.sprintf "%s %d %s %s\n" ty (fst (Util.unloc loc)) "<>" mp)
let dump_libref loc dp ty =
dump_string (Printf.sprintf "R%d %s <> <> %s\n"
(fst (Util.unloc loc)) (Names.string_of_dirpath dp) ty)
let dump_notation_location pos ((path,df),sc) =
if dump () then
let rec next growing =
let loc = Lexer.location_function !token_number in
let (bp,_) = Util.unloc loc in
if growing then if bp >= pos then loc else (incr token_number; next true)
else if bp = pos then loc
else if bp > pos then (decr token_number;next false)
else (incr token_number;next true) in
let loc = next (pos >= !last_pos) in
last_pos := pos;
let path = Names.string_of_dirpath path in
let _sc = match sc with Some sc -> " "^sc | _ -> "" in
dump_string (Printf.sprintf "R%d %s \"%s\" not\n" (fst (Util.unloc loc)) path df)
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/interp/dumpglob.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
Dump of globalization (to be used by coqdoc)
Not yet (fully) discharged
Theorem/Lemma outside its outer section of definition
let mod_dp,id = repr_path sp in
let mod_dp = remove_sections mod_dp in
let mod_dp_trunc = drop_dirpath_prefix lib_dp mod_dp in
let filepath = string_of_dirpath lib_dp in
let modpath = string_of_dirpath mod_dp_trunc in
let ident = string_of_id id in | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
$ I d : dumpglob.ml 11582 2008 - 11 - 12 19:49:57Z notin $
let glob_file = ref Pervasives.stdout
let open_glob_file f =
glob_file := Pervasives.open_out f
let close_glob_file () =
Pervasives.close_out !glob_file
type glob_output_t =
| NoGlob
| StdOut
| MultFiles
| File of string
let glob_output = ref NoGlob
let dump () = !glob_output != NoGlob
let noglob () = glob_output := NoGlob
let dump_to_stdout () = glob_output := StdOut; glob_file := Pervasives.stdout
let multi_dump () = !glob_output = MultFiles
let dump_to_dotglob f = glob_output := MultFiles
let dump_into_file f = glob_output := File f; open_glob_file f
let dump_string s =
if dump () then Pervasives.output_string !glob_file s
let previous_state = ref MultFiles
let pause () = previous_state := !glob_output; glob_output := NoGlob
let continue () = glob_output := !previous_state
let token_number = ref 0
let last_pos = ref 0
type coqdoc_state = Lexer.location_table * int * int
let coqdoc_freeze () =
let lt = Lexer.location_table() in
let state = (lt,!token_number,!last_pos) in
token_number := 0;
last_pos := 0;
state
let coqdoc_unfreeze (lt,tn,lp) =
Lexer.restore_location_table lt;
token_number := tn;
last_pos := lp
open Decl_kinds
let type_of_logical_kind = function
| IsDefinition def ->
(match def with
| Definition -> "def"
| Coercion -> "coe"
| SubClass -> "subclass"
| CanonicalStructure -> "canonstruc"
| Example -> "ex"
| Fixpoint -> "def"
| CoFixpoint -> "def"
| Scheme -> "scheme"
| StructureComponent -> "proj"
| IdentityCoercion -> "coe"
| Instance -> "inst"
| Method -> "meth")
| IsAssumption a ->
(match a with
| Definitional -> "defax"
| Logical -> "prfax"
| Conjectural -> "prfax")
| IsProof th ->
(match th with
| Theorem
| Lemma
| Fact
| Remark
| Property
| Proposition
| Corollary -> "thm")
let type_of_global_ref gr =
if Typeclasses.is_class gr then
"class"
else
match gr with
| Libnames.ConstRef cst ->
type_of_logical_kind (Decls.constant_kind cst)
| Libnames.VarRef v ->
"var" ^ type_of_logical_kind (Decls.variable_kind v)
| Libnames.IndRef ind ->
let (mib,oib) = Inductive.lookup_mind_specif (Global.env ()) ind in
if mib.Declarations.mind_record then
if mib.Declarations.mind_finite then "rec"
else "corec"
else if mib.Declarations.mind_finite then "ind"
else "coind"
| Libnames.ConstructRef _ -> "constr"
let remove_sections dir =
if Libnames.is_dirpath_prefix_of dir (Lib.cwd ()) then
Libnames.extract_dirpath_prefix (Lib.sections_depth ()) (Lib.cwd ())
else
dir
let dump_ref loc filepath modpath ident ty =
dump_string (Printf.sprintf "R%d %s %s %s %s\n"
(fst (Util.unloc loc)) filepath modpath ident ty)
let add_glob_gen loc sp lib_dp ty =
if dump () then
let mod_dp,id = Libnames.repr_path sp in
let mod_dp = remove_sections mod_dp in
let mod_dp_trunc = Libnames.drop_dirpath_prefix lib_dp mod_dp in
let filepath = Names.string_of_dirpath lib_dp in
let modpath = Names.string_of_dirpath mod_dp_trunc in
let ident = Names.string_of_id id in
dump_ref loc filepath modpath ident ty
let add_glob loc ref =
if dump () && loc <> Util.dummy_loc then
let sp = Nametab.sp_of_global ref in
let lib_dp = Lib.library_part ref in
let ty = type_of_global_ref ref in
add_glob_gen loc sp lib_dp ty
let mp_of_kn kn =
let mp,sec,l = Names.repr_kn kn in
Names.MPdot (mp,l)
let add_glob_kn loc kn =
if dump () && loc <> Util.dummy_loc then
let sp = Nametab.sp_of_syntactic_definition kn in
let lib_dp = Lib.dp_of_mp (mp_of_kn kn) in
add_glob_gen loc sp lib_dp "syndef"
let add_local loc id = ()
dump_string ( Printf.sprintf " R%d % s % s % s % s\n "
( fst ( unloc loc ) ) filepath modpath ident ty )
let dump_binding loc id = ()
let dump_definition (loc, id) sec s =
dump_string (Printf.sprintf "%s %d %s %s\n" s (fst (Util.unloc loc))
(Names.string_of_dirpath (Lib.current_dirpath sec)) (Names.string_of_id id))
let dump_reference loc modpath ident ty =
dump_string (Printf.sprintf "R%d %s %s %s %s\n"
(fst (Util.unloc loc)) (Names.string_of_dirpath (Lib.library_dp ())) modpath ident ty)
let dump_constraint ((loc, n), _, _) sec ty =
match n with
| Names.Name id -> dump_definition (loc, id) sec ty
| Names.Anonymous -> ()
let dump_name (loc, n) sec ty =
match n with
| Names.Name id -> dump_definition (loc, id) sec ty
| Names.Anonymous -> ()
let dump_local_binder b sec ty =
if dump () then
match b with
| Topconstr.LocalRawAssum (nl, _, _) ->
List.iter (fun x -> dump_name x sec ty) nl
| Topconstr.LocalRawDef _ -> ()
let dump_modref loc mp ty =
if dump () then
let (dp, l) = Lib.split_modpath mp in
let l = if l = [] then l else Util.list_drop_last l in
let fp = Names.string_of_dirpath dp in
let mp = Names.string_of_dirpath (Names.make_dirpath l) in
dump_string (Printf.sprintf "R%d %s %s %s %s\n"
(fst (Util.unloc loc)) fp mp "<>" ty)
let dump_moddef loc mp ty =
if dump () then
let (dp, l) = Lib.split_modpath mp in
let mp = Names.string_of_dirpath (Names.make_dirpath l) in
dump_string (Printf.sprintf "%s %d %s %s\n" ty (fst (Util.unloc loc)) "<>" mp)
let dump_libref loc dp ty =
dump_string (Printf.sprintf "R%d %s <> <> %s\n"
(fst (Util.unloc loc)) (Names.string_of_dirpath dp) ty)
let dump_notation_location pos ((path,df),sc) =
if dump () then
let rec next growing =
let loc = Lexer.location_function !token_number in
let (bp,_) = Util.unloc loc in
if growing then if bp >= pos then loc else (incr token_number; next true)
else if bp = pos then loc
else if bp > pos then (decr token_number;next false)
else (incr token_number;next true) in
let loc = next (pos >= !last_pos) in
last_pos := pos;
let path = Names.string_of_dirpath path in
let _sc = match sc with Some sc -> " "^sc | _ -> "" in
dump_string (Printf.sprintf "R%d %s \"%s\" not\n" (fst (Util.unloc loc)) path df)
|
29222bc956487ef705d92c9c148c881a0a0a393618f228aa159fd14affac0300 | tommaisey/aeon | c-setn.help.scm | ; /c_setn Set ranges of bus value(s)
; [
; int - starting bus index
; int - number of sequential buses to change (M)
; [
; float - a control value
; ] * M
; ] * N
; Set contiguous ranges of buses to sets of values. For each range, the
; starting bus index is given followed by the number of channels to
; change, followed by the values.
| null | https://raw.githubusercontent.com/tommaisey/aeon/80744a7235425c47a061ec8324d923c53ebedf15/libs/third-party/sc3/rsc3/help/server-command/c-setn.help.scm | scheme | /c_setn Set ranges of bus value(s)
[
int - starting bus index
int - number of sequential buses to change (M)
[
float - a control value
] * M
] * N
Set contiguous ranges of buses to sets of values. For each range, the
starting bus index is given followed by the number of channels to
change, followed by the values. | |
996595a0282204ebc70feaaa9d67a8f343df57feff49d0498af7b737aae25bca | crategus/cl-cffi-gtk | rtest-gtk-box.lisp | (def-suite gtk-box :in gtk-suite)
(in-suite gtk-box)
GtkPrinterOptionWidget is a child of GtkBox
#-win32
(eval-when (:compile-toplevel :load-toplevel :execute)
; (foreign-funcall "gtk_places_view_get_type" g-size)
(foreign-funcall "gtk_printer_option_widget_get_type" g-size))
;;; --- Types and Values -------------------------------------------------------
(test gtk-box-class
;; Type check
(is (g-type-is-object "GtkBox"))
;; Check the registered name
(is (eq 'gtk-box
(registered-object-type-by-name "GtkBox")))
;; Check the type initializer
(is (eq (gtype "GtkBox")
(gtype (foreign-funcall "gtk_box_get_type" g-size))))
;; Check the parent
(is (eq (gtype "GtkContainer") (g-type-parent "GtkBox")))
;; Check the children
#-windows
(is (or (equal '("GtkHBox" "GtkVBox" "GtkStackSwitcher" "GtkButtonBox"
"GtkStatusbar" "GtkInfoBar" "GtkColorChooserWidget"
"GtkColorSelection" "GtkFileChooserWidget"
"GtkFileChooserButton" "GtkFontChooserWidget"
"GtkFontSelection" "GtkRecentChooserWidget"
"GtkAppChooserWidget" "GtkShortcutsSection"
"GtkShortcutsGroup" "GtkShortcutsShortcut"
"GtkPrinterOptionWidget")
(mapcar #'g-type-name (g-type-children "GtkBox")))
'("GtkHBox" "GtkVBox" "GtkStackSwitcher" "GtkButtonBox"
"GtkStatusbar" "GtkInfoBar" "GtkColorChooserWidget"
"GtkColorSelection" "GtkFileChooserWidget"
"GtkFileChooserButton" "GtkFontChooserWidget"
"GtkFontSelection" "GtkRecentChooserWidget"
"GtkAppChooserWidget" "GtkShortcutsSection"
"GtkShortcutsGroup" "GtkShortcutsShortcut"
"GtkPrinterOptionWidget" "GtkPlacesView")
(mapcar #'g-type-name (g-type-children "GtkBox"))))
#+windows
(is (equal '("GtkHBox" "GtkVBox" "GtkStackSwitcher" "GtkButtonBox"
"GtkStatusbar" "GtkInfoBar" "GtkColorChooserWidget"
"GtkColorSelection" "GtkFileChooserWidget"
"GtkFileChooserButton" "GtkFontChooserWidget"
"GtkFontSelection" "GtkRecentChooserWidget"
"GtkAppChooserWidget" "GtkShortcutsSection"
"GtkShortcutsGroup" "GtkShortcutsShortcut")
(mapcar #'g-type-name (g-type-children "GtkBox"))))
;; Check the interfaces
(is (equal '("AtkImplementorIface" "GtkBuildable" "GtkOrientable")
(mapcar #'g-type-name (g-type-interfaces "GtkBox"))))
;; Check the class properties
(is (equal '("baseline-position" "homogeneous" "orientation" "spacing")
(list-class-property-names "GtkBox")))
;; Get the names of the style properties.
(is (equal '()
(list-class-style-property-names "GtkBox")))
;; Get the names of the child properties
(is (equal '("expand" "fill" "pack-type" "padding" "position")
(list-class-child-property-names "GtkBox")))
;; Check the class definition
(is (equal '(DEFINE-G-OBJECT-CLASS "GtkBox" GTK-BOX
(:SUPERCLASS GTK-CONTAINER :EXPORT T :INTERFACES
("AtkImplementorIface" "GtkBuildable" "GtkOrientable")
:TYPE-INITIALIZER "gtk_box_get_type")
((BASELINE-POSITION GTK-BOX-BASELINE-POSITION
"baseline-position" "GtkBaselinePosition" T T)
(HOMOGENEOUS GTK-BOX-HOMOGENEOUS "homogeneous"
"gboolean" T T)
(SPACING GTK-BOX-SPACING "spacing" "gint" T T)))
(get-g-type-definition "GtkBox"))))
;;; --- Properties -------------------------------------------------------------
(test gtk-box-properties
(let ((box (make-instance 'gtk-box :orientation :vertical :spacing 12)))
(is (eq :vertical (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 12 (gtk-box-spacing box)))))
;;; --- Child Properties -------------------------------------------------------
(test gtk-box-child-properties
(let* ((box (make-instance 'gtk-box :orientation :vertical))
(button (make-instance 'gtk-button)))
(is-false (gtk-container-add box button))
(is-false (gtk-box-child-expand box button))
(is-true (gtk-box-child-fill box button))
(is (eq :start (gtk-box-child-pack-type box button)))
(is (= 0 (gtk-box-child-padding box button)))
(is (= 0 (gtk-box-child-position box button)))))
;;; --- Functions --------------------------------------------------------------
;;; gtk-box-new
(test gtk-box-new
;; Create a box
(let ((box (gtk-box-new :vertical 12)))
(is (eq :vertical (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 12 (gtk-box-spacing box))))
;; Create a box with the default value for spacing
(let ((box (gtk-box-new :horizontal)))
(is (eq :horizontal (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 0 (gtk-box-spacing box))))
;; Use make-instance with default values
(let ((box (make-instance 'gtk-box)))
(is (eq :horizontal (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 0 (gtk-box-spacing box))))
;; Use make-instance and set some properties
(let ((box (make-instance 'gtk-box
:orientation :vertical
:baseline-position :top
:homogeneous t
:spacing 12)))
(is (eq :vertical (gtk-orientable-orientation box)))
(is (eq :top (gtk-box-baseline-position box)))
(is-true (gtk-box-homogeneous box))
(is (= 12 (gtk-box-spacing box)))))
;;; gtk-box-pack-start
(test gtk-box-pack-start
(let ((box (make-instance 'gtk-box :orientation :vertical))
(button1 (make-instance 'gtk-button))
(button2 (make-instance 'gtk-button))
(button3 (make-instance 'gtk-button)))
Pack first button
(is-false (gtk-box-pack-start box button1))
(is (= 0 (gtk-box-child-position box button1)))
Pack second button
(is-false (gtk-box-pack-start box button2))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
Pack third button
(is-false (gtk-box-pack-start box button3))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
(is (= 2 (gtk-box-child-position box button3)))
;; Check the pack type
(is (eq :start (gtk-box-child-pack-type box button1)))
(is (eq :start (gtk-box-child-pack-type box button2)))
(is (eq :start (gtk-box-child-pack-type box button3)))))
;;; gtk-box-pack-end
(test gtk-box-pack-end
(let ((box (make-instance 'gtk-box :orientation :vertical))
(button1 (make-instance 'gtk-button))
(button2 (make-instance 'gtk-button))
(button3 (make-instance 'gtk-button)))
Pack first button
(is-false (gtk-box-pack-end box button1))
(is (= 0 (gtk-box-child-position box button1)))
Pack second button
(is-false (gtk-box-pack-end box button2))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
Pack third button
(is-false (gtk-box-pack-end box button3))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
(is (= 2 (gtk-box-child-position box button3)))
;; Check the pack type
(is (eq :end (gtk-box-child-pack-type box button1)))
(is (eq :end (gtk-box-child-pack-type box button2)))
(is (eq :end (gtk-box-child-pack-type box button3)))))
;;; gtk-box-reorder-child
(test gtk-box-reorder-child
(let ((box (make-instance 'gtk-box :orientation :vertical))
(label (make-instance 'gtk-label))
(button (make-instance 'gtk-button))
(image (make-instance 'gtk-image)))
Pack three widgets in the box
(is-false (gtk-box-pack-start box label))
(is-false (gtk-box-pack-start box button))
(is-false (gtk-box-pack-start box image))
;; Check the position of the children
(is (= 0 (gtk-box-child-position box label)))
(is (= 1 (gtk-box-child-position box button)))
(is (= 2 (gtk-box-child-position box image)))
;; Reorder the children
(gtk-box-reorder-child box label 1)
;; Check again the position of the children
(is (= 1 (gtk-box-child-position box label)))
(is (= 0 (gtk-box-child-position box button)))
(is (= 2 (gtk-box-child-position box image)))
;; Reorder the children
(gtk-box-reorder-child box label 2)
;; Check again the position of the children
(is (= 2 (gtk-box-child-position box label)))
(is (= 0 (gtk-box-child-position box button)))
(is (= 1 (gtk-box-child-position box image)))))
;;; gtk-box-query-child-packing
;;; gtk-box-child-packing
(test gtk-box-child-packing
(let ((box (make-instance 'gtk-box))
(button (make-instance 'gtk-button)))
;; Pack a button in the box
(is-false (gtk-container-add box button))
;; Query and check the child properties
(multiple-value-bind (expand fill padding pack-type)
(gtk-box-query-child-packing box button)
(is-false expand)
(is-true fill)
(is (= 0 padding))
(is (eq :start pack-type)))
;; Set new child properties
(is (eq :end (gtk-box-child-packing box button t nil 10 :end)))
;; Query and check the child properties
(multiple-value-bind (expand fill padding pack-type)
(gtk-box-query-child-packing box button)
(is-true expand)
(is-false fill)
(is (= 10 padding))
(is (eq :end pack-type)))))
;;; gtk-box-center-widget
(test gtk-box-center-widget
(let ((box (make-instance 'gtk-box :orientation :vertical)))
;; Not center widget set
(is-false (gtk-box-center-widget box))
;; Set a center widget
(is (eq 'gtk-button
(type-of (setf (gtk-box-center-widget box)
(make-instance 'gtk-button)))))
;; Retrieve the center widget
(is (eq 'gtk-button (type-of (gtk-box-center-widget box))))))
2021 - 10 - 14
| null | https://raw.githubusercontent.com/crategus/cl-cffi-gtk/7f5a09f78d8004a71efa82794265f2587fff98ab/test/rtest-gtk-box.lisp | lisp | (foreign-funcall "gtk_places_view_get_type" g-size)
--- Types and Values -------------------------------------------------------
Type check
Check the registered name
Check the type initializer
Check the parent
Check the children
Check the interfaces
Check the class properties
Get the names of the style properties.
Get the names of the child properties
Check the class definition
--- Properties -------------------------------------------------------------
--- Child Properties -------------------------------------------------------
--- Functions --------------------------------------------------------------
gtk-box-new
Create a box
Create a box with the default value for spacing
Use make-instance with default values
Use make-instance and set some properties
gtk-box-pack-start
Check the pack type
gtk-box-pack-end
Check the pack type
gtk-box-reorder-child
Check the position of the children
Reorder the children
Check again the position of the children
Reorder the children
Check again the position of the children
gtk-box-query-child-packing
gtk-box-child-packing
Pack a button in the box
Query and check the child properties
Set new child properties
Query and check the child properties
gtk-box-center-widget
Not center widget set
Set a center widget
Retrieve the center widget | (def-suite gtk-box :in gtk-suite)
(in-suite gtk-box)
GtkPrinterOptionWidget is a child of GtkBox
#-win32
(eval-when (:compile-toplevel :load-toplevel :execute)
(foreign-funcall "gtk_printer_option_widget_get_type" g-size))
(test gtk-box-class
(is (g-type-is-object "GtkBox"))
(is (eq 'gtk-box
(registered-object-type-by-name "GtkBox")))
(is (eq (gtype "GtkBox")
(gtype (foreign-funcall "gtk_box_get_type" g-size))))
(is (eq (gtype "GtkContainer") (g-type-parent "GtkBox")))
#-windows
(is (or (equal '("GtkHBox" "GtkVBox" "GtkStackSwitcher" "GtkButtonBox"
"GtkStatusbar" "GtkInfoBar" "GtkColorChooserWidget"
"GtkColorSelection" "GtkFileChooserWidget"
"GtkFileChooserButton" "GtkFontChooserWidget"
"GtkFontSelection" "GtkRecentChooserWidget"
"GtkAppChooserWidget" "GtkShortcutsSection"
"GtkShortcutsGroup" "GtkShortcutsShortcut"
"GtkPrinterOptionWidget")
(mapcar #'g-type-name (g-type-children "GtkBox")))
'("GtkHBox" "GtkVBox" "GtkStackSwitcher" "GtkButtonBox"
"GtkStatusbar" "GtkInfoBar" "GtkColorChooserWidget"
"GtkColorSelection" "GtkFileChooserWidget"
"GtkFileChooserButton" "GtkFontChooserWidget"
"GtkFontSelection" "GtkRecentChooserWidget"
"GtkAppChooserWidget" "GtkShortcutsSection"
"GtkShortcutsGroup" "GtkShortcutsShortcut"
"GtkPrinterOptionWidget" "GtkPlacesView")
(mapcar #'g-type-name (g-type-children "GtkBox"))))
#+windows
(is (equal '("GtkHBox" "GtkVBox" "GtkStackSwitcher" "GtkButtonBox"
"GtkStatusbar" "GtkInfoBar" "GtkColorChooserWidget"
"GtkColorSelection" "GtkFileChooserWidget"
"GtkFileChooserButton" "GtkFontChooserWidget"
"GtkFontSelection" "GtkRecentChooserWidget"
"GtkAppChooserWidget" "GtkShortcutsSection"
"GtkShortcutsGroup" "GtkShortcutsShortcut")
(mapcar #'g-type-name (g-type-children "GtkBox"))))
(is (equal '("AtkImplementorIface" "GtkBuildable" "GtkOrientable")
(mapcar #'g-type-name (g-type-interfaces "GtkBox"))))
(is (equal '("baseline-position" "homogeneous" "orientation" "spacing")
(list-class-property-names "GtkBox")))
(is (equal '()
(list-class-style-property-names "GtkBox")))
(is (equal '("expand" "fill" "pack-type" "padding" "position")
(list-class-child-property-names "GtkBox")))
(is (equal '(DEFINE-G-OBJECT-CLASS "GtkBox" GTK-BOX
(:SUPERCLASS GTK-CONTAINER :EXPORT T :INTERFACES
("AtkImplementorIface" "GtkBuildable" "GtkOrientable")
:TYPE-INITIALIZER "gtk_box_get_type")
((BASELINE-POSITION GTK-BOX-BASELINE-POSITION
"baseline-position" "GtkBaselinePosition" T T)
(HOMOGENEOUS GTK-BOX-HOMOGENEOUS "homogeneous"
"gboolean" T T)
(SPACING GTK-BOX-SPACING "spacing" "gint" T T)))
(get-g-type-definition "GtkBox"))))
(test gtk-box-properties
(let ((box (make-instance 'gtk-box :orientation :vertical :spacing 12)))
(is (eq :vertical (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 12 (gtk-box-spacing box)))))
(test gtk-box-child-properties
(let* ((box (make-instance 'gtk-box :orientation :vertical))
(button (make-instance 'gtk-button)))
(is-false (gtk-container-add box button))
(is-false (gtk-box-child-expand box button))
(is-true (gtk-box-child-fill box button))
(is (eq :start (gtk-box-child-pack-type box button)))
(is (= 0 (gtk-box-child-padding box button)))
(is (= 0 (gtk-box-child-position box button)))))
(test gtk-box-new
(let ((box (gtk-box-new :vertical 12)))
(is (eq :vertical (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 12 (gtk-box-spacing box))))
(let ((box (gtk-box-new :horizontal)))
(is (eq :horizontal (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 0 (gtk-box-spacing box))))
(let ((box (make-instance 'gtk-box)))
(is (eq :horizontal (gtk-orientable-orientation box)))
(is (eq :center (gtk-box-baseline-position box)))
(is-false (gtk-box-homogeneous box))
(is (= 0 (gtk-box-spacing box))))
(let ((box (make-instance 'gtk-box
:orientation :vertical
:baseline-position :top
:homogeneous t
:spacing 12)))
(is (eq :vertical (gtk-orientable-orientation box)))
(is (eq :top (gtk-box-baseline-position box)))
(is-true (gtk-box-homogeneous box))
(is (= 12 (gtk-box-spacing box)))))
(test gtk-box-pack-start
(let ((box (make-instance 'gtk-box :orientation :vertical))
(button1 (make-instance 'gtk-button))
(button2 (make-instance 'gtk-button))
(button3 (make-instance 'gtk-button)))
Pack first button
(is-false (gtk-box-pack-start box button1))
(is (= 0 (gtk-box-child-position box button1)))
Pack second button
(is-false (gtk-box-pack-start box button2))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
Pack third button
(is-false (gtk-box-pack-start box button3))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
(is (= 2 (gtk-box-child-position box button3)))
(is (eq :start (gtk-box-child-pack-type box button1)))
(is (eq :start (gtk-box-child-pack-type box button2)))
(is (eq :start (gtk-box-child-pack-type box button3)))))
(test gtk-box-pack-end
(let ((box (make-instance 'gtk-box :orientation :vertical))
(button1 (make-instance 'gtk-button))
(button2 (make-instance 'gtk-button))
(button3 (make-instance 'gtk-button)))
Pack first button
(is-false (gtk-box-pack-end box button1))
(is (= 0 (gtk-box-child-position box button1)))
Pack second button
(is-false (gtk-box-pack-end box button2))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
Pack third button
(is-false (gtk-box-pack-end box button3))
(is (= 0 (gtk-box-child-position box button1)))
(is (= 1 (gtk-box-child-position box button2)))
(is (= 2 (gtk-box-child-position box button3)))
(is (eq :end (gtk-box-child-pack-type box button1)))
(is (eq :end (gtk-box-child-pack-type box button2)))
(is (eq :end (gtk-box-child-pack-type box button3)))))
(test gtk-box-reorder-child
(let ((box (make-instance 'gtk-box :orientation :vertical))
(label (make-instance 'gtk-label))
(button (make-instance 'gtk-button))
(image (make-instance 'gtk-image)))
Pack three widgets in the box
(is-false (gtk-box-pack-start box label))
(is-false (gtk-box-pack-start box button))
(is-false (gtk-box-pack-start box image))
(is (= 0 (gtk-box-child-position box label)))
(is (= 1 (gtk-box-child-position box button)))
(is (= 2 (gtk-box-child-position box image)))
(gtk-box-reorder-child box label 1)
(is (= 1 (gtk-box-child-position box label)))
(is (= 0 (gtk-box-child-position box button)))
(is (= 2 (gtk-box-child-position box image)))
(gtk-box-reorder-child box label 2)
(is (= 2 (gtk-box-child-position box label)))
(is (= 0 (gtk-box-child-position box button)))
(is (= 1 (gtk-box-child-position box image)))))
(test gtk-box-child-packing
(let ((box (make-instance 'gtk-box))
(button (make-instance 'gtk-button)))
(is-false (gtk-container-add box button))
(multiple-value-bind (expand fill padding pack-type)
(gtk-box-query-child-packing box button)
(is-false expand)
(is-true fill)
(is (= 0 padding))
(is (eq :start pack-type)))
(is (eq :end (gtk-box-child-packing box button t nil 10 :end)))
(multiple-value-bind (expand fill padding pack-type)
(gtk-box-query-child-packing box button)
(is-true expand)
(is-false fill)
(is (= 10 padding))
(is (eq :end pack-type)))))
(test gtk-box-center-widget
(let ((box (make-instance 'gtk-box :orientation :vertical)))
(is-false (gtk-box-center-widget box))
(is (eq 'gtk-button
(type-of (setf (gtk-box-center-widget box)
(make-instance 'gtk-button)))))
(is (eq 'gtk-button (type-of (gtk-box-center-widget box))))))
2021 - 10 - 14
|
f532b41d67280c66dfbad0ca399d8a96d42c617ca93649c1e8da5b6b23a95f12 | diffusionkinetics/open | KNN.hs | {-# LANGUAGE ScopedTypeVariables, TypeFamilies #-}
module Fuml.Base.KNN where
import qualified Data.Vector.Storable as VS
import Numeric.LinearAlgebra
import Data.List (nub, sortBy)
import Data.Ord (comparing)
euclideanDistance :: Vector Double -> Vector Double -> Double
euclideanDistance v1 v2 = sqrt $ VS.sum $ VS.map (^2) $ VS.zipWith (-) v1 v2
weightedBoolVote :: [(Double, Bool)] -> Double
weightedBoolVote distBools =
let wtrue = sum $ map (recip . fst) $ filter snd distBools
wfalse = sum $ map (recip . fst) $ filter (not . snd) distBools
in exp wtrue / (exp wtrue + exp wfalse)
majorityVote :: Eq a => [(Double, a)] -> a
majorityVote distXs = let classes = nub $ map snd distXs
occurences c = (c,negate $ length $ filter ((==c) . snd) distXs)
in fst $ head $ sortBy (comparing snd) $ map occurences classes
weightedMajorityVote :: Eq a => [(Double, a)] -> a
weightedMajorityVote distXs
= let classes = nub $ map snd distXs
weight c = (c,negate $ sum $ map (recip . fst) $ filter ((==c) . snd) distXs)
in fst $ head $ sortBy (comparing snd) $ map weight classes
avgVote :: [(Double, Double)] -> Double
avgVote distXs = let n = realToFrac $ length distXs
in (sum $ map snd distXs) / n
weightedAvgVote :: [(Double, Double)] -> Double
weightedAvgVote distXs
= let wsum = sum $ map (recip . fst) distXs
in (sum $ map (uncurry (*)) distXs) / wsum
| null | https://raw.githubusercontent.com/diffusionkinetics/open/673d9a4a099abd9035ccc21e37d8e614a45a1901/fuml/lib/Fuml/Base/KNN.hs | haskell | # LANGUAGE ScopedTypeVariables, TypeFamilies # |
module Fuml.Base.KNN where
import qualified Data.Vector.Storable as VS
import Numeric.LinearAlgebra
import Data.List (nub, sortBy)
import Data.Ord (comparing)
euclideanDistance :: Vector Double -> Vector Double -> Double
euclideanDistance v1 v2 = sqrt $ VS.sum $ VS.map (^2) $ VS.zipWith (-) v1 v2
weightedBoolVote :: [(Double, Bool)] -> Double
weightedBoolVote distBools =
let wtrue = sum $ map (recip . fst) $ filter snd distBools
wfalse = sum $ map (recip . fst) $ filter (not . snd) distBools
in exp wtrue / (exp wtrue + exp wfalse)
majorityVote :: Eq a => [(Double, a)] -> a
majorityVote distXs = let classes = nub $ map snd distXs
occurences c = (c,negate $ length $ filter ((==c) . snd) distXs)
in fst $ head $ sortBy (comparing snd) $ map occurences classes
weightedMajorityVote :: Eq a => [(Double, a)] -> a
weightedMajorityVote distXs
= let classes = nub $ map snd distXs
weight c = (c,negate $ sum $ map (recip . fst) $ filter ((==c) . snd) distXs)
in fst $ head $ sortBy (comparing snd) $ map weight classes
avgVote :: [(Double, Double)] -> Double
avgVote distXs = let n = realToFrac $ length distXs
in (sum $ map snd distXs) / n
weightedAvgVote :: [(Double, Double)] -> Double
weightedAvgVote distXs
= let wsum = sum $ map (recip . fst) distXs
in (sum $ map (uncurry (*)) distXs) / wsum
|
f9561e5d039e1c825c467a8f915f9c15e4790f3f96dce4c80ba98d62d8023fda | hellonico/origami-dnn | agecam.clj | (ns origami-dnn.demo.agecam
(:require [opencv4.dnn.core :as origami-dnn]
[opencv4.utils :as u]
[origami-dnn.draw :as d]
[origami-dnn.net.core :as net]))
(defn -main [& args]
(let [[net opts labels] (origami-dnn/read-net-from-repo "networks.caffe:convnet-age:1.0.0")]
(u/simple-cam-window
{:frame {:width 500} :video {:device 0}}
(fn [input]
(-> input
(net/classify net opts)
(d/guess-gender labels)))))) | null | https://raw.githubusercontent.com/hellonico/origami-dnn/f55a32d0d3d528fcf57aaac10cfb20c7998b380c/src/origami_dnn/demo/agecam.clj | clojure | (ns origami-dnn.demo.agecam
(:require [opencv4.dnn.core :as origami-dnn]
[opencv4.utils :as u]
[origami-dnn.draw :as d]
[origami-dnn.net.core :as net]))
(defn -main [& args]
(let [[net opts labels] (origami-dnn/read-net-from-repo "networks.caffe:convnet-age:1.0.0")]
(u/simple-cam-window
{:frame {:width 500} :video {:device 0}}
(fn [input]
(-> input
(net/classify net opts)
(d/guess-gender labels)))))) | |
e471cab12cb1bc3c12bc00d4cfe6b8227679ed377d4fbd452a55efd24a6cb88b | abarbu/haskell-torch | IsString.hs | # LANGUAGE TemplateHaskell #
module Data.String.InterpolateIO.IsString (c, fromStringIO) where
import Data.String.ShowIO(fromStringIO)
import Language.Haskell.TH.Quote (QuasiQuoter(..))
import qualified Data.String.InterpolateIO as I
-- |
-- Like `I.c`, but constructs a value of type
--
-- > IsString a => a
c :: QuasiQuoter
c = QuasiQuoter {
quoteExp = \s -> [|fromStringIO =<< $(quoteExp I.c $ s)|]
, quotePat = err "pattern"
, quoteType = err "type"
, quoteDec = err "declaration"
}
where
err name = error ("Data.String.Interpolate.IsString.c: This QuasiQuoter can not be used as a " ++ name ++ "!")
| null | https://raw.githubusercontent.com/abarbu/haskell-torch/03b2c10bf8ca3d4508d52c2123e753d93b3c4236/interpolateIO/src/Data/String/InterpolateIO/IsString.hs | haskell | |
Like `I.c`, but constructs a value of type
> IsString a => a | # LANGUAGE TemplateHaskell #
module Data.String.InterpolateIO.IsString (c, fromStringIO) where
import Data.String.ShowIO(fromStringIO)
import Language.Haskell.TH.Quote (QuasiQuoter(..))
import qualified Data.String.InterpolateIO as I
c :: QuasiQuoter
c = QuasiQuoter {
quoteExp = \s -> [|fromStringIO =<< $(quoteExp I.c $ s)|]
, quotePat = err "pattern"
, quoteType = err "type"
, quoteDec = err "declaration"
}
where
err name = error ("Data.String.Interpolate.IsString.c: This QuasiQuoter can not be used as a " ++ name ++ "!")
|
dce823dc6e5b595aaea54ea743e55869d2aacb878318abba11ea945205cb5386 | vlstill/hsExprTest | GenConvertible.hs | # LANGUAGE CPP #
module Test.QuickCheck.GenConvertible where
( c ) 2018
import Prelude ( Int, map, ($), (<$>), pure, foldl, foldr, zipWith, Maybe (..) )
import Language.Haskell.TH ( Exp (..), Type (..), Dec (..), Pat (..), Q
, mkName, newName, tupleTypeName
, Overlap (..), Clause (..), Body (..) )
import Control.Monad ( replicateM )
import Test.Expr.Internal.Compat
-- instance {-# OVERLAPS #-} (Convertible a a', Convertible b b') => Convertible (Fun a' b) (a -> b') where
-- convert' (Fun _ f) x = convert' (f (convert' x))
-- | For each n >= 0 build an
-- @instance OVERLAPS … => Convertible (Fun (a1', a2', …) b) (a1 -> a2 -> … -> b')@
convertibleN :: Int -> Q Dec
convertibleN n = do
let convertible = mkName "Convertible"
convert = mkName "convert'"
fun = mkName "Fun"
as <- map VarT <$> replicateM n (newName "a")
a's <- map VarT <$> replicateM n (newName "a'")
f <- newName "f"
b <- VarT <$> newName "b"
b' <- VarT <$> newName "b'"
xs <- replicateM n (newName "x")
let ntup = ConT $ tupleTypeName n
intuple'_t = foldl AppT ntup a's -- original tuple of input types
in_t = ConT fun `AppT` intuple'_t `AppT` b -- (Fun (a1', a2', …) b)
out_t = foldr (\inp ret -> (ArrowT `AppT` inp) `AppT` ret) b' as -- a1 -> (a2 -> (… -> b'))
-- (Convertible a1 a1', Convertible a2 a2', …, Convertible b b') =>
cxt = zipWith (\x y -> (ConT convertible `AppT` x) `AppT` y) (b:as) (b':a's)
-- Convertible (Fun (a1', a2', …) b) (a1 -> a2 -> … -> b')
head = (ConT convertible `AppT` in_t) `AppT` out_t
#if MIN_VERSION_template_haskell(2, 18, 0)
( )
#else
( )
#endif
-- (convert x1, convert x2, …)
argtuple = TupE $ map (\x -> wrapTupElemE (VarE convert `AppE` VarE x)) xs
-- convert (f (convert x1, convert x2, …))
body = VarE convert `AppE` (VarE f `AppE` argtuple)
clause = Clause (fun_p : map VarP xs) (NormalB body) []
pure $ InstanceD (Just Overlaps) cxt head [FunD convert [clause]]
| null | https://raw.githubusercontent.com/vlstill/hsExprTest/cdb522bf86f61e94ff7b9cb6045823d0df0f74f3/testlib/Test/QuickCheck/GenConvertible.hs | haskell | instance {-# OVERLAPS #-} (Convertible a a', Convertible b b') => Convertible (Fun a' b) (a -> b') where
convert' (Fun _ f) x = convert' (f (convert' x))
| For each n >= 0 build an
@instance OVERLAPS … => Convertible (Fun (a1', a2', …) b) (a1 -> a2 -> … -> b')@
original tuple of input types
(Fun (a1', a2', …) b)
a1 -> (a2 -> (… -> b'))
(Convertible a1 a1', Convertible a2 a2', …, Convertible b b') =>
Convertible (Fun (a1', a2', …) b) (a1 -> a2 -> … -> b')
(convert x1, convert x2, …)
convert (f (convert x1, convert x2, …)) | # LANGUAGE CPP #
module Test.QuickCheck.GenConvertible where
( c ) 2018
import Prelude ( Int, map, ($), (<$>), pure, foldl, foldr, zipWith, Maybe (..) )
import Language.Haskell.TH ( Exp (..), Type (..), Dec (..), Pat (..), Q
, mkName, newName, tupleTypeName
, Overlap (..), Clause (..), Body (..) )
import Control.Monad ( replicateM )
import Test.Expr.Internal.Compat
convertibleN :: Int -> Q Dec
convertibleN n = do
let convertible = mkName "Convertible"
convert = mkName "convert'"
fun = mkName "Fun"
as <- map VarT <$> replicateM n (newName "a")
a's <- map VarT <$> replicateM n (newName "a'")
f <- newName "f"
b <- VarT <$> newName "b"
b' <- VarT <$> newName "b'"
xs <- replicateM n (newName "x")
let ntup = ConT $ tupleTypeName n
cxt = zipWith (\x y -> (ConT convertible `AppT` x) `AppT` y) (b:as) (b':a's)
head = (ConT convertible `AppT` in_t) `AppT` out_t
#if MIN_VERSION_template_haskell(2, 18, 0)
( )
#else
( )
#endif
argtuple = TupE $ map (\x -> wrapTupElemE (VarE convert `AppE` VarE x)) xs
body = VarE convert `AppE` (VarE f `AppE` argtuple)
clause = Clause (fun_p : map VarP xs) (NormalB body) []
pure $ InstanceD (Just Overlaps) cxt head [FunD convert [clause]]
|
eb921ecff9249319912762ec4e4f19df19e1de5b221d3d9d01515a27c4b3a800 | moby/vpnkit | slirp_stack.ml | open Lwt.Infix
let src =
let src = Logs.Src.create "test" ~doc:"Test the slirp stack" in
Logs.Src.set_level src (Some Logs.Debug);
src
module Log = (val Logs.src_log src : Logs.LOG)
module Dns_policy = struct
let config_of_ips ips =
let open Dns_forward.Config in
let servers =
Server.Set.of_list (
List.map (fun (ip, _) ->
{ Server.address = { Address.ip; port = 53 };
zones = Domain.Set.empty;
timeout_ms = Some 2000; order = 0 }
) ips)
in
{ servers; search = []; assume_offline_after_drops = None }
module Config = Hostnet_dns.Config
let google_dns =
let ips = [
Ipaddr.of_string_exn "8.8.8.8", 53;
Ipaddr.of_string_exn "8.8.4.4", 53;
] in
`Upstream (config_of_ips ips)
type priority = int
module IntMap =
Map.Make(struct
type t = int let
compare (a: int) (b: int) = Stdlib.compare a b
end)
let t = ref (IntMap.add 0 google_dns IntMap.empty)
let clear () = t := (IntMap.add 0 google_dns IntMap.empty)
let config () =
snd @@ IntMap.max_binding !t
let add ~priority ~config:c =
let before = config () in
t := IntMap.add priority c (!t);
let after = config () in
if Config.compare before after <> 0
then Log.info (fun f ->
f "Add(%d): DNS configuration changed to: %s" priority
(Config.to_string after))
let remove ~priority =
let before = config () in
t := IntMap.remove priority !t;
let after = config () in
if Config.compare before after <> 0
then Log.info (fun f ->
f "Remove(%d): DNS configuration changed to: %s" priority
(Config.to_string after))
end
module VMNET = Vmnet.Make(Host.Sockets.Stream.Tcp)
module Vnet = Basic_backend.Make
module Slirp_stack =
Slirp.Make(VMNET)(Dns_policy)(Mclock)(Mirage_random_stdlib)(Vnet)
module Client = struct
module Netif = VMNET
module Ethif1 = Ethernet.Make(Netif)
module Arpv41 = Arp.Make(Ethif1)(Host.Time)
module Dhcp_client_mirage1 = Dhcp_client_mirage.Make(Mirage_random_stdlib)(Host.Time)(Netif)
module Ipv41 = Dhcp_ipv4.Make(Mirage_random_stdlib)(Mclock)(Host.Time)(Netif)(Ethif1)(Arpv41)
module Icmpv41 = struct
include Icmpv4.Make(Ipv41)
let packets = Queue.create ()
let input _ ~src ~dst buf =
match Icmpv4_packet.Unmarshal.of_cstruct buf with
| Error msg ->
Log.err (fun f -> f "Error unmarshalling ICMP message: %s" msg);
Lwt.return_unit
| Ok (reply, _) ->
let open Icmpv4_packet in
begin match reply.subheader with
| Next_hop_mtu _ | Pointer _ | Address _ | Unused ->
Log.err (fun f -> f "received an ICMP message which wasn't an echo-request or reply");
Lwt.return_unit
| Id_and_seq (id, _) ->
Log.info (fun f ->
f "ICMP src:%a dst:%a id:%d" Ipaddr.V4.pp src Ipaddr.V4.pp dst id);
Queue.push (src, dst, id) packets;
Lwt.return_unit
end
end
module Udp1 = Udp.Make(Ipv41)(Mirage_random_stdlib)
module Tcp1 = Tcp.Flow.Make(Ipv41)(Host.Time)(Mclock)(Mirage_random_stdlib)
include Tcpip_stack_direct.Make(Host.Time)
(Mirage_random_stdlib)(Netif)(Ethif1)(Arpv41)(Ipv41)(Icmpv41)(Udp1)(Tcp1)
let or_error name m =
m >>= function
| `Error _ -> Fmt.kstr failwith "Failed to connect %s device" name
| `Ok x -> Lwt.return x
type stack = {
t: t;
icmpv4: Icmpv41.t;
netif: VMNET.t;
}
let connect (interface: VMNET.t) =
Ethif1.connect interface >>= fun ethif ->
Arpv41.connect ethif >>= fun arp ->
Dhcp_client_mirage1.connect interface >>= fun _dhcp ->
Ipv41.connect interface ethif arp >>= fun ipv4 ->
Icmpv41.connect ipv4 >>= fun icmpv4 ->
Udp1.connect ipv4 >>= fun udp4 ->
Tcp1.connect ipv4 >>= fun tcp4 ->
connect interface ethif arp ipv4 icmpv4 udp4 tcp4
>>= fun t ->
Log.info (fun f -> f "Client has connected");
Lwt.return { t; icmpv4 ; netif=interface }
end
module DNS = Dns_resolver_mirage.Make(Host.Time)(Client)
let primary_dns_ip = Ipaddr.V4.of_string_exn "192.168.65.1"
let localhost_ip = Ipaddr.V4.of_string_exn "192.168.65.2"
let preferred_ip1 = Ipaddr.V4.of_string_exn "192.168.65.250"
let names_for_localhost = List.map Dns.Name.of_string [ "name1.for.localhost"; "name2.for.localhost" ]
let local_tcpv4_forwarded_port = 8888
let config =
let configuration = {
Configuration.default with
domain = Some "local";
host_names = names_for_localhost;
tcpv4_forwards = [ {
protocol = Tcp;
external_port = local_tcpv4_forwarded_port;
internal_ip = Ipaddr.V4.localhost;
internal_port = local_tcpv4_forwarded_port;
} ];
} in
let vnet = Vnet.create () in
Slirp_stack.create_static vnet configuration
This is a hacky way to get a hancle to the server side of the stack .
let slirp_stack = ref None
let slirp_stack_c = Lwt_condition.create ()
let rec get_slirp_stack () =
match !slirp_stack with
| None -> Lwt_condition.wait slirp_stack_c >>= get_slirp_stack
| Some x -> Lwt.return x
let set_slirp_stack c =
slirp_stack := Some c;
Lwt_condition.signal slirp_stack_c ()
let start_stack config () =
Host.Sockets.Stream.Tcp.bind (Ipaddr.V4 Ipaddr.V4.localhost, 0)
>>= fun server ->
Host.Sockets.Stream.Tcp.getsockname server
>|= fun (_, port) ->
Log.info (fun f -> f "Bound vpnkit server to localhost:%d" port);
Host.Sockets.Stream.Tcp.listen server (fun flow ->
Log.info (fun f -> f "Server connecting TCP/IP stack");
Slirp_stack.connect config flow >>= fun stack ->
Log.info (fun f -> f "Server connected TCP/IP stack");
set_slirp_stack stack;
Slirp_stack.after_disconnect stack >|= fun () ->
Log.info (fun f -> f "Server disconnected TCP/IP stack")
);
server, port
let stop_stack server =
Log.info (fun f -> f "Shutting down slirp stack");
Host.Sockets.Stream.Tcp.shutdown server
let pcap_dir = "./_pcap/"
let with_stack ?uuid ?preferred_ip ~pcap f =
config >>= fun config ->
start_stack config ()
>>= fun (server, port) ->
Log.info (fun f -> f "Connecting to vpnkit server on localhost:%d" port);
Host.Sockets.Stream.Tcp.connect (Ipaddr.V4 Ipaddr.V4.localhost, port)
>>= function
| Error (`Msg x) -> failwith x
| Ok flow ->
Log.info (fun f -> f "Connected to vpnkit server on localhost:%d" port);
let server_macaddr = Configuration.default_server_macaddr in
let uuid =
match uuid, Uuidm.of_string "d1d9cd61-d0dc-4715-9bb3-4c11da7ad7a5" with
| Some x, Some _ -> x
| None, Some x -> x
| _, None -> failwith "unable to parse test uuid"
in
VMNET.client_of_fd ~uuid ?preferred_ip:preferred_ip ~server_macaddr:server_macaddr flow
>>= function
| Error (`Msg x ) ->
Server will close when it gets EOF
Host.Sockets.Stream.Tcp.close flow >>= fun () ->
failwith x
| Ok client' ->
Log.info (fun f -> f "Client has established an ethernet link with the vpnkit server");
(try Unix.mkdir pcap_dir 0o0755 with Unix.Unix_error(Unix.EEXIST, _, _) -> ());
VMNET.start_capture client' (pcap_dir ^ pcap)
>>= fun () ->
Lwt.finalize (fun () ->
Log.info (fun f -> f "Client connecting TCP/IP stack");
Client.connect client' >>= fun client ->
Log.info (fun f -> f "Client connected TCP/IP stack");
get_slirp_stack () >>= fun slirp_stack ->
Log.info (fun f -> f "Calling test case with client and server stack handles");
f slirp_stack client
) (fun () ->
Server will close when it gets EOF
VMNET.disconnect client'
>>= fun () ->
stop_stack server
)
| null | https://raw.githubusercontent.com/moby/vpnkit/6039eac025e0740e530f2ff11f57d6d990d1c4a1/src/hostnet_test/slirp_stack.ml | ocaml | open Lwt.Infix
let src =
let src = Logs.Src.create "test" ~doc:"Test the slirp stack" in
Logs.Src.set_level src (Some Logs.Debug);
src
module Log = (val Logs.src_log src : Logs.LOG)
module Dns_policy = struct
let config_of_ips ips =
let open Dns_forward.Config in
let servers =
Server.Set.of_list (
List.map (fun (ip, _) ->
{ Server.address = { Address.ip; port = 53 };
zones = Domain.Set.empty;
timeout_ms = Some 2000; order = 0 }
) ips)
in
{ servers; search = []; assume_offline_after_drops = None }
module Config = Hostnet_dns.Config
let google_dns =
let ips = [
Ipaddr.of_string_exn "8.8.8.8", 53;
Ipaddr.of_string_exn "8.8.4.4", 53;
] in
`Upstream (config_of_ips ips)
type priority = int
module IntMap =
Map.Make(struct
type t = int let
compare (a: int) (b: int) = Stdlib.compare a b
end)
let t = ref (IntMap.add 0 google_dns IntMap.empty)
let clear () = t := (IntMap.add 0 google_dns IntMap.empty)
let config () =
snd @@ IntMap.max_binding !t
let add ~priority ~config:c =
let before = config () in
t := IntMap.add priority c (!t);
let after = config () in
if Config.compare before after <> 0
then Log.info (fun f ->
f "Add(%d): DNS configuration changed to: %s" priority
(Config.to_string after))
let remove ~priority =
let before = config () in
t := IntMap.remove priority !t;
let after = config () in
if Config.compare before after <> 0
then Log.info (fun f ->
f "Remove(%d): DNS configuration changed to: %s" priority
(Config.to_string after))
end
module VMNET = Vmnet.Make(Host.Sockets.Stream.Tcp)
module Vnet = Basic_backend.Make
module Slirp_stack =
Slirp.Make(VMNET)(Dns_policy)(Mclock)(Mirage_random_stdlib)(Vnet)
module Client = struct
module Netif = VMNET
module Ethif1 = Ethernet.Make(Netif)
module Arpv41 = Arp.Make(Ethif1)(Host.Time)
module Dhcp_client_mirage1 = Dhcp_client_mirage.Make(Mirage_random_stdlib)(Host.Time)(Netif)
module Ipv41 = Dhcp_ipv4.Make(Mirage_random_stdlib)(Mclock)(Host.Time)(Netif)(Ethif1)(Arpv41)
module Icmpv41 = struct
include Icmpv4.Make(Ipv41)
let packets = Queue.create ()
let input _ ~src ~dst buf =
match Icmpv4_packet.Unmarshal.of_cstruct buf with
| Error msg ->
Log.err (fun f -> f "Error unmarshalling ICMP message: %s" msg);
Lwt.return_unit
| Ok (reply, _) ->
let open Icmpv4_packet in
begin match reply.subheader with
| Next_hop_mtu _ | Pointer _ | Address _ | Unused ->
Log.err (fun f -> f "received an ICMP message which wasn't an echo-request or reply");
Lwt.return_unit
| Id_and_seq (id, _) ->
Log.info (fun f ->
f "ICMP src:%a dst:%a id:%d" Ipaddr.V4.pp src Ipaddr.V4.pp dst id);
Queue.push (src, dst, id) packets;
Lwt.return_unit
end
end
module Udp1 = Udp.Make(Ipv41)(Mirage_random_stdlib)
module Tcp1 = Tcp.Flow.Make(Ipv41)(Host.Time)(Mclock)(Mirage_random_stdlib)
include Tcpip_stack_direct.Make(Host.Time)
(Mirage_random_stdlib)(Netif)(Ethif1)(Arpv41)(Ipv41)(Icmpv41)(Udp1)(Tcp1)
let or_error name m =
m >>= function
| `Error _ -> Fmt.kstr failwith "Failed to connect %s device" name
| `Ok x -> Lwt.return x
type stack = {
t: t;
icmpv4: Icmpv41.t;
netif: VMNET.t;
}
let connect (interface: VMNET.t) =
Ethif1.connect interface >>= fun ethif ->
Arpv41.connect ethif >>= fun arp ->
Dhcp_client_mirage1.connect interface >>= fun _dhcp ->
Ipv41.connect interface ethif arp >>= fun ipv4 ->
Icmpv41.connect ipv4 >>= fun icmpv4 ->
Udp1.connect ipv4 >>= fun udp4 ->
Tcp1.connect ipv4 >>= fun tcp4 ->
connect interface ethif arp ipv4 icmpv4 udp4 tcp4
>>= fun t ->
Log.info (fun f -> f "Client has connected");
Lwt.return { t; icmpv4 ; netif=interface }
end
module DNS = Dns_resolver_mirage.Make(Host.Time)(Client)
let primary_dns_ip = Ipaddr.V4.of_string_exn "192.168.65.1"
let localhost_ip = Ipaddr.V4.of_string_exn "192.168.65.2"
let preferred_ip1 = Ipaddr.V4.of_string_exn "192.168.65.250"
let names_for_localhost = List.map Dns.Name.of_string [ "name1.for.localhost"; "name2.for.localhost" ]
let local_tcpv4_forwarded_port = 8888
let config =
let configuration = {
Configuration.default with
domain = Some "local";
host_names = names_for_localhost;
tcpv4_forwards = [ {
protocol = Tcp;
external_port = local_tcpv4_forwarded_port;
internal_ip = Ipaddr.V4.localhost;
internal_port = local_tcpv4_forwarded_port;
} ];
} in
let vnet = Vnet.create () in
Slirp_stack.create_static vnet configuration
This is a hacky way to get a hancle to the server side of the stack .
let slirp_stack = ref None
let slirp_stack_c = Lwt_condition.create ()
let rec get_slirp_stack () =
match !slirp_stack with
| None -> Lwt_condition.wait slirp_stack_c >>= get_slirp_stack
| Some x -> Lwt.return x
let set_slirp_stack c =
slirp_stack := Some c;
Lwt_condition.signal slirp_stack_c ()
let start_stack config () =
Host.Sockets.Stream.Tcp.bind (Ipaddr.V4 Ipaddr.V4.localhost, 0)
>>= fun server ->
Host.Sockets.Stream.Tcp.getsockname server
>|= fun (_, port) ->
Log.info (fun f -> f "Bound vpnkit server to localhost:%d" port);
Host.Sockets.Stream.Tcp.listen server (fun flow ->
Log.info (fun f -> f "Server connecting TCP/IP stack");
Slirp_stack.connect config flow >>= fun stack ->
Log.info (fun f -> f "Server connected TCP/IP stack");
set_slirp_stack stack;
Slirp_stack.after_disconnect stack >|= fun () ->
Log.info (fun f -> f "Server disconnected TCP/IP stack")
);
server, port
let stop_stack server =
Log.info (fun f -> f "Shutting down slirp stack");
Host.Sockets.Stream.Tcp.shutdown server
let pcap_dir = "./_pcap/"
let with_stack ?uuid ?preferred_ip ~pcap f =
config >>= fun config ->
start_stack config ()
>>= fun (server, port) ->
Log.info (fun f -> f "Connecting to vpnkit server on localhost:%d" port);
Host.Sockets.Stream.Tcp.connect (Ipaddr.V4 Ipaddr.V4.localhost, port)
>>= function
| Error (`Msg x) -> failwith x
| Ok flow ->
Log.info (fun f -> f "Connected to vpnkit server on localhost:%d" port);
let server_macaddr = Configuration.default_server_macaddr in
let uuid =
match uuid, Uuidm.of_string "d1d9cd61-d0dc-4715-9bb3-4c11da7ad7a5" with
| Some x, Some _ -> x
| None, Some x -> x
| _, None -> failwith "unable to parse test uuid"
in
VMNET.client_of_fd ~uuid ?preferred_ip:preferred_ip ~server_macaddr:server_macaddr flow
>>= function
| Error (`Msg x ) ->
Server will close when it gets EOF
Host.Sockets.Stream.Tcp.close flow >>= fun () ->
failwith x
| Ok client' ->
Log.info (fun f -> f "Client has established an ethernet link with the vpnkit server");
(try Unix.mkdir pcap_dir 0o0755 with Unix.Unix_error(Unix.EEXIST, _, _) -> ());
VMNET.start_capture client' (pcap_dir ^ pcap)
>>= fun () ->
Lwt.finalize (fun () ->
Log.info (fun f -> f "Client connecting TCP/IP stack");
Client.connect client' >>= fun client ->
Log.info (fun f -> f "Client connected TCP/IP stack");
get_slirp_stack () >>= fun slirp_stack ->
Log.info (fun f -> f "Calling test case with client and server stack handles");
f slirp_stack client
) (fun () ->
Server will close when it gets EOF
VMNET.disconnect client'
>>= fun () ->
stop_stack server
)
| |
40d49cdabe9d3ca30b41cb06c684e7651095f75c4464c53be9143449d8633b7d | skanev/playground | 82-tests.scm | (require rackunit rackunit/text-ui)
(load "../82.scm")
(define sicp-3.82-tests
(test-suite
"Tests for SICP exercise 3.82"
(check-= (estimate-pi 20000) 3.14 0.01)
))
(run-tests sicp-3.82-tests)
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/03/tests/82-tests.scm | scheme | (require rackunit rackunit/text-ui)
(load "../82.scm")
(define sicp-3.82-tests
(test-suite
"Tests for SICP exercise 3.82"
(check-= (estimate-pi 20000) 3.14 0.01)
))
(run-tests sicp-3.82-tests)
| |
db2f01335cf30dbba8263e3b4c927700415ba240fb708ee674356e62f97f6dc5 | ocaml/ocaml-lsp | lev_fiber.ml | open Stdune
open Fiber.O
open Lev_fiber_util
module Timestamp = Lev.Timestamp
module Signal_watcher = struct
type t = {
thread : Thread.t;
old_sigmask : int list;
old_sigpipe : Sys.signal_behavior option;
old_sigchld : Sys.signal_behavior;
sigchld_watcher : Lev.Async.t;
}
let stop_sig = Sys.sigusr2
let blocked_signals =
[ Sys.sigchld; stop_sig ] |> List.sort ~compare:Int.compare
let stop t =
Unix.kill (Unix.getpid ()) stop_sig;
Thread.join t.thread;
let used_mask =
Unix.sigprocmask SIG_SETMASK t.old_sigmask
|> List.sort ~compare:Int.compare
in
Option.iter t.old_sigpipe ~f:(Sys.set_signal Sys.sigpipe);
Sys.set_signal Sys.sigchld t.old_sigchld;
if used_mask <> blocked_signals then
Code_error.raise "cannot restore old sigmask"
[
("stop_sig", Dyn.int stop_sig);
("sigchld", Dyn.int stop_sig);
("used_mask", Dyn.(list int) used_mask);
("old_sigmask", Dyn.(list int) t.old_sigmask);
("blocked_signals", Dyn.(list int) blocked_signals);
]
let run (watcher, loop) =
while true do
let signal = Thread.wait_signal blocked_signals in
if signal = Sys.sigusr2 then raise_notrace Thread.Exit
else Lev.Async.send watcher loop
done
let create ~sigpipe ~sigchld_watcher ~loop =
let old_sigpipe =
match sigpipe with
| `Inherit -> None
| `Ignore -> Some (Sys.signal Sys.sigpipe Sys.Signal_ignore)
in
let old_sigchld =
Sys.signal Sys.sigchld (Sys.Signal_handle (fun (_ : int) -> ()))
in
let old_sigmask = Unix.sigprocmask SIG_BLOCK blocked_signals in
let thread = Thread.create run (sigchld_watcher, loop) in
{ thread; old_sigmask; old_sigchld; old_sigpipe; sigchld_watcher }
end
module Process_watcher = struct
module Process_table = struct
type process = { pid : Pid.t; ivar : Unix.process_status Fiber.Ivar.t }
type t = { loop : Lev.Loop.t; active : (Pid.t, process) Table.t }
let create loop = { loop; active = Table.create (module Pid) 16 }
let spawn t pid =
Lev.Loop.ref t.loop;
let ivar = Fiber.Ivar.create () in
let process = { pid; ivar } in
Table.add_exn t.active pid process;
ivar
let is_empty t = Table.length t.active = 0
let reap t queue =
Table.filteri_inplace t.active ~f:(fun ~key:pid ~data:process ->
let pid, status = Unix.waitpid [ WNOHANG ] (Pid.to_int pid) in
match pid with
| 0 -> true
| _ ->
Lev.Loop.unref t.loop;
Queue.push queue (Fiber.Fill (process.ivar, status));
false)
end
type watcher = Signal of Lev.Async.t | Poll of Lev.Timer.t
type t = { loop : Lev.Loop.t; table : Process_table.t; watcher : watcher }
let create loop queue =
let table = Process_table.create loop in
let watcher =
if Sys.win32 then
let reap timer =
Process_table.reap table queue;
if Process_table.is_empty table then Lev.Timer.stop timer loop
in
let watcher = Lev.Timer.create ~repeat:0.05 ~after:0.05 reap in
Poll watcher
else
let reap (_ : Lev.Async.t) = Process_table.reap table queue in
let watcher = Lev.Async.create reap in
Lev.Async.start watcher loop;
Lev.Loop.unref loop;
Signal watcher
in
{ table; watcher; loop }
let ensure_started t =
match t.watcher with
| Signal _ -> ()
| Poll s -> if not (Lev.Timer.is_active s) then Lev.Timer.start s t.loop
let waitpid t ~pid =
ensure_started t;
Process_table.spawn t.table pid
let cleanup t =
(* XXX shall we kill the running processes here? *)
match t.watcher with
| Poll s ->
Lev.Timer.stop s t.loop;
Lev.Timer.destroy s
| Signal s ->
Lev.Async.stop s t.loop;
Lev.Async.destroy s
end
type thread_job_status = Active | Complete | Cancelled
type thread_job = { status : thread_job_status ref; ivar : Fiber.fill }
type worker = Worker : 'a Worker.t -> worker
type t = {
loop : Lev.Loop.t;
queue : Fiber.fill Queue.t;
TODO stop when there are no threads
async : Lev.Async.t;
thread_jobs : thread_job Queue.t;
thread_mutex : Mutex.t;
process_watcher : Process_watcher.t;
signal_watcher : Signal_watcher.t option (* [None] on windows *);
mutable thread_workers : worker list;
}
type scheduler = t
let t : t Fiber.Var.t = Fiber.Var.create ()
let t_var = t
let scheduler = t
module Buffer = struct
include Bip_buffer
let default_size = 4096
type nonrec t = bytes t
let create ~size : t = create (Stdlib.Bytes.create size) ~len:size
end
module State = struct
type ('a, 'b) t' = Open of 'a | Closed of 'b
type ('a, 'b) t = ('a, 'b) t' ref
let create a = ref (Open a)
end
module Thread = struct
type job =
| Job : {
run : unit -> 'a;
status : thread_job_status ref;
ivar :
('a, [ `Exn of Exn_with_backtrace.t | `Cancelled ]) result
Fiber.Ivar.t;
}
-> job
type nonrec t = { worker : job Worker.t; scheduler : t }
let create =
let finish_job t fill =
Mutex.lock t.thread_mutex;
Queue.push t.thread_jobs fill;
Mutex.unlock t.thread_mutex;
Lev.Async.send t.async t.loop
in
fun () ->
let+ t = Fiber.Var.get_exn t in
let do_no_raise (Job { run; status; ivar }) =
let res =
match Exn_with_backtrace.try_with run with
| Ok x -> Ok x
| Error exn -> Error (`Exn exn)
in
finish_job t { status; ivar = Fiber.Fill (ivar, res) }
in
let worker =
Worker.create ~do_no_raise ~spawn_thread:(fun f -> Thread.create f ())
in
t.thread_workers <- Worker worker :: t.thread_workers;
{ worker; scheduler = t }
type 'a task = {
ivar :
('a, [ `Exn of Exn_with_backtrace.t | `Cancelled ]) result Fiber.Ivar.t;
task : Worker.task;
status : thread_job_status ref;
loop : Lev.Loop.t;
}
let task (t : t) ~f =
let ivar = Fiber.Ivar.create () in
let status = ref Active in
match Worker.add_work t.worker (Job { run = f; status; ivar }) with
| Error `Stopped -> Error `Stopped
| Ok task ->
Lev.Loop.ref t.scheduler.loop;
Ok { ivar; task; status; loop = t.scheduler.loop }
let await task = Fiber.Ivar.read task.ivar
let cancel task =
match !(task.status) with
| Cancelled | Complete -> Fiber.return ()
| Active ->
Lev.Loop.unref task.loop;
task.status := Cancelled;
Worker.cancel_if_not_consumed task.task;
Fiber.Ivar.fill task.ivar (Error `Cancelled)
let close t =
t.scheduler.thread_workers <-
(let id = Worker.id t.worker in
List.filter t.scheduler.thread_workers ~f:(fun (Worker w) ->
let id' = Worker.id w in
not (Worker.Id.equal id id')));
Worker.complete_tasks_and_stop t.worker
end
module Timer = struct
let sleepf after =
let* t = Fiber.Var.get_exn t in
let ivar = Fiber.Ivar.create () in
let timer =
Lev.Timer.create ~after (fun timer ->
Lev.Timer.stop timer t.loop;
Lev.Timer.destroy timer;
Queue.push t.queue (Fiber.Fill (ivar, ())))
in
Lev.Timer.start timer t.loop;
Fiber.Ivar.read ivar
(* A timer wheel: many pending tasks that all share one configurable [delay].
   Tasks expire in FIFO order of (re)scheduling, so a single libev timer,
   armed for the head of the queue, suffices for any number of tasks. *)
module Wheel = struct
  (* What the fiber executing [run] is currently doing. *)
  type running_state =
    | Idle
    | Sleeping of Lev.Timer.t * unit Fiber.Ivar.t
      (* [run] armed a timer for the head task and is awaiting the ivar *)
    (* set whenever the wheel is waiting for a new task *)
    | Waiting of { ivar : unit Fiber.Ivar.t; filled : bool }

  (* One scheduled task. *)
  type elt = {
    ivar : [ `Ok | `Cancelled ] Fiber.Ivar.t;  (* filled on expiry/cancel *)
    scheduled : Lev.Timestamp.t;  (* loop time when (re)scheduled *)
    mutable filled : bool;  (* guards against filling [ivar] twice *)
    wheel : t;
  }

  and running = {
    queue : elt Removable_queue.t;  (* FIFO: head expires first *)
    delay : float;
    scheduler : scheduler;
    mutable state : running_state;
  }

  and state = Stopped of { delay : float } | Running of running
  and t = state ref

  let delay t =
    match !t with Stopped { delay } -> delay | Running { delay; _ } -> delay

  let create ~delay =
    let+ scheduler = Fiber.Var.get_exn t in
    ref
      (Running
         { queue = Removable_queue.create (); delay; scheduler; state = Idle })

  type task = elt Removable_queue.node ref
  type condition = { sleeping : bool; waiting : bool }

  (* Wake the fiber running [run] if its current state matches [condition]. *)
  let wakeup_if t { sleeping; waiting } =
    match t.state with
    | Sleeping (timer, ivar) when sleeping ->
        let* { loop; _ } = Fiber.Var.get_exn scheduler in
        Lev.Timer.stop timer loop;
        Lev.Timer.destroy timer;
        t.state <- Idle;
        Fiber.Ivar.fill ivar ()
    | Waiting { ivar; filled = false } when waiting ->
        t.state <- Idle;
        Fiber.Ivar.fill ivar ()
    | _ -> Fiber.return ()

  let set_delay t ~delay =
    match !t with
    | Stopped _ -> Code_error.raise "Wheel.set_delay" []
    | Running d ->
        t := Running { d with delay };
        (* any armed timer now has a stale deadline; force [run] to re-arm *)
        wakeup_if d { sleeping = true; waiting = false }

  let task (t : t) : task Fiber.t =
    Fiber.of_thunk (fun () ->
        match !t with
        | Stopped _ -> Code_error.raise "Wheel.task" []
        | Running wheel ->
            let now = Lev.Loop.now wheel.scheduler.loop in
            let data =
              {
                wheel = t;
                ivar = Fiber.Ivar.create ();
                scheduled = now;
                filled = false;
              }
            in
            let res = Removable_queue.push wheel.queue data in
            (* [run] may be blocked waiting for a first task *)
            let+ () = wakeup_if wheel { waiting = true; sleeping = false } in
            ref res)

  (* Re-schedule [task] as if it had just been created. *)
  let reset (task : task) =
    Fiber.of_thunk (fun () ->
        let task' = Removable_queue.data !task in
        match !(task'.wheel) with
        | Stopped _ -> Code_error.raise "reset: wheel is stopped" []
        | Running wheel ->
            Removable_queue.remove !task;
            let now = Lev.Loop.now wheel.scheduler.loop in
            let filled = task'.filled in
            let task' =
              let task' = { task' with scheduled = now } in
              if filled then (
                (* already expired/cancelled: needs a fresh ivar *)
                task'.filled <- false;
                { task' with ivar = Fiber.Ivar.create () })
              else task'
            in
            let new_task = Removable_queue.push wheel.queue task' in
            task := new_task;
            if filled then
              wakeup_if wheel { sleeping = false; waiting = true }
            else Fiber.return ())

  let await (task : task) =
    Fiber.of_thunk (fun () ->
        let task = Removable_queue.data !task in
        Fiber.Ivar.read task.ivar)

  let cancel (node : task) =
    Fiber.of_thunk (fun () ->
        let task = Removable_queue.data !node in
        if task.filled then Fiber.return ()
        else (
          task.filled <- true;
          Removable_queue.remove !node;
          Fiber.Ivar.fill task.ivar `Cancelled))

  (* Main loop: expire the head task if due, otherwise sleep until it is
     due (or until woken by [task]/[set_delay]/[stop]). *)
  let rec run t =
    (* TODO do not allow double [run] *)
    match !t with
    | Stopped _ -> Fiber.return ()
    | Running r -> (
        match Removable_queue.peek r.queue with
        | None ->
            (* queue empty: block until [task] pushes something *)
            let ivar = Fiber.Ivar.create () in
            r.state <- Waiting { ivar; filled = false };
            let* () = Fiber.Ivar.read ivar in
            run t
        | Some node ->
            let task = Removable_queue.data node in
            let after =
              let now = Timestamp.to_float (Lev.Loop.now r.scheduler.loop) in
              let scheduled = Timestamp.to_float task.scheduled in
              scheduled -. now +. r.delay
            in
            let expired = after < 0. in
            let* () =
              if expired then (
                Removable_queue.remove node;
                if not task.filled then (
                  task.filled <- true;
                  Queue.push r.scheduler.queue (Fiber.Fill (task.ivar, `Ok)));
                Fiber.return ())
              else
                (* head not due yet: arm a one-shot timer and sleep *)
                let scheduler = r.scheduler in
                let ivar = Fiber.Ivar.create () in
                let timer =
                  Lev.Timer.create ~after (fun timer ->
                      (* TODO reuse timer *)
                      Lev.Timer.destroy timer;
                      Queue.push scheduler.queue (Fiber.Fill (ivar, ())))
                in
                r.state <- Sleeping (timer, ivar);
                Lev.Timer.start timer scheduler.loop;
                Fiber.Ivar.read ivar
            in
            run t)

  let run t = Fiber.of_thunk (fun () -> run t)

  (* Stop the wheel: cancel every pending task and wake [run] so it can
     observe the [Stopped] state and terminate. *)
  let stop =
    let rec cancel_all r =
      match Removable_queue.pop r.queue with
      | None -> Fiber.return ()
      | Some task ->
          let* () =
            if task.filled then Fiber.return ()
            else (
              task.filled <- true;
              Fiber.Ivar.fill task.ivar `Cancelled)
          in
          cancel_all r
    in
    fun t ->
      Fiber.of_thunk (fun () ->
          match !t with
          | Stopped _ -> Fiber.return ()
          | Running r ->
              t := Stopped { delay = r.delay };
              let* () = cancel_all r in
              wakeup_if r { sleeping = true; waiting = true })
end
end
(* Suspend the current fiber until process [pid] exits; resumes with its
   [Unix.process_status]. *)
let waitpid ~pid =
  let* scheduler = Fiber.Var.get_exn t in
  let pid = Pid.of_int pid in
  Fiber.Ivar.read (Process_watcher.waitpid scheduler.process_watcher ~pid)
(* Suspend the current fiber until [signal] is delivered once; the watcher
   is stopped and destroyed after the first delivery. *)
let signal ~signal =
  let* { loop; queue; _ } = Fiber.Var.get_exn t in
  let ivar = Fiber.Ivar.create () in
  let watcher =
    Lev.Signal.create ~signal (fun w ->
        Queue.push queue (Fiber.Fill (ivar, ()));
        Lev.Signal.stop w loop;
        Lev.Signal.destroy w)
  in
  Lev.Signal.start watcher loop;
  Fiber.Ivar.read ivar
(* Thin wrapper over [Unix.file_descr] tracking whether the descriptor has
   been closed and whether it is (meant to be) in non-blocking mode. *)
module Fd = struct
  (* For [Non_blocking], [set] records whether [Unix.set_nonblock] has
     actually been called yet. *)
  type kind = Blocking | Non_blocking of { mutable set : bool }

  type t = { fd : Unix.file_descr; kind : kind; mutable closed : bool }

  (* Raw descriptor; raises [Unix.EBADF] after [close]. *)
  let fd_exn t =
    match t.closed with
    | true -> raise (Unix.Unix_error (Unix.EBADF, "closed fd", ""))
    | false -> t.fd

  (* Idempotent close. *)
  let close t =
    match t.closed with
    | true -> ()
    | false ->
        t.closed <- true;
        Unix.close t.fd

  let create' fd kind = { fd; kind; closed = false }

  let create fd kind =
    let kind =
      match kind with
      | `Blocking -> Blocking
      | `Non_blocking set -> Non_blocking { set }
    in
    create' fd kind

  (* Lazily switch the descriptor to non-blocking mode (no-op for
     [Blocking] fds or when already done). *)
  let set_nonblock t =
    assert (not t.closed);
    match t.kind with
    | Blocking -> ()
    | Non_blocking nb when nb.set -> ()
    | Non_blocking nb ->
        Unix.set_nonblock t.fd;
        nb.set <- true

  (* On Windows the pipe ends are left in blocking mode; elsewhere both
     ends are made non-blocking immediately. *)
  let pipe =
    if Sys.win32 then fun ?cloexec () ->
      let r, w = Unix.pipe ?cloexec () in
      ( { fd = r; kind = Blocking; closed = false },
        { fd = w; kind = Blocking; closed = false } )
    else fun ?cloexec () ->
      let r, w = Unix.pipe ?cloexec () in
      Unix.set_nonblock r;
      Unix.set_nonblock w;
      ( { fd = r; kind = Non_blocking { set = true }; closed = false },
        { fd = w; kind = Non_blocking { set = true }; closed = false } )
end
(* Event-loop registration for a non-blocking [Fd.t].  Keeps the set of
   watched events in sync with the fibers currently waiting for readiness. *)
module Lev_fd = struct
  module Event = Lev.Io.Event

  type open_ = {
    io : Lev.Io.t;
    fd : Fd.t;
    scheduler : scheduler;
    mutable events : Event.Set.t;  (* events currently being watched *)
    read : [ `Ready | `Closed ] Fiber.Ivar.t Queue.t;  (* waiting readers *)
    write : [ `Ready | `Closed ] Fiber.Ivar.t Queue.t;  (* waiting writers *)
  }

  type state = Open of open_ | Closed of Fd.t
  type t = state ref

  (* Change the watched event set; libev requires stop/modify/start. *)
  let reset nb new_set =
    nb.events <- new_set;
    Lev.Io.stop nb.io nb.scheduler.loop;
    Lev.Io.modify nb.io nb.events;
    Lev.Io.start nb.io nb.scheduler.loop

  (* Wait until [t] is ready for [what], or report [`Closed]. *)
  let await t (what : Lev.Io.Event.t) =
    let* () = Fiber.return () in
    match !t with
    | Closed _ -> Fiber.return `Closed
    | Open t ->
        if t.fd.closed then Fiber.return `Closed
        else (
          (* subscribe to [what] if we are not watching it yet *)
          if not (Event.Set.mem t.events what) then
            reset t (Event.Set.add t.events what);
          let ivar = Fiber.Ivar.create () in
          let q = match what with Write -> t.write | Read -> t.read in
          Queue.push q ivar;
          let+ res = Fiber.Ivar.read ivar in
          match res with
          | `Closed -> `Closed
          | `Ready ->
              assert (not t.fd.closed);
              `Ready t.fd)

  (* Fill every pending waiter in [q] with [`Closed]. *)
  let rec close_queue ivar_queue q =
    match Queue.pop q with
    | None -> ()
    | Some ivar ->
        Queue.push ivar_queue (Fiber.Fill (ivar, `Closed));
        close_queue ivar_queue q

  (* Tear down the registration, close the fd, and release all waiters.
     Returns the (now closed) fd.  Idempotent. *)
  let close (t : t) =
    match !t with
    | Closed fd -> fd
    | Open { io; scheduler; fd; read; write; events = _ } ->
        t := Closed fd;
        Lev.Io.stop io scheduler.loop;
        Lev.Io.destroy io;
        Fd.close fd;
        close_queue scheduler.queue read;
        close_queue scheduler.queue write;
        fd

  (* libev callback: wake one waiter per ready event, and stop watching any
     event that no fiber is waiting for anymore. *)
  let make_cb t scheduler _ _ set =
    match !(Fdecl.get t) with
    | Closed _ -> ()
    | Open nb ->
        let keep_read = ref true in
        let keep_write = ref true in
        (if Lev.Io.Event.Set.mem set Read then
         match Queue.pop nb.read with
         | Some ivar -> Queue.push scheduler.queue (Fiber.Fill (ivar, `Ready))
         | None -> keep_read := false);
        (if Lev.Io.Event.Set.mem set Write then
         match Queue.pop nb.write with
         | Some ivar -> Queue.push scheduler.queue (Fiber.Fill (ivar, `Ready))
         | None -> keep_write := false);
        let new_set =
          Event.Set.inter nb.events
            (Event.Set.create ~read:!keep_read ~write:!keep_write ())
        in
        if not (Event.Set.equal new_set nb.events) then reset nb new_set

  let create (fd : Fd.t) : t Fiber.t =
    if fd.closed then Code_error.raise "create: fd is closed" [];
    let+ scheduler = Fiber.Var.get_exn scheduler in
    (* [Fdecl] breaks the cycle between the callback and the state ref *)
    let t : t Fdecl.t = Fdecl.create Dyn.opaque in
    let events = Event.Set.create () in
    let io = Lev.Io.create (make_cb t scheduler) fd.fd events in
    let read = Queue.create () in
    let write = Queue.create () in
    Fdecl.set t (ref (Open { events; fd; scheduler; io; read; write }));
    Lev.Io.start io scheduler.loop;
    Fdecl.get t
end
(* Buffered, fiber-friendly I/O channels over an [Fd.t]. *)
module Io = struct
  (* Capture callstacks for diagnostics only when LEV_DEBUG is set
     (capturing has a cost). *)
  let callstack =
    match Sys.getenv_opt "LEV_DEBUG" with
    | None -> fun () -> None
    | Some _ -> fun () -> Some (Printexc.get_callstack 15)

  (* Phantom tags marking a channel as readable or writable. *)
  type input = Input
  type output = Output
  type 'a mode = Input : input mode | Output : output mode

  module Slice = Buffer.Slice

  type _ kind =
    | Write : { mutable flush_counter : int } -> output kind
    | Read : { mutable eof : bool } -> input kind

  (* A blocking fd is driven by a dedicated worker thread; a non-blocking
     one is driven by the event loop via [Lev_fd]. *)
  type fd = Blocking of Thread.t * Fd.t | Non_blocking of Lev_fd.t
(* Run [f fd] once [fd] is ready for [kind]: on the event loop for
   non-blocking fds, or on the fd's worker thread for blocking ones.
   Exceptions from [f] are reflected as [Error (`Exn _)]. *)
let with_ fd (kind : Lev.Io.Event.t) ~f =
  let* () = Fiber.return () in
  match fd with
  | Non_blocking lev_fd -> (
      let+ event = Lev_fd.await lev_fd kind in
      match event with
      | `Closed -> Error `Eof
      | `Ready fd -> (
          match f fd with exception exn -> Error (`Exn exn) | s -> Ok s))
  | Blocking (th, fd) -> (
      let task =
        match Thread.task th ~f:(fun () -> f fd) with
        | Error `Stopped -> Code_error.raise "already stopped" []
        | Ok task -> task
      in
      let+ res = Thread.await task in
      match res with
      | Ok _ as s -> s
      | Error `Cancelled -> assert false (* we never cancel these tasks *)
      | Error (`Exn exn) -> Error (`Exn exn.exn))
(* [Busy] means some fiber is inside [with_] on this channel; the captured
   callstack (when LEV_DEBUG is set) identifies it in error reports. *)
type activity = Idle | Busy of Printexc.raw_backtrace option

type 'a open_ = {
  mutable buffer : Buffer.t;  (* bip-buffer backing the channel *)
  kind : 'a kind;
  fd : fd;
  mutable activity : activity;
  source : Printexc.raw_backtrace option;  (* where the channel was created *)
}

(* A channel is either open, or closed — in which case the fd and the
   creation site are kept for error messages. *)
type 'a t = ('a open_, Fd.t * Printexc.raw_backtrace option) State.t
(* Underlying [Fd.t] of a channel, regardless of its state. *)
let fd (t : _ t) =
  match !t with
  | Closed (fd, _) -> fd
  | Open { fd = Blocking (_, fd); _ } -> fd
  | Open { fd = Non_blocking fd; _ } -> (
      match !fd with Closed fd -> fd | Open f -> f.fd)
(* Call [k] with room reserved for [len] bytes in [t.buffer].  On failure,
   escalate through [reserve_fail]: first compress the buffer, then grow
   it; after a grow the reservation must succeed ([`Fail] is unreachable). *)
let rec with_resize_buffer t ~len reserve_fail k =
  match Buffer.reserve t.buffer ~len with
  | Some dst_pos -> k t ~len ~dst_pos
  | None -> (
      match reserve_fail with
      | `Compress ->
          if Buffer.unused_space t.buffer >= len then
            Buffer.Bytes.compress t.buffer;
          with_resize_buffer t ~len `Resize k
      | `Resize ->
          (* note: retries with the enlarged length *)
          let len = Buffer.length t.buffer + len in
          Buffer.Bytes.resize t.buffer ~len;
          with_resize_buffer t ~len `Fail k
      | `Fail -> assert false)
(* Write half of a channel. *)
module Writer = struct
  type nonrec t = output open_

  module Expert = struct
    let available t = Buffer.max_available t.buffer

    (* Reserve [len] writable bytes; returns the backing bytes and the
       slice to fill.  Must be followed by [commit]. *)
    let prepare =
      let k t ~len ~dst_pos:pos =
        let buf = Buffer.buffer t.buffer in
        (buf, { Slice.pos; len })
      in
      fun t ~len -> with_resize_buffer t ~len `Compress k

    let commit t ~len = Buffer.commit t.buffer ~len
  end

  (* Write buffered bytes to the fd until everything that was buffered at
     the moment [flush] was called has been written.  [flush_counter]
     counts total bytes ever flushed, so the target is a simple counter
     comparison. *)
  let flush =
    let rec loop t stop_count =
      (* TODO fix overflow issues *)
      if
        (match t.kind with Write { flush_counter } -> flush_counter)
        >= stop_count
      then Fiber.return ()
      else
        let* res =
          with_ t.fd Write ~f:(fun fd ->
              match Buffer.peek t.buffer with
              | None -> ()
              | Some { Slice.pos; len } -> (
                  let buffer = Buffer.buffer t.buffer in
                  let len = Unix.single_write fd.fd buffer pos len in
                  Buffer.junk t.buffer ~len;
                  match t.kind with
                  | Write t -> t.flush_counter <- t.flush_counter + len))
        in
        match res with
        | Ok () -> loop t stop_count
        | Error (`Exn (Unix.Unix_error (Unix.EAGAIN, _, _))) ->
            (* spurious readiness: just retry *)
            loop t stop_count
        | Error (`Exn (Unix.Unix_error (EPIPE, _, _))) | Error `Eof ->
            (* peer vanished with data still buffered: report loudly *)
            let args =
              [
                ("remaining", Dyn.int stop_count);
                ( "contents",
                  Dyn.string (Format.asprintf "%a@." Buffer.Bytes.pp t.buffer)
                );
              ]
            in
            let args =
              match t.source with
              | None -> args
              | Some source ->
                  ( "source",
                    Dyn.string @@ Printexc.raw_backtrace_to_string source )
                  :: args
            in
            Code_error.raise "fd closed unflushed" args
        | Error (`Exn exn) -> reraise exn
    in
    fun t ->
      Fiber.of_thunk (fun () ->
          let stop_count =
            match t.kind with
            | Write { flush_counter } ->
                flush_counter + Buffer.length t.buffer
          in
          loop t stop_count)

  let add_substring t str ~pos ~len =
    Buffer.Bytes.Writer.add_substring t.buffer str ~pos ~len

  let add_string t str = Buffer.Bytes.Writer.add_string t.buffer str
end
(* Allocate the channel state shared by [create] and [create_rw]. *)
let create_gen (type a) ~source fd (mode : a mode) =
  let buffer = Buffer.create ~size:Buffer.default_size in
  let kind : a kind =
    match mode with
    | Input -> Read { eof = false }
    | Output -> Write { flush_counter = 0 }
  in
  State.create { buffer; fd; kind; activity = Idle; source }
(* Wrap [fd] in a channel.  A non-blocking fd is registered with the event
   loop; a blocking fd gets a dedicated worker thread. *)
let create (type a) (fd : Fd.t) (mode : a mode) =
  let source = callstack () in
  match fd.kind with
  | Non_blocking _ ->
      let+ fd = Lev_fd.create fd in
      create_gen ~source (Non_blocking fd) mode
  | Blocking ->
      let+ thread = Thread.create () in
      create_gen ~source (Blocking (thread, fd)) mode
(* Create both a reader and a writer for one fd.  A non-blocking fd shares
   a single event-loop registration between the two channels; a blocking
   fd gets one worker thread per direction. *)
let create_rw (fd : Fd.t) : (input t * output t) Fiber.t =
  let source = callstack () in
  match fd.kind with
  | Non_blocking _ ->
      let+ fd =
        let+ fd = Lev_fd.create fd in
        Non_blocking fd
      in
      let r = create_gen ~source fd Input in
      let w = create_gen ~source fd Output in
      (r, w)
  | Blocking ->
      let* r =
        let+ thread = Thread.create () in
        create_gen ~source (Blocking (thread, fd)) Input
      in
      let+ w =
        let+ thread = Thread.create () in
        create_gen ~source (Blocking (thread, fd)) Output
      in
      (r, w)
(* Close a channel: stop the event-loop registration or worker thread,
   close the fd, and transition to [Closed] (remembering the fd and
   creation site for later error messages).  Idempotent.

   Fix: the [Write _] arm of the match on [o.kind] was lost (only a stray
   "TODO" remained), leaving the match syntactically broken; restore it as
   a no-op, which preserves the reader-side [eof] marking behavior. *)
let close =
  let close_fd t =
    match t with
    | Non_blocking fd -> Lev_fd.close fd
    | Blocking (th, fd) ->
        Thread.close th;
        Fd.close fd;
        fd
  in
  fun (type a) (t : a t) ->
    match !t with
    | State.Closed _ -> ()
    | Open o ->
        (match (o.kind : _ kind) with
        | Read r -> r.eof <- true
        | Write _ -> () (* TODO *));
        let fd = close_fd o.fd in
        t := Closed (fd, o.source)
(* Read half of a channel. *)
module Reader = struct
  type t = input open_

  exception Unavailable

  module Expert = struct
    (* Contiguous readable slice of the buffer; raises [Unavailable] when
       nothing is buffered. *)
    let buffer t =
      match Buffer.peek t.buffer with
      | None -> raise Unavailable
      | Some { Buffer.Slice.pos; len } ->
          let b = Buffer.buffer t.buffer in
          (b, { Slice.pos; len })

    let consume (t : t) ~len = Buffer.junk t.buffer ~len

    (* [`Ok n] with [n] buffered bytes (possibly 0), or [`Eof] once the
       buffer is drained and end-of-file was recorded. *)
    let available t =
      let eof = match t.kind with Read { eof } -> eof in
      let available = Buffer.length t.buffer in
      if available = 0 && eof then `Eof else `Ok available

    (* One read from the fd into the buffer (retried on EAGAIN); a 0-byte
       read records EOF. *)
    let refill =
      let rec read t ~len ~dst_pos =
        let buffer = Buffer.buffer t.buffer in
        let* res =
          with_ t.fd Read ~f:(fun fd -> Unix.read fd.fd buffer 0 len)
        in
        match res with
        | Error (`Exn (Unix.Unix_error (Unix.EAGAIN, _, _))) ->
            read t ~len ~dst_pos
        | Error `Eof | Ok 0 ->
            (match t.kind with Read b -> b.eof <- true);
            Buffer.commit t.buffer ~len:0;
            Fiber.return ()
        | Ok len ->
            Buffer.commit t.buffer ~len;
            Fiber.return ()
        | Error (`Exn exn) -> reraise exn
      in
      fun ?(size = Buffer.default_size) (t : t) ->
        with_resize_buffer t ~len:size `Compress read
  end

  (* Used with [raise_notrace] as a local non-escaping exit. *)
  exception Found of int

  (* Pop one buffered byte; [Expert.buffer] raises if none is available. *)
  let read_char_exn t =
    let b, { Buffer.Slice.pos; len } = Expert.buffer t in
    assert (len > 0);
    let res = Bytes.get b pos in
    Expert.consume t ~len:1;
    res

  (* Read one '\n'-terminated line, dropping the newline and any trailing
     '\r'.  [Error (`Partial_eof s)] when EOF hits mid-line. *)
  let read_line =
    let contents buf =
      let module Buffer = Stdlib.Buffer in
      let len = Buffer.length buf in
      if len = 0 then ""
      else if Buffer.nth buf (len - 1) = '\r' then Buffer.sub buf 0 (len - 1)
      else Buffer.contents buf
    in
    (* index of the first '\n' in [b.(pos .. pos+len-1)], if any *)
    let find_nl b pos len =
      try
        for i = pos to pos + len - 1 do
          if Bytes.get b i = '\n' then raise_notrace (Found i)
        done;
        None
      with Found i -> Some i
    in
    (* slow path: accumulate chunks into [buf] until a newline shows up *)
    let rec loop t buf =
      match Expert.available t with
      | `Eof ->
          Fiber.return (Error (`Partial_eof (Stdlib.Buffer.contents buf)))
      | `Ok 0 ->
          let* () = Expert.refill t in
          loop t buf
      | `Ok _ -> (
          let b, { Slice.pos; len } = Expert.buffer t in
          match find_nl b pos len with
          | Some i ->
              let len = i - pos in
              Stdlib.Buffer.add_subbytes buf b pos len;
              Buffer.junk t.buffer ~len:(len + 1);
              Fiber.return (Ok (contents buf))
          | None ->
              Stdlib.Buffer.add_subbytes buf b pos len;
              Buffer.junk t.buffer ~len;
              loop t buf)
    in
    let rec self t =
      (* we can always call loop, but we do a little optimization to see if we can
         read the line without an extra copy
      *)
      match Expert.available t with
      | `Eof -> Fiber.return (Error (`Partial_eof ""))
      | `Ok 0 ->
          let* () = Expert.refill t in
          self t
      | `Ok _ -> (
          let b, { Slice.pos; len } = Expert.buffer t in
          match find_nl b pos len with
          | Some i ->
              (* whole line is already contiguous: copy it out once *)
              let len = i - pos in
              let res =
                let len =
                  if len > 0 && Bytes.get b (i - 1) = '\r' then len - 1
                  else len
                in
                Bytes.sub b ~pos ~len
              in
              Buffer.junk t.buffer ~len:(len + 1);
              Fiber.return (Ok (Bytes.unsafe_to_string res))
          | None ->
              let buf = Stdlib.Buffer.create len in
              Stdlib.Buffer.add_subbytes buf b pos len;
              Buffer.junk t.buffer ~len;
              loop t buf)
    in
    fun t -> Fiber.of_thunk (fun () -> self t)

  (* Read exactly [len] bytes; [Error (`Partial_eof s)] if EOF intervenes. *)
  let read_exactly =
    let rec loop_buffer t buf remains =
      if remains = 0 then Fiber.return (Ok (Stdlib.Buffer.contents buf))
      else
        match Expert.available t with
        | `Eof ->
            Fiber.return (Error (`Partial_eof (Stdlib.Buffer.contents buf)))
        | `Ok 0 ->
            let* () = Expert.refill t in
            loop_buffer t buf remains
        | `Ok _ ->
            let b, { Slice.pos; len } = Expert.buffer t in
            let len = min remains len in
            Stdlib.Buffer.add_subbytes buf b pos len;
            Buffer.junk t.buffer ~len;
            loop_buffer t buf (remains - len)
    in
    let rec self t len =
      (* we can always call loop, but we do a little optimization to see if we can
         read the line without an extra copy
      *)
      match Expert.available t with
      | `Eof -> Fiber.return (Error (`Partial_eof ""))
      | `Ok 0 ->
          let* () = Expert.refill t in
          self t len
      | `Ok _ ->
          let b, { Slice.pos; len = avail } = Expert.buffer t in
          if len <= avail then (
            (* everything requested is already contiguous *)
            let res = Bytes.sub b ~pos ~len in
            Buffer.junk t.buffer ~len;
            Fiber.return (Ok (Bytes.unsafe_to_string res)))
          else
            let buf = Stdlib.Buffer.create len in
            Stdlib.Buffer.add_subbytes buf b pos avail;
            Buffer.junk t.buffer ~len:avail;
            loop_buffer t buf (len - avail)
    in
    fun t len -> Fiber.of_thunk (fun () -> self t len)

  (* Read until EOF; returns all remaining input as one string. *)
  let to_string =
    let rec loop t buf =
      match Expert.available t with
      | `Eof -> Fiber.return (Stdlib.Buffer.contents buf)
      | `Ok 0 ->
          let* () = Expert.refill t in
          loop t buf
      | `Ok _ ->
          let b, { Slice.pos; len } = Expert.buffer t in
          Stdlib.Buffer.add_subbytes buf b pos len;
          Expert.consume t ~len;
          loop t buf
    in
    fun t -> Fiber.of_thunk (fun () -> loop t (Stdlib.Buffer.create 512))
end
(* Exclusive-use guard for a channel: raises a code error if the channel
   is closed or another fiber is already using it, otherwise marks it busy
   for the duration of [f]. *)
let with_ (type a) (t : a t) ~f =
  let activity_source = callstack () in
  let* () = Fiber.return () in
  let t =
    match !(t : _ State.t) with
    | Open t -> t
    | Closed (_, source) ->
        let args =
          match source with
          | None -> []
          | Some source ->
              [
                ( "source",
                  Dyn.string (Printexc.raw_backtrace_to_string source) );
              ]
        in
        Code_error.raise "Lev_fiber.Io: already closed" args
  in
  (match t.activity with
  | Idle -> t.activity <- Busy activity_source
  | Busy activity_source ->
      (* double use: report channel kind, creation site, and both users'
         callstacks when available *)
      let args =
        let args =
          [
            ( "kind",
              Dyn.string
                (match t.kind with Read _ -> "read" | Write _ -> "write") );
          ]
        in
        let args =
          match t.source with
          | None -> args
          | Some source ->
              ("source", Dyn.string (Printexc.raw_backtrace_to_string source))
              :: args
        in
        match activity_source with
        | None -> args
        | Some activity_source ->
            ( "activity_source",
              Dyn.string (Printexc.raw_backtrace_to_string activity_source) )
            :: args
      in
      Code_error.raise "Io.t is already busy" args);
  Fiber.finalize
    (fun () -> f t)
    ~finally:(fun () ->
      (* release the channel even if [f] failed *)
      t.activity <- Idle;
      Fiber.return ())
(* Type-specialized entry points for [with_]. *)
let with_read (t : input t) ~f = with_ t ~f
let with_write (t : output t) ~f = with_ t ~f
(* Create a Unix pipe and wrap both ends as channels. *)
let pipe ?cloexec () : (input t * output t) Fiber.t =
  Fiber.of_thunk (fun () ->
      let r, w = Fd.pipe ?cloexec () in
      let* reader = create r Input in
      let+ writer = create w Output in
      (reader, writer))
(* A fiber evaluated at most once; later forces read the memoized value. *)
module Lazy_fiber : sig
  type 'a t

  val create : (unit -> 'a Fiber.t) -> 'a t
  val force : 'a t -> 'a Fiber.t
end = struct
  type 'a t = {
    value : 'a Fiber.Ivar.t;
    mutable f : (unit -> 'a Fiber.t) option;  (* [None] once evaluation started *)
  }

  let create f = { f = Some f; value = Fiber.Ivar.create () }

  let force t =
    let open Fiber.O in
    match t.f with
    | None -> Fiber.Ivar.read t.value
    | Some f ->
        Fiber.of_thunk (fun () ->
            (* clear [f] before running it so concurrent forces wait on the
               ivar instead of running [f] again *)
            t.f <- None;
            let* v = f () in
            let+ () = Fiber.Ivar.fill t.value v in
            v)
end
(* Standard streams, wrapped lazily so the non-blocking setup only happens
   when a stream is first used. *)
let make_std_fd fd kind =
  Lazy_fiber.create (fun () ->
      let blockity =
        if Sys.win32 then `Blocking
        else (
          Unix.set_nonblock fd;
          `Non_blocking true)
      in
      create (Fd.create fd blockity) kind)

let stdin = Lazy_fiber.force (make_std_fd Unix.stdin Input)
let stderr = Lazy_fiber.force (make_std_fd Unix.stderr Output)
let stdout = Lazy_fiber.force (make_std_fd Unix.stdout Output)
end
(* Client connect and server accept loops on top of the event loop. *)
module Socket = struct
  (* One-shot wait for [fd] to become writable. *)
  let writeable_fd scheduler fd =
    let ivar = Fiber.Ivar.create () in
    let io =
      Lev.Io.create
        (fun io _ _ ->
          Queue.push scheduler.queue (Fiber.Fill (ivar, ()));
          Lev.Io.stop io scheduler.loop;
          Lev.Io.destroy io)
        fd
        (Lev.Io.Event.Set.create ~write:true ())
    in
    Lev.Io.start io scheduler.loop;
    Fiber.Ivar.read ivar

  (* Non-blocking connect: on EINPROGRESS (and the Windows-specific
     EWOULDBLOCK/EISCONN cases) wait for writability, then check
     [getsockopt_error] for the real outcome. *)
  let rec connect (fd : Fd.t) sock =
    let* scheduler = Fiber.Var.get_exn scheduler in
    Fd.set_nonblock fd;
    match Unix.connect fd.fd sock with
    | () -> Fiber.return ()
    | exception Unix.Unix_error (Unix.EISCONN, _, _) when Sys.win32 ->
        Fiber.return ()
    | exception Unix.Unix_error (Unix.EWOULDBLOCK, _, _) when Sys.win32 ->
        let* () = writeable_fd scheduler fd.fd in
        connect fd sock
    | exception Unix.Unix_error (Unix.EINPROGRESS, _, _) -> (
        let+ () = writeable_fd scheduler fd.fd in
        match Unix.getsockopt_error fd.fd with
        | None -> ()
        | Some err -> raise (Unix.Unix_error (err, "connect", "")))

  module Server = struct
    type t = {
      fd : Fd.t;  (* the listening socket *)
      pool : Fiber.Pool.t;  (* runs one fiber per accepted session *)
      io : Lev.Io.t;  (* read-readiness watcher on [fd] *)
      mutable close : bool;
      mutable await : unit Fiber.Ivar.t;  (* filled when [fd] is readable *)
    }

    let create (fd : Fd.t) sockaddr ~backlog =
      let+ scheduler = Fiber.Var.get_exn scheduler in
      let pool = Fiber.Pool.create () in
      Fd.set_nonblock fd;
      Unix.bind fd.fd sockaddr;
      Unix.listen fd.fd backlog;
      (* [Fdecl] breaks the cycle between the callback and the record *)
      let t = Fdecl.create Dyn.opaque in
      let io =
        Lev.Io.create
          (fun _ _ _ ->
            let t = Fdecl.get t in
            Queue.push scheduler.queue (Fiber.Fill (t.await, ())))
          fd.fd
          (Lev.Io.Event.Set.create ~read:true ())
      in
      Fdecl.set t { pool; await = Fiber.Ivar.create (); close = false; fd; io };
      Fdecl.get t

    let close t =
      Fiber.of_thunk (fun () ->
          if t.close then Fiber.return ()
          else
            let* scheduler = Fiber.Var.get_exn scheduler in
            Fd.close t.fd;
            Lev.Io.stop t.io scheduler.loop;
            Lev.Io.destroy t.io;
            t.close <- true;
            let* () = Fiber.Pool.stop t.pool in
            (* wake [serve] so its loop can observe [t.close] *)
            Fiber.Ivar.fill t.await ())

    module Session = struct
      type t = { fd : Fd.t; sockaddr : Unix.sockaddr }

      let fd t = t.fd
      let sockaddr t = t.sockaddr

      let io t =
        Fd.set_nonblock t.fd;
        Io.create_rw t.fd
    end

    (* Accept loop: wait for readability, accept one connection, spawn
       [f session] in the pool, repeat until [close]. *)
    let serve =
      let rec loop t f =
        let* () = Fiber.Ivar.read t.await in
        match t.close with
        | true -> Fiber.return ()
        | false ->
            t.await <- Fiber.Ivar.create ();
            let session =
              let fd, sockaddr = Unix.accept ~cloexec:true t.fd.fd in
              let fd = Fd.create' fd (Non_blocking { set = false }) in
              { Session.fd; sockaddr }
            in
            let* () = Fiber.Pool.task t.pool ~f:(fun () -> f session) in
            loop t f
      in
      fun (t : t) ~f ->
        let* scheduler = Fiber.Var.get_exn scheduler in
        Lev.Io.start t.io scheduler.loop;
        Fiber.fork_and_join_unit
          (fun () -> Fiber.Pool.run t.pool)
          (fun () -> loop t f)
  end
end
(* Re-queue the current fiber behind everything already in the scheduler
   queue, letting other ready fibers run first. *)
let yield () =
  let* sched = Fiber.Var.get_exn scheduler in
  let ivar = Fiber.Ivar.create () in
  Queue.push sched.queue (Fiber.Fill (ivar, ()));
  Fiber.Ivar.read ivar
(* Outcome of [run] when the main fiber did not return normally. *)
module Error = struct
  type t = Aborted of Exn_with_backtrace.t | Already_reported | Deadlock

  (* Unwrap a [run] result, re-raising / erroring on failure. *)
  let ok_exn = function
    | Ok s -> s
    | Error err -> (
        match err with
        | Aborted exn -> Exn_with_backtrace.reraise exn
        | Already_reported -> Code_error.raise "Already_reported" []
        | Deadlock -> Code_error.raise "Deadlock" [])
end
(* Raised internally when the fiber queue is empty and the loop has no
   more active watchers. *)
exception Deadlock

(* Set up the scheduler (event loop, async wakeup, process and signal
   watchers), run [f] to completion, then tear everything down. *)
let run (type a) ?(sigpipe = `Inherit)
    ?(flags = Lev.Loop.Flag.Set.singleton Nosigmask) (f : unit -> a Fiber.t) :
    (a, Error.t) result =
  (* the signal watcher thread relies on the loop not touching sigmasks *)
  if not (Lev.Loop.Flag.Set.mem flags Nosigmask) then
    Code_error.raise "flags must include Nosigmask" [];
  let lev_loop = Lev.Loop.create ~flags () in
  let thread_jobs = Queue.create () in
  let thread_mutex = Mutex.create () in
  let queue = Queue.create () in
  (* [async] is how worker threads wake the loop: they push completed jobs
     onto [thread_jobs] (under [thread_mutex]) and send this watcher *)
  let async =
    Lev.Async.create (fun _ ->
        Mutex.lock thread_mutex;
        while not (Queue.is_empty thread_jobs) do
          let { ivar; status } = Queue.pop_exn thread_jobs in
          match !status with
          | Active ->
              Lev.Loop.unref lev_loop;
              status := Complete;
              Queue.push queue ivar
          | Cancelled -> () (* already filled by [Thread.cancel] *)
          | Complete -> assert false
        done;
        Mutex.unlock thread_mutex)
  in
  Lev.Async.start async lev_loop;
  (* don't let the always-on [async] watcher keep the loop alive *)
  Lev.Loop.unref lev_loop;
  let process_watcher = Process_watcher.create lev_loop queue in
  let signal_watcher =
    if Sys.win32 then None
    else
      let sigchld_watcher =
        match process_watcher.watcher with
        | Signal s -> s
        | Poll _ -> assert false (* non-win32 always uses Signal *)
      in
      Some (Signal_watcher.create ~sigpipe ~sigchld_watcher ~loop:lev_loop)
  in
  let t =
    {
      loop = lev_loop;
      signal_watcher;
      queue;
      async;
      thread_mutex;
      thread_jobs;
      process_watcher;
      thread_workers = [];
    }
  in
  (* drain [q] into a list of fills *)
  let rec events q acc =
    match Queue.pop q with None -> acc | Some e -> events q (e :: acc)
  in
  let rec iter_or_deadlock q =
    match Nonempty_list.of_list (events q []) with
    | Some e -> e
    | None -> raise_notrace Deadlock
  and iter loop q =
    match Nonempty_list.of_list (events q []) with
    | Some e -> e
    | None -> (
        (* no fills ready: run the event loop once to produce some *)
        let res = Lev.Loop.run loop Once in
        match res with
        | `No_more_active_watchers -> iter_or_deadlock q
        | `Otherwise -> iter loop q)
  in
  let f =
    let on_error exn =
      Format.eprintf "%a@." Exn_with_backtrace.pp_uncaught exn;
      Fiber.return ()
    in
    let f () = Fiber.Var.set t_var t f in
    Fiber.map_reduce_errors (module Monoid.Unit) ~on_error f
  in
  let res : (a, Error.t) result =
    match Fiber.run f ~iter:(fun () -> iter lev_loop queue) with
    | Error () -> Error Already_reported
    | Ok s -> Ok s
    | exception Deadlock -> Error Deadlock
    | exception exn ->
        let exn = Exn_with_backtrace.capture exn in
        Error (Aborted exn)
  in
  (* teardown: stop watchers and join all worker threads *)
  let () =
    Process_watcher.cleanup process_watcher;
    Lev.Async.stop async lev_loop;
    Option.iter signal_watcher ~f:Signal_watcher.stop;
    List.iter t.thread_workers ~f:(fun (Worker w) ->
        Worker.complete_tasks_and_stop w;
        Worker.join w);
    Lev.Async.destroy async
  in
  res
| null | https://raw.githubusercontent.com/ocaml/ocaml-lsp/226ae3e089ec95e8333bc19c9d113a537443412a/submodules/lev/lev-fiber/src/lev_fiber.ml | ocaml | XXX shall we kill the running processes here?
[None] on windows
set whenever the wheel is waiting for a new task
we can always call loop, but we do a little optimization to see if we can
read the line without an extra copy
we can always call loop, but we do a little optimization to see if we can
read the line without an extra copy
| open Stdune
open Fiber.O
open Lev_fiber_util
module Timestamp = Lev.Timestamp
module Signal_watcher = struct
type t = {
thread : Thread.t;
old_sigmask : int list;
old_sigpipe : Sys.signal_behavior option;
old_sigchld : Sys.signal_behavior;
sigchld_watcher : Lev.Async.t;
}
let stop_sig = Sys.sigusr2
let blocked_signals =
[ Sys.sigchld; stop_sig ] |> List.sort ~compare:Int.compare
let stop t =
Unix.kill (Unix.getpid ()) stop_sig;
Thread.join t.thread;
let used_mask =
Unix.sigprocmask SIG_SETMASK t.old_sigmask
|> List.sort ~compare:Int.compare
in
Option.iter t.old_sigpipe ~f:(Sys.set_signal Sys.sigpipe);
Sys.set_signal Sys.sigchld t.old_sigchld;
if used_mask <> blocked_signals then
Code_error.raise "cannot restore old sigmask"
[
("stop_sig", Dyn.int stop_sig);
("sigchld", Dyn.int stop_sig);
("used_mask", Dyn.(list int) used_mask);
("old_sigmask", Dyn.(list int) t.old_sigmask);
("blocked_signals", Dyn.(list int) blocked_signals);
]
let run (watcher, loop) =
while true do
let signal = Thread.wait_signal blocked_signals in
if signal = Sys.sigusr2 then raise_notrace Thread.Exit
else Lev.Async.send watcher loop
done
let create ~sigpipe ~sigchld_watcher ~loop =
let old_sigpipe =
match sigpipe with
| `Inherit -> None
| `Ignore -> Some (Sys.signal Sys.sigpipe Sys.Signal_ignore)
in
let old_sigchld =
Sys.signal Sys.sigchld (Sys.Signal_handle (fun (_ : int) -> ()))
in
let old_sigmask = Unix.sigprocmask SIG_BLOCK blocked_signals in
let thread = Thread.create run (sigchld_watcher, loop) in
{ thread; old_sigmask; old_sigchld; old_sigpipe; sigchld_watcher }
end
module Process_watcher = struct
module Process_table = struct
type process = { pid : Pid.t; ivar : Unix.process_status Fiber.Ivar.t }
type t = { loop : Lev.Loop.t; active : (Pid.t, process) Table.t }
let create loop = { loop; active = Table.create (module Pid) 16 }
let spawn t pid =
Lev.Loop.ref t.loop;
let ivar = Fiber.Ivar.create () in
let process = { pid; ivar } in
Table.add_exn t.active pid process;
ivar
let is_empty t = Table.length t.active = 0
let reap t queue =
Table.filteri_inplace t.active ~f:(fun ~key:pid ~data:process ->
let pid, status = Unix.waitpid [ WNOHANG ] (Pid.to_int pid) in
match pid with
| 0 -> true
| _ ->
Lev.Loop.unref t.loop;
Queue.push queue (Fiber.Fill (process.ivar, status));
false)
end
type watcher = Signal of Lev.Async.t | Poll of Lev.Timer.t
type t = { loop : Lev.Loop.t; table : Process_table.t; watcher : watcher }
let create loop queue =
let table = Process_table.create loop in
let watcher =
if Sys.win32 then
let reap timer =
Process_table.reap table queue;
if Process_table.is_empty table then Lev.Timer.stop timer loop
in
let watcher = Lev.Timer.create ~repeat:0.05 ~after:0.05 reap in
Poll watcher
else
let reap (_ : Lev.Async.t) = Process_table.reap table queue in
let watcher = Lev.Async.create reap in
Lev.Async.start watcher loop;
Lev.Loop.unref loop;
Signal watcher
in
{ table; watcher; loop }
let ensure_started t =
match t.watcher with
| Signal _ -> ()
| Poll s -> if not (Lev.Timer.is_active s) then Lev.Timer.start s t.loop
let waitpid t ~pid =
ensure_started t;
Process_table.spawn t.table pid
let cleanup t =
match t.watcher with
| Poll s ->
Lev.Timer.stop s t.loop;
Lev.Timer.destroy s
| Signal s ->
Lev.Async.stop s t.loop;
Lev.Async.destroy s
end
type thread_job_status = Active | Complete | Cancelled
type thread_job = { status : thread_job_status ref; ivar : Fiber.fill }
type worker = Worker : 'a Worker.t -> worker
type t = {
loop : Lev.Loop.t;
queue : Fiber.fill Queue.t;
TODO stop when there are no threads
async : Lev.Async.t;
thread_jobs : thread_job Queue.t;
thread_mutex : Mutex.t;
process_watcher : Process_watcher.t;
mutable thread_workers : worker list;
}
type scheduler = t
let t : t Fiber.Var.t = Fiber.Var.create ()
let t_var = t
let scheduler = t
module Buffer = struct
include Bip_buffer
let default_size = 4096
type nonrec t = bytes t
let create ~size : t = create (Stdlib.Bytes.create size) ~len:size
end
module State = struct
type ('a, 'b) t' = Open of 'a | Closed of 'b
type ('a, 'b) t = ('a, 'b) t' ref
let create a = ref (Open a)
end
module Thread = struct
type job =
| Job : {
run : unit -> 'a;
status : thread_job_status ref;
ivar :
('a, [ `Exn of Exn_with_backtrace.t | `Cancelled ]) result
Fiber.Ivar.t;
}
-> job
type nonrec t = { worker : job Worker.t; scheduler : t }
let create =
let finish_job t fill =
Mutex.lock t.thread_mutex;
Queue.push t.thread_jobs fill;
Mutex.unlock t.thread_mutex;
Lev.Async.send t.async t.loop
in
fun () ->
let+ t = Fiber.Var.get_exn t in
let do_no_raise (Job { run; status; ivar }) =
let res =
match Exn_with_backtrace.try_with run with
| Ok x -> Ok x
| Error exn -> Error (`Exn exn)
in
finish_job t { status; ivar = Fiber.Fill (ivar, res) }
in
let worker =
Worker.create ~do_no_raise ~spawn_thread:(fun f -> Thread.create f ())
in
t.thread_workers <- Worker worker :: t.thread_workers;
{ worker; scheduler = t }
type 'a task = {
ivar :
('a, [ `Exn of Exn_with_backtrace.t | `Cancelled ]) result Fiber.Ivar.t;
task : Worker.task;
status : thread_job_status ref;
loop : Lev.Loop.t;
}
let task (t : t) ~f =
let ivar = Fiber.Ivar.create () in
let status = ref Active in
match Worker.add_work t.worker (Job { run = f; status; ivar }) with
| Error `Stopped -> Error `Stopped
| Ok task ->
Lev.Loop.ref t.scheduler.loop;
Ok { ivar; task; status; loop = t.scheduler.loop }
let await task = Fiber.Ivar.read task.ivar
let cancel task =
match !(task.status) with
| Cancelled | Complete -> Fiber.return ()
| Active ->
Lev.Loop.unref task.loop;
task.status := Cancelled;
Worker.cancel_if_not_consumed task.task;
Fiber.Ivar.fill task.ivar (Error `Cancelled)
let close t =
t.scheduler.thread_workers <-
(let id = Worker.id t.worker in
List.filter t.scheduler.thread_workers ~f:(fun (Worker w) ->
let id' = Worker.id w in
not (Worker.Id.equal id id')));
Worker.complete_tasks_and_stop t.worker
end
module Timer = struct
let sleepf after =
let* t = Fiber.Var.get_exn t in
let ivar = Fiber.Ivar.create () in
let timer =
Lev.Timer.create ~after (fun timer ->
Lev.Timer.stop timer t.loop;
Lev.Timer.destroy timer;
Queue.push t.queue (Fiber.Fill (ivar, ())))
in
Lev.Timer.start timer t.loop;
Fiber.Ivar.read ivar
module Wheel = struct
type running_state =
| Idle
| Sleeping of Lev.Timer.t * unit Fiber.Ivar.t
| Waiting of { ivar : unit Fiber.Ivar.t; filled : bool }
type elt = {
ivar : [ `Ok | `Cancelled ] Fiber.Ivar.t;
scheduled : Lev.Timestamp.t;
mutable filled : bool;
wheel : t;
}
and running = {
queue : elt Removable_queue.t;
delay : float;
scheduler : scheduler;
mutable state : running_state;
}
and state = Stopped of { delay : float } | Running of running
and t = state ref
let delay t =
match !t with Stopped { delay } -> delay | Running { delay; _ } -> delay
let create ~delay =
let+ scheduler = Fiber.Var.get_exn t in
ref
(Running
{ queue = Removable_queue.create (); delay; scheduler; state = Idle })
type task = elt Removable_queue.node ref
type condition = { sleeping : bool; waiting : bool }
let wakeup_if t { sleeping; waiting } =
match t.state with
| Sleeping (timer, ivar) when sleeping ->
let* { loop; _ } = Fiber.Var.get_exn scheduler in
Lev.Timer.stop timer loop;
Lev.Timer.destroy timer;
t.state <- Idle;
Fiber.Ivar.fill ivar ()
| Waiting { ivar; filled = false } when waiting ->
t.state <- Idle;
Fiber.Ivar.fill ivar ()
| _ -> Fiber.return ()
let set_delay t ~delay =
match !t with
| Stopped _ -> Code_error.raise "Wheel.set_delay" []
| Running d ->
t := Running { d with delay };
wakeup_if d { sleeping = true; waiting = false }
let task (t : t) : task Fiber.t =
Fiber.of_thunk (fun () ->
match !t with
| Stopped _ -> Code_error.raise "Wheel.task" []
| Running wheel ->
let now = Lev.Loop.now wheel.scheduler.loop in
let data =
{
wheel = t;
ivar = Fiber.Ivar.create ();
scheduled = now;
filled = false;
}
in
let res = Removable_queue.push wheel.queue data in
let+ () = wakeup_if wheel { waiting = true; sleeping = false } in
ref res)
let reset (task : task) =
Fiber.of_thunk (fun () ->
let task' = Removable_queue.data !task in
match !(task'.wheel) with
| Stopped _ -> Code_error.raise "reset: wheel is stopped" []
| Running wheel ->
Removable_queue.remove !task;
let now = Lev.Loop.now wheel.scheduler.loop in
let filled = task'.filled in
let task' =
let task' = { task' with scheduled = now } in
if filled then (
task'.filled <- false;
{ task' with ivar = Fiber.Ivar.create () })
else task'
in
let new_task = Removable_queue.push wheel.queue task' in
task := new_task;
if filled then
wakeup_if wheel { sleeping = false; waiting = true }
else Fiber.return ())
let await (task : task) =
Fiber.of_thunk (fun () ->
let task = Removable_queue.data !task in
Fiber.Ivar.read task.ivar)
let cancel (node : task) =
Fiber.of_thunk (fun () ->
let task = Removable_queue.data !node in
if task.filled then Fiber.return ()
else (
task.filled <- true;
Removable_queue.remove !node;
Fiber.Ivar.fill task.ivar `Cancelled))
let rec run t =
TODO do not allow double [ run ]
match !t with
| Stopped _ -> Fiber.return ()
| Running r -> (
match Removable_queue.peek r.queue with
| None ->
let ivar = Fiber.Ivar.create () in
r.state <- Waiting { ivar; filled = false };
let* () = Fiber.Ivar.read ivar in
run t
| Some node ->
let task = Removable_queue.data node in
let after =
let now = Timestamp.to_float (Lev.Loop.now r.scheduler.loop) in
let scheduled = Timestamp.to_float task.scheduled in
scheduled -. now +. r.delay
in
let expired = after < 0. in
let* () =
if expired then (
Removable_queue.remove node;
if not task.filled then (
task.filled <- true;
Queue.push r.scheduler.queue (Fiber.Fill (task.ivar, `Ok)));
Fiber.return ())
else
let scheduler = r.scheduler in
let ivar = Fiber.Ivar.create () in
let timer =
Lev.Timer.create ~after (fun timer ->
TODO reuse timer
Lev.Timer.destroy timer;
Queue.push scheduler.queue (Fiber.Fill (ivar, ())))
in
r.state <- Sleeping (timer, ivar);
Lev.Timer.start timer scheduler.loop;
Fiber.Ivar.read ivar
in
run t)
let run t = Fiber.of_thunk (fun () -> run t)
let stop =
let rec cancel_all r =
match Removable_queue.pop r.queue with
| None -> Fiber.return ()
| Some task ->
let* () =
if task.filled then Fiber.return ()
else (
task.filled <- true;
Fiber.Ivar.fill task.ivar `Cancelled)
in
cancel_all r
in
fun t ->
Fiber.of_thunk (fun () ->
match !t with
| Stopped _ -> Fiber.return ()
| Running r ->
t := Stopped { delay = r.delay };
let* () = cancel_all r in
wakeup_if r { sleeping = true; waiting = true })
end
end
let waitpid ~pid =
let pid = Pid.of_int pid in
let* t = Fiber.Var.get_exn t in
let ivar = Process_watcher.waitpid t.process_watcher ~pid in
Fiber.Ivar.read ivar
let signal ~signal =
let* { loop; queue; _ } = Fiber.Var.get_exn t in
let ivar = Fiber.Ivar.create () in
let signal =
Lev.Signal.create ~signal (fun t ->
Queue.push queue (Fiber.Fill (ivar, ()));
Lev.Signal.stop t loop;
Lev.Signal.destroy t)
in
Lev.Signal.start signal loop;
Fiber.Ivar.read ivar
module Fd = struct
type kind = Blocking | Non_blocking of { mutable set : bool }
type t = { fd : Unix.file_descr; kind : kind; mutable closed : bool }
let fd_exn t =
if t.closed then raise (Unix.Unix_error (Unix.EBADF, "closed fd", ""));
t.fd
let close t =
if not t.closed then (
t.closed <- true;
Unix.close t.fd)
let create' fd kind = { kind; fd; closed = false }
let create fd kind =
let kind =
match kind with
| `Blocking -> Blocking
| `Non_blocking set -> Non_blocking { set }
in
create' fd kind
let set_nonblock t =
assert (not t.closed);
match t.kind with
| Blocking -> ()
| Non_blocking nb ->
if not nb.set then (
Unix.set_nonblock t.fd;
nb.set <- true)
let pipe =
if Sys.win32 then fun ?cloexec () ->
let r, w = Unix.pipe ?cloexec () in
( { fd = r; kind = Blocking; closed = false },
{ fd = w; kind = Blocking; closed = false } )
else fun ?cloexec () ->
let r, w = Unix.pipe ?cloexec () in
Unix.set_nonblock r;
Unix.set_nonblock w;
( { fd = r; kind = Non_blocking { set = true }; closed = false },
{ fd = w; kind = Non_blocking { set = true }; closed = false } )
end
module Lev_fd = struct
module Event = Lev.Io.Event
type open_ = {
io : Lev.Io.t;
fd : Fd.t;
scheduler : scheduler;
mutable events : Event.Set.t;
read : [ `Ready | `Closed ] Fiber.Ivar.t Queue.t;
write : [ `Ready | `Closed ] Fiber.Ivar.t Queue.t;
}
type state = Open of open_ | Closed of Fd.t
type t = state ref
let reset nb new_set =
nb.events <- new_set;
Lev.Io.stop nb.io nb.scheduler.loop;
Lev.Io.modify nb.io nb.events;
Lev.Io.start nb.io nb.scheduler.loop
let await t (what : Lev.Io.Event.t) =
let* () = Fiber.return () in
match !t with
| Closed _ -> Fiber.return `Closed
| Open t ->
if t.fd.closed then Fiber.return `Closed
else (
if not (Event.Set.mem t.events what) then
reset t (Event.Set.add t.events what);
let ivar = Fiber.Ivar.create () in
let q = match what with Write -> t.write | Read -> t.read in
Queue.push q ivar;
let+ res = Fiber.Ivar.read ivar in
match res with
| `Closed -> `Closed
| `Ready ->
assert (not t.fd.closed);
`Ready t.fd)
let rec close_queue ivar_queue q =
match Queue.pop q with
| None -> ()
| Some ivar ->
Queue.push ivar_queue (Fiber.Fill (ivar, `Closed));
close_queue ivar_queue q
let close (t : t) =
match !t with
| Closed fd -> fd
| Open { io; scheduler; fd; read; write; events = _ } ->
t := Closed fd;
Lev.Io.stop io scheduler.loop;
Lev.Io.destroy io;
Fd.close fd;
close_queue scheduler.queue read;
close_queue scheduler.queue write;
fd
let make_cb t scheduler _ _ set =
match !(Fdecl.get t) with
| Closed _ -> ()
| Open nb ->
let keep_read = ref true in
let keep_write = ref true in
(if Lev.Io.Event.Set.mem set Read then
match Queue.pop nb.read with
| Some ivar -> Queue.push scheduler.queue (Fiber.Fill (ivar, `Ready))
| None -> keep_read := false);
(if Lev.Io.Event.Set.mem set Write then
match Queue.pop nb.write with
| Some ivar -> Queue.push scheduler.queue (Fiber.Fill (ivar, `Ready))
| None -> keep_write := false);
let new_set =
Event.Set.inter nb.events
(Event.Set.create ~read:!keep_read ~write:!keep_write ())
in
if not (Event.Set.equal new_set nb.events) then reset nb new_set
let create (fd : Fd.t) : t Fiber.t =
if fd.closed then Code_error.raise "create: fd is closed" [];
let+ scheduler = Fiber.Var.get_exn scheduler in
let t : t Fdecl.t = Fdecl.create Dyn.opaque in
let events = Event.Set.create () in
let io = Lev.Io.create (make_cb t scheduler) fd.fd events in
let read = Queue.create () in
let write = Queue.create () in
Fdecl.set t (ref (Open { events; fd; scheduler; io; read; write }));
Lev.Io.start io scheduler.loop;
Fdecl.get t
end
module Io = struct
let callstack =
match Sys.getenv_opt "LEV_DEBUG" with
| None -> fun () -> None
| Some _ -> fun () -> Some (Printexc.get_callstack 15)
type input = Input
type output = Output
type 'a mode = Input : input mode | Output : output mode
module Slice = Buffer.Slice
type _ kind =
| Write : { mutable flush_counter : int } -> output kind
| Read : { mutable eof : bool } -> input kind
type fd = Blocking of Thread.t * Fd.t | Non_blocking of Lev_fd.t
let with_ fd (kind : Lev.Io.Event.t) ~f =
let* () = Fiber.return () in
match fd with
| Non_blocking lev_fd -> (
let+ event = Lev_fd.await lev_fd kind in
match event with
| `Closed -> Error `Eof
| `Ready fd -> (
match f fd with exception exn -> Error (`Exn exn) | s -> Ok s))
| Blocking (th, fd) -> (
let task =
match Thread.task th ~f:(fun () -> f fd) with
| Error `Stopped -> Code_error.raise "already stopped" []
| Ok task -> task
in
let+ res = Thread.await task in
match res with
| Ok _ as s -> s
| Error `Cancelled -> assert false
| Error (`Exn exn) -> Error (`Exn exn.exn))
type activity = Idle | Busy of Printexc.raw_backtrace option
type 'a open_ = {
mutable buffer : Buffer.t;
kind : 'a kind;
fd : fd;
mutable activity : activity;
source : Printexc.raw_backtrace option;
}
type 'a t = ('a open_, Fd.t * Printexc.raw_backtrace option) State.t
let fd (t : _ t) =
match !t with
| Closed (fd, _) -> fd
| Open { fd = Blocking (_, fd); _ } -> fd
| Open { fd = Non_blocking fd; _ } -> (
match !fd with Closed fd -> fd | Open f -> f.fd)
let rec with_resize_buffer t ~len reserve_fail k =
match Buffer.reserve t.buffer ~len with
| Some dst_pos -> k t ~len ~dst_pos
| None -> (
match reserve_fail with
| `Compress ->
if Buffer.unused_space t.buffer >= len then
Buffer.Bytes.compress t.buffer;
with_resize_buffer t ~len `Resize k
| `Resize ->
let len = Buffer.length t.buffer + len in
Buffer.Bytes.resize t.buffer ~len;
with_resize_buffer t ~len `Fail k
| `Fail -> assert false)
module Writer = struct
type nonrec t = output open_
module Expert = struct
let available t = Buffer.max_available t.buffer
let prepare =
let k t ~len ~dst_pos:pos =
let buf = Buffer.buffer t.buffer in
(buf, { Slice.pos; len })
in
fun t ~len -> with_resize_buffer t ~len `Compress k
let commit t ~len = Buffer.commit t.buffer ~len
end
let flush =
let rec loop t stop_count =
TODO fix overflow issues
if
(match t.kind with Write { flush_counter } -> flush_counter)
>= stop_count
then Fiber.return ()
else
let* res =
with_ t.fd Write ~f:(fun fd ->
match Buffer.peek t.buffer with
| None -> ()
| Some { Slice.pos; len } -> (
let buffer = Buffer.buffer t.buffer in
let len = Unix.single_write fd.fd buffer pos len in
Buffer.junk t.buffer ~len;
match t.kind with
| Write t -> t.flush_counter <- t.flush_counter + len))
in
match res with
| Ok () -> loop t stop_count
| Error (`Exn (Unix.Unix_error (Unix.EAGAIN, _, _))) ->
loop t stop_count
| Error (`Exn (Unix.Unix_error (EPIPE, _, _))) | Error `Eof ->
let args =
[
("remaining", Dyn.int stop_count);
( "contents",
Dyn.string (Format.asprintf "%a@." Buffer.Bytes.pp t.buffer)
);
]
in
let args =
match t.source with
| None -> args
| Some source ->
( "source",
Dyn.string @@ Printexc.raw_backtrace_to_string source )
:: args
in
Code_error.raise "fd closed unflushed" args
| Error (`Exn exn) -> reraise exn
in
fun t ->
Fiber.of_thunk (fun () ->
let stop_count =
match t.kind with
| Write { flush_counter } ->
flush_counter + Buffer.length t.buffer
in
loop t stop_count)
let add_substring t str ~pos ~len =
Buffer.Bytes.Writer.add_substring t.buffer str ~pos ~len
let add_string t str = Buffer.Bytes.Writer.add_string t.buffer str
end
let create_gen (type a) ~source fd (mode : a mode) =
let buffer = Buffer.create ~size:Buffer.default_size in
let kind : a kind =
match mode with
| Input -> Read { eof = false }
| Output -> Write { flush_counter = 0 }
in
State.create { buffer; fd; kind; activity = Idle; source }
let create (type a) (fd : Fd.t) (mode : a mode) =
let source = callstack () in
match fd.kind with
| Non_blocking _ ->
let+ fd = Lev_fd.create fd in
create_gen ~source (Non_blocking fd) mode
| Blocking ->
let+ thread = Thread.create () in
create_gen ~source (Blocking (thread, fd)) mode
let create_rw (fd : Fd.t) : (input t * output t) Fiber.t =
let source = callstack () in
match fd.kind with
| Non_blocking _ ->
let+ fd =
let+ fd = Lev_fd.create fd in
Non_blocking fd
in
let r = create_gen ~source fd Input in
let w = create_gen ~source fd Output in
(r, w)
| Blocking ->
let* r =
let+ thread = Thread.create () in
create_gen ~source (Blocking (thread, fd)) Input
in
let+ w =
let+ thread = Thread.create () in
create_gen ~source (Blocking (thread, fd)) Output
in
(r, w)
let close =
let close_fd t =
match t with
| Non_blocking fd -> Lev_fd.close fd
| Blocking (th, fd) ->
Thread.close th;
Fd.close fd;
fd
in
fun (type a) (t : a t) ->
match !t with
| State.Closed _ -> ()
| Open o ->
(match (o.kind : _ kind) with
| Read r -> r.eof <- true
TODO
let fd = close_fd o.fd in
t := Closed (fd, o.source)
module Reader = struct
type t = input open_
exception Unavailable
module Expert = struct
let buffer t =
match Buffer.peek t.buffer with
| None -> raise Unavailable
| Some { Buffer.Slice.pos; len } ->
let b = Buffer.buffer t.buffer in
(b, { Slice.pos; len })
let consume (t : t) ~len = Buffer.junk t.buffer ~len
let available t =
let eof = match t.kind with Read { eof } -> eof in
let available = Buffer.length t.buffer in
if available = 0 && eof then `Eof else `Ok available
let refill =
let rec read t ~len ~dst_pos =
let buffer = Buffer.buffer t.buffer in
let* res =
with_ t.fd Read ~f:(fun fd -> Unix.read fd.fd buffer 0 len)
in
match res with
| Error (`Exn (Unix.Unix_error (Unix.EAGAIN, _, _))) ->
read t ~len ~dst_pos
| Error `Eof | Ok 0 ->
(match t.kind with Read b -> b.eof <- true);
Buffer.commit t.buffer ~len:0;
Fiber.return ()
| Ok len ->
Buffer.commit t.buffer ~len;
Fiber.return ()
| Error (`Exn exn) -> reraise exn
in
fun ?(size = Buffer.default_size) (t : t) ->
with_resize_buffer t ~len:size `Compress read
end
exception Found of int
let read_char_exn t =
let b, { Buffer.Slice.pos; len } = Expert.buffer t in
assert (len > 0);
let res = Bytes.get b pos in
Expert.consume t ~len:1;
res
let read_line =
let contents buf =
let module Buffer = Stdlib.Buffer in
let len = Buffer.length buf in
if len = 0 then ""
else if Buffer.nth buf (len - 1) = '\r' then Buffer.sub buf 0 (len - 1)
else Buffer.contents buf
in
let find_nl b pos len =
try
for i = pos to pos + len - 1 do
if Bytes.get b i = '\n' then raise_notrace (Found i)
done;
None
with Found i -> Some i
in
let rec loop t buf =
match Expert.available t with
| `Eof ->
Fiber.return (Error (`Partial_eof (Stdlib.Buffer.contents buf)))
| `Ok 0 ->
let* () = Expert.refill t in
loop t buf
| `Ok _ -> (
let b, { Slice.pos; len } = Expert.buffer t in
match find_nl b pos len with
| Some i ->
let len = i - pos in
Stdlib.Buffer.add_subbytes buf b pos len;
Buffer.junk t.buffer ~len:(len + 1);
Fiber.return (Ok (contents buf))
| None ->
Stdlib.Buffer.add_subbytes buf b pos len;
Buffer.junk t.buffer ~len;
loop t buf)
in
let rec self t =
match Expert.available t with
| `Eof -> Fiber.return (Error (`Partial_eof ""))
| `Ok 0 ->
let* () = Expert.refill t in
self t
| `Ok _ -> (
let b, { Slice.pos; len } = Expert.buffer t in
match find_nl b pos len with
| Some i ->
let len = i - pos in
let res =
let len =
if len > 0 && Bytes.get b (i - 1) = '\r' then len - 1
else len
in
Bytes.sub b ~pos ~len
in
Buffer.junk t.buffer ~len:(len + 1);
Fiber.return (Ok (Bytes.unsafe_to_string res))
| None ->
let buf = Stdlib.Buffer.create len in
Stdlib.Buffer.add_subbytes buf b pos len;
Buffer.junk t.buffer ~len;
loop t buf)
in
fun t -> Fiber.of_thunk (fun () -> self t)
let read_exactly =
let rec loop_buffer t buf remains =
if remains = 0 then Fiber.return (Ok (Stdlib.Buffer.contents buf))
else
match Expert.available t with
| `Eof ->
Fiber.return (Error (`Partial_eof (Stdlib.Buffer.contents buf)))
| `Ok 0 ->
let* () = Expert.refill t in
loop_buffer t buf remains
| `Ok _ ->
let b, { Slice.pos; len } = Expert.buffer t in
let len = min remains len in
Stdlib.Buffer.add_subbytes buf b pos len;
Buffer.junk t.buffer ~len;
loop_buffer t buf (remains - len)
in
let rec self t len =
match Expert.available t with
| `Eof -> Fiber.return (Error (`Partial_eof ""))
| `Ok 0 ->
let* () = Expert.refill t in
self t len
| `Ok _ ->
let b, { Slice.pos; len = avail } = Expert.buffer t in
if len <= avail then (
let res = Bytes.sub b ~pos ~len in
Buffer.junk t.buffer ~len;
Fiber.return (Ok (Bytes.unsafe_to_string res)))
else
let buf = Stdlib.Buffer.create len in
Stdlib.Buffer.add_subbytes buf b pos avail;
Buffer.junk t.buffer ~len:avail;
loop_buffer t buf (len - avail)
in
fun t len -> Fiber.of_thunk (fun () -> self t len)
let to_string =
let rec loop t buf =
match Expert.available t with
| `Eof -> Fiber.return (Stdlib.Buffer.contents buf)
| `Ok 0 ->
let* () = Expert.refill t in
loop t buf
| `Ok _ ->
let b, { Slice.pos; len } = Expert.buffer t in
Stdlib.Buffer.add_subbytes buf b pos len;
Expert.consume t ~len;
loop t buf
in
fun t -> Fiber.of_thunk (fun () -> loop t (Stdlib.Buffer.create 512))
end
let with_ (type a) (t : a t) ~f =
let activity_source = callstack () in
let* () = Fiber.return () in
let t =
match !(t : _ State.t) with
| Open t -> t
| Closed (_, source) ->
let args =
match source with
| None -> []
| Some source ->
[
( "source",
Dyn.string (Printexc.raw_backtrace_to_string source) );
]
in
Code_error.raise "Lev_fiber.Io: already closed" args
in
(match t.activity with
| Idle -> t.activity <- Busy activity_source
| Busy activity_source ->
let args =
let args =
[
( "kind",
Dyn.string
(match t.kind with Read _ -> "read" | Write _ -> "write") );
]
in
let args =
match t.source with
| None -> args
| Some source ->
("source", Dyn.string (Printexc.raw_backtrace_to_string source))
:: args
in
match activity_source with
| None -> args
| Some activity_source ->
( "activity_source",
Dyn.string (Printexc.raw_backtrace_to_string activity_source) )
:: args
in
Code_error.raise "Io.t is already busy" args);
Fiber.finalize
(fun () -> f t)
~finally:(fun () ->
t.activity <- Idle;
Fiber.return ())
let with_read (t : input t) ~f = with_ t ~f
let with_write (t : output t) ~f = with_ t ~f
let pipe ?cloexec () : (input t * output t) Fiber.t =
Fiber.of_thunk @@ fun () ->
let r, w = Fd.pipe ?cloexec () in
let* input = create r Input in
let+ output = create w Output in
(input, output)
module Lazy_fiber : sig
type 'a t
val create : (unit -> 'a Fiber.t) -> 'a t
val force : 'a t -> 'a Fiber.t
end = struct
type 'a t = {
value : 'a Fiber.Ivar.t;
mutable f : (unit -> 'a Fiber.t) option;
}
let create f = { f = Some f; value = Fiber.Ivar.create () }
let force t =
let open Fiber.O in
match t.f with
| None -> Fiber.Ivar.read t.value
| Some f ->
Fiber.of_thunk (fun () ->
t.f <- None;
let* v = f () in
let+ () = Fiber.Ivar.fill t.value v in
v)
end
let make_std_fd fd kind =
Lazy_fiber.create (fun () ->
let blockity =
if Sys.win32 then `Blocking
else (
Unix.set_nonblock fd;
`Non_blocking true)
in
create (Fd.create fd blockity) kind)
let stdin = Lazy_fiber.force (make_std_fd Unix.stdin Input)
let stderr = Lazy_fiber.force (make_std_fd Unix.stderr Output)
let stdout = Lazy_fiber.force (make_std_fd Unix.stdout Output)
end
module Socket = struct
let writeable_fd scheduler fd =
let ivar = Fiber.Ivar.create () in
let io =
Lev.Io.create
(fun io _ _ ->
Queue.push scheduler.queue (Fiber.Fill (ivar, ()));
Lev.Io.stop io scheduler.loop;
Lev.Io.destroy io)
fd
(Lev.Io.Event.Set.create ~write:true ())
in
Lev.Io.start io scheduler.loop;
Fiber.Ivar.read ivar
let rec connect (fd : Fd.t) sock =
let* scheduler = Fiber.Var.get_exn scheduler in
Fd.set_nonblock fd;
match Unix.connect fd.fd sock with
| () -> Fiber.return ()
| exception Unix.Unix_error (Unix.EISCONN, _, _) when Sys.win32 ->
Fiber.return ()
| exception Unix.Unix_error (Unix.EWOULDBLOCK, _, _) when Sys.win32 ->
let* () = writeable_fd scheduler fd.fd in
connect fd sock
| exception Unix.Unix_error (Unix.EINPROGRESS, _, _) -> (
let+ () = writeable_fd scheduler fd.fd in
match Unix.getsockopt_error fd.fd with
| None -> ()
| Some err -> raise (Unix.Unix_error (err, "connect", "")))
module Server = struct
type t = {
fd : Fd.t;
pool : Fiber.Pool.t;
io : Lev.Io.t;
mutable close : bool;
mutable await : unit Fiber.Ivar.t;
}
let create (fd : Fd.t) sockaddr ~backlog =
let+ scheduler = Fiber.Var.get_exn scheduler in
let pool = Fiber.Pool.create () in
Fd.set_nonblock fd;
Unix.bind fd.fd sockaddr;
Unix.listen fd.fd backlog;
let t = Fdecl.create Dyn.opaque in
let io =
Lev.Io.create
(fun _ _ _ ->
let t = Fdecl.get t in
Queue.push scheduler.queue (Fiber.Fill (t.await, ())))
fd.fd
(Lev.Io.Event.Set.create ~read:true ())
in
Fdecl.set t { pool; await = Fiber.Ivar.create (); close = false; fd; io };
Fdecl.get t
let close t =
Fiber.of_thunk (fun () ->
if t.close then Fiber.return ()
else
let* scheduler = Fiber.Var.get_exn scheduler in
Fd.close t.fd;
Lev.Io.stop t.io scheduler.loop;
Lev.Io.destroy t.io;
t.close <- true;
let* () = Fiber.Pool.stop t.pool in
Fiber.Ivar.fill t.await ())
module Session = struct
type t = { fd : Fd.t; sockaddr : Unix.sockaddr }
let fd t = t.fd
let sockaddr t = t.sockaddr
let io t =
Fd.set_nonblock t.fd;
Io.create_rw t.fd
end
let serve =
let rec loop t f =
let* () = Fiber.Ivar.read t.await in
match t.close with
| true -> Fiber.return ()
| false ->
t.await <- Fiber.Ivar.create ();
let session =
let fd, sockaddr = Unix.accept ~cloexec:true t.fd.fd in
let fd = Fd.create' fd (Non_blocking { set = false }) in
{ Session.fd; sockaddr }
in
let* () = Fiber.Pool.task t.pool ~f:(fun () -> f session) in
loop t f
in
fun (t : t) ~f ->
let* scheduler = Fiber.Var.get_exn scheduler in
Lev.Io.start t.io scheduler.loop;
Fiber.fork_and_join_unit
(fun () -> Fiber.Pool.run t.pool)
(fun () -> loop t f)
end
end
let yield () =
let* scheduler = Fiber.Var.get_exn scheduler in
let ivar = Fiber.Ivar.create () in
Queue.push scheduler.queue (Fiber.Fill (ivar, ()));
Fiber.Ivar.read ivar
module Error = struct
type t = Aborted of Exn_with_backtrace.t | Already_reported | Deadlock
let ok_exn = function
| Ok s -> s
| Error (Aborted exn) -> Exn_with_backtrace.reraise exn
| Error Already_reported -> Code_error.raise "Already_reported" []
| Error Deadlock -> Code_error.raise "Deadlock" []
end
exception Deadlock
let run (type a) ?(sigpipe = `Inherit)
?(flags = Lev.Loop.Flag.Set.singleton Nosigmask) (f : unit -> a Fiber.t) :
(a, Error.t) result =
if not (Lev.Loop.Flag.Set.mem flags Nosigmask) then
Code_error.raise "flags must include Nosigmask" [];
let lev_loop = Lev.Loop.create ~flags () in
let thread_jobs = Queue.create () in
let thread_mutex = Mutex.create () in
let queue = Queue.create () in
let async =
Lev.Async.create (fun _ ->
Mutex.lock thread_mutex;
while not (Queue.is_empty thread_jobs) do
let { ivar; status } = Queue.pop_exn thread_jobs in
match !status with
| Active ->
Lev.Loop.unref lev_loop;
status := Complete;
Queue.push queue ivar
| Cancelled -> ()
| Complete -> assert false
done;
Mutex.unlock thread_mutex)
in
Lev.Async.start async lev_loop;
Lev.Loop.unref lev_loop;
let process_watcher = Process_watcher.create lev_loop queue in
let signal_watcher =
if Sys.win32 then None
else
let sigchld_watcher =
match process_watcher.watcher with
| Signal s -> s
| Poll _ -> assert false
in
Some (Signal_watcher.create ~sigpipe ~sigchld_watcher ~loop:lev_loop)
in
let t =
{
loop = lev_loop;
signal_watcher;
queue;
async;
thread_mutex;
thread_jobs;
process_watcher;
thread_workers = [];
}
in
let rec events q acc =
match Queue.pop q with None -> acc | Some e -> events q (e :: acc)
in
let rec iter_or_deadlock q =
match Nonempty_list.of_list (events q []) with
| Some e -> e
| None -> raise_notrace Deadlock
and iter loop q =
match Nonempty_list.of_list (events q []) with
| Some e -> e
| None -> (
let res = Lev.Loop.run loop Once in
match res with
| `No_more_active_watchers -> iter_or_deadlock q
| `Otherwise -> iter loop q)
in
let f =
let on_error exn =
Format.eprintf "%a@." Exn_with_backtrace.pp_uncaught exn;
Fiber.return ()
in
let f () = Fiber.Var.set t_var t f in
Fiber.map_reduce_errors (module Monoid.Unit) ~on_error f
in
let res : (a, Error.t) result =
match Fiber.run f ~iter:(fun () -> iter lev_loop queue) with
| Error () -> Error Already_reported
| Ok s -> Ok s
| exception Deadlock -> Error Deadlock
| exception exn ->
let exn = Exn_with_backtrace.capture exn in
Error (Aborted exn)
in
let () =
Process_watcher.cleanup process_watcher;
Lev.Async.stop async lev_loop;
Option.iter signal_watcher ~f:Signal_watcher.stop;
List.iter t.thread_workers ~f:(fun (Worker w) ->
Worker.complete_tasks_and_stop w;
Worker.join w);
Lev.Async.destroy async
in
res
|
44a0ec77797d9c891b76cef3992fb8e32563382bdb3fafabd1a77979b69dee7a | kmi/irs | heuristic-classifierv2.lisp | -*- Mode : LISP ; Syntax : Common - lisp ; Base : 10 ; Package : OCML ; -*-
;;;; HEURISTIC-CLASSIFICATION :METHOD ;;;;
(in-package "OCML")
(in-ontology heuristic-classification)
;;;THE-VIRTUAL-SOLUTION-SPACE
(def-function the-virtual-solution-space (?init-space ?refs) -> ?solution-space
"The space generated by refinement application from an initial solution space"
:constraint (and (every ?refs refiner)
(solution-space ?init-space))
:def (= ?solution-space
(setofall ?sol2 (or (member ?sol2 ?init-space)
(exists ?sol3
(and (member ?sol3
(the-virtual-solution-space
?init-space ?refs))
(generalised-refinement-of ?sol2
?sol3
?refs)))))))
;;; CLASS CANDIDATE-EXCLUSION-CRITERION
(def-class candidate-exclusion-criterion ()
"this provides us with a way to specify when a solution is not
admissible. we also use this in conjunction with a solution refinement
hierarchy, to prune all refinements of an unadmissible solution"
((applies-to-match-score-type :type match-score-type)
(has-candidate-exclusion-relation :type unary-relation)
))
;;;INSTANCE DEFAULT-CANDIDATE-EXCLUSION-CRITERION
(def-instance default-candidate-exclusion-criterion candidate-exclusion-criterion
((applies-to-match-score-type default-match-score)
(has-candidate-exclusion-relation default-candidate-exclusion-relation)))
;;;RELATION DEFAULT-CANDIDATE-EXCLUSION-RELATION
(def-relation default-candidate-exclusion-relation (?score)
"a solution is excluded if it has one or more inconsistent features"
:constraint (default-match-score ?score)
:iff-def (> (length (first ?score)) 0))
;;;RELATION RULED-OUT-SOLUTION
(def-relation ruled-out-solution (?sol ?score ?criterion)
:iff-def (holds (the ?rel (has-candidate-exclusion-relation ?criterion ?rel))
?score))
;;;AXIOM EXCLUSION-IS-MONOTONIC
(def-axiom exclusion-is-monotonic
"This axiom states that the exclusion criterion is monotonic. That is,
if a solution, ?sol, is ruled out, then any solution which has a worse score than
?sol will also be ruled out"
(forall (?sol1 ?sol2 ?obs ?criterion)
(=> (and (ruled-out-solution
?sol1 (apply-match-criterion ?criterion ?obs ?sol1) ?criterion)
(not (better-match-than ?sol2 ?sol1 ?obs ?criterion)))
(ruled-out-solution
?sol2 (apply-match-criterion ?criterion ?obs ?sol2) ?criterion))))
(def-axiom CONGRUENT-ADMISSIBILITY-AND-EXCLUSION-CRITERIA
"This axiom states that the admissibility and exclusion criteria
have to be congruent: no solution should ever satisfy both"
(forall (?sol ?task)
(=> (member ?sol
(the-virtual-solution-space
(role-value ?task has-candidate-solutions)
(role-value ?task has-refiners)))
(not (and (admissible-solution
?sol
(apply-match-criterion
(role-value ?task 'has-match-criterion)
(role-value ?task 'has-observables)
?sol)
(role-value
?task
'has-solution-admissibility-criterion))
(ruled-out-solution ?sol
(apply-match-criterion
(role-value ?task 'has-match-criterion)
(role-value ?task 'has-observables)
?sol)
(role-value
?psm 'has-solution-exclusion-criterion)))))))
;;;HEURISTIC-ADMISSIBLE-SOL-CLASSIFIER
(def-class heuristic-admissible-sol-classifier (problem-solving-method) ?psm
"This is a basic method for finding an admissible solution to a
classification problem. It uses a backtracking hill-climbing solution to
home in on the most promising solutions. It is a heuristic, hierarchical
classifier, as it provides for both abstraction and refinement mechanisms.
If no refinement mechanisms are provided, then the psm just reduces to a
'flat' classifier."
((has-input-role :value has-abstractors
:value has-refiners
:value has-observables
:value has-candidate-solutions
:value has-solution-exclusion-criterion
)
(has-control-role :value 'has-current-solutions)
(has-output-role :value has-solution)
(has-current-solutions :type solution-space)
(has-abstractors :type abstractors)
(has-refiners :type refiners)
(has-observables :type observables)
(has-solution-exclusion-criterion :type candidate-exclusion-criterion
:default-value default-candidate-exclusion-criterion)
(has-candidate-solutions :type solution-space)
(has-solution :type solution)
(has-assumption
:value (kappa (?psm)
(not (exists (?ob1 ?ob2)
(and (or (member ?ob1
(role-value ?psm has-observables))
(exists ?ob
(and (member
?ob
(role-value ?psm has-observables))
(generalised-abstract-link
?ob1 ?ob
(role-value ?psm 'has-abstractors)))))
(generalised-abstract-link
?ob2 ?ob1
(role-value ?psm 'has-abstractors))
(generalised-abstract-link
?ob1 ?ob2
(role-value ?psm 'has-abstractors))))))
:value (kappa (?psm)
(not (exists (?sol1 ?sol2)
(and (or (member ?sol1
(role-value ?psm 'has-candidate-solutions))
(exists ?sol
(and (member ?sol
(role-value
?psm
'has-candidate-solutions))
(generalised-refinement-of
?sol1 ?sol
(role-value ?psm has-refiners)))))
(generalised-refinement-of
?sol2 ?sol1
(role-value ?psm has-refiners))
(generalised-refinement-of
?sol1 ?sol2
(role-value ?psm has-refiners))))))
:value (kappa (?psm)
(exists ?sol
(and (admissible-solution
?sol
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion))
(member ?sol
(the-virtual-solution-space
(role-value ?psm has-candidate-solutions)
(role-value ?psm has-refiners))))))
:value (kappa (?psm)
(forall (?sol ?score)
(=>
(and
(sol-has-match-score ?sol
(role-value ?psm 'has-observables)
?score
(role-value
?psm 'has-match-criterion))
(ruled-out-solution ?sol ?score
(role-value
?psm 'has-solution-exclusion-criterion)))
(not (exists
?sol2
(and (generalised-refinement-of
?sol2 ?sol (role-value
?psm has-refiners))
(admissible-solution
?sol2
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol2)
(role-value
?psm
'has-solution-admissibility-criterion))))))))
:documentation "There are 4 assumptions associated with this method. The first
two state that the abstraction and refinement hierarchies have to
be free of cycles. The third states that the psm assumes the existence
of a solution in the virtual solution space - i.e., if such a solution
exists, the psm will find it. The final assumption states that the
method assumes that the exclusion criterion is correct. That is, if a
solution is excluded, all its refinements can be excluded too.")
(has-postcondition
:value (kappa (?psm ?sol)
(admissible-solution
?sol
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion)))
:documentation "The output solution satisfies the admissibility criterion")
(has-body
:value
'(lambda (?psm)
(in-environment
((?obs . (achieve-generic-subtask
?psm abstraction
'has-observables (role-value ?psm has-observables)
'has-abstractors (role-value ?psm has-abstractors))))
(do
(set-role-value ?psm 'has-current-solutions
(role-value ?psm has-candidate-solutions))
(if (exists ?sol
(and (member ?sol (role-value
?psm 'has-current-solutions))
(admissible-solution
?sol
(apply-match-criterion
(role-value
?psm 'has-match-criterion)
(role-value
?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion))))
?sol
(in-environment
((?current-sols . (role-value
?psm 'has-current-solutions))
(?ranked-candidates . (achieve-generic-subtask
?psm rank-solutions
'has-observables ?obs
'has-candidate-solutions
?current-sols
'has-match-criterion
(role-value
?psm has-match-criterion))))
(loop for ?candidate in ?ranked-candidates
do
(if (not (ruled-out-solution ?candidate
(apply-match-criterion
(role-value
?psm 'has-match-criterion)
(role-value
?psm 'has-observables)
?candidate)
(role-value
?psm 'has-solution-exclusion-criterion)))
(in-environment
((?refined-sols . (achieve-generic-subtask
?psm refinement
'has-candidate-solution ?candidate
'has-observables ?obs
'has-refiners (role-value
?psm has-refiners))))
(if (not (null ?refined-sols))
(in-environment
((?result . (achieve-generic-subtask
?psm heuristic-admissible-sol-classifier
'has-observables ?obs
'has-candidate-solutions ?refined-sols)))
(if (not (= ?result :nothing))
(return ?result)))))))))))))
(has-output-mapping
:value '(lambda (?psm ?result)
(list-of ?result))))
:own-slots ((tackles-task-type single-solution-classification-task)
(has-generic-subtasks '(abstraction
refinement rank-solutions))))
;;;HEURISTIC-OPTIMAL-SOL-CLASSIFIER
(def-class heuristic-optimal-sol-classifier (problem-solving-method) ?psm
"This method exhaustively searches a solution space looking for an optimal
solution. It uses the exclusion criterion to prune the search space. It
returns one or more optimal solutions"
((has-input-role :value has-abstractors
:value has-refiners
:value has-observables
:value has-candidate-solutions
:value has-solution-exclusion-criterion
)
(has-control-role :value 'has-current-solutions
:value 'has-ranked-candidates)
(has-output-role :value has-solutions)
(has-current-solutions :type solution-space)
(has-ranked-candidates :type solution-space)
(has-abstractors :type abstractors)
(has-refiners :type refiners)
(has-observables :type observables)
(has-solution-exclusion-criterion :type candidate-exclusion-criterion
:default-value default-candidate-exclusion-criterion)
(has-candidate-solutions :type solution-space)
(has-solutions :type solution-space)
(has-assumption
:value (kappa (?psm)
(not (exists (?ob1 ?ob2)
(and (or (member ?ob1
(role-value ?psm has-observables))
(exists ?ob
(and (member
?ob
(role-value ?psm has-observables))
(generalised-abstract-link
?ob1 ?ob
(role-value ?psm 'has-abstractors)))))
(generalised-abstract-link
?ob2 ?ob1
(role-value ?psm 'has-abstractors))
(generalised-abstract-link
?ob1 ?ob2
(role-value ?psm 'has-abstractors))))))
:value (kappa (?psm)
(not (exists (?sol1 ?sol2)
(and (or (member ?sol1
(role-value ?psm 'has-candidate-solutions))
(exists ?sol
(and (member ?sol
(role-value
?psm
'has-candidate-solutions))
(generalised-refinement-of
?sol1 ?sol
(role-value ?psm has-refiners)))))
(generalised-refinement-of
?sol2 ?sol1
(role-value ?psm has-refiners))
(generalised-refinement-of
?sol1 ?sol2
(role-value ?psm has-refiners))))))
:value (kappa (?psm)
(exists ?sol
(and (admissible-solution
?sol
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion))
(member ?sol
(the-virtual-solution-space
(role-value ?psm has-candidate-solutions)
(role-value ?psm has-refiners))))))
:value (kappa (?psm)
(forall (?sol ?score)
(=>
(and
(sol-has-match-score ?sol
(role-value ?psm 'has-observables)
?score
(role-value
?psm 'has-match-criterion))
(ruled-out-solution ?sol ?score
(role-value
?psm 'has-solution-exclusion-criterion)))
(not (exists
?sol2
(and (generalised-refinement-of
?sol2 ?sol (role-value
?psm has-refiners))
(admissible-solution
?sol2
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol2)
(role-value
?psm
'has-solution-admissibility-criterion))))))))
:documentation "There are 4 assumptions associated with this method. The first
two state that the abstraction and refinement hierarchies have to
be free of cycles. The third states that the psm assumes the existence
of a solution in the virtual solution space - i.e., if such a solution
exists, the psm will find it. The final assumption states that the
method assumes that the exclusion criterion is correct. That is, if a
solution is excluded, all its refinements can be excluded too.")
(has-postcondition
:value (kappa (?psm ?sols)
(forall ?sol
(=> (member ?sol ?sols)
(and
(admissible-solution
?sol
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion))
(best-match
(role-value ?task 'has-observables)
?sol
(the-virtual-solution-space
(role-value ?psm has-observables)
(role-value ?psm 'has-candidate-solutions))
(role-value ?task 'has-match-criterion))))))
:documentation "The output solutions are both admissible and optimal
with respect to the solution space generated through
refinement application")
(has-body
:value
'(lambda (?psm)
(in-environment
((?obs . (achieve-generic-subtask
?psm abstraction
'has-observables (role-value ?psm has-observables)
'has-abstractors (role-value ?psm has-abstractors))))
(do
(set-role-value ?psm 'has-ranked-candidates
(achieve-generic-subtask
?psm rank-solutions
'has-observables ?obs
'has-candidate-solutions
(role-value ?psm 'has-candidate-solutions)
'has-match-criterion (role-value
?psm has-match-criterion)))
Initialize current solutions
(set-role-value ?psm 'has-current-solutions nil)
(if (admissible-solution
(first (role-value ?psm 'has-ranked-candidates))
(apply-match-criterion (role-value ?psm has-match-criterion)
?obs
(first
(role-value ?psm 'has-ranked-candidates)))
(role-value ?psm
'has-solution-admissibility-criterion))
(do
(set-role-value ?psm 'has-current-solutions
(List-of (first
(role-value ?psm 'has-ranked-candidates))))
(loop for ?candidate in (rest (role-value ?psm 'has-ranked-candidates))
do
(if (better-match-than
(first (role-value ?psm 'has-ranked-candidates))
?candidate ?obs
(role-value ?psm has-match-criterion))
(return :nothing)
(set-role-value ?psm 'has-current-solutions
(cons ?candidate
(role-value ?psm
'has-current-solutions)))))))
;;remove ruled out candidates
(loop for ?candidate in (reverse (role-value ?psm 'has-ranked-candidates))
do
(if (ruled-out-solution
?candidate (apply-match-criterion
(role-value ?psm has-match-criterion)
?obs
?candidate)
(role-value ?psm
'has-solution-exclusion-criterion))
(set-role-value ?psm 'has-ranked-candidates
(remove ?candidate
(role-value ?psm 'has-ranked-candidates)))
(return :nothing)))
(if (null (role-value ?psm 'has-ranked-candidates))
(role-value ?psm
'has-current-solutions)
(complete-optimal-search ?psm
?obs
(role-value ?psm has-refiners)
(first (role-value ?psm 'has-ranked-candidates))
(rest (role-value ?psm 'has-ranked-candidates))
(role-value ?psm
'has-current-solutions)
(role-value ?psm has-match-criterion)
(role-value ?psm
'has-solution-admissibility-criterion)
(role-value ?psm
'has-solution-exclusion-criterion))))))))
:own-slots ((tackles-task-type optimal-classification-task)
(has-generic-subtasks '(abstraction
refinement rank-solutions))))
(def-procedure complete-optimal-search (?psm ?obs ?refs
?candidate
?other-candidates
?current-solutions
?match-criterion
?admissibility-criterion
?exclusion-criterion)
"Neither ?candidate no ?other-candidates have been ruled out
by the exclusion criterion"
:constraint (and (solution ?candidate)
(list ?other-candidates)
(list ?refs)
(list ?current-solutions)
(observables ?obs)
(problem-solving-method ?psm)
(match-criterion ?match-criterion)
(admissibility-criterion ?admissibility-criterion)
(solution-admissibility-criterion ?admissibility-criterion)
(solution-exclusion-criterion ?exclusion-criterion)
(not (ruled-out-solution
?candidate (apply-match-criterion
?match-criterion
?obs
?candidate)
?exclusion-criterion))
(forall ?c
(=> (member ?c ?other-candidates)
(not (ruled-out-solution
?c (apply-match-criterion
?match-criterion
?obs
?c)
?exclusion-criterion)))))
:body (in-environment
((?good-refined-sols . (filter (achieve-generic-subtask
?psm refinement
'has-candidate-solution ?top-candidate
'has-observables ?obs
'has-refiners ?refs)
'(kappa (?sol)
(not (ruled-out-solution
?sol (apply-match-criterion
?match-criterion
?obs
?sol)
?exclusion-criterion))))))
(if (null ?good-refined-sols)
(if (null ?other-candidates)
?current-solutions
(complete-optimal-search ?psm ?obs ?refs
(first ?other-candidates)
(rest ?other-candidates)
?current-solutions
?match-criterion
?admissibility-criterion
?exclusion-criterion))
;;there are some new useful refinements
(in-environment
((?ranked-candidates . (achieve-generic-subtask
?psm rank-solutions
'has-observables ?obs
'has-candidate-solutions (append
?other-candidates
?good-refined-sols)
'has-match-criterion ?match-criterion))
(?new-admissible-solutions . (filter ?good-refined-sols
'(kappa (?sol)
(admissible-solution
?sol
(apply-match-criterion
?match-criterion ?obs ?sol)
?match-criterion))))
(?new-current-solutions . (if (null ?new-admissible-solutions)
?current-solutions
(in-environment
((?ranked-sols . (achieve-generic-subtask
?psm rank-solutions
'has-observables ?obs
'has-candidate-solutions
(append
?current-solutions
?new-admissible-solutions)
'has-match-criterion
?match-criterion)))
(cons (first ?ranked-sols)
(filter ?ranked-sols
'(kappa (?sol)
(not (better-match-than
(first ?ranked-sols)
?sol
?match-criterion)))))))))
(complete-optimal-search ?psm ?obs ?refs
(first ?ranked-candidates)
(rest ?ranked-candidates)
?new-current-solutions
?match-criterion
?admissibility-criterion
?exclusion-criterion)))))
;;;;;;;;;;;;;;;;;;;;; Beginning of abstraction ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def-class abstractor (function)
((domain :value observables)
(range :value observable)
(applicability-condition :type abstractor-applicability-condition-class)))
(def-class abstractor-applicability-condition-class (unary-relation) ?x
"The applicability condition for an abstractor must be a relation whose domain is
a subset of observables"
:iff-def (subset (the ?d (domain ?x ?d))
observables))
(def-function apply-abstract-operator (?ab ?observables-in) -> ?ob
:constraint (and (abstractor ?ab)
(observable ?ob)
(observables ?observables-in)
)
:body (if (abstractor-is-applicable? ?ab ?observables-in)
(call (the ?body (has-body ?ab ?body))
?observables-in)))
(def-relation abstractor-is-applicable? (?ab ?observables)
:constraint (and (abstractor ?ab)
(observables ?observables))
:iff-def (holds (the ?appl (applicability-condition ?ab ?appl)) ?observables))
(def-class abstraction (goal-specification-task) ?task
"The goal of this task is to abstract from the given observables, using the given
abstractors. Note that the output also includes the original (non-abstracted)
observables"
((has-input-role :value has-observables
:value has-abstractors)
(has-output-role :value has-abstract-observables)
(has-observables :type observables)
(has-abstract-observables :type observables)
(has-abstractors :type list)
(has-goal-expression
:value (kappa (?task ?observables)
(and (generalized-abstract-from ?observables
(role-value ?task has-observables)
(role-value ?task has-abstractors))
(forall ?ab (=> (member ?ab (role-value
?task has-abstractors))
(not (abstractor-is-applicable?
?ab
?observables))))))))
:constraint (subset (role-value ?task 'has-observables)
(role-value ?task 'has-abstract-observables)))
(def-relation generalized-abstract-from (?observables-out ?observables-in ?abs)
:constraint (and (observables ?observables-out)
(observables ?observables-in)
(abstractors ?abs))
:iff-def (forall (?ob)
(=> (member ?ob ?observables-out)
(or (directly-abstracted-from ?ob ?observables-in ?abs)
(exists (?observables-temp)
(and (directly-abstracted-from
?ob ?observables-temp ?abs)
(forall (?ob-temp)
(=> (member ?ob-temp ?observables-temp)
(or (member ?ob-temp ?observables-in)
(observable-abstracted-from
?ob-temp ?observables-in
?abs))))))))))
(def-relation generalised-abstract-link (?ob1 ?ob2 ?abs)
"?ob1 is in a chain of abstraction which stems from ?ob2"
:iff-def (and (member ?ob1 ?observables1)
(member ?ob2 ?observables2)
(generalized-abstract-from ?observables1 ?observables2 ?abs)))
(def-relation observable-abstracted-from (?ob ?observables ?abs)
:iff-def (or (directly-abstracted-from ?ob ?observables ?abs)
(exists (?observables2)
(and (generalized-abstract-from ?observables2 ?observables ?abs)
(member ?ob ?observables2)))))
(def-relation directly-abstracted-from (?ob ?observables ?abs)
:constraint (and (observables ?observables)
(observable ?ob)
(abstractors ?abs))
:iff-def (exists (?ab)
(and (member ?ab ?abs)
(abstractor-is-applicable? ?ab ?observables)
(member ?ob (apply-abstract-operator ?ab ?observables)))))
(def-class abstraction-psm (primitive-method) ?psm
"This method applies abstractors to the given data space (observables) repeatedly
until no abstractor can be used and returns a data space in which
all possible abstractions are done"
((has-control-role :value has-current-observables
:value has-current-abstractors)
(has-current-observables :type observables)
(has-current-abstractors :type list)
(has-body
:value '(lambda (?psm)
(do
(set-role-value ?psm
'has-current-observables
(role-value ?psm 'has-observables))
(set-role-value ?psm 'has-current-abstractors
(role-value ?psm 'has-abstractors))
(repeat
(in-environment
((?ab . (achieve-generic-subtask
?psm select-abstractor
'has-observables
(role-value
?psm 'has-current-observables)
'has-abstractors
(role-value
?psm 'has-current-abstractors))))
(if (abstractor ?ab)
(do
(set-role-value
?psm 'has-current-abstractors
(remove ?ab
(role-value
?psm
'has-current-abstractors)))
(in-environment
((?obs . (the ?obs2 (has-current-observables ?psm ?obs2)))
(?ob . (achieve-generic-subtask ?psm one-step-abstraction
'has-abstractor ?ab
'has-observables ?obs)))
(if (and (observable ?ob)
(not (member ?ob ?obs)))
(set-role-value ?psm 'has-current-observables
(cons ?ob ?obs)))))
(return (role-value ?psm 'has-current-observables)))))))))
:own-slots ((tackles-task-type 'abstraction)))
(def-class select-abstractor (goal-specification-task) ?task
((has-input-role :value has-observables
:value has-abstractors)
(has-output-role :value has-abstractor)
(has-observables :type observables)
(has-abstractors :type list)
(has-abstractor :type abstractor)
(has-goal-expression
:value
(kappa (?task ?abstractor)
(and (member ?abstractor (role-value ?task 'has-abstractors))
(abstractor-is-applicable?
?abstractor (role-value ?task 'has-observables))
(= ?ob (apply-abstract-operator
?abstractor (role-value ?task 'has-observables)))
(not (member ?ob (role-value ?task 'has-observables))))))))
(def-class select-abstractor-psm (primitive-method) ?psm
((has-body :value '(lambda (?psm)
(the ?x
(holds (the ?exp (has-goal-expression
(the ?task (tackles-task ?psm ?task))
?exp))
?psm
?x)))))
:own-slots ((tackles-task-type 'select-abstractor)))
(def-class one-step-abstraction (goal-specification-task) ?task
((has-input-role :value has-observables
:value has-abstractor)
(has-output-role :value has-abstract-observable)
(has-observables :type observables)
(has-abstractor :type abstractor)
(has-abstract-observable :type observable)
(has-goal-expression
:value (kappa (?task ?observable)
(directly-abstracted-from ?observable
(role-value ?task 'has-observables)
(list (role-value ?task
'has-abstractor)))))))
(def-class one-step-abstraction-psm (primitive-method) ?psm
((has-body :value '(lambda (?psm)
(apply-abstract-operator
(role-value ?psm 'has-abstractor)
(role-value ?psm 'has-observables)))))
:own-slots ((tackles-task-type 'one-step-abstraction)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;; end of abstraction ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;RANK-SOLUTIONS
(def-class rank-solutions (goal-specification-task) ?task
((has-input-role :value has-candidate-solutions
:value has-observables
:value has-match-criterion)
(has-output-role :value has-solutions)
(has-candidate-solutions :type solution-space)
(has-observables :type observables)
(has-match-criterion :type match-criterion
:default-value default-match-criterion)
(has-solutions :type solution-space)
(has-goal-expression
:documentation
"The goal is to rank the classes according to the match criterion.
The output should be a list of solutions, in which no solution follows
one which is worse"
:value (kappa (?task ?solutions)
(forall (?sol1 ?sol2)
(=> (and (member ?sol1 ?solutions)
(member ?sol2 ?solutions)
(precedes ?sol1 ?sol2 ?solutions))
(not (better-match-score ?sol2 ?sol1))))))))
(def-class rank-solutions-psm (primitive-method) ?psm
((has-input-role :value has-candidate-solutions
:value has-observables
:value has-match-criterion)
(has-output-role :value has-ranked-solutions)
(has-control-role :value has-sol-score-pairs)
(has-sol-score-pairs :type list
:default-value nil)
(has-candidate-solutions
:type solution-space)
(has-observables :type observables)
(has-match-criterion :type match-criterion
:default-value default-match-criterion)
(has-ranked-solutions :type list)
(has-precondition
:documentation "A list of candidates is required as input"
:value (kappa (?psm)
(list (role-value
?psm 'has-candidate-solutions))))
(has-postcondition
:value (kappa (?psm ?solutions)
(forall (?sol1 ?sol2)
(=> (and (member ?sol1 ?solutions)
(member ?sol2 ?solutions)
(precedes ?sol1 ?sol2 ?solutions))
(not (better-match-score ?sol2 ?sol1))))))
(has-body
:value '(lambda (?psm)
(do
(loop for ?candidate in (role-value
?psm 'has-candidate-solutions)
do
(set-role-value
?psm has-sol-score-pairs
(cons (list-of ?candidate
(achieve-generic-subtask
?psm basic-heuristic-match
'has-observables
(role-value
?psm
has-observables )
'has-candidate-solution ?candidate
'has-match-criterion
(role-value
?psm
has-match-criterion)))
(role-value ?psm has-sol-score-pairs))))
(map '(lambda (?pair)
(first ?pair))
(sort (role-value ?psm has-sol-score-pairs)
'(kappa (?pair1 ?pair2)
(better-match-score (second ?pair1)(second ?pair2)
(role-value
?psm
'has-match-criterion)))))))))
:own-slots ((has-generic-subtasks '(basic-heuristic-match))
(tackles-task-type 'rank-solutions)))
(def-class basic-heuristic-match (goal-specification-task) ?task
((has-input-role :value has-candidate-solution
:value has-observables
:value has-match-criterion)
(has-output-role :value has-score)
(has-candidate-solution :type solution)
(has-observables :type observables)
(has-match-criterion :type match-criterion
:default-value default-match-criterion)
(has-score :type match-score)
(has-goal-expression
;;the goal is to find the best matching classes
:value (kappa (?task ?score)
(match-score ?score)))))
(def-class basic-heuristic-match-psm (primitive-method) ?psm
((has-precondition
:documentation "A list of candidates is required as input"
:value (kappa (?psm)
(and (solution (role-value
?psm 'has-candidate-solution))
(exists ?x
(member ?X (role-value
?psm 'has-observables))))))
(has-postcondition
:value (kappa (?psm ?score)
(match-score ?score)))
(has-body
:value '(lambda (?psm)
(apply-match-criterion
(role-value
?psm 'has-match-criterion)
(role-value
?psm 'has-observables)
(role-value
?psm 'has-candidate-solution)))))
:own-slots (
(tackles-task-type 'basic-heuristic-match)))
;;;;;;;;;;REFINEMENT TASK;;;;;;;;;;;;;;;;;;;;;;;;;
;;;CLASS REFINER
(def-class refiner (function)
((domain :value solution)
(range :value solution-space)
(applicability-condition :type refiner-applicability-condition-class)))
(def-class refiner-applicability-condition-class (unary-relation) ?x
"The applicability condition for a refiner must be a relation whose domain is
a subset of class solution"
:iff-def (subset ?x solution))
(def-relation refiner-is-applicable? (?ref ?solution)
:constraint (and (solution ?solution)
(refiner ?ref))
:iff-def (holds (the ?appl (applicability-condition ?ref ?appl)) ?solution))
(def-relation some-refiner-is-applicable? (?refs ?solution)
:constraint (and (solution ?solution)
(list ?refs))
:iff-def (exists ?ref (and (member ?ref ?refs)
(refiner-is-applicable? ?ref ?solution))))
(def-function apply-refiner-operator (?ref ?solution) -> ?sols
:constraint (and (solution-space ?sols)
(refiner ?ref)
(solution ?solution)
)
:body (if (refiner-is-applicable? ?ref ?solution)
(call (the ?body (has-body ?ref ?body))
?solution)))
- REFINEMENT - OF
(def-relation generalised-refinement-of (?sol-out ?sol-in ?refs)
:iff-def (or (exists ?ref (and (member ?ref ?refs)
(member ?sol-out (apply-refiner-operator
?ref ?sol-in))))
(exists ?sol-temp
(and (generalised-refinement-of ?sol-temp ?sol-in ?refs)
(generalised-refinement-of ?sol-out ?sol-temp ?refs)))))
;;;REFINEMENT-THROUGH-SUBCLASS-OF-LINKS
(def-instance refinement-through-subclass-of-links refiner
"If the solution space is specified by means of classes arranged in a subclass-of
hierarchy, then this is a good refiner to use"
((has-body '(lambda (?sol)
(setofall ?sub (direct-subclass-of ?sub ?sol))))
(applicability-condition (kappa (?sol)
(and (class ?sol)
(exists ?sub (direct-subclass-of ?sub ?sol)))))))
;;;TASK REFINEMENT
(def-class refinement (goal-specification-task) ?task
"The goal of this task is to take a solution at a given
level of refinement, say n, and to try refine it, using the
given set of refiners. All refiners are tried for each solution at level n, but
the refinement hiearchy is navigated down 1 level only. That is, the output will
include at most solutions at level n+1.
If a solution at level n cannot be refined, the method returns nil"
((has-input-role :value has-candidate-solution
:value has-observables
:value has-refiners)
(has-output-role :value has-refined-solutions)
(has-candidate-solution :type solution)
(has-observables :type observables)
(has-refined-solutions :type solution-space)
(has-refiners :type list)
(has-goal-expression
:value
(kappa (?task ?solutions)
(forall ?sol
(=> (member ?sol ?solutions)
(exists ?ref
(and (member ?ref (role-value
?task has-refiners))
(member ?sol
(apply-refiner-operator
?ref (role-value
?task 'has-candidate-solution)))))))))))
VANILLA - REFINEMENT - PSM
(def-class vanilla-refinement-psm (decomposition-method) ?psm
"This method applies all refiners applicable to the given input solution"
((has-body
:value
'(lambda (?psm)
(in-environment
((?sol . (role-value ?psm 'has-candidate-solution))
(?refs . (achieve-generic-subtask
?psm collect-refiners
has-solution ?sol
has-refiners (role-value ?psm has-refiners))))
(achieve-generic-subtask ?psm apply-refiners
has-solution ?sol
has-refiners ?refs)))))
:own-slots ((has-generic-subtasks '(collect-refiners
apply-refiners ))
(tackles-task-type 'refinement)))
TASK COLLECT - REFINERS
(def-class collect-refiners (goal-specification-task) ?task
((has-input-role :value has-solution
:value has-refiners)
(has-output-role :value has-applicable-refiners)
(has-solution :type solution)
(has-refiners :type list)
(has-applicable-refiners :type list)
(has-goal-expression
:value (kappa (?task ?refiners)
(and (subset ?refiners (role-value ?task has-refiners))
(not (exists (?ref)
(and (member ?ref ?refiners)
(not (refiner-is-applicable?
?ref (role-value
?task has-solution)))))))))))
STANDARD - COLLECT - REFINERS - PSM
(def-class standard-collect-refiners-psm (primitive-method) ?psm
((has-body :value '(lambda (?psm)
(setofall ?x (and (member ?x (role-value ?psm has-refiners))
(refiner-is-applicable?
?x (role-value ?psm has-solution)))))))
:own-slots ((tackles-task-type 'collect-refiners)))
;;;TASK APPLY-REFINERS
(def-class apply-refiners (goal-specification-task) ?task
"The goal of this task is to generate all possible refinement of a solution, given
a set of refiners. Only one-step refinement is carried out here. That is, if the
input solution is defined at level n, the output can only be at level n+1."
((has-input-role :value has-solution
:value has-refiners)
(has-output-role :value has-refined-solutions)
(has-solution :type solution)
(has-refiners :type list)
(has-refined-solutions :type solution-space)
(has-goal-expression
:value (kappa (?task ?solutions)
(forall ?sol
(=> (member ?sol ?solutions)
(exists ?ref
(and (member
?ref (role-value ?task 'has-refiners)
(member ?sol
(apply-refiner-operator
?ref
(role-value
?task
'has-solution))))))))))))
;;;METHOD APPLY-REFINERS-PSM
(def-class apply-refiners-psm (problem-solving-method) ?psm
((has-control-role :value has-current-solutions)
(has-current-solutions :type solution-space)
(has-precondition :value (kappa (?psm)
(every (role-value ?psm 'has-refiners)
(kappa (?x)
(refiner-is-applicable?
?x
(role-value
?psm has-solution))))))
(has-body :value
'(lambda (?psm)
(do
(set-role-value ?psm 'has-current-solutions nil)
(loop for ?ref in (role-value ?psm has-refiners)
do
(in-environment
((?sols . (apply-refiner-operator
?ref
(role-value ?psm 'has-solution))))
(set-role-value ?psm 'has-current-solutions
(union (role-value ?psm
'has-current-solutions)
?sols))))
(role-value ?psm
'has-current-solutions)))))
:own-slots ((tackles-task-type 'apply-refiners)
))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun heuristic-classify (&key solution-space observables abstractors refiners
domain-name
(task-type 'OPTIMAL-CLASSIFICATION-TASK)
(match-criterion 'default-match-criterion)
(method-type 'heuristic-classification-psm)
(solution-admissibility-criterion
'default-solution-admissibility-criterion)
(solution-exclusion-criterion
'default-candidate-exclusion-criterion))
(let* ((task (define-domain-instance
(gentemp "CLASSIFICATION-TASK")
task-type
""
`((has-candidate-solutions ,solution-space)
(has-observables ,observables)
(has-match-criterion ,match-criterion)
(has-solution-admissibility-criterion
,solution-admissibility-criterion ))))
(method (define-domain-instance
(gentemp "METHOD")
method-type
""
`((has-abstractors ,abstractors)
(has-solution-exclusion-criterion ,solution-exclusion-criterion)
(has-refiners ,refiners))))
(application (define-domain-instance
(gentemp "CLASSIFICATION-APPLICATION")
'application
""
`((tackles-domain ,domain-name)
(uses-method ,(name method))
(tackles-task ,(name task))))))
(ocml-eval-gen `(solve-application ,(name application)))))
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/ontologies/methods/heuristic-classification/heuristic-classifierv2.lisp | lisp | Syntax : Common - lisp ; Base : 10 ; Package : OCML ; -*-
HEURISTIC-CLASSIFICATION :METHOD ;;;;
THE-VIRTUAL-SOLUTION-SPACE
CLASS CANDIDATE-EXCLUSION-CRITERION
INSTANCE DEFAULT-CANDIDATE-EXCLUSION-CRITERION
RELATION DEFAULT-CANDIDATE-EXCLUSION-RELATION
RELATION RULED-OUT-SOLUTION
AXIOM EXCLUSION-IS-MONOTONIC
HEURISTIC-ADMISSIBLE-SOL-CLASSIFIER
HEURISTIC-OPTIMAL-SOL-CLASSIFIER
remove ruled out candidates
there are some new useful refinements
Beginning of abstraction ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
end of abstraction ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
RANK-SOLUTIONS
the goal is to find the best matching classes
REFINEMENT TASK;;;;;;;;;;;;;;;;;;;;;;;;;
CLASS REFINER
REFINEMENT-THROUGH-SUBCLASS-OF-LINKS
TASK REFINEMENT
TASK APPLY-REFINERS
METHOD APPLY-REFINERS-PSM
|
(in-package "OCML")
(in-ontology heuristic-classification)
(def-function the-virtual-solution-space (?init-space ?refs) -> ?solution-space
"The space generated by refinement application from an initial solution space"
:constraint (and (every ?refs refiner)
(solution-space ?init-space))
:def (= ?solution-space
(setofall ?sol2 (or (member ?sol2 ?init-space)
(exists ?sol3
(and (member ?sol3
(the-virtual-solution-space
?init-space ?refs))
(generalised-refinement-of ?sol2
?sol3
?refs)))))))
(def-class candidate-exclusion-criterion ()
"this provides us with a way to specify when a solution is not
admissible. we also use this in conjunction with a solution refinement
hierarchy, to prune all refinements of an unadmissible solution"
((applies-to-match-score-type :type match-score-type)
(has-candidate-exclusion-relation :type unary-relation)
))
(def-instance default-candidate-exclusion-criterion candidate-exclusion-criterion
((applies-to-match-score-type default-match-score)
(has-candidate-exclusion-relation default-candidate-exclusion-relation)))
(def-relation default-candidate-exclusion-relation (?score)
"a solution is excluded if it has one or more inconsistent features"
:constraint (default-match-score ?score)
:iff-def (> (length (first ?score)) 0))
(def-relation ruled-out-solution (?sol ?score ?criterion)
:iff-def (holds (the ?rel (has-candidate-exclusion-relation ?criterion ?rel))
?score))
(def-axiom exclusion-is-monotonic
"This axiom states that the exclusion criterion is monotonic. That is,
if a solution, ?sol, is ruled out, then any solution which has a worse score than
?sol will also be ruled out"
(forall (?sol1 ?sol2 ?obs ?criterion)
(=> (and (ruled-out-solution
?sol1 (apply-match-criterion ?criterion ?obs ?sol1) ?criterion)
(not (better-match-than ?sol2 ?sol1 ?obs ?criterion)))
(ruled-out-solution
?sol2 (apply-match-criterion ?criterion ?obs ?sol2) ?criterion))))
(def-axiom CONGRUENT-ADMISSIBILITY-AND-EXCLUSION-CRITERIA
"This axiom states that the admissibility and exclusion criteria
have to be congruent: no solution should ever satisfy both"
(forall (?sol ?task)
(=> (member ?sol
(the-virtual-solution-space
(role-value ?task has-candidate-solutions)
(role-value ?task has-refiners)))
(not (and (admissible-solution
?sol
(apply-match-criterion
(role-value ?task 'has-match-criterion)
(role-value ?task 'has-observables)
?sol)
(role-value
?task
'has-solution-admissibility-criterion))
(ruled-out-solution ?sol
(apply-match-criterion
(role-value ?task 'has-match-criterion)
(role-value ?task 'has-observables)
?sol)
(role-value
?psm 'has-solution-exclusion-criterion)))))))
(def-class heuristic-admissible-sol-classifier (problem-solving-method) ?psm
"This is a basic method for finding an admissible solution to a
classification problem. It uses a backtracking hill-climbing solution to
home in on the most promising solutions. It is a heuristic, hierarchical
classifier, as it provides for both abstraction and refinement mechanisms.
If no refinement mechanisms are provided, then the psm just reduces to a
'flat' classifier."
((has-input-role :value has-abstractors
:value has-refiners
:value has-observables
:value has-candidate-solutions
:value has-solution-exclusion-criterion
)
(has-control-role :value 'has-current-solutions)
(has-output-role :value has-solution)
(has-current-solutions :type solution-space)
(has-abstractors :type abstractors)
(has-refiners :type refiners)
(has-observables :type observables)
(has-solution-exclusion-criterion :type candidate-exclusion-criterion
:default-value default-candidate-exclusion-criterion)
(has-candidate-solutions :type solution-space)
(has-solution :type solution)
(has-assumption
:value (kappa (?psm)
(not (exists (?ob1 ?ob2)
(and (or (member ?ob1
(role-value ?psm has-observables))
(exists ?ob
(and (member
?ob
(role-value ?psm has-observables))
(generalised-abstract-link
?ob1 ?ob
(role-value ?psm 'has-abstractors)))))
(generalised-abstract-link
?ob2 ?ob1
(role-value ?psm 'has-abstractors))
(generalised-abstract-link
?ob1 ?ob2
(role-value ?psm 'has-abstractors))))))
:value (kappa (?psm)
(not (exists (?sol1 ?sol2)
(and (or (member ?sol1
(role-value ?psm 'has-candidate-solutions))
(exists ?sol
(and (member ?sol
(role-value
?psm
'has-candidate-solutions))
(generalised-refinement-of
?sol1 ?sol
(role-value ?psm has-refiners)))))
(generalised-refinement-of
?sol2 ?sol1
(role-value ?psm has-refiners))
(generalised-refinement-of
?sol1 ?sol2
(role-value ?psm has-refiners))))))
:value (kappa (?psm)
(exists ?sol
(and (admissible-solution
?sol
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion))
(member ?sol
(the-virtual-solution-space
(role-value ?psm has-candidate-solutions)
(role-value ?psm has-refiners))))))
:value (kappa (?psm)
(forall (?sol ?score)
(=>
(and
(sol-has-match-score ?sol
(role-value ?psm 'has-observables)
?score
(role-value
?psm 'has-match-criterion))
(ruled-out-solution ?sol ?score
(role-value
?psm 'has-solution-exclusion-criterion)))
(not (exists
?sol2
(and (generalised-refinement-of
?sol2 ?sol (role-value
?psm has-refiners))
(admissible-solution
?sol2
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol2)
(role-value
?psm
'has-solution-admissibility-criterion))))))))
:documentation "There are 4 assumptions associated with this method. The first
two state that the abstraction and refinement hierarchies have to
be free of cycles. The third states that the psm assumes the existence
of a solution in the virtual solution space - i.e., if such a solution
exists, the psm will find it. The final assumption states that the
method assumes that the exclusion criterion is correct. That is, if a
solution is excluded, all its refinements can be excluded too.")
(has-postcondition
:value (kappa (?psm ?sol)
(admissible-solution
?sol
(apply-match-criterion
(role-value ?psm 'has-match-criterion)
(role-value ?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion)))
:documentation "The output solution satisfies the admissibility criterion")
(has-body
:value
'(lambda (?psm)
(in-environment
((?obs . (achieve-generic-subtask
?psm abstraction
'has-observables (role-value ?psm has-observables)
'has-abstractors (role-value ?psm has-abstractors))))
(do
(set-role-value ?psm 'has-current-solutions
(role-value ?psm has-candidate-solutions))
(if (exists ?sol
(and (member ?sol (role-value
?psm 'has-current-solutions))
(admissible-solution
?sol
(apply-match-criterion
(role-value
?psm 'has-match-criterion)
(role-value
?psm 'has-observables)
?sol)
(role-value
?psm
'has-solution-admissibility-criterion))))
?sol
(in-environment
((?current-sols . (role-value
?psm 'has-current-solutions))
(?ranked-candidates . (achieve-generic-subtask
?psm rank-solutions
'has-observables ?obs
'has-candidate-solutions
?current-sols
'has-match-criterion
(role-value
?psm has-match-criterion))))
(loop for ?candidate in ?ranked-candidates
do
(if (not (ruled-out-solution ?candidate
(apply-match-criterion
(role-value
?psm 'has-match-criterion)
(role-value
?psm 'has-observables)
?candidate)
(role-value
?psm 'has-solution-exclusion-criterion)))
(in-environment
((?refined-sols . (achieve-generic-subtask
?psm refinement
'has-candidate-solution ?candidate
'has-observables ?obs
'has-refiners (role-value
?psm has-refiners))))
(if (not (null ?refined-sols))
(in-environment
((?result . (achieve-generic-subtask
?psm heuristic-admissible-sol-classifier
'has-observables ?obs
'has-candidate-solutions ?refined-sols)))
(if (not (= ?result :nothing))
(return ?result)))))))))))))
(has-output-mapping
:value '(lambda (?psm ?result)
(list-of ?result))))
:own-slots ((tackles-task-type single-solution-classification-task)
(has-generic-subtasks '(abstraction
refinement rank-solutions))))
(def-class heuristic-optimal-sol-classifier (problem-solving-method) ?psm
  "This method exhaustively searches a solution space looking for an optimal
   solution. It uses the exclusion criterion to prune the search space. It
   returns one or more optimal solutions"
  ((has-input-role :value has-abstractors
                   :value has-refiners
                   :value has-observables
                   :value has-candidate-solutions
                   :value has-solution-exclusion-criterion)
   (has-control-role :value 'has-current-solutions
                     :value 'has-ranked-candidates)
   (has-output-role :value has-solutions)
   (has-current-solutions :type solution-space)
   (has-ranked-candidates :type solution-space)
   (has-abstractors :type abstractors)
   (has-refiners :type refiners)
   (has-observables :type observables)
   (has-solution-exclusion-criterion
    :type candidate-exclusion-criterion
    :default-value default-candidate-exclusion-criterion)
   (has-candidate-solutions :type solution-space)
   (has-solutions :type solution-space)
   (has-assumption
    ;; Assumption 1: the abstraction hierarchy is free of cycles.
    :value (kappa (?psm)
             (not (exists (?ob1 ?ob2)
                    (and (or (member ?ob1 (role-value ?psm has-observables))
                             (exists ?ob
                               (and (member ?ob
                                            (role-value ?psm has-observables))
                                    (generalised-abstract-link
                                     ?ob1 ?ob
                                     (role-value ?psm 'has-abstractors)))))
                         (generalised-abstract-link
                          ?ob2 ?ob1
                          (role-value ?psm 'has-abstractors))
                         (generalised-abstract-link
                          ?ob1 ?ob2
                          (role-value ?psm 'has-abstractors))))))
    ;; Assumption 2: the refinement hierarchy is free of cycles.
    :value (kappa (?psm)
             (not (exists (?sol1 ?sol2)
                    (and (or (member ?sol1
                                     (role-value ?psm 'has-candidate-solutions))
                             (exists ?sol
                               (and (member ?sol
                                            (role-value
                                             ?psm 'has-candidate-solutions))
                                    (generalised-refinement-of
                                     ?sol1 ?sol
                                     (role-value ?psm has-refiners)))))
                         (generalised-refinement-of
                          ?sol2 ?sol1
                          (role-value ?psm has-refiners))
                         (generalised-refinement-of
                          ?sol1 ?sol2
                          (role-value ?psm has-refiners))))))
    ;; Assumption 3: an admissible solution exists in the virtual
    ;; solution space spanned by the candidates and the refiners.
    :value (kappa (?psm)
             (exists ?sol
               (and (admissible-solution
                     ?sol
                     (apply-match-criterion
                      (role-value ?psm 'has-match-criterion)
                      (role-value ?psm 'has-observables)
                      ?sol)
                     (role-value
                      ?psm
                      'has-solution-admissibility-criterion))
                    (member ?sol
                            (the-virtual-solution-space
                             (role-value ?psm has-candidate-solutions)
                             (role-value ?psm has-refiners))))))
    ;; Assumption 4: the exclusion criterion is correct - excluding a
    ;; solution also excludes all its refinements.
    :value (kappa (?psm)
             (forall (?sol ?score)
               (=>
                (and
                 (sol-has-match-score ?sol
                                      (role-value ?psm 'has-observables)
                                      ?score
                                      (role-value
                                       ?psm 'has-match-criterion))
                 (ruled-out-solution ?sol ?score
                                     (role-value
                                      ?psm 'has-solution-exclusion-criterion)))
                (not (exists
                      ?sol2
                      (and (generalised-refinement-of
                            ?sol2 ?sol (role-value
                                        ?psm has-refiners))
                           (admissible-solution
                            ?sol2
                            (apply-match-criterion
                             (role-value ?psm 'has-match-criterion)
                             (role-value ?psm 'has-observables)
                             ?sol2)
                            (role-value
                             ?psm
                             'has-solution-admissibility-criterion))))))))
    :documentation "There are 4 assumptions associated with this method. The first
                    two state that the abstraction and refinement hierarchies have to
                    be free of cycles. The third states that the psm assumes the existence
                    of a solution in the virtual solution space - i.e., if such a solution
                    exists, the psm will find it. The final assumption states that the
                    method assumes that the exclusion criterion is correct. That is, if a
                    solution is excluded, all its refinements can be excluded too.")
   (has-postcondition
    ;; BUG FIX: the original postcondition referred to the unbound
    ;; variable ?task (the kappa binds ?psm), and called
    ;; the-virtual-solution-space with (observables, candidate-solutions)
    ;; instead of (candidate-solutions, refiners) as in assumption 3.
    :value (kappa (?psm ?sols)
             (forall ?sol
               (=> (member ?sol ?sols)
                   (and
                    (admissible-solution
                     ?sol
                     (apply-match-criterion
                      (role-value ?psm 'has-match-criterion)
                      (role-value ?psm 'has-observables)
                      ?sol)
                     (role-value
                      ?psm
                      'has-solution-admissibility-criterion))
                    (best-match
                     (role-value ?psm 'has-observables)
                     ?sol
                     (the-virtual-solution-space
                      (role-value ?psm 'has-candidate-solutions)
                      (role-value ?psm 'has-refiners))
                     (role-value ?psm 'has-match-criterion))))))
    :documentation "The output solutions are both admissible and optimal
                    with respect to the solution space generated through
                    refinement application")
   (has-body
    :value
    '(lambda (?psm)
       (in-environment
        ;; First abstract the observables once and for all.
        ((?obs . (achieve-generic-subtask
                  ?psm abstraction
                  'has-observables (role-value ?psm has-observables)
                  'has-abstractors (role-value ?psm has-abstractors))))
        (do
         ;; Rank the initial candidates against the abstracted observables.
         (set-role-value ?psm 'has-ranked-candidates
                         (achieve-generic-subtask
                          ?psm rank-solutions
                          'has-observables ?obs
                          'has-candidate-solutions
                          (role-value ?psm 'has-candidate-solutions)
                          'has-match-criterion (role-value
                                                ?psm has-match-criterion)))
         ;; Initialize current solutions
         (set-role-value ?psm 'has-current-solutions nil)
         ;; If the best ranked candidate is admissible, seed the current
         ;; solutions with it and with every candidate tied with it.
         (if (admissible-solution
              (first (role-value ?psm 'has-ranked-candidates))
              (apply-match-criterion (role-value ?psm has-match-criterion)
                                     ?obs
                                     (first
                                      (role-value ?psm 'has-ranked-candidates)))
              (role-value ?psm
                          'has-solution-admissibility-criterion))
             (do
              (set-role-value ?psm 'has-current-solutions
                              (List-of (first
                                        (role-value ?psm 'has-ranked-candidates))))
              (loop for ?candidate in (rest (role-value ?psm 'has-ranked-candidates))
                    do
                    (if (better-match-than
                         (first (role-value ?psm 'has-ranked-candidates))
                         ?candidate ?obs
                         (role-value ?psm has-match-criterion))
                        (return :nothing)
                        (set-role-value ?psm 'has-current-solutions
                                        (cons ?candidate
                                              (role-value ?psm
                                                          'has-current-solutions)))))))
         ;; Prune ruled-out candidates from the tail of the ranking.
         (loop for ?candidate in (reverse (role-value ?psm 'has-ranked-candidates))
               do
               (if (ruled-out-solution
                    ?candidate (apply-match-criterion
                                (role-value ?psm has-match-criterion)
                                ?obs
                                ?candidate)
                    (role-value ?psm
                                'has-solution-exclusion-criterion))
                   (set-role-value ?psm 'has-ranked-candidates
                                   (remove ?candidate
                                           (role-value ?psm 'has-ranked-candidates)))
                   (return :nothing)))
         ;; Either every candidate was pruned, or descend the refinement
         ;; hierarchy looking for better solutions.
         (if (null (role-value ?psm 'has-ranked-candidates))
             (role-value ?psm
                         'has-current-solutions)
             (complete-optimal-search ?psm
                                      ?obs
                                      (role-value ?psm has-refiners)
                                      (first (role-value ?psm 'has-ranked-candidates))
                                      (rest (role-value ?psm 'has-ranked-candidates))
                                      (role-value ?psm
                                                  'has-current-solutions)
                                      (role-value ?psm has-match-criterion)
                                      (role-value ?psm
                                                  'has-solution-admissibility-criterion)
                                      (role-value ?psm
                                                  'has-solution-exclusion-criterion))))))))
  :own-slots ((tackles-task-type optimal-classification-task)
              (has-generic-subtasks '(abstraction
                                      refinement rank-solutions))))
(def-procedure complete-optimal-search (?psm ?obs ?refs
                                        ?candidate
                                        ?other-candidates
                                        ?current-solutions
                                        ?match-criterion
                                        ?admissibility-criterion
                                        ?exclusion-criterion)
  "Neither ?candidate nor ?other-candidates have been ruled out
   by the exclusion criterion"
  :constraint (and (solution ?candidate)
                   (list ?other-candidates)
                   (list ?refs)
                   (list ?current-solutions)
                   (observables ?obs)
                   (problem-solving-method ?psm)
                   (match-criterion ?match-criterion)
                   (admissibility-criterion ?admissibility-criterion)
                   (solution-admissibility-criterion ?admissibility-criterion)
                   (solution-exclusion-criterion ?exclusion-criterion)
                   (not (ruled-out-solution
                         ?candidate (apply-match-criterion
                                     ?match-criterion
                                     ?obs
                                     ?candidate)
                         ?exclusion-criterion))
                   (forall ?c
                           (=> (member ?c ?other-candidates)
                               (not (ruled-out-solution
                                     ?c (apply-match-criterion
                                         ?match-criterion
                                         ?obs
                                         ?c)
                                     ?exclusion-criterion)))))
  :body (in-environment
         ;; BUG FIX: the original referenced the unbound variable
         ;; ?top-candidate; the parameter is ?candidate.
         ((?good-refined-sols . (filter (achieve-generic-subtask
                                         ?psm refinement
                                         'has-candidate-solution ?candidate
                                         'has-observables ?obs
                                         'has-refiners ?refs)
                                        '(kappa (?sol)
                                           (not (ruled-out-solution
                                                 ?sol (apply-match-criterion
                                                       ?match-criterion
                                                       ?obs
                                                       ?sol)
                                                 ?exclusion-criterion))))))
         (if (null ?good-refined-sols)
             ;; No surviving refinement: move on to the next candidate,
             ;; or terminate when none are left.
             (if (null ?other-candidates)
                 ?current-solutions
                 (complete-optimal-search ?psm ?obs ?refs
                                          (first ?other-candidates)
                                          (rest ?other-candidates)
                                          ?current-solutions
                                          ?match-criterion
                                          ?admissibility-criterion
                                          ?exclusion-criterion))
             (in-environment
              ((?ranked-candidates . (achieve-generic-subtask
                                      ?psm rank-solutions
                                      'has-observables ?obs
                                      'has-candidate-solutions (append
                                                                ?other-candidates
                                                                ?good-refined-sols)
                                      'has-match-criterion ?match-criterion))
               ;; BUG FIX: admissibility was checked against
               ;; ?match-criterion; it must use ?admissibility-criterion.
               (?new-admissible-solutions . (filter ?good-refined-sols
                                                    '(kappa (?sol)
                                                       (admissible-solution
                                                        ?sol
                                                        (apply-match-criterion
                                                         ?match-criterion ?obs ?sol)
                                                        ?admissibility-criterion))))
               (?new-current-solutions . (if (null ?new-admissible-solutions)
                                             ?current-solutions
                                             (in-environment
                                              ((?ranked-sols . (achieve-generic-subtask
                                                                ?psm rank-solutions
                                                                'has-observables ?obs
                                                                'has-candidate-solutions
                                                                (append
                                                                 ?current-solutions
                                                                 ?new-admissible-solutions)
                                                                'has-match-criterion
                                                                ?match-criterion)))
                                              ;; BUG FIX: filter only the rest of the
                                              ;; ranking (the original consed the head
                                              ;; onto a filter of the full list, which
                                              ;; duplicated it), and pass ?obs to
                                              ;; better-match-than, which takes 4
                                              ;; arguments elsewhere in this library.
                                              (cons (first ?ranked-sols)
                                                    (filter (rest ?ranked-sols)
                                                            '(kappa (?sol)
                                                               (not (better-match-than
                                                                     (first ?ranked-sols)
                                                                     ?sol
                                                                     ?obs
                                                                     ?match-criterion)))))))))
              (complete-optimal-search ?psm ?obs ?refs
                                       (first ?ranked-candidates)
                                       (rest ?ranked-candidates)
                                       ?new-current-solutions
                                       ?match-criterion
                                       ?admissibility-criterion
                                       ?exclusion-criterion)))))
;; An abstractor is a function from a set of observables to a single
;; (abstracted) observable; its applicability-condition guards when it
;; may fire (see abstractor-is-applicable?).
(def-class abstractor (function)
((domain :value observables)
(range :value observable)
(applicability-condition :type abstractor-applicability-condition-class)))
(def-class abstractor-applicability-condition-class (unary-relation) ?x
"The applicability condition for an abstractor must be a relation whose domain is
a subset of observables"
;; Checked structurally: the relation's declared domain must be a subset
;; of the class observables.
:iff-def (subset (the ?d (domain ?x ?d))
observables))
;; Applies abstractor ?ab to ?observables-in, yielding the abstracted
;; observable.
;; NOTE(review): the if has no else branch, so the result is unspecified
;; when the abstractor is not applicable -- callers appear to test
;; abstractor-is-applicable? first; confirm.
(def-function apply-abstract-operator (?ab ?observables-in) -> ?ob
:constraint (and (abstractor ?ab)
(observable ?ob)
(observables ?observables-in)
)
:body (if (abstractor-is-applicable? ?ab ?observables-in)
(call (the ?body (has-body ?ab ?body))
?observables-in)))
;; Holds iff the applicability condition attached to ?ab is satisfied by
;; ?observables.
(def-relation abstractor-is-applicable? (?ab ?observables)
:constraint (and (abstractor ?ab)
(observables ?observables))
:iff-def (holds (the ?appl (applicability-condition ?ab ?appl)) ?observables))
;; Task: compute the abstraction closure of the input observables.
;; The goal holds when the output is abstracted from the input and no
;; abstractor remains applicable to it (a fixpoint).
(def-class abstraction (goal-specification-task) ?task
"The goal of this task is to abstract from the given observables, using the given
abstractors. Note that the output also includes the original (non-abstracted)
observables"
((has-input-role :value has-observables
:value has-abstractors)
(has-output-role :value has-abstract-observables)
(has-observables :type observables)
(has-abstract-observables :type observables)
(has-abstractors :type list)
(has-goal-expression
:value (kappa (?task ?observables)
(and (generalized-abstract-from ?observables
(role-value ?task has-observables)
(role-value ?task has-abstractors))
(forall ?ab (=> (member ?ab (role-value
?task has-abstractors))
(not (abstractor-is-applicable?
?ab
?observables))))))))
;; The class-level constraint enforces that the original observables are
;; carried over into the output.
:constraint (subset (role-value ?task 'has-observables)
(role-value ?task 'has-abstract-observables)))
;; Holds when every observable in ?observables-out can be derived from
;; ?observables-in by (possibly chained) applications of the abstractors
;; in ?abs.
(def-relation generalized-abstract-from (?observables-out ?observables-in ?abs)
:constraint (and (observables ?observables-out)
(observables ?observables-in)
(abstractors ?abs))
:iff-def (forall (?ob)
(=> (member ?ob ?observables-out)
(or (directly-abstracted-from ?ob ?observables-in ?abs)
(exists (?observables-temp)
(and (directly-abstracted-from
?ob ?observables-temp ?abs)
(forall (?ob-temp)
(=> (member ?ob-temp ?observables-temp)
(or (member ?ob-temp ?observables-in)
(observable-abstracted-from
?ob-temp ?observables-in
?abs))))))))))
(def-relation generalised-abstract-link (?ob1 ?ob2 ?abs)
  "?ob1 is in a chain of abstraction which stems from ?ob2"
  ;; BUG FIX: ?observables1 and ?observables2 were free (unbound)
  ;; variables in the original definition; they are now explicitly
  ;; existentially quantified.
  :iff-def (exists (?observables1 ?observables2)
             (and (member ?ob1 ?observables1)
                  (member ?ob2 ?observables2)
                  (generalized-abstract-from ?observables1 ?observables2 ?abs))))
;; Holds when ?ob can be obtained from ?observables either directly or
;; through a chain of abstractions with ?abs.
(def-relation observable-abstracted-from (?ob ?observables ?abs)
:iff-def (or (directly-abstracted-from ?ob ?observables ?abs)
(exists (?observables2)
(and (generalized-abstract-from ?observables2 ?observables ?abs)
(member ?ob ?observables2)))))
;; Holds when a single application of some abstractor in ?abs to
;; ?observables yields ?ob.
(def-relation directly-abstracted-from (?ob ?observables ?abs)
:constraint (and (observables ?observables)
(observable ?ob)
(abstractors ?abs))
:iff-def (exists (?ab)
(and (member ?ab ?abs)
(abstractor-is-applicable? ?ab ?observables)
(member ?ob (apply-abstract-operator ?ab ?observables)))))
;; Method for the abstraction task: repeatedly selects an applicable
;; abstractor (generic subtask select-abstractor), applies it once
;; (generic subtask one-step-abstraction), and accumulates the new
;; observable until no abstractor produces anything new.
(def-class abstraction-psm (primitive-method) ?psm
"This method applies abstractors to the given data space (observables) repeatedly
until no abstractor can be used and returns a data space in which
all possible abstractions are done"
((has-control-role :value has-current-observables
:value has-current-abstractors)
(has-current-observables :type observables)
(has-current-abstractors :type list)
(has-body
:value '(lambda (?psm)
(do
(set-role-value ?psm
'has-current-observables
(role-value ?psm 'has-observables))
(set-role-value ?psm 'has-current-abstractors
(role-value ?psm 'has-abstractors))
(repeat
(in-environment
((?ab . (achieve-generic-subtask
?psm select-abstractor
'has-observables
(role-value
?psm 'has-current-observables)
'has-abstractors
(role-value
?psm 'has-current-abstractors))))
;; When no abstractor is selected, the loop terminates and the
;; accumulated observables are returned (the else branch below).
(if (abstractor ?ab)
(do
(set-role-value
?psm 'has-current-abstractors
(remove ?ab
(role-value
?psm
'has-current-abstractors)))
(in-environment
((?obs . (the ?obs2 (has-current-observables ?psm ?obs2)))
(?ob . (achieve-generic-subtask ?psm one-step-abstraction
'has-abstractor ?ab
'has-observables ?obs)))
(if (and (observable ?ob)
(not (member ?ob ?obs)))
(set-role-value ?psm 'has-current-observables
(cons ?ob ?obs)))))
(return (role-value ?psm 'has-current-observables)))))))))
:own-slots ((tackles-task-type 'abstraction)))
;; Task: pick one abstractor that is applicable to the current
;; observables and whose application produces something new.
;; NOTE(review): ?ob is free in the goal expression below; presumably the
;; OCML interpreter treats free variables in goal expressions as
;; implicitly existentially quantified -- confirm.
(def-class select-abstractor (goal-specification-task) ?task
((has-input-role :value has-observables
:value has-abstractors)
(has-output-role :value has-abstractor)
(has-observables :type observables)
(has-abstractors :type list)
(has-abstractor :type abstractor)
(has-goal-expression
:value
(kappa (?task ?abstractor)
(and (member ?abstractor (role-value ?task 'has-abstractors))
(abstractor-is-applicable?
?abstractor (role-value ?task 'has-observables))
(= ?ob (apply-abstract-operator
?abstractor (role-value ?task 'has-observables)))
(not (member ?ob (role-value ?task 'has-observables))))))))
;; Trivial method for select-abstractor: directly evaluates the goal
;; expression of the task it tackles and returns a satisfying binding.
(def-class select-abstractor-psm (primitive-method) ?psm
((has-body :value '(lambda (?psm)
(the ?x
(holds (the ?exp (has-goal-expression
(the ?task (tackles-task ?psm ?task))
?exp))
?psm
?x)))))
:own-slots ((tackles-task-type 'select-abstractor)))
;; Task: apply a single given abstractor once to the observables,
;; producing one abstract observable.
(def-class one-step-abstraction (goal-specification-task) ?task
((has-input-role :value has-observables
:value has-abstractor)
(has-output-role :value has-abstract-observable)
(has-observables :type observables)
(has-abstractor :type abstractor)
(has-abstract-observable :type observable)
(has-goal-expression
:value (kappa (?task ?observable)
(directly-abstracted-from ?observable
(role-value ?task 'has-observables)
(list (role-value ?task
'has-abstractor)))))))
;; Method for one-step-abstraction: delegates to apply-abstract-operator.
(def-class one-step-abstraction-psm (primitive-method) ?psm
((has-body :value '(lambda (?psm)
(apply-abstract-operator
(role-value ?psm 'has-abstractor)
(role-value ?psm 'has-observables)))))
:own-slots ((tackles-task-type 'one-step-abstraction)))
;; Task: order candidate solutions by match quality (best first).
;; NOTE(review): better-match-score is used here with 2 arguments, while
;; rank-solutions-psm calls it with 3 (including the match criterion) --
;; confirm the intended arity.
(def-class rank-solutions (goal-specification-task) ?task
((has-input-role :value has-candidate-solutions
:value has-observables
:value has-match-criterion)
(has-output-role :value has-solutions)
(has-candidate-solutions :type solution-space)
(has-observables :type observables)
(has-match-criterion :type match-criterion
:default-value default-match-criterion)
(has-solutions :type solution-space)
(has-goal-expression
:documentation
"The goal is to rank the classes according to the match criterion.
The output should be a list of solutions, in which no solution follows
one which is worse"
:value (kappa (?task ?solutions)
(forall (?sol1 ?sol2)
(=> (and (member ?sol1 ?solutions)
(member ?sol2 ?solutions)
(precedes ?sol1 ?sol2 ?solutions))
(not (better-match-score ?sol2 ?sol1))))))))
;; Method for rank-solutions: scores each candidate via the generic
;; subtask basic-heuristic-match, collecting (solution score) pairs, then
;; sorts the pairs by score and projects out the solutions.
(def-class rank-solutions-psm (primitive-method) ?psm
((has-input-role :value has-candidate-solutions
:value has-observables
:value has-match-criterion)
(has-output-role :value has-ranked-solutions)
(has-control-role :value has-sol-score-pairs)
(has-sol-score-pairs :type list
:default-value nil)
(has-candidate-solutions
:type solution-space)
(has-observables :type observables)
(has-match-criterion :type match-criterion
:default-value default-match-criterion)
(has-ranked-solutions :type list)
(has-precondition
:documentation "A list of candidates is required as input"
:value (kappa (?psm)
(list (role-value
?psm 'has-candidate-solutions))))
(has-postcondition
:value (kappa (?psm ?solutions)
(forall (?sol1 ?sol2)
(=> (and (member ?sol1 ?solutions)
(member ?sol2 ?solutions)
(precedes ?sol1 ?sol2 ?solutions))
(not (better-match-score ?sol2 ?sol1))))))
(has-body
:value '(lambda (?psm)
(do
;; Phase 1: score every candidate.
(loop for ?candidate in (role-value
?psm 'has-candidate-solutions)
do
(set-role-value
?psm has-sol-score-pairs
(cons (list-of ?candidate
(achieve-generic-subtask
?psm basic-heuristic-match
'has-observables
(role-value
?psm
has-observables )
'has-candidate-solution ?candidate
'has-match-criterion
(role-value
?psm
has-match-criterion)))
(role-value ?psm has-sol-score-pairs))))
;; Phase 2: sort pairs by score and keep only the solutions.
(map '(lambda (?pair)
(first ?pair))
(sort (role-value ?psm has-sol-score-pairs)
'(kappa (?pair1 ?pair2)
(better-match-score (second ?pair1)(second ?pair2)
(role-value
?psm
'has-match-criterion)))))))))
:own-slots ((has-generic-subtasks '(basic-heuristic-match))
(tackles-task-type 'rank-solutions)))
;; Task: compute a match score for a single candidate solution against
;; the observables. The goal expression only requires that the output is
;; a match-score; the scoring itself is delegated to the method.
(def-class basic-heuristic-match (goal-specification-task) ?task
((has-input-role :value has-candidate-solution
:value has-observables
:value has-match-criterion)
(has-output-role :value has-score)
(has-candidate-solution :type solution)
(has-observables :type observables)
(has-match-criterion :type match-criterion
:default-value default-match-criterion)
(has-score :type match-score)
(has-goal-expression
:value (kappa (?task ?score)
(match-score ?score)))))
;; Method for basic-heuristic-match: delegates scoring to
;; apply-match-criterion. (?x and ?X below read as the same symbol in
;; Lisp.)
(def-class basic-heuristic-match-psm (primitive-method) ?psm
((has-precondition
:documentation "A candidate solution and a non-empty set of observables are required as input"
:value (kappa (?psm)
(and (solution (role-value
?psm 'has-candidate-solution))
(exists ?x
(member ?X (role-value
?psm 'has-observables))))))
(has-postcondition
:value (kappa (?psm ?score)
(match-score ?score)))
(has-body
:value '(lambda (?psm)
(apply-match-criterion
(role-value
?psm 'has-match-criterion)
(role-value
?psm 'has-observables)
(role-value
?psm 'has-candidate-solution)))))
:own-slots (
(tackles-task-type 'basic-heuristic-match)))
;; A refiner maps a solution to a solution-space (its refinements); its
;; applicability-condition guards when it may fire.
(def-class refiner (function)
((domain :value solution)
(range :value solution-space)
(applicability-condition :type refiner-applicability-condition-class)))
(def-class refiner-applicability-condition-class (unary-relation) ?x
  "The applicability condition for a refiner must be a relation whose domain is
   a subset of class solution"
  ;; BUG FIX: the original tested (subset ?x solution), i.e. the relation
  ;; itself rather than its domain; the check now mirrors
  ;; abstractor-applicability-condition-class and the docstring above.
  :iff-def (subset (the ?d (domain ?x ?d))
                   solution))
;; Holds iff the applicability condition attached to ?ref is satisfied by
;; ?solution.
(def-relation refiner-is-applicable? (?ref ?solution)
:constraint (and (solution ?solution)
(refiner ?ref))
:iff-def (holds (the ?appl (applicability-condition ?ref ?appl)) ?solution))
;; Holds iff at least one refiner in the list ?refs applies to ?solution.
(def-relation some-refiner-is-applicable? (?refs ?solution)
:constraint (and (solution ?solution)
(list ?refs))
:iff-def (exists ?ref (and (member ?ref ?refs)
(refiner-is-applicable? ?ref ?solution))))
;; Applies refiner ?ref to ?solution, yielding the refined solutions.
;; NOTE(review): as with apply-abstract-operator, the result is
;; unspecified when the refiner is not applicable -- confirm callers
;; guard with refiner-is-applicable?.
(def-function apply-refiner-operator (?ref ?solution) -> ?sols
:constraint (and (solution-space ?sols)
(refiner ?ref)
(solution ?solution)
)
:body (if (refiner-is-applicable? ?ref ?solution)
(call (the ?body (has-body ?ref ?body))
?solution)))
;;; GENERALISED-REFINEMENT-OF
;; Transitive closure of one-step refinement: ?sol-out is reachable from
;; ?sol-in through one or more applications of refiners in ?refs.
(def-relation generalised-refinement-of (?sol-out ?sol-in ?refs)
:iff-def (or (exists ?ref (and (member ?ref ?refs)
(member ?sol-out (apply-refiner-operator
?ref ?sol-in))))
(exists ?sol-temp
(and (generalised-refinement-of ?sol-temp ?sol-in ?refs)
(generalised-refinement-of ?sol-out ?sol-temp ?refs)))))
;; Default refiner: when solutions are classes, refining means collecting
;; the direct subclasses.
(def-instance refinement-through-subclass-of-links refiner
"If the solution space is specified by means of classes arranged in a subclass-of
hierarchy, then this is a good refiner to use"
((has-body '(lambda (?sol)
(setofall ?sub (direct-subclass-of ?sub ?sol))))
(applicability-condition (kappa (?sol)
(and (class ?sol)
(exists ?sub (direct-subclass-of ?sub ?sol)))))))
;; Task: one-level refinement of a single candidate solution.
(def-class refinement (goal-specification-task) ?task
"The goal of this task is to take a solution at a given
level of refinement, say n, and to try refine it, using the
given set of refiners. All refiners are tried for each solution at level n, but
the refinement hierarchy is navigated down 1 level only. That is, the output will
include at most solutions at level n+1.
If a solution at level n cannot be refined, the method returns nil"
((has-input-role :value has-candidate-solution
:value has-observables
:value has-refiners)
(has-output-role :value has-refined-solutions)
(has-candidate-solution :type solution)
(has-observables :type observables)
(has-refined-solutions :type solution-space)
(has-refiners :type list)
(has-goal-expression
:value
(kappa (?task ?solutions)
(forall ?sol
(=> (member ?sol ?solutions)
(exists ?ref
(and (member ?ref (role-value
?task has-refiners))
(member ?sol
(apply-refiner-operator
?ref (role-value
?task 'has-candidate-solution)))))))))))
;;; VANILLA-REFINEMENT-PSM
;; Method for the refinement task: first collects the refiners that are
;; applicable to the input solution, then applies them all.
(def-class vanilla-refinement-psm (decomposition-method) ?psm
"This method applies all refiners applicable to the given input solution"
((has-body
:value
'(lambda (?psm)
(in-environment
((?sol . (role-value ?psm 'has-candidate-solution))
(?refs . (achieve-generic-subtask
?psm collect-refiners
has-solution ?sol
has-refiners (role-value ?psm has-refiners))))
(achieve-generic-subtask ?psm apply-refiners
has-solution ?sol
has-refiners ?refs)))))
:own-slots ((has-generic-subtasks '(collect-refiners
apply-refiners ))
(tackles-task-type 'refinement)))
;;; TASK COLLECT-REFINERS
;; Task: select, from the given refiners, those applicable to the given
;; solution.
(def-class collect-refiners (goal-specification-task) ?task
((has-input-role :value has-solution
:value has-refiners)
(has-output-role :value has-applicable-refiners)
(has-solution :type solution)
(has-refiners :type list)
(has-applicable-refiners :type list)
(has-goal-expression
:value (kappa (?task ?refiners)
(and (subset ?refiners (role-value ?task has-refiners))
(not (exists (?ref)
(and (member ?ref ?refiners)
(not (refiner-is-applicable?
?ref (role-value
?task has-solution)))))))))))
;;; STANDARD-COLLECT-REFINERS-PSM
;; Method for collect-refiners: enumerates the applicable refiners by
;; direct evaluation.
(def-class standard-collect-refiners-psm (primitive-method) ?psm
((has-body :value '(lambda (?psm)
(setofall ?x (and (member ?x (role-value ?psm has-refiners))
(refiner-is-applicable?
?x (role-value ?psm has-solution)))))))
:own-slots ((tackles-task-type 'collect-refiners)))
(def-class apply-refiners (goal-specification-task) ?task
  "The goal of this task is to generate all possible refinement of a solution, given
   a set of refiners. Only one-step refinement is carried out here. That is, if the
   input solution is defined at level n, the output can only be at level n+1."
  ((has-input-role :value has-solution
                   :value has-refiners)
   (has-output-role :value has-refined-solutions)
   (has-solution :type solution)
   (has-refiners :type list)
   (has-refined-solutions :type solution-space)
   (has-goal-expression
    ;; BUG FIX: in the original the (member ?sol ...) conjunct was nested
    ;; inside the argument list of (member ?ref ...); the parentheses now
    ;; match the analogous goal expression of the refinement task.
    :value (kappa (?task ?solutions)
             (forall ?sol
               (=> (member ?sol ?solutions)
                   (exists ?ref
                     (and (member ?ref (role-value ?task 'has-refiners))
                          (member ?sol
                                  (apply-refiner-operator
                                   ?ref
                                   (role-value
                                    ?task
                                    'has-solution)))))))))))
;; Method for apply-refiners: accumulates (as a set union) the results of
;; applying every refiner to the input solution.
(def-class apply-refiners-psm (problem-solving-method) ?psm
((has-control-role :value has-current-solutions)
(has-current-solutions :type solution-space)
(has-precondition :value (kappa (?psm)
(every (role-value ?psm 'has-refiners)
(kappa (?x)
(refiner-is-applicable?
?x
(role-value
?psm has-solution))))))
(has-body :value
'(lambda (?psm)
(do
(set-role-value ?psm 'has-current-solutions nil)
(loop for ?ref in (role-value ?psm has-refiners)
do
(in-environment
((?sols . (apply-refiner-operator
?ref
(role-value ?psm 'has-solution))))
(set-role-value ?psm 'has-current-solutions
(union (role-value ?psm
'has-current-solutions)
?sols))))
(role-value ?psm
'has-current-solutions)))))
:own-slots ((tackles-task-type 'apply-refiners)
))
(defun heuristic-classify (&key solution-space observables abstractors refiners
                                domain-name
                                (task-type 'OPTIMAL-CLASSIFICATION-TASK)
                                (match-criterion 'default-match-criterion)
                                (method-type 'heuristic-classification-psm)
                                (solution-admissibility-criterion
                                 'default-solution-admissibility-criterion)
                                (solution-exclusion-criterion
                                 'default-candidate-exclusion-criterion))
  "Convenience entry point for heuristic classification: instantiates a
classification task, a problem-solving method and an application tying
them to DOMAIN-NAME, then solves the application."
  (let* ((task-instance
          (define-domain-instance
            (gentemp "CLASSIFICATION-TASK")
            task-type
            ""
            `((has-candidate-solutions ,solution-space)
              (has-observables ,observables)
              (has-match-criterion ,match-criterion)
              (has-solution-admissibility-criterion
               ,solution-admissibility-criterion))))
         (method-instance
          (define-domain-instance
            (gentemp "METHOD")
            method-type
            ""
            `((has-abstractors ,abstractors)
              (has-solution-exclusion-criterion ,solution-exclusion-criterion)
              (has-refiners ,refiners))))
         ;; The application instance refers to the task and method above,
         ;; so it must be created last.
         (app-instance
          (define-domain-instance
            (gentemp "CLASSIFICATION-APPLICATION")
            'application
            ""
            `((tackles-domain ,domain-name)
              (uses-method ,(name method-instance))
              (tackles-task ,(name task-instance))))))
    (ocml-eval-gen `(solve-application ,(name app-instance)))))
|
80706c3717afc462d4c38c1bb66939ff4908a364f97cc189e707ba1af33b0f79 | RYTong/erlmail-client | imapc_fsm.erl | -module(imapc_fsm).
-include("imap.hrl").
-behaviour(gen_fsm).
%% api
%% BUG FIX (reconstruction): the original -export attribute was left
%% unterminated (its closing "]). " was commented out) and several client
%% functions below were mangled ("- >" split arrow tokens), so the module
%% could not be parsed. The bodies below are taken verbatim from the
%% garbled lines.
-export([connect/2, connect_ssl/2, login/3, logout/1, noop/1, disconnect/1,
         list/3, status/3, select/2, examine/2, append/4, expunge/1,
         search/2, fetch/3, store/4, copy/3, fsm_state/1]).
%% callbacks
-export([init/1, handle_event/3, handle_sync_event/4, handle_info/3,
         code_change/4, terminate/3]).
%% state funs
-export([server_greeting/2, server_greeting/3, not_authenticated/2,
         not_authenticated/3, authenticated/2, authenticated/3,
         logout/2, logout/3]).

%%%--- TODO TODO TODO -------------------------------------------------------------------
%%% TODO:
%%% Scan INBOX, fetch the messages, parse MIME and generate JSON
%%%--------------------------------------------------------------------------------------
%%%--- TODO TODO TODO -------------------------
%%% 1. Implement LIST, SELECT, ...
%%% 2. Implement the response to LOGIN: "* CAPABILITY IMAP4rev1 UNSELECT ..."
%%% 3. Filter error_logger messages to disable the ones from this module, logger?
%%%--------------------------------------------

%%%-----------------
%%% Client functions
%%%-----------------

%% Start an FSM connected to Host:Port over plain TCP.
connect(Host, Port) ->
  gen_fsm:start_link(?MODULE, {tcp, Host, Port}, []).

%% Start an FSM connected to Host:Port over SSL.
connect_ssl(Host, Port) ->
  gen_fsm:start_link(?MODULE, {ssl, Host, Port}, []).

login(Conn, User, Pass) ->
  gen_fsm:sync_send_event(Conn, {command, login, {User, Pass}}).

logout(Conn) ->
  gen_fsm:sync_send_event(Conn, {command, logout, {}}).

noop(Conn) ->
  gen_fsm:sync_send_event(Conn, {command, noop, {}}).

disconnect(Conn) ->
  gen_fsm:sync_send_all_state_event(Conn, {command, disconnect, {}}).

list(Conn, RefName, Mailbox) ->
  gen_fsm:sync_send_event(Conn, {command, list, [RefName, imapc_util:quote_mbox(Mailbox)]}).

status(Conn, Mailbox, StatusDataItems) ->
  gen_fsm:sync_send_event(Conn, {command, status, [imapc_util:quote_mbox(Mailbox), StatusDataItems]}).

select(Conn, Mailbox) ->
  gen_fsm:sync_send_event(Conn, {command, select, imapc_util:quote_mbox(Mailbox)}).

examine(Conn, Mailbox) ->
  gen_fsm:sync_send_event(Conn, {command, examine, Mailbox}).

append(Conn, Mailbox, Flags, Message) ->
  gen_fsm:sync_send_event(Conn, {command, append, [Mailbox, Flags, Message]}).

expunge(Conn) ->
  gen_fsm:sync_send_event(Conn, {command, expunge, []}).

search(Conn, SearchKeys) ->
  gen_fsm:sync_send_event(Conn, {command, search, SearchKeys}).

%% FETCH may transfer large bodies, so no client-side timeout is used.
fetch(Conn, SequenceSet, MsgDataItems) ->
  gen_fsm:sync_send_event(Conn, {command, fetch, [SequenceSet, MsgDataItems]}, infinity).

copy(Conn, SequenceSet, Mailbox) ->
  gen_fsm:sync_send_event(Conn, {command, copy, [SequenceSet, Mailbox]}).

store(Conn, SequenceSet, Flags, Action) ->
  gen_fsm:sync_send_event(Conn, {command, store, [SequenceSet, Flags, Action]}).

%% Debugging helper: ask the FSM to print its internal state.
fsm_state(Conn) ->
  gen_fsm:sync_send_all_state_event(Conn, fsm_state).
%%%-------------------
%%% Callback functions
%%%-------------------
%% gen_fsm callback: open a line-oriented TCP or SSL socket to the server
%% and start in the server_greeting state, waiting for the IMAP banner.
init({SockType, Host, Port}) ->
case imapc_util:sock_connect(SockType, Host, Port, [list, {packet, line}]) of
{ok, Sock} ->
?LOG_INFO("IMAP connection open", []),
{ok, server_greeting, #state_data{socket = Sock, socket_type = SockType}};
{error, Reason} ->
{stop, Reason}
end.
%% Synchronous events (commands) arriving before the server greeting are
%% queued in the state data and replayed once the greeting is processed;
%% the caller stays blocked until its command is eventually answered.
server_greeting({command, _, _} = Cmd, Caller, #state_data{enqueued_commands = Queue} = StateData) ->
?LOG_DEBUG("command enqueued: ~p", [Cmd]),
{next_state, server_greeting, StateData#state_data{enqueued_commands = [{Cmd, Caller} | Queue]}}.
%% Untagged OK greeting: record the advertised capabilities, replay every
%% queued command as an asynchronous event to self, and move to the
%% not_authenticated state.
server_greeting(_Response={response, untagged, "OK", Capabilities}, StateData) ->
%%?LOG_DEBUG("greeting received: ~p", [Response]),
EnqueuedCommands = lists:reverse(StateData#state_data.enqueued_commands),
NewStateData = StateData#state_data{server_capabilities = Capabilities,
enqueued_commands = []},
lists:foreach(fun({Command, From}) ->
gen_fsm:send_event(self(), {enqueued_command, Command, From})
end, EnqueuedCommands),
{next_state, not_authenticated, NewStateData};
%% Anything other than an untagged OK is not a valid greeting: stop.
server_greeting(_Response = {response, _, _, _}, StateData) ->
%%?LOG_ERROR(server_greeting, "unrecognized greeting: ~p", [Response]),
{stop, unrecognized_greeting, StateData}.
%% TODO: handle the CAPABILITY tag we have not received yet;
%% it arrives with the OK of the login
%% not_authenticated state: commands (fresh or replayed from the greeting
%% queue) are sent to the server; server responses are dispatched to the
%% generic response handler.
not_authenticated(Command = {command, _, _}, From, StateData) ->
handle_command(Command, From, not_authenticated, StateData).
not_authenticated({enqueued_command, Command, From}, StateData) ->
?LOG_DEBUG("command dequeued: ~p", [Command]),
handle_command(Command, From, not_authenticated, StateData);
not_authenticated(Response = {response, _, _, _}, StateData) ->
handle_response(Response, not_authenticated, StateData).
%% authenticated state: delegate commands and responses to the generic
%% handlers, tagging them with the current state name.
authenticated(Command = {command, _, _}, From, StateData) ->
handle_command(Command, From, authenticated, StateData).
authenticated(Response = {response, _, _, _}, StateData) ->
handle_response(Response, authenticated, StateData).
%% logout state: delegate commands and responses to the generic handlers,
%% tagging them with the current state name.
logout(Command = {command, _, _}, From, StateData) ->
handle_command(Command, From, logout, StateData).
logout(Response = {response, _, _, _}, StateData) ->
handle_response(Response, logout, StateData).
%% TODO: reconnection in case of unexpected disconnection
%% Socket closed by the peer: mark the socket as closed and go to the
%% logout state. Expected when we are already logging out; logged as an
%% error otherwise. (The second case pattern reuses the bound variable
%% StateName, so it acts as a catch-all for any state other than logout.)
handle_info({SockTypeClosed, Sock}, StateName,
StateData = #state_data{socket = Sock}) when
SockTypeClosed == tcp_closed; SockTypeClosed == ssl_closed ->
NewStateData = StateData#state_data{socket = closed},
case StateName of
logout ->
?LOG_INFO("IMAP connection closed", []),
{next_state, logout, NewStateData};
StateName ->
?LOG_ERROR(handle_info, "IMAP connection closed unexpectedly", []),
{next_state, logout, NewStateData}
end;
%% A line received from the server: parse it and dispatch the parsed
%% response to the current state function (?MODULE:StateName/2).
handle_info({SockType, Sock, Line}, StateName,
StateData = #state_data{socket = Sock}) when
SockType == tcp; SockType == ssl ->
?LOG_DEBUG("line received: ^~s$", [Line]),
case imapc_resp:parse_response(Line) of
{ok, Response} ->
?MODULE:StateName(Response, StateData);
{error, nomatch} ->
?LOG_ERROR(handle_info, "unrecognized response: ~p",
[Line]),
{stop, unrecognized_response, StateData}
end.
%% Asynchronous all-state events are ignored.
handle_event(_Event, StateName, StateData) ->
% ?LOG_WARNING(handle_event, "fsm handle_event ignored: ~p", [Event]),
{next_state, StateName, StateData}.
%% disconnect: close the socket (if still open) and stop the FSM normally,
%% replying ok to the caller.
handle_sync_event({command, disconnect, {}}, _From, _StateName, StateData) ->
case StateData#state_data.socket of
closed ->
true;
Sock ->
ok = imapc_util:sock_close(StateData#state_data.socket_type, Sock),
?LOG_INFO("IMAP connection closed", [])
end,
{stop, normal, ok, StateData};
%% fsm_state: debugging aid that prints the FSM's internal state to stdout.
handle_sync_event(fsm_state, _From, StateName, S) ->
io:format("fsm: ~p~n", [self()]),
io:format("socket: ~p~n", [{S#state_data.socket_type, S#state_data.socket}]),
io:format("enqueued_commands: ~p~n", [S#state_data.enqueued_commands]),
io:format("server_capabilities: ~p~n", [S#state_data.server_capabilities]),
io:format("commands_pending_response: ~p~n", [S#state_data.commands_pending_response]),
io:format("untagged_responses_received: ~p~n", [S#state_data.untagged_responses_received]),
{reply,ok,StateName,S}.
code_change(_OldVsn, StateName, StateData, _Extra) ->
{ok, StateName, StateData}.
terminate(normal, _StateName, _StateData) ->
?LOG_DEBUG("gen_fsm terminated normally", []),
ok;
terminate(Reason, _StateName, _StateData) ->
?LOG_DEBUG("gen_fsm terminated because an error occurred", []),
{error, Reason}.
%%%--------------------------------------
%%% Commands/Responses handling functions
%%%--------------------------------------
handle_response(Response = {response, untagged, _, _}, StateName, StateData) ->
NewStateData = StateData#state_data{untagged_responses_received =
[Response | StateData#state_data.untagged_responses_received]},
{next_state, StateName, NewStateData};
handle_response(Response = {response, Tag, _, _}, StateName, StateData) ->
ResponsesReceived =
case StateData#state_data.untagged_responses_received of
[] ->
[Response];
UntaggedResponsesReceived ->
lists:reverse([Response | UntaggedResponsesReceived])
end,
{ok, {Command, From}, CommandsPendingResponse} =
imapc_util:extract_dict_element(Tag,
StateData#state_data.commands_pending_response),
NewStateData = StateData#state_data{
commands_pending_response = CommandsPendingResponse
},
NextStateName = imapc_resp:analyze_response(StateName, ResponsesReceived,
Command, From),
{next_state, NextStateName, NewStateData#state_data{untagged_responses_received = []}}.
handle_command(Command, From, StateName, StateData) ->
?LOG_DEBUG("handle command: ~p~n", [Command]),
case imapc_cmd:send_command(StateData#state_data.socket_type,
StateData#state_data.socket, Command) of
{ok, Tag} ->
NewStateData = StateData#state_data{commands_pending_response =
dict:store(Tag, {Command, From},
StateData#state_data.commands_pending_response)},
{next_state, StateName, NewStateData};
{error, Reason} ->
{stop, Reason, StateData}
end.
| null | https://raw.githubusercontent.com/RYTong/erlmail-client/039b9e43d9c78a4d0aab0e1b6dcc5eb50e9658f2/src/imapc_fsm.erl | erlang | api
list/3, status/3,
search/2, fetch/3, store/4, copy/3
]).
callbacks
state funs
--- TODO TODO TODO -------------------------------------------------------------------
--------------------------------------------------------------------------------------
--- TODO TODO TODO -------------------------
--------------------------------------------
-----------------
Client functions
-----------------
connect(Host, Port) ->
connect_ssl(Host, Port) ->
login(Conn, User, Pass) ->
logout(Conn) ->
noop(Conn) ->
disconnect(Conn) ->
gen_fsm:sync_send_all_state_event(Conn, {command, disconnect, {}}).
status(Conn, Mailbox, StatusDataItems) ->
copy(Conn, SequenceSet, Mailbox) ->
fsm_state(Conn) ->
gen_fsm:sync_send_all_state_event(Conn, fsm_state).
-------------------
Callback functions
-------------------
?LOG_DEBUG("greeting received: ~p", [Response]),
?LOG_ERROR(server_greeting, "unrecognized greeting: ~p", [Response]),
TODO: reconexion en caso de desconexion inesperada
--------------------------------------
Commands/Responses handling functions
-------------------------------------- | -module(imapc_fsm).
-include("imap.hrl").
-behaviour(gen_fsm).
-export([connect/2 , connect_ssl/2 , login/3 , logout/1 , noop/1 , ,
select/2 , examine/2 , append/4 , expunge/1 ,
-export([init/1, handle_event/3, handle_sync_event/4, handle_info/3,
code_change/4, terminate/3]).
-export([server_greeting/2, server_greeting/3, not_authenticated/2,
not_authenticated/3, authenticated/2, authenticated/3,
logout/2, logout/3]).
:
Escanear INBOX , , coger , parsear MIME y generar JSON
1 . Implementar LIST , SELECT , ...
2 . Implementar la respuesta con LOGIN : " * CAPABILITY IMAP4rev1 UNSELECT ... "
3 . Filtrar mensajes de error_logger para desactivar los de este modulo , logger ?
gen_fsm : start_link(?MODULE , { tcp , Host , Port } , [ ] ) .
gen_fsm : start_link(?MODULE , { ssl , Host , Port } , [ ] ) .
gen_fsm : sync_send_event(Conn , { command , login , { User , Pass } } ) .
gen_fsm : sync_send_event(Conn , { command , logout , { } } ) .
gen_fsm : sync_send_event(Conn , { command , noop , { } } ) .
list(Conn , RefName , Mailbox ) - >
gen_fsm : sync_send_event(Conn , { command , list , [ RefName , imapc_util : quote_mbox(Mailbox ) ] } ) .
gen_fsm : sync_send_event(Conn , { command , status , [ imapc_util : quote_mbox(Mailbox ) , StatusDataItems ] } ) .
select(Conn , Mailbox ) - >
gen_fsm : sync_send_event(Conn , { command , select , imapc_util : quote_mbox(Mailbox ) } ) .
examine(Conn , Mailbox ) - >
gen_fsm : sync_send_event(Conn , { command , examine , Mailbox } ) .
append(Conn , Mailbox , Flags , Message ) - >
gen_fsm : sync_send_event(Conn , { command , append , [ Mailbox , Flags , Message ] } ) .
expunge(Conn ) - >
gen_fsm : sync_send_event(Conn , { command , expunge , [ ] } ) .
search(Conn , SearchKeys ) - >
gen_fsm : sync_send_event(Conn , { command , search , SearchKeys } ) .
fetch(Conn , SequenceSet , MsgDataItems ) - >
gen_fsm : sync_send_event(Conn , { command , fetch , [ SequenceSet , MsgDataItems ] } , infinity ) .
gen_fsm : sync_send_event(Conn , { command , copy , [ SequenceSet , Mailbox ] } ) .
store(Conn , SequenceSet , Flags , Action ) - >
gen_fsm : sync_send_event(Conn , { command , store , [ SequenceSet , Flags , Action ] } ) .
init({SockType, Host, Port}) ->
case imapc_util:sock_connect(SockType, Host, Port, [list, {packet, line}]) of
{ok, Sock} ->
?LOG_INFO("IMAP connection open", []),
{ok, server_greeting, #state_data{socket = Sock, socket_type = SockType}};
{error, Reason} ->
{stop, Reason}
end.
server_greeting(Command = {command, _, _}, From, StateData) ->
NewStateData = StateData#state_data{enqueued_commands =
[{Command, From} | StateData#state_data.enqueued_commands]},
?LOG_DEBUG("command enqueued: ~p", [Command]),
{next_state, server_greeting, NewStateData}.
server_greeting(_Response={response, untagged, "OK", Capabilities}, StateData) ->
EnqueuedCommands = lists:reverse(StateData#state_data.enqueued_commands),
NewStateData = StateData#state_data{server_capabilities = Capabilities,
enqueued_commands = []},
lists:foreach(fun({Command, From}) ->
gen_fsm:send_event(self(), {enqueued_command, Command, From})
end, EnqueuedCommands),
{next_state, not_authenticated, NewStateData};
server_greeting(_Response = {response, _, _, _}, StateData) ->
{stop, unrecognized_greeting, StateData}.
TODO : hacer tag CAPABILITY ' no hemos
recibido , en el login con el OK
not_authenticated(Command = {command, _, _}, From, StateData) ->
handle_command(Command, From, not_authenticated, StateData).
not_authenticated({enqueued_command, Command, From}, StateData) ->
?LOG_DEBUG("command dequeued: ~p", [Command]),
handle_command(Command, From, not_authenticated, StateData);
not_authenticated(Response = {response, _, _, _}, StateData) ->
handle_response(Response, not_authenticated, StateData).
authenticated(Command = {command, _, _}, From, StateData) ->
handle_command(Command, From, authenticated, StateData).
authenticated(Response = {response, _, _, _}, StateData) ->
handle_response(Response, authenticated, StateData).
logout(Command = {command, _, _}, From, StateData) ->
handle_command(Command, From, logout, StateData).
logout(Response = {response, _, _, _}, StateData) ->
handle_response(Response, logout, StateData).
handle_info({SockTypeClosed, Sock}, StateName,
StateData = #state_data{socket = Sock}) when
SockTypeClosed == tcp_closed; SockTypeClosed == ssl_closed ->
NewStateData = StateData#state_data{socket = closed},
case StateName of
logout ->
?LOG_INFO("IMAP connection closed", []),
{next_state, logout, NewStateData};
StateName ->
?LOG_ERROR(handle_info, "IMAP connection closed unexpectedly", []),
{next_state, logout, NewStateData}
end;
handle_info({SockType, Sock, Line}, StateName,
StateData = #state_data{socket = Sock}) when
SockType == tcp; SockType == ssl ->
?LOG_DEBUG("line received: ^~s$", [Line]),
case imapc_resp:parse_response(Line) of
{ok, Response} ->
?MODULE:StateName(Response, StateData);
{error, nomatch} ->
?LOG_ERROR(handle_info, "unrecognized response: ~p",
[Line]),
{stop, unrecognized_response, StateData}
end.
handle_event(_Event, StateName, StateData) ->
? LOG_WARNING(handle_event , " fsm handle_event ignored : ~p " , [ Event ] ) ,
{next_state, StateName, StateData}.
handle_sync_event({command, disconnect, {}}, _From, _StateName, StateData) ->
case StateData#state_data.socket of
closed ->
true;
Sock ->
ok = imapc_util:sock_close(StateData#state_data.socket_type, Sock),
?LOG_INFO("IMAP connection closed", [])
end,
{stop, normal, ok, StateData};
handle_sync_event(fsm_state, _From, StateName, S) ->
io:format("fsm: ~p~n", [self()]),
io:format("socket: ~p~n", [{S#state_data.socket_type, S#state_data.socket}]),
io:format("enqueued_commands: ~p~n", [S#state_data.enqueued_commands]),
io:format("server_capabilities: ~p~n", [S#state_data.server_capabilities]),
io:format("commands_pending_response: ~p~n", [S#state_data.commands_pending_response]),
io:format("untagged_responses_received: ~p~n", [S#state_data.untagged_responses_received]),
{reply,ok,StateName,S}.
code_change(_OldVsn, StateName, StateData, _Extra) ->
{ok, StateName, StateData}.
terminate(normal, _StateName, _StateData) ->
?LOG_DEBUG("gen_fsm terminated normally", []),
ok;
terminate(Reason, _StateName, _StateData) ->
?LOG_DEBUG("gen_fsm terminated because an error occurred", []),
{error, Reason}.
handle_response(Response = {response, untagged, _, _}, StateName, StateData) ->
NewStateData = StateData#state_data{untagged_responses_received =
[Response | StateData#state_data.untagged_responses_received]},
{next_state, StateName, NewStateData};
handle_response(Response = {response, Tag, _, _}, StateName, StateData) ->
ResponsesReceived =
case StateData#state_data.untagged_responses_received of
[] ->
[Response];
UntaggedResponsesReceived ->
lists:reverse([Response | UntaggedResponsesReceived])
end,
{ok, {Command, From}, CommandsPendingResponse} =
imapc_util:extract_dict_element(Tag,
StateData#state_data.commands_pending_response),
NewStateData = StateData#state_data{
commands_pending_response = CommandsPendingResponse
},
NextStateName = imapc_resp:analyze_response(StateName, ResponsesReceived,
Command, From),
{next_state, NextStateName, NewStateData#state_data{untagged_responses_received = []}}.
handle_command(Command, From, StateName, StateData) ->
?LOG_DEBUG("handle command: ~p~n", [Command]),
case imapc_cmd:send_command(StateData#state_data.socket_type,
StateData#state_data.socket, Command) of
{ok, Tag} ->
NewStateData = StateData#state_data{commands_pending_response =
dict:store(Tag, {Command, From},
StateData#state_data.commands_pending_response)},
{next_state, StateName, NewStateData};
{error, Reason} ->
{stop, Reason, StateData}
end.
|
83bc304111267e52bc9f8dac43cb9f5696f692deefc6d08802cbbf11ad1201f8 | helium/router | router_sup.erl | %%%-------------------------------------------------------------------
%% @doc router top level supervisor.
%% @end
%%%-------------------------------------------------------------------
-module(router_sup).
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
-define(SUP(I, Args), #{
id => I,
start => {I, start_link, Args},
restart => permanent,
shutdown => 5000,
type => supervisor,
modules => [I]
}).
-define(WORKER(I, Args), #{
id => I,
start => {I, start_link, Args},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [I]
}).
-define(WORKER(I, Mod, Args), #{
id => I,
start => {Mod, start_link, Args},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [I]
}).
-define(FLAGS, #{
strategy => rest_for_one,
intensity => 1,
period => 5
}).
-define(SERVER, ?MODULE).
%%====================================================================
%% API functions
%%====================================================================
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
%%====================================================================
%% Supervisor callbacks
%%====================================================================
%% Child :: #{id => Id, start => {M, F, A}}
%% Optional keys are restart, shutdown, type, modules.
Before OTP 18 tuples must be used to specify a child . e.g.
Child : : { Id , StartFunc , Restart , Shutdown , Type , Modules }
init([]) ->
BaseDir = application:get_env(blockchain, base_dir, "data"),
ok = router_decoder:init_ets(),
ok = router_console_dc_tracker:init_ets(),
ok = router_console_api:init_ets(),
ok = router_device_stats:init(),
ok = ru_denylist:init(BaseDir),
ok = libp2p_crypto:set_network(application:get_env(blockchain, network, mainnet)),
{ok, _} = application:ensure_all_started(ranch),
{ok, _} = application:ensure_all_started(lager),
SeedNodes =
case application:get_env(blockchain, seed_nodes) of
{ok, ""} -> [];
{ok, Seeds} -> string:split(Seeds, ",", all);
_ -> []
end,
SwarmKey = filename:join([BaseDir, "blockchain", "swarm_key"]),
ok = filelib:ensure_dir(SwarmKey),
Key =
case libp2p_crypto:load_keys(SwarmKey) of
{ok, #{secret := PrivKey, public := PubKey}} ->
{PubKey, libp2p_crypto:mk_sig_fun(PrivKey), libp2p_crypto:mk_ecdh_fun(PrivKey)};
{error, enoent} ->
KeyMap =
#{secret := PrivKey, public := PubKey} = libp2p_crypto:generate_keys(
ecc_compact
),
ok = libp2p_crypto:save_keys(KeyMap, SwarmKey),
{PubKey, libp2p_crypto:mk_sig_fun(PrivKey), libp2p_crypto:mk_ecdh_fun(PrivKey)}
end,
BlockchainOpts = [
{key, Key},
{seed_nodes, SeedNodes},
{max_inbound_connections, 10},
{port, application:get_env(blockchain, port, 0)},
{base_dir, BaseDir},
{update_dir, application:get_env(blockchain, update_dir, undefined)}
],
SCWorkerOpts = #{},
DBOpts = [BaseDir],
MetricsOpts = #{},
POCDenyListArgs =
case
{
application:get_env(router, denylist_keys, undefined),
application:get_env(router, denylist_url, undefined)
}
of
{undefined, _} ->
#{};
{_, undefined} ->
#{};
{DenyListKeys, DenyListUrl} ->
#{
denylist_keys => DenyListKeys,
denylist_url => DenyListUrl,
denylist_base_dir => BaseDir,
denylist_check_timer => {immediate, timer:hours(12)}
}
end,
{PubKey0, SigFun, _} = Key,
PubKeyBin = libp2p_crypto:pubkey_to_bin(PubKey0),
ICSOptsDefault = application:get_env(router, ics, #{}),
ICSOpts = ICSOptsDefault#{pubkey_bin => PubKeyBin, sig_fun => SigFun},
router_ics_gateway_location_worker:init_ets(),
{ok,
{?FLAGS, [
?WORKER(ru_poc_denylist, [POCDenyListArgs]),
?SUP(blockchain_sup, [BlockchainOpts]),
?WORKER(router_metrics, [MetricsOpts]),
?WORKER(router_db, [DBOpts]),
?SUP(router_devices_sup, []),
?WORKER(router_sc_worker, [SCWorkerOpts]),
?SUP(router_console_sup, []),
?SUP(router_decoder_sup, []),
?WORKER(router_device_devaddr, [#{}]),
?WORKER(router_xor_filter_worker, [#{}]),
?WORKER(router_ics_eui_worker, [ICSOpts]),
?WORKER(router_ics_skf_worker, [ICSOpts]),
?WORKER(router_ics_gateway_location_worker, [ICSOpts])
]}}.
%%====================================================================
Internal functions
%%====================================================================
| null | https://raw.githubusercontent.com/helium/router/117cf4240cdd742eb6fb20db8ec1d6a63f64bd95/src/router_sup.erl | erlang | -------------------------------------------------------------------
@doc router top level supervisor.
@end
-------------------------------------------------------------------
API
Supervisor callbacks
====================================================================
API functions
====================================================================
====================================================================
Supervisor callbacks
====================================================================
Child :: #{id => Id, start => {M, F, A}}
Optional keys are restart, shutdown, type, modules.
====================================================================
==================================================================== |
-module(router_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SUP(I, Args), #{
id => I,
start => {I, start_link, Args},
restart => permanent,
shutdown => 5000,
type => supervisor,
modules => [I]
}).
-define(WORKER(I, Args), #{
id => I,
start => {I, start_link, Args},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [I]
}).
-define(WORKER(I, Mod, Args), #{
id => I,
start => {Mod, start_link, Args},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [I]
}).
-define(FLAGS, #{
strategy => rest_for_one,
intensity => 1,
period => 5
}).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
Before OTP 18 tuples must be used to specify a child . e.g.
Child : : { Id , StartFunc , Restart , Shutdown , Type , Modules }
init([]) ->
BaseDir = application:get_env(blockchain, base_dir, "data"),
ok = router_decoder:init_ets(),
ok = router_console_dc_tracker:init_ets(),
ok = router_console_api:init_ets(),
ok = router_device_stats:init(),
ok = ru_denylist:init(BaseDir),
ok = libp2p_crypto:set_network(application:get_env(blockchain, network, mainnet)),
{ok, _} = application:ensure_all_started(ranch),
{ok, _} = application:ensure_all_started(lager),
SeedNodes =
case application:get_env(blockchain, seed_nodes) of
{ok, ""} -> [];
{ok, Seeds} -> string:split(Seeds, ",", all);
_ -> []
end,
SwarmKey = filename:join([BaseDir, "blockchain", "swarm_key"]),
ok = filelib:ensure_dir(SwarmKey),
Key =
case libp2p_crypto:load_keys(SwarmKey) of
{ok, #{secret := PrivKey, public := PubKey}} ->
{PubKey, libp2p_crypto:mk_sig_fun(PrivKey), libp2p_crypto:mk_ecdh_fun(PrivKey)};
{error, enoent} ->
KeyMap =
#{secret := PrivKey, public := PubKey} = libp2p_crypto:generate_keys(
ecc_compact
),
ok = libp2p_crypto:save_keys(KeyMap, SwarmKey),
{PubKey, libp2p_crypto:mk_sig_fun(PrivKey), libp2p_crypto:mk_ecdh_fun(PrivKey)}
end,
BlockchainOpts = [
{key, Key},
{seed_nodes, SeedNodes},
{max_inbound_connections, 10},
{port, application:get_env(blockchain, port, 0)},
{base_dir, BaseDir},
{update_dir, application:get_env(blockchain, update_dir, undefined)}
],
SCWorkerOpts = #{},
DBOpts = [BaseDir],
MetricsOpts = #{},
POCDenyListArgs =
case
{
application:get_env(router, denylist_keys, undefined),
application:get_env(router, denylist_url, undefined)
}
of
{undefined, _} ->
#{};
{_, undefined} ->
#{};
{DenyListKeys, DenyListUrl} ->
#{
denylist_keys => DenyListKeys,
denylist_url => DenyListUrl,
denylist_base_dir => BaseDir,
denylist_check_timer => {immediate, timer:hours(12)}
}
end,
{PubKey0, SigFun, _} = Key,
PubKeyBin = libp2p_crypto:pubkey_to_bin(PubKey0),
ICSOptsDefault = application:get_env(router, ics, #{}),
ICSOpts = ICSOptsDefault#{pubkey_bin => PubKeyBin, sig_fun => SigFun},
router_ics_gateway_location_worker:init_ets(),
{ok,
{?FLAGS, [
?WORKER(ru_poc_denylist, [POCDenyListArgs]),
?SUP(blockchain_sup, [BlockchainOpts]),
?WORKER(router_metrics, [MetricsOpts]),
?WORKER(router_db, [DBOpts]),
?SUP(router_devices_sup, []),
?WORKER(router_sc_worker, [SCWorkerOpts]),
?SUP(router_console_sup, []),
?SUP(router_decoder_sup, []),
?WORKER(router_device_devaddr, [#{}]),
?WORKER(router_xor_filter_worker, [#{}]),
?WORKER(router_ics_eui_worker, [ICSOpts]),
?WORKER(router_ics_skf_worker, [ICSOpts]),
?WORKER(router_ics_gateway_location_worker, [ICSOpts])
]}}.
Internal functions
|
15347b45355c6a19241a0b4d8314b778ed6fb9fd6f072602b50f6104189774de | yetanalytics/dave | select.cljs | (ns com.yetanalytics.dave.ui.views.form.select
(:require ["@material/select" :refer [MDCSelect]]
[reagent.core :as r]))
TODO : Proper wrapping for MDC , this tends to leave at the bottom of the page
(defn select [& {:keys [handler]}]
(r/create-class
{:component-did-mount
(fn [c]
(let [mdc-select (MDCSelect. (r/dom-node c))]
(.listen ^MDCSelect mdc-select
"MDCSelect:change"
(fn [_]
(handler (.-value mdc-select))))))
:reagent-render
(fn
[& {:keys [label
selected
options ;; ordered list of {:value <> :label <>}
;; handler ;; handler callback
full-width?
]
:or {selected ""
full-width? false}}]
[:div {:class (str "mdc-select dave-select "
(if full-width?
"dave-select-full-width"
"dave-select-width"))}
[:input {:type "hidden",
:name "enhanced-select"
:value selected}]
[:i {:class "mdc-select__dropdown-icon"}]
[:div {:class "mdc-select__selected-text"}]
[:div {:class (str "mdc-select__menu mdc-menu mdc-menu-surface "
(if full-width?
"dave-select-full-width"
"dave-select-width"))}
(into [:ul.mdc-list]
(for [{:keys [label value]} options]
[:li.mdc-list-item
(cond-> {:class (when (= selected value)
"mdc-list-item--selected")
:data-value value}
(= selected value)
(assoc :aria-selected "true"))
label]))]
[:span {:class "mdc-floating-label"} label]
[:div {:class "mdc-line-ripple"}]])}))
| null | https://raw.githubusercontent.com/yetanalytics/dave/7a71c2017889862b2fb567edc8196b4382d01beb/src/com/yetanalytics/dave/ui/views/form/select.cljs | clojure | ordered list of {:value <> :label <>}
handler ;; handler callback | (ns com.yetanalytics.dave.ui.views.form.select
(:require ["@material/select" :refer [MDCSelect]]
[reagent.core :as r]))
TODO : Proper wrapping for MDC , this tends to leave at the bottom of the page
(defn select [& {:keys [handler]}]
(r/create-class
{:component-did-mount
(fn [c]
(let [mdc-select (MDCSelect. (r/dom-node c))]
(.listen ^MDCSelect mdc-select
"MDCSelect:change"
(fn [_]
(handler (.-value mdc-select))))))
:reagent-render
(fn
[& {:keys [label
selected
full-width?
]
:or {selected ""
full-width? false}}]
[:div {:class (str "mdc-select dave-select "
(if full-width?
"dave-select-full-width"
"dave-select-width"))}
[:input {:type "hidden",
:name "enhanced-select"
:value selected}]
[:i {:class "mdc-select__dropdown-icon"}]
[:div {:class "mdc-select__selected-text"}]
[:div {:class (str "mdc-select__menu mdc-menu mdc-menu-surface "
(if full-width?
"dave-select-full-width"
"dave-select-width"))}
(into [:ul.mdc-list]
(for [{:keys [label value]} options]
[:li.mdc-list-item
(cond-> {:class (when (= selected value)
"mdc-list-item--selected")
:data-value value}
(= selected value)
(assoc :aria-selected "true"))
label]))]
[:span {:class "mdc-floating-label"} label]
[:div {:class "mdc-line-ripple"}]])}))
|
52c4ea3717e68ba041af36df1342a24c1a95925ed7ffc2c7e57239d901938df5 | weavejester/build | git.clj | (ns weavejester.build.git
(:require [clojure.string :as str]
[clojure.java.shell :as sh]))
(defn- git [& args]
(some-> (apply sh/sh "git" args) :out str/trim))
(defn default-version []
(git "describe" "--exact-match" "--abbrev=0"))
(defn git-head []
(git "rev-parse" "HEAD"))
(defn git-origin []
(git "config" "--get" "remote.origin.url"))
(defn- parse-github-url [url]
(or (re-matches #"(?:[A-Za-z-]{2,}@)?github.com:([^/]+)/([^/]+).git" url)
(re-matches #"[^:]+://(?:[A-Za-z-]{2,}@)?github.com/([^/]+)/([^/]+?)(?:.git)?" url)))
(defn- github-urls [url]
(when-let [[_ user repo] (parse-github-url url)]
{:public-clone (str "git/" user "/" repo ".git")
:dev-clone (str "ssh:///" user "/" repo ".git")
:browse (str "/" user "/" repo)}))
(defn github-scm-map []
(try
(let [origin (git-origin)
head (git-head)
urls (github-urls origin)]
(cond-> {:url (:browse urls)}
(:public-clone urls) (assoc :connection (str "scm:git:" (:public-clone urls)))
(:dev-clone urls) (assoc :developerConnection (str "scm:git:" (:dev-clone urls)))
head (assoc :tag head)))
(catch java.io.FileNotFoundException _)))
| null | https://raw.githubusercontent.com/weavejester/build/712a1d267e1deb2e2bd041ee8ef20f2453685ced/src/weavejester/build/git.clj | clojure | (ns weavejester.build.git
(:require [clojure.string :as str]
[clojure.java.shell :as sh]))
(defn- git [& args]
(some-> (apply sh/sh "git" args) :out str/trim))
(defn default-version []
(git "describe" "--exact-match" "--abbrev=0"))
(defn git-head []
(git "rev-parse" "HEAD"))
(defn git-origin []
(git "config" "--get" "remote.origin.url"))
(defn- parse-github-url [url]
(or (re-matches #"(?:[A-Za-z-]{2,}@)?github.com:([^/]+)/([^/]+).git" url)
(re-matches #"[^:]+://(?:[A-Za-z-]{2,}@)?github.com/([^/]+)/([^/]+?)(?:.git)?" url)))
(defn- github-urls [url]
(when-let [[_ user repo] (parse-github-url url)]
{:public-clone (str "git/" user "/" repo ".git")
:dev-clone (str "ssh:///" user "/" repo ".git")
:browse (str "/" user "/" repo)}))
(defn github-scm-map []
(try
(let [origin (git-origin)
head (git-head)
urls (github-urls origin)]
(cond-> {:url (:browse urls)}
(:public-clone urls) (assoc :connection (str "scm:git:" (:public-clone urls)))
(:dev-clone urls) (assoc :developerConnection (str "scm:git:" (:dev-clone urls)))
head (assoc :tag head)))
(catch java.io.FileNotFoundException _)))
| |
ea5efc96e7c9b31a5763ebe8b34163744dc6e7604a337307a4d39147ddc0b1cf | brown/swank-crew | package.lisp | Copyright 2011 Google Inc. All Rights Reserved
;;;; Redistribution and use in source and binary forms, with or without
;;;; modification, are permitted provided that the following conditions are
;;;; met:
;;;; * Redistributions of source code must retain the above copyright
;;;; notice, this list of conditions and the following disclaimer.
;;;; * Redistributions in binary form must reproduce the above
;;;; copyright notice, this list of conditions and the following disclaimer
;;;; in the documentation and/or other materials provided with the
;;;; distribution.
* Neither the name of Google Inc. nor the names of its
;;;; contributors may be used to endorse or promote products derived from
;;;; this software without specific prior written permission.
;;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
;;;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;;;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;;;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Author : < >
(in-package #:common-lisp-user)
(defpackage #:swank-crew
(:documentation "Evaluate expressions on remote Lisps using the Swank protocol.")
(:use #:common-lisp)
(:import-from #:com.google.base
#:defconst
#:missing-argument)
(:import-from #:bordeaux-threads
#:condition-notify
#:condition-wait
#:make-condition-variable
#:make-lock
#:make-thread
#:with-lock-held)
(:import-from #:swank-client
#:slime-close
#:slime-connect
#:slime-eval
#:slime-eval-async
#:slime-migrate-evals
#:slime-network-error
#:slime-pending-evals-p
#:swank-connection
#:with-slime-connection)
;; master.lisp
(:export #:connect-workers
#:disconnect-workers
#:eval-form-all-workers
#:eval-form-repeatedly
#:eval-repeatedly-async-state
#:parallel-mapcar
#:parallel-reduce
#:worker-count
#:worker-pool))
| null | https://raw.githubusercontent.com/brown/swank-crew/af5a78678247cdceec79c9c58c238a9a735de2f9/package.lisp | lisp | Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
LOSS OF USE ,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
master.lisp | Copyright 2011 Google Inc. All Rights Reserved
* Neither the name of Google Inc. nor the names of its
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
Author : < >
(in-package #:common-lisp-user)
(defpackage #:swank-crew
(:documentation "Evaluate expressions on remote Lisps using the Swank protocol.")
(:use #:common-lisp)
(:import-from #:com.google.base
#:defconst
#:missing-argument)
(:import-from #:bordeaux-threads
#:condition-notify
#:condition-wait
#:make-condition-variable
#:make-lock
#:make-thread
#:with-lock-held)
(:import-from #:swank-client
#:slime-close
#:slime-connect
#:slime-eval
#:slime-eval-async
#:slime-migrate-evals
#:slime-network-error
#:slime-pending-evals-p
#:swank-connection
#:with-slime-connection)
(:export #:connect-workers
#:disconnect-workers
#:eval-form-all-workers
#:eval-form-repeatedly
#:eval-repeatedly-async-state
#:parallel-mapcar
#:parallel-reduce
#:worker-count
#:worker-pool))
|
5ec1e7af31adc1c696a0eeefa614790df6909b92680321acc36628dc9bad8846 | collaborativetrust/WikiTrust | downloadwp.ml |
Copyright ( c ) 2009 The Regents of the University of California
All rights reserved .
Authors : , , and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
1 . Redistributions of source code must retain the above copyright notice ,
this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright notice ,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution .
3 . The names of the contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE )
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
Copyright (c) 2009 The Regents of the University of California
All rights reserved.
Authors: Luca de Alfaro, Ian Pye, B. Thomas Adler
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. The names of the contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*)
open Online_command_line
open Online_types
exception Bad_Line of string
let custom_line_format = [] @ command_line_format
let _ = Arg.parse custom_line_format noop "Usage: downloadwp [options]";;
(* Prepares the database connection information *)
let mediawiki_db = {
Mysql.dbhost = Some !mw_db_host;
Mysql.dbname = Some !mw_db_name;
Mysql.dbport = Some !mw_db_port;
Mysql.dbpwd = Some !mw_db_pass;
Mysql.dbuser = Some !mw_db_user;
Mysql.dbsocket = None;
}
(* Sets up the db *)
(* Note that here it does not make sense to use the wikipedia API *)
let mediawiki_dbh = Mysql.connect mediawiki_db in
let db = Online_db.create_db !use_exec_api !db_prefix mediawiki_dbh None
!mw_db_name !wt_db_rev_base_path !wt_db_blob_base_path !dump_db_calls true in
let tabsplit = Str.split_delim (Str.regexp "\t") in
let splitLine2TitleRev line =
let vals = tabsplit line in
match vals with
| [title; rev] -> (title, (int_of_string rev))
| [title] -> (title, 0)
| _ -> raise (Bad_Line line)
in
let main_loop () =
try
while true do begin
let line = input_line stdin in
let (title, start_rev) = splitLine2TitleRev line in
try
Wikipedia_api.download_page_starting_with db title start_rev 0
with
Wikipedia_api.API_error msg ->
(!Online_log.online_logger)#log (Printf.sprintf "ERROR: %s\nmsg=%s\n" title msg);
end done
with End_of_file -> ()
in
main_loop ()
| null | https://raw.githubusercontent.com/collaborativetrust/WikiTrust/9dd056e65c37a22f67d600dd1e87753aa0ec9e2c/remote/analysis/downloadwp.ml | ocaml | Prepares the database connection information
Sets up the db
Note that here it does not make sense to use the wikipedia API |
Copyright ( c ) 2009 The Regents of the University of California
All rights reserved .
Authors : , , and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
1 . Redistributions of source code must retain the above copyright notice ,
this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright notice ,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution .
3 . The names of the contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE )
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
Copyright (c) 2009 The Regents of the University of California
All rights reserved.
Authors: Luca de Alfaro, Ian Pye, B. Thomas Adler
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. The names of the contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*)
open Online_command_line
open Online_types
exception Bad_Line of string
let custom_line_format = [] @ command_line_format
let _ = Arg.parse custom_line_format noop "Usage: downloadwp [options]";;
let mediawiki_db = {
Mysql.dbhost = Some !mw_db_host;
Mysql.dbname = Some !mw_db_name;
Mysql.dbport = Some !mw_db_port;
Mysql.dbpwd = Some !mw_db_pass;
Mysql.dbuser = Some !mw_db_user;
Mysql.dbsocket = None;
}
let mediawiki_dbh = Mysql.connect mediawiki_db in
let db = Online_db.create_db !use_exec_api !db_prefix mediawiki_dbh None
!mw_db_name !wt_db_rev_base_path !wt_db_blob_base_path !dump_db_calls true in
let tabsplit = Str.split_delim (Str.regexp "\t") in
let splitLine2TitleRev line =
let vals = tabsplit line in
match vals with
| [title; rev] -> (title, (int_of_string rev))
| [title] -> (title, 0)
| _ -> raise (Bad_Line line)
in
let main_loop () =
try
while true do begin
let line = input_line stdin in
let (title, start_rev) = splitLine2TitleRev line in
try
Wikipedia_api.download_page_starting_with db title start_rev 0
with
Wikipedia_api.API_error msg ->
(!Online_log.online_logger)#log (Printf.sprintf "ERROR: %s\nmsg=%s\n" title msg);
end done
with End_of_file -> ()
in
main_loop ()
|
f5bca493e73c61ed308f8c922040cf86a40aadd163ded918e73f4d974f3b064c | elaforge/karya | Extract_test.hs | Copyright 2017
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
module Cmd.Ruler.Extract_test where
import qualified Cmd.Create as Create
import qualified Cmd.Ruler.Extract as Extract
import qualified Cmd.Ruler.RulerUtil as RulerUtil
import Cmd.TestInstances ()
import qualified Ui.Meter.Meter as Meter
import qualified Ui.Ui as Ui
import qualified Ui.UiTest as UiTest
import Util.Test
test_pull_up :: Test
test_pull_up = do
let ((top, top_rid), state_pre) = UiTest.run Ui.empty $ do
[top, b1, b2] <- UiTest.mkblocks
[ ("top", [(">", [(0, 10, "b1"), (10, 6, "b2")])])
, ("b1", [])
, ("b2", [])
]
top_rid <- Create.new_ruler top "r.top" $
UiTest.mkruler 4 16 (mkmeter 4)
Create.new_ruler b1 "r.b1" $ UiTest.mkruler 4 10 (mkmeter 4)
Create.new_ruler b2 "r.b2" $ UiTest.mkruler 3 6 (mkmeter 3)
return (top, top_rid)
let state_post = UiTest.exec state_pre $ do
meter <- Extract.pull_up top (UiTest.mk_tid_block top 1)
RulerUtil.set_meter top_rid meter
extract = map ( second Mark.mark_rank ) . Meter . Make.make_measures
-- extract (t, m) = (Mark.mark_rank m, Mark.mark_duration m)
equal (UiTest.e_rulers state_pre)
[ ("b1", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4")
, ("b2", "1 .2 .3 2 .2 .3 3")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 .3 .4 4 .2 .3 .4 5")
]
equal (UiTest.e_rulers state_post)
[ ("b1", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4")
, ("b2", "1 .2 .3 2 .2 .3 3")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4 .2 .3 5 .2 .3 6")
]
test_push_down :: Test
test_push_down = do
let make = do
[top, b1, b2] <- UiTest.mkblocks
[ ("top", [(">", [(0, 10, "b1"), (10, 6, "b2")])])
, ("b1=ruler", [(">", [(0, 10, "")])])
, ("b2=ruler", [(">", [(0, 6, "")])])
]
Create.new_ruler top "r.top" $ UiTest.mkruler 4 16 (mkmeter 4)
mapM_ (Create.set_block_ruler Ui.no_ruler) [b1, b2]
return top
let (top, state_pre) = UiTest.run Ui.empty make
let state_post = UiTest.exec state_pre $
Extract.push_down True top (UiTest.mk_tid_block top 1)
equal (UiTest.e_rulers state_pre)
[ ("b1", "")
, ("b2", "")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 .3 .4 4 .2 .3 .4 5")
]
equal (UiTest.e_rulers state_post)
[ ("b1", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4")
TODO previously would set Meter.config_start_measure
, ( UiTest.bid " b2 " , " .3 .4 4 .2 .3 .4 5 " )
, ("b2", "1 .2 2 .2 .3 .4 3")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 .3 .4 4 .2 .3 .4 5")
]
mkmeter :: Int -> Meter.AbstractMeter
mkmeter n = Meter.repeat n Meter.T
| null | https://raw.githubusercontent.com/elaforge/karya/a6638f16da9f018686023977c1292d6ce5095e28/Cmd/Ruler/Extract_test.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
extract (t, m) = (Mark.mark_rank m, Mark.mark_duration m) | Copyright 2017
module Cmd.Ruler.Extract_test where
import qualified Cmd.Create as Create
import qualified Cmd.Ruler.Extract as Extract
import qualified Cmd.Ruler.RulerUtil as RulerUtil
import Cmd.TestInstances ()
import qualified Ui.Meter.Meter as Meter
import qualified Ui.Ui as Ui
import qualified Ui.UiTest as UiTest
import Util.Test
test_pull_up :: Test
test_pull_up = do
let ((top, top_rid), state_pre) = UiTest.run Ui.empty $ do
[top, b1, b2] <- UiTest.mkblocks
[ ("top", [(">", [(0, 10, "b1"), (10, 6, "b2")])])
, ("b1", [])
, ("b2", [])
]
top_rid <- Create.new_ruler top "r.top" $
UiTest.mkruler 4 16 (mkmeter 4)
Create.new_ruler b1 "r.b1" $ UiTest.mkruler 4 10 (mkmeter 4)
Create.new_ruler b2 "r.b2" $ UiTest.mkruler 3 6 (mkmeter 3)
return (top, top_rid)
let state_post = UiTest.exec state_pre $ do
meter <- Extract.pull_up top (UiTest.mk_tid_block top 1)
RulerUtil.set_meter top_rid meter
extract = map ( second Mark.mark_rank ) . Meter . Make.make_measures
equal (UiTest.e_rulers state_pre)
[ ("b1", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4")
, ("b2", "1 .2 .3 2 .2 .3 3")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 .3 .4 4 .2 .3 .4 5")
]
equal (UiTest.e_rulers state_post)
[ ("b1", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4")
, ("b2", "1 .2 .3 2 .2 .3 3")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4 .2 .3 5 .2 .3 6")
]
test_push_down :: Test
test_push_down = do
let make = do
[top, b1, b2] <- UiTest.mkblocks
[ ("top", [(">", [(0, 10, "b1"), (10, 6, "b2")])])
, ("b1=ruler", [(">", [(0, 10, "")])])
, ("b2=ruler", [(">", [(0, 6, "")])])
]
Create.new_ruler top "r.top" $ UiTest.mkruler 4 16 (mkmeter 4)
mapM_ (Create.set_block_ruler Ui.no_ruler) [b1, b2]
return top
let (top, state_pre) = UiTest.run Ui.empty make
let state_post = UiTest.exec state_pre $
Extract.push_down True top (UiTest.mk_tid_block top 1)
equal (UiTest.e_rulers state_pre)
[ ("b1", "")
, ("b2", "")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 .3 .4 4 .2 .3 .4 5")
]
equal (UiTest.e_rulers state_post)
[ ("b1", "1 .2 .3 .4 2 .2 .3 .4 3 .2 4")
TODO previously would set Meter.config_start_measure
, ( UiTest.bid " b2 " , " .3 .4 4 .2 .3 .4 5 " )
, ("b2", "1 .2 2 .2 .3 .4 3")
, ("top", "1 .2 .3 .4 2 .2 .3 .4 3 .2 .3 .4 4 .2 .3 .4 5")
]
mkmeter :: Int -> Meter.AbstractMeter
mkmeter n = Meter.repeat n Meter.T
|
ec329773314c0cbcc87dba42f521d38f03e3332e41bd35d78a1f290fb11d0ca8 | MargaretKrutikova/me-learning-erlang | raindrops_tests.erl | %% Based on canonical data version 1.1.0
-specifications/raw/master/exercises/raindrops/canonical-data.json
%% This file is automatically generated from the exercises canonical data.
-module(raindrops_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
'1_the_sound_for_1_is_1_test'() ->
?assertEqual("1", (raindrops:convert(1))).
'2_the_sound_for_3_is_pling_test'() ->
?assertEqual("Pling", (raindrops:convert(3))).
'3_the_sound_for_5_is_plang_test'() ->
?assertEqual("Plang", (raindrops:convert(5))).
'4_the_sound_for_7_is_plong_test'() ->
?assertEqual("Plong", (raindrops:convert(7))).
'5_the_sound_for_6_is_pling_as_it_has_a_factor_3_test'() ->
?assertEqual("Pling", (raindrops:convert(6))).
'6_2_to_the_power_3_does_not_make_a_raindrop_sound_as_3_is_the_exponent_not_the_base_test'() ->
?assertEqual("8", (raindrops:convert(8))).
'7_the_sound_for_9_is_pling_as_it_has_a_factor_3_test'() ->
?assertEqual("Pling", (raindrops:convert(9))).
'8_the_sound_for_10_is_plang_as_it_has_a_factor_5_test'() ->
?assertEqual("Plang", (raindrops:convert(10))).
'9_the_sound_for_14_is_plong_as_it_has_a_factor_of_7_test'() ->
?assertEqual("Plong", (raindrops:convert(14))).
'10_the_sound_for_15_is_pling_plang_as_it_has_factors_3_and_5_test'() ->
?assertEqual("PlingPlang", (raindrops:convert(15))).
'11_the_sound_for_21_is_pling_plong_as_it_has_factors_3_and_7_test'() ->
?assertEqual("PlingPlong", (raindrops:convert(21))).
'12_the_sound_for_25_is_plang_as_it_has_a_factor_5_test'() ->
?assertEqual("Plang", (raindrops:convert(25))).
'13_the_sound_for_27_is_pling_as_it_has_a_factor_3_test'() ->
?assertEqual("Pling", (raindrops:convert(27))).
'14_the_sound_for_35_is_plang_plong_as_it_has_factors_5_and_7_test'() ->
?assertEqual("PlangPlong", (raindrops:convert(35))).
'15_the_sound_for_49_is_plong_as_it_has_a_factor_7_test'() ->
?assertEqual("Plong", (raindrops:convert(49))).
'16_the_sound_for_52_is_52_test'() ->
?assertEqual("52", (raindrops:convert(52))).
'17_the_sound_for_105_is_pling_plang_plong_as_it_has_factors_3_5_and_7_test'() ->
?assertEqual("PlingPlangPlong",
(raindrops:convert(105))).
'18_the_sound_for_3125_is_plang_as_it_has_a_factor_5_test'() ->
?assertEqual("Plang", (raindrops:convert(3125))).
| null | https://raw.githubusercontent.com/MargaretKrutikova/me-learning-erlang/501f9256e332f4d48a74098fe49fcde203e53475/raindrops/test/raindrops_tests.erl | erlang | Based on canonical data version 1.1.0
This file is automatically generated from the exercises canonical data. | -specifications/raw/master/exercises/raindrops/canonical-data.json
-module(raindrops_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
'1_the_sound_for_1_is_1_test'() ->
?assertEqual("1", (raindrops:convert(1))).
'2_the_sound_for_3_is_pling_test'() ->
?assertEqual("Pling", (raindrops:convert(3))).
'3_the_sound_for_5_is_plang_test'() ->
?assertEqual("Plang", (raindrops:convert(5))).
'4_the_sound_for_7_is_plong_test'() ->
?assertEqual("Plong", (raindrops:convert(7))).
'5_the_sound_for_6_is_pling_as_it_has_a_factor_3_test'() ->
?assertEqual("Pling", (raindrops:convert(6))).
'6_2_to_the_power_3_does_not_make_a_raindrop_sound_as_3_is_the_exponent_not_the_base_test'() ->
?assertEqual("8", (raindrops:convert(8))).
'7_the_sound_for_9_is_pling_as_it_has_a_factor_3_test'() ->
?assertEqual("Pling", (raindrops:convert(9))).
'8_the_sound_for_10_is_plang_as_it_has_a_factor_5_test'() ->
?assertEqual("Plang", (raindrops:convert(10))).
'9_the_sound_for_14_is_plong_as_it_has_a_factor_of_7_test'() ->
?assertEqual("Plong", (raindrops:convert(14))).
'10_the_sound_for_15_is_pling_plang_as_it_has_factors_3_and_5_test'() ->
?assertEqual("PlingPlang", (raindrops:convert(15))).
'11_the_sound_for_21_is_pling_plong_as_it_has_factors_3_and_7_test'() ->
?assertEqual("PlingPlong", (raindrops:convert(21))).
'12_the_sound_for_25_is_plang_as_it_has_a_factor_5_test'() ->
?assertEqual("Plang", (raindrops:convert(25))).
'13_the_sound_for_27_is_pling_as_it_has_a_factor_3_test'() ->
?assertEqual("Pling", (raindrops:convert(27))).
'14_the_sound_for_35_is_plang_plong_as_it_has_factors_5_and_7_test'() ->
?assertEqual("PlangPlong", (raindrops:convert(35))).
'15_the_sound_for_49_is_plong_as_it_has_a_factor_7_test'() ->
?assertEqual("Plong", (raindrops:convert(49))).
'16_the_sound_for_52_is_52_test'() ->
?assertEqual("52", (raindrops:convert(52))).
'17_the_sound_for_105_is_pling_plang_plong_as_it_has_factors_3_5_and_7_test'() ->
?assertEqual("PlingPlangPlong",
(raindrops:convert(105))).
'18_the_sound_for_3125_is_plang_as_it_has_a_factor_5_test'() ->
?assertEqual("Plang", (raindrops:convert(3125))).
|
cb9766d1ffb7fe325d2da65103a098b65054a7a97459eafc801b48c0c59663ad | bondy-io/bondy | bondy_session_counter.erl | -module(bondy_session_counter).
-include_lib("wamp/include/wamp.hrl").
-define(TAB, ?MODULE).
-type key() :: message_id.
-export([init/0]).
-export([incr/2]).
-export([delete_all/1]).
%% =============================================================================
%% API
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec init() -> ok.
init() ->
Opts = [
ordered_set,
{keypos, 1},
named_table,
public,
{read_concurrency, true},
{write_concurrency, true},
{decentralized_counters, true}
],
{ok, ?TAB} = bondy_table_owner:add(?TAB, Opts),
ok.
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec incr(bondy_session_id:t(), key()) -> integer().
incr(SessionId, message_id) when is_binary(SessionId) ->
IDs in the _ session scope _ SHOULD be incremented by 1 beginning
with 1 ( for each direction - _ Client - to - Router _ and _ Router - to-
%% Client_)
%% This is the router-to-client direction
Key = {SessionId, request_id},
UpdateOp = {2, 1, ?MAX_ID, 0},
Default = {Key, 0},
ets:update_counter(?TAB, Key, UpdateOp, Default).
%% -----------------------------------------------------------------------------
@doc Removes all counters associated with session identifier ` SessionId ' .
%% @end
%% -----------------------------------------------------------------------------
-spec delete_all(SessionId :: bondy_session_id:t()) -> ok.
delete_all(SessionId) when is_binary(SessionId) ->
true = ets:match_delete(?TAB, {{SessionId, '_'}, '_'}),
ok. | null | https://raw.githubusercontent.com/bondy-io/bondy/a1267e7e5526db24f278e12315020753f3168b44/apps/bondy/src/bondy_session_counter.erl | erlang | =============================================================================
API
=============================================================================
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
Client_)
This is the router-to-client direction
-----------------------------------------------------------------------------
@end
----------------------------------------------------------------------------- | -module(bondy_session_counter).
-include_lib("wamp/include/wamp.hrl").
-define(TAB, ?MODULE).
-type key() :: message_id.
-export([init/0]).
-export([incr/2]).
-export([delete_all/1]).
-spec init() -> ok.
init() ->
Opts = [
ordered_set,
{keypos, 1},
named_table,
public,
{read_concurrency, true},
{write_concurrency, true},
{decentralized_counters, true}
],
{ok, ?TAB} = bondy_table_owner:add(?TAB, Opts),
ok.
-spec incr(bondy_session_id:t(), key()) -> integer().
incr(SessionId, message_id) when is_binary(SessionId) ->
IDs in the _ session scope _ SHOULD be incremented by 1 beginning
with 1 ( for each direction - _ Client - to - Router _ and _ Router - to-
Key = {SessionId, request_id},
UpdateOp = {2, 1, ?MAX_ID, 0},
Default = {Key, 0},
ets:update_counter(?TAB, Key, UpdateOp, Default).
@doc Removes all counters associated with session identifier ` SessionId ' .
-spec delete_all(SessionId :: bondy_session_id:t()) -> ok.
delete_all(SessionId) when is_binary(SessionId) ->
true = ets:match_delete(?TAB, {{SessionId, '_'}, '_'}),
ok. |
e800d3f2194d9453700c61ec1ee7d66a9deb6903480f1c880bcc9035e639f43a | ghc/ghc | Lit.hs | # LANGUAGE LambdaCase #
-----------------------------------------------------------------------------
--
Stg to C-- code generation : literals
--
( c ) The University of Glasgow 2004 - 2006
--
-----------------------------------------------------------------------------
module GHC.StgToCmm.Lit (
cgLit, mkSimpleLit,
newStringCLit, newByteStringCLit
) where
import GHC.Prelude
import GHC.Platform
import GHC.StgToCmm.Monad
import GHC.StgToCmm.Env
import GHC.Cmm
import GHC.Cmm.CLabel
import GHC.Cmm.Utils
import GHC.Types.Literal
import GHC.Types.RepType( runtimeRepPrimRep )
import GHC.Builtin.Types ( unitDataConId )
import GHC.Core.TyCon
import GHC.Utils.Misc
import GHC.Utils.Outputable
import GHC.Utils.Panic
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS8
import Data.Char (ord)
newStringCLit :: String -> FCode CmmLit
-- ^ Make a global definition for the string,
-- and return its label
newStringCLit str = newByteStringCLit (BS8.pack str)
newByteStringCLit :: ByteString -> FCode CmmLit
newByteStringCLit bytes
= do { uniq <- newUnique
; let (lit, decl) = mkByteStringCLit (mkStringLitLabel uniq) bytes
; emitDecl decl
; return lit }
cgLit :: Literal -> FCode CmmExpr
cgLit (LitString s) =
CmmLit <$> newByteStringCLit s
not unpackFS ; we want the UTF-8 byte stream .
cgLit (LitRubbish _ rep) =
case expectOnly "cgLit" prim_reps of -- Note [Post-unarisation invariants]
VoidRep -> panic "cgLit:VoidRep" -- ditto
LiftedRep -> idInfoToAmode <$> getCgIdInfo unitDataConId
UnliftedRep -> idInfoToAmode <$> getCgIdInfo unitDataConId
AddrRep -> cgLit LitNullAddr
VecRep n elem -> do
platform <- getPlatform
let elem_lit = mkSimpleLit platform (num_rep_lit (primElemRepToPrimRep elem))
pure (CmmLit (CmmVec (replicate n elem_lit)))
prep -> cgLit (num_rep_lit prep)
where
prim_reps = runtimeRepPrimRep (text "cgLit") rep
num_rep_lit IntRep = mkLitIntUnchecked 0
num_rep_lit Int8Rep = mkLitInt8Unchecked 0
num_rep_lit Int16Rep = mkLitInt16Unchecked 0
num_rep_lit Int32Rep = mkLitInt32Unchecked 0
num_rep_lit Int64Rep = mkLitInt64Unchecked 0
num_rep_lit WordRep = mkLitWordUnchecked 0
num_rep_lit Word8Rep = mkLitWord8Unchecked 0
num_rep_lit Word16Rep = mkLitWord16Unchecked 0
num_rep_lit Word32Rep = mkLitWord32Unchecked 0
num_rep_lit Word64Rep = mkLitWord64Unchecked 0
num_rep_lit FloatRep = LitFloat 0
num_rep_lit DoubleRep = LitDouble 0
num_rep_lit other = pprPanic "num_rep_lit: Not a num lit" (ppr other)
cgLit other_lit = do
platform <- getPlatform
pure (CmmLit (mkSimpleLit platform other_lit))
mkSimpleLit :: Platform -> Literal -> CmmLit
mkSimpleLit platform = \case
(LitChar c) -> CmmInt (fromIntegral (ord c))
(wordWidth platform)
LitNullAddr -> zeroCLit platform
(LitNumber LitNumInt i) -> CmmInt i (wordWidth platform)
(LitNumber LitNumInt8 i) -> CmmInt i W8
(LitNumber LitNumInt16 i) -> CmmInt i W16
(LitNumber LitNumInt32 i) -> CmmInt i W32
(LitNumber LitNumInt64 i) -> CmmInt i W64
(LitNumber LitNumWord i) -> CmmInt i (wordWidth platform)
(LitNumber LitNumWord8 i) -> CmmInt i W8
(LitNumber LitNumWord16 i) -> CmmInt i W16
(LitNumber LitNumWord32 i) -> CmmInt i W32
(LitNumber LitNumWord64 i) -> CmmInt i W64
(LitFloat r) -> CmmFloat r W32
(LitDouble r) -> CmmFloat r W64
(LitLabel fs ms fod)
-> let -- TODO: Literal labels might not actually be in the current package...
labelSrc = ForeignLabelInThisPackage
in CmmLabel (mkForeignLabel fs ms labelSrc fod)
other -> pprPanic "mkSimpleLit" (ppr other)
| null | https://raw.githubusercontent.com/ghc/ghc/37cfe3c0f4fb16189bbe3bb735f758cd6e3d9157/compiler/GHC/StgToCmm/Lit.hs | haskell | ---------------------------------------------------------------------------
code generation : literals
---------------------------------------------------------------------------
^ Make a global definition for the string,
and return its label
Note [Post-unarisation invariants]
ditto
TODO: Literal labels might not actually be in the current package... | # LANGUAGE LambdaCase #
( c ) The University of Glasgow 2004 - 2006
module GHC.StgToCmm.Lit (
cgLit, mkSimpleLit,
newStringCLit, newByteStringCLit
) where
import GHC.Prelude
import GHC.Platform
import GHC.StgToCmm.Monad
import GHC.StgToCmm.Env
import GHC.Cmm
import GHC.Cmm.CLabel
import GHC.Cmm.Utils
import GHC.Types.Literal
import GHC.Types.RepType( runtimeRepPrimRep )
import GHC.Builtin.Types ( unitDataConId )
import GHC.Core.TyCon
import GHC.Utils.Misc
import GHC.Utils.Outputable
import GHC.Utils.Panic
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS8
import Data.Char (ord)
newStringCLit :: String -> FCode CmmLit
newStringCLit str = newByteStringCLit (BS8.pack str)
newByteStringCLit :: ByteString -> FCode CmmLit
newByteStringCLit bytes
= do { uniq <- newUnique
; let (lit, decl) = mkByteStringCLit (mkStringLitLabel uniq) bytes
; emitDecl decl
; return lit }
cgLit :: Literal -> FCode CmmExpr
cgLit (LitString s) =
CmmLit <$> newByteStringCLit s
not unpackFS ; we want the UTF-8 byte stream .
cgLit (LitRubbish _ rep) =
LiftedRep -> idInfoToAmode <$> getCgIdInfo unitDataConId
UnliftedRep -> idInfoToAmode <$> getCgIdInfo unitDataConId
AddrRep -> cgLit LitNullAddr
VecRep n elem -> do
platform <- getPlatform
let elem_lit = mkSimpleLit platform (num_rep_lit (primElemRepToPrimRep elem))
pure (CmmLit (CmmVec (replicate n elem_lit)))
prep -> cgLit (num_rep_lit prep)
where
prim_reps = runtimeRepPrimRep (text "cgLit") rep
num_rep_lit IntRep = mkLitIntUnchecked 0
num_rep_lit Int8Rep = mkLitInt8Unchecked 0
num_rep_lit Int16Rep = mkLitInt16Unchecked 0
num_rep_lit Int32Rep = mkLitInt32Unchecked 0
num_rep_lit Int64Rep = mkLitInt64Unchecked 0
num_rep_lit WordRep = mkLitWordUnchecked 0
num_rep_lit Word8Rep = mkLitWord8Unchecked 0
num_rep_lit Word16Rep = mkLitWord16Unchecked 0
num_rep_lit Word32Rep = mkLitWord32Unchecked 0
num_rep_lit Word64Rep = mkLitWord64Unchecked 0
num_rep_lit FloatRep = LitFloat 0
num_rep_lit DoubleRep = LitDouble 0
num_rep_lit other = pprPanic "num_rep_lit: Not a num lit" (ppr other)
cgLit other_lit = do
platform <- getPlatform
pure (CmmLit (mkSimpleLit platform other_lit))
mkSimpleLit :: Platform -> Literal -> CmmLit
mkSimpleLit platform = \case
(LitChar c) -> CmmInt (fromIntegral (ord c))
(wordWidth platform)
LitNullAddr -> zeroCLit platform
(LitNumber LitNumInt i) -> CmmInt i (wordWidth platform)
(LitNumber LitNumInt8 i) -> CmmInt i W8
(LitNumber LitNumInt16 i) -> CmmInt i W16
(LitNumber LitNumInt32 i) -> CmmInt i W32
(LitNumber LitNumInt64 i) -> CmmInt i W64
(LitNumber LitNumWord i) -> CmmInt i (wordWidth platform)
(LitNumber LitNumWord8 i) -> CmmInt i W8
(LitNumber LitNumWord16 i) -> CmmInt i W16
(LitNumber LitNumWord32 i) -> CmmInt i W32
(LitNumber LitNumWord64 i) -> CmmInt i W64
(LitFloat r) -> CmmFloat r W32
(LitDouble r) -> CmmFloat r W64
(LitLabel fs ms fod)
labelSrc = ForeignLabelInThisPackage
in CmmLabel (mkForeignLabel fs ms labelSrc fod)
other -> pprPanic "mkSimpleLit" (ppr other)
|
5e6dd77df985f0267028b1477e990cbe8561fee380845a63f163c9f0c1b03235 | ocaml/opam | opamUrl.ml | (**************************************************************************)
(* *)
Copyright 2012 - 2019 OCamlPro
Copyright 2012 INRIA
(* *)
(* All rights reserved. This file is distributed under the terms of the *)
GNU Lesser General Public License version 2.1 , with the special
(* exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open OpamStd.Op
type version_control = [ `git | `darcs | `hg ]
type backend = [ `http | `rsync | version_control ]
type t = {
transport: string;
path: string;
hash: string option;
backend: backend;
}
let empty = {
backend = `http;
transport = "https";
path = "";
hash = None;
}
let compare {transport; path; hash; backend} u =
let transport = String.compare transport u.transport in
if transport <> 0 then transport else
let path = String.compare path u.path in
if path <> 0 then path else
let hash = OpamStd.Option.compare String.compare hash u.hash in
if hash <> 0 then hash else
compare backend u.backend
let equal u v = compare u v = 0
exception Parse_error of string
let parse_error s = raise (Parse_error s)
let split_url =
let re =
Re.(compile @@ whole_string @@ seq [
Parse the scheme , which is either backend+protocol or just a protocol
opt @@ seq [
(* Backend *)
opt @@ seq [ group @@ rep @@ diff any (set "+:");
alt [ char '+'; str "://"] ];
Protocol
group @@ rep @@ diff any (char ':');
(* Separator *)
str "://"
];
(* Parse the path, with is either path or path.suffix (suffix contains no .) *)
group @@ seq [
non_greedy @@ rep @@ diff any (char '#');
(* If there's a .suffix, group it separately (used for backend guessing) *)
opt @@ seq [ char '.'; group @@ rep1 @@ diff any (set "\\/.#")]
];
(* Parse the fragment (git branch, etc.) *)
opt @@ seq [ char '#'; group @@ rep any ];
])
in
fun u ->
match Re.Group.all (Re.exec re u) with
| [| _; vc; transport; path; suffix; hash |] ->
let opt = function "" -> None | s -> Some s in
opt vc, opt transport, path, opt suffix, opt hash
| _ -> assert false
let vc_of_string = function
| "git" -> `git
| "hg" -> `hg
| "darcs" -> `darcs
| vc ->
parse_error (Printf.sprintf "unsupported version control system %s"
(OpamConsole.colorise `underline vc))
let string_of_vc = function
| `git -> "git"
| `darcs -> "darcs"
| `hg -> "hg"
let string_of_backend = function
| `http -> "http"
| `rsync -> "rsync"
| #version_control as vc -> string_of_vc vc
let backend_of_string = function
| "http" | "https" | "ftp" | "wget" | "curl" -> `http
| "file" -> `rsync
| "git" -> `git
| "darcs" -> `darcs
| "hg" -> `hg
| "path" | "local" | "rsync" | "ssh" | "scp" | "sftp" -> `rsync
| p ->
parse_error (Printf.sprintf "unsupported protocol %s"
(OpamConsole.colorise `underline p))
let looks_like_ssh_path =
(* ':' before any '/' : assume ssh, like git does. Exception for 'x:' with
single char, because Windows *)
let re =
Re.(compile @@ seq [
group @@ repn (diff any (set "/:")) 2 None;
char ':';
opt @@ char '/';
opt @@ group @@ seq [
alt [
diff any digit;
seq [rep digit; compl [digit; char '/']]
];
rep any;
];
eos;
])
in
fun path ->
try
let sub = Re.exec re path in
Some (Re.Group.get sub 1 ^
try "/" ^ Re.Group.get sub 2 with Not_found -> "")
with Not_found -> None
let parse ?backend ?(handle_suffix=true) ?(from_file=true) s =
let vc, transport, path, suffix, hash = split_url s in
let backend =
match backend with
| Some b -> b
| None ->
match vc with
| Some vc -> vc_of_string vc
| None ->
let of_suffix ~default =
if not handle_suffix then default else
match suffix with
| Some sf -> (try vc_of_string sf with Parse_error _ -> default)
| None ->
match OpamStd.String.cut_at path '@' with
| Some (user, _) ->
(try vc_of_string user with Parse_error _ -> default)
| None -> default
in
match transport with
| None -> of_suffix ~default:`rsync
| Some tr ->
try vc_of_string tr with Parse_error _ ->
of_suffix ~default:(backend_of_string tr)
in
let transport, path =
match backend, transport, looks_like_ssh_path path with
| `http, None, _ ->
"http", path
| _, (None | Some ("git"|"hg"|"darcs")), Some path ->
"ssh", path
| _, (None | Some ("hg"|"darcs")), None ->
"file", OpamSystem.real_path path |> OpamSystem.back_to_forward
| `rsync, Some "file", _ when not from_file ->
"file", OpamSystem.real_path path |> OpamSystem.back_to_forward
| _, Some tr, _ ->
tr, path
in
{
transport;
path;
hash;
backend;
}
let parse_opt ?(quiet=false) ?backend ?handle_suffix ?from_file s =
try
Some (parse ?backend ?handle_suffix ?from_file s)
with Parse_error pe ->
if not quiet then
OpamConsole.warning "URL parsing error on %s: %s"
(OpamConsole.colorise `underline s) pe;
None
let of_string url = parse ~handle_suffix:false url
let to_string_t ?subpath url =
let hash = match url.hash with Some h -> "#" ^ h | None -> "" in
let subpath =
match subpath with
| Some sb ->
Printf.sprintf "directory /%s in "
(OpamFilename.SubPath.normalised_string sb)
| None -> ""
in
match url.backend with
| #version_control as vc ->
let vc = string_of_backend vc in
Do n't be redundant on e.g git:// protocols
Printf.sprintf "%s%s" subpath vc url.path hash
else
Printf.sprintf "%s%s+%s" subpath vc url.transport url.path hash
| `rsync | `http ->
Printf.sprintf "%s%s" subpath url.transport url.path hash
let to_string url = to_string_t url
let to_string_w_subpath subpath = to_string_t ?subpath
let base_url url =
match url.transport with
| "" -> url.path
| tr -> Printf.sprintf "%s" tr url.path
let local_path = function
| { transport = ("file"|"path"|"local"|"rsync"); path;
hash = _; backend = (#version_control | `rsync); }
when looks_like_ssh_path path = None ->
Some path
| _ -> None
let local_dir url =
let open OpamStd.Option.Op in
local_path url >>|
OpamFilename.Dir.of_string >>= fun d ->
if OpamFilename.exists_dir d then Some d
else None
let local_file url =
let open OpamStd.Option.Op in
local_path url >>|
OpamFilename.of_string >>= fun f ->
if OpamFilename.exists f then Some f
else None
let guess_version_control s =
let vc,transport,path,_,_ = split_url s in
if vc = None && transport = None && looks_like_ssh_path path = None then
let open OpamFilename in
let open Op in
let dir = Dir.of_string path in
if exists_dir (dir / ".git") || exists (dir // ".git")
then Some`git else
if exists_dir (dir / ".hg") then Some `hg else
if exists_dir (dir / "_darcs") then Some `darcs else
None
else
None
let basename =
let re =
Re.(compile @@ seq [
opt @@ seq [rep any; char '/'];
group @@ rep1 (diff any (char '/'));
rep @@ char '/';
])
in
fun t ->
try
Re.Group.get (Re.exec re t.path) 1
with Not_found -> ""
let root =
let re = Re.(compile @@ seq [char '/'; rep any]) in
fun t ->
let path =
The special - casing of " file " is needed for Windows
if t.transport = "file" then
""
else
Re.replace_string re ~by:"" t.path
in
{ t with path}
let has_trailing_slash url =
OpamStd.String.ends_with ~suffix:"/" url.path
let to_json url = `String (to_string url)
let of_json = function
| `String s -> (try Some (of_string s) with _ -> None)
| _ -> None
type url = t
let map_file_url f url =
if url.transport = "file" then
{url with path = f url.path}
else
url
module O = struct
type t = url
let to_string = to_string
let to_json = to_json
let of_json = of_json
let compare = compare
end
module Set = OpamStd.Set.Make(O)
module Map = OpamStd.Map.Make(O)
module Op = struct
(** appending to an url path *)
let ( / ) url dir =
let url =
if Filename.is_relative dir then
url
else
root url
in
Even on Windows , a file:// _ should _ use slash
let dir = OpamSystem.back_to_forward dir in
let path =
if has_trailing_slash url || url.path = "" then url.path ^ dir
else url.path ^ "/" ^ dir
in
{url with path }
end
| null | https://raw.githubusercontent.com/ocaml/opam/b001e6e214f1593c80996b5015e90dcc9948435d/src/core/opamUrl.ml | ocaml | ************************************************************************
All rights reserved. This file is distributed under the terms of the
exception on linking described in the file LICENSE.
************************************************************************
Backend
Separator
Parse the path, with is either path or path.suffix (suffix contains no .)
If there's a .suffix, group it separately (used for backend guessing)
Parse the fragment (git branch, etc.)
':' before any '/' : assume ssh, like git does. Exception for 'x:' with
single char, because Windows
* appending to an url path | Copyright 2012 - 2019 OCamlPro
Copyright 2012 INRIA
GNU Lesser General Public License version 2.1 , with the special
open OpamStd.Op
type version_control = [ `git | `darcs | `hg ]
type backend = [ `http | `rsync | version_control ]
type t = {
transport: string;
path: string;
hash: string option;
backend: backend;
}
let empty = {
backend = `http;
transport = "https";
path = "";
hash = None;
}
let compare {transport; path; hash; backend} u =
let transport = String.compare transport u.transport in
if transport <> 0 then transport else
let path = String.compare path u.path in
if path <> 0 then path else
let hash = OpamStd.Option.compare String.compare hash u.hash in
if hash <> 0 then hash else
compare backend u.backend
let equal u v = compare u v = 0
exception Parse_error of string
let parse_error s = raise (Parse_error s)
let split_url =
let re =
Re.(compile @@ whole_string @@ seq [
Parse the scheme , which is either backend+protocol or just a protocol
opt @@ seq [
opt @@ seq [ group @@ rep @@ diff any (set "+:");
alt [ char '+'; str "://"] ];
Protocol
group @@ rep @@ diff any (char ':');
str "://"
];
group @@ seq [
non_greedy @@ rep @@ diff any (char '#');
opt @@ seq [ char '.'; group @@ rep1 @@ diff any (set "\\/.#")]
];
opt @@ seq [ char '#'; group @@ rep any ];
])
in
fun u ->
match Re.Group.all (Re.exec re u) with
| [| _; vc; transport; path; suffix; hash |] ->
let opt = function "" -> None | s -> Some s in
opt vc, opt transport, path, opt suffix, opt hash
| _ -> assert false
let vc_of_string = function
| "git" -> `git
| "hg" -> `hg
| "darcs" -> `darcs
| vc ->
parse_error (Printf.sprintf "unsupported version control system %s"
(OpamConsole.colorise `underline vc))
let string_of_vc = function
| `git -> "git"
| `darcs -> "darcs"
| `hg -> "hg"
let string_of_backend = function
| `http -> "http"
| `rsync -> "rsync"
| #version_control as vc -> string_of_vc vc
let backend_of_string = function
| "http" | "https" | "ftp" | "wget" | "curl" -> `http
| "file" -> `rsync
| "git" -> `git
| "darcs" -> `darcs
| "hg" -> `hg
| "path" | "local" | "rsync" | "ssh" | "scp" | "sftp" -> `rsync
| p ->
parse_error (Printf.sprintf "unsupported protocol %s"
(OpamConsole.colorise `underline p))
let looks_like_ssh_path =
let re =
Re.(compile @@ seq [
group @@ repn (diff any (set "/:")) 2 None;
char ':';
opt @@ char '/';
opt @@ group @@ seq [
alt [
diff any digit;
seq [rep digit; compl [digit; char '/']]
];
rep any;
];
eos;
])
in
fun path ->
try
let sub = Re.exec re path in
Some (Re.Group.get sub 1 ^
try "/" ^ Re.Group.get sub 2 with Not_found -> "")
with Not_found -> None
let parse ?backend ?(handle_suffix=true) ?(from_file=true) s =
let vc, transport, path, suffix, hash = split_url s in
let backend =
match backend with
| Some b -> b
| None ->
match vc with
| Some vc -> vc_of_string vc
| None ->
let of_suffix ~default =
if not handle_suffix then default else
match suffix with
| Some sf -> (try vc_of_string sf with Parse_error _ -> default)
| None ->
match OpamStd.String.cut_at path '@' with
| Some (user, _) ->
(try vc_of_string user with Parse_error _ -> default)
| None -> default
in
match transport with
| None -> of_suffix ~default:`rsync
| Some tr ->
try vc_of_string tr with Parse_error _ ->
of_suffix ~default:(backend_of_string tr)
in
let transport, path =
match backend, transport, looks_like_ssh_path path with
| `http, None, _ ->
"http", path
| _, (None | Some ("git"|"hg"|"darcs")), Some path ->
"ssh", path
| _, (None | Some ("hg"|"darcs")), None ->
"file", OpamSystem.real_path path |> OpamSystem.back_to_forward
| `rsync, Some "file", _ when not from_file ->
"file", OpamSystem.real_path path |> OpamSystem.back_to_forward
| _, Some tr, _ ->
tr, path
in
{
transport;
path;
hash;
backend;
}
let parse_opt ?(quiet=false) ?backend ?handle_suffix ?from_file s =
try
Some (parse ?backend ?handle_suffix ?from_file s)
with Parse_error pe ->
if not quiet then
OpamConsole.warning "URL parsing error on %s: %s"
(OpamConsole.colorise `underline s) pe;
None
let of_string url = parse ~handle_suffix:false url
let to_string_t ?subpath url =
let hash = match url.hash with Some h -> "#" ^ h | None -> "" in
let subpath =
match subpath with
| Some sb ->
Printf.sprintf "directory /%s in "
(OpamFilename.SubPath.normalised_string sb)
| None -> ""
in
match url.backend with
| #version_control as vc ->
let vc = string_of_backend vc in
Do n't be redundant on e.g git:// protocols
Printf.sprintf "%s%s" subpath vc url.path hash
else
Printf.sprintf "%s%s+%s" subpath vc url.transport url.path hash
| `rsync | `http ->
Printf.sprintf "%s%s" subpath url.transport url.path hash
let to_string url = to_string_t url
let to_string_w_subpath subpath = to_string_t ?subpath
let base_url url =
match url.transport with
| "" -> url.path
| tr -> Printf.sprintf "%s" tr url.path
let local_path = function
| { transport = ("file"|"path"|"local"|"rsync"); path;
hash = _; backend = (#version_control | `rsync); }
when looks_like_ssh_path path = None ->
Some path
| _ -> None
let local_dir url =
let open OpamStd.Option.Op in
local_path url >>|
OpamFilename.Dir.of_string >>= fun d ->
if OpamFilename.exists_dir d then Some d
else None
let local_file url =
let open OpamStd.Option.Op in
local_path url >>|
OpamFilename.of_string >>= fun f ->
if OpamFilename.exists f then Some f
else None
let guess_version_control s =
let vc,transport,path,_,_ = split_url s in
if vc = None && transport = None && looks_like_ssh_path path = None then
let open OpamFilename in
let open Op in
let dir = Dir.of_string path in
if exists_dir (dir / ".git") || exists (dir // ".git")
then Some`git else
if exists_dir (dir / ".hg") then Some `hg else
if exists_dir (dir / "_darcs") then Some `darcs else
None
else
None
let basename =
let re =
Re.(compile @@ seq [
opt @@ seq [rep any; char '/'];
group @@ rep1 (diff any (char '/'));
rep @@ char '/';
])
in
fun t ->
try
Re.Group.get (Re.exec re t.path) 1
with Not_found -> ""
let root =
let re = Re.(compile @@ seq [char '/'; rep any]) in
fun t ->
let path =
The special - casing of " file " is needed for Windows
if t.transport = "file" then
""
else
Re.replace_string re ~by:"" t.path
in
{ t with path}
let has_trailing_slash url =
OpamStd.String.ends_with ~suffix:"/" url.path
let to_json url = `String (to_string url)
let of_json = function
| `String s -> (try Some (of_string s) with _ -> None)
| _ -> None
type url = t
let map_file_url f url =
if url.transport = "file" then
{url with path = f url.path}
else
url
module O = struct
type t = url
let to_string = to_string
let to_json = to_json
let of_json = of_json
let compare = compare
end
module Set = OpamStd.Set.Make(O)
module Map = OpamStd.Map.Make(O)
module Op = struct
let ( / ) url dir =
let url =
if Filename.is_relative dir then
url
else
root url
in
Even on Windows , a file:// _ should _ use slash
let dir = OpamSystem.back_to_forward dir in
let path =
if has_trailing_slash url || url.path = "" then url.path ^ dir
else url.path ^ "/" ^ dir
in
{url with path }
end
|
48b6521a63b21389620e18695fa50b2f4a70f1f55ca901d00656891e86349f39 | benzap/fif | prepl.clj | (ns fif.server.prepl)
(defn prepl [])
| null | https://raw.githubusercontent.com/benzap/fif/972adab8b86c016b04babea49d52198585172fe3/src/fif/server/prepl.clj | clojure | (ns fif.server.prepl)
(defn prepl [])
| |
e801fe15ce4ba0c4fa95025195fd8149826e448c2248b9bd9ff4703de7abb9ee | mrosset/nomad | curl.scm | ;; Curl --- download things from network protocols
Copyright ( C ) 2019 Amar Singh< >
This file is part of Nomad .
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
(define-module (nomad curl)
#:use-module (curl)
#:use-module (ice-9 textual-ports)
#:use-module (nomad init)
#:use-module (nomad util)
#:use-module (nomad download)
#:use-module (web response)
#:export (curl
curl-download))
(define (curl url)
(let ((handle (curl-easy-init)))
(curl-easy-setopt handle 'url url)
(curl-easy-setopt handle 'header #t)
(let* ((result (curl-easy-perform handle))
(port (open-input-string result))
(response (read-response port)))
(curl-easy-cleanup handle)
response)))
(define (curl-download url)
"Downloads URL to 'download-directory"
(let* ((res (curl url))
(file (download-path url))
(out (open-output-file file))
(in (response-body-port res))
(status (response-code res)))
(if (= status 200)
(copy-to-port out in)
#f)))
| null | https://raw.githubusercontent.com/mrosset/nomad/c94a65ede94d86eff039d2ef62d5ef3df609568a/scheme/nomad/curl.scm | scheme | Curl --- download things from network protocols
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>. |
Copyright ( C ) 2019 Amar Singh< >
This file is part of Nomad .
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(define-module (nomad curl)
#:use-module (curl)
#:use-module (ice-9 textual-ports)
#:use-module (nomad init)
#:use-module (nomad util)
#:use-module (nomad download)
#:use-module (web response)
#:export (curl
curl-download))
(define (curl url)
(let ((handle (curl-easy-init)))
(curl-easy-setopt handle 'url url)
(curl-easy-setopt handle 'header #t)
(let* ((result (curl-easy-perform handle))
(port (open-input-string result))
(response (read-response port)))
(curl-easy-cleanup handle)
response)))
(define (curl-download url)
"Downloads URL to 'download-directory"
(let* ((res (curl url))
(file (download-path url))
(out (open-output-file file))
(in (response-body-port res))
(status (response-code res)))
(if (= status 200)
(copy-to-port out in)
#f)))
|
8c1d3bd86ac6b2450c052644c520e547cdbd8d099dbd189d90ecb295accb2f40 | tezos/tezos-mirror | operation_selection.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2021 Dynamic Ledger Solutions , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Protocol
open Alpha_context
open Tezos_protocol_environment
type simulation_result = {
validation_result : validation_result option;
block_header_metadata : Apply_results.block_metadata option;
operations : packed_operation list list;
operations_hash : Operation_list_list_hash.t;
}
* [ filter_operations_with_simulation incremental fees_config
~hard_gas_limit_per_block ops ] tries to validate prioritized operations ( and
apply them if [ incremental ] has been initialised with an
[ application_state ] ) and filter them regarding the quota of each validation
pass . Manager operations are prioritized based on a weight computed from
their fees / gas / bytes . [ filter_operations_with_simulation ] function returns a
[ ] , containing the validated operation , their resulting
[ operations_hash ] , optional [ validation_result ] and [ block_header_metadata ]
if the operations were applied .
~hard_gas_limit_per_block ops] tries to validate prioritized operations (and
apply them if [incremental] has been initialised with an
[application_state]) and filter them regarding the quota of each validation
pass. Manager operations are prioritized based on a weight computed from
their fees/gas/bytes. [filter_operations_with_simulation] function returns a
[simulation_result], containing the validated operation, their resulting
[operations_hash], optional [validation_result] and [block_header_metadata]
if the operations were applied. *)
val filter_operations_with_simulation :
Baking_simulator.incremental ->
Baking_configuration.fees_config ->
hard_gas_limit_per_block:Gas.Arith.integral ->
Operation_pool.Prioritized.t ->
simulation_result tzresult Lwt.t
(** [filter_operations_without_simulation fees_config ~hard_gas_limit_per_block
ops] is similar to [filter_operations_with_simulation] but does not validate
(and apply) operations from [ops] and returns only the operations instead of
a [simulation_result].
Hypothesis: operations from [ops] have previously been validated. *)
val filter_operations_without_simulation :
Baking_configuration.fees_config ->
hard_gas_limit_per_block:Gas.Arith.integral ->
Operation_pool.Prioritized.t ->
packed_operation list list
(** [filter_consensus_operations_only incremental ops] is similar to
[filter_operations_with_simulation] but only filters consensus operations
from [ops]. *)
val filter_consensus_operations_only :
Baking_simulator.incremental ->
Operation_pool.ordered_pool ->
(Baking_simulator.incremental * Operation_pool.ordered_pool) tzresult Lwt.t
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/e5ca6c3e274939f1206426962aa4c02e1a1d5319/src/proto_016_PtMumbai/lib_delegate/operation_selection.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* [filter_operations_without_simulation fees_config ~hard_gas_limit_per_block
ops] is similar to [filter_operations_with_simulation] but does not validate
(and apply) operations from [ops] and returns only the operations instead of
a [simulation_result].
Hypothesis: operations from [ops] have previously been validated.
* [filter_consensus_operations_only incremental ops] is similar to
[filter_operations_with_simulation] but only filters consensus operations
from [ops]. | Copyright ( c ) 2021 Dynamic Ledger Solutions , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
open Alpha_context
open Tezos_protocol_environment
type simulation_result = {
validation_result : validation_result option;
block_header_metadata : Apply_results.block_metadata option;
operations : packed_operation list list;
operations_hash : Operation_list_list_hash.t;
}
* [ filter_operations_with_simulation incremental fees_config
~hard_gas_limit_per_block ops ] tries to validate prioritized operations ( and
apply them if [ incremental ] has been initialised with an
[ application_state ] ) and filter them regarding the quota of each validation
pass . Manager operations are prioritized based on a weight computed from
their fees / gas / bytes . [ filter_operations_with_simulation ] function returns a
[ ] , containing the validated operation , their resulting
[ operations_hash ] , optional [ validation_result ] and [ block_header_metadata ]
if the operations were applied .
~hard_gas_limit_per_block ops] tries to validate prioritized operations (and
apply them if [incremental] has been initialised with an
[application_state]) and filter them regarding the quota of each validation
pass. Manager operations are prioritized based on a weight computed from
their fees/gas/bytes. [filter_operations_with_simulation] function returns a
[simulation_result], containing the validated operation, their resulting
[operations_hash], optional [validation_result] and [block_header_metadata]
if the operations were applied. *)
val filter_operations_with_simulation :
Baking_simulator.incremental ->
Baking_configuration.fees_config ->
hard_gas_limit_per_block:Gas.Arith.integral ->
Operation_pool.Prioritized.t ->
simulation_result tzresult Lwt.t
val filter_operations_without_simulation :
Baking_configuration.fees_config ->
hard_gas_limit_per_block:Gas.Arith.integral ->
Operation_pool.Prioritized.t ->
packed_operation list list
val filter_consensus_operations_only :
Baking_simulator.incremental ->
Operation_pool.ordered_pool ->
(Baking_simulator.incremental * Operation_pool.ordered_pool) tzresult Lwt.t
|
1d38d83fa19a82ac778ac7fa05321395cfb1d3466856cc07d1c97f983436a780 | jrh13/hol-light | lists.ml | (* ========================================================================= *)
(* Theory of lists, plus characters and strings as lists of characters. *)
(* *)
, University of Cambridge Computer Laboratory
(* *)
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
( c ) Copyright , 2014
(* ========================================================================= *)
needs "ind_types.ml";;
(* ------------------------------------------------------------------------- *)
Standard tactic for list induction using MATCH_MP_TAC list_INDUCT
(* ------------------------------------------------------------------------- *)
let LIST_INDUCT_TAC =
let list_INDUCT = prove
(`!P:(A)list->bool. P [] /\ (!h t. P t ==> P (CONS h t)) ==> !l. P l`,
MATCH_ACCEPT_TAC list_INDUCT) in
MATCH_MP_TAC list_INDUCT THEN
CONJ_TAC THENL [ALL_TAC; GEN_TAC THEN GEN_TAC THEN DISCH_TAC];;
(* ------------------------------------------------------------------------- *)
(* Basic definitions. *)
(* ------------------------------------------------------------------------- *)
let HD = new_recursive_definition list_RECURSION
`HD(CONS (h:A) t) = h`;;
let TL = new_recursive_definition list_RECURSION
`TL(CONS (h:A) t) = t`;;
let APPEND = new_recursive_definition list_RECURSION
`(!l:(A)list. APPEND [] l = l) /\
(!h t l. APPEND (CONS h t) l = CONS h (APPEND t l))`;;
let REVERSE = new_recursive_definition list_RECURSION
`(REVERSE [] = []) /\
(REVERSE (CONS (x:A) l) = APPEND (REVERSE l) [x])`;;
let LENGTH = new_recursive_definition list_RECURSION
`(LENGTH [] = 0) /\
(!h:A. !t. LENGTH (CONS h t) = SUC (LENGTH t))`;;
let MAP = new_recursive_definition list_RECURSION
`(!f:A->B. MAP f NIL = NIL) /\
(!f h t. MAP f (CONS h t) = CONS (f h) (MAP f t))`;;
let LAST = new_recursive_definition list_RECURSION
`LAST (CONS (h:A) t) = if t = [] then h else LAST t`;;
let BUTLAST = new_recursive_definition list_RECURSION
`(BUTLAST [] = []) /\
(BUTLAST (CONS h t) = if t = [] then [] else CONS h (BUTLAST t))`;;
let REPLICATE = new_recursive_definition num_RECURSION
`(REPLICATE 0 x = []) /\
(REPLICATE (SUC n) x = CONS x (REPLICATE n x))`;;
let NULL = new_recursive_definition list_RECURSION
`(NULL [] = T) /\
(NULL (CONS h t) = F)`;;
let ALL = new_recursive_definition list_RECURSION
`(ALL P [] = T) /\
(ALL P (CONS h t) <=> P h /\ ALL P t)`;;
let EX = new_recursive_definition list_RECURSION
`(EX P [] = F) /\
(EX P (CONS h t) <=> P h \/ EX P t)`;;
let ITLIST = new_recursive_definition list_RECURSION
`(ITLIST f [] b = b) /\
(ITLIST f (CONS h t) b = f h (ITLIST f t b))`;;
let MEM = new_recursive_definition list_RECURSION
`(MEM x [] <=> F) /\
(MEM x (CONS h t) <=> (x = h) \/ MEM x t)`;;
let ALL2_DEF = new_recursive_definition list_RECURSION
`(ALL2 P [] l2 <=> (l2 = [])) /\
(ALL2 P (CONS h1 t1) l2 <=>
if l2 = [] then F
else P h1 (HD l2) /\ ALL2 P t1 (TL l2))`;;
let ALL2 = prove
(`(ALL2 P [] [] <=> T) /\
(ALL2 P (CONS h1 t1) [] <=> F) /\
(ALL2 P [] (CONS h2 t2) <=> F) /\
(ALL2 P (CONS h1 t1) (CONS h2 t2) <=> P h1 h2 /\ ALL2 P t1 t2)`,
REWRITE_TAC[distinctness "list"; ALL2_DEF; HD; TL]);;
let MAP2_DEF = new_recursive_definition list_RECURSION
`(MAP2 f [] l = []) /\
(MAP2 f (CONS h1 t1) l = CONS (f h1 (HD l)) (MAP2 f t1 (TL l)))`;;
let MAP2 = prove
(`(MAP2 f [] [] = []) /\
(MAP2 f (CONS h1 t1) (CONS h2 t2) = CONS (f h1 h2) (MAP2 f t1 t2))`,
REWRITE_TAC[MAP2_DEF; HD; TL]);;
let EL = new_recursive_definition num_RECURSION
`(EL 0 l = HD l) /\
(EL (SUC n) l = EL n (TL l))`;;
let FILTER = new_recursive_definition list_RECURSION
`(FILTER P [] = []) /\
(FILTER P (CONS h t) = if P h then CONS h (FILTER P t) else FILTER P t)`;;
let ASSOC = new_recursive_definition list_RECURSION
`ASSOC a (CONS h t) = if FST h = a then SND h else ASSOC a t`;;
let ITLIST2_DEF = new_recursive_definition list_RECURSION
`(ITLIST2 f [] l2 b = b) /\
(ITLIST2 f (CONS h1 t1) l2 b = f h1 (HD l2) (ITLIST2 f t1 (TL l2) b))`;;
let ITLIST2 = prove
(`(ITLIST2 f [] [] b = b) /\
(ITLIST2 f (CONS h1 t1) (CONS h2 t2) b = f h1 h2 (ITLIST2 f t1 t2 b))`,
REWRITE_TAC[ITLIST2_DEF; HD; TL]);;
let ZIP_DEF = new_recursive_definition list_RECURSION
`(ZIP [] l2 = []) /\
(ZIP (CONS h1 t1) l2 = CONS (h1,HD l2) (ZIP t1 (TL l2)))`;;
let ZIP = prove
(`(ZIP [] [] = []) /\
(ZIP (CONS h1 t1) (CONS h2 t2) = CONS (h1,h2) (ZIP t1 t2))`,
REWRITE_TAC[ZIP_DEF; HD; TL]);;
let ALLPAIRS = new_recursive_definition list_RECURSION
`(ALLPAIRS f [] l <=> T) /\
(ALLPAIRS f (CONS h t) l <=> ALL (f h) l /\ ALLPAIRS f t l)`;;
let PAIRWISE = new_recursive_definition list_RECURSION
`(PAIRWISE (r:A->A->bool) [] <=> T) /\
(PAIRWISE (r:A->A->bool) (CONS h t) <=> ALL (r h) t /\ PAIRWISE r t)`;;
let list_of_seq = new_recursive_definition num_RECURSION
`list_of_seq (s:num->A) 0 = [] /\
list_of_seq s (SUC n) = APPEND (list_of_seq s n) [s n]`;;
(* ------------------------------------------------------------------------- *)
(* Various trivial theorems. *)
(* ------------------------------------------------------------------------- *)
let NOT_CONS_NIL = prove
(`!(h:A) t. ~(CONS h t = [])`,
REWRITE_TAC[distinctness "list"]);;
let LAST_CLAUSES = prove
(`(LAST [h:A] = h) /\
(LAST (CONS h (CONS k t)) = LAST (CONS k t))`,
REWRITE_TAC[LAST; NOT_CONS_NIL]);;
let APPEND_NIL = prove
(`!l:A list. APPEND l [] = l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND]);;
let APPEND_ASSOC = prove
(`!(l:A list) m n. APPEND l (APPEND m n) = APPEND (APPEND l m) n`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND]);;
let REVERSE_APPEND = prove
(`!(l:A list) m. REVERSE (APPEND l m) = APPEND (REVERSE m) (REVERSE l)`,
LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[APPEND; REVERSE; APPEND_NIL; APPEND_ASSOC]);;
let REVERSE_REVERSE = prove
(`!l:A list. REVERSE(REVERSE l) = l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[REVERSE; REVERSE_APPEND; APPEND]);;
let REVERSE_EQ_EMPTY = prove
(`!l:A list. REVERSE l = [] <=> l = []`,
MESON_TAC[REVERSE_REVERSE; REVERSE]);;
let CONS_11 = prove
(`!(h1:A) h2 t1 t2. (CONS h1 t1 = CONS h2 t2) <=> (h1 = h2) /\ (t1 = t2)`,
REWRITE_TAC[injectivity "list"]);;
let list_CASES = prove
(`!l:(A)list. (l = []) \/ ?h t. l = CONS h t`,
LIST_INDUCT_TAC THEN REWRITE_TAC[CONS_11; NOT_CONS_NIL] THEN
MESON_TAC[]);;
let LIST_EQ = prove
(`!l1 l2:A list.
l1 = l2 <=>
LENGTH l1 = LENGTH l2 /\ !n. n < LENGTH l2 ==> EL n l1 = EL n l2`,
REPEAT LIST_INDUCT_TAC THEN
REWRITE_TAC[NOT_CONS_NIL; CONS_11; LENGTH; CONJUNCT1 LT; NOT_SUC] THEN
ASM_REWRITE_TAC[SUC_INJ] THEN
GEN_REWRITE_TAC (RAND_CONV o RAND_CONV)
[MESON[num_CASES] `(!n. P n) <=> P 0 /\ (!n. P(SUC n))`] THEN
REWRITE_TAC[EL; HD; TL; LT_0; LT_SUC; CONJ_ACI]);;
let LENGTH_APPEND = prove
(`!(l:A list) m. LENGTH(APPEND l m) = LENGTH l + LENGTH m`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND; LENGTH; ADD_CLAUSES]);;
let MAP_APPEND = prove
(`!f:A->B. !l1 l2. MAP f (APPEND l1 l2) = APPEND (MAP f l1) (MAP f l2)`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; APPEND]);;
let LENGTH_MAP = prove
(`!l. !f:A->B. LENGTH (MAP f l) = LENGTH l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; LENGTH]);;
let LENGTH_EQ_NIL = prove
(`!l:A list. (LENGTH l = 0) <=> (l = [])`,
LIST_INDUCT_TAC THEN REWRITE_TAC[LENGTH; NOT_CONS_NIL; NOT_SUC]);;
let LENGTH_EQ_CONS = prove
(`!l n. (LENGTH l = SUC n) <=> ?h t. (l = CONS h t) /\ (LENGTH t = n)`,
LIST_INDUCT_TAC THEN REWRITE_TAC[LENGTH; NOT_SUC; NOT_CONS_NIL] THEN
ASM_REWRITE_TAC[SUC_INJ; CONS_11] THEN MESON_TAC[]);;
let LENGTH_REVERSE = prove
(`!l:A list. LENGTH(REVERSE l) = LENGTH l`,
LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[REVERSE; LENGTH_APPEND; LENGTH] THEN
REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES]);;
let MAP_o = prove
(`!f:A->B. !g:B->C. !l. MAP (g o f) l = MAP g (MAP f l)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[MAP; o_THM]);;
let MAP_EQ = prove
(`!f g l. ALL (\x. f x = g x) l ==> (MAP f l = MAP g l)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
REWRITE_TAC[MAP; ALL] THEN ASM_MESON_TAC[]);;
let ALL_IMP = prove
(`!P Q l. (!x. MEM x l /\ P x ==> Q x) /\ ALL P l ==> ALL Q l`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
REWRITE_TAC[MEM; ALL] THEN ASM_MESON_TAC[]);;
let NOT_EX = prove
(`!P l. ~(EX P l) <=> ALL (\x. ~(P x)) l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[EX; ALL; DE_MORGAN_THM]);;
let NOT_ALL = prove
(`!P l. ~(ALL P l) <=> EX (\x. ~(P x)) l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[EX; ALL; DE_MORGAN_THM]);;
let ALL_MAP = prove
(`!P f l. ALL P (MAP f l) <=> ALL (P o f) l`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[ALL; MAP; o_THM]);;
let ALL_EQ = prove
(`!l. ALL R l /\ (!x. R x ==> (P x <=> Q x))
==> (ALL P l <=> ALL Q l)`,
LIST_INDUCT_TAC THEN REWRITE_TAC[ALL] THEN
STRIP_TAC THEN BINOP_TAC THEN FIRST_ASSUM MATCH_MP_TAC THEN
ASM_REWRITE_TAC[]);;
let ALL_T = prove
(`!l. ALL (\x. T) l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL]);;
let MAP_EQ_ALL2 = prove
(`!l m. ALL2 (\x y. f x = f y) l m ==> (MAP f l = MAP f m)`,
REPEAT LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; ALL2; CONS_11] THEN
ASM_MESON_TAC[]);;
let ALL2_MAP = prove
(`!P f l. ALL2 P (MAP f l) l <=> ALL (\a. P (f a) a) l`,
GEN_TAC THEN GEN_TAC THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL2; MAP; ALL]);;
let MAP_EQ_DEGEN = prove
(`!l f. ALL (\x. f(x) = x) l ==> (MAP f l = l)`,
LIST_INDUCT_TAC THEN REWRITE_TAC[ALL; MAP; CONS_11] THEN
REPEAT STRIP_TAC THEN ASM_REWRITE_TAC[] THEN
FIRST_ASSUM MATCH_MP_TAC THEN ASM_REWRITE_TAC[]);;
let ALL2_AND_RIGHT = prove
(`!l m P Q. ALL2 (\x y. P x /\ Q x y) l m <=> ALL P l /\ ALL2 Q l m`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL; ALL2] THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL; ALL2] THEN
REWRITE_TAC[CONJ_ACI]);;
let ITLIST_APPEND = prove
(`!f a l1 l2. ITLIST f (APPEND l1 l2) a = ITLIST f l1 (ITLIST f l2 a)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[ITLIST; APPEND]);;
let ITLIST_EXTRA = prove
(`!l. ITLIST f (APPEND l [a]) b = ITLIST f l (f a b)`,
REWRITE_TAC[ITLIST_APPEND; ITLIST]);;
let ALL_MP = prove
(`!P Q l. ALL (\x. P x ==> Q x) l /\ ALL P l ==> ALL Q l`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
REWRITE_TAC[ALL] THEN ASM_MESON_TAC[]);;
let AND_ALL = prove
(`!l. ALL P l /\ ALL Q l <=> ALL (\x. P x /\ Q x) l`,
CONV_TAC(ONCE_DEPTH_CONV SYM_CONV) THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL; CONJ_ACI]);;
(* EX is monotone over an implication valid on the list's members. *)
let EX_IMP = prove
 (`!P Q l. (!x. MEM x l /\ P x ==> Q x) /\ EX P l ==> EX Q l`,
  GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
  REWRITE_TAC[MEM; EX] THEN ASM_MESON_TAC[]);;

(* ALL P is equivalent to P holding of every member. *)
let ALL_MEM = prove
 (`!P l. (!x. MEM x l ==> P x) <=> ALL P l`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN REWRITE_TAC[ALL; MEM] THEN
  ASM_MESON_TAC[]);;

(* REPLICATE n x has length n. *)
let LENGTH_REPLICATE = prove
 (`!n x. LENGTH(REPLICATE n x) = n`,
  INDUCT_TAC THEN ASM_REWRITE_TAC[LENGTH; REPLICATE]);;

(* Membership in a replicated list: only the replicated value, when n > 0. *)
let MEM_REPLICATE = prove
 (`!n x y:A. MEM x (REPLICATE n y) <=> x = y /\ ~(n = 0)`,
  INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; REPLICATE; NOT_SUC] THEN
  MESON_TAC[]);;

(* EX on a mapped list is EX of the composition. *)
let EX_MAP = prove
 (`!P f l. EX P (MAP f l) <=> EX (P o f) l`,
  GEN_TAC THEN GEN_TAC THEN
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; EX; o_THM]);;

(* Pull an existential quantifier inside EX. *)
let EXISTS_EX = prove
 (`!P l. (?x. EX (P x) l) <=> EX (\s. ?x. P x s) l`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[EX] THEN
  ASM_MESON_TAC[]);;

(* Pull a universal quantifier inside ALL. *)
let FORALL_ALL = prove
 (`!P l. (!x. ALL (P x) l) <=> ALL (\s. !x. P x s) l`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL] THEN
  ASM_MESON_TAC[]);;

(* Membership in an append is membership in either part. *)
let MEM_APPEND = prove
 (`!x l1 l2. MEM x (APPEND l1 l2) <=> MEM x l1 \/ MEM x l2`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; APPEND; DISJ_ACI]);;

(* Membership in a mapped list means being the image of some member. *)
let MEM_MAP = prove
 (`!f y l. MEM y (MAP f l) <=> ?x. MEM x l /\ (y = f x)`,
  GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
  ASM_REWRITE_TAC[MEM; MAP] THEN MESON_TAC[]);;
(* FILTER distributes over APPEND. *)
let FILTER_APPEND = prove
 (`!P l1 l2. FILTER P (APPEND l1 l2) = APPEND (FILTER P l1) (FILTER P l2)`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[FILTER; APPEND] THEN
  GEN_TAC THEN COND_CASES_TAC THEN ASM_REWRITE_TAC[APPEND]);;

(* FILTER after MAP equals MAP after FILTER with the composed test. *)
let FILTER_MAP = prove
 (`!P f l. FILTER P (MAP f l) = MAP f (FILTER (P o f) l)`,
  GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
  ASM_REWRITE_TAC[MAP; FILTER; o_THM] THEN COND_CASES_TAC THEN
  REWRITE_TAC[MAP]);;

(* Membership in a filtered list: satisfies the test and was a member. *)
let MEM_FILTER = prove
 (`!P l x. MEM x (FILTER P l) <=> P x /\ MEM x l`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; FILTER] THEN
  GEN_TAC THEN COND_CASES_TAC THEN ASM_REWRITE_TAC[MEM] THEN
  ASM_MESON_TAC[]);;

(* EX P is equivalent to having a member satisfying P. *)
let EX_MEM = prove
 (`!P l. (?x. P x /\ MEM x l) <=> EX P l`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[EX; MEM] THEN
  ASM_MESON_TAC[]);;

(* First projections of a zip of equally long lists recover the first list. *)
let MAP_FST_ZIP = prove
 (`!l1 l2. (LENGTH l1 = LENGTH l2) ==> (MAP FST (ZIP l1 l2) = l1)`,
  LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
  ASM_SIMP_TAC[LENGTH; SUC_INJ; MAP; FST; ZIP; NOT_SUC]);;

(* Second projections of a zip of equally long lists recover the second. *)
let MAP_SND_ZIP = prove
 (`!l1 l2. (LENGTH l1 = LENGTH l2) ==> (MAP SND (ZIP l1 l2) = l2)`,
  LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
  ASM_SIMP_TAC[LENGTH; SUC_INJ; MAP; FST; ZIP; NOT_SUC]);;

(* A zip of equally long lists has that common length. *)
let LENGTH_ZIP = prove
 (`!l1 l2. LENGTH l1 = LENGTH l2 ==> LENGTH(ZIP l1 l2) = LENGTH l2`,
  REPEAT(LIST_INDUCT_TAC ORELSE GEN_TAC) THEN
  ASM_SIMP_TAC[LENGTH; NOT_SUC; ZIP; SUC_INJ]);;

(* The pair (key, ASSOC lookup) occurs in the alist iff the key occurs. *)
let MEM_ASSOC = prove
 (`!l x. MEM (x,ASSOC x l) l <=> MEM x (MAP FST l)`,
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; MAP; ASSOC] THEN
  GEN_TAC THEN COND_CASES_TAC THEN ASM_REWRITE_TAC[] THEN
  ASM_MESON_TAC[PAIR; FST]);;

(* ALL over an append splits into a conjunction. *)
let ALL_APPEND = prove
 (`!P l1 l2. ALL P (APPEND l1 l2) <=> ALL P l1 /\ ALL P l2`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN
  ASM_REWRITE_TAC[ALL; APPEND; GSYM CONJ_ASSOC]);;
(* Every element at an in-range index is a member. *)
let MEM_EL = prove
 (`!l n. n < LENGTH l ==> MEM (EL n l) l`,
  LIST_INDUCT_TAC THEN REWRITE_TAC[MEM; CONJUNCT1 LT; LENGTH] THEN
  INDUCT_TAC THEN ASM_SIMP_TAC[EL; HD; LT_SUC; TL]);;

(* Membership is existence of an in-range index. *)
let MEM_EXISTS_EL = prove
 (`!l x. MEM x l <=> ?i. i < LENGTH l /\ x = EL i l`,
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[LENGTH; EL; MEM; CONJUNCT1 LT] THEN
  GEN_TAC THEN GEN_REWRITE_TAC RAND_CONV
   [MESON[num_CASES] `(?i. P i) <=> P 0 \/ (?i. P(SUC i))`] THEN
  REWRITE_TAC[LT_SUC; LT_0; EL; HD; TL]);;

(* Indexwise universal property over a list is the same as ALL. *)
let ALL_EL = prove
 (`!P l. (!i. i < LENGTH l ==> P (EL i l)) <=> ALL P l`,
  REWRITE_TAC[GSYM ALL_MEM; MEM_EXISTS_EL] THEN MESON_TAC[]);;

(* ALL2 over two mapped lists composes the maps into the relation. *)
let ALL2_MAP2 = prove
 (`!l m. ALL2 P (MAP f l) (MAP g m) = ALL2 (\x y. P (f x) (g y)) l m`,
  LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL2; MAP]);;

(* Conjunction of two ALL2s is ALL2 of the pointwise conjunction. *)
let AND_ALL2 = prove
 (`!P Q l m. ALL2 P l m /\ ALL2 Q l m <=> ALL2 (\x y. P x y /\ Q x y) l m`,
  GEN_TAC THEN GEN_TAC THEN CONV_TAC(ONCE_DEPTH_CONV SYM_CONV) THEN
  LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL2] THEN
  REWRITE_TAC[CONJ_ACI]);;

(* ALLPAIRS with the lists swapped and the relation flipped. *)
let ALLPAIRS_SYM = prove
 (`!P l m. ALLPAIRS P l m <=> ALLPAIRS (\x y. P y x) m l`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN REWRITE_TAC[ALLPAIRS] THEN
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALLPAIRS; ALL] THEN
  ASM_MESON_TAC[]);;

(* ALLPAIRS is the pointwise property over all pairs of members. *)
let ALLPAIRS_MEM = prove
 (`!P l m. (!x y. MEM x l /\ MEM y m ==> P x y) <=> ALLPAIRS P l m`,
  GEN_TAC THEN
  LIST_INDUCT_TAC THEN REWRITE_TAC[ALLPAIRS; GSYM ALL_MEM; MEM] THEN
  ASM_MESON_TAC[]);;

(* ALLPAIRS over two mapped lists composes the maps into the relation. *)
let ALLPAIRS_MAP = prove
 (`!P l m. ALLPAIRS P (MAP f l) (MAP g m) <=>
           ALLPAIRS (\x y. P (f x) (g y)) l m`,
  REWRITE_TAC[GSYM ALLPAIRS_MEM; MEM_MAP] THEN MESON_TAC[]);;

(* Replace the relation in ALLPAIRS when the two agree under side guards. *)
let ALLPAIRS_EQ = prove
 (`!l m. !P Q. ALL P (l:A list) /\ ALL Q (m:B list) /\
               (!p q. P p /\ Q q ==> (R p q <=> R' p q))
         ==> (ALLPAIRS R l m <=> ALLPAIRS R' l m)`,
  REWRITE_TAC[GSYM ALLPAIRS_MEM; GSYM ALL_MEM] THEN MESON_TAC[]);;

(* ALL2 of a list with itself degenerates to ALL of the diagonal. *)
let ALL2_ALL = prove
 (`!P l. ALL2 P l l <=> ALL (\x. P x x) l`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN
  ASM_REWRITE_TAC[ALL2; ALL]);;
(* An append is empty iff both parts are. *)
let APPEND_EQ_NIL = prove
 (`!l m. (APPEND l m = []) <=> (l = []) /\ (m = [])`,
  REWRITE_TAC[GSYM LENGTH_EQ_NIL; LENGTH_APPEND; ADD_EQ_0]);;

(* APPEND is left-cancellable. *)
let APPEND_LCANCEL = prove
 (`!l1 l2 l3:A list. APPEND l1 l2 = APPEND l1 l3 <=> l2 = l3`,
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND; CONS_11]);;

(* APPEND is right-cancellable (by reversing and using left cancellation). *)
let APPEND_RCANCEL = prove
 (`!l1 l2 l3:A list. APPEND l1 l3 = APPEND l2 l3 <=> l1 = l2`,
  ONCE_REWRITE_TAC[MESON[REVERSE_REVERSE]
   `l = l' <=> REVERSE l = REVERSE l'`] THEN
  REWRITE_TAC[REVERSE_APPEND; APPEND_LCANCEL]);;

(* MAP2 of equally long lists preserves the common length. *)
let LENGTH_MAP2 = prove
 (`!f l m. LENGTH l = LENGTH m ==> LENGTH(MAP2 f l m) = LENGTH m`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
  ASM_SIMP_TAC[LENGTH; NOT_CONS_NIL; NOT_SUC; MAP2; SUC_INJ]);;

(* Indexing into a MAP2 applies f to the two indexed elements. *)
let EL_MAP2 = prove
 (`!f l m k. k < LENGTH l /\ k < LENGTH m
             ==> EL k (MAP2 f l m) = f (EL k l) (EL k m)`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
  ASM_SIMP_TAC[LENGTH; CONJUNCT1 LT] THEN
  INDUCT_TAC THEN ASM_SIMP_TAC[LENGTH; MAP2; EL; HD; TL; LT_SUC]);;

(* A mapped list is empty iff the original is. *)
let MAP_EQ_NIL = prove
 (`!f l. MAP f l = [] <=> l = []`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN REWRITE_TAC[MAP; NOT_CONS_NIL]);;

(* MAP f is injective on lists iff f is injective. *)
let INJECTIVE_MAP = prove
 (`!f:A->B. (!l m. MAP f l = MAP f m ==> l = m) <=>
            (!x y. f x = f y ==> x = y)`,
  GEN_TAC THEN EQ_TAC THEN DISCH_TAC THENL
   [MAP_EVERY X_GEN_TAC [`x:A`; `y:A`] THEN DISCH_TAC THEN
    FIRST_X_ASSUM(MP_TAC o SPECL [`[x:A]`; `[y:A]`]) THEN
    ASM_REWRITE_TAC[MAP; CONS_11];
    REPEAT LIST_INDUCT_TAC THEN ASM_SIMP_TAC[MAP; NOT_CONS_NIL; CONS_11] THEN
    ASM_MESON_TAC[]]);;

(* MAP f is surjective on lists iff f is surjective. *)
let SURJECTIVE_MAP = prove
 (`!f:A->B. (!m. ?l. MAP f l = m) <=> (!y. ?x. f x = y)`,
  GEN_TAC THEN EQ_TAC THEN DISCH_TAC THENL
   [X_GEN_TAC `y:B` THEN FIRST_X_ASSUM(MP_TAC o SPEC `[y:B]`) THEN
    REWRITE_TAC[LEFT_IMP_EXISTS_THM] THEN
    LIST_INDUCT_TAC THEN REWRITE_TAC[MAP; CONS_11; NOT_CONS_NIL; MAP_EQ_NIL];
    MATCH_MP_TAC list_INDUCT] THEN
  ASM_MESON_TAC[MAP]);;

(* Mapping the identity function changes nothing. *)
let MAP_ID = prove
 (`!l. MAP (\x. x) l = l`,
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP]);;

(* Eta-collapsed form of MAP_ID using the I combinator. *)
let MAP_I = prove
 (`MAP I = I`,
  REWRITE_TAC[FUN_EQ_THM; I_DEF; MAP_ID]);;
(* Recursion equations for BUTLAST in a directly applicable form. *)
let BUTLAST_CLAUSES = prove
 (`BUTLAST([]:A list) = [] /\
   (!a:A. BUTLAST [a] = []) /\
   (!(a:A) h t. BUTLAST(CONS a (CONS h t)) = CONS a (BUTLAST(CONS h t)))`,
  REWRITE_TAC[BUTLAST; NOT_CONS_NIL]);;

(* BUTLAST over an append, by cases on the second list being empty. *)
let BUTLAST_APPEND = prove
 (`!l m:A list. BUTLAST(APPEND l m) =
                if m = [] then BUTLAST l else APPEND l (BUTLAST m)`,
  SIMP_TAC[COND_RAND; APPEND_NIL; MESON[]
   `(if p then T else q) <=> ~p ==> q`] THEN
  LIST_INDUCT_TAC THEN ASM_SIMP_TAC[APPEND; BUTLAST; APPEND_EQ_NIL]);;

(* A nonempty list is its butlast followed by its last element. *)
let APPEND_BUTLAST_LAST = prove
 (`!l. ~(l = []) ==> APPEND (BUTLAST l) [LAST l] = l`,
  LIST_INDUCT_TAC THEN REWRITE_TAC[LAST; BUTLAST; NOT_CONS_NIL] THEN
  COND_CASES_TAC THEN ASM_SIMP_TAC[APPEND]);;

(* LAST of an append, by cases on the second list being empty. *)
let LAST_APPEND = prove
 (`!p q. LAST(APPEND p q) = if q = [] then LAST p else LAST q`,
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND; LAST; APPEND_EQ_NIL] THEN
  MESON_TAC[]);;

(* The tail of a nonempty list is one element shorter. *)
let LENGTH_TL = prove
 (`!l. ~(l = []) ==> LENGTH(TL l) = LENGTH l - 1`,
  LIST_INDUCT_TAC THEN REWRITE_TAC[LENGTH; TL; ARITH; SUC_SUB1]);;

(* The last element of a reversal is the head. *)
let LAST_REVERSE = prove
 (`!l:A list. ~(l = []) ==> LAST(REVERSE l) = HD l`,
  LIST_INDUCT_TAC THEN
  REWRITE_TAC[HD; REVERSE; LAST; LAST_APPEND; NOT_CONS_NIL]);;

(* The head of a reversal is the last element. *)
let HD_REVERSE = prove
 (`!l:A list. ~(l = []) ==> HD(REVERSE l) = LAST l`,
  MESON_TAC[LAST_REVERSE; REVERSE_REVERSE; REVERSE_EQ_EMPTY]);;

(* Indexing into an append, shifting the index past the first part. *)
let EL_APPEND = prove
 (`!k l m. EL k (APPEND l m) = if k < LENGTH l then EL k l
                               else EL (k - LENGTH l) m`,
  INDUCT_TAC THEN REWRITE_TAC[EL] THEN
  LIST_INDUCT_TAC THEN
  REWRITE_TAC[HD; APPEND; LENGTH; SUB_0; EL; LT_0; CONJUNCT1 LT] THEN
  ASM_REWRITE_TAC[TL; LT_SUC; SUB_SUC]);;

(* Indexing into a tail shifts the index up by one. *)
let EL_TL = prove
 (`!n. EL n (TL l) = EL (n + 1) l`,
  REWRITE_TAC[GSYM ADD1; EL]);;

(* Indexing into a cons, by cases on the index being zero. *)
let EL_CONS = prove
 (`!n h t. EL n (CONS h t) = if n = 0 then h else EL (n - 1) t`,
  INDUCT_TAC THEN REWRITE_TAC[EL; HD; TL; NOT_SUC; SUC_SUB1]);;

(* The last element is the one indexed at length - 1. *)
let LAST_EL = prove
 (`!l. ~(l = []) ==> LAST l = EL (LENGTH l - 1) l`,
  LIST_INDUCT_TAC THEN REWRITE_TAC[LAST; LENGTH; SUC_SUB1] THEN
  DISCH_TAC THEN COND_CASES_TAC THEN
  ASM_SIMP_TAC[LENGTH; EL; HD; EL_CONS; LENGTH_EQ_NIL]);;

(* HD of an append, by cases on the first list being empty. *)
let HD_APPEND = prove
 (`!l m:A list. HD(APPEND l m) = if l = [] then HD m else HD l`,
  LIST_INDUCT_TAC THEN REWRITE_TAC[HD; APPEND; NOT_CONS_NIL]);;

(* A nonempty list is the cons of its head and tail. *)
let CONS_HD_TL = prove
 (`!l. ~(l = []) ==> l = CONS (HD l) (TL l)`,
  LIST_INDUCT_TAC THEN REWRITE_TAC[NOT_CONS_NIL;HD;TL]);;
(* Indexing commutes with MAP inside the list's range. *)
let EL_MAP = prove
 (`!f n l. n < LENGTH l ==> EL n (MAP f l) = f(EL n l)`,
  GEN_TAC THEN INDUCT_TAC THEN LIST_INDUCT_TAC THEN
  ASM_REWRITE_TAC[LENGTH; CONJUNCT1 LT; LT_0; EL; HD; TL; MAP; LT_SUC]);;

(* REVERSE commutes with MAP. *)
let MAP_REVERSE = prove
 (`!f l. REVERSE(MAP f l) = MAP f (REVERSE l)`,
  GEN_TAC THEN LIST_INDUCT_TAC THEN
  ASM_REWRITE_TAC[MAP; REVERSE; MAP_APPEND]);;

(* ALL over a filtered list is ALL of the guarded implication. *)
let ALL_FILTER = prove
 (`!P Q l:A list. ALL P (FILTER Q l) <=> ALL (\x. Q x ==> P x) l`,
  GEN_TAC THEN GEN_TAC THEN
  LIST_INDUCT_TAC THEN REWRITE_TAC[ALL; FILTER] THEN
  COND_CASES_TAC THEN ASM_REWRITE_TAC[ALL]);;

(* Appending a singleton on the left is just a cons. *)
let APPEND_SING = prove
 (`!h t. APPEND [h] t = CONS h t`,
  REWRITE_TAC[APPEND]);;

(* Split a list at the FIRST occurrence of a member (x not in the prefix). *)
let MEM_APPEND_DECOMPOSE_LEFT = prove
 (`!x:A l. MEM x l <=> ?l1 l2. ~(MEM x l1) /\ l = APPEND l1 (CONS x l2)`,
  REWRITE_TAC[TAUT `(p <=> q) <=> (p ==> q) /\ (q ==> p)`] THEN
  SIMP_TAC[LEFT_IMP_EXISTS_THM; MEM_APPEND; MEM] THEN X_GEN_TAC `x:A` THEN
  MATCH_MP_TAC list_INDUCT THEN REWRITE_TAC[MEM] THEN
  MAP_EVERY X_GEN_TAC [`y:A`; `l:A list`] THEN
  ASM_CASES_TAC `x:A = y` THEN ASM_MESON_TAC[MEM; APPEND]);;

(* Split a list at SOME occurrence of a member. *)
let MEM_APPEND_DECOMPOSE = prove
 (`!x:A l. MEM x l <=> ?l1 l2. l = APPEND l1 (CONS x l2)`,
  REWRITE_TAC[TAUT `(p <=> q) <=> (p ==> q) /\ (q ==> p)`] THEN
  SIMP_TAC[LEFT_IMP_EXISTS_THM; MEM_APPEND; MEM] THEN
  ONCE_REWRITE_TAC[MEM_APPEND_DECOMPOSE_LEFT] THEN MESON_TAC[]);;

(* PAIRWISE over an append: both halves plus all cross pairs. *)
let PAIRWISE_APPEND = prove
 (`!R:A->A->bool l m.
        PAIRWISE R (APPEND l m) <=>
        PAIRWISE R l /\ PAIRWISE R m /\ (!x y. MEM x l /\ MEM y m ==> R x y)`,
  GEN_TAC THEN MATCH_MP_TAC list_INDUCT THEN
  REWRITE_TAC[APPEND; PAIRWISE; MEM; ALL_APPEND; GSYM ALL_MEM] THEN
  MESON_TAC[]);;
(* PAIRWISE over a mapped list composes the map into the relation. *)
let PAIRWISE_MAP = prove
 (`!R f:A->B l.
        PAIRWISE R (MAP f l) <=> PAIRWISE (\x y. R (f x) (f y)) l`,
  GEN_TAC THEN GEN_TAC THEN
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[PAIRWISE; MAP; ALL_MAP; o_DEF]);;

(* Weaken the relation in PAIRWISE, using membership of both arguments. *)
let PAIRWISE_IMPLIES = prove
 (`!R:A->A->bool R' l.
        PAIRWISE R l /\ (!x y. MEM x l /\ MEM y l /\ R x y ==> R' x y)
        ==> PAIRWISE R' l`,
  GEN_TAC THEN GEN_TAC THEN MATCH_MP_TAC list_INDUCT THEN
  REWRITE_TAC[PAIRWISE; GSYM ALL_MEM; MEM] THEN MESON_TAC[]);;

(* For a transitive relation, PAIRWISE reduces to adjacent pairs. *)
let PAIRWISE_TRANSITIVE = prove
 (`!R x y:A l.
      (!x y z. R x y /\ R y z ==> R x z)
      ==> (PAIRWISE R (CONS x (CONS y l)) <=> R x y /\ PAIRWISE R (CONS y l))`,
  REPEAT STRIP_TAC THEN
  REWRITE_TAC[PAIRWISE; ALL; GSYM CONJ_ASSOC;
              TAUT `(p /\ q /\ r /\ s <=> p /\ r /\ s) <=>
                    p /\ s ==> r ==> q`] THEN
  STRIP_TAC THEN MATCH_MP_TAC(REWRITE_RULE[IMP_CONJ] ALL_IMP) THEN
  ASM_MESON_TAC[]);;

(* list_of_seq s n has length n. *)
let LENGTH_LIST_OF_SEQ = prove
 (`!s:num->A n. LENGTH(list_of_seq s n) = n`,
  GEN_TAC THEN INDUCT_TAC THEN
  ASM_REWRITE_TAC[list_of_seq; LENGTH; LENGTH_APPEND; ADD_CLAUSES]);;

(* Indexing into list_of_seq recovers the underlying sequence. *)
let EL_LIST_OF_SEQ = prove
 (`!s:num->A m n. m < n ==> EL m (list_of_seq s n) = s m`,
  GEN_TAC THEN ONCE_REWRITE_TAC[SWAP_FORALL_THM] THEN
  INDUCT_TAC THEN
  REWRITE_TAC[list_of_seq; LT; EL_APPEND; LENGTH_LIST_OF_SEQ] THEN
  REPEAT STRIP_TAC THEN ASM_SIMP_TAC[SUB_REFL; EL; HD; LT_REFL]);;

(* list_of_seq is empty iff the requested length is zero. *)
let LIST_OF_SEQ_EQ_NIL = prove
 (`!s:num->A n. list_of_seq s n = [] <=> n = 0`,
  REWRITE_TAC[GSYM LENGTH_EQ_NIL; LENGTH_LIST_OF_SEQ; LENGTH]);;
(* ------------------------------------------------------------------------- *)
(* Syntax. *)
(* ------------------------------------------------------------------------- *)
(* Construct the term `CONS h t`, instantiating CONS's element type      *)
(* from the type of h.  Fails with "mk_cons" on any ill-typed input.     *)
let mk_cons h t =
  try let consfn = mk_const("CONS",[type_of h,aty]) in
      list_mk_comb(consfn,[h; t])
  with Failure _ -> failwith "mk_cons";;
(* Construct a list term [t1; ...; tn] of element type ty; the explicit  *)
(* type is needed so that the empty list is not ambiguous.               *)
let mk_list (tms,ty) =
  try let nil = mk_const("NIL",[ty,aty]) in
      (match tms with
         [] -> nil
       | _ -> let cons = mk_const("CONS",[ty,aty]) in
              itlist (mk_binop cons) tms nil)
  with Failure _ -> failwith "mk_list";;
(* Construct a list term from a nonempty OCaml list of terms, inferring  *)
(* the element type from the first term; fails with "mk_flist" on []     *)
(* or on ill-typed input.                                                *)
let mk_flist tms =
  match tms with
    [] -> failwith "mk_flist"
  | h::_ -> (try mk_list(tms,type_of h)
             with Failure _ -> failwith "mk_flist");;
(* ------------------------------------------------------------------------- *)
(* Extra monotonicity theorems for inductive definitions. *)
(* ------------------------------------------------------------------------- *)
(* ALL is monotone: a pointwise implication of predicates lifts to lists. *)
let MONO_ALL = prove
 (`(!x:A. P x ==> Q x) ==> ALL P l ==> ALL Q l`,
  DISCH_TAC THEN SPEC_TAC(`l:A list`,`l:A list`) THEN
  LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL] THEN ASM_MESON_TAC[]);;

(* ALL2 is monotone in its relation argument. *)
let MONO_ALL2 = prove
 (`(!x y. (P:A->B->bool) x y ==> Q x y) ==> ALL2 P l l' ==> ALL2 Q l l'`,
  DISCH_TAC THEN
  SPEC_TAC(`l':B list`,`l':B list`) THEN SPEC_TAC(`l:A list`,`l:A list`) THEN
  LIST_INDUCT_TAC THEN REWRITE_TAC[ALL2_DEF] THEN
  GEN_TAC THEN COND_CASES_TAC THEN REWRITE_TAC[] THEN ASM_MESON_TAC[]);;

(* Register both monotonicity facts so inductive definitions may use     *)
(* ALL and ALL2 in their hypotheses.                                     *)
monotonicity_theorems := [MONO_ALL; MONO_ALL2] @ !monotonicity_theorems;;
(* ------------------------------------------------------------------------- *)
(* Apply a conversion down a list. *)
(* ------------------------------------------------------------------------- *)
(* Apply a conversion to each element of a literal list term, i.e. map   *)
(* conv down `[t1; ...; tn]`.  Fails with "LIST_CONV" on a non-NIL       *)
(* constant; as in the original, a non-constant, non-CONS term lets      *)
(* dest_const's own failure propagate.                                   *)
let rec LIST_CONV conv tm =
  if is_cons tm then
    COMB2_CONV (RAND_CONV conv) (LIST_CONV conv) tm
  else
    match dest_const tm with
      "NIL",_ -> REFL tm
    | _ -> failwith "LIST_CONV";;
(* ------------------------------------------------------------------------- *)
(* Type of characters, like the HOL88 "ascii" type, with syntax *)
(* constructors and equality conversions for chars and strings. *)
(* ------------------------------------------------------------------------- *)
(* An 8-bit character, represented as eight boolean (bit) components. *)
let char_INDUCT,char_RECURSION = define_type
 "char = ASCII bool bool bool bool bool bool bool bool";;

(* Strings are simply lists of characters. *)
new_type_abbrev("string",`:char list`);;
(* Conversions between HOL character/string terms and OCaml values, plus *)
(* proof-producing equality tests for both.  Note that the exported      *)
(* dest_char is bound to char_of_term below, so it yields an OCaml char. *)
let dest_char,mk_char,dest_string,mk_string,CHAR_EQ_CONV,STRING_EQ_CONV =
  (* Convert the boolean constant `T` or `F` to an OCaml boolean. *)
  let bool_of_term t =
    match t with
      Const("T",_) -> true
    | Const("F",_) -> false
    | _ -> failwith "bool_of_term" in
  (* Character code of an `ASCII ...` term with exactly 8 boolean        *)
  (* arguments; the fold makes the first ASCII argument the most         *)
  (* significant bit.                                                    *)
  let code_of_term t =
    let f,tms = strip_comb t in
    if not(is_const f && fst(dest_const f) = "ASCII")
       || not(length tms = 8) then failwith "code_of_term"
    else
       itlist (fun b f -> if b then 1 + 2 * f else 2 * f)
              (map bool_of_term (rev tms)) 0 in
  let char_of_term = Char.chr o code_of_term in
  (* Map a `:char list` term to an OCaml string, checking the element    *)
  (* type really is "char", and applying OCaml escaping to the result.   *)
  let dest_string tm =
    try let tms = dest_list tm in
        if fst(dest_type(hd(snd(dest_type(type_of tm))))) <> "char"
        then fail() else
        let ccs = map (String.make 1 o char_of_term) tms in
        String.escaped (implode ccs)
    with Failure _ -> failwith "dest_string" in
  let mk_bool b =
    let true_tm,false_tm = `T`,`F` in
    if b then true_tm else false_tm in
  (* Character term for a code 0..255; all 256 terms are built once and  *)
  (* cached in an array.                                                 *)
  let mk_code =
    let ascii_tm = `ASCII` in
    let mk_code c =
      let lis = map (fun i -> mk_bool((c / (1 lsl i)) mod 2 = 1)) (0--7) in
      itlist (fun x y -> mk_comb(y,x)) lis ascii_tm in
    let codes = Array.map mk_code (Array.of_list (0--255)) in
    fun c -> Array.get codes c in
  let mk_char = mk_code o Char.code in
  (* Map an OCaml string to a `:char list` term, character by character. *)
  let mk_string s =
    let ns = map (fun i -> Char.code(String.get s i))
                 (0--(String.length s - 1)) in
    mk_list(map mk_code ns,`:char`) in
  (* Given two distinct character terms, prove |- (c1 = c2) <=> F by     *)
  (* locating the first bit where they differ and instantiating one of   *)
  (* eight pre-proved schematic inequations for that bit position.       *)
  let CHAR_DISTINCTNESS =
    let avars,bvars,cvars =
     [`a0:bool`;`a1:bool`;`a2:bool`;`a3:bool`;`a4:bool`;`a5:bool`;`a6:bool`],
     [`b1:bool`;`b2:bool`;`b3:bool`;`b4:bool`;`b5:bool`;`b6:bool`;`b7:bool`],
     [`c1:bool`;`c2:bool`;`c3:bool`;`c4:bool`;`c5:bool`;`c6:bool`;`c7:bool`] in
    (* Schemas where the first differing bit is F on the left, T on the  *)
    (* right, one conjunct per bit position.                             *)
    let ASCII_NEQS_FT = (map EQF_INTRO o CONJUNCTS o prove)
     (`~(ASCII F b1 b2 b3 b4 b5 b6 b7 = ASCII T c1 c2 c3 c4 c5 c6 c7) /\
       ~(ASCII a0 F b2 b3 b4 b5 b6 b7 = ASCII a0 T c2 c3 c4 c5 c6 c7) /\
       ~(ASCII a0 a1 F b3 b4 b5 b6 b7 = ASCII a0 a1 T c3 c4 c5 c6 c7) /\
       ~(ASCII a0 a1 a2 F b4 b5 b6 b7 = ASCII a0 a1 a2 T c4 c5 c6 c7) /\
       ~(ASCII a0 a1 a2 a3 F b5 b6 b7 = ASCII a0 a1 a2 a3 T c5 c6 c7) /\
       ~(ASCII a0 a1 a2 a3 a4 F b6 b7 = ASCII a0 a1 a2 a3 a4 T c6 c7) /\
       ~(ASCII a0 a1 a2 a3 a4 a5 F b7 = ASCII a0 a1 a2 a3 a4 a5 T c7) /\
       ~(ASCII a0 a1 a2 a3 a4 a5 a6 F = ASCII a0 a1 a2 a3 a4 a5 a6 T)`,
      REWRITE_TAC[injectivity "char"]) in
    (* The symmetric T-on-the-left schemas, obtained by flipping each    *)
    (* FT schema and swapping the b- and c-variables.                    *)
    let ASCII_NEQS_TF =
      let ilist = zip bvars cvars @ zip cvars bvars in
      let f = EQF_INTRO o INST ilist o GSYM o EQF_ELIM in
      map f ASCII_NEQS_FT in
    let rec prefix n l =
      if n = 0 then [] else
      match l with
        h::t -> h :: prefix (n-1) t
      | _ -> l in
    (* Scan two argument lists for the first position where they differ, *)
    (* returning its index, the shared prefix, the second term's bit     *)
    (* there, and the two remaining suffixes.                            *)
    let rec findneq n prefix a b =
      match a,b with
        b1::a, b2::b -> if b1 <> b2 then n,rev prefix,bool_of_term b2,a,b else
                        findneq (n+1) (b1 :: prefix) a b
      | _, _ -> fail() in
    fun c1 c2 ->
      let _,a = strip_comb c1
      and _,b = strip_comb c2 in
      let n,p,b,s1,s2 = findneq 0 [] a b in
      let ss1 = funpow n tl bvars
      and ss2 = funpow n tl cvars in
      let pp = prefix n avars in
      (* FT schemas when the right term has T at the difference, TF ones *)
      (* otherwise; then instantiate prefix and suffix variables.        *)
      let pth = if b then ASCII_NEQS_FT else ASCII_NEQS_TF in
      INST (zip p pp @ zip s1 ss1 @ zip s2 ss2) (el n pth) in
  (* Given two string (char list) terms, prove |- (s1 = s2) <=> T or F   *)
  (* by structural recursion, using character distinctness at the first  *)
  (* differing head.                                                     *)
  let STRING_DISTINCTNESS =
    let xtm,xstm = `x:char`,`xs:string`
    and ytm,ystm = `y:char`,`ys:string`
    and niltm = `[]:string` in
    let NIL_EQ_THM = EQT_INTRO (REFL niltm)
    and CONS_EQ_THM,CONS_NEQ_THM = (CONJ_PAIR o prove)
     (`(CONS x xs:string = CONS x ys <=> xs = ys) /\
       ((x = y <=> F) ==> (CONS x xs:string = CONS y ys <=> F))`,
      REWRITE_TAC[CONS_11] THEN MESON_TAC[])
    and NIL_NEQ_CONS,CONS_NEQ_NIL = (CONJ_PAIR o prove)
     (`(NIL:string = CONS x xs <=> F) /\
       (CONS x xs:string = NIL <=> F)`,
      REWRITE_TAC[NOT_CONS_NIL]) in
    let rec STRING_DISTINCTNESS s1 s2 =
      if s1 = niltm
      then if s2 = niltm then NIL_EQ_THM
           else let c2,s2 = rand (rator s2),rand s2 in
                INST [c2,xtm;s2,xstm] NIL_NEQ_CONS
      else let c1,s1 = rand (rator s1),rand s1 in
           if s2 = niltm then INST [c1,xtm;s1,xstm] CONS_NEQ_NIL
           else let c2,s2 = rand (rator s2),rand s2 in
                if c1 = c2
                (* Equal heads: chain the cons congruence with the       *)
                (* recursive result on the tails.                        *)
                then let th1 = INST [c1,xtm; s1,xstm; s2,ystm] CONS_EQ_THM
                     and th2 = STRING_DISTINCTNESS s1 s2 in
                     TRANS th1 th2
                (* Different heads: discharge the side condition of the  *)
                (* cons inequation with character distinctness.          *)
                else let ilist = [c1,xtm; c2,ytm; s1,xstm; s2,ystm] in
                     let itm = INST ilist CONS_NEQ_THM in
                     MP itm (CHAR_DISTINCTNESS c1 c2) in
    STRING_DISTINCTNESS in
  (* Decide equality of two character terms: reflexivity when they are   *)
  (* syntactically identical, the distinctness theorem otherwise.        *)
  let CHAR_EQ_CONV : conv =
    fun tm ->
      let c1,c2 = dest_eq tm in
      if compare c1 c2 = 0 then EQT_INTRO (REFL c1) else
      CHAR_DISTINCTNESS c1 c2
  (* Likewise for string terms. *)
  and STRING_EQ_CONV tm =
    let ltm,rtm = dest_eq tm in
    if compare ltm rtm = 0 then EQT_INTRO (REFL ltm) else
    STRING_DISTINCTNESS ltm rtm in
  char_of_term,mk_char,dest_string,mk_string,CHAR_EQ_CONV,STRING_EQ_CONV;;
| null | https://raw.githubusercontent.com/jrh13/hol-light/f25b1592a72d8c1c2666231645cff4809aed1ce4/lists.ml | ocaml | =========================================================================
Theory of lists, plus characters and strings as lists of characters.
=========================================================================
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Basic definitions.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Various trivial theorems.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Syntax.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Extra monotonicity theorems for inductive definitions.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Apply a conversion down a list.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Type of characters, like the HOL88 "ascii" type, with syntax
constructors and equality conversions for chars and strings.
------------------------------------------------------------------------- | , University of Cambridge Computer Laboratory
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
( c ) Copyright , 2014
needs "ind_types.ml";;
Standard tactic for list induction using MATCH_MP_TAC list_INDUCT
let LIST_INDUCT_TAC =
let list_INDUCT = prove
(`!P:(A)list->bool. P [] /\ (!h t. P t ==> P (CONS h t)) ==> !l. P l`,
MATCH_ACCEPT_TAC list_INDUCT) in
MATCH_MP_TAC list_INDUCT THEN
CONJ_TAC THENL [ALL_TAC; GEN_TAC THEN GEN_TAC THEN DISCH_TAC];;
let HD = new_recursive_definition list_RECURSION
`HD(CONS (h:A) t) = h`;;
let TL = new_recursive_definition list_RECURSION
`TL(CONS (h:A) t) = t`;;
let APPEND = new_recursive_definition list_RECURSION
`(!l:(A)list. APPEND [] l = l) /\
(!h t l. APPEND (CONS h t) l = CONS h (APPEND t l))`;;
let REVERSE = new_recursive_definition list_RECURSION
`(REVERSE [] = []) /\
(REVERSE (CONS (x:A) l) = APPEND (REVERSE l) [x])`;;
let LENGTH = new_recursive_definition list_RECURSION
`(LENGTH [] = 0) /\
(!h:A. !t. LENGTH (CONS h t) = SUC (LENGTH t))`;;
let MAP = new_recursive_definition list_RECURSION
`(!f:A->B. MAP f NIL = NIL) /\
(!f h t. MAP f (CONS h t) = CONS (f h) (MAP f t))`;;
let LAST = new_recursive_definition list_RECURSION
`LAST (CONS (h:A) t) = if t = [] then h else LAST t`;;
let BUTLAST = new_recursive_definition list_RECURSION
`(BUTLAST [] = []) /\
(BUTLAST (CONS h t) = if t = [] then [] else CONS h (BUTLAST t))`;;
let REPLICATE = new_recursive_definition num_RECURSION
`(REPLICATE 0 x = []) /\
(REPLICATE (SUC n) x = CONS x (REPLICATE n x))`;;
let NULL = new_recursive_definition list_RECURSION
`(NULL [] = T) /\
(NULL (CONS h t) = F)`;;
let ALL = new_recursive_definition list_RECURSION
`(ALL P [] = T) /\
(ALL P (CONS h t) <=> P h /\ ALL P t)`;;
let EX = new_recursive_definition list_RECURSION
`(EX P [] = F) /\
(EX P (CONS h t) <=> P h \/ EX P t)`;;
let ITLIST = new_recursive_definition list_RECURSION
`(ITLIST f [] b = b) /\
(ITLIST f (CONS h t) b = f h (ITLIST f t b))`;;
let MEM = new_recursive_definition list_RECURSION
`(MEM x [] <=> F) /\
(MEM x (CONS h t) <=> (x = h) \/ MEM x t)`;;
let ALL2_DEF = new_recursive_definition list_RECURSION
`(ALL2 P [] l2 <=> (l2 = [])) /\
(ALL2 P (CONS h1 t1) l2 <=>
if l2 = [] then F
else P h1 (HD l2) /\ ALL2 P t1 (TL l2))`;;
let ALL2 = prove
(`(ALL2 P [] [] <=> T) /\
(ALL2 P (CONS h1 t1) [] <=> F) /\
(ALL2 P [] (CONS h2 t2) <=> F) /\
(ALL2 P (CONS h1 t1) (CONS h2 t2) <=> P h1 h2 /\ ALL2 P t1 t2)`,
REWRITE_TAC[distinctness "list"; ALL2_DEF; HD; TL]);;
let MAP2_DEF = new_recursive_definition list_RECURSION
`(MAP2 f [] l = []) /\
(MAP2 f (CONS h1 t1) l = CONS (f h1 (HD l)) (MAP2 f t1 (TL l)))`;;
let MAP2 = prove
(`(MAP2 f [] [] = []) /\
(MAP2 f (CONS h1 t1) (CONS h2 t2) = CONS (f h1 h2) (MAP2 f t1 t2))`,
REWRITE_TAC[MAP2_DEF; HD; TL]);;
let EL = new_recursive_definition num_RECURSION
`(EL 0 l = HD l) /\
(EL (SUC n) l = EL n (TL l))`;;
let FILTER = new_recursive_definition list_RECURSION
`(FILTER P [] = []) /\
(FILTER P (CONS h t) = if P h then CONS h (FILTER P t) else FILTER P t)`;;
let ASSOC = new_recursive_definition list_RECURSION
`ASSOC a (CONS h t) = if FST h = a then SND h else ASSOC a t`;;
let ITLIST2_DEF = new_recursive_definition list_RECURSION
`(ITLIST2 f [] l2 b = b) /\
(ITLIST2 f (CONS h1 t1) l2 b = f h1 (HD l2) (ITLIST2 f t1 (TL l2) b))`;;
let ITLIST2 = prove
(`(ITLIST2 f [] [] b = b) /\
(ITLIST2 f (CONS h1 t1) (CONS h2 t2) b = f h1 h2 (ITLIST2 f t1 t2 b))`,
REWRITE_TAC[ITLIST2_DEF; HD; TL]);;
let ZIP_DEF = new_recursive_definition list_RECURSION
`(ZIP [] l2 = []) /\
(ZIP (CONS h1 t1) l2 = CONS (h1,HD l2) (ZIP t1 (TL l2)))`;;
let ZIP = prove
(`(ZIP [] [] = []) /\
(ZIP (CONS h1 t1) (CONS h2 t2) = CONS (h1,h2) (ZIP t1 t2))`,
REWRITE_TAC[ZIP_DEF; HD; TL]);;
let ALLPAIRS = new_recursive_definition list_RECURSION
`(ALLPAIRS f [] l <=> T) /\
(ALLPAIRS f (CONS h t) l <=> ALL (f h) l /\ ALLPAIRS f t l)`;;
let PAIRWISE = new_recursive_definition list_RECURSION
`(PAIRWISE (r:A->A->bool) [] <=> T) /\
(PAIRWISE (r:A->A->bool) (CONS h t) <=> ALL (r h) t /\ PAIRWISE r t)`;;
let list_of_seq = new_recursive_definition num_RECURSION
`list_of_seq (s:num->A) 0 = [] /\
list_of_seq s (SUC n) = APPEND (list_of_seq s n) [s n]`;;
let NOT_CONS_NIL = prove
(`!(h:A) t. ~(CONS h t = [])`,
REWRITE_TAC[distinctness "list"]);;
let LAST_CLAUSES = prove
(`(LAST [h:A] = h) /\
(LAST (CONS h (CONS k t)) = LAST (CONS k t))`,
REWRITE_TAC[LAST; NOT_CONS_NIL]);;
let APPEND_NIL = prove
(`!l:A list. APPEND l [] = l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND]);;
let APPEND_ASSOC = prove
(`!(l:A list) m n. APPEND l (APPEND m n) = APPEND (APPEND l m) n`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND]);;
let REVERSE_APPEND = prove
(`!(l:A list) m. REVERSE (APPEND l m) = APPEND (REVERSE m) (REVERSE l)`,
LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[APPEND; REVERSE; APPEND_NIL; APPEND_ASSOC]);;
let REVERSE_REVERSE = prove
(`!l:A list. REVERSE(REVERSE l) = l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[REVERSE; REVERSE_APPEND; APPEND]);;
let REVERSE_EQ_EMPTY = prove
(`!l:A list. REVERSE l = [] <=> l = []`,
MESON_TAC[REVERSE_REVERSE; REVERSE]);;
let CONS_11 = prove
(`!(h1:A) h2 t1 t2. (CONS h1 t1 = CONS h2 t2) <=> (h1 = h2) /\ (t1 = t2)`,
REWRITE_TAC[injectivity "list"]);;
let list_CASES = prove
(`!l:(A)list. (l = []) \/ ?h t. l = CONS h t`,
LIST_INDUCT_TAC THEN REWRITE_TAC[CONS_11; NOT_CONS_NIL] THEN
MESON_TAC[]);;
let LIST_EQ = prove
(`!l1 l2:A list.
l1 = l2 <=>
LENGTH l1 = LENGTH l2 /\ !n. n < LENGTH l2 ==> EL n l1 = EL n l2`,
REPEAT LIST_INDUCT_TAC THEN
REWRITE_TAC[NOT_CONS_NIL; CONS_11; LENGTH; CONJUNCT1 LT; NOT_SUC] THEN
ASM_REWRITE_TAC[SUC_INJ] THEN
GEN_REWRITE_TAC (RAND_CONV o RAND_CONV)
[MESON[num_CASES] `(!n. P n) <=> P 0 /\ (!n. P(SUC n))`] THEN
REWRITE_TAC[EL; HD; TL; LT_0; LT_SUC; CONJ_ACI]);;
let LENGTH_APPEND = prove
(`!(l:A list) m. LENGTH(APPEND l m) = LENGTH l + LENGTH m`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND; LENGTH; ADD_CLAUSES]);;
let MAP_APPEND = prove
(`!f:A->B. !l1 l2. MAP f (APPEND l1 l2) = APPEND (MAP f l1) (MAP f l2)`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; APPEND]);;
let LENGTH_MAP = prove
(`!l. !f:A->B. LENGTH (MAP f l) = LENGTH l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; LENGTH]);;
let LENGTH_EQ_NIL = prove
(`!l:A list. (LENGTH l = 0) <=> (l = [])`,
LIST_INDUCT_TAC THEN REWRITE_TAC[LENGTH; NOT_CONS_NIL; NOT_SUC]);;
let LENGTH_EQ_CONS = prove
(`!l n. (LENGTH l = SUC n) <=> ?h t. (l = CONS h t) /\ (LENGTH t = n)`,
LIST_INDUCT_TAC THEN REWRITE_TAC[LENGTH; NOT_SUC; NOT_CONS_NIL] THEN
ASM_REWRITE_TAC[SUC_INJ; CONS_11] THEN MESON_TAC[]);;
let LENGTH_REVERSE = prove
(`!l:A list. LENGTH(REVERSE l) = LENGTH l`,
LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[REVERSE; LENGTH_APPEND; LENGTH] THEN
REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES]);;
let MAP_o = prove
(`!f:A->B. !g:B->C. !l. MAP (g o f) l = MAP g (MAP f l)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[MAP; o_THM]);;
let MAP_EQ = prove
(`!f g l. ALL (\x. f x = g x) l ==> (MAP f l = MAP g l)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
REWRITE_TAC[MAP; ALL] THEN ASM_MESON_TAC[]);;
let ALL_IMP = prove
(`!P Q l. (!x. MEM x l /\ P x ==> Q x) /\ ALL P l ==> ALL Q l`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
REWRITE_TAC[MEM; ALL] THEN ASM_MESON_TAC[]);;
let NOT_EX = prove
(`!P l. ~(EX P l) <=> ALL (\x. ~(P x)) l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[EX; ALL; DE_MORGAN_THM]);;
let NOT_ALL = prove
(`!P l. ~(ALL P l) <=> EX (\x. ~(P x)) l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[EX; ALL; DE_MORGAN_THM]);;
let ALL_MAP = prove
(`!P f l. ALL P (MAP f l) <=> ALL (P o f) l`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[ALL; MAP; o_THM]);;
let ALL_EQ = prove
(`!l. ALL R l /\ (!x. R x ==> (P x <=> Q x))
==> (ALL P l <=> ALL Q l)`,
LIST_INDUCT_TAC THEN REWRITE_TAC[ALL] THEN
STRIP_TAC THEN BINOP_TAC THEN FIRST_ASSUM MATCH_MP_TAC THEN
ASM_REWRITE_TAC[]);;
let ALL_T = prove
(`!l. ALL (\x. T) l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL]);;
let MAP_EQ_ALL2 = prove
(`!l m. ALL2 (\x y. f x = f y) l m ==> (MAP f l = MAP f m)`,
REPEAT LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; ALL2; CONS_11] THEN
ASM_MESON_TAC[]);;
let ALL2_MAP = prove
(`!P f l. ALL2 P (MAP f l) l <=> ALL (\a. P (f a) a) l`,
GEN_TAC THEN GEN_TAC THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL2; MAP; ALL]);;
let MAP_EQ_DEGEN = prove
(`!l f. ALL (\x. f(x) = x) l ==> (MAP f l = l)`,
LIST_INDUCT_TAC THEN REWRITE_TAC[ALL; MAP; CONS_11] THEN
REPEAT STRIP_TAC THEN ASM_REWRITE_TAC[] THEN
FIRST_ASSUM MATCH_MP_TAC THEN ASM_REWRITE_TAC[]);;
let ALL2_AND_RIGHT = prove
(`!l m P Q. ALL2 (\x y. P x /\ Q x y) l m <=> ALL P l /\ ALL2 Q l m`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL; ALL2] THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL; ALL2] THEN
REWRITE_TAC[CONJ_ACI]);;
let ITLIST_APPEND = prove
(`!f a l1 l2. ITLIST f (APPEND l1 l2) a = ITLIST f l1 (ITLIST f l2 a)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[ITLIST; APPEND]);;
let ITLIST_EXTRA = prove
(`!l. ITLIST f (APPEND l [a]) b = ITLIST f l (f a b)`,
REWRITE_TAC[ITLIST_APPEND; ITLIST]);;
let ALL_MP = prove
(`!P Q l. ALL (\x. P x ==> Q x) l /\ ALL P l ==> ALL Q l`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
REWRITE_TAC[ALL] THEN ASM_MESON_TAC[]);;
let AND_ALL = prove
(`!l. ALL P l /\ ALL Q l <=> ALL (\x. P x /\ Q x) l`,
CONV_TAC(ONCE_DEPTH_CONV SYM_CONV) THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL; CONJ_ACI]);;
let EX_IMP = prove
(`!P Q l. (!x. MEM x l /\ P x ==> Q x) /\ EX P l ==> EX Q l`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
REWRITE_TAC[MEM; EX] THEN ASM_MESON_TAC[]);;
let ALL_MEM = prove
(`!P l. (!x. MEM x l ==> P x) <=> ALL P l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN REWRITE_TAC[ALL; MEM] THEN
ASM_MESON_TAC[]);;
let LENGTH_REPLICATE = prove
(`!n x. LENGTH(REPLICATE n x) = n`,
INDUCT_TAC THEN ASM_REWRITE_TAC[LENGTH; REPLICATE]);;
let MEM_REPLICATE = prove
(`!n x y:A. MEM x (REPLICATE n y) <=> x = y /\ ~(n = 0)`,
INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; REPLICATE; NOT_SUC] THEN
MESON_TAC[]);;
let EX_MAP = prove
(`!P f l. EX P (MAP f l) <=> EX (P o f) l`,
GEN_TAC THEN GEN_TAC THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP; EX; o_THM]);;
let EXISTS_EX = prove
(`!P l. (?x. EX (P x) l) <=> EX (\s. ?x. P x s) l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[EX] THEN
ASM_MESON_TAC[]);;
let FORALL_ALL = prove
(`!P l. (!x. ALL (P x) l) <=> ALL (\s. !x. P x s) l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL] THEN
ASM_MESON_TAC[]);;
let MEM_APPEND = prove
(`!x l1 l2. MEM x (APPEND l1 l2) <=> MEM x l1 \/ MEM x l2`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; APPEND; DISJ_ACI]);;
let MEM_MAP = prove
(`!f y l. MEM y (MAP f l) <=> ?x. MEM x l /\ (y = f x)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[MEM; MAP] THEN MESON_TAC[]);;
let FILTER_APPEND = prove
(`!P l1 l2. FILTER P (APPEND l1 l2) = APPEND (FILTER P l1) (FILTER P l2)`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[FILTER; APPEND] THEN
GEN_TAC THEN COND_CASES_TAC THEN ASM_REWRITE_TAC[APPEND]);;
let FILTER_MAP = prove
(`!P f l. FILTER P (MAP f l) = MAP f (FILTER (P o f) l)`,
GEN_TAC THEN GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[MAP; FILTER; o_THM] THEN COND_CASES_TAC THEN
REWRITE_TAC[MAP]);;
let MEM_FILTER = prove
(`!P l x. MEM x (FILTER P l) <=> P x /\ MEM x l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; FILTER] THEN
GEN_TAC THEN COND_CASES_TAC THEN ASM_REWRITE_TAC[MEM] THEN
ASM_MESON_TAC[]);;
let EX_MEM = prove
(`!P l. (?x. P x /\ MEM x l) <=> EX P l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[EX; MEM] THEN
ASM_MESON_TAC[]);;
let MAP_FST_ZIP = prove
(`!l1 l2. (LENGTH l1 = LENGTH l2) ==> (MAP FST (ZIP l1 l2) = l1)`,
LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
ASM_SIMP_TAC[LENGTH; SUC_INJ; MAP; FST; ZIP; NOT_SUC]);;
let MAP_SND_ZIP = prove
(`!l1 l2. (LENGTH l1 = LENGTH l2) ==> (MAP SND (ZIP l1 l2) = l2)`,
LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
ASM_SIMP_TAC[LENGTH; SUC_INJ; MAP; FST; ZIP; NOT_SUC]);;
let LENGTH_ZIP = prove
(`!l1 l2. LENGTH l1 = LENGTH l2 ==> LENGTH(ZIP l1 l2) = LENGTH l2`,
REPEAT(LIST_INDUCT_TAC ORELSE GEN_TAC) THEN
ASM_SIMP_TAC[LENGTH; NOT_SUC; ZIP; SUC_INJ]);;
let MEM_ASSOC = prove
(`!l x. MEM (x,ASSOC x l) l <=> MEM x (MAP FST l)`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MEM; MAP; ASSOC] THEN
GEN_TAC THEN COND_CASES_TAC THEN ASM_REWRITE_TAC[] THEN
ASM_MESON_TAC[PAIR; FST]);;
let ALL_APPEND = prove
(`!P l1 l2. ALL P (APPEND l1 l2) <=> ALL P l1 /\ ALL P l2`,
GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[ALL; APPEND; GSYM CONJ_ASSOC]);;
let MEM_EL = prove
(`!l n. n < LENGTH l ==> MEM (EL n l) l`,
LIST_INDUCT_TAC THEN REWRITE_TAC[MEM; CONJUNCT1 LT; LENGTH] THEN
INDUCT_TAC THEN ASM_SIMP_TAC[EL; HD; LT_SUC; TL]);;
let MEM_EXISTS_EL = prove
(`!l x. MEM x l <=> ?i. i < LENGTH l /\ x = EL i l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[LENGTH; EL; MEM; CONJUNCT1 LT] THEN
GEN_TAC THEN GEN_REWRITE_TAC RAND_CONV
[MESON[num_CASES] `(?i. P i) <=> P 0 \/ (?i. P(SUC i))`] THEN
REWRITE_TAC[LT_SUC; LT_0; EL; HD; TL]);;
let ALL_EL = prove
(`!P l. (!i. i < LENGTH l ==> P (EL i l)) <=> ALL P l`,
REWRITE_TAC[GSYM ALL_MEM; MEM_EXISTS_EL] THEN MESON_TAC[]);;
let ALL2_MAP2 = prove
(`!l m. ALL2 P (MAP f l) (MAP g m) = ALL2 (\x y. P (f x) (g y)) l m`,
LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL2; MAP]);;
let AND_ALL2 = prove
(`!P Q l m. ALL2 P l m /\ ALL2 Q l m <=> ALL2 (\x y. P x y /\ Q x y) l m`,
GEN_TAC THEN GEN_TAC THEN CONV_TAC(ONCE_DEPTH_CONV SYM_CONV) THEN
LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL2] THEN
REWRITE_TAC[CONJ_ACI]);;
let ALLPAIRS_SYM = prove
(`!P l m. ALLPAIRS P l m <=> ALLPAIRS (\x y. P y x) m l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN REWRITE_TAC[ALLPAIRS] THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALLPAIRS; ALL] THEN
ASM_MESON_TAC[]);;
let ALLPAIRS_MEM = prove
(`!P l m. (!x y. MEM x l /\ MEM y m ==> P x y) <=> ALLPAIRS P l m`,
GEN_TAC THEN
LIST_INDUCT_TAC THEN REWRITE_TAC[ALLPAIRS; GSYM ALL_MEM; MEM] THEN
ASM_MESON_TAC[]);;
let ALLPAIRS_MAP = prove
(`!P l m. ALLPAIRS P (MAP f l) (MAP g m) <=>
ALLPAIRS (\x y. P (f x) (g y)) l m`,
REWRITE_TAC[GSYM ALLPAIRS_MEM; MEM_MAP] THEN MESON_TAC[]);;
let ALLPAIRS_EQ = prove
(`!l m. !P Q. ALL P (l:A list) /\ ALL Q (m:B list) /\
(!p q. P p /\ Q q ==> (R p q <=> R' p q))
==> (ALLPAIRS R l m <=> ALLPAIRS R' l m)`,
REWRITE_TAC[GSYM ALLPAIRS_MEM; GSYM ALL_MEM] THEN MESON_TAC[]);;
let ALL2_ALL = prove
(`!P l. ALL2 P l l <=> ALL (\x. P x x) l`,
GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[ALL2; ALL]);;
let APPEND_EQ_NIL = prove
(`!l m. (APPEND l m = []) <=> (l = []) /\ (m = [])`,
REWRITE_TAC[GSYM LENGTH_EQ_NIL; LENGTH_APPEND; ADD_EQ_0]);;
let APPEND_LCANCEL = prove
(`!l1 l2 l3:A list. APPEND l1 l2 = APPEND l1 l3 <=> l2 = l3`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND; CONS_11]);;
let APPEND_RCANCEL = prove
(`!l1 l2 l3:A list. APPEND l1 l3 = APPEND l2 l3 <=> l1 = l2`,
ONCE_REWRITE_TAC[MESON[REVERSE_REVERSE]
`l = l' <=> REVERSE l = REVERSE l'`] THEN
REWRITE_TAC[REVERSE_APPEND; APPEND_LCANCEL]);;
let LENGTH_MAP2 = prove
(`!f l m. LENGTH l = LENGTH m ==> LENGTH(MAP2 f l m) = LENGTH m`,
GEN_TAC THEN LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
ASM_SIMP_TAC[LENGTH; NOT_CONS_NIL; NOT_SUC; MAP2; SUC_INJ]);;
let EL_MAP2 = prove
(`!f l m k. k < LENGTH l /\ k < LENGTH m
==> EL k (MAP2 f l m) = f (EL k l) (EL k m)`,
GEN_TAC THEN LIST_INDUCT_TAC THEN LIST_INDUCT_TAC THEN
ASM_SIMP_TAC[LENGTH; CONJUNCT1 LT] THEN
INDUCT_TAC THEN ASM_SIMP_TAC[LENGTH; MAP2; EL; HD; TL; LT_SUC]);;
let MAP_EQ_NIL = prove
(`!f l. MAP f l = [] <=> l = []`,
GEN_TAC THEN LIST_INDUCT_TAC THEN REWRITE_TAC[MAP; NOT_CONS_NIL]);;
let INJECTIVE_MAP = prove
(`!f:A->B. (!l m. MAP f l = MAP f m ==> l = m) <=>
(!x y. f x = f y ==> x = y)`,
GEN_TAC THEN EQ_TAC THEN DISCH_TAC THENL
[MAP_EVERY X_GEN_TAC [`x:A`; `y:A`] THEN DISCH_TAC THEN
FIRST_X_ASSUM(MP_TAC o SPECL [`[x:A]`; `[y:A]`]) THEN
ASM_REWRITE_TAC[MAP; CONS_11];
REPEAT LIST_INDUCT_TAC THEN ASM_SIMP_TAC[MAP; NOT_CONS_NIL; CONS_11] THEN
ASM_MESON_TAC[]]);;
let SURJECTIVE_MAP = prove
(`!f:A->B. (!m. ?l. MAP f l = m) <=> (!y. ?x. f x = y)`,
GEN_TAC THEN EQ_TAC THEN DISCH_TAC THENL
[X_GEN_TAC `y:B` THEN FIRST_X_ASSUM(MP_TAC o SPEC `[y:B]`) THEN
REWRITE_TAC[LEFT_IMP_EXISTS_THM] THEN
LIST_INDUCT_TAC THEN REWRITE_TAC[MAP; CONS_11; NOT_CONS_NIL; MAP_EQ_NIL];
MATCH_MP_TAC list_INDUCT] THEN
ASM_MESON_TAC[MAP]);;
let MAP_ID = prove
(`!l. MAP (\x. x) l = l`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[MAP]);;
let MAP_I = prove
(`MAP I = I`,
REWRITE_TAC[FUN_EQ_THM; I_DEF; MAP_ID]);;
let BUTLAST_CLAUSES = prove
(`BUTLAST([]:A list) = [] /\
(!a:A. BUTLAST [a] = []) /\
(!(a:A) h t. BUTLAST(CONS a (CONS h t)) = CONS a (BUTLAST(CONS h t)))`,
REWRITE_TAC[BUTLAST; NOT_CONS_NIL]);;
let BUTLAST_APPEND = prove
(`!l m:A list. BUTLAST(APPEND l m) =
if m = [] then BUTLAST l else APPEND l (BUTLAST m)`,
SIMP_TAC[COND_RAND; APPEND_NIL; MESON[]
`(if p then T else q) <=> ~p ==> q`] THEN
LIST_INDUCT_TAC THEN ASM_SIMP_TAC[APPEND; BUTLAST; APPEND_EQ_NIL]);;
let APPEND_BUTLAST_LAST = prove
(`!l. ~(l = []) ==> APPEND (BUTLAST l) [LAST l] = l`,
LIST_INDUCT_TAC THEN REWRITE_TAC[LAST; BUTLAST; NOT_CONS_NIL] THEN
COND_CASES_TAC THEN ASM_SIMP_TAC[APPEND]);;
let LAST_APPEND = prove
(`!p q. LAST(APPEND p q) = if q = [] then LAST p else LAST q`,
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[APPEND; LAST; APPEND_EQ_NIL] THEN
MESON_TAC[]);;
let LENGTH_TL = prove
(`!l. ~(l = []) ==> LENGTH(TL l) = LENGTH l - 1`,
LIST_INDUCT_TAC THEN REWRITE_TAC[LENGTH; TL; ARITH; SUC_SUB1]);;
let LAST_REVERSE = prove
(`!l:A list. ~(l = []) ==> LAST(REVERSE l) = HD l`,
LIST_INDUCT_TAC THEN
REWRITE_TAC[HD; REVERSE; LAST; LAST_APPEND; NOT_CONS_NIL]);;
let HD_REVERSE = prove
(`!l:A list. ~(l = []) ==> HD(REVERSE l) = LAST l`,
MESON_TAC[LAST_REVERSE; REVERSE_REVERSE; REVERSE_EQ_EMPTY]);;
let EL_APPEND = prove
(`!k l m. EL k (APPEND l m) = if k < LENGTH l then EL k l
else EL (k - LENGTH l) m`,
INDUCT_TAC THEN REWRITE_TAC[EL] THEN
LIST_INDUCT_TAC THEN
REWRITE_TAC[HD; APPEND; LENGTH; SUB_0; EL; LT_0; CONJUNCT1 LT] THEN
ASM_REWRITE_TAC[TL; LT_SUC; SUB_SUC]);;
let EL_TL = prove
(`!n. EL n (TL l) = EL (n + 1) l`,
REWRITE_TAC[GSYM ADD1; EL]);;
let EL_CONS = prove
(`!n h t. EL n (CONS h t) = if n = 0 then h else EL (n - 1) t`,
INDUCT_TAC THEN REWRITE_TAC[EL; HD; TL; NOT_SUC; SUC_SUB1]);;
let LAST_EL = prove
(`!l. ~(l = []) ==> LAST l = EL (LENGTH l - 1) l`,
LIST_INDUCT_TAC THEN REWRITE_TAC[LAST; LENGTH; SUC_SUB1] THEN
DISCH_TAC THEN COND_CASES_TAC THEN
ASM_SIMP_TAC[LENGTH; EL; HD; EL_CONS; LENGTH_EQ_NIL]);;
let HD_APPEND = prove
(`!l m:A list. HD(APPEND l m) = if l = [] then HD m else HD l`,
LIST_INDUCT_TAC THEN REWRITE_TAC[HD; APPEND; NOT_CONS_NIL]);;
let CONS_HD_TL = prove
(`!l. ~(l = []) ==> l = CONS (HD l) (TL l)`,
LIST_INDUCT_TAC THEN REWRITE_TAC[NOT_CONS_NIL;HD;TL]);;
let EL_MAP = prove
(`!f n l. n < LENGTH l ==> EL n (MAP f l) = f(EL n l)`,
GEN_TAC THEN INDUCT_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[LENGTH; CONJUNCT1 LT; LT_0; EL; HD; TL; MAP; LT_SUC]);;
let MAP_REVERSE = prove
(`!f l. REVERSE(MAP f l) = MAP f (REVERSE l)`,
GEN_TAC THEN LIST_INDUCT_TAC THEN
ASM_REWRITE_TAC[MAP; REVERSE; MAP_APPEND]);;
let ALL_FILTER = prove
(`!P Q l:A list. ALL P (FILTER Q l) <=> ALL (\x. Q x ==> P x) l`,
GEN_TAC THEN GEN_TAC THEN
LIST_INDUCT_TAC THEN REWRITE_TAC[ALL; FILTER] THEN
COND_CASES_TAC THEN ASM_REWRITE_TAC[ALL]);;
let APPEND_SING = prove
(`!h t. APPEND [h] t = CONS h t`,
REWRITE_TAC[APPEND]);;
let MEM_APPEND_DECOMPOSE_LEFT = prove
(`!x:A l. MEM x l <=> ?l1 l2. ~(MEM x l1) /\ l = APPEND l1 (CONS x l2)`,
REWRITE_TAC[TAUT `(p <=> q) <=> (p ==> q) /\ (q ==> p)`] THEN
SIMP_TAC[LEFT_IMP_EXISTS_THM; MEM_APPEND; MEM] THEN X_GEN_TAC `x:A` THEN
MATCH_MP_TAC list_INDUCT THEN REWRITE_TAC[MEM] THEN
MAP_EVERY X_GEN_TAC [`y:A`; `l:A list`] THEN
ASM_CASES_TAC `x:A = y` THEN ASM_MESON_TAC[MEM; APPEND]);;
let MEM_APPEND_DECOMPOSE = prove
(`!x:A l. MEM x l <=> ?l1 l2. l = APPEND l1 (CONS x l2)`,
REWRITE_TAC[TAUT `(p <=> q) <=> (p ==> q) /\ (q ==> p)`] THEN
SIMP_TAC[LEFT_IMP_EXISTS_THM; MEM_APPEND; MEM] THEN
ONCE_REWRITE_TAC[MEM_APPEND_DECOMPOSE_LEFT] THEN MESON_TAC[]);;
let PAIRWISE_APPEND = prove
(`!R:A->A->bool l m.
PAIRWISE R (APPEND l m) <=>
PAIRWISE R l /\ PAIRWISE R m /\ (!x y. MEM x l /\ MEM y m ==> R x y)`,
GEN_TAC THEN MATCH_MP_TAC list_INDUCT THEN
REWRITE_TAC[APPEND; PAIRWISE; MEM; ALL_APPEND; GSYM ALL_MEM] THEN
MESON_TAC[]);;
let PAIRWISE_MAP = prove
(`!R f:A->B l.
PAIRWISE R (MAP f l) <=> PAIRWISE (\x y. R (f x) (f y)) l`,
GEN_TAC THEN GEN_TAC THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[PAIRWISE; MAP; ALL_MAP; o_DEF]);;
let PAIRWISE_IMPLIES = prove
(`!R:A->A->bool R' l.
PAIRWISE R l /\ (!x y. MEM x l /\ MEM y l /\ R x y ==> R' x y)
==> PAIRWISE R' l`,
GEN_TAC THEN GEN_TAC THEN MATCH_MP_TAC list_INDUCT THEN
REWRITE_TAC[PAIRWISE; GSYM ALL_MEM; MEM] THEN MESON_TAC[]);;
let PAIRWISE_TRANSITIVE = prove
(`!R x y:A l.
(!x y z. R x y /\ R y z ==> R x z)
==> (PAIRWISE R (CONS x (CONS y l)) <=> R x y /\ PAIRWISE R (CONS y l))`,
REPEAT STRIP_TAC THEN
REWRITE_TAC[PAIRWISE; ALL; GSYM CONJ_ASSOC;
TAUT `(p /\ q /\ r /\ s <=> p /\ r /\ s) <=>
p /\ s ==> r ==> q`] THEN
STRIP_TAC THEN MATCH_MP_TAC(REWRITE_RULE[IMP_CONJ] ALL_IMP) THEN
ASM_MESON_TAC[]);;
let LENGTH_LIST_OF_SEQ = prove
(`!s:num->A n. LENGTH(list_of_seq s n) = n`,
GEN_TAC THEN INDUCT_TAC THEN
ASM_REWRITE_TAC[list_of_seq; LENGTH; LENGTH_APPEND; ADD_CLAUSES]);;
let EL_LIST_OF_SEQ = prove
(`!s:num->A m n. m < n ==> EL m (list_of_seq s n) = s m`,
GEN_TAC THEN ONCE_REWRITE_TAC[SWAP_FORALL_THM] THEN
INDUCT_TAC THEN
REWRITE_TAC[list_of_seq; LT; EL_APPEND; LENGTH_LIST_OF_SEQ] THEN
REPEAT STRIP_TAC THEN ASM_SIMP_TAC[SUB_REFL; EL; HD; LT_REFL]);;
let LIST_OF_SEQ_EQ_NIL = prove
(`!s:num->A n. list_of_seq s n = [] <=> n = 0`,
REWRITE_TAC[GSYM LENGTH_EQ_NIL; LENGTH_LIST_OF_SEQ; LENGTH]);;
let mk_cons h t =
try let cons = mk_const("CONS",[type_of h,aty]) in
mk_comb(mk_comb(cons,h),t)
with Failure _ -> failwith "mk_cons";;
let mk_list (tms,ty) =
try let nil = mk_const("NIL",[ty,aty]) in
if tms = [] then nil else
let cons = mk_const("CONS",[ty,aty]) in
itlist (mk_binop cons) tms nil
with Failure _ -> failwith "mk_list";;
let mk_flist tms =
try mk_list(tms,type_of(hd tms))
with Failure _ -> failwith "mk_flist";;
let MONO_ALL = prove
(`(!x:A. P x ==> Q x) ==> ALL P l ==> ALL Q l`,
DISCH_TAC THEN SPEC_TAC(`l:A list`,`l:A list`) THEN
LIST_INDUCT_TAC THEN ASM_REWRITE_TAC[ALL] THEN ASM_MESON_TAC[]);;
let MONO_ALL2 = prove
(`(!x y. (P:A->B->bool) x y ==> Q x y) ==> ALL2 P l l' ==> ALL2 Q l l'`,
DISCH_TAC THEN
SPEC_TAC(`l':B list`,`l':B list`) THEN SPEC_TAC(`l:A list`,`l:A list`) THEN
LIST_INDUCT_TAC THEN REWRITE_TAC[ALL2_DEF] THEN
GEN_TAC THEN COND_CASES_TAC THEN REWRITE_TAC[] THEN ASM_MESON_TAC[]);;
monotonicity_theorems := [MONO_ALL; MONO_ALL2] @ !monotonicity_theorems;;
let rec LIST_CONV conv tm =
if is_cons tm then
COMB2_CONV (RAND_CONV conv) (LIST_CONV conv) tm
else if fst(dest_const tm) = "NIL" then REFL tm
else failwith "LIST_CONV";;
let char_INDUCT,char_RECURSION = define_type
"char = ASCII bool bool bool bool bool bool bool bool";;
new_type_abbrev("string",`:char list`);;
let dest_char,mk_char,dest_string,mk_string,CHAR_EQ_CONV,STRING_EQ_CONV =
let bool_of_term t =
match t with
Const("T",_) -> true
| Const("F",_) -> false
| _ -> failwith "bool_of_term" in
let code_of_term t =
let f,tms = strip_comb t in
if not(is_const f && fst(dest_const f) = "ASCII")
|| not(length tms = 8) then failwith "code_of_term"
else
itlist (fun b f -> if b then 1 + 2 * f else 2 * f)
(map bool_of_term (rev tms)) 0 in
let char_of_term = Char.chr o code_of_term in
let dest_string tm =
try let tms = dest_list tm in
if fst(dest_type(hd(snd(dest_type(type_of tm))))) <> "char"
then fail() else
let ccs = map (String.make 1 o char_of_term) tms in
String.escaped (implode ccs)
with Failure _ -> failwith "dest_string" in
let mk_bool b =
let true_tm,false_tm = `T`,`F` in
if b then true_tm else false_tm in
let mk_code =
let ascii_tm = `ASCII` in
let mk_code c =
let lis = map (fun i -> mk_bool((c / (1 lsl i)) mod 2 = 1)) (0--7) in
itlist (fun x y -> mk_comb(y,x)) lis ascii_tm in
let codes = Array.map mk_code (Array.of_list (0--255)) in
fun c -> Array.get codes c in
let mk_char = mk_code o Char.code in
let mk_string s =
let ns = map (fun i -> Char.code(String.get s i))
(0--(String.length s - 1)) in
mk_list(map mk_code ns,`:char`) in
let CHAR_DISTINCTNESS =
let avars,bvars,cvars =
[`a0:bool`;`a1:bool`;`a2:bool`;`a3:bool`;`a4:bool`;`a5:bool`;`a6:bool`],
[`b1:bool`;`b2:bool`;`b3:bool`;`b4:bool`;`b5:bool`;`b6:bool`;`b7:bool`],
[`c1:bool`;`c2:bool`;`c3:bool`;`c4:bool`;`c5:bool`;`c6:bool`;`c7:bool`] in
let ASCII_NEQS_FT = (map EQF_INTRO o CONJUNCTS o prove)
(`~(ASCII F b1 b2 b3 b4 b5 b6 b7 = ASCII T c1 c2 c3 c4 c5 c6 c7) /\
~(ASCII a0 F b2 b3 b4 b5 b6 b7 = ASCII a0 T c2 c3 c4 c5 c6 c7) /\
~(ASCII a0 a1 F b3 b4 b5 b6 b7 = ASCII a0 a1 T c3 c4 c5 c6 c7) /\
~(ASCII a0 a1 a2 F b4 b5 b6 b7 = ASCII a0 a1 a2 T c4 c5 c6 c7) /\
~(ASCII a0 a1 a2 a3 F b5 b6 b7 = ASCII a0 a1 a2 a3 T c5 c6 c7) /\
~(ASCII a0 a1 a2 a3 a4 F b6 b7 = ASCII a0 a1 a2 a3 a4 T c6 c7) /\
~(ASCII a0 a1 a2 a3 a4 a5 F b7 = ASCII a0 a1 a2 a3 a4 a5 T c7) /\
~(ASCII a0 a1 a2 a3 a4 a5 a6 F = ASCII a0 a1 a2 a3 a4 a5 a6 T)`,
REWRITE_TAC[injectivity "char"]) in
let ASCII_NEQS_TF =
let ilist = zip bvars cvars @ zip cvars bvars in
let f = EQF_INTRO o INST ilist o GSYM o EQF_ELIM in
map f ASCII_NEQS_FT in
let rec prefix n l =
if n = 0 then [] else
match l with
h::t -> h :: prefix (n-1) t
| _ -> l in
let rec findneq n prefix a b =
match a,b with
b1::a, b2::b -> if b1 <> b2 then n,rev prefix,bool_of_term b2,a,b else
findneq (n+1) (b1 :: prefix) a b
| _, _ -> fail() in
fun c1 c2 ->
let _,a = strip_comb c1
and _,b = strip_comb c2 in
let n,p,b,s1,s2 = findneq 0 [] a b in
let ss1 = funpow n tl bvars
and ss2 = funpow n tl cvars in
let pp = prefix n avars in
let pth = if b then ASCII_NEQS_FT else ASCII_NEQS_TF in
INST (zip p pp @ zip s1 ss1 @ zip s2 ss2) (el n pth) in
let STRING_DISTINCTNESS =
let xtm,xstm = `x:char`,`xs:string`
and ytm,ystm = `y:char`,`ys:string`
and niltm = `[]:string` in
let NIL_EQ_THM = EQT_INTRO (REFL niltm)
and CONS_EQ_THM,CONS_NEQ_THM = (CONJ_PAIR o prove)
(`(CONS x xs:string = CONS x ys <=> xs = ys) /\
((x = y <=> F) ==> (CONS x xs:string = CONS y ys <=> F))`,
REWRITE_TAC[CONS_11] THEN MESON_TAC[])
and NIL_NEQ_CONS,CONS_NEQ_NIL = (CONJ_PAIR o prove)
(`(NIL:string = CONS x xs <=> F) /\
(CONS x xs:string = NIL <=> F)`,
REWRITE_TAC[NOT_CONS_NIL]) in
let rec STRING_DISTINCTNESS s1 s2 =
if s1 = niltm
then if s2 = niltm then NIL_EQ_THM
else let c2,s2 = rand (rator s2),rand s2 in
INST [c2,xtm;s2,xstm] NIL_NEQ_CONS
else let c1,s1 = rand (rator s1),rand s1 in
if s2 = niltm then INST [c1,xtm;s1,xstm] CONS_NEQ_NIL
else let c2,s2 = rand (rator s2),rand s2 in
if c1 = c2
then let th1 = INST [c1,xtm; s1,xstm; s2,ystm] CONS_EQ_THM
and th2 = STRING_DISTINCTNESS s1 s2 in
TRANS th1 th2
else let ilist = [c1,xtm; c2,ytm; s1,xstm; s2,ystm] in
let itm = INST ilist CONS_NEQ_THM in
MP itm (CHAR_DISTINCTNESS c1 c2) in
STRING_DISTINCTNESS in
let CHAR_EQ_CONV : conv =
fun tm ->
let c1,c2 = dest_eq tm in
if compare c1 c2 = 0 then EQT_INTRO (REFL c1) else
CHAR_DISTINCTNESS c1 c2
and STRING_EQ_CONV tm =
let ltm,rtm = dest_eq tm in
if compare ltm rtm = 0 then EQT_INTRO (REFL ltm) else
STRING_DISTINCTNESS ltm rtm in
char_of_term,mk_char,dest_string,mk_string,CHAR_EQ_CONV,STRING_EQ_CONV;;
|
36c1488ebceabe382dee189107f7730544b57cecaf31664ee9821534265b3d3b | rhaberkorn/ermacs | edit_display.erl | -module(edit_display).
-include("edit.hrl").
-compile(export_all).
%%-export([Function/Arity, ...]).
draw_window(Window) when Window#window.minibuffer == true,
Window#window.status_text /= undefined ->
?EDIT_TERMINAL:move_to(0, Window#window.y),
draw_line(Window#window.status_text),
Window#window{status_text=undefined};
draw_window(Window) ->
try_update(Window).
try_update(Window) ->
Buf = Window#window.buffer,
PointMax = edit_buf:point_max(Buf),
DStart = edit_buf:mark_pos(Buf, Window#window.start_mark),
Scan = edit_lib:beginning_of_line_pos(Buf, DStart),
Point = edit_buf:mark_pos(Buf, point),
FIXME
Text = edit_buf:get_region(Buf, Scan, min(PointMax, Scan + Chars)),
?EDIT_TERMINAL:move_to(0, Window#window.y),
Rows = edit_window:text_lines(Window),
Prefix = Window#window.prefix,
PLen = length(Prefix),
PAcc = lists:reverse(Prefix),
case try_update_loop(Text,Rows,Scan,PLen,0,Point,undefined,PAcc) of
{X, Y} ->
%% draw mode line
draw_modeline(Window),
TrimX = min(X, Window#window.width - 1),
?EDIT_TERMINAL:move_to(TrimX, Y + Window#window.y),
Window;
undefined ->
%% The point wasn't inside the area we drew, so we
%% recenter the display with the point in the middle and
%% then draw again.
try_update(recenter_window(Window))
end.
%% Returns the location of the point in a tuple {X, Y}, or undefined
%% if it wasn't in the area drawn.
try_update_loop(Text, NRows, Scan, Col, Row, Point, PointXY, Acc)
when Scan == Point,
PointXY == undefined ->
try_update_loop(Text,NRows,Scan,Col,Row,Point,{Col, Row},Acc);
try_update_loop([$\n|T], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
draw_line(lists:reverse(Acc)),
?EDIT_TERMINAL:newline(),
NextRow = Row+1,
if NextRow == NRows ->
PointXY;
true ->
try_update_loop(T,NRows,Scan+1,0,Row+1,Point,PointXY, [])
end;
try_update_loop([$\t|T], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
Size = 8 - (Col rem 8),
Tab = lists:duplicate(Size, $ ),
try_update_loop(T,NRows,Scan+1,Col+Size,Row,Point,PointXY,Tab++Acc);
try_update_loop([H|T], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
try_update_loop(T,NRows,Scan+1,Col+1,Row,Point,PointXY,[H|Acc]);
try_update_loop([], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
draw_line(lists:reverse(Acc)),
RemainingRows = NRows - Row,
%% draw empty lines until the end
dotimes(fun() -> draw_line([]),
?EDIT_TERMINAL:newline()
end,
RemainingRows),
PointXY.
draw_line(L) ->
Wth = ?EDIT_TERMINAL:width(),
Str = trunc_line(L, Wth),
?EDIT_TERMINAL:put_string(L),
?EDIT_TERMINAL:erase_to_eol().
trunc_line([H], 1) -> [H];
trunc_line(_, 1) -> [$$];
trunc_line([H|T], N) -> [H|trunc_line(T, N-1)];
trunc_line([], _) -> [].
draw_modeline(Window) when Window#window.minibuffer == true ->
ok;
draw_modeline(Window) ->
Buffer = Window#window.buffer,
Where = modeline_where(Window, Buffer),
Text = lists:flatten(
io_lib:format("--:?? ~s (~s) ~s",
[atom_to_list(Buffer),
(edit_buf:get_mode(Buffer))#mode.name,
Where])),
?EDIT_TERMINAL:font_reverse(),
?EDIT_TERMINAL:move_to(0, Window#window.y +
edit_window:physical_lines(Window) - 1),
draw_line(Text),
?EDIT_TERMINAL:font_normal().
modeline_where(Window, Buffer) ->
case edit_buf:get_size(Buffer) of
0 ->
"ALL";
BSize ->
Start = edit_buf:mark_pos(Buffer, Window#window.start_mark),
Percentage = trunc(Start * 100 / BSize),
io_lib:format("~p%", [Percentage])
end.
%% Update the display_start of a window so that it presents the point
%% in the middle of the screen.
recenter_window(Window) ->
Buf = Window#window.buffer,
Height = edit_window:text_lines(Window),
Pos = backward_lines(Buf, trunc(Height / 2)),
edit_buf:move_mark(Buf, Window#window.start_mark, Pos),
Window.
backward_lines(Buf, N) ->
StartPos = edit_lib:beginning_of_line_pos(Buf),
edit_buf:walk_backward(Buf,
fun(X) -> back_lines(X, N, StartPos) end,
StartPos).
back_lines(finish, N, Pos) ->
{result, 1};
back_lines($\n, N, Pos) ->
if
N == 1 ->
{result, Pos};
true ->
{more, fun(New) -> back_lines(New, N-1, Pos-1) end}
end;
back_lines(_, N, Pos) ->
{more, fun(New) -> back_lines(New, N, Pos-1) end}.
dotimes(Fun, 0) ->
true;
dotimes(Fun, N) when is_integer(N), N > 0 ->
Fun(),
dotimes(Fun, N-1).
| null | https://raw.githubusercontent.com/rhaberkorn/ermacs/35c8f9b83ae85e25c646882be6ea6d340a88b05b/src/edit_display.erl | erlang | -export([Function/Arity, ...]).
draw mode line
The point wasn't inside the area we drew, so we
recenter the display with the point in the middle and
then draw again.
Returns the location of the point in a tuple {X, Y}, or undefined
if it wasn't in the area drawn.
draw empty lines until the end
Update the display_start of a window so that it presents the point
in the middle of the screen. | -module(edit_display).
-include("edit.hrl").
-compile(export_all).
draw_window(Window) when Window#window.minibuffer == true,
Window#window.status_text /= undefined ->
?EDIT_TERMINAL:move_to(0, Window#window.y),
draw_line(Window#window.status_text),
Window#window{status_text=undefined};
draw_window(Window) ->
try_update(Window).
try_update(Window) ->
Buf = Window#window.buffer,
PointMax = edit_buf:point_max(Buf),
DStart = edit_buf:mark_pos(Buf, Window#window.start_mark),
Scan = edit_lib:beginning_of_line_pos(Buf, DStart),
Point = edit_buf:mark_pos(Buf, point),
FIXME
Text = edit_buf:get_region(Buf, Scan, min(PointMax, Scan + Chars)),
?EDIT_TERMINAL:move_to(0, Window#window.y),
Rows = edit_window:text_lines(Window),
Prefix = Window#window.prefix,
PLen = length(Prefix),
PAcc = lists:reverse(Prefix),
case try_update_loop(Text,Rows,Scan,PLen,0,Point,undefined,PAcc) of
{X, Y} ->
draw_modeline(Window),
TrimX = min(X, Window#window.width - 1),
?EDIT_TERMINAL:move_to(TrimX, Y + Window#window.y),
Window;
undefined ->
try_update(recenter_window(Window))
end.
try_update_loop(Text, NRows, Scan, Col, Row, Point, PointXY, Acc)
when Scan == Point,
PointXY == undefined ->
try_update_loop(Text,NRows,Scan,Col,Row,Point,{Col, Row},Acc);
try_update_loop([$\n|T], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
draw_line(lists:reverse(Acc)),
?EDIT_TERMINAL:newline(),
NextRow = Row+1,
if NextRow == NRows ->
PointXY;
true ->
try_update_loop(T,NRows,Scan+1,0,Row+1,Point,PointXY, [])
end;
try_update_loop([$\t|T], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
Size = 8 - (Col rem 8),
Tab = lists:duplicate(Size, $ ),
try_update_loop(T,NRows,Scan+1,Col+Size,Row,Point,PointXY,Tab++Acc);
try_update_loop([H|T], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
try_update_loop(T,NRows,Scan+1,Col+1,Row,Point,PointXY,[H|Acc]);
try_update_loop([], NRows, Scan, Col, Row, Point, PointXY, Acc) ->
draw_line(lists:reverse(Acc)),
RemainingRows = NRows - Row,
dotimes(fun() -> draw_line([]),
?EDIT_TERMINAL:newline()
end,
RemainingRows),
PointXY.
draw_line(L) ->
Wth = ?EDIT_TERMINAL:width(),
Str = trunc_line(L, Wth),
?EDIT_TERMINAL:put_string(L),
?EDIT_TERMINAL:erase_to_eol().
trunc_line([H], 1) -> [H];
trunc_line(_, 1) -> [$$];
trunc_line([H|T], N) -> [H|trunc_line(T, N-1)];
trunc_line([], _) -> [].
draw_modeline(Window) when Window#window.minibuffer == true ->
ok;
draw_modeline(Window) ->
Buffer = Window#window.buffer,
Where = modeline_where(Window, Buffer),
Text = lists:flatten(
io_lib:format("--:?? ~s (~s) ~s",
[atom_to_list(Buffer),
(edit_buf:get_mode(Buffer))#mode.name,
Where])),
?EDIT_TERMINAL:font_reverse(),
?EDIT_TERMINAL:move_to(0, Window#window.y +
edit_window:physical_lines(Window) - 1),
draw_line(Text),
?EDIT_TERMINAL:font_normal().
modeline_where(Window, Buffer) ->
case edit_buf:get_size(Buffer) of
0 ->
"ALL";
BSize ->
Start = edit_buf:mark_pos(Buffer, Window#window.start_mark),
Percentage = trunc(Start * 100 / BSize),
io_lib:format("~p%", [Percentage])
end.
recenter_window(Window) ->
Buf = Window#window.buffer,
Height = edit_window:text_lines(Window),
Pos = backward_lines(Buf, trunc(Height / 2)),
edit_buf:move_mark(Buf, Window#window.start_mark, Pos),
Window.
backward_lines(Buf, N) ->
StartPos = edit_lib:beginning_of_line_pos(Buf),
edit_buf:walk_backward(Buf,
fun(X) -> back_lines(X, N, StartPos) end,
StartPos).
back_lines(finish, N, Pos) ->
{result, 1};
back_lines($\n, N, Pos) ->
if
N == 1 ->
{result, Pos};
true ->
{more, fun(New) -> back_lines(New, N-1, Pos-1) end}
end;
back_lines(_, N, Pos) ->
{more, fun(New) -> back_lines(New, N, Pos-1) end}.
dotimes(Fun, 0) ->
true;
dotimes(Fun, N) when is_integer(N), N > 0 ->
Fun(),
dotimes(Fun, N-1).
|
bd2623f86ee8c1cd1b9488b996976c7fcd0151678c9fff725a0af2c14f203a3b | JHU-PL-Lab/jaylang | dotprod.ml |
let make_array n = n
let arraysize src = src
let update des i x = assert (0 <= i && i < des)
let sub src i = assert (0 <= i && i < src); 0
let rec dotprod_aux n v1 v2 i sum =
if i = n
then sum
else dotprod_aux n v1 v2 (i+1) (sum + (sub v1 i) * (sub v2 i))
let dotprod v1 v2 = dotprod_aux (arraysize v1) v1 v2 0 0
let main n m =
let v1 = make_array n in
let v2 = make_array m in
if 0<=n && n=m then (dotprod v1 v2; ()) else ()
| null | https://raw.githubusercontent.com/JHU-PL-Lab/jaylang/484b3876986a515fb57b11768a1b3b50418cde0c/benchmark/cases/mochi_origin/mochi/dotprod.ml | ocaml |
let make_array n = n
let arraysize src = src
let update des i x = assert (0 <= i && i < des)
let sub src i = assert (0 <= i && i < src); 0
let rec dotprod_aux n v1 v2 i sum =
if i = n
then sum
else dotprod_aux n v1 v2 (i+1) (sum + (sub v1 i) * (sub v2 i))
let dotprod v1 v2 = dotprod_aux (arraysize v1) v1 v2 0 0
let main n m =
let v1 = make_array n in
let v2 = make_array m in
if 0<=n && n=m then (dotprod v1 v2; ()) else ()
| |
077461d756bbec357280ad34c07a99756a688e517e8c1321c864c84727c21b4d | onedata/op-worker | gs_share_logic_test_SUITE.erl | %%%--------------------------------------------------------------------
@author
( C ) 2017 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%--------------------------------------------------------------------
%%% @doc
%%% This module tests share logic API using mocked gs_client module.
%%% @end
%%%--------------------------------------------------------------------
-module(gs_share_logic_test_SUITE).
-author("Lukasz Opiola").
-include("logic_tests_common.hrl").
%% export for ct
-export([all/0, init_per_suite/1, init_per_testcase/2, end_per_testcase/2, end_per_suite/1]).
-export([
get_test/1,
get_public_data_test/1,
mixed_get_test/1,
subscribe_test/1,
create_update_delete_test/1,
confined_access_token_test/1
]).
%% Testcases executed by the Common Test framework for this suite.
all() -> ?ALL([
    get_test,
    get_public_data_test,
    mixed_get_test,
    subscribe_test,
    create_update_delete_test,
    confined_access_token_test
]).
%%%===================================================================
%%% Test functions
%%%===================================================================
%% Verifies private-scope share fetching: the first get reaches Onezone
%% (graph request count rises by one), subsequent gets are served from
%% cache, the provider itself can both use and refill the cache, and a
%% user outside the share is denied without another Onezone request.
get_test(Config) ->
    [Node | _] = ?config(op_worker_nodes, Config),
    User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
    % User 3 does not belong to the share
    User3Sess = logic_tests_common:get_user_session(Config, ?USER_3),
    % Cache space 1 and provider 1 private data, as it is required to verify
    % access to share in cache
    rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
    rpc:call(Node, provider_logic, get, [?ROOT_SESS_ID, ?PROVIDER_1]),
    ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
    % Baseline graph request count - all later checks are relative to it.
    GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
    ?assertMatch(
        {ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % Share private data should now be cached
    ?assertMatch(
        {ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % Make sure that provider can access cached share data
    % Provider must be aware of its ID to check access to cached share - this is
    % mocked in init_per_testcase.
    ?assertMatch(
        {ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get, [?ROOT_SESS_ID, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % Make sure that provider can access non-cached share data
    logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
    ?assertMatch(
        {ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get, [?ROOT_SESS_ID, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % Make sure that other users cannot access cached data
    ?assertMatch(
        ?ERROR_FORBIDDEN,
        rpc:call(Node, share_logic, get, [User3Sess, ?SHARE_1])
    ),
    % Denied locally - no extra graph request should have been made.
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ok.
%% Verifies public-scope share fetching: every user (including one not
%% belonging to the share) and the provider can read public data, both
%% from cache (no extra graph request) and after cache invalidation
%% (one graph request per refetch).
get_public_data_test(Config) ->
    [Node | _] = ?config(op_worker_nodes, Config),
    User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
    % User 3 does not belong to the share
    User3Sess = logic_tests_common:get_user_session(Config, ?USER_3),
    % Cache space 1 private data, as it is required to verify access to
    % share in cache
    rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
    ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
    GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
    % All users and providers should be able to fetch public share data
    % when it is cached
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User3Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [?ROOT_SESS_ID, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % All users and providers should be able to fetch public share data
    % when it is NOT cached
    logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User3Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [?ROOT_SESS_ID, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 3, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ok.
%% Verifies scope-upgrade behavior: fetching a higher scope (private
%% after public) causes an unsubscribe of the old subscription plus a
%% fresh fetch, while once private data is cached any scope is served
%% from cache with no further graph or unsub requests.
mixed_get_test(Config) ->
    [Node | _] = ?config(op_worker_nodes, Config),
    User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
    % Cache space 1 private data, as it is required to verify access to
    % share in cache
    rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
    ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
    GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
    UnsubCalls = logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher),
    % Fetching rising scopes should cause an unsub and new fetch every time
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertEqual(UnsubCalls, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
    ?assertMatch(
        {ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertEqual(UnsubCalls + 1, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
    % When private data is cached, any scope should always be fetched from cache
    ?assertMatch(
        {ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertEqual(UnsubCalls + 1, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertEqual(UnsubCalls + 1, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
    ok.
%% Verifies that Graph Sync push messages keep the local share cache in
%% sync: 'updated' pushes are applied to the cached record without a new
%% fetch, while 'deleted' and 'nosub' pushes invalidate the cache.
subscribe_test(Config) ->
    [Node | _] = ?config(op_worker_nodes, Config),
    User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
    % Cache space 1 private data, as it is required to verify access to
    % share in cache
    rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
    ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
    GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
    % Simulate received updates on different scopes (in rising order)
    Share1PublicGRI = #gri{type = od_share, id = ?SHARE_1, aspect = instance, scope = public},
    Share1PublicData = ?SHARE_PUBLIC_DATA_VALUE(?SHARE_1),
    Share1PrivateGRI = #gri{type = od_share, id = ?SHARE_1, aspect = instance, scope = private},
    Share1PrivateData = ?SHARE_PRIVATE_DATA_VALUE(?SHARE_1),
    % public scope
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ChangedData1 = Share1PublicData#{
        <<"revision">> => 2,
        <<"name">> => <<"changedName">>
    },
    PushMessage1 = #gs_push_graph{gri = Share1PublicGRI, data = ChangedData1, change_type = updated},
    logic_tests_common:simulate_push(Config, PushMessage1),
    % Pushed update must land in the cache - no additional fetch expected.
    ?assertMatch(
        {ok, #document{key = ?SHARE_1, value = #od_share{
            name = <<"changedName">>,
            cache_state = #{revision := 2}
        }}},
        rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % private scope
    logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
    ?assertMatch(
        {ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ChangedData2 = Share1PrivateData#{
        <<"revision">> => 3,
        <<"name">> => <<"changedName2">>
    },
    PushMessage2 = #gs_push_graph{gri = Share1PrivateGRI, data = ChangedData2, change_type = updated},
    logic_tests_common:simulate_push(Config, PushMessage2),
    ?assertMatch(
        {ok, #document{key = ?SHARE_1, value = #od_share{
            name = <<"changedName2">>,
            cache_state = #{revision := 3}
        }}},
        rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % Simulate a 'deleted' push and see if cache was invalidated
    PushMessage4 = #gs_push_graph{gri = Share1PrivateGRI, change_type = deleted},
    logic_tests_common:simulate_push(Config, PushMessage4),
    ?assertMatch(
        {error, not_found},
        rpc:call(Node, od_share, get_from_cache, [?SHARE_1])
    ),
    % Simulate a 'nosub' push and see if cache was invalidated, fetch the
    % record first.
    ?assertMatch(
        {ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
        rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
    ),
    PushMessage5 = #gs_push_nosub{gri = Share1PrivateGRI, reason = forbidden},
    logic_tests_common:simulate_push(Config, PushMessage5),
    ?assertMatch(
        {error, not_found},
        rpc:call(Node, od_share, get_from_cache, [?SHARE_1])
    ),
    ok.
%% Exercises the full create/update/delete lifecycle of a share through
%% share_logic, including validation errors (bad space id, non-binary
%% name/description, empty update map, unknown share id), counting the
%% graph requests produced by each operation.
create_update_delete_test(Config) ->
    [Node | _] = ?config(op_worker_nodes, Config),
    User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
    ShareGriMatcher = #gri{type = od_share, aspect = instance, _ = '_'},
    GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
    % Create
    ?assertMatch(
        {ok, ?MOCK_CREATED_SHARE_ID},
        rpc:call(Node, share_logic, create, [
            User1Sess,
            ?MOCK_CREATED_SHARE_ID,
            ?SHARE_NAME(<<"newShare">>),
            ?SHARE_DESCRIPTION(<<"newShare">>),
            ?SHARE_SPACE(<<"newShare">>),
            ?SHARE_ROOT_FILE(<<"newShare">>),
            dir
        ])
    ),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        ?ERROR_BAD_VALUE_ID_NOT_FOUND(<<"spaceId">>),
        rpc:call(Node, share_logic, create, [
            User1Sess,
            ?MOCK_CREATED_SHARE_ID,
            ?SHARE_NAME(<<"newShare">>),
            ?SHARE_DESCRIPTION(<<"newShare">>),
            <<"badSpaceId">>,
            ?SHARE_ROOT_FILE(<<"newShare">>),
            dir
        ])
    ),
    ?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % Update
    ?assertMatch(
        ok,
        rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{
            <<"name">> => <<"newName">>,
            <<"description">> => <<"New share description">>
        }])
    ),
    % two requests should be done - one for update and one for force fetch
    ?assertEqual(GraphCalls + 4, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        ?ERROR_BAD_VALUE_BINARY(<<"name">>),
        rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{<<"name">> => 1234}])
    ),
    ?assertEqual(GraphCalls + 5, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        ?ERROR_BAD_VALUE_BINARY(<<"description">>),
        rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{<<"description">> => 87.9}])
    ),
    ?assertEqual(GraphCalls + 6, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        ?ERROR_MISSING_AT_LEAST_ONE_VALUE([<<"description">>, <<"name">>]),
        rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{}])
    ),
    ?assertEqual(GraphCalls + 7, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    % Delete
    ?assertMatch(
        ok,
        rpc:call(Node, share_logic, delete, [User1Sess, ?SHARE_1])
    ),
    ?assertEqual(GraphCalls + 8, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ?assertMatch(
        ?ERROR_NOT_FOUND,
        rpc:call(Node, share_logic, delete, [User1Sess, <<"wrongId">>])
    ),
    ?assertEqual(GraphCalls + 9, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
    ok.
%% Verifies that a request made with an access token confined by an
%% oneclient interface caveat is rejected (the caveat forbids the REST
%% interface) while the expected token-verification, revocation-
%% subscription and user-fetch requests are still issued.
confined_access_token_test(Config) ->
    [Node | _] = ?config(op_worker_nodes, Config),
    Caveat = #cv_interface{interface = oneclient},
    AccessToken = initializer:create_access_token(?USER_1, [Caveat]),
    TokenCredentials = auth_manager:build_token_credentials(
        AccessToken, undefined,
        initializer:local_ip_v4(), rest, allow_data_access_caveats
    ),
    UserGriMatcher = #gri{type = od_user, id = ?USER_1, aspect = instance, _ = '_'},
    OdTokenGriMatcher = #gri{type = od_token, aspect = verify_access_token, scope = public},
    TokenSecretGriMatcher = #gri{type = temporary_token_secret, id = ?USER_1, aspect = user, scope = shared},
    GraphCalls = logic_tests_common:count_reqs(Config, graph, UserGriMatcher),
    OdTokenGraphCalls = logic_tests_common:count_reqs(Config, graph, OdTokenGriMatcher),
    TokenSecretGraphCalls = logic_tests_common:count_reqs(Config, graph, TokenSecretGriMatcher),
    % Request should be denied before contacting Onezone because of the
    % oneclient interface caveat
    ?assertMatch(
        ?ERROR_UNAUTHORIZED(?ERROR_TOKEN_CAVEAT_UNVERIFIED(Caveat)),
        rpc:call(Node, share_logic, delete, [TokenCredentials, ?SHARE_1])
    ),
    % Nevertheless, following requests should be made:
    % - first to verify token credentials,
    % - second to subscribe for token revocation notifications in oz,
    % - third to fetch user data to initialize userRootDir, etc.
    ?assertEqual(OdTokenGraphCalls + 1, logic_tests_common:count_reqs(Config, graph, OdTokenGriMatcher)),
    ?assertEqual(TokenSecretGraphCalls + 1, logic_tests_common:count_reqs(Config, graph, TokenSecretGriMatcher)),
    ?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, UserGriMatcher)).
%%%===================================================================
SetUp and TearDown functions
%%%===================================================================
%% Registers an env-up posthook that mocks the Graph Sync client, so the
%% suite runs without a real Onezone, and requests the helper modules to
%% be loaded on test nodes.
init_per_suite(Config) ->
    Posthook = fun(NewConfig) ->
        logic_tests_common:mock_gs_client(NewConfig),
        NewConfig
    end,
    [{?ENV_UP_POSTHOOK, Posthook}, {?LOAD_MODULES, [logic_tests_common, initializer]} | Config].
%% For get_test, additionally mock provider_logic:supports_space/3 so the
%% provider appears to support spaces 1 and 2; then fall through to the
%% common per-testcase setup.
init_per_testcase(get_test, Config) ->
    Nodes = ?config(op_worker_nodes, Config),
    % Access to cached shares depends on checking if provider supports given space
    ok = test_utils:mock_expect(Nodes, provider_logic, supports_space,
        fun(?ROOT_SESS_ID, ?DUMMY_PROVIDER_ID, Space) ->
            Space == ?SPACE_1 orelse Space == ?SPACE_2
        end),
    init_per_testcase(default, Config);
init_per_testcase(_, Config) ->
    logic_tests_common:init_per_testcase(Config).
%% No per-testcase teardown is needed - mocks persist for the suite.
end_per_testcase(_, _Config) ->
    ok.
%% Removes the Graph Sync client mock installed in init_per_suite.
end_per_suite(Config) ->
    logic_tests_common:unmock_gs_client(Config),
    ok.
%%%===================================================================
Internal functions
%%%=================================================================== | null | https://raw.githubusercontent.com/onedata/op-worker/7b0a47224e596c091169dd69aae69244abbc73b6/test_distributed/suites/graph_sync/gs_share_logic_test_SUITE.erl | erlang | --------------------------------------------------------------------
@end
--------------------------------------------------------------------
@doc
This module tests share logic API using mocked gs_client module.
@end
--------------------------------------------------------------------
export for ct
===================================================================
Test functions
===================================================================
User 3 does not belong to the share
access to share in cache
Share private data should now be cached
Make sure that provider can access cached share data
Provider must be aware of its ID to check access to cached share - this is
mocked in init_per_testcase.
Make sure that provider can access non-cached share data
Make sure that other users cannot access cached data
User 3 does not belong to the share
share in cache
All users and providers should be able to fetch public share data
when it is cached
All users and providers should be able to fetch public share data
when is is NOT cached
share in cache
Fetching rising scopes should cause an unsub and new fetch every time
When private data is cached, any scope should always be fetched from cache
share in cache
public scope
private scope
Create
Update
Delete
oneclient interface caveat
Nevertheless, following requests should be made:
- first to verify token credentials,
- second to subscribe for token revocation notifications in oz,
- third to fetch user data to initialize userRootDir, etc.
===================================================================
===================================================================
Access to cached shares depends on checking if provider supports given space
===================================================================
=================================================================== | @author
( C ) 2017 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(gs_share_logic_test_SUITE).
-author("Lukasz Opiola").
-include("logic_tests_common.hrl").
-export([all/0, init_per_suite/1, init_per_testcase/2, end_per_testcase/2, end_per_suite/1]).
-export([
get_test/1,
get_public_data_test/1,
mixed_get_test/1,
subscribe_test/1,
create_update_delete_test/1,
confined_access_token_test/1
]).
all() -> ?ALL([
get_test,
get_public_data_test,
mixed_get_test,
subscribe_test,
create_update_delete_test,
confined_access_token_test
]).
get_test(Config) ->
[Node | _] = ?config(op_worker_nodes, Config),
User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
User3Sess = logic_tests_common:get_user_session(Config, ?USER_3),
Cache space 1 and provider 1 private data , as it is required to verify
rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
rpc:call(Node, provider_logic, get, [?ROOT_SESS_ID, ?PROVIDER_1]),
ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
?assertMatch(
{ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get, [?ROOT_SESS_ID, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
?assertMatch(
{ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get, [?ROOT_SESS_ID, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
?ERROR_FORBIDDEN,
rpc:call(Node, share_logic, get, [User3Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
ok.
get_public_data_test(Config) ->
[Node | _] = ?config(op_worker_nodes, Config),
User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
User3Sess = logic_tests_common:get_user_session(Config, ?USER_3),
Cache space 1 private data , as it is required to verify access to
rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User3Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [?ROOT_SESS_ID, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User3Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [?ROOT_SESS_ID, ?SHARE_1])
),
?assertEqual(GraphCalls + 3, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
ok.
mixed_get_test(Config) ->
[Node | _] = ?config(op_worker_nodes, Config),
User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
Cache space 1 private data , as it is required to verify access to
rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
UnsubCalls = logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertEqual(UnsubCalls, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertEqual(UnsubCalls + 1, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertEqual(UnsubCalls + 1, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertEqual(UnsubCalls + 1, logic_tests_common:count_reqs(Config, unsub, ShareGriMatcher)),
ok.
subscribe_test(Config) ->
[Node | _] = ?config(op_worker_nodes, Config),
User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
Cache space 1 private data , as it is required to verify access to
rpc:call(Node, space_logic, get, [User1Sess, ?SPACE_1]),
ShareGriMatcher = #gri{type = od_share, id = ?SHARE_1, aspect = instance, _ = '_'},
GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
Simulate received updates on different scopes ( in rising order )
Share1PublicGRI = #gri{type = od_share, id = ?SHARE_1, aspect = instance, scope = public},
Share1PublicData = ?SHARE_PUBLIC_DATA_VALUE(?SHARE_1),
Share1PrivateGRI = #gri{type = od_share, id = ?SHARE_1, aspect = instance, scope = private},
Share1PrivateData = ?SHARE_PRIVATE_DATA_VALUE(?SHARE_1),
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
ChangedData1 = Share1PublicData#{
<<"revision">> => 2,
<<"name">> => <<"changedName">>
},
PushMessage1 = #gs_push_graph{gri = Share1PublicGRI, data = ChangedData1, change_type = updated},
logic_tests_common:simulate_push(Config, PushMessage1),
?assertMatch(
{ok, #document{key = ?SHARE_1, value = #od_share{
name = <<"changedName">>,
cache_state = #{revision := 2}
}}},
rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
logic_tests_common:invalidate_cache(Config, od_share, ?SHARE_1),
?assertMatch(
{ok, ?SHARE_PRIVATE_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
ChangedData2 = Share1PrivateData#{
<<"revision">> => 3,
<<"name">> => <<"changedName2">>
},
PushMessage2 = #gs_push_graph{gri = Share1PrivateGRI, data = ChangedData2, change_type = updated},
logic_tests_common:simulate_push(Config, PushMessage2),
?assertMatch(
{ok, #document{key = ?SHARE_1, value = #od_share{
name = <<"changedName2">>,
cache_state = #{revision := 3}
}}},
rpc:call(Node, share_logic, get, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
Simulate a ' deleted ' push and see if cache was invalidated
PushMessage4 = #gs_push_graph{gri = Share1PrivateGRI, change_type = deleted},
logic_tests_common:simulate_push(Config, PushMessage4),
?assertMatch(
{error, not_found},
rpc:call(Node, od_share, get_from_cache, [?SHARE_1])
),
Simulate a ' nosub ' push and see if cache was invalidated , fetch the
record first .
?assertMatch(
{ok, ?SHARE_PUBLIC_DATA_MATCHER(?SHARE_1)},
rpc:call(Node, share_logic, get_public_data, [User1Sess, ?SHARE_1])
),
PushMessage5 = #gs_push_nosub{gri = Share1PrivateGRI, reason = forbidden},
logic_tests_common:simulate_push(Config, PushMessage5),
?assertMatch(
{error, not_found},
rpc:call(Node, od_share, get_from_cache, [?SHARE_1])
),
ok.
create_update_delete_test(Config) ->
[Node | _] = ?config(op_worker_nodes, Config),
User1Sess = logic_tests_common:get_user_session(Config, ?USER_1),
ShareGriMatcher = #gri{type = od_share, aspect = instance, _ = '_'},
GraphCalls = logic_tests_common:count_reqs(Config, graph, ShareGriMatcher),
?assertMatch(
{ok, ?MOCK_CREATED_SHARE_ID},
rpc:call(Node, share_logic, create, [
User1Sess,
?MOCK_CREATED_SHARE_ID,
?SHARE_NAME(<<"newShare">>),
?SHARE_DESCRIPTION(<<"newShare">>),
?SHARE_SPACE(<<"newShare">>),
?SHARE_ROOT_FILE(<<"newShare">>),
dir
])
),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
?ERROR_BAD_VALUE_ID_NOT_FOUND(<<"spaceId">>),
rpc:call(Node, share_logic, create, [
User1Sess,
?MOCK_CREATED_SHARE_ID,
?SHARE_NAME(<<"newShare">>),
?SHARE_DESCRIPTION(<<"newShare">>),
<<"badSpaceId">>,
?SHARE_ROOT_FILE(<<"newShare">>),
dir
])
),
?assertEqual(GraphCalls + 2, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
ok,
rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{
<<"name">> => <<"newName">>,
<<"description">> => <<"New share description">>
}])
),
two requests should be done - one for update and one for force fetch
?assertEqual(GraphCalls + 4, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
?ERROR_BAD_VALUE_BINARY(<<"name">>),
rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{<<"name">> => 1234}])
),
?assertEqual(GraphCalls + 5, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
?ERROR_BAD_VALUE_BINARY(<<"description">>),
rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{<<"description">> => 87.9}])
),
?assertEqual(GraphCalls + 6, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
?ERROR_MISSING_AT_LEAST_ONE_VALUE([<<"description">>, <<"name">>]),
rpc:call(Node, share_logic, update, [User1Sess, ?SHARE_1, #{}])
),
?assertEqual(GraphCalls + 7, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
ok,
rpc:call(Node, share_logic, delete, [User1Sess, ?SHARE_1])
),
?assertEqual(GraphCalls + 8, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
?assertMatch(
?ERROR_NOT_FOUND,
rpc:call(Node, share_logic, delete, [User1Sess, <<"wrongId">>])
),
?assertEqual(GraphCalls + 9, logic_tests_common:count_reqs(Config, graph, ShareGriMatcher)),
ok.
confined_access_token_test(Config) ->
[Node | _] = ?config(op_worker_nodes, Config),
Caveat = #cv_interface{interface = oneclient},
AccessToken = initializer:create_access_token(?USER_1, [Caveat]),
TokenCredentials = auth_manager:build_token_credentials(
AccessToken, undefined,
initializer:local_ip_v4(), rest, allow_data_access_caveats
),
UserGriMatcher = #gri{type = od_user, id = ?USER_1, aspect = instance, _ = '_'},
OdTokenGriMatcher = #gri{type = od_token, aspect = verify_access_token, scope = public},
TokenSecretGriMatcher = #gri{type = temporary_token_secret, id = ?USER_1, aspect = user, scope = shared},
GraphCalls = logic_tests_common:count_reqs(Config, graph, UserGriMatcher),
OdTokenGraphCalls = logic_tests_common:count_reqs(Config, graph, OdTokenGriMatcher),
TokenSecretGraphCalls = logic_tests_common:count_reqs(Config, graph, TokenSecretGriMatcher),
Request should be denied before contacting Onezone because of the
?assertMatch(
?ERROR_UNAUTHORIZED(?ERROR_TOKEN_CAVEAT_UNVERIFIED(Caveat)),
rpc:call(Node, share_logic, delete, [TokenCredentials, ?SHARE_1])
),
?assertEqual(OdTokenGraphCalls + 1, logic_tests_common:count_reqs(Config, graph, OdTokenGriMatcher)),
?assertEqual(TokenSecretGraphCalls + 1, logic_tests_common:count_reqs(Config, graph, TokenSecretGriMatcher)),
?assertEqual(GraphCalls + 1, logic_tests_common:count_reqs(Config, graph, UserGriMatcher)).
SetUp and TearDown functions
init_per_suite(Config) ->
Posthook = fun(NewConfig) ->
logic_tests_common:mock_gs_client(NewConfig),
NewConfig
end,
[{?ENV_UP_POSTHOOK, Posthook}, {?LOAD_MODULES, [logic_tests_common, initializer]} | Config].
init_per_testcase(get_test, Config) ->
Nodes = ?config(op_worker_nodes, Config),
ok = test_utils:mock_expect(Nodes, provider_logic, supports_space,
fun(?ROOT_SESS_ID, ?DUMMY_PROVIDER_ID, Space) ->
Space == ?SPACE_1 orelse Space == ?SPACE_2
end),
init_per_testcase(default, Config);
init_per_testcase(_, Config) ->
logic_tests_common:init_per_testcase(Config).
end_per_testcase(_, _Config) ->
ok.
end_per_suite(Config) ->
logic_tests_common:unmock_gs_client(Config),
ok.
Internal functions |
1b829f5f0b97eed24afc5cdc37c56397fd84cb797ae137f7a8c9b6e4aa7890eb | graninas/Functional-Design-and-Architecture | Control.hs | # LANGUAGE ExistentialQuantification #
{-# LANGUAGE RankNTypes #-}
module Control where
import Control.Monad.Trans.Free
import qualified Control.Monad.Free as F
import ScriptingDSL
data Control a = forall b. EvalScript (Script b) (b -> a)
instance Functor Control where
fmap f (EvalScript scr g) = EvalScript scr (f . g)
type DeviceControl a = F.Free Control a
evalScript :: Script a -> DeviceControl a
evalScript scr = F.liftF (EvalScript scr id)
| null | https://raw.githubusercontent.com/graninas/Functional-Design-and-Architecture/6fd7b0b04e6f6dc8cc110b6f3a87f6dc7a1ef97d/First-Edition/BookSamples/CH04/ArrowizedDSL/Control.hs | haskell | # LANGUAGE RankNTypes # | # LANGUAGE ExistentialQuantification #
module Control where
import Control.Monad.Trans.Free
import qualified Control.Monad.Free as F
import ScriptingDSL
data Control a = forall b. EvalScript (Script b) (b -> a)
instance Functor Control where
fmap f (EvalScript scr g) = EvalScript scr (f . g)
type DeviceControl a = F.Free Control a
evalScript :: Script a -> DeviceControl a
evalScript scr = F.liftF (EvalScript scr id)
|
a2c506369b3687ce79cf0780acd7e08f88a81ffcb1c23966862d7320ce41d588 | facebook/infer | int.mli |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! NS0
include module type of Containers.Int
include module type of Stdlib.Int
type t = int [@@deriving compare, equal, sexp]
val of_string : string -> int option
val of_string_exn : string -> int
val sign : int -> Sign.t
val incr : int ref -> unit
val decr : int ref -> unit
val post_incr : int ref -> int
module Infix : sig
val ( -- ) : t -> t -> t iter
val ( --^ ) : t -> t -> t iter
include module type of NS0.Int_compare
external ( + ) : t -> t -> t = "%addint"
external ( - ) : t -> t -> t = "%subint"
external ( ~- ) : t -> t = "%negint"
external ( * ) : t -> t -> t = "%mulint"
external ( / ) : t -> t -> t = "%divint"
val ( ** ) : t -> t -> t
external ( mod ) : t -> t -> t = "%modint"
external ( land ) : t -> t -> t = "%andint"
external ( lor ) : t -> t -> t = "%orint"
external ( lxor ) : t -> t -> t = "%xorint"
val lnot : t -> t
external ( lsl ) : t -> t -> t = "%lslint"
external ( lsr ) : t -> t -> t = "%lsrint"
external ( asr ) : t -> t -> t = "%asrint"
end
include module type of Infix
module Set : NSSet.S with type elt = int
module Map : NSMap.S with type key = int
module Tbl : HashTable.S with type key = int
| null | https://raw.githubusercontent.com/facebook/infer/f1d1b105ed07c543d7596765eb2291a335513318/sledge/nonstdlib/int.mli | ocaml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! NS0
include module type of Containers.Int
include module type of Stdlib.Int
type t = int [@@deriving compare, equal, sexp]
val of_string : string -> int option
val of_string_exn : string -> int
val sign : int -> Sign.t
val incr : int ref -> unit
val decr : int ref -> unit
val post_incr : int ref -> int
module Infix : sig
val ( -- ) : t -> t -> t iter
val ( --^ ) : t -> t -> t iter
include module type of NS0.Int_compare
external ( + ) : t -> t -> t = "%addint"
external ( - ) : t -> t -> t = "%subint"
external ( ~- ) : t -> t = "%negint"
external ( * ) : t -> t -> t = "%mulint"
external ( / ) : t -> t -> t = "%divint"
val ( ** ) : t -> t -> t
external ( mod ) : t -> t -> t = "%modint"
external ( land ) : t -> t -> t = "%andint"
external ( lor ) : t -> t -> t = "%orint"
external ( lxor ) : t -> t -> t = "%xorint"
val lnot : t -> t
external ( lsl ) : t -> t -> t = "%lslint"
external ( lsr ) : t -> t -> t = "%lsrint"
external ( asr ) : t -> t -> t = "%asrint"
end
include module type of Infix
module Set : NSSet.S with type elt = int
module Map : NSMap.S with type key = int
module Tbl : HashTable.S with type key = int
| |
152c0fdd54a40216e5c7934118d38be7617c8f02e2328dd3d180a71ec452e680 | exercism/erlang | phone_number_tests.erl | -module(phone_number_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
cleans_number_test() ->
?assertEqual("1234567890", phone_number:number("(123) 456-7890")).
cleans_number_with_dots_test() ->
?assertEqual("1234567890", phone_number:number("123.456.7890")).
valid_when_eleven_digits_test() ->
?assertEqual("1234567890", phone_number:number("11234567890")).
invalid_when_eleven_digits_test() ->
?assertEqual("0000000000", phone_number:number("21234567890")).
invalid_when_nine_digits_test() ->
?assertEqual("0000000000", phone_number:number("123456789")).
area_code_test() ->
?assertEqual("123", phone_number:areacode("1234567890")).
pretty_print_test() ->
?assertEqual("(123) 456-7890", phone_number:pretty_print("1234567890")),
?assertEqual("(123) 456-7890", phone_number:pretty_print("11234567890")).
| null | https://raw.githubusercontent.com/exercism/erlang/57ac2707dae643682950715e74eb271f732e2100/exercises/practice/phone-number/test/phone_number_tests.erl | erlang | -module(phone_number_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
cleans_number_test() ->
?assertEqual("1234567890", phone_number:number("(123) 456-7890")).
cleans_number_with_dots_test() ->
?assertEqual("1234567890", phone_number:number("123.456.7890")).
valid_when_eleven_digits_test() ->
?assertEqual("1234567890", phone_number:number("11234567890")).
invalid_when_eleven_digits_test() ->
?assertEqual("0000000000", phone_number:number("21234567890")).
invalid_when_nine_digits_test() ->
?assertEqual("0000000000", phone_number:number("123456789")).
area_code_test() ->
?assertEqual("123", phone_number:areacode("1234567890")).
pretty_print_test() ->
?assertEqual("(123) 456-7890", phone_number:pretty_print("1234567890")),
?assertEqual("(123) 456-7890", phone_number:pretty_print("11234567890")).
| |
bb1f1ba1be2e37dae697218973d36cb7db31b6bd80cd063aec5405457aed5201 | Octachron/tensority | signatures.ml | module type base_operators =
sig
type 'a t
val ( + ) : 'a t -> 'a t -> 'a t
val ( - ) : 'a t -> 'a t -> 'a t
val ( |*| ) : 'a t -> 'a t -> float
val ( *. ) : float -> 'a t -> 'a t
val ( /. ) : 'a t -> float -> 'a t
val ( ~- ) : 'a t -> 'a t
end
module type vec_operators=
sig
include base_operators
val (.%()): 'a t -> 'a Nat.lt -> float
val (.%()<-): 'a t -> 'a Nat.lt -> float -> unit
end
module type matrix_specific_operators = sig
type 'a t constraint 'a = 'b * 'c
type 'a vec
val ( @ ) : ('a * 'b) t -> 'b vec -> 'a vec
val ( * ) : ('a * 'b) t -> ('b * 'c) t -> ('a * 'c) t
val ( **^ ): ('a * 'a ) t -> int -> ('a * 'a ) t
end
module type matrix_operators =
sig
include matrix_specific_operators
module Matrix_specific: matrix_specific_operators with
type 'a vec := 'a vec and type 'a t := 'a t
val ( + ) : 'a t -> 'a t -> 'a t
val ( - ) : 'a t -> 'a t -> 'a t
val ( |*| ) : 'a t -> 'a t -> float
val ( *. ) : float -> 'a t -> 'a t
val ( /. ) : 'a t -> float -> 'a t
val ( ~- ) : 'a t -> 'a t
val (.%()): ('a*'b) t -> ('a Nat.lt * 'b Nat.lt) ->
float
val (.%()<-): ('a*'b) t -> ('a Nat.lt * 'b Nat.lt) ->
float -> unit
end
module type tensor_operators =
sig
type 'a t constraint 'a = <contr:'n * 'b; cov: 'm * 'c>
type ('a,'b) matrix
val ( * ) :
< contr : 'a; cov : 'b > t ->
< contr : 'b; cov : 'c > t -> < contr : 'a; cov : 'c > t
val ( |*| ) :
< contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t -> float
val ( + ) :
< contr : 'a; cov : 'b > t ->
< contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( - ) :
< contr : 'a; cov : 'b > t ->
< contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( *. ) :
float -> < contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( /. ) :
float -> < contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( ** ) : ('a, 'a) matrix -> int -> ('a, 'a) matrix
end
exception Dimension_error of string * int * int
| null | https://raw.githubusercontent.com/Octachron/tensority/2689fba0bb9c693ef51bebe9cf92c37ab30ca17e/lib/signatures.ml | ocaml | module type base_operators =
sig
type 'a t
val ( + ) : 'a t -> 'a t -> 'a t
val ( - ) : 'a t -> 'a t -> 'a t
val ( |*| ) : 'a t -> 'a t -> float
val ( *. ) : float -> 'a t -> 'a t
val ( /. ) : 'a t -> float -> 'a t
val ( ~- ) : 'a t -> 'a t
end
module type vec_operators=
sig
include base_operators
val (.%()): 'a t -> 'a Nat.lt -> float
val (.%()<-): 'a t -> 'a Nat.lt -> float -> unit
end
module type matrix_specific_operators = sig
type 'a t constraint 'a = 'b * 'c
type 'a vec
val ( @ ) : ('a * 'b) t -> 'b vec -> 'a vec
val ( * ) : ('a * 'b) t -> ('b * 'c) t -> ('a * 'c) t
val ( **^ ): ('a * 'a ) t -> int -> ('a * 'a ) t
end
module type matrix_operators =
sig
include matrix_specific_operators
module Matrix_specific: matrix_specific_operators with
type 'a vec := 'a vec and type 'a t := 'a t
val ( + ) : 'a t -> 'a t -> 'a t
val ( - ) : 'a t -> 'a t -> 'a t
val ( |*| ) : 'a t -> 'a t -> float
val ( *. ) : float -> 'a t -> 'a t
val ( /. ) : 'a t -> float -> 'a t
val ( ~- ) : 'a t -> 'a t
val (.%()): ('a*'b) t -> ('a Nat.lt * 'b Nat.lt) ->
float
val (.%()<-): ('a*'b) t -> ('a Nat.lt * 'b Nat.lt) ->
float -> unit
end
module type tensor_operators =
sig
type 'a t constraint 'a = <contr:'n * 'b; cov: 'm * 'c>
type ('a,'b) matrix
val ( * ) :
< contr : 'a; cov : 'b > t ->
< contr : 'b; cov : 'c > t -> < contr : 'a; cov : 'c > t
val ( |*| ) :
< contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t -> float
val ( + ) :
< contr : 'a; cov : 'b > t ->
< contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( - ) :
< contr : 'a; cov : 'b > t ->
< contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( *. ) :
float -> < contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( /. ) :
float -> < contr : 'a; cov : 'b > t -> < contr : 'a; cov : 'b > t
val ( ** ) : ('a, 'a) matrix -> int -> ('a, 'a) matrix
end
exception Dimension_error of string * int * int
| |
3a9eafd3fbe5c05fd50f18825d91aba19ea1c9121691dd5cfc80a8b9aa849378 | sirherrbatka/statistical-learning | functions.lisp | (cl:in-package #:sl.som)
| null | https://raw.githubusercontent.com/sirherrbatka/statistical-learning/491a9c749f0bb09194793bc26487a10fae69dae0/source/self-organizing-map/functions.lisp | lisp | (cl:in-package #:sl.som)
| |
ec397fba8267eedf232574d3e27a3050ff1225356cf4ee9adf99198580ff628d | static-analysis-engineering/codehawk | jCHSignature.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author :
------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
(* jchlib *)
open JCHBasicTypesAPI
val make_type_variable: string -> type_variable_int
val make_simple_class_type_signature:
name:string -> type_arguments:type_argument_int list -> simple_class_type_signature_int
val make_class_type_signature:
package:string list -> enclosing_classes:simple_class_type_signature_int list ->
simple_class_type_signature:simple_class_type_signature_int -> class_type_signature_int
val make_formal_type_parameter:
name:string -> ?class_bound:field_type_signature_int ->
interface_bounds:field_type_signature_int list -> unit -> formal_type_parameter_int
val make_type_argument:
?field_type_signature:field_type_signature_int ->
kind:type_argument_kind_t -> unit -> type_argument_int
val make_throws_signature:
?class_type_signature:class_type_signature_int ->
?type_variable:type_variable_int -> kind:throws_signature_kind_t -> unit ->
throws_signature_int
val make_type_signature:
?basic_type:java_basic_type_t -> ?object_type:field_type_signature_int ->
kind:type_signature_kind_t -> unit -> type_signature_int
val make_field_type_signature:
?class_type:class_type_signature_int -> ?array_type:type_signature_int ->
?type_variable:type_variable_int -> kind:field_type_signature_kind_t ->
unit -> field_type_signature_int
val make_class_signature:
formal_type_parameters:formal_type_parameter_int list ->
super_class:class_type_signature_int ->
super_interfaces:class_type_signature_int list -> class_signature_int
val make_method_type_signature:
formal_type_parameters:formal_type_parameter_int list ->
type_signature:type_signature_int list ->
?return_type: type_signature_int ->
throws:throws_signature_int list -> unit -> method_type_signature_int
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/98ced4d5e6d7989575092df232759afc2cb851f6/CodeHawk/CHJ/jchlib/jCHSignature.mli | ocaml | jchlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author :
------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
open JCHBasicTypesAPI
val make_type_variable: string -> type_variable_int
val make_simple_class_type_signature:
name:string -> type_arguments:type_argument_int list -> simple_class_type_signature_int
val make_class_type_signature:
package:string list -> enclosing_classes:simple_class_type_signature_int list ->
simple_class_type_signature:simple_class_type_signature_int -> class_type_signature_int
val make_formal_type_parameter:
name:string -> ?class_bound:field_type_signature_int ->
interface_bounds:field_type_signature_int list -> unit -> formal_type_parameter_int
val make_type_argument:
?field_type_signature:field_type_signature_int ->
kind:type_argument_kind_t -> unit -> type_argument_int
val make_throws_signature:
?class_type_signature:class_type_signature_int ->
?type_variable:type_variable_int -> kind:throws_signature_kind_t -> unit ->
throws_signature_int
val make_type_signature:
?basic_type:java_basic_type_t -> ?object_type:field_type_signature_int ->
kind:type_signature_kind_t -> unit -> type_signature_int
val make_field_type_signature:
?class_type:class_type_signature_int -> ?array_type:type_signature_int ->
?type_variable:type_variable_int -> kind:field_type_signature_kind_t ->
unit -> field_type_signature_int
val make_class_signature:
formal_type_parameters:formal_type_parameter_int list ->
super_class:class_type_signature_int ->
super_interfaces:class_type_signature_int list -> class_signature_int
val make_method_type_signature:
formal_type_parameters:formal_type_parameter_int list ->
type_signature:type_signature_int list ->
?return_type: type_signature_int ->
throws:throws_signature_int list -> unit -> method_type_signature_int
|
ec6d78e5361c2baf520983cce64a0ede7a14c70314c41e5b6536a067ea5e3db9 | originrose/cortex | cpu_driver_test.clj | (ns cortex.compute.cpu-driver-test
(:require [cortex.compute.cpu.driver :as cpu]
[cortex.compute.driver :as drv]
[think.datatype.core :as dtype]
[think.resource.core :as resource]
[clojure.test :refer :all]
[cortex.compute.verify.utils :refer [def-all-dtype-test
def-double-float-test] :as test-utils]
[cortex.compute.verify.driver :as verify-driver]))
(use-fixtures :each test-utils/test-wrapper)
(defn driver
[]
(cpu/driver))
(def-double-float-test simple-stream
(verify-driver/simple-stream (driver) test-utils/*datatype*))
| null | https://raw.githubusercontent.com/originrose/cortex/94b1430538e6187f3dfd1697c36ff2c62b475901/test/clj/cortex/compute/cpu_driver_test.clj | clojure | (ns cortex.compute.cpu-driver-test
(:require [cortex.compute.cpu.driver :as cpu]
[cortex.compute.driver :as drv]
[think.datatype.core :as dtype]
[think.resource.core :as resource]
[clojure.test :refer :all]
[cortex.compute.verify.utils :refer [def-all-dtype-test
def-double-float-test] :as test-utils]
[cortex.compute.verify.driver :as verify-driver]))
(use-fixtures :each test-utils/test-wrapper)
(defn driver
[]
(cpu/driver))
(def-double-float-test simple-stream
(verify-driver/simple-stream (driver) test-utils/*datatype*))
| |
9c893c830840cd340c33e907be4bf6d8fc651d7b14bcc4fd7da14473d4950ed7 | argp/bap | batIO.mli |
* BatIO - Abstract input / output
* Copyright ( C ) 2003
* 2008 ( contributor )
* 2008 ( contributor )
* 2008 ( contributor )
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* BatIO - Abstract input/output
* Copyright (C) 2003 Nicolas Cannasse
* 2008 David Teller (contributor)
* 2008 Philippe Strauss (contributor)
* 2008 Edgar Friendly (contributor)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* High - order abstract I / O.
This module deals with { ! type : input}s and { ! type :
output}s . Inputs are manners of getting information from the
outside world and into your program ( for instance , reading from
the network , from a file , etc . ) Outputs are manners of getting
information out from your program and into the outside world ( for
instance , sending something onto the network , onto a file , etc . )
In other words , if you are looking for a way to modify files , read
from the network , etc . , you 're in the right place .
To perform I / O , you first need to { e open } your { ! type : input } or
your { ! type : output } . Chances are that there is an { e opening }
operation for this task . Note that most opening operations are
defined in their respective module . Operations for opening files
are defined in module { ! File } , operations for opening
communications with the network or with other processes are
defined in module { ! Unix } . Opening operations related to
compression and decompression are defined in module { ! Compress } ,
etc .
Once you have opened an { ! type : input } , you may read the data it
contains by using functions such as { ! read } ( to read one
character ) , { ! } or { ! : input } ( to read one string ) or one
of the [ read _ * ] functions . If you need not one information but a
complete enumeration , for instance for processing many information
before writing them , you may also convert the input into an
enumeration , by using one of the [ * s_of ] functions .
Once you have opened an { ! type : output } , you may write data to
this output by using functions scuh as { ! write } ( to write one
char ) , { ! } or { ! : output } ( to write one string ) or one of
the [ write _ * ] functions . If you have not just one piece of data
but a complete enumeration , you may write this whole enumeration
to the output by using one of the [ write_*s ] functions . Note that
most operations on output are said to be { e buffered } . This means
that small writing operations may be automatically delayed and
grouped into large writing operations , as these are generally
faster and induce less wear on the hardware . Occasionally , you
may wish to force all waiting operations to take place { e now } .
For this purpose , you may either function { ! flush } or function
I { ! flush_out } .
Once you have finished using your { ! type : input } or your { ! type :
output } , chances are that you will want to close it . This is not a
strict necessity , as OCaml will eventually close it for you when
it detects that you have no more need of that { ! type :
input}/{!type : output } , but this is generally a good policy , as
this will let other programs access the resources which are
currently allocated to that { ! type : input}/{!type : output } --
typically , under Windows , if you are reading the contents of a
file from a program , no other program may read the contents of
that file simultaneously and you may also not rename or move the
file to another directory . To close an { ! type : input } , use
function { ! } and to close an { ! type : output } , use function
{ ! close_out } .
{ b Note } Some { ! type : input}s are built on top of other
{ ! type : input}s to provide transparent translations ( e.g.
on - the - fly decompression of a file or network information ) and
that some { ! type : output}s are built on top of other
{ ! type : output}s for the same purpose ( e.g. on - the - fly compression
of a file or network information ) . In this case , closing the
" outer " { ! type : input}/{!type : output } ( e.g. the
decompressor / compressor ) will { e not } close the " inner "
{ ! type : input}/{!type : output } ( e.g. access to the file or to the
network ) . You will need to close the " inner "
{ ! type : input}/{!type : output } , which will automatically flush
the outer { ! type : input}/{!type : output } and close it .
@author @author @author @author
@documents BatInnerIO
This module deals with {!type: input}s and {!type:
output}s. Inputs are manners of getting information from the
outside world and into your program (for instance, reading from
the network, from a file, etc.) Outputs are manners of getting
information out from your program and into the outside world (for
instance, sending something onto the network, onto a file, etc.)
In other words, if you are looking for a way to modify files, read
from the network, etc., you're in the right place.
To perform I/O, you first need to {e open} your {!type: input} or
your {!type: output}. Chances are that there is an {e opening}
operation for this task. Note that most opening operations are
defined in their respective module. Operations for opening files
are defined in module {!File}, operations for opening
communications with the network or with other processes are
defined in module {!Unix}. Opening operations related to
compression and decompression are defined in module {!Compress},
etc.
Once you have opened an {!type: input}, you may read the data it
contains by using functions such as {!read} (to read one
character), {!nread} or {!val: input} (to read one string) or one
of the [read_*] functions. If you need not one information but a
complete enumeration, for instance for processing many information
before writing them, you may also convert the input into an
enumeration, by using one of the [*s_of] functions.
Once you have opened an {!type: output}, you may write data to
this output by using functions scuh as {!write} (to write one
char), {!nwrite} or {!val: output} (to write one string) or one of
the [write_*] functions. If you have not just one piece of data
but a complete enumeration, you may write this whole enumeration
to the output by using one of the [write_*s] functions. Note that
most operations on output are said to be {e buffered}. This means
that small writing operations may be automatically delayed and
grouped into large writing operations, as these are generally
faster and induce less wear on the hardware. Occasionally, you
may wish to force all waiting operations to take place {e now}.
For this purpose, you may either function {!flush} or function
I {!flush_out}.
Once you have finished using your {!type: input} or your {!type:
output}, chances are that you will want to close it. This is not a
strict necessity, as OCaml will eventually close it for you when
it detects that you have no more need of that {!type:
input}/{!type: output}, but this is generally a good policy, as
this will let other programs access the resources which are
currently allocated to that {!type:input}/{!type:output} --
typically, under Windows, if you are reading the contents of a
file from a program, no other program may read the contents of
that file simultaneously and you may also not rename or move the
file to another directory. To close an {!type: input}, use
function {!close_in} and to close an {!type: output}, use function
{!close_out}.
{b Note} Some {!type:input}s are built on top of other
{!type:input}s to provide transparent translations (e.g.
on-the-fly decompression of a file or network information) and
that some {!type:output}s are built on top of other
{!type:output}s for the same purpose (e.g. on-the-fly compression
of a file or network information). In this case, closing the
"outer" {!type:input}/{!type:output} (e.g. the
decompressor/compressor) will {e not} close the "inner"
{!type:input}/{!type:output} (e.g. access to the file or to the
network). You will need to close the "inner"
{!type:input}/{!type:output}, which will automatically flush
the outer {!type:input}/{!type:output} and close it.
@author Nicolas Cannasse
@author David Teller
@author Philippe Strauss
@author Edgar Friendly
@documents BatInnerIO
*)
open BatInnerIO
type input = BatInnerIO.input
(** The abstract input type. *)
type 'a output = 'a BatInnerIO.output
(** The abstract output type, ['a] is the accumulator data, it is returned
when the [close_out] function is called. *)
type ('a, 'b) printer = 'b output -> 'a -> unit
(** The type of a printing function to print a ['a] to an output that
produces ['b] as result. *)
type 'a f_printer = Format.formatter -> 'a -> unit
exception No_more_input
* This exception is raised when reading on an input with the [ read ] or
[ nread ] functions while there is no available token to read .
[nread] functions while there is no available token to read. *)
exception Input_closed
(** This exception is raised when reading on a closed input. *)
exception Output_closed
(** This exception is raised when reading on a closed output. *)
* { 6 Standard inputs / outputs }
val stdin : input
* Standard input , as per Unix / Windows conventions ( by default , keyboard ) .
Example : [ if read_line stdin | > Int.of_string > 10 then failwith " too big a number read " ; ]
Example: [if read_line stdin |> Int.of_string > 10 then failwith "too big a number read"; ]
*)
val stdout: unit output
* Standard output , as per Unix / Windows conventions ( by default , console ) .
Use this output to display regular messages .
Example : [
write_string stdout " Enter your name : " ;
let name = read_line stdin in
write_line stdout ( " Your name is " ^ name ) ;
]
Use this output to display regular messages.
Example: [
write_string stdout "Enter your name:";
let name = read_line stdin in
write_line stdout ("Your name is " ^ name);
]
*)
val stderr: unit output
(** Standard error output, as per Unix/Windows conventions.
Use this output to display warnings and error messages.
Example: [
write_line stderr "Error on Internet - please delete google.com";
]
*)
val stdnull: unit output
(** An output which discards everything written to it.

    Use this output to ignore messages.
    Example: [
    let out_ch = if debug then stderr else stdnull in
    write_line out_ch "Program running.";
    ]
*)
(** {6 Standard API} *)
val read : input -> char
(** Read a single char from an input or raise [No_more_input] if
no input is available.
Example: [let rec skip_line ch = if read ch = '\n' then skip_line ch else ();]
*)
val nread : input -> int -> string
(** [nread i n] reads a string of size up to [n] from an input.

    The function will raise [No_more_input] if no input is available.
    It will raise [Invalid_argument] if [n] < 0.
    Example: [let read_md5 ch = nread ch 32]
*)
val really_nread : input -> int -> string
(** [really_nread i n] reads a string of exactly [n] characters
from the input. @raise No_more_input if at least [n] characters are
not available. @raise Invalid_argument if [n] < 0.
Example: [let read_md5 ch = really_nread ch 32]
*)
val input : input -> string -> int -> int -> int
(** [input i s p l] reads up to [l] characters from the given input,
storing them in string [s], starting at character number [p]. It
returns the actual number of characters read (which may be 0) or
raise [No_more_input] if no character can be read. It will raise
[Invalid_argument] if [p] and [l] do not designate a valid
substring of [s].
Example: [let map_ch f ?(block_size=100) =
let b = String.create block_size in
try while true do
let l = input ch b 0 block_size in
f b 0 l;
done with No_more_input -> ()]
*)
val really_input : input -> string -> int -> int -> int
(** [really_input i s p l] reads exactly [l] characters from the
    given input, storing them in the string [s], starting at
    position [p]. For consistency with {!BatIO.input} it returns
    [l]. @raise No_more_input if at [l] characters are not
    available. @raise Invalid_argument if [p] and [l] do not
    designate a valid substring of [s].

    Example: [let _ = really_input stdin b 0 3]
*)
val close_in : input -> unit
(** Close the input. It can no longer be read from.

    Example: [close_in network_in;]
*)
val write : (char, _) printer
(** Write a single char to an output.
Example: [write stdout 'x';]
*)
val nwrite : (string, _) printer
(** Write a string to an output.
Example: [nwrite stdout "Enter your name: ";]
*)
val output : 'a output -> string -> int -> int -> int
(** [output o s p l] writes up to [l] characters from string [s], starting at
    offset [p]. It returns the number of characters written. It will raise
    [Invalid_argument] if [p] and [l] do not designate a valid substring of [s].

    Example: [let str = "Foo Bar Baz" in let written = output stdout str 2 4;]
    This writes "o Ba" to stdout.
*)
val really_output : 'a output -> string -> int -> int -> int
(** [really_output o s p l] writes exactly [l] characters from string [s] onto
the the output, starting with the character at offset [p]. For consistency with
{!BatIO.output} it returns [l]. @raise Invalid_argument if [p] and [l] do not
designate a valid substring of [s].
This function is useful for networking situations where the output
buffer might fill resulting in not the entire substring being
readied for transmission. Uses [output] internally, and will
raise [Sys_blocked_io] in the case that any call returns 0.
*)
val flush : 'a output -> unit
(** Flush an output.
If previous write operations have caused errors, this may trigger an exception.
Example: [flush stdout;]
*)
val flush_all : unit -> unit
(** Flush all outputs, ignore errors.
Example: [flush_all ();]
*)
val close_out : 'a output -> 'a
(** Close the output and return its accumulator data.

    The output is flushed before being closed and can no longer be
    written. Attempting to flush or write after the output has been
    closed will have no effect.

    Example: [
    let strout = output_string () in
    write strout 'x';
    if 2+3>5 then write strout "y";
    print_string (close_out strout) ]
*)
(**/**)
val close_all : unit -> unit
(** Close all outputs.

    Ignore errors. Automatically called at the end of your program.
    You probably should never use it manually, as it also closes
    [stdout], [stderr], [stdnull].

    Example: [close_all ();]
*)
(**/**)
(** {6 Creation of BatIO Inputs/Outputs}

    To open a file for reading/writing, see {!File.open_in}
    and {!File.open_file_out}*)
val input_string : string -> input
(** Create an input that will read from a string.

    Example: [
    let inch = input_string "1234554321" in
    let str1 = nread inch 3 in (* "123" *)
    let str2 = nread inch 5 in (* "45543" *)
    let str3 = nread inch 2 in (* "21" *)
    try string_of_char(read inch) with BatIO.No_more_input -> "End of string";
    ]
*)
val output_string : unit -> string output
(** Create an output that will write into a string in an efficient way.
When closed, the output returns all the data written into it. *)
val input_enum : char BatEnum.t -> input
(** Create an input that will read from an [enum]. *)
val output_enum : unit -> char BatEnum.t output
(** Create an output that will write into an [enum]. The
final enum is returned when the output is closed. *)
val combine : ('a output * 'b output) -> ('a * 'b) output
(** [combine (a,b)] creates a new [output] [c] such that
writing to [c] will actually write to both [a] and [b] *)
val tab_out : ?tab:char -> int -> 'a output -> unit output
(** Create an output shifted to the right by a number of spaces
(or other character as specified by [tab]).
[tab_out n out] produces a new output for writing into [out], in
which every new line starts with [n] spaces.
@raise Invalid_argument if [n] < 0.
Closing [tab_out n out] does not close [out]. Rather,
closing [out] closes [tab_out n out].
*)
(*val repeat: int -> 'a output -> unit output
  (** [repeat n out] create an output in which every character or string is repeated
      [n] times to [out].*)*)
(** {6 Utilities} *)
val read_all : input -> string
(** read all the contents of the input until [No_more_input] is raised. *)
val pipe : unit -> input * unit output
(** Create a pipe between an input and an ouput. Data written from
the output can be read from the input.
*)
val copy : ?buffer:int -> input -> _ output -> unit
(** Read everything from an input and copy it to an output.

    @param buffer The size of the buffer to use for copying, in
    bytes. By default, this is 4,096b.
*)
val pos_in : input -> input * (unit -> int)
(** Create an input that provide a count function of the number of bytes
read from it. *)
val progress_in : input -> (unit -> unit) -> input
(** [progress_in inp f] create an input that calls [f ()]
    whenever some content is succesfully read from it.*)
val pos_out : 'a output -> unit output * (unit -> int)
(** Create an output that provide a count function of the number of bytes
written through it. *)
val progress_out : 'a output -> (unit -> unit) -> unit output
(** [progress_out out f] create an output that calls [f ()]
whenever some content is succesfully written to it.*)
external cast_output : 'a output -> unit output = "%identity"
(** You can safely transform any output to an unit output in a safe way
by using this function. *)
(** {6 Binary files API}

    Here is some API useful for working with binary files, in particular
    binary files generated by C applications. By default, encoding of
    multibyte integers is low-endian. The {!BigEndian} module provide multibyte
    operations with other encoding.
*)
exception Overflow of string
(** Exception raised when a read or write operation cannot be completed. *)
val read_byte : input -> int
(** Read an unsigned 8-bit integer. *)
val read_signed_byte : input -> int
(** Read a signed 8-bit integer. *)
val read_ui16 : input -> int
(** Read an unsigned 16-bit word. *)
val read_i16 : input -> int
(** Read a signed 16-bit word. *)
val read_i32 : input -> int
(** Read a signed 32-bit integer. @raise Overflow if the
    read integer cannot be represented as an OCaml 31-bit integer. *)
val read_real_i32 : input -> int32
(** Read a signed 32-bit integer as an OCaml int32. *)
val read_i64 : input -> int64
(** Read a signed 64-bit integer as an OCaml int64. *)
val read_float : input -> float
(** Read an IEEE single precision floating point value. *)
val read_double : input -> float
(** Read an IEEE double precision floating point value. *)
val read_string : input -> string
(** Read a null-terminated string. *)
val read_line : input -> string
(** Read a LF or CRLF terminated string. If the source runs out of
    input before a LF is found, returns a string of the remaining input.

    Will raise [No_more_input] only if no characters are available. *)
val write_byte : (int, _) printer
(** Write an unsigned 8-bit byte. *)
val write_ui16 : (int, _) printer
(** Write an unsigned 16-bit word. *)
val write_i16 : (int, _) printer
(** Write a signed 16-bit word. *)
val write_i32 : (int, _) printer
(** Write a signed 32-bit integer. *)
val write_real_i32 : (int32, _) printer
(** Write an OCaml int32. *)
val write_i64 : (int64, _) printer
(** Write an OCaml int64. *)
val write_double : (float, _) printer
(** Write an IEEE double precision floating point value. *)
val write_float : (float, _) printer
(** Write an IEEE single precision floating point value. *)
val write_string : (string, _) printer
(** Write a string and append an null character. *)
val write_line : (string, _) printer
(** Write a line and append a line end.

    This adds the correct line end for your operating system. That
    is, if you are writing to a file and your system imposes that
    files should end lines with character LF (or ['\n']), as Unix,
    then a LF is inserted at the end of the line. If your system
    favors CRLF (or ['\r\n']), then this is what will be inserted.*)
(** Same operations as module {!BatIO}, but with big-endian encoding *)
module BigEndian :
sig
(** This module redefines the operations of module {!BatIO} which behave
    differently on big-endian [input]s/[output]s.

    Generally, to use this module you will wish to either open both
    {!BatIO} and {!BigEndian}, so as to import a big-endian version of
    {!BatIO}, as per
    [open System.BatIO, BigEndian in ...],
    or to redefine locally {!BatIO} to use big-endian encodings
    [module BatIO = System.BatIO include BigEndian]
*)
val read_ui16 : input -> int
* Read an unsigned 16 - bit word .
val read_i16 : input -> int
* Read a signed 16 - bit word .
val read_i32 : input -> int
* Read a signed 32 - bit integer . @raise Overflow if the
read integer can not be represented as an OCaml 31 - bit integer .
read integer cannot be represented as an OCaml 31-bit integer. *)
val read_real_i32 : input -> int32
* Read a signed 32 - bit integer as an OCaml int32 .
val read_i64 : input -> int64
* Read a signed 64 - bit integer as an OCaml int64 .
val read_double : input -> float
(** Read an IEEE double precision floating point value. *)
val read_float: input -> float
* Read an IEEE single precision floating point value .
val write_ui16 : (int, _) printer
* Write an unsigned 16 - bit word .
val write_i16 : (int, _) printer
* Write a signed 16 - bit word .
val write_i32 : (int, _) printer
* Write a signed 32 - bit integer .
val write_real_i32 : (int32, _) printer
(** Write an OCaml int32. *)
val write_i64 : (int64, _) printer
* Write an OCaml int64 .
val write_double : (float, _) printer
(** Write an IEEE double precision floating point value. *)
val write_float : (float, _) printer
* Write an IEEE single precision floating point value .
val ui16s_of : input -> int BatEnum.t
* Read an enumeration of unsigned 16 - bit words .
val i16s_of : input -> int BatEnum.t
* Read an enumartion of signed 16 - bit words .
val i32s_of : input -> int BatEnum.t
* Read an enumeration of signed 32 - bit integers .
@raise Overflow if the read integer can not be represented as an OCaml
31 - bit integer .
@raise Overflow if the read integer cannot be represented as an OCaml
31-bit integer. *)
val real_i32s_of : input -> int32 BatEnum.t
* Read an enumeration of signed 32 - bit integers as OCaml [ int32]s .
val i64s_of : input -> int64 BatEnum.t
* Read an enumeration of signed 64 - bit integers as OCaml [ int64]s .
val doubles_of : input -> float BatEnum.t
* Read an enumeration of IEEE double precision floating point values .
val floats_of : input -> float BatEnum.t
* Read an enumeration of IEEE single precision floating point values .
end
(** {6 Bits API}

    This enable you to read and write from an BatIO bit-by-bit or several bits
    at the same time.
*)
type in_bits
type out_bits
exception Bits_error
val input_bits : input -> in_bits
(** Read bits from an input *)
val output_bits : 'a output -> out_bits
(** Write bits to an output *)
val read_bits : in_bits -> int -> int
(** Read up to 31 bits, raise Bits_error if n < 0 or n > 31 *)
val write_bits : out_bits -> nbits:int -> int -> unit
(** Write up to 31 bits represented as a value, raise Bits_error if nbits < 0
    or nbits > 31 or the value representation excess nbits. *)
val flush_bits : out_bits -> unit
(** Flush remaining unwritten bits, adding up to 7 bits which values 0. *)
val drop_bits : in_bits -> unit
(** Drop up to 7 buffered bits and restart to next input character. *)
(**
   {6 Creating new types of inputs/outputs}
*)
val create_in :
read:(unit -> char) ->
input:(string -> int -> int -> int) ->
close:(unit -> unit) -> input
(** Fully create an input by giving all the needed functions.

    {b Note} Do {e not} use this function for creating an input
    which reads from one or more underlying inputs. Rather, use
    {!wrap_in}.
*)
val wrap_in :
read:(unit -> char) ->
input:(string -> int -> int -> int) ->
close:(unit -> unit) ->
underlying:(input list) ->
input
(** Fully create an input reading from other inputs by giving all
the needed functions.
This function is a more general version of {!create_in}
which also handles dependency management between inputs.
{b Note} When you create an input which reads from another
input, function [close] should {e not} close the inputs of
[underlying]. Doing so is a common error, which could result
in inadvertently closing {!stdin} or a network socket, etc.
*)
val inherit_in:
?read:(unit -> char) ->
?input:(string -> int -> int -> int) ->
?close:(unit -> unit) ->
input -> input
(** Simplified and optimized version of {!wrap_in} which may be used
    whenever only one input appears as dependency.

    [inherit_in inp] will return an input identical to [inp].
    [inherit_in ~read inp] will return an input identical to
    [inp] except for method [read], etc.

    You do not need to close [inp] in [close].
*)
val create_out :
write:(char -> unit) ->
output:(string -> int -> int -> int) ->
flush:(unit -> unit) ->
close:(unit -> 'a) ->
'a output
(**
   Fully create an output by giving all the needed functions.

   @param write  Write one character to the output (see {!write}).
   @param output Write a (sub)string to the output (see {!output}).
   @param flush  Flush any buffers of this output  (see {!flush}).
   @param close  Close this output. The output will be automatically
   flushed.

   {b Note} Do {e not} use this function for creating an output which
   writes to one or more underlying outputs. Rather, use {!wrap_out}.
*)
val wrap_out :
write:(char -> unit) ->
output:(string -> int -> int -> int) ->
flush:(unit -> unit) ->
close:(unit -> 'a) ->
underlying:('b output list) ->
'a output
(**
   Fully create an output that writes to one or more underlying outputs.

   This function is a more general version of {!create_out},
   which also handles dependency management between outputs.

   To illustrate the need for dependency management, let us consider
   the following values:
   - an output [out]
   - a function [f : _ output -> _ output], using {!create_out} to
   create a new output for writing some data to an underyling
   output (for instance, a function comparale to {!tab_out} or a
   function performing transparent compression or transparent
   traduction between encodings)

   With these values, let us consider the following scenario
   - a new output [f out] is created
   - some data is written to [f out] but not flushed
   - output [out] is closed, perhaps manually or as a consequence
   of garbage-collection, or because the program has ended
   - data written to [f out] is flushed.

   In this case, data reaches [out] only after [out] has been closed.
   Despite appearances, it is quite easy to reach such situation,
   especially in short programs.

   If, instead, [f] uses [wrap_out], then when output [out] is closed,
   [f out] is first automatically flushed and closed, which avoids the
   issue.

   @param write Write one character to the output (see {!write}).
   @param output Write a (sub)string to the output (see {!output}).
   @param flush Flush any buffers of this output (see {!flush}).
   @param close Close this output. The output will be automatically
   flushed.
   @param underlying The list of outputs to which the new output will
   write.

   {b Note} Function [close] should {e not} close [underlying]
   yourself. This is a common mistake which may cause sockets or
   standard output to be closed while they are still being used by
   another part of the program.
*)
val inherit_out:
?write:(char -> unit) ->
?output:(string -> int -> int -> int) ->
?flush:(unit -> unit) ->
?close:(unit -> unit) ->
'a output -> unit output
(**
   Simplified and optimized version of {!wrap_out} whenever only
   one output appears as dependency.

   [inherit_out out] will return an output identical to [out].
   [inherit_out ~write out] will return an output identical to
   [out] except for its [write] method, etc.

   You do not need to close [out] in [close].
*)
(**
   {6 For compatibility purposes}
*)
val input_channel : ?autoclose:bool -> ?cleanup:bool -> in_channel -> input
(** Create an input that will read from a channel.
@param autoclose If true or unspecified, the {!type: input}
will be automatically closed when the underlying [in_channel]
has reached its end.
@param cleanup If true, the channel
will be automatically closed when the {!type: input} is closed.
Otherwise, you will need to close the channel manually.
*)
val output_channel : ?cleanup:bool -> out_channel -> unit output
(** Create an output that will write into a channel.
@param cleanup If true, the channel
will be automatically closed when the {!type: output} is closed.
Otherwise, you will need to close the channel manually.
*)
val to_input_channel : input -> in_channel
(** Create a channel that will read from an input.
{b Note} This function is extremely costly and is provided
essentially for debugging purposes or for reusing legacy
libraries which can't be adapted. As a general rule, if
you can avoid using this function, don't use it.*)
(** {6 Generic BatIO Object Wrappers}

    Theses OO Wrappers have been written to provide easy support of
    BatIO by external librairies. If you want your library to support
    BatIO without actually requiring Batteries to compile, you can
    should implement the classes [in_channel], [out_channel],
    [poly_in_channel] and/or [poly_out_channel] which are the common
    BatIO specifications established for ExtLib, OCamlNet and
    Camomile.

    (see http://www.ocaml-programming.de/tmp/BatIO-Classes.html for more details).

    {b Note} In this version of Batteries Included, the object wrappers are {e not}
    closed automatically by garbage-collection.
*)
class in_channel : input ->
object
method input : string -> int -> int -> int
method close_in : unit -> unit
end
class out_channel : 'a output ->
object
method output : string -> int -> int -> int
method flush : unit -> unit
method close_out : unit -> unit
end
class in_chars : input ->
object
method get : unit -> char
method close_in : unit -> unit
end
class out_chars : 'a output ->
object
method put : char -> unit
method flush : unit -> unit
method close_out : unit -> unit
end
val from_in_channel : #in_channel -> input
val from_out_channel : #out_channel -> unit output
val from_in_chars : #in_chars -> input
val from_out_chars : #out_chars -> unit output
(** {6 Enumeration API} *)
val bytes_of : input -> int BatEnum.t
(** Read an enumeration of unsigned 8-bit integers. *)
val signed_bytes_of : input -> int BatEnum.t
(** Read an enumeration of signed 8-bit integers. *)
val ui16s_of : input -> int BatEnum.t
(** Read an enumeration of unsigned 16-bit words. *)
val i16s_of : input -> int BatEnum.t
(** Read an enumeration of signed 16-bit words. *)
val i32s_of : input -> int BatEnum.t
(** Read an enumeration of signed 32-bit integers. @raise Overflow if the
    read integer cannot be represented as an OCaml 31-bit integer. *)
val real_i32s_of : input -> int32 BatEnum.t
(** Read an enumeration of signed 32-bit integers as OCaml [int32]s. *)
val i64s_of : input -> int64 BatEnum.t
(** Read an enumeration of signed 64-bit integers as OCaml [int64]s. *)
val doubles_of : input -> float BatEnum.t
(** Read an enumeration of IEEE double precision floating point values. *)
val floats_of : input -> float BatEnum.t
(** Read an enumeration of IEEE single precision floating point values. *)
val strings_of : input -> string BatEnum.t
(** Read an enumeration of null-terminated strings. *)
val lines_of : input -> string BatEnum.t
(** Read an enumeration of LF or CRLF terminated strings. *)
val lines_of2 : input -> string BatEnum.t
val chunks_of : int -> input -> string BatEnum.t
(** Read an input as an enumeration of strings of given length. If the input isn't a multiple of that length, the final string will be smaller than the rest. *)
val chars_of : input -> char BatEnum.t
(** Read an enumeration of Latin-1 characters.
{b Note} Usually faster than calling [read] several times.*)
val bits_of : in_bits -> int BatEnum.t
(** Read an enumeration of bits *)
val write_bitss : nbits:int -> out_bits -> int BatEnum.t -> unit
(** Write an enumeration of bits*)
val default_buffer_size : int
(**The default size for internal buffers.*)
(**
   {6 Thread-safety}
*)
val synchronize_in : ?lock:BatConcurrent.lock -> input -> input
(** [synchronize_in inp] produces a new {!type: input} which reads from [input]
    in a thread-safe way. In other words, a lock prevents two distinct threads
    from reading from that input simultaneously, something which would potentially
    wreak havoc otherwise

    @param lock An optional lock. If none is provided, the lock will be specific
    to this [input]. Specifiying a custom lock may be useful to associate one
    common lock for several inputs and/or outputs, for instance in the case
    of pipes.
*)
val synchronize_out: ?lock:BatConcurrent.lock -> _ output -> unit output
(** [synchronize_out out] produces a new {!type: output} which writes to [output]
    in a thread-safe way. In other words, a lock prevents two distinct threads
    from writing to that output simultaneously, something which would potentially
    wreak havoc otherwise

    @param lock An optional lock. If none is provided, the lock will be specific
    to this [output]. Specifiying a custom lock may be useful to associate one
    common lock for several inputs and/or outputs, for instance in the case
    of pipes.
*)
(**
   {6 Thread-safety internals}

   Unless you are attempting to adapt Batteries Included to a new model of
   concurrency, you probably won't need this.
*)
val lock: BatConcurrent.lock ref
(**
   A lock used to synchronize internal operations.

   By default, this is {!BatConcurrent.nolock}. However, if you're
   using a version of Batteries compiled in threaded mode, this uses
   {!BatMutex}. If you're attempting to use Batteries with another
   concurrency model, set the lock appropriately.
*)
val lock_factory: (unit -> BatConcurrent.lock) ref
(**
   A factory used to create locks. This is used transparently by {!synchronize_in}
   and {!synchronize_out}.

   By default, this always returns {!BatConcurrent.nolock}. However,
   if you're using a version of Batteries compiled in threaded mode,
   this uses {!BatMutex}. *)
val to_string : ('a, string) printer -> 'a -> string
val to_f_printer: ('a, _) printer -> 'a f_printer
(**/**)
val comb : ('a output * 'a output) -> 'a output
(** Old name of [combine]*)
val make_enum : (input -> 'a) -> input -> 'a BatEnum.t
(**
   {6 Debugging facilities}
*)
val get_output_id : _ output -> int
val get_input_id : input -> int
module Incubator : sig
(** {6 Format-based pretty-printing} *)
module Array : sig
val pp :
?flush:bool ->
?first:string ->
?last:string ->
?sep:string ->
?indent:int ->
(Format.formatter -> 'a -> 'b) -> Format.formatter -> 'a array -> unit
* Print the contents of an array , with [ first ] preceeding the first item
( default : [ " \[| " ] ) , [ last ] following the last item ( default : [ " |\ ] " ] )
and [ sep ] separating items ( default : [ " ; " ] ) . A printing function must
be provided to print the items in the array . The [ flush ] parameter
( default : [ false ] ) should be set to [ true ] for the outer - most printing
call . Setting inner calls to [ true ] - for example , for nested values -
prevent indentation from working properly .
Example :
[ pp ~flush : true Format.pp_print_int Format.std_formatter \[|1 ; 2 ; 3|\ ] ]
(default: ["\[|"]), [last] following the last item (default: ["|\]"])
and [sep] separating items (default: ["; "]). A printing function must
be provided to print the items in the array. The [flush] parameter
(default: [false]) should be set to [true] for the outer-most printing
call. Setting inner calls to [true] - for example, for nested values -
prevent indentation from working properly.
Example:
[pp ~flush:true Format.pp_print_int Format.std_formatter \[|1; 2; 3|\]]
*)
end
module Enum : sig
val pp :
?flush:bool ->
?first:string ->
?last:string ->
?sep:string ->
?indent:int ->
(Format.formatter -> 'a -> 'b) -> Format.formatter -> 'a BatEnum.t -> unit
* Print the contents of an enum , with [ first ] preceeding the first item
( default : [ " " ] ) , [ last ] following the last item ( default : [ " " ] )
and [ sep ] separating items ( default : [ " " ] ) . A printing function must
be provided to print the items in the enum . The [ flush ] parameter
( default : [ false ] ) should be set to [ true ] for the outer - most printing
call . Setting inner calls to [ true ] - for example , for nested values -
prevent indentation from working properly .
Example :
[ pp ~flush : true Format.pp_print_int Format.std_formatter ( 1 -- 3 ) ]
(default: [""]), [last] following the last item (default: [""])
and [sep] separating items (default: [" "]). A printing function must
be provided to print the items in the enum. The [flush] parameter
(default: [false]) should be set to [true] for the outer-most printing
call. Setting inner calls to [true] - for example, for nested values -
prevent indentation from working properly.
Example:
[pp ~flush:true Format.pp_print_int Format.std_formatter (1 -- 3)] *)
end
module List : sig
val pp :
?flush:bool ->
?first:string ->
?last:string ->
?sep:string ->
?indent:int ->
(Format.formatter -> 'a -> 'b) -> Format.formatter -> 'a list -> unit
* Print the contents of a list , with [ first ] preceeding the first item
( default : [ " \ [ " ] ) , [ last ] following the last item ( default : [ " \ ] " ] )
and [ sep ] separating items ( default : [ " ; " ] ) . A printing function must
be provided to print the items in the list . The [ flush ] parameter
( default : [ false ] ) should be set to [ true ] for the outer - most printing
call . Setting inner calls to [ true ] - for example , for nested values -
prevent indentation from working properly .
Example :
[ pp ~flush : true Format.pp_print_int Format.std_formatter \[1 ; 2 ; 3\ ] ]
(default: ["\["]), [last] following the last item (default: ["\]"])
and [sep] separating items (default: ["; "]). A printing function must
be provided to print the items in the list. The [flush] parameter
(default: [false]) should be set to [true] for the outer-most printing
call. Setting inner calls to [true] - for example, for nested values -
prevent indentation from working properly.
Example:
[pp ~flush:true Format.pp_print_int Format.std_formatter \[1; 2; 3\]]
*)
end
end
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/src/batIO.mli | ocaml | * The abstract input type.
* The abstract output type, ['a] is the accumulator data, it is returned
when the [close_out] function is called.
* The type of a printing function to print a ['a] to an output that
produces ['b] as result.
* This exception is raised when reading on a closed input.
* This exception is raised when reading on a closed output.
* Standard error output, as per Unix/Windows conventions.
Use this output to display warnings and error messages.
Example: [
write_line stderr "Error on Internet - please delete google.com";
]
* Read a single char from an input or raise [No_more_input] if
no input is available.
Example: [let rec skip_line ch = if read ch = '\n' then skip_line ch else ();]
* [really_nread i n] reads a string of exactly [n] characters
from the input. @raise No_more_input if at least [n] characters are
not available. @raise Invalid_argument if [n] < 0.
Example: [let read_md5 ch = really_nread ch 32]
* [input i s p l] reads up to [l] characters from the given input,
storing them in string [s], starting at character number [p]. It
returns the actual number of characters read (which may be 0) or
raise [No_more_input] if no character can be read. It will raise
[Invalid_argument] if [p] and [l] do not designate a valid
substring of [s].
Example: [let map_ch f ?(block_size=100) =
let b = String.create block_size in
try while true do
let l = input ch b 0 block_size in
f b 0 l;
done with No_more_input -> ()]
* Write a single char to an output.
Example: [write stdout 'x';]
* Write a string to an output.
Example: [nwrite stdout "Enter your name: ";]
* [really_output o s p l] writes exactly [l] characters from string [s] onto
the the output, starting with the character at offset [p]. For consistency with
{!BatIO.output} it returns [l]. @raise Invalid_argument if [p] and [l] do not
designate a valid substring of [s].
This function is useful for networking situations where the output
buffer might fill resulting in not the entire substring being
readied for transmission. Uses [output] internally, and will
raise [Sys_blocked_io] in the case that any call returns 0.
* Flush an output.
If previous write operations have caused errors, this may trigger an exception.
Example: [flush stdout;]
* Flush all outputs, ignore errors.
Example: [flush_all ();]
*/*
*/*
"123"
* Create an output that will write into a string in an efficient way.
When closed, the output returns all the data written into it.
* Create an input that will read from an [enum].
* Create an output that will write into an [enum]. The
final enum is returned when the output is closed.
* [combine (a,b)] creates a new [output] [c] such that
writing to [c] will actually write to both [a] and [b]
* Create an output shifted to the right by a number of spaces
(or other character as specified by [tab]).
[tab_out n out] produces a new output for writing into [out], in
which every new line starts with [n] spaces.
@raise Invalid_argument if [n] < 0.
Closing [tab_out n out] does not close [out]. Rather,
closing [out] closes [tab_out n out].
* [repeat n out] create an output in which every character or string is repeated
[n] times to [out].
* read all the contents of the input until [No_more_input] is raised.
* Create a pipe between an input and an ouput. Data written from
the output can be read from the input.
* Create an input that provide a count function of the number of bytes
read from it.
* Create an output that provide a count function of the number of bytes
written through it.
* [progress_out out f] create an output that calls [f ()]
whenever some content is succesfully written to it.
* You can safely transform any output to an unit output in a safe way
by using this function.
* Exception raised when a read or write operation cannot be completed.
* Read an IEEE double precision floating point value.
* Read a null-terminated string.
* Write an OCaml int32.
* Write an IEEE double precision floating point value.
* Write a string and append an null character.
* Same operations as module {!BatIO}, but with big-endian encoding
* Read an IEEE double precision floating point value.
* Write an OCaml int32.
* Write an IEEE double precision floating point value.
* Read bits from an input
* Write bits to an output
* Fully create an input reading from other inputs by giving all
the needed functions.
This function is a more general version of {!create_in}
which also handles dependency management between inputs.
{b Note} When you create an input which reads from another
input, function [close] should {e not} close the inputs of
[underlying]. Doing so is a common error, which could result
in inadvertently closing {!stdin} or a network socket, etc.
* Create an input that will read from a channel.
@param autoclose If true or unspecified, the {!type: input}
will be automatically closed when the underlying [in_channel]
has reached its end.
@param cleanup If true, the channel
will be automatically closed when the {!type: input} is closed.
Otherwise, you will need to close the channel manually.
* Create an output that will write into a channel.
@param cleanup If true, the channel
will be automatically closed when the {!type: output} is closed.
Otherwise, you will need to close the channel manually.
* Create a channel that will read from an input.
{b Note} This function is extremely costly and is provided
essentially for debugging purposes or for reusing legacy
libraries which can't be adapted. As a general rule, if
you can avoid using this function, don't use it.
* Read an enumeration of null-terminated strings.
* Read an enumeration of LF or CRLF terminated strings.
* Read an input as an enumeration of strings of given length. If the input isn't a multiple of that length, the final string will be smaller than the rest.
* Read an enumeration of Latin-1 characters.
{b Note} Usually faster than calling [read] several times.
* Read an enumeration of bits
* Write an enumeration of bits
*The default size for internal buffers.
*/*
* Old name of [combine] |
* BatIO - Abstract input / output
* Copyright ( C ) 2003
* 2008 ( contributor )
* 2008 ( contributor )
* 2008 ( contributor )
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* BatIO - Abstract input/output
* Copyright (C) 2003 Nicolas Cannasse
* 2008 David Teller (contributor)
* 2008 Philippe Strauss (contributor)
* 2008 Edgar Friendly (contributor)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* High - order abstract I / O.
This module deals with { ! type : input}s and { ! type :
output}s . Inputs are manners of getting information from the
outside world and into your program ( for instance , reading from
the network , from a file , etc . ) Outputs are manners of getting
information out from your program and into the outside world ( for
instance , sending something onto the network , onto a file , etc . )
In other words , if you are looking for a way to modify files , read
from the network , etc . , you 're in the right place .
To perform I / O , you first need to { e open } your { ! type : input } or
your { ! type : output } . Chances are that there is an { e opening }
operation for this task . Note that most opening operations are
defined in their respective module . Operations for opening files
are defined in module { ! File } , operations for opening
communications with the network or with other processes are
defined in module { ! Unix } . Opening operations related to
compression and decompression are defined in module { ! Compress } ,
etc .
Once you have opened an { ! type : input } , you may read the data it
contains by using functions such as { ! read } ( to read one
character ) , { ! } or { ! : input } ( to read one string ) or one
of the [ read _ * ] functions . If you need not one information but a
complete enumeration , for instance for processing many information
before writing them , you may also convert the input into an
enumeration , by using one of the [ * s_of ] functions .
Once you have opened an { ! type : output } , you may write data to
this output by using functions scuh as { ! write } ( to write one
char ) , { ! } or { ! : output } ( to write one string ) or one of
the [ write _ * ] functions . If you have not just one piece of data
but a complete enumeration , you may write this whole enumeration
to the output by using one of the [ write_*s ] functions . Note that
most operations on output are said to be { e buffered } . This means
that small writing operations may be automatically delayed and
grouped into large writing operations , as these are generally
faster and induce less wear on the hardware . Occasionally , you
may wish to force all waiting operations to take place { e now } .
For this purpose , you may either function { ! flush } or function
I { ! flush_out } .
Once you have finished using your { ! type : input } or your { ! type :
output } , chances are that you will want to close it . This is not a
strict necessity , as OCaml will eventually close it for you when
it detects that you have no more need of that { ! type :
input}/{!type : output } , but this is generally a good policy , as
this will let other programs access the resources which are
currently allocated to that { ! type : input}/{!type : output } --
typically , under Windows , if you are reading the contents of a
file from a program , no other program may read the contents of
that file simultaneously and you may also not rename or move the
file to another directory . To close an { ! type : input } , use
function { ! } and to close an { ! type : output } , use function
{ ! close_out } .
{ b Note } Some { ! type : input}s are built on top of other
{ ! type : input}s to provide transparent translations ( e.g.
on - the - fly decompression of a file or network information ) and
that some { ! type : output}s are built on top of other
{ ! type : output}s for the same purpose ( e.g. on - the - fly compression
of a file or network information ) . In this case , closing the
" outer " { ! type : input}/{!type : output } ( e.g. the
decompressor / compressor ) will { e not } close the " inner "
{ ! type : input}/{!type : output } ( e.g. access to the file or to the
network ) . You will need to close the " inner "
{ ! type : input}/{!type : output } , which will automatically flush
the outer { ! type : input}/{!type : output } and close it .
@author @author @author @author
@documents BatInnerIO
This module deals with {!type: input}s and {!type:
output}s. Inputs are manners of getting information from the
outside world and into your program (for instance, reading from
the network, from a file, etc.) Outputs are manners of getting
information out from your program and into the outside world (for
instance, sending something onto the network, onto a file, etc.)
In other words, if you are looking for a way to modify files, read
from the network, etc., you're in the right place.
To perform I/O, you first need to {e open} your {!type: input} or
your {!type: output}. Chances are that there is an {e opening}
operation for this task. Note that most opening operations are
defined in their respective module. Operations for opening files
are defined in module {!File}, operations for opening
communications with the network or with other processes are
defined in module {!Unix}. Opening operations related to
compression and decompression are defined in module {!Compress},
etc.
Once you have opened an {!type: input}, you may read the data it
contains by using functions such as {!read} (to read one
character), {!nread} or {!val: input} (to read one string) or one
of the [read_*] functions. If you need not one information but a
complete enumeration, for instance for processing many information
before writing them, you may also convert the input into an
enumeration, by using one of the [*s_of] functions.
Once you have opened an {!type: output}, you may write data to
this output by using functions scuh as {!write} (to write one
char), {!nwrite} or {!val: output} (to write one string) or one of
the [write_*] functions. If you have not just one piece of data
but a complete enumeration, you may write this whole enumeration
to the output by using one of the [write_*s] functions. Note that
most operations on output are said to be {e buffered}. This means
that small writing operations may be automatically delayed and
grouped into large writing operations, as these are generally
faster and induce less wear on the hardware. Occasionally, you
may wish to force all waiting operations to take place {e now}.
For this purpose, you may either function {!flush} or function
I {!flush_out}.
Once you have finished using your {!type: input} or your {!type:
output}, chances are that you will want to close it. This is not a
strict necessity, as OCaml will eventually close it for you when
it detects that you have no more need of that {!type:
input}/{!type: output}, but this is generally a good policy, as
this will let other programs access the resources which are
currently allocated to that {!type:input}/{!type:output} --
typically, under Windows, if you are reading the contents of a
file from a program, no other program may read the contents of
that file simultaneously and you may also not rename or move the
file to another directory. To close an {!type: input}, use
function {!close_in} and to close an {!type: output}, use function
{!close_out}.
{b Note} Some {!type:input}s are built on top of other
{!type:input}s to provide transparent translations (e.g.
on-the-fly decompression of a file or network information) and
that some {!type:output}s are built on top of other
{!type:output}s for the same purpose (e.g. on-the-fly compression
of a file or network information). In this case, closing the
"outer" {!type:input}/{!type:output} (e.g. the
decompressor/compressor) will {e not} close the "inner"
{!type:input}/{!type:output} (e.g. access to the file or to the
network). You will need to close the "inner"
{!type:input}/{!type:output}, which will automatically flush
the outer {!type:input}/{!type:output} and close it.
@author Nicolas Cannasse
@author David Teller
@author Philippe Strauss
@author Edgar Friendly
@documents BatInnerIO
*)
open BatInnerIO
type input = BatInnerIO.input
type 'a output = 'a BatInnerIO.output
type ('a, 'b) printer = 'b output -> 'a -> unit
type 'a f_printer = Format.formatter -> 'a -> unit
exception No_more_input
* This exception is raised when reading on an input with the [ read ] or
[ nread ] functions while there is no available token to read .
[nread] functions while there is no available token to read. *)
exception Input_closed
exception Output_closed
* { 6 Standard inputs / outputs }
val stdin : input
* Standard input , as per Unix / Windows conventions ( by default , keyboard ) .
Example : [ if read_line stdin | > Int.of_string > 10 then failwith " too big a number read " ; ]
Example: [if read_line stdin |> Int.of_string > 10 then failwith "too big a number read"; ]
*)
val stdout: unit output
* Standard output , as per Unix / Windows conventions ( by default , console ) .
Use this output to display regular messages .
Example : [
write_string stdout " Enter your name : " ;
let name = read_line stdin in
write_line stdout ( " Your name is " ^ name ) ;
]
Use this output to display regular messages.
Example: [
write_string stdout "Enter your name:";
let name = read_line stdin in
write_line stdout ("Your name is " ^ name);
]
*)
val stderr: unit output
val stdnull: unit output
* An output which discards everything written to it .
Use this output to ignore messages .
Example : [
let out_ch = if debug then stderr else stdnull in
write_line out_ch " Program running . " ;
]
Use this output to ignore messages.
Example: [
let out_ch = if debug then stderr else stdnull in
write_line out_ch "Program running.";
]
*)
* { 6 Standard API }
val read : input -> char
val nread : input -> int -> string
* [ nread i n ] reads a string of size up to [ n ] from an input .
The function will raise [ No_more_input ] if no input is available .
It will raise [ Invalid_argument ] if [ n ] < 0 .
Example : [ let read_md5 ch = nread ch 32 ]
The function will raise [No_more_input] if no input is available.
It will raise [Invalid_argument] if [n] < 0.
Example: [let read_md5 ch = nread ch 32]
*)
val really_nread : input -> int -> string
val input : input -> string -> int -> int -> int
val really_input : input -> string -> int -> int -> int
* [ really_input i s p l ] reads exactly [ l ] characters from the
given input , storing them in the string [ s ] , starting at
position [ p ] . For consistency with { ! BatIO.input } it returns
[ l ] . @raise No_more_input if at [ l ] characters are not
available . @raise Invalid_argument if [ p ] and [ l ] do not
designate a valid substring of [ s ] .
Example : [ let _ = really_input stdin b 0 3 ]
given input, storing them in the string [s], starting at
position [p]. For consistency with {!BatIO.input} it returns
[l]. @raise No_more_input if at [l] characters are not
available. @raise Invalid_argument if [p] and [l] do not
designate a valid substring of [s].
Example: [let _ = really_input stdin b 0 3]
*)
val close_in : input -> unit
* Close the input . It can no longer be read from .
Example : [ ; ]
Example: [close_in network_in;]
*)
val write : (char, _) printer
val nwrite : (string, _) printer
val output : 'a output -> string -> int -> int -> int
* [ output o s p l ] writes up to [ l ] characters from string [ s ] , starting at
offset [ p ] . It returns the number of characters written . It will raise
[ Invalid_argument ] if [ p ] and [ l ] do not designate a valid substring of [ s ] .
Example : [ let str = " Foo Bar Baz " in let written = output ; ]
This writes " o Ba " to stdout .
offset [p]. It returns the number of characters written. It will raise
[Invalid_argument] if [p] and [l] do not designate a valid substring of [s].
Example: [let str = "Foo Bar Baz" in let written = output stdout str 2 4;]
This writes "o Ba" to stdout.
*)
val really_output : 'a output -> string -> int -> int -> int
val flush : 'a output -> unit
val flush_all : unit -> unit
val close_out : 'a output -> 'a
* Close the output and return its accumulator data .
The output is flushed before being closed and can no longer be
written . Attempting to flush or write after the output has been
closed will have no effect .
Example : [
let strout = output_string ( ) in
write strout ' x ' ;
if 2 + 3>5 then write strout " y " ;
print_string ( close_out strout ) ]
The output is flushed before being closed and can no longer be
written. Attempting to flush or write after the output has been
closed will have no effect.
Example: [
let strout = output_string () in
write strout 'x';
if 2+3>5 then write strout "y";
print_string (close_out strout) ]
*)
val close_all : unit -> unit
* Close all outputs .
Ignore errors . Automatically called at the end of your program .
You probably should never use it manually , as it also closes
[ stdout ] , [ stderr ] , [ ] .
Example : [ close_all ( ) ; ]
Ignore errors. Automatically called at the end of your program.
You probably should never use it manually, as it also closes
[stdout], [stderr], [stdnull].
Example: [close_all ();]
*)
* { 6 Creation of BatIO Inputs / Outputs }
To open a file for reading / writing , see { ! }
and { ! }
To open a file for reading/writing, see {!File.open_in}
and {!File.open_file_out}*)
val input_string : string -> input
* Create an input that will read from a string .
Example : [
let inch = input_string " 1234554321 " in
let str1 = nread inch 3 in ( * " 123 "
Example: [
let inch = input_string "1234554321" in
" 45543 "
" 21 "
try string_of_char(read inch) with BatIO.No_more_input -> "End of string";
]
*)
val output_string : unit -> string output
val input_enum : char BatEnum.t -> input
val output_enum : unit -> char BatEnum.t output
val combine : ('a output * 'b output) -> ('a * 'b) output
val tab_out : ?tab:char -> int -> 'a output -> unit output
repeat : int - > ' a output - > unit output
( * * [ repeat n out ] create an output in which every character or string is repeated
[ n ] times to [ out ] .
* { 6 Utilities }
val read_all : input -> string
val pipe : unit -> input * unit output
val copy : ?buffer:int -> input -> _ output -> unit
* Read everything from an input and copy it to an output .
@param buffer The size of the buffer to use for copying , in
bytes . By default , this is 4,096b .
@param buffer The size of the buffer to use for copying, in
bytes. By default, this is 4,096b.
*)
val pos_in : input -> input * (unit -> int)
val progress_in : input -> (unit -> unit) -> input
* [ progress_in f ] create an input that calls [ f ( ) ]
whenever some content is succesfully read from it .
whenever some content is succesfully read from it.*)
val pos_out : 'a output -> unit output * (unit -> int)
val progress_out : 'a output -> (unit -> unit) -> unit output
external cast_output : 'a output -> unit output = "%identity"
* { 6 Binary files API }
Here is some API useful for working with binary files , in particular
binary files generated by C applications . By default , encoding of
multibyte integers is low - endian . The { ! BigEndian } module provide multibyte
operations with other encoding .
Here is some API useful for working with binary files, in particular
binary files generated by C applications. By default, encoding of
multibyte integers is low-endian. The {!BigEndian} module provide multibyte
operations with other encoding.
*)
exception Overflow of string
val read_byte : input -> int
* Read an unsigned 8 - bit integer .
val read_signed_byte : input -> int
* Read an signed 8 - bit integer .
val read_ui16 : input -> int
* Read an unsigned 16 - bit word .
val read_i16 : input -> int
* Read a signed 16 - bit word .
val read_i32 : input -> int
* Read a signed 32 - bit integer . @raise Overflow if the
read integer can not be represented as an OCaml 31 - bit integer .
read integer cannot be represented as an OCaml 31-bit integer. *)
val read_real_i32 : input -> int32
* Read a signed 32 - bit integer as an OCaml int32 .
val read_i64 : input -> int64
* Read a signed 64 - bit integer as an OCaml int64 .
val read_float : input -> float
* Read an IEEE single precision floating point value .
val read_double : input -> float
val read_string : input -> string
val read_line : input -> string
* Read a LF or CRLF terminated string . If the source runs out of
input before a LF is found , returns a string of the remaining input .
Will raise [ No_more_input ] only if no characters are available .
input before a LF is found, returns a string of the remaining input.
Will raise [No_more_input] only if no characters are available. *)
val write_byte : (int, _) printer
* Write an unsigned 8 - bit byte .
val write_ui16 : (int, _) printer
* Write an unsigned 16 - bit word .
val write_i16 : (int, _) printer
* Write a signed 16 - bit word .
val write_i32 : (int, _) printer
* Write a signed 32 - bit integer .
val write_real_i32 : (int32, _) printer
val write_i64 : (int64, _) printer
* Write an OCaml int64 .
val write_double : (float, _) printer
val write_float : (float, _) printer
* Write an IEEE single precision floating point value .
val write_string : (string, _) printer
val write_line : (string, _) printer
* Write a line and append a line end .
This adds the correct line end for your operating system . That
is , if you are writing to a file and your system imposes that
files should end lines with character LF ( or [ ' \n ' ] ) , as Unix ,
then a LF is inserted at the end of the line . If your system
favors CRLF ( or [ ' \r\n ' ] ) , then this is what will be inserted .
This adds the correct line end for your operating system. That
is, if you are writing to a file and your system imposes that
files should end lines with character LF (or ['\n']), as Unix,
then a LF is inserted at the end of the line. If your system
favors CRLF (or ['\r\n']), then this is what will be inserted.*)
module BigEndian :
sig
* This module redefines the operations of module { ! BatIO } which behave
differently on big - endian [ input]s/[output]s .
Generally , to use this module you will wish to either open both
{ ! BatIO } and { ! BigEndian } , so as to import a big - endian version of
{ ! BatIO } , as per
[ open System . BatIO , BigEndian in ... ] ,
or to redefine locally { ! BatIO } to use big - endian encodings
[ module BatIO = System . BatIO include BigEndian ]
differently on big-endian [input]s/[output]s.
Generally, to use this module you will wish to either open both
{!BatIO} and {!BigEndian}, so as to import a big-endian version of
{!BatIO}, as per
[open System.BatIO, BigEndian in ...],
or to redefine locally {!BatIO} to use big-endian encodings
[module BatIO = System.BatIO include BigEndian]
*)
val read_ui16 : input -> int
* Read an unsigned 16 - bit word .
val read_i16 : input -> int
* Read a signed 16 - bit word .
val read_i32 : input -> int
* Read a signed 32 - bit integer . @raise Overflow if the
read integer can not be represented as an OCaml 31 - bit integer .
read integer cannot be represented as an OCaml 31-bit integer. *)
val read_real_i32 : input -> int32
* Read a signed 32 - bit integer as an OCaml int32 .
val read_i64 : input -> int64
* Read a signed 64 - bit integer as an OCaml int64 .
val read_double : input -> float
val read_float: input -> float
* Read an IEEE single precision floating point value .
val write_ui16 : (int, _) printer
* Write an unsigned 16 - bit word .
val write_i16 : (int, _) printer
* Write a signed 16 - bit word .
val write_i32 : (int, _) printer
* Write a signed 32 - bit integer .
val write_real_i32 : (int32, _) printer
val write_i64 : (int64, _) printer
* Write an OCaml int64 .
val write_double : (float, _) printer
val write_float : (float, _) printer
* Write an IEEE single precision floating point value .
val ui16s_of : input -> int BatEnum.t
* Read an enumeration of unsigned 16 - bit words .
val i16s_of : input -> int BatEnum.t
* Read an enumartion of signed 16 - bit words .
val i32s_of : input -> int BatEnum.t
* Read an enumeration of signed 32 - bit integers .
@raise Overflow if the read integer can not be represented as an OCaml
31 - bit integer .
@raise Overflow if the read integer cannot be represented as an OCaml
31-bit integer. *)
val real_i32s_of : input -> int32 BatEnum.t
* Read an enumeration of signed 32 - bit integers as OCaml [ int32]s .
val i64s_of : input -> int64 BatEnum.t
* Read an enumeration of signed 64 - bit integers as OCaml [ int64]s .
val doubles_of : input -> float BatEnum.t
* Read an enumeration of IEEE double precision floating point values .
val floats_of : input -> float BatEnum.t
* Read an enumeration of IEEE single precision floating point values .
end
* { 6 Bits API }
This enable you to read and write from an BatIO bit - by - bit or several bits
at the same time .
This enable you to read and write from an BatIO bit-by-bit or several bits
at the same time.
*)
type in_bits
type out_bits
exception Bits_error
val input_bits : input -> in_bits
val output_bits : 'a output -> out_bits
val read_bits : in_bits -> int -> int
* Read up to 31 bits , raise Bits_error if n < 0 or n > 31
val write_bits : out_bits -> nbits:int -> int -> unit
* Write up to 31 bits represented as a value , raise Bits_error if < 0
or > 31 or the value representation excess nbits .
or nbits > 31 or the value representation excess nbits. *)
val flush_bits : out_bits -> unit
* Flush remaining unwritten bits , adding up to 7 bits which values 0 .
val drop_bits : in_bits -> unit
* Drop up to 7 buffered bits and restart to next input character .
*
{ 6 Creating new types of inputs / outputs }
{6 Creating new types of inputs/outputs}
*)
val create_in :
read:(unit -> char) ->
input:(string -> int -> int -> int) ->
close:(unit -> unit) -> input
* Fully create an input by giving all the needed functions .
{ b Note } Do { e not } use this function for creating an input
which reads from one or more underlying inputs . Rather , use
{ ! wrap_in } .
{b Note} Do {e not} use this function for creating an input
which reads from one or more underlying inputs. Rather, use
{!wrap_in}.
*)
val wrap_in :
read:(unit -> char) ->
input:(string -> int -> int -> int) ->
close:(unit -> unit) ->
underlying:(input list) ->
input
val inherit_in:
?read:(unit -> char) ->
?input:(string -> int -> int -> int) ->
?close:(unit -> unit) ->
input -> input
* Simplified and optimized version of { ! wrap_in } which may be used
whenever only one input appears as dependency .
[ inherit_in inp ] will return an input identical to [ inp ] .
[ inherit_in ~read inp ] will return an input identical to
[ inp ] except for method [ read ] , etc .
You do not need to close [ inp ] in [ close ] .
whenever only one input appears as dependency.
[inherit_in inp] will return an input identical to [inp].
[inherit_in ~read inp] will return an input identical to
[inp] except for method [read], etc.
You do not need to close [inp] in [close].
*)
val create_out :
write:(char -> unit) ->
output:(string -> int -> int -> int) ->
flush:(unit -> unit) ->
close:(unit -> 'a) ->
'a output
*
Fully create an output by giving all the needed functions .
@param write Write one character to the output ( see { ! write } ) .
@param output Write a ( sub)string to the output ( see { ! output } ) .
@param flush Flush any buffers of this output ( see { ! flush } ) .
@param close Close this output . The output will be automatically
flushed .
{ b Note } Do { e not } use this function for creating an output which
writes to one or more underlying outputs . Rather , use { ! wrap_out } .
Fully create an output by giving all the needed functions.
@param write Write one character to the output (see {!write}).
@param output Write a (sub)string to the output (see {!output}).
@param flush Flush any buffers of this output (see {!flush}).
@param close Close this output. The output will be automatically
flushed.
{b Note} Do {e not} use this function for creating an output which
writes to one or more underlying outputs. Rather, use {!wrap_out}.
*)
val wrap_out :
write:(char -> unit) ->
output:(string -> int -> int -> int) ->
flush:(unit -> unit) ->
close:(unit -> 'a) ->
underlying:('b output list) ->
'a output
*
Fully create an output that writes to one or more underlying outputs .
This function is a more general version of { ! create_out } ,
which also handles dependency management between outputs .
To illustrate the need for dependency management , let us consider
the following values :
- an output [ out ]
- a function [ f : _ output - > _ output ] , using { ! create_out } to
create a new output for writing some data to an underyling
output ( for instance , a function comparale to { ! tab_out } or a
function performing transparent compression or transparent
traduction between encodings )
With these values , let us consider the following scenario
- a new output [ f out ] is created
- some data is written to [ f out ] but not flushed
- output [ out ] is closed , perhaps manually or as a consequence
of garbage - collection , or because the program has ended
- data written to [ f out ] is flushed .
In this case , data reaches [ out ] only after [ out ] has been closed .
Despite appearances , it is quite easy to reach such situation ,
especially in short programs .
If , instead , [ f ] uses [ wrap_out ] , then when output [ out ] is closed ,
[ f out ] is first automatically flushed and closed , which avoids the
issue .
@param write Write one character to the output ( see { ! write } ) .
@param output Write a ( sub)string to the output ( see { ! output } ) .
@param flush Flush any buffers of this output ( see { ! flush } ) .
@param close Close this output . The output will be automatically
flushed .
@param underlying The list of outputs to which the new output will
write .
{ b Note } Function [ close ] should { e not } close [ underlying ]
yourself . This is a common mistake which may cause sockets or
standard output to be closed while they are still being used by
another part of the program .
Fully create an output that writes to one or more underlying outputs.
This function is a more general version of {!create_out},
which also handles dependency management between outputs.
To illustrate the need for dependency management, let us consider
the following values:
- an output [out]
- a function [f : _ output -> _ output], using {!create_out} to
create a new output for writing some data to an underyling
output (for instance, a function comparale to {!tab_out} or a
function performing transparent compression or transparent
traduction between encodings)
With these values, let us consider the following scenario
- a new output [f out] is created
- some data is written to [f out] but not flushed
- output [out] is closed, perhaps manually or as a consequence
of garbage-collection, or because the program has ended
- data written to [f out] is flushed.
In this case, data reaches [out] only after [out] has been closed.
Despite appearances, it is quite easy to reach such situation,
especially in short programs.
If, instead, [f] uses [wrap_out], then when output [out] is closed,
[f out] is first automatically flushed and closed, which avoids the
issue.
@param write Write one character to the output (see {!write}).
@param output Write a (sub)string to the output (see {!output}).
@param flush Flush any buffers of this output (see {!flush}).
@param close Close this output. The output will be automatically
flushed.
@param underlying The list of outputs to which the new output will
write.
{b Note} Function [close] should {e not} close [underlying]
yourself. This is a common mistake which may cause sockets or
standard output to be closed while they are still being used by
another part of the program.
*)
val inherit_out:
?write:(char -> unit) ->
?output:(string -> int -> int -> int) ->
?flush:(unit -> unit) ->
?close:(unit -> unit) ->
'a output -> unit output
*
Simplified and optimized version of { ! wrap_out } whenever only
one output appears as dependency .
[ inherit_out out ] will return an output identical to [ out ] .
[ inherit_out ~write out ] will return an output identical to
[ out ] except for its [ write ] method , etc .
You do not need to close [ out ] in [ close ] .
Simplified and optimized version of {!wrap_out} whenever only
one output appears as dependency.
[inherit_out out] will return an output identical to [out].
[inherit_out ~write out] will return an output identical to
[out] except for its [write] method, etc.
You do not need to close [out] in [close].
*)
*
{ 6 For compatibility purposes }
{6 For compatibility purposes}
*)
val input_channel : ?autoclose:bool -> ?cleanup:bool -> in_channel -> input
val output_channel : ?cleanup:bool -> out_channel -> unit output
val to_input_channel : input -> in_channel
* { 6 Generic BatIO Object Wrappers }
Theses OO Wrappers have been written to provide easy support of
BatIO by external librairies . If you want your library to support
BatIO without actually requiring Batteries to compile , you can
should implement the classes [ in_channel ] , [ out_channel ] ,
[ poly_in_channel ] and/or [ poly_out_channel ] which are the common
BatIO specifications established for ExtLib , OCamlNet and
Camomile .
( see for more details ) .
{ b Note } In this version of Batteries Included , the object wrappers are { e not }
closed automatically by garbage - collection .
Theses OO Wrappers have been written to provide easy support of
BatIO by external librairies. If you want your library to support
BatIO without actually requiring Batteries to compile, you can
should implement the classes [in_channel], [out_channel],
[poly_in_channel] and/or [poly_out_channel] which are the common
BatIO specifications established for ExtLib, OCamlNet and
Camomile.
(see -programming.de/tmp/BatIO-Classes.html for more details).
{b Note} In this version of Batteries Included, the object wrappers are {e not}
closed automatically by garbage-collection.
*)
class in_channel : input ->
object
method input : string -> int -> int -> int
method close_in : unit -> unit
end
class out_channel : 'a output ->
object
method output : string -> int -> int -> int
method flush : unit -> unit
method close_out : unit -> unit
end
class in_chars : input ->
object
method get : unit -> char
method close_in : unit -> unit
end
class out_chars : 'a output ->
object
method put : char -> unit
method flush : unit -> unit
method close_out : unit -> unit
end
val from_in_channel : #in_channel -> input
val from_out_channel : #out_channel -> unit output
val from_in_chars : #in_chars -> input
val from_out_chars : #out_chars -> unit output
* { 6 Enumeration API }
val bytes_of : input -> int BatEnum.t
* Read an enumeration of unsigned 8 - bit integers .
val signed_bytes_of : input -> int BatEnum.t
* Read an enumeration of signed 8 - bit integers .
val ui16s_of : input -> int BatEnum.t
* Read an enumeration of unsigned 16 - bit words .
val i16s_of : input -> int BatEnum.t
* Read an enumartion of signed 16 - bit words .
val i32s_of : input -> int BatEnum.t
* Read an enumeration of signed 32 - bit integers . @raise Overflow if the
read integer can not be represented as an OCaml 31 - bit integer .
read integer cannot be represented as an OCaml 31-bit integer. *)
val real_i32s_of : input -> int32 BatEnum.t
* Read an enumeration of signed 32 - bit integers as OCaml [ int32]s .
val i64s_of : input -> int64 BatEnum.t
* Read an enumeration of signed 64 - bit integers as OCaml [ int64]s .
val doubles_of : input -> float BatEnum.t
* Read an enumeration of IEEE double precision floating point values .
val floats_of : input -> float BatEnum.t
* Read an enumeration of IEEE single precision floating point values .
val strings_of : input -> string BatEnum.t
val lines_of : input -> string BatEnum.t
val lines_of2 : input -> string BatEnum.t
val chunks_of : int -> input -> string BatEnum.t
val chars_of : input -> char BatEnum.t
val bits_of : in_bits -> int BatEnum.t
val write_bitss : nbits:int -> out_bits -> int BatEnum.t -> unit
val default_buffer_size : int
*
{ 6 Thread - safety }
{6 Thread-safety}
*)
val synchronize_in : ?lock:BatConcurrent.lock -> input -> input
* [ ] produces a new { ! type : input } which reads from [ input ]
in a thread - safe way . In other words , a lock prevents two distinct threads
from reading from that input simultaneously , something which would potentially
wreak havoc otherwise
@param lock An optional lock . If none is provided , the lock will be specific
to this [ input ] . Specifiying a custom lock may be useful to associate one
common lock for several inputs and/or outputs , for instance in the case
of pipes .
in a thread-safe way. In other words, a lock prevents two distinct threads
from reading from that input simultaneously, something which would potentially
wreak havoc otherwise
@param lock An optional lock. If none is provided, the lock will be specific
to this [input]. Specifiying a custom lock may be useful to associate one
common lock for several inputs and/or outputs, for instance in the case
of pipes.
*)
val synchronize_out: ?lock:BatConcurrent.lock -> _ output -> unit output
* [ synchronize_out out ] produces a new { ! type : output } which writes to [ output ]
in a thread - safe way . In other words , a lock prevents two distinct threads
from writing to that output simultaneously , something which would potentially
wreak havoc otherwise
@param lock An optional lock . If none is provided , the lock will be specific
to this [ output ] . Specifiying a custom lock may be useful to associate one
common lock for several inputs and/or outputs , for instance in the case
of pipes .
in a thread-safe way. In other words, a lock prevents two distinct threads
from writing to that output simultaneously, something which would potentially
wreak havoc otherwise
@param lock An optional lock. If none is provided, the lock will be specific
to this [output]. Specifiying a custom lock may be useful to associate one
common lock for several inputs and/or outputs, for instance in the case
of pipes.
*)
*
{ 6 Thread - safety internals }
Unless you are attempting to adapt Batteries Included to a new model of
concurrency , you probably wo n't need this .
{6 Thread-safety internals}
Unless you are attempting to adapt Batteries Included to a new model of
concurrency, you probably won't need this.
*)
val lock: BatConcurrent.lock ref
*
A lock used to synchronize internal operations .
By default , this is { ! } . However , if you 're
using a version of Batteries compiled in threaded mode , this uses
{ ! } . If you 're attempting to use Batteries with another
concurrency model , set the lock appropriately .
A lock used to synchronize internal operations.
By default, this is {!BatConcurrent.nolock}. However, if you're
using a version of Batteries compiled in threaded mode, this uses
{!BatMutex}. If you're attempting to use Batteries with another
concurrency model, set the lock appropriately.
*)
val lock_factory: (unit -> BatConcurrent.lock) ref
*
A factory used to create locks . This is used transparently by { ! }
and { ! synchronize_out } .
By default , this always returns { ! } . However ,
if you 're using a version of Batteries compiled in threaded mode ,
this uses { ! } .
A factory used to create locks. This is used transparently by {!synchronize_in}
and {!synchronize_out}.
By default, this always returns {!BatConcurrent.nolock}. However,
if you're using a version of Batteries compiled in threaded mode,
this uses {!BatMutex}. *)
val to_string : ('a, string) printer -> 'a -> string
val to_f_printer: ('a, _) printer -> 'a f_printer
val comb : ('a output * 'a output) -> 'a output
val make_enum : (input -> 'a) -> input -> 'a BatEnum.t
*
{ 6 Debugging facilities }
{6 Debugging facilities}
*)
val get_output_id : _ output -> int
val get_input_id : input -> int
module Incubator : sig
* { 6 Format - based pretty - printing }
module Array : sig
val pp :
?flush:bool ->
?first:string ->
?last:string ->
?sep:string ->
?indent:int ->
(Format.formatter -> 'a -> 'b) -> Format.formatter -> 'a array -> unit
* Print the contents of an array , with [ first ] preceeding the first item
( default : [ " \[| " ] ) , [ last ] following the last item ( default : [ " |\ ] " ] )
and [ sep ] separating items ( default : [ " ; " ] ) . A printing function must
be provided to print the items in the array . The [ flush ] parameter
( default : [ false ] ) should be set to [ true ] for the outer - most printing
call . Setting inner calls to [ true ] - for example , for nested values -
prevent indentation from working properly .
Example :
[ pp ~flush : true Format.pp_print_int Format.std_formatter \[|1 ; 2 ; 3|\ ] ]
(default: ["\[|"]), [last] following the last item (default: ["|\]"])
and [sep] separating items (default: ["; "]). A printing function must
be provided to print the items in the array. The [flush] parameter
(default: [false]) should be set to [true] for the outer-most printing
call. Setting inner calls to [true] - for example, for nested values -
prevent indentation from working properly.
Example:
[pp ~flush:true Format.pp_print_int Format.std_formatter \[|1; 2; 3|\]]
*)
end
module Enum : sig
val pp :
?flush:bool ->
?first:string ->
?last:string ->
?sep:string ->
?indent:int ->
(Format.formatter -> 'a -> 'b) -> Format.formatter -> 'a BatEnum.t -> unit
* Print the contents of an enum , with [ first ] preceeding the first item
( default : [ " " ] ) , [ last ] following the last item ( default : [ " " ] )
and [ sep ] separating items ( default : [ " " ] ) . A printing function must
be provided to print the items in the enum . The [ flush ] parameter
( default : [ false ] ) should be set to [ true ] for the outer - most printing
call . Setting inner calls to [ true ] - for example , for nested values -
prevent indentation from working properly .
Example :
[ pp ~flush : true Format.pp_print_int Format.std_formatter ( 1 -- 3 ) ]
(default: [""]), [last] following the last item (default: [""])
and [sep] separating items (default: [" "]). A printing function must
be provided to print the items in the enum. The [flush] parameter
(default: [false]) should be set to [true] for the outer-most printing
call. Setting inner calls to [true] - for example, for nested values -
prevent indentation from working properly.
Example:
[pp ~flush:true Format.pp_print_int Format.std_formatter (1 -- 3)] *)
end
module List : sig
val pp :
?flush:bool ->
?first:string ->
?last:string ->
?sep:string ->
?indent:int ->
(Format.formatter -> 'a -> 'b) -> Format.formatter -> 'a list -> unit
* Print the contents of a list , with [ first ] preceeding the first item
( default : [ " \ [ " ] ) , [ last ] following the last item ( default : [ " \ ] " ] )
and [ sep ] separating items ( default : [ " ; " ] ) . A printing function must
be provided to print the items in the list . The [ flush ] parameter
( default : [ false ] ) should be set to [ true ] for the outer - most printing
call . Setting inner calls to [ true ] - for example , for nested values -
prevent indentation from working properly .
Example :
[ pp ~flush : true Format.pp_print_int Format.std_formatter \[1 ; 2 ; 3\ ] ]
(default: ["\["]), [last] following the last item (default: ["\]"])
and [sep] separating items (default: ["; "]). A printing function must
be provided to print the items in the list. The [flush] parameter
(default: [false]) should be set to [true] for the outer-most printing
call. Setting inner calls to [true] - for example, for nested values -
prevent indentation from working properly.
Example:
[pp ~flush:true Format.pp_print_int Format.std_formatter \[1; 2; 3\]]
*)
end
end
|
f196399faca387536e24f63e13c481fd8ff73d6abfae5bbb9983af5b71736ae4 | fakedata-haskell/fakedata | Volleyball.hs | # LANGUAGE TemplateHaskell #
{-# LANGUAGE OverloadedStrings #-}
| @since 1.0
module Faker.Sport.Volleyball where
import Data.Text (Text)
import Faker (Fake(..))
import Faker.Provider.Volleyball
import Faker.TH
$(generateFakeField "volleyball" "team")
$(generateFakeField "volleyball" "player")
$(generateFakeField "volleyball" "coach")
$(generateFakeField "volleyball" "position")
$(generateFakeField "volleyball" "formation")
| null | https://raw.githubusercontent.com/fakedata-haskell/fakedata/ea938c38845b274e28abe7f4e8e342f491e83c89/src/Faker/Sport/Volleyball.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE TemplateHaskell #
| @since 1.0
module Faker.Sport.Volleyball where
import Data.Text (Text)
import Faker (Fake(..))
import Faker.Provider.Volleyball
import Faker.TH
$(generateFakeField "volleyball" "team")
$(generateFakeField "volleyball" "player")
$(generateFakeField "volleyball" "coach")
$(generateFakeField "volleyball" "position")
$(generateFakeField "volleyball" "formation")
|
d45803f94fb3e51ae357c5cb6af83275d445c2210c3be48d8948a9bd2975d98d | scrintal/heroicons-reagent | receipt_refund.cljs | (ns com.scrintal.heroicons.outline.receipt-refund)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M8.25 9.75h4.875a2.625 2.625 0 010 5.25H12M8.25 9.75L10.5 7.5M8.25 9.75L10.5 12m9-7.243V21.75l-3.75-1.5-3.75 1.5-3.75-1.5-3.75 1.5V4.757c0-1.108.806-2.057 1.907-2.185a48.507 48.507 0 0111.186 0c1.1.128 1.907 1.077 1.907 2.185z"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/outline/receipt_refund.cljs | clojure | (ns com.scrintal.heroicons.outline.receipt-refund)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M8.25 9.75h4.875a2.625 2.625 0 010 5.25H12M8.25 9.75L10.5 7.5M8.25 9.75L10.5 12m9-7.243V21.75l-3.75-1.5-3.75 1.5-3.75-1.5-3.75 1.5V4.757c0-1.108.806-2.057 1.907-2.185a48.507 48.507 0 0111.186 0c1.1.128 1.907 1.077 1.907 2.185z"}]]) | |
8932a17487e34ab1adb3bb46d7baf2e8e03086e87e4264b424e6d3dae80b36d5 | reach-sh/reach-lang | EmbeddedFiles.hs | module Reach.EmbeddedFiles (runtime_smt2, runtime_bt_smt2, stdlib_sol, stdlib_rsh) where
import Data.ByteString (ByteString)
import Data.FileEmbed
runtime_smt2 :: ByteString
runtime_smt2 = $(makeRelativeToProject "./smt2/runtime.smt2" >>= embedFile)
runtime_bt_smt2 :: ByteString
runtime_bt_smt2 = $(makeRelativeToProject "./smt2/runtime-bt.smt2" >>= embedFile)
stdlib_sol :: ByteString
stdlib_sol = $(makeRelativeToProject "./sol/stdlib.sol" >>= embedFile)
stdlib_rsh :: ByteString
stdlib_rsh = $(makeRelativeToProject "./rsh/stdlib.rsh" >>= embedFile)
| null | https://raw.githubusercontent.com/reach-sh/reach-lang/8f41a2ae17220041ba365274dd32ae7c96b11f2e/hs/src/Reach/EmbeddedFiles.hs | haskell | module Reach.EmbeddedFiles (runtime_smt2, runtime_bt_smt2, stdlib_sol, stdlib_rsh) where
import Data.ByteString (ByteString)
import Data.FileEmbed
runtime_smt2 :: ByteString
runtime_smt2 = $(makeRelativeToProject "./smt2/runtime.smt2" >>= embedFile)
runtime_bt_smt2 :: ByteString
runtime_bt_smt2 = $(makeRelativeToProject "./smt2/runtime-bt.smt2" >>= embedFile)
stdlib_sol :: ByteString
stdlib_sol = $(makeRelativeToProject "./sol/stdlib.sol" >>= embedFile)
stdlib_rsh :: ByteString
stdlib_rsh = $(makeRelativeToProject "./rsh/stdlib.rsh" >>= embedFile)
| |
559dc65d83e8cd99b30f092f7a68dc90afd88d9e115530f76b9542e8ac9aab78 | cfpb/qu | main.clj | (ns ^:integration integration.test.main
(:require [clojure.test :refer :all]
[qu.test-util :refer :all]))
(use-fixtures :once (mongo-setup-fn "integration_test"))
(deftest ^:integration test-index-url
(testing "it redirects to /data"
(does-contain (GET "/") {:status 302})
(does-contain (:headers (GET "/"))
{"Location" "/data"})))
(deftest ^:integration test-data-url
(testing "it returns successfully"
(let [resp (GET "/data")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "text/html;charset=UTF-8"}))
(let [resp (GET "/data.xml")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "application/xml;charset=UTF-8"}))))
(deftest ^:integration test-dataset-url
(testing "it returns successfully"
(let [resp (GET "/data/integration_test")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "text/html;charset=UTF-8"}))
(let [resp (GET "/data/integration_test.xml")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "application/xml;charset=UTF-8"}))))
(deftest ^:integration test-dataset-url-does-not-exist
(testing "it returns a 404"
(let [resp (GET "/data/bad_dataset")]
(does= (:status resp) 404)
(does-contain (:headers resp)
{"Content-Type" "text/html"}))
(let [resp (GET "/data/bad_dataset.xml")]
(does= (:status resp) 404)
(does-contain (:headers resp)
{"Content-Type" "application/xml;charset=UTF-8"}))))
;; (run-tests)
| null | https://raw.githubusercontent.com/cfpb/qu/f460d9ab2f05ac22f6d68a98a9641daf0f7c7ba4/test/integration/test/main.clj | clojure | (run-tests) | (ns ^:integration integration.test.main
(:require [clojure.test :refer :all]
[qu.test-util :refer :all]))
(use-fixtures :once (mongo-setup-fn "integration_test"))
(deftest ^:integration test-index-url
(testing "it redirects to /data"
(does-contain (GET "/") {:status 302})
(does-contain (:headers (GET "/"))
{"Location" "/data"})))
(deftest ^:integration test-data-url
(testing "it returns successfully"
(let [resp (GET "/data")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "text/html;charset=UTF-8"}))
(let [resp (GET "/data.xml")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "application/xml;charset=UTF-8"}))))
(deftest ^:integration test-dataset-url
(testing "it returns successfully"
(let [resp (GET "/data/integration_test")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "text/html;charset=UTF-8"}))
(let [resp (GET "/data/integration_test.xml")]
(does= (:status resp) 200)
(does-contain (:headers resp)
{"Content-Type" "application/xml;charset=UTF-8"}))))
(deftest ^:integration test-dataset-url-does-not-exist
(testing "it returns a 404"
(let [resp (GET "/data/bad_dataset")]
(does= (:status resp) 404)
(does-contain (:headers resp)
{"Content-Type" "text/html"}))
(let [resp (GET "/data/bad_dataset.xml")]
(does= (:status resp) 404)
(does-contain (:headers resp)
{"Content-Type" "application/xml;charset=UTF-8"}))))
|
9e671a6040c69f311cd4a427134f7463a32f3d37ad5f34239ae5f14bfa02a913 | project-oak/hafnium-verification | ClangPointers.mli |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module Map : module type of Map.Make (Int)
val ivar_to_property_table : Clang_ast_t.decl Int.Table.t
* maps ivar decl pointer to its record
val pointer_decl_table : Clang_ast_t.decl Int.Table.t
* maps pointer to its record
val pointer_stmt_table : Clang_ast_t.stmt Int.Table.t
* maps stmt pointer to its stmt record
val pointer_type_table : Clang_ast_t.c_type Int.Table.t
(** map pointer to its type *)
val populate_all_tables : Clang_ast_t.decl -> unit
* discover what pointers should point to in the tables above ; should be run once for the current
toplevel
toplevel decl *)
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/clang/ClangPointers.mli | ocaml | * map pointer to its type |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module Map : module type of Map.Make (Int)
val ivar_to_property_table : Clang_ast_t.decl Int.Table.t
* maps ivar decl pointer to its record
val pointer_decl_table : Clang_ast_t.decl Int.Table.t
* maps pointer to its record
val pointer_stmt_table : Clang_ast_t.stmt Int.Table.t
* maps stmt pointer to its stmt record
val pointer_type_table : Clang_ast_t.c_type Int.Table.t
val populate_all_tables : Clang_ast_t.decl -> unit
* discover what pointers should point to in the tables above ; should be run once for the current
toplevel
toplevel decl *)
|
f1a9101e598a40379214f81c7a7d035ff1ac0de3cf3ab695aba66fb3519c1042 | bootstrapworld/curr | info.rkt | #lang setup/infotab
(define name "scribble-bootstrap curriculum")
(define categories '(misc))
(define can-be-loaded-with 'all)
(define required-core-version "5.1.1")
(define version "1.0")
(define repositories '("4.x"))
(define scribblings '(("manual.scrbl")))
| null | https://raw.githubusercontent.com/bootstrapworld/curr/443015255eacc1c902a29978df0e3e8e8f3b9430/lib/info.rkt | racket | #lang setup/infotab
(define name "scribble-bootstrap curriculum")
(define categories '(misc))
(define can-be-loaded-with 'all)
(define required-core-version "5.1.1")
(define version "1.0")
(define repositories '("4.x"))
(define scribblings '(("manual.scrbl")))
| |
4cddf411d148c78ec2643eb4d4a0030383da2ae583a43ce4263de3df6edb3c2d | leptonyu/boots | Swagger.hs | # LANGUAGE CPP #
# LANGUAGE DataKinds #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module Boots.Endpoint.Swagger where
import qualified Data.Swagger as S
import Data.Text (Text, pack)
import Data.Version (Version, showVersion)
import Data.Word
import Lens.Micro
import Servant
#if __GLASGOW_HASKELL__ < 804
import Data.Semigroup
#endif
type EndpointSwagger = "endpoints" :> "swagger" :> Get '[JSON] S.Swagger
-- | Swagger modification
baseInfo
:: String -- ^ Hostname
-> Text -- ^ Server Name
-> Version -- ^ Server version
-> Word16 -- ^ Port
-> S.Swagger -- ^ Old swagger
-> S.Swagger
baseInfo hostName n v p s = s
& S.info . S.title .~ (n <> " API Documents")
& S.info . S.version .~ pack (showVersion v)
& S.host ?~ S.Host hostName (Just $ fromIntegral p)
| null | https://raw.githubusercontent.com/leptonyu/boots/335d58baafb1e0700b1a7dbe595a7264bd4d83ba/boots-web/src/Boots/Endpoint/Swagger.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE TypeOperators #
| Swagger modification
^ Hostname
^ Server Name
^ Server version
^ Port
^ Old swagger | # LANGUAGE CPP #
# LANGUAGE DataKinds #
module Boots.Endpoint.Swagger where
import qualified Data.Swagger as S
import Data.Text (Text, pack)
import Data.Version (Version, showVersion)
import Data.Word
import Lens.Micro
import Servant
#if __GLASGOW_HASKELL__ < 804
import Data.Semigroup
#endif
type EndpointSwagger = "endpoints" :> "swagger" :> Get '[JSON] S.Swagger
baseInfo
-> S.Swagger
baseInfo hostName n v p s = s
& S.info . S.title .~ (n <> " API Documents")
& S.info . S.version .~ pack (showVersion v)
& S.host ?~ S.Host hostName (Just $ fromIntegral p)
|
37abe41233b7084854e23da7cefd212fa3edadc401ddc4d8ad5469f43cc7b80d | axelarge/advent-of-code | day04.clj | (ns advent-of-code.y2020.day04
(:require [advent-of-code.support :refer :all]
[clojure.string :as str]))
(def input (get-input 2020 4))
(defn parse-passport [line]
(->> (str/split line #"(?m)[:\s]")
(partition 2)
(map (fn [[k v]] [(keyword k) v]))
(into {})))
(defn parse [input]
(map parse-passport (str/split input #"\n\n")))
(def required-fields
(set (keywords "byr iyr eyr hgt hcl ecl pid")))
(def eye-colors
(set (split-ws "amb blu brn gry grn hzl oth")))
(defn valid-keys? [pw]
(every? pw required-fields))
(defn valid-year? [s lo hi]
(when (re-matches #"\d{4}" s)
(<= lo (parse-int s) hi)))
(defn solve1 [input]
(->> (parse input)
(count-where valid-keys?)))
(defn solve2 [input]
(->> (parse input)
(count-where
(fn [{:keys [byr iyr eyr hgt hcl ecl pid] :as pw}]
(and (valid-keys? pw)
(valid-year? byr 1920 2002)
(valid-year? iyr 2010 2020)
(valid-year? eyr 2020 2030)
(when-let [[_ h unit] (re-matches #"(\d+)(cm|in)" hgt)]
(let [h (parse-int h)]
(if (= "cm" unit)
(<= 150 h 193)
(<= 59 h 76))))
(re-matches #"#[a-f0-9]{6}" hcl)
(eye-colors ecl)
(re-matches #"\d{9}" pid))))))
| null | https://raw.githubusercontent.com/axelarge/advent-of-code/4c62a53ef71605780a22cf8219029453d8e1b977/src/advent_of_code/y2020/day04.clj | clojure | (ns advent-of-code.y2020.day04
(:require [advent-of-code.support :refer :all]
[clojure.string :as str]))
(def input (get-input 2020 4))
(defn parse-passport [line]
(->> (str/split line #"(?m)[:\s]")
(partition 2)
(map (fn [[k v]] [(keyword k) v]))
(into {})))
(defn parse [input]
(map parse-passport (str/split input #"\n\n")))
(def required-fields
(set (keywords "byr iyr eyr hgt hcl ecl pid")))
(def eye-colors
(set (split-ws "amb blu brn gry grn hzl oth")))
(defn valid-keys? [pw]
(every? pw required-fields))
(defn valid-year? [s lo hi]
(when (re-matches #"\d{4}" s)
(<= lo (parse-int s) hi)))
(defn solve1 [input]
(->> (parse input)
(count-where valid-keys?)))
(defn solve2 [input]
(->> (parse input)
(count-where
(fn [{:keys [byr iyr eyr hgt hcl ecl pid] :as pw}]
(and (valid-keys? pw)
(valid-year? byr 1920 2002)
(valid-year? iyr 2010 2020)
(valid-year? eyr 2020 2030)
(when-let [[_ h unit] (re-matches #"(\d+)(cm|in)" hgt)]
(let [h (parse-int h)]
(if (= "cm" unit)
(<= 150 h 193)
(<= 59 h 76))))
(re-matches #"#[a-f0-9]{6}" hcl)
(eye-colors ecl)
(re-matches #"\d{9}" pid))))))
| |
24ef538151dc3a23b1e583c269d582eac5f6dbd8905502d0ced4c74ab021873b | lispnik/cl-http | format.lisp | -*- Mode : lisp ; Syntax : ansi - common - lisp ; Package : USER ; Base : 10 -*-
(in-package "USER")
Minimal update to .
Copyright ( C ) 1994 , 1995 ( OBC ) all rights reserved .
See copyright notice in CLIM;CLIM - SYS;MINIPROC file .
;;;
;;; This will support the missing readably keyword for now
;;;
(define-function write :redefine (object &rest options)
(remf options :readably)
(apply (original-function) object options))
;;; This allows compiling of most format calls containing
the non portable Tilde - Return directive ( or Tilde - Linefeed
;;; depending on the OS)...
;;;
(define-function format :redefine (stream string-or-fn &rest args)
(flet ((search-tilder (string &key (start 0) (end (length string)))
(search #.(make-array 2 :element-type 'character :initial-contents '(#\~ #\Linefeed)) string :start2 start :end2 end)))
(if (and (stringp string-or-fn)
(search-tilder string-or-fn))
(setq string-or-fn
(with-output-to-string (out)
(loop as start = 0 then end
as end = (search-tilder string-or-fn :start start)
if end
do (write-string string-or-fn out :start start :end (incf end))
(write-char #\Return out)
else do (write-string string-or-fn out :start start)
(return)))))
(apply (original-function) stream string-or-fn args)))
;;; Enable read-line across from non dos file servers!
;;;
(define-function read-line :redefine (&optional stream (eof-error-p t) eof-value recursive-p)
(case (stream-title stream)
(#1="Unix Stream"
(unix-read-line stream eof-error-p eof-value recursive-p))
(#2="Dos Stream"
(funcall (original-function) stream eof-error-p eof-value recursive-p))
(t
(typecase stream
(cg:text
(multiple-value-bind (line dosmode)
(unix-read-line stream eof-error-p eof-value recursive-p)
(setf (stream-title stream) (if dosmode #2# #1#))
line))
(t
(funcall (original-function) stream eof-error-p eof-value recursive-p))))))
#+Debug
(defun read-line1 (&optional stream (eof-error-p t) eof-value recursive-p)
(case (stream-title stream)
(#1="unix"
(unix-read-line stream eof-error-p eof-value recursive-p))
(#2="dos"
(funcall #'read-line stream eof-error-p eof-value recursive-p))
(t
(typecase stream
(cg:text
(multiple-value-bind (line dosmode)
(unix-read-line stream eof-error-p eof-value recursive-p)
(setf (stream-title stream) (if dosmode #2# #1#))
line))
(t
(funcall #'read-line stream eof-error-p eof-value recursive-p))))))
(defvar *read-line-buffer* (make-string 64))
(defvar *max-line-buffer-size* (ash 1 16))
Raw read - line for UNIX file format
(defun unix-read-line (&optional stream (eof-error-p t) eof-value recursive-p)
(let (buffers
(buffer (if recursive-p
(make-string (length *read-line-buffer*))
*read-line-buffer*))
(size (length *read-line-buffer*))
(pos (file-position stream))
incpos eofp dosmode line)
(loop (setq incpos (cg:device-nread-string stream buffer size))
(if (zerop incpos)
(if (and eof-error-p (not eofp))
(error "Unexpected end of file at position ~a in ~a." pos stream)
(return eof-value))
(let ((eolpos (or (position #\Linefeed buffer :end incpos)
(if (setq eofp (cg:device-eof-p stream))
incpos))))
(cond (eolpos
(if (not eofp)
(file-position stream (incf pos (1+ eolpos))))
(if (> eolpos 0)
(when (char= (elt buffer (1- eolpos)) #\Newline)
(decf eolpos)
(setq dosmode t)))
(setf buffer (subseq buffer 0 eolpos))
(cond (buffers
(push buffer buffers)
(setq line (apply #'concatenate 'string (nreverse buffers))))
(t
(setq line buffer)))
(if dosmode
(return (values line dosmode))
(return line)))
(t
(file-position stream (incf pos incpos))
(push buffer buffers)
(setq buffer (make-string (setq size (min *max-line-buffer-size* (ash size 1))))))))))))
;;; This patches the macro DESTRUCTURING-BIND on ACLPC
;;; to handle dotted list.
(unless (fboundp 'destructuring-bind-orig)
(setf (macro-function 'destructuring-bind-orig)
(macro-function 'destructuring-bind)))
(eval-when (compile eval load)
(defmacro handle-redefinition (&rest body)
`(handler-bind ((simple-error
#'(lambda (c)
(if (equal (simple-condition-format-control c)
"Attempt to redefine protected function ~s")
(progn (format t "~&;;;; Redefining Protected Function,~&~s.~%"
(simple-condition-format-arguments c))
(continue))
c)))
(cerror
#'(lambda (c)
(if (equal (simple-condition-format-control c)
"~ ~S has been defined in ~S")
(progn (format t "~&;;;; Redefining previously defined Function,~&~s.~%"
(simple-condition-format-arguments c))
(continue))
c))))
,@body))
)
This patch exploits the advantages of BUTLAST and is
probably not meaningful to other CL .
;;;
(handle-redefinition
(defmacro destructuring-bind (args form &rest body)
(let ((#1=#:args args)
(#2=#:tail)
(#3=#:position))
(if (setq #2# (rest (last #1#)))
;; Simple dotted list tail
(setq #1# (butlast #1# 0)
#3# (length #1#))
(if (and (setq #2# (rest (member '&rest #1#)))
;; Simple rest case may not be a cons
(null (rest #2#)))
(setq #3# (position '&rest #1#)
#1# (butlast #1# 2)
#2# (first #2#))
;; rest case with keys must be a cons
(setq #2# nil)))
(if #2#
;; Dotted list
`(let ((#4=#:form ,form))
(destructuring-bind-orig ,#1#
(nthcar ,#3# #4#)
(let ((,#2# (nthcdr ,#3# #4#)))
,@body)))
;; All other cases
`(destructuring-bind-orig ,#1# ,form
,@body))))
)
(defun nthcar (n l)
(loop for elt in l
for i from 0 below n
collect elt))
;;; ANSI style last
;;;
(define-function last :redefine (list &optional n)
(if n
(last2 list n)
;; Original definition does not do much good
;; but if you take it out the compiler will break!
(funcall (original-function) list)))
(defun last2 (list n)
(let ((length (loop for l upfrom 0 as rest on list
while (consp rest)
finally (return l))))
(decf length n)
(loop for rest = list then (rest rest)
when (< (decf length) 0)
do (return rest))))
(unless (fboundp 'defgeneric-orig)
(setf (macro-function 'defgeneric-orig)
(macro-function 'defgeneric)))
Eliminate VALUES declaration from defgeneric
This should not be required since ALCPC is ANSI compliant
in this regard , but the numerous warnings compiling CL - HTTP
;;; get very annoying...
;;;
(handle-redefinition
(defmacro defgeneric (ref . body)
(let* ((#2=#:ldeclare (member-if #'(lambda (x) (and (consp x)
(eql (first x) 'declare)))
(rest (member-if #'listp body))))
(#1=#:declare (first #2#)))
(if #1#
(setf (rest #1#) (delete 'values (rest #1#) :key #'first)))
(when (and #1# (null (rest #1#)))
(if (and (null (second #2#))
(null (cddr #2#)))
(setf (rest (last2 body 2)) nil)
(setf (first #2#) (second #2#)
(rest #2#) (cddr #2#))))
`(defgeneric-orig ,ref ,@body))))
| null | https://raw.githubusercontent.com/lispnik/cl-http/84391892d88c505aed705762a153eb65befb6409/acl/aclpc/format.lisp | lisp | Syntax : ansi - common - lisp ; Package : USER ; Base : 10 -*-
CLIM - SYS;MINIPROC file .
This will support the missing readably keyword for now
This allows compiling of most format calls containing
depending on the OS)...
Enable read-line across from non dos file servers!
This patches the macro DESTRUCTURING-BIND on ACLPC
to handle dotted list.
Simple dotted list tail
Simple rest case may not be a cons
rest case with keys must be a cons
Dotted list
All other cases
ANSI style last
Original definition does not do much good
but if you take it out the compiler will break!
get very annoying...
|
(in-package "USER")
Minimal update to .
Copyright ( C ) 1994 , 1995 ( OBC ) all rights reserved .
(define-function write :redefine (object &rest options)
(remf options :readably)
(apply (original-function) object options))
the non portable Tilde - Return directive ( or Tilde - Linefeed
(define-function format :redefine (stream string-or-fn &rest args)
(flet ((search-tilder (string &key (start 0) (end (length string)))
(search #.(make-array 2 :element-type 'character :initial-contents '(#\~ #\Linefeed)) string :start2 start :end2 end)))
(if (and (stringp string-or-fn)
(search-tilder string-or-fn))
(setq string-or-fn
(with-output-to-string (out)
(loop as start = 0 then end
as end = (search-tilder string-or-fn :start start)
if end
do (write-string string-or-fn out :start start :end (incf end))
(write-char #\Return out)
else do (write-string string-or-fn out :start start)
(return)))))
(apply (original-function) stream string-or-fn args)))
(define-function read-line :redefine (&optional stream (eof-error-p t) eof-value recursive-p)
(case (stream-title stream)
(#1="Unix Stream"
(unix-read-line stream eof-error-p eof-value recursive-p))
(#2="Dos Stream"
(funcall (original-function) stream eof-error-p eof-value recursive-p))
(t
(typecase stream
(cg:text
(multiple-value-bind (line dosmode)
(unix-read-line stream eof-error-p eof-value recursive-p)
(setf (stream-title stream) (if dosmode #2# #1#))
line))
(t
(funcall (original-function) stream eof-error-p eof-value recursive-p))))))
#+Debug
(defun read-line1 (&optional stream (eof-error-p t) eof-value recursive-p)
(case (stream-title stream)
(#1="unix"
(unix-read-line stream eof-error-p eof-value recursive-p))
(#2="dos"
(funcall #'read-line stream eof-error-p eof-value recursive-p))
(t
(typecase stream
(cg:text
(multiple-value-bind (line dosmode)
(unix-read-line stream eof-error-p eof-value recursive-p)
(setf (stream-title stream) (if dosmode #2# #1#))
line))
(t
(funcall #'read-line stream eof-error-p eof-value recursive-p))))))
(defvar *read-line-buffer* (make-string 64))
(defvar *max-line-buffer-size* (ash 1 16))
Raw read - line for UNIX file format
(defun unix-read-line (&optional stream (eof-error-p t) eof-value recursive-p)
(let (buffers
(buffer (if recursive-p
(make-string (length *read-line-buffer*))
*read-line-buffer*))
(size (length *read-line-buffer*))
(pos (file-position stream))
incpos eofp dosmode line)
(loop (setq incpos (cg:device-nread-string stream buffer size))
(if (zerop incpos)
(if (and eof-error-p (not eofp))
(error "Unexpected end of file at position ~a in ~a." pos stream)
(return eof-value))
(let ((eolpos (or (position #\Linefeed buffer :end incpos)
(if (setq eofp (cg:device-eof-p stream))
incpos))))
(cond (eolpos
(if (not eofp)
(file-position stream (incf pos (1+ eolpos))))
(if (> eolpos 0)
(when (char= (elt buffer (1- eolpos)) #\Newline)
(decf eolpos)
(setq dosmode t)))
(setf buffer (subseq buffer 0 eolpos))
(cond (buffers
(push buffer buffers)
(setq line (apply #'concatenate 'string (nreverse buffers))))
(t
(setq line buffer)))
(if dosmode
(return (values line dosmode))
(return line)))
(t
(file-position stream (incf pos incpos))
(push buffer buffers)
(setq buffer (make-string (setq size (min *max-line-buffer-size* (ash size 1))))))))))))
(unless (fboundp 'destructuring-bind-orig)
(setf (macro-function 'destructuring-bind-orig)
(macro-function 'destructuring-bind)))
(eval-when (compile eval load)
(defmacro handle-redefinition (&rest body)
`(handler-bind ((simple-error
#'(lambda (c)
(if (equal (simple-condition-format-control c)
"Attempt to redefine protected function ~s")
(progn (format t "~&;;;; Redefining Protected Function,~&~s.~%"
(simple-condition-format-arguments c))
(continue))
c)))
(cerror
#'(lambda (c)
(if (equal (simple-condition-format-control c)
"~ ~S has been defined in ~S")
(progn (format t "~&;;;; Redefining previously defined Function,~&~s.~%"
(simple-condition-format-arguments c))
(continue))
c))))
,@body))
)
This patch exploits the advantages of BUTLAST and is
probably not meaningful to other CL .
(handle-redefinition
(defmacro destructuring-bind (args form &rest body)
(let ((#1=#:args args)
(#2=#:tail)
(#3=#:position))
(if (setq #2# (rest (last #1#)))
(setq #1# (butlast #1# 0)
#3# (length #1#))
(if (and (setq #2# (rest (member '&rest #1#)))
(null (rest #2#)))
(setq #3# (position '&rest #1#)
#1# (butlast #1# 2)
#2# (first #2#))
(setq #2# nil)))
(if #2#
`(let ((#4=#:form ,form))
(destructuring-bind-orig ,#1#
(nthcar ,#3# #4#)
(let ((,#2# (nthcdr ,#3# #4#)))
,@body)))
`(destructuring-bind-orig ,#1# ,form
,@body))))
)
(defun nthcar (n l)
(loop for elt in l
for i from 0 below n
collect elt))
(define-function last :redefine (list &optional n)
(if n
(last2 list n)
(funcall (original-function) list)))
(defun last2 (list n)
(let ((length (loop for l upfrom 0 as rest on list
while (consp rest)
finally (return l))))
(decf length n)
(loop for rest = list then (rest rest)
when (< (decf length) 0)
do (return rest))))
(unless (fboundp 'defgeneric-orig)
(setf (macro-function 'defgeneric-orig)
(macro-function 'defgeneric)))
Eliminate VALUES declaration from defgeneric
This should not be required since ALCPC is ANSI compliant
in this regard , but the numerous warnings compiling CL - HTTP
(handle-redefinition
(defmacro defgeneric (ref . body)
(let* ((#2=#:ldeclare (member-if #'(lambda (x) (and (consp x)
(eql (first x) 'declare)))
(rest (member-if #'listp body))))
(#1=#:declare (first #2#)))
(if #1#
(setf (rest #1#) (delete 'values (rest #1#) :key #'first)))
(when (and #1# (null (rest #1#)))
(if (and (null (second #2#))
(null (cddr #2#)))
(setf (rest (last2 body 2)) nil)
(setf (first #2#) (second #2#)
(rest #2#) (cddr #2#))))
`(defgeneric-orig ,ref ,@body))))
|
897fddf1756cfc2acf48c403e55d9a4a9080049ea47302b480429ac2d5c082c2 | microsoft/SLAyer | NSLib.mli | Copyright ( c ) Microsoft Corporation . All rights reserved .
(** Extensions of the standard library. *)
(*============================================================================
Combinators
============================================================================*)
(** {3 Combinators } *)
* { 4 Function combinators }
val id : 'a -> 'a
val const : 'a -> 'b -> 'a
val flip : ('a -> 'b -> 'c) -> 'b -> 'a -> 'c
val curry : ('a * 'b -> 'c) -> 'a -> 'b -> 'c
val uncurry : ('a -> 'b -> 'c) -> 'a * 'b -> 'c
val ( &> ) : 'a -> ('a -> unit) -> 'a
(** [x &> f] applies [f] to [x] and returns [x], left associative. *)
val ( <& ) : ('a -> unit) -> 'a -> 'a
(** [f <& x] applies [f] to [x] and returns [x], left associative. *)
val ( $> ) : 'a -> unit -> 'a
(** Reverse sequential composition, left associative *)
* { 4 Tuple combinators }
val pair : 'a -> 'b -> 'a * 'b
val swap : 'a * 'b -> 'b * 'a
val fst3 : ('a * 'b * 'c) -> 'a
val snd3 : ('a * 'b * 'c) -> 'b
val thd3 : ('a * 'b * 'c) -> 'c
val fst4 : ('a * 'b * 'c * 'd) -> 'a
val snd4 : ('a * 'b * 'c * 'd) -> 'b
val thd4 : ('a * 'b * 'c * 'd) -> 'c
val fth4 : ('a * 'b * 'c * 'd) -> 'd
val ( *** ) : ('a -> 'b) -> ('c -> 'd) -> 'a * 'c -> 'b * 'd
* { 4 Predicate combinators }
val ( &&& ) : ('a -> bool) -> ('a -> bool) -> 'a -> bool
(** Short-circuit conjunction lifted to predicates, left associative. *)
val ( ||| ) : ('a -> bool) -> ('a -> bool) -> 'a -> bool
(** Short-circuit disjunction lifted to predicates, left associative. *)
* { 4 Equality combinators }
val equal_tup2 : ('a->'b->bool)->('c->'d->bool)->'a*'c->'b*'d->bool
val equal_tup3 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->'a*'c*'e->'b*'d*'f->bool
val equal_tup4 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->'a*'c*'e*'g->'b*'d*'f*'h->bool
val equal_tup5 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->'a*'c*'e*'g*'i->'b*'d*'f*'h*'j->bool
val equal_tup6 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->'a*'c*'e*'g*'i*'k->'b*'d*'f*'h*'j*'l->bool
val equal_tup7 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->('m->'n->bool)->'a*'c*'e*'g*'i*'k*'m->'b*'d*'f*'h*'j*'l*'n->bool
val equal_tup8 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->('m->'n->bool)->('o->'p->bool)->'a*'c*'e*'g*'i*'k*'m*'o->'b*'d*'f*'h*'j*'l*'n*'p->bool
val equal_tup9 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->('m->'n->bool)->('o->'p->bool)->('q->'r->bool)->'a*'c*'e*'g*'i*'k*'m*'o*'q->'b*'d*'f*'h*'j*'l*'n*'p*'r->bool
* { 4 Comparison combinators }
val compare_tup2 : ('a->'b->int)->('c->'d->int)->'a*'c->'b*'d->int
val compare_tup3 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->'a*'c*'e->'b*'d*'f->int
val compare_tup4 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->'a*'c*'e*'g->'b*'d*'f*'h->int
val compare_tup5 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->'a*'c*'e*'g*'i->'b*'d*'f*'h*'j->int
val compare_tup6 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->'a*'c*'e*'g*'i*'k->'b*'d*'f*'h*'j*'l->int
val compare_tup7 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->('m->'n->int)->'a*'c*'e*'g*'i*'k*'m->'b*'d*'f*'h*'j*'l*'n->int
val compare_tup8 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->('m->'n->int)->('o->'p->int)->'a*'c*'e*'g*'i*'k*'m*'o->'b*'d*'f*'h*'j*'l*'n*'p->int
val compare_tup9 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->('m->'n->int)->('o->'p->int)->('q->'r->int)->'a*'c*'e*'g*'i*'k*'m*'o*'q->'b*'d*'f*'h*'j*'l*'n*'p*'r->int
(** {3 File handling } *)
val with_in_bin : string -> (in_channel -> 'a) -> 'a
val with_out_bin : string -> (out_channel -> 'a -> 'b) -> 'a -> 'b
val with_out : string -> (Buffer.t -> 'a) -> 'a
* { 3 Exception handling }
exception Undef
val try_finally : (unit -> 'a) -> (unit -> 'b) -> 'a
val finally_try : (unit -> 'b) -> (unit -> 'a) -> 'a
val debug_wrap1 : int ref -> int -> ('a->'b) -> 'a->'b
val debug_wrap2 : int ref -> int -> ('a->'b->'c) -> 'a->'b->'c
val debug_wrap3 : int ref -> int -> ('a->'b->'c->'d) -> 'a->'b->'c->'d
val debug_wrap4 : int ref -> int -> ('a->'b->'c->'d->'e) -> 'a->'b->'c->'d->'e
val debug_wrap5 : int ref -> int -> ('a->'b->'c->'d->'e->'f) -> 'a->'b->'c->'d->'e->'f
(*============================================================================
Formatting
============================================================================*)
(** {3 Formatting } *)
exception Nothing_to_fmt
(** Type of functions for formatting ['a] values. *)
type 'a formatter = Format.formatter -> 'a -> unit
(** Type of format strings that make a single call to an ['a formatter]. *)
type 'a format_str = ('a formatter -> 'a -> unit, Format.formatter, unit) format
val ifbreakf : ('a, Format.formatter, unit) format -> Format.formatter -> 'a
val failwithf : ('a, Format.formatter, unit, 'b) format4 -> 'a
val invalid_argf : ('a, Format.formatter, unit, 'b) format4 -> 'a
(*============================================================================
Collections
============================================================================*)
* { 2 Collections }
(** Types equipped with an equivalence relation. *)
module type EqualityType = sig
type t
val equal: t -> t -> bool
end
(** Types equipped with a total order. *)
module type OrderedType = sig
type t
val equal: t -> t -> bool
val compare: t -> t -> int
end
(** Types equipped with a hash function. *)
module type HashedType = sig
type t
val equal: t -> t -> bool
val hash: t -> int
end
(** Pairs of types equipped with a hash function. *)
module HashedTypeTup2 (H0: HashedType) (H1: HashedType)
: (HashedType with type t = H0.t * H1.t)
(** Sets of unordered values. *)
module type Set0 = sig
type elt
type t
val empty : t
val is_empty : t -> bool
val add : elt -> t -> t
val singleton : elt -> t
val iter : (elt -> unit) -> t -> unit
val map : (elt -> elt) -> t -> t
val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a
val map_fold : (elt * 'z -> elt * 'z) -> t * 'z -> t * 'z
val kfold : t -> (elt -> ('a->'b) -> 'a->'b) -> ('a->'b) -> 'a->'b
val for_all : (elt -> bool) -> t -> bool
val exists : (elt -> bool) -> t -> bool
val exists_unique : (elt -> bool) -> t -> bool
val filter : (elt -> bool) -> t -> t
val cardinal : t -> int
val of_list : elt list -> t
val to_list : t -> elt list
val choose : t -> elt
val union : t -> t -> t
val diff : t -> t -> t
end
module type Set1 = sig
include Set0
include OrderedType with type t := t
val remove : elt -> t -> t
val diff_inter_diff : t -> t -> t * t * t
end
| null | https://raw.githubusercontent.com/microsoft/SLAyer/6f46f6999c18f415bc368b43b5ba3eb54f0b1c04/src/Library/NSLib.mli | ocaml | * Extensions of the standard library.
============================================================================
Combinators
============================================================================
* {3 Combinators }
* [x &> f] applies [f] to [x] and returns [x], left associative.
* [f <& x] applies [f] to [x] and returns [x], left associative.
* Reverse sequential composition, left associative
* Short-circuit conjunction lifted to predicates, left associative.
* Short-circuit disjunction lifted to predicates, left associative.
* {3 File handling }
============================================================================
Formatting
============================================================================
* {3 Formatting }
* Type of functions for formatting ['a] values.
* Type of format strings that make a single call to an ['a formatter].
============================================================================
Collections
============================================================================
* Types equipped with an equivalence relation.
* Types equipped with a total order.
* Types equipped with a hash function.
* Pairs of types equipped with a hash function.
* Sets of unordered values. | Copyright ( c ) Microsoft Corporation . All rights reserved .
* { 4 Function combinators }
val id : 'a -> 'a
val const : 'a -> 'b -> 'a
val flip : ('a -> 'b -> 'c) -> 'b -> 'a -> 'c
val curry : ('a * 'b -> 'c) -> 'a -> 'b -> 'c
val uncurry : ('a -> 'b -> 'c) -> 'a * 'b -> 'c
val ( &> ) : 'a -> ('a -> unit) -> 'a
val ( <& ) : ('a -> unit) -> 'a -> 'a
val ( $> ) : 'a -> unit -> 'a
* { 4 Tuple combinators }
val pair : 'a -> 'b -> 'a * 'b
val swap : 'a * 'b -> 'b * 'a
val fst3 : ('a * 'b * 'c) -> 'a
val snd3 : ('a * 'b * 'c) -> 'b
val thd3 : ('a * 'b * 'c) -> 'c
val fst4 : ('a * 'b * 'c * 'd) -> 'a
val snd4 : ('a * 'b * 'c * 'd) -> 'b
val thd4 : ('a * 'b * 'c * 'd) -> 'c
val fth4 : ('a * 'b * 'c * 'd) -> 'd
val ( *** ) : ('a -> 'b) -> ('c -> 'd) -> 'a * 'c -> 'b * 'd
* { 4 Predicate combinators }
val ( &&& ) : ('a -> bool) -> ('a -> bool) -> 'a -> bool
val ( ||| ) : ('a -> bool) -> ('a -> bool) -> 'a -> bool
* { 4 Equality combinators }
val equal_tup2 : ('a->'b->bool)->('c->'d->bool)->'a*'c->'b*'d->bool
val equal_tup3 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->'a*'c*'e->'b*'d*'f->bool
val equal_tup4 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->'a*'c*'e*'g->'b*'d*'f*'h->bool
val equal_tup5 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->'a*'c*'e*'g*'i->'b*'d*'f*'h*'j->bool
val equal_tup6 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->'a*'c*'e*'g*'i*'k->'b*'d*'f*'h*'j*'l->bool
val equal_tup7 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->('m->'n->bool)->'a*'c*'e*'g*'i*'k*'m->'b*'d*'f*'h*'j*'l*'n->bool
val equal_tup8 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->('m->'n->bool)->('o->'p->bool)->'a*'c*'e*'g*'i*'k*'m*'o->'b*'d*'f*'h*'j*'l*'n*'p->bool
val equal_tup9 : ('a->'b->bool)->('c->'d->bool)->('e->'f->bool)->('g->'h->bool)->('i->'j->bool)->('k->'l->bool)->('m->'n->bool)->('o->'p->bool)->('q->'r->bool)->'a*'c*'e*'g*'i*'k*'m*'o*'q->'b*'d*'f*'h*'j*'l*'n*'p*'r->bool
* { 4 Comparison combinators }
val compare_tup2 : ('a->'b->int)->('c->'d->int)->'a*'c->'b*'d->int
val compare_tup3 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->'a*'c*'e->'b*'d*'f->int
val compare_tup4 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->'a*'c*'e*'g->'b*'d*'f*'h->int
val compare_tup5 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->'a*'c*'e*'g*'i->'b*'d*'f*'h*'j->int
val compare_tup6 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->'a*'c*'e*'g*'i*'k->'b*'d*'f*'h*'j*'l->int
val compare_tup7 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->('m->'n->int)->'a*'c*'e*'g*'i*'k*'m->'b*'d*'f*'h*'j*'l*'n->int
val compare_tup8 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->('m->'n->int)->('o->'p->int)->'a*'c*'e*'g*'i*'k*'m*'o->'b*'d*'f*'h*'j*'l*'n*'p->int
val compare_tup9 : ('a->'b->int)->('c->'d->int)->('e->'f->int)->('g->'h->int)->('i->'j->int)->('k->'l->int)->('m->'n->int)->('o->'p->int)->('q->'r->int)->'a*'c*'e*'g*'i*'k*'m*'o*'q->'b*'d*'f*'h*'j*'l*'n*'p*'r->int
val with_in_bin : string -> (in_channel -> 'a) -> 'a
val with_out_bin : string -> (out_channel -> 'a -> 'b) -> 'a -> 'b
val with_out : string -> (Buffer.t -> 'a) -> 'a
* { 3 Exception handling }
exception Undef
val try_finally : (unit -> 'a) -> (unit -> 'b) -> 'a
val finally_try : (unit -> 'b) -> (unit -> 'a) -> 'a
val debug_wrap1 : int ref -> int -> ('a->'b) -> 'a->'b
val debug_wrap2 : int ref -> int -> ('a->'b->'c) -> 'a->'b->'c
val debug_wrap3 : int ref -> int -> ('a->'b->'c->'d) -> 'a->'b->'c->'d
val debug_wrap4 : int ref -> int -> ('a->'b->'c->'d->'e) -> 'a->'b->'c->'d->'e
val debug_wrap5 : int ref -> int -> ('a->'b->'c->'d->'e->'f) -> 'a->'b->'c->'d->'e->'f
exception Nothing_to_fmt
type 'a formatter = Format.formatter -> 'a -> unit
type 'a format_str = ('a formatter -> 'a -> unit, Format.formatter, unit) format
val ifbreakf : ('a, Format.formatter, unit) format -> Format.formatter -> 'a
val failwithf : ('a, Format.formatter, unit, 'b) format4 -> 'a
val invalid_argf : ('a, Format.formatter, unit, 'b) format4 -> 'a
* { 2 Collections }
module type EqualityType = sig
type t
val equal: t -> t -> bool
end
module type OrderedType = sig
type t
val equal: t -> t -> bool
val compare: t -> t -> int
end
module type HashedType = sig
type t
val equal: t -> t -> bool
val hash: t -> int
end
module HashedTypeTup2 (H0: HashedType) (H1: HashedType)
: (HashedType with type t = H0.t * H1.t)
module type Set0 = sig
type elt
type t
val empty : t
val is_empty : t -> bool
val add : elt -> t -> t
val singleton : elt -> t
val iter : (elt -> unit) -> t -> unit
val map : (elt -> elt) -> t -> t
val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a
val map_fold : (elt * 'z -> elt * 'z) -> t * 'z -> t * 'z
val kfold : t -> (elt -> ('a->'b) -> 'a->'b) -> ('a->'b) -> 'a->'b
val for_all : (elt -> bool) -> t -> bool
val exists : (elt -> bool) -> t -> bool
val exists_unique : (elt -> bool) -> t -> bool
val filter : (elt -> bool) -> t -> t
val cardinal : t -> int
val of_list : elt list -> t
val to_list : t -> elt list
val choose : t -> elt
val union : t -> t -> t
val diff : t -> t -> t
end
module type Set1 = sig
include Set0
include OrderedType with type t := t
val remove : elt -> t -> t
val diff_inter_diff : t -> t -> t * t * t
end
|
3fe5cf55ebdbe4386c781aef1cbc142787e3c8e97b00168877f701324d18db30 | mindreframer/clojure-stuff | compiler.clj | Copyright ( c ) and . All rights reserved .
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns tailrecursion.hoplon.compiler.compiler
(:require
[clojure.pprint :as pp]
[clojure.java.io :as io]
[clojure.string :as str]
[tailrecursion.hoplon.compiler.tagsoup :as tags]
[tailrecursion.hoplon.compiler.util :as util]
[tailrecursion.hoplon.compiler.refer :as refer]))
(def ^:dynamic *printer* prn)
(defn up-parents [path name]
(let [[f & dirs] (str/split path #"/")]
(->> [name] (concat (repeat (count dirs) "../")) (apply str))))
(defn inline-code [s process]
(let [lines (str/split s #"\n")
start #";;\{\{\s*$"
end #"^\s*;;\}\}\s*$"
pad #"^\s*"
unpad #(str/replace %1 (re-pattern (format "^\\s{0,%d}" %2)) "")]
(loop [txt nil, i 0, [line & lines] lines, out []]
(if-not line
(str/join "\n" out)
(if-not txt
(if (re-find start line)
(recur [] i lines out)
(recur txt i lines (conj out line)))
(if (re-find end line)
(let [s (process (str/trim (str/join "\n" txt)))]
(recur nil 0 (rest lines) (conj (pop out) (str (peek out) s (first lines)))))
(let [i (if-not (empty? txt) i (count (re-find pad line)))]
(recur (conj txt (unpad line i)) i lines out))))))))
(defn as-forms [s]
(if (= \< (first (str/trim s)))
(tags/parse-string (inline-code s tags/html-escape))
(util/read-string (inline-code s pr-str))))
(defn output-path [forms] (-> forms first second str))
(defn output-path-for [path] (-> path slurp as-forms output-path))
(defn make-nsdecl
[[_ ns-sym & forms]]
(let [ns-sym (symbol ns-sym)
ns-syms '#{tailrecursion.hoplon tailrecursion.javelin}
rm? #(or (contains? ns-syms %) (and (seq %) (contains? ns-syms (first %))))
mk-req #(concat (remove rm? %2) (map %1 ns-syms (repeat %3)))
clauses (->> (tree-seq list? seq forms) (filter list?) (group-by first))
exclude (when-let [e (:refer-hoplon clauses)] (nth (first e) 2))
combine #(mapcat (partial drop 1) (% clauses))
req (combine :require)
reqm (combine :require-macros)
reqs `(:require ~@(mk-req refer/make-require req exclude))
macros `(:require-macros ~@(mk-req refer/make-require-macros reqm exclude))
other? #(-> #{:require :require-macros :refer-hoplon}
((comp not contains?) (first %)))
others (->> forms (filter list?) (filter other?))]
`(~'ns ~ns-sym ~@others ~reqs ~macros)))
(defn forms-str [forms]
(str/join "\n" (map #(with-out-str (*printer* %)) forms)))
(defn compile-lib [[[ns* & _ :as nsdecl] & tlfs]]
(when (= 'ns ns*) (forms-str (cons (make-nsdecl nsdecl) tlfs))))
(defn ns->path [ns]
(-> ns munge (str/replace \. \/) (str ".cljs")))
(defn compile-forms [forms js-path css-inc-path]
(require 'cljs.compiler)
(let [[nsdecl & tlfs] forms
cljs-munge (resolve 'cljs.compiler/munge)]
(if (= 'ns (first nsdecl))
{:cljs (forms-str (cons (make-nsdecl nsdecl) tlfs)) :ns (second nsdecl)}
(let [[_ page & _] nsdecl
outpath (output-path forms)
js-uri (up-parents outpath js-path)
css-uri (up-parents outpath css-inc-path)
page-ns (util/munge-page page)
nsdecl (let [[h n & t] (make-nsdecl nsdecl)]
`(~h ~page-ns ~@t))
script #(list 'script {:type "text/javascript"} (str %))
script-src #(list 'script {:type "text/javascript" :src (str %)})
s-html `(~'html {}
(~'head {}
(~'meta {:charset "utf-8"})
~(script (str "window._hoplon_main_css = '" css-uri "';"))
~(script-src js-uri)
~(script (str (cljs-munge (second nsdecl)) ".hoploninit();")))
(~'body {}))
htmlstr (tags/print-page "html" s-html)
cljs `(~nsdecl
(defn ~(symbol "^:export") ~'hoploninit []
~@tlfs
(~'tailrecursion.hoplon/init)))
cljsstr (forms-str cljs)]
{:html htmlstr :cljs cljsstr :ns page-ns :file outpath}))))
(defn pp [form] (pp/write form :dispatch pp/code-dispatch))
(def cache (atom {}))
(defn compile-string
[forms-str path js-path cljsdir htmldir & {:keys [opts]}]
(let [{cache? :cache :keys [pretty-print css-inc-path]} opts
cached (get @cache path)
last-mod (.lastModified (io/file path))
use-cached? (and (pos? last-mod)
(<= last-mod (get cached :last-modified 0)))
write (fn [f s m]
(when (and f s)
(spit (doto f io/make-parents) s)
(when m (.setLastModified f m))))]
(let [{mod :last-modified :keys [file cljs html ns]}
(if use-cached?
cached
(when-let [forms (as-forms forms-str)]
(binding [*printer* (if pretty-print pp prn)]
(let [compiled (-> (compile-forms forms js-path css-inc-path)
(assoc :last-modified last-mod))]
(if (= cache? false)
compiled
(get (swap! cache assoc path compiled) path))))))
cljs-out (io/file cljsdir (ns->path ns))]
(write cljs-out cljs mod)
(write (when file (io/file htmldir file)) html mod))))
(defn compile-file [f & args]
(apply compile-string (slurp f) (.getPath f) args))
| null | https://raw.githubusercontent.com/mindreframer/clojure-stuff/1e761b2dacbbfbeec6f20530f136767e788e0fe3/github.com/tailrecursion/hoplon/src/tailrecursion/hoplon/compiler/compiler.clj | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) and . All rights reserved .
Eclipse Public License 1.0 ( -1.0.php )
(ns tailrecursion.hoplon.compiler.compiler
(:require
[clojure.pprint :as pp]
[clojure.java.io :as io]
[clojure.string :as str]
[tailrecursion.hoplon.compiler.tagsoup :as tags]
[tailrecursion.hoplon.compiler.util :as util]
[tailrecursion.hoplon.compiler.refer :as refer]))
(def ^:dynamic *printer* prn)
(defn up-parents [path name]
(let [[f & dirs] (str/split path #"/")]
(->> [name] (concat (repeat (count dirs) "../")) (apply str))))
(defn inline-code [s process]
(let [lines (str/split s #"\n")
start #";;\{\{\s*$"
end #"^\s*;;\}\}\s*$"
pad #"^\s*"
unpad #(str/replace %1 (re-pattern (format "^\\s{0,%d}" %2)) "")]
(loop [txt nil, i 0, [line & lines] lines, out []]
(if-not line
(str/join "\n" out)
(if-not txt
(if (re-find start line)
(recur [] i lines out)
(recur txt i lines (conj out line)))
(if (re-find end line)
(let [s (process (str/trim (str/join "\n" txt)))]
(recur nil 0 (rest lines) (conj (pop out) (str (peek out) s (first lines)))))
(let [i (if-not (empty? txt) i (count (re-find pad line)))]
(recur (conj txt (unpad line i)) i lines out))))))))
(defn as-forms [s]
(if (= \< (first (str/trim s)))
(tags/parse-string (inline-code s tags/html-escape))
(util/read-string (inline-code s pr-str))))
(defn output-path [forms] (-> forms first second str))
(defn output-path-for [path] (-> path slurp as-forms output-path))
(defn make-nsdecl
[[_ ns-sym & forms]]
(let [ns-sym (symbol ns-sym)
ns-syms '#{tailrecursion.hoplon tailrecursion.javelin}
rm? #(or (contains? ns-syms %) (and (seq %) (contains? ns-syms (first %))))
mk-req #(concat (remove rm? %2) (map %1 ns-syms (repeat %3)))
clauses (->> (tree-seq list? seq forms) (filter list?) (group-by first))
exclude (when-let [e (:refer-hoplon clauses)] (nth (first e) 2))
combine #(mapcat (partial drop 1) (% clauses))
req (combine :require)
reqm (combine :require-macros)
reqs `(:require ~@(mk-req refer/make-require req exclude))
macros `(:require-macros ~@(mk-req refer/make-require-macros reqm exclude))
other? #(-> #{:require :require-macros :refer-hoplon}
((comp not contains?) (first %)))
others (->> forms (filter list?) (filter other?))]
`(~'ns ~ns-sym ~@others ~reqs ~macros)))
(defn forms-str [forms]
(str/join "\n" (map #(with-out-str (*printer* %)) forms)))
(defn compile-lib [[[ns* & _ :as nsdecl] & tlfs]]
(when (= 'ns ns*) (forms-str (cons (make-nsdecl nsdecl) tlfs))))
(defn ns->path [ns]
(-> ns munge (str/replace \. \/) (str ".cljs")))
(defn compile-forms [forms js-path css-inc-path]
(require 'cljs.compiler)
(let [[nsdecl & tlfs] forms
cljs-munge (resolve 'cljs.compiler/munge)]
(if (= 'ns (first nsdecl))
{:cljs (forms-str (cons (make-nsdecl nsdecl) tlfs)) :ns (second nsdecl)}
(let [[_ page & _] nsdecl
outpath (output-path forms)
js-uri (up-parents outpath js-path)
css-uri (up-parents outpath css-inc-path)
page-ns (util/munge-page page)
nsdecl (let [[h n & t] (make-nsdecl nsdecl)]
`(~h ~page-ns ~@t))
script #(list 'script {:type "text/javascript"} (str %))
script-src #(list 'script {:type "text/javascript" :src (str %)})
s-html `(~'html {}
(~'head {}
(~'meta {:charset "utf-8"})
~(script (str "window._hoplon_main_css = '" css-uri "';"))
~(script-src js-uri)
~(script (str (cljs-munge (second nsdecl)) ".hoploninit();")))
(~'body {}))
htmlstr (tags/print-page "html" s-html)
cljs `(~nsdecl
(defn ~(symbol "^:export") ~'hoploninit []
~@tlfs
(~'tailrecursion.hoplon/init)))
cljsstr (forms-str cljs)]
{:html htmlstr :cljs cljsstr :ns page-ns :file outpath}))))
(defn pp [form] (pp/write form :dispatch pp/code-dispatch))
(def cache (atom {}))
(defn compile-string
[forms-str path js-path cljsdir htmldir & {:keys [opts]}]
(let [{cache? :cache :keys [pretty-print css-inc-path]} opts
cached (get @cache path)
last-mod (.lastModified (io/file path))
use-cached? (and (pos? last-mod)
(<= last-mod (get cached :last-modified 0)))
write (fn [f s m]
(when (and f s)
(spit (doto f io/make-parents) s)
(when m (.setLastModified f m))))]
(let [{mod :last-modified :keys [file cljs html ns]}
(if use-cached?
cached
(when-let [forms (as-forms forms-str)]
(binding [*printer* (if pretty-print pp prn)]
(let [compiled (-> (compile-forms forms js-path css-inc-path)
(assoc :last-modified last-mod))]
(if (= cache? false)
compiled
(get (swap! cache assoc path compiled) path))))))
cljs-out (io/file cljsdir (ns->path ns))]
(write cljs-out cljs mod)
(write (when file (io/file htmldir file)) html mod))))
(defn compile-file [f & args]
(apply compile-string (slurp f) (.getPath f) args))
|
b8cfe40d0783de109a5d07ab8ca088c8eac03fe76373ae015dfea3665a009f76 | astro/hashvortex | ControlSocket.hs | module ControlSocket (listenSocket) where
import Data.IORef
import Control.Monad.State.Lazy
import qualified System.Event as Ev
import Network.Socket
import System.Directory (removeFile)
import InState
type ControlHandler = [String] -> IO String
listenSocket :: Ev.EventManager -> FilePath -> ControlHandler -> IO ()
listenSocket mgr path handler
= do catch (removeFile path) (const $ return ())
serv <- socket AF_UNIX Stream defaultProtocol
bindSocket serv (SockAddrUnix path)
listen serv 0
Ev.registerFd mgr (acceptClient mgr serv handler) (fromIntegral $ fdSocket serv) Ev.evtRead
return ()
data Client = Context ControlHandler Socket String
acceptClient mgr serv handler _key _ev
= do (sock, _) <- accept serv
refCtx <- newIORef $ Context handler sock ""
let f key _ev = catch (refInStateT refCtx $ readClient) $
const (Ev.unregisterFd mgr key >>
sClose sock)
Ev.registerFd mgr f (fromIntegral $ fdSocket sock) Ev.evtRead
return ()
readClient :: StateT Client IO ()
readClient
= do Context handler sock buf <- get
buf' <- liftIO $ recv sock 1
let buf'' = buf ++ buf'
breaks = (`elem` "\r\n")
case break breaks buf'' of
(line, c:rest)
| breaks c ->
do liftIO $ handler (words line) >>= send sock
put $ Context handler sock rest
_ ->
put $ Context handler sock buf''
| null | https://raw.githubusercontent.com/astro/hashvortex/ccf32d13bd6057b442eb50c087c43c3870bb5be2/ControlSocket.hs | haskell | module ControlSocket (listenSocket) where
import Data.IORef
import Control.Monad.State.Lazy
import qualified System.Event as Ev
import Network.Socket
import System.Directory (removeFile)
import InState
type ControlHandler = [String] -> IO String
listenSocket :: Ev.EventManager -> FilePath -> ControlHandler -> IO ()
listenSocket mgr path handler
= do catch (removeFile path) (const $ return ())
serv <- socket AF_UNIX Stream defaultProtocol
bindSocket serv (SockAddrUnix path)
listen serv 0
Ev.registerFd mgr (acceptClient mgr serv handler) (fromIntegral $ fdSocket serv) Ev.evtRead
return ()
data Client = Context ControlHandler Socket String
acceptClient mgr serv handler _key _ev
= do (sock, _) <- accept serv
refCtx <- newIORef $ Context handler sock ""
let f key _ev = catch (refInStateT refCtx $ readClient) $
const (Ev.unregisterFd mgr key >>
sClose sock)
Ev.registerFd mgr f (fromIntegral $ fdSocket sock) Ev.evtRead
return ()
readClient :: StateT Client IO ()
readClient
= do Context handler sock buf <- get
buf' <- liftIO $ recv sock 1
let buf'' = buf ++ buf'
breaks = (`elem` "\r\n")
case break breaks buf'' of
(line, c:rest)
| breaks c ->
do liftIO $ handler (words line) >>= send sock
put $ Context handler sock rest
_ ->
put $ Context handler sock buf''
| |
dd4f8eded7cdbebd79da997425aa9eb7ba352def3b20a0d9baeab2a92cf095a9 | chris-taylor/SICP-in-Haskell | Section2.hs | The Environment Model of Evaluation
3.9
-- no code
3.10
-- no code | null | https://raw.githubusercontent.com/chris-taylor/SICP-in-Haskell/d0a10b5c5990081b4acaaae9f01ced5513ea9368/ch3/Section2.hs | haskell | no code
no code | The Environment Model of Evaluation
3.9
3.10 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.