_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
f0e6ee901ce3ebb4bae52b04aa9c05be8ce7a03cba1df78dadd5f31436995367 | youscape/ebt | ebt_priority_selector.erl | %%%-------------------------------------------------------------------
@author zyuyou
( C ) 2015 ,
%%% @doc 优先级选择节点
%%%
%%% @end
%%%-------------------------------------------------------------------
-module(ebt_priority_selector).
-ebt_extend(ebt_node).
-include("ebt.hrl").
-include("ebt_transform.hrl").
%% API
-export([do_evaluate/1, tick/1]).
do_evaluate(#ebt_node{id = Id, childs = Childs}) ->
Data = ?EBT_NODE_DATA(Id),
LastActiveNode = maps:get(active_child, Data, undefined),
case select(Childs, LastActiveNode) of
false ->
false;
{true, ActiveChild} ->
Data2 = Data#{active_child => ActiveChild},
?EBT_NODE_DATA(Id, Data2),
true
end.
tick(#ebt_node{id = Id}) ->
Data = ?EBT_NODE_DATA(Id),
case maps:get(active_child, Data, undefined) of
undefined ->
?EBT_RESULT_FINISHED;
#ebt_node{mod = Mod} = ChildNode ->
case Mod:tick(ChildNode) of
?EBT_RESULT_FINISHED ->
Mod:clear(ChildNode),
Data2 = maps:remove(active_child, Data),
?EBT_NODE_DATA(Id, Data2),
?EBT_RESULT_FINISHED;
?EBT_RESULT_RUNNING ->
?EBT_RESULT_RUNNING
end
end.
@private 选择可进入子节点
select([], _LastActiveNode) ->
false;
select([#ebt_node{mod = Mod} = Node| Tail], LastActiveNode) ->
case Mod:evaluate(Node) of
true ->
case Node =/= LastActiveNode of
true ->
case LastActiveNode of
#ebt_node{mod = LastMod} ->
LastMod:clear(LastActiveNode);
undefined ->
ok
end;
false -> ok
end,
{true, Node};
false ->
select(Tail, LastActiveNode)
end.
| null | https://raw.githubusercontent.com/youscape/ebt/546121bdfa24cb8cbd64ff4f842fd42ec7d50189/src/ebt_priority_selector.erl | erlang | -------------------------------------------------------------------
@doc 优先级选择节点
@end
-------------------------------------------------------------------
API | @author zyuyou
( C ) 2015 ,
-module(ebt_priority_selector).
-ebt_extend(ebt_node).
-include("ebt.hrl").
-include("ebt_transform.hrl").
-export([do_evaluate/1, tick/1]).
do_evaluate(#ebt_node{id = Id, childs = Childs}) ->
Data = ?EBT_NODE_DATA(Id),
LastActiveNode = maps:get(active_child, Data, undefined),
case select(Childs, LastActiveNode) of
false ->
false;
{true, ActiveChild} ->
Data2 = Data#{active_child => ActiveChild},
?EBT_NODE_DATA(Id, Data2),
true
end.
tick(#ebt_node{id = Id}) ->
Data = ?EBT_NODE_DATA(Id),
case maps:get(active_child, Data, undefined) of
undefined ->
?EBT_RESULT_FINISHED;
#ebt_node{mod = Mod} = ChildNode ->
case Mod:tick(ChildNode) of
?EBT_RESULT_FINISHED ->
Mod:clear(ChildNode),
Data2 = maps:remove(active_child, Data),
?EBT_NODE_DATA(Id, Data2),
?EBT_RESULT_FINISHED;
?EBT_RESULT_RUNNING ->
?EBT_RESULT_RUNNING
end
end.
@private 选择可进入子节点
select([], _LastActiveNode) ->
false;
select([#ebt_node{mod = Mod} = Node| Tail], LastActiveNode) ->
case Mod:evaluate(Node) of
true ->
case Node =/= LastActiveNode of
true ->
case LastActiveNode of
#ebt_node{mod = LastMod} ->
LastMod:clear(LastActiveNode);
undefined ->
ok
end;
false -> ok
end,
{true, Node};
false ->
select(Tail, LastActiveNode)
end.
|
c66f492073d7dc58e4c15fb768650b9b54ed91f7a62dfd67b3d59930fc0e438a | darrenldl/oali | useradd_helper_as_powerful_script_template.ml | let gen () =
{|#!/bin/bash
INVALID_ANS="Invalid answer"
NO_COMMAND="Command not found"
groups="users,wheel,rfkill"
ask_ans() {
if (( $# <= 1 )); then
echo "Too few parameters"
exit
elif (( $# >= 2 )); then
ret_var=$1
message=$2
fi
echo -ne "$message"": "
read ans
eval "$ret_var=$ans"
}
ask_yn() {
if (( $# <= 1 )); then
echo "Too few parameters"
exit
elif (( $# >= 2 )); then
ret_var=$1
message=$2
fi
while true; do
echo -ne "$message"" y/n: "
read ans
if [[ $ans == "y" ]]; then
eval "$ret_var=true"
break
elif [[ $ans == "n" ]]; then
eval "$ret_var=false"
break
else
echo -e $INVALID_ANS
fi
done
}
ask_if_correct() {
ask_yn $1 "Is this correct?"
}
comple() {
if $(eval echo '$'$1); then
eval "$2=false"
else
eval "$2=true"
fi
}
flip_ans() {
if $(eval echo '$'$1); then
eval "$1=false"
else
eval "$1=true"
fi
}
default_wait=1
wait_and_clear() {
if [[ $# == 0 ]]; then
sleep $default_wait
else
sleep $1
fi
clear
}
tell_press_enter() {
echo "Press enter to continue"
read
}
echo "User setup"
echo ""
while true; do
ask_end=false
while ! $ask_end; do
ask_ans user_name "Please enter the user name"
echo "You entered: " $user_name
ask_if_correct ask_end
done
echo "Adding user"
useradd -m "$user_name" -G "$groups"
if [[ $? == 0 ]]; then
break
else
echo "Failed to add user"
echo "Please check whether the user name is correctly specified and if acceptable by the system"
tell_press_enter
fi
done
while true; do
echo "Setting password for user: " $user_name
passwd "$user_name"
if [[ $? == 0 ]]; then
break
else
echo "Failed to set password"
echo "Please repeat the procedure"
tell_press_enter
fi
done
echo "User: " $user_name " added"
|}
| null | https://raw.githubusercontent.com/darrenldl/oali/fa2a7e7171eb57c4bab5961a124ac165120e40cb/src/useradd_helper_as_powerful_script_template.ml | ocaml | let gen () =
{|#!/bin/bash
INVALID_ANS="Invalid answer"
NO_COMMAND="Command not found"
groups="users,wheel,rfkill"
ask_ans() {
if (( $# <= 1 )); then
echo "Too few parameters"
exit
elif (( $# >= 2 )); then
ret_var=$1
message=$2
fi
echo -ne "$message"": "
read ans
eval "$ret_var=$ans"
}
ask_yn() {
if (( $# <= 1 )); then
echo "Too few parameters"
exit
elif (( $# >= 2 )); then
ret_var=$1
message=$2
fi
while true; do
echo -ne "$message"" y/n: "
read ans
if [[ $ans == "y" ]]; then
eval "$ret_var=true"
break
elif [[ $ans == "n" ]]; then
eval "$ret_var=false"
break
else
echo -e $INVALID_ANS
fi
done
}
ask_if_correct() {
ask_yn $1 "Is this correct?"
}
comple() {
if $(eval echo '$'$1); then
eval "$2=false"
else
eval "$2=true"
fi
}
flip_ans() {
if $(eval echo '$'$1); then
eval "$1=false"
else
eval "$1=true"
fi
}
default_wait=1
wait_and_clear() {
if [[ $# == 0 ]]; then
sleep $default_wait
else
sleep $1
fi
clear
}
tell_press_enter() {
echo "Press enter to continue"
read
}
echo "User setup"
echo ""
while true; do
ask_end=false
while ! $ask_end; do
ask_ans user_name "Please enter the user name"
echo "You entered: " $user_name
ask_if_correct ask_end
done
echo "Adding user"
useradd -m "$user_name" -G "$groups"
if [[ $? == 0 ]]; then
break
else
echo "Failed to add user"
echo "Please check whether the user name is correctly specified and if acceptable by the system"
tell_press_enter
fi
done
while true; do
echo "Setting password for user: " $user_name
passwd "$user_name"
if [[ $? == 0 ]]; then
break
else
echo "Failed to set password"
echo "Please repeat the procedure"
tell_press_enter
fi
done
echo "User: " $user_name " added"
|}
| |
127ab9d049850331cf326889794867ab3ba6decb9b871b57987e9dd3dbad5b49 | headwinds/reagent-reframe-material-ui | fx.cljc | (ns re-frame.fx
(:require
[re-frame.router :as router]
[re-frame.db :refer [app-db]]
[re-frame.interceptor :refer [->interceptor]]
[re-frame.interop :refer [set-timeout!]]
[re-frame.events :as events]
[re-frame.registrar :refer [get-handler clear-handlers register-handler]]
[re-frame.loggers :refer [console]]))
;; -- Registration ------------------------------------------------------------
(def kind :fx)
(assert (re-frame.registrar/kinds kind))
(def register (partial register-handler kind))
;; -- Interceptor -------------------------------------------------------------
(def do-fx
"An interceptor which actions a `context's` (side) `:effects`.
For each key in the `:effects` map, call the `effects handler` previously
registered using `reg-fx`.
So, if `:effects` was:
{:dispatch [:hello 42]
:db {...}
:undo \"set flag\"}
call the registered effects handlers for each of the map's keys:
`:dispatch`, `:undo` and `:db`."
(->interceptor
:id :do-fx
:after (fn do-fx-after
[context]
(doseq [[effect-k value] (:effects context)]
(if-let [effect-fn (get-handler kind effect-k true)]
(effect-fn value))))))
-- Builtin Effect Handlers ------------------------------------------------
;; :dispatch-later
;;
` dispatch ` one or more events after given delays . Expects a collection
of maps with two keys : : ` ms ` and ` : dispatch `
;;
;; usage:
;;
{ : dispatch - later [ { : ms 200 : dispatch [: event - id " param " ] } ; ; in 200ms do this : ( dispatch [: event - id " param " ] )
{ : ms 100 : dispatch [: also : this : in : 100ms ] } ] }
;;
(register
:dispatch-later
(fn [value]
(doseq [{:keys [ms dispatch] :as effect} value]
(if (or (empty? dispatch) (not (number? ms)))
(console :error "re-frame: ignoring bad :dispatch-later value:" effect)
(set-timeout! #(router/dispatch dispatch) ms)))))
;; :dispatch
;;
` dispatch ` one event . Excepts a single vector .
;;
;; usage:
;; {:dispatch [:event-id "param"] }
(register
:dispatch
(fn [value]
(if-not (vector? value)
(console :error "re-frame: ignoring bad :dispatch value. Expected a vector, but got:" value)
(router/dispatch value))))
;; :dispatch-n
;;
` dispatch ` more than one event . Expects a list or vector of events . Something for which
;; sequential? returns true.
;;
;; usage:
{ : dispatch - n ( list [: do : all ] [: three : of ] [: these ] ) }
;;
(register
:dispatch-n
(fn [value]
(if-not (sequential? value)
(console :error "re-frame: ignoring bad :dispatch-n value. Expected a collection, got got:" value))
(doseq [event value] (router/dispatch event))))
;; :deregister-event-handler
;;
;; removes a previously registered event handler. Expects either a single id (
;; typically a keyword), or a seq of ids.
;;
;; usage:
;; {:deregister-event-handler :my-id)}
;; or:
{ : deregister - event - handler [: one - id : another - id ] }
;;
(register
:deregister-event-handler
(fn [value]
(let [clear-event (partial clear-handlers events/kind)]
(if (sequential? value)
(doseq [event (if (sequential? value) value [value])]
(clear-event event))))))
;; :db
;;
;; reset! app-db with a new value. Expects a map.
;;
;; usage:
;; {:db {:key1 value1 key2 value2}}
;;
(register
:db
(fn [value]
(reset! app-db value)))
| null | https://raw.githubusercontent.com/headwinds/reagent-reframe-material-ui/8a6fba82a026cfedca38491becac85751be9a9d4/resources/public/js/out/re_frame/fx.cljc | clojure | -- Registration ------------------------------------------------------------
-- Interceptor -------------------------------------------------------------
:dispatch-later
usage:
; in 200ms do this : ( dispatch [: event - id " param " ] )
:dispatch
usage:
{:dispatch [:event-id "param"] }
:dispatch-n
sequential? returns true.
usage:
:deregister-event-handler
removes a previously registered event handler. Expects either a single id (
typically a keyword), or a seq of ids.
usage:
{:deregister-event-handler :my-id)}
or:
:db
reset! app-db with a new value. Expects a map.
usage:
{:db {:key1 value1 key2 value2}}
| (ns re-frame.fx
(:require
[re-frame.router :as router]
[re-frame.db :refer [app-db]]
[re-frame.interceptor :refer [->interceptor]]
[re-frame.interop :refer [set-timeout!]]
[re-frame.events :as events]
[re-frame.registrar :refer [get-handler clear-handlers register-handler]]
[re-frame.loggers :refer [console]]))
(def kind :fx)
(assert (re-frame.registrar/kinds kind))
(def register (partial register-handler kind))
(def do-fx
"An interceptor which actions a `context's` (side) `:effects`.
For each key in the `:effects` map, call the `effects handler` previously
registered using `reg-fx`.
So, if `:effects` was:
{:dispatch [:hello 42]
:db {...}
:undo \"set flag\"}
call the registered effects handlers for each of the map's keys:
`:dispatch`, `:undo` and `:db`."
(->interceptor
:id :do-fx
:after (fn do-fx-after
[context]
(doseq [[effect-k value] (:effects context)]
(if-let [effect-fn (get-handler kind effect-k true)]
(effect-fn value))))))
-- Builtin Effect Handlers ------------------------------------------------
` dispatch ` one or more events after given delays . Expects a collection
of maps with two keys : : ` ms ` and ` : dispatch `
{ : ms 100 : dispatch [: also : this : in : 100ms ] } ] }
(register
:dispatch-later
(fn [value]
(doseq [{:keys [ms dispatch] :as effect} value]
(if (or (empty? dispatch) (not (number? ms)))
(console :error "re-frame: ignoring bad :dispatch-later value:" effect)
(set-timeout! #(router/dispatch dispatch) ms)))))
` dispatch ` one event . Excepts a single vector .
(register
:dispatch
(fn [value]
(if-not (vector? value)
(console :error "re-frame: ignoring bad :dispatch value. Expected a vector, but got:" value)
(router/dispatch value))))
` dispatch ` more than one event . Expects a list or vector of events . Something for which
{ : dispatch - n ( list [: do : all ] [: three : of ] [: these ] ) }
(register
:dispatch-n
(fn [value]
(if-not (sequential? value)
(console :error "re-frame: ignoring bad :dispatch-n value. Expected a collection, got got:" value))
(doseq [event value] (router/dispatch event))))
{ : deregister - event - handler [: one - id : another - id ] }
(register
:deregister-event-handler
(fn [value]
(let [clear-event (partial clear-handlers events/kind)]
(if (sequential? value)
(doseq [event (if (sequential? value) value [value])]
(clear-event event))))))
(register
:db
(fn [value]
(reset! app-db value)))
|
b877017778f79d19e38b8002c81aca44328c184ec56493f130d74f3dc8117b1c | NalaGinrut/artanis | sxml.scm | -*- indent - tabs - mode : nil ; coding : utf-8 -*-
;; Copyright (C) 2014,2015
" Mu Lei " known as " NalaGinrut " < >
Artanis is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License and GNU
Lesser General Public License published by the Free Software
Foundation , either version 3 of the License , or ( at your option )
;; any later version.
Artanis is distributed in the hope that it will be useful ,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License and GNU Lesser General Public License
;; for more details.
You should have received a copy of the GNU General Public License
;; and GNU Lesser General Public License along with this program.
;; If not, see </>.
(define-module (artanis tpl sxml)
#:use-module (sxml ssax input-parse)
#:use-module (sxml ssax)
#:use-module (sxml transform)
#:use-module (ice-9 match)
#:use-module (srfi srfi-13)
#:export (sxml->xml))
These function is modified from ( sxml simple )
(define check-name
(let ((*good-cache* (make-hash-table)))
(lambda (name)
(if (not (hashq-ref *good-cache* name))
(let* ((str (symbol->string name))
(i (string-index str #\:))
(head (or (and i (substring str 0 i)) str))
(tail (and i (substring str (1+ i)))))
(and i (string-index (substring str (1+ i)) #\:)
(error "Invalid QName: more than one colon" name))
(for-each
(lambda (s)
(and s
(or (char-alphabetic? (string-ref s 0))
(eq? (string-ref s 0) #\_)
(error "Invalid name starting character" s name))
(string-for-each
(lambda (c)
(or (char-alphabetic? c) (string-index "0123456789.-_" c)
(error "Invalid name character" c s name)))
s)))
(list head tail))
(hashq-set! *good-cache* name #t))))))
The following two functions serialize tags and attributes . They are
;; being used in the node handlers for the post-order function, see
;; below.
(define (attribute-value->xml value port)
(cond
((pair? value)
(attribute-value->xml (car value) port)
(attribute-value->xml (cdr value) port))
((null? value)
*unspecified*)
((string? value)
(string->escaped-xml value port))
((procedure? value)
(with-output-to-port port value))
(else
(string->escaped-xml
(call-with-output-string (lambda (port) (display value port)))
port))))
(define (attribute->xml attr value port)
(check-name attr)
(display attr port)
(display "=\"" port)
(attribute-value->xml value port)
(display #\" port))
(define (element->xml tag attrs body port)
(check-name tag)
(display #\< port)
(display tag port)
(if attrs
(let lp ((attrs attrs))
(if (pair? attrs)
(let ((attr (car attrs)))
(display #\space port)
(if (pair? attr)
(attribute->xml (car attr) (cdr attr) port)
(error "bad attribute" tag attr))
(lp (cdr attrs)))
(if (not (null? attrs))
(error "bad attributes" tag attrs)))))
(if (pair? body)
(begin
(display #\> port)
(let lp ((body body))
(cond
((pair? body)
(sxml->xml (car body) port)
(lp (cdr body)))
((null? body)
(display "</" port)
(display tag port)
(display ">" port))
(else
(error "bad element body" tag body)))))
(display " />" port)))
;; FIXME: ensure name is valid
(define (entity->xml name port)
(display #\& port)
(display name port)
(display #\; port))
;; FIXME: ensure tag and str are valid
(define (pi->xml tag str port)
(display "<?" port)
(display tag port)
(display #\space port)
(display str port)
(display "?>" port))
(define* (sxml->xml tree #:optional (port (current-output-port)) (escape? #f))
"Serialize the sxml tree @var{tree} as XML. The output will be written
to the current output port, unless the optional argument @var{port} is
present."
(cond
((pair? tree)
(if (symbol? (car tree))
;; An element.
(let ((tag (car tree)))
(case tag
((*TOP*)
(sxml->xml (cdr tree) port escape?))
((*ENTITY*)
(if (and (list? (cdr tree)) (= (length (cdr tree)) 1))
(entity->xml (cadr tree) port)
(error "bad *ENTITY* args" (cdr tree))))
((*PI*)
(if (and (list? (cdr tree)) (= (length (cdr tree)) 2))
(pi->xml (cadr tree) (caddr tree) port)
(error "bad *PI* args" (cdr tree))))
(else
(let* ((elems (cdr tree))
(attrs (and (pair? elems) (pair? (car elems))
(eq? '@ (caar elems))
(cdar elems))))
(element->xml tag attrs (if attrs (cdr elems) elems) port)))))
;; A nodelist.
(for-each (lambda (x) (sxml->xml x port)) tree)))
((string? tree)
(if escape? (string->escaped-xml tree port) (display tree port)))
((null? tree) *unspecified*)
((not tree) *unspecified*)
((eqv? tree #t) *unspecified*)
((procedure? tree)
(with-output-to-port port tree))
(else
(when escape?
(string->escaped-xml
(call-with-output-string (lambda (port) (display tree port)))
port)))))
(define (sxml->string sxml)
"Detag an sxml tree @var{sxml} into a string. Does not perform any
formatting."
(string-concatenate-reverse
(foldts
(lambda (seed tree) ; fdown
'())
(lambda (seed kid-seed tree) ; fup
(append! kid-seed seed))
(lambda (seed tree) ; fhere
(if (string? tree) (cons tree seed) seed))
'()
sxml)))
(define (make-char-quotator char-encoding)
(let ((bad-chars (list->char-set (map car char-encoding))))
Check to see if str contains one of the characters in charset ,
;; from the position i onward. If so, return that character's index.
;; otherwise, return #f
(define (index-cset str i charset)
(string-index str charset i))
;; The body of the function
(lambda (str port)
(let ((bad-pos (index-cset str 0 bad-chars)))
(if (not bad-pos)
(display str port) ; str had all good chars
(let loop ((from 0) (to bad-pos))
(cond
((>= from (string-length str)) *unspecified*)
((not to)
(display (substring str from (string-length str)) port))
(else
(let ((quoted-char
(cdr (assv (string-ref str to) char-encoding)))
(new-to
(index-cset str (+ 1 to) bad-chars)))
(if (< from to)
(display (substring str from to) port))
(display quoted-char port)
(loop (1+ to) new-to))))))))))
;; Given a string, check to make sure it does not contain characters
;; such as '<' or '&' that require encoding. Return either the original
;; string, or a list of string fragments with special characters
;; replaced by appropriate character entities.
(define string->escaped-xml
(make-char-quotator
'((#\< . "<") (#\> . ">") (#\& . "&") (#\" . """))))
| null | https://raw.githubusercontent.com/NalaGinrut/artanis/3412d6eb5b46fde71b0965598ba085bacc2a6c12/artanis/tpl/sxml.scm | scheme | coding : utf-8 -*-
Copyright (C) 2014,2015
any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License and GNU Lesser General Public License
for more details.
and GNU Lesser General Public License along with this program.
If not, see </>.
being used in the node handlers for the post-order function, see
below.
FIXME: ensure name is valid
port))
FIXME: ensure tag and str are valid
An element.
A nodelist.
fdown
fup
fhere
from the position i onward. If so, return that character's index.
otherwise, return #f
The body of the function
str had all good chars
Given a string, check to make sure it does not contain characters
such as '<' or '&' that require encoding. Return either the original
string, or a list of string fragments with special characters
replaced by appropriate character entities.
")))) | " Mu Lei " known as " NalaGinrut " < >
Artanis is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License and GNU
Lesser General Public License published by the Free Software
Foundation , either version 3 of the License , or ( at your option )
Artanis is distributed in the hope that it will be useful ,
You should have received a copy of the GNU General Public License
(define-module (artanis tpl sxml)
#:use-module (sxml ssax input-parse)
#:use-module (sxml ssax)
#:use-module (sxml transform)
#:use-module (ice-9 match)
#:use-module (srfi srfi-13)
#:export (sxml->xml))
These function is modified from ( sxml simple )
(define check-name
(let ((*good-cache* (make-hash-table)))
(lambda (name)
(if (not (hashq-ref *good-cache* name))
(let* ((str (symbol->string name))
(i (string-index str #\:))
(head (or (and i (substring str 0 i)) str))
(tail (and i (substring str (1+ i)))))
(and i (string-index (substring str (1+ i)) #\:)
(error "Invalid QName: more than one colon" name))
(for-each
(lambda (s)
(and s
(or (char-alphabetic? (string-ref s 0))
(eq? (string-ref s 0) #\_)
(error "Invalid name starting character" s name))
(string-for-each
(lambda (c)
(or (char-alphabetic? c) (string-index "0123456789.-_" c)
(error "Invalid name character" c s name)))
s)))
(list head tail))
(hashq-set! *good-cache* name #t))))))
The following two functions serialize tags and attributes . They are
(define (attribute-value->xml value port)
(cond
((pair? value)
(attribute-value->xml (car value) port)
(attribute-value->xml (cdr value) port))
((null? value)
*unspecified*)
((string? value)
(string->escaped-xml value port))
((procedure? value)
(with-output-to-port port value))
(else
(string->escaped-xml
(call-with-output-string (lambda (port) (display value port)))
port))))
(define (attribute->xml attr value port)
(check-name attr)
(display attr port)
(display "=\"" port)
(attribute-value->xml value port)
(display #\" port))
(define (element->xml tag attrs body port)
(check-name tag)
(display #\< port)
(display tag port)
(if attrs
(let lp ((attrs attrs))
(if (pair? attrs)
(let ((attr (car attrs)))
(display #\space port)
(if (pair? attr)
(attribute->xml (car attr) (cdr attr) port)
(error "bad attribute" tag attr))
(lp (cdr attrs)))
(if (not (null? attrs))
(error "bad attributes" tag attrs)))))
(if (pair? body)
(begin
(display #\> port)
(let lp ((body body))
(cond
((pair? body)
(sxml->xml (car body) port)
(lp (cdr body)))
((null? body)
(display "</" port)
(display tag port)
(display ">" port))
(else
(error "bad element body" tag body)))))
(display " />" port)))
(define (entity->xml name port)
(display #\& port)
(display name port)
(define (pi->xml tag str port)
(display "<?" port)
(display tag port)
(display #\space port)
(display str port)
(display "?>" port))
(define* (sxml->xml tree #:optional (port (current-output-port)) (escape? #f))
"Serialize the sxml tree @var{tree} as XML. The output will be written
to the current output port, unless the optional argument @var{port} is
present."
(cond
((pair? tree)
(if (symbol? (car tree))
(let ((tag (car tree)))
(case tag
((*TOP*)
(sxml->xml (cdr tree) port escape?))
((*ENTITY*)
(if (and (list? (cdr tree)) (= (length (cdr tree)) 1))
(entity->xml (cadr tree) port)
(error "bad *ENTITY* args" (cdr tree))))
((*PI*)
(if (and (list? (cdr tree)) (= (length (cdr tree)) 2))
(pi->xml (cadr tree) (caddr tree) port)
(error "bad *PI* args" (cdr tree))))
(else
(let* ((elems (cdr tree))
(attrs (and (pair? elems) (pair? (car elems))
(eq? '@ (caar elems))
(cdar elems))))
(element->xml tag attrs (if attrs (cdr elems) elems) port)))))
(for-each (lambda (x) (sxml->xml x port)) tree)))
((string? tree)
(if escape? (string->escaped-xml tree port) (display tree port)))
((null? tree) *unspecified*)
((not tree) *unspecified*)
((eqv? tree #t) *unspecified*)
((procedure? tree)
(with-output-to-port port tree))
(else
(when escape?
(string->escaped-xml
(call-with-output-string (lambda (port) (display tree port)))
port)))))
(define (sxml->string sxml)
"Detag an sxml tree @var{sxml} into a string. Does not perform any
formatting."
(string-concatenate-reverse
(foldts
'())
(append! kid-seed seed))
(if (string? tree) (cons tree seed) seed))
'()
sxml)))
(define (make-char-quotator char-encoding)
(let ((bad-chars (list->char-set (map car char-encoding))))
Check to see if str contains one of the characters in charset ,
(define (index-cset str i charset)
(string-index str charset i))
(lambda (str port)
(let ((bad-pos (index-cset str 0 bad-chars)))
(if (not bad-pos)
(let loop ((from 0) (to bad-pos))
(cond
((>= from (string-length str)) *unspecified*)
((not to)
(display (substring str from (string-length str)) port))
(else
(let ((quoted-char
(cdr (assv (string-ref str to) char-encoding)))
(new-to
(index-cset str (+ 1 to) bad-chars)))
(if (< from to)
(display (substring str from to) port))
(display quoted-char port)
(loop (1+ to) new-to))))))))))
(define string->escaped-xml
(make-char-quotator
|
2afe6eb674829637de372c99eedd5be2b390b8083a22dc9621f06b22194a48fe | audreyt/openafp | NOP.hs | module OpenAFP.Records.PTX.NOP where
import OpenAFP.Types
import OpenAFP.Internals
data PTX_NOP = PTX_NOP {
ptx_nop_Type :: !N1
,ptx_nop :: !NStr
} deriving (Show, Typeable)
| null | https://raw.githubusercontent.com/audreyt/openafp/178e0dd427479ac7b8b461e05c263e52dd614b73/src/OpenAFP/Records/PTX/NOP.hs | haskell | module OpenAFP.Records.PTX.NOP where
import OpenAFP.Types
import OpenAFP.Internals
data PTX_NOP = PTX_NOP {
ptx_nop_Type :: !N1
,ptx_nop :: !NStr
} deriving (Show, Typeable)
| |
53659be3e98d8b661f4617f0f703b64e14b2eeb7cc23762a6b13f9cea1faac72 | Carnap/Carnap | Syntax.hs | # LANGUAGE UndecidableInstances , FlexibleInstances , MultiParamTypeClasses , GADTs , TypeOperators , PatternSynonyms , FlexibleContexts #
module Carnap.Languages.DefiniteDescription.Syntax where
import Carnap.Core.Util
import Carnap.Core.Data.Util
import Carnap.Core.Data.Types
import Carnap.Core.Data.Optics
import Carnap.Core.Data.Classes
import Carnap.Languages.PureFirstOrder.Syntax
import Carnap.Languages.PurePropositional.Util
import Carnap.Languages.PureFirstOrder.Logic.Rules (seqVar)
import Carnap.Languages.ClassicalSequent.Syntax
import Carnap.Languages.Util.LanguageClasses
import Carnap.Languages.Util.GenericConstructors
import Control.Lens (Traversal', preview, outside, (^?), (%~), (.~), (&), Prism')
import Data.Typeable (Typeable)
-----------------------------------------------
1 . Data for Definite Description Logics --
-----------------------------------------------
type FregeanDescription = DefiniteDescription Bool Int --"fregean" because every term has a denotation.
-----------------------------------------
2 . Definite Description Languages --
-----------------------------------------
type FregeanDescLex = PureLexiconFOL :|: Binders FregeanDescription :|: EndLang
type FregeanDescLang = FixLang FregeanDescLex
instance PrismDefiniteDesc FregeanDescLex Bool Int
instance PrismPropLex FregeanDescLex Bool
instance PrismSchematicProp FregeanDescLex Bool
instance PrismIndexedConstant FregeanDescLex Int
instance PrismPolyadicPredicate FregeanDescLex Int Bool
instance PrismPolyadicSchematicPredicate FregeanDescLex Int Bool
instance PrismPolyadicFunction FregeanDescLex Int Int
instance PrismPolyadicSchematicFunction FregeanDescLex Int Int
instance PrismTermEquality FregeanDescLex Int Bool
instance PrismBooleanConnLex FregeanDescLex Bool
instance PrismBooleanConst FregeanDescLex Bool
instance PrismGenericTypedLambda FregeanDescLex Term Form Int
instance PrismStandardVar FregeanDescLex Int
instance PrismSubstitutionalVariable FregeanDescLex
instance PrismGenericQuant FregeanDescLex Term Form Bool Int
instance PrismQuantContext FregeanDescLex Bool Int
instance Incrementable FregeanDescLex (Term Int) where
incHead = const Nothing
& outside (_predIdx') .~ (\(n,a) -> Just $ ppn n (ASucc a))
& outside (_spredIdx') .~ (\(n,a) -> Just $ pphin n (ASucc a))
& outside (_funcIdx') .~ (\(n,a) -> Just $ pfn n (ASucc a))
& outside (_sfuncIdx') .~ (\(n,a) -> Just $ spfn n (ASucc a))
where _predIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Form Bool) ret)
_predIdx' = _predIdx
_spredIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Form Bool) ret)
_spredIdx' = _spredIdx
_funcIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Term Int) ret)
_funcIdx' = _funcIdx
_sfuncIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Term Int) ret)
_sfuncIdx' = _sfuncIdx
instance BoundVars FregeanDescLex where
scopeUniqueVar q (LLam f) = case castTo $ foVar $ show $ maxVar (LLam f) + 1 of
Just x -> x
Nothing -> error "cast failed in ScopeUniqueVar"
scopeUniqueVar _ _ = undefined
subBoundVar = saferSubst
instance Eq (FregeanDescLang sem) where (==) = (=*)
type FregeanDescSeq = ClassicalSequentOver FregeanDescLex
instance CopulaSchema FregeanDescLang where
appSchema q@(Fx _) (LLam f) e = case ( qtype q >>= preview _all >>= \x -> (,) <$> Just x <*> castTo (foVar x)
, qtype q >>= preview _some >>= \x -> (,) <$> Just x <*> castTo (foVar x)
, dtype q >>= preview _desc >>= \x -> (,) <$> Just x <*> castTo (foVar x)
) of
(Just (x,v), _,_) -> schematize (All x) (show (f v) : e)
(_, Just (x,v),_) -> schematize (Some x) (show (f v) : e)
(_, _, Just (x,v))-> schematize (DefinDesc x) (show (f v) : e)
_ -> schematize q (show (LLam f) : e)
appSchema x y e = schematize x (show y : e)
lamSchema = defaultLamSchema
instance CopulaSchema FregeanDescSeq where
appSchema q@(Fx _) (LLam f) e = case ( qtype q >>= preview _all >>= \x -> (,) <$> Just x <*> castTo (seqVar x)
, qtype q >>= preview _some >>= \x -> (,) <$> Just x <*> castTo (seqVar x)
, dtype q >>= preview _desc >>= \x -> (,) <$> Just x <*> castTo (seqVar x)
) of
(Just (x,v), _,_) -> schematize (All x) (show (f v) : e)
(_, Just (x,v),_) -> schematize (Some x) (show (f v) : e)
(_, _, Just (x,v))-> schematize (DefinDesc x) (show (f v) : e)
_ -> schematize q (show (LLam f) : e)
appSchema x y e = schematize x (show y : e)
lamSchema = defaultLamSchema
instance RelabelVars FregeanDescLex Form Bool where
subBinder (q :!$: LLam f) y = case ( qtype q >>= preview _all
, qtype q >>= preview _some
, oftype (LLam f)) of
(Just _, _, Just (LLam f')) -> Just $ lall y f'
(_, Just _, Just (LLam f')) -> Just $ lsome y f'
_ -> Nothing
subBinder _ _ = Nothing
instance RelabelVars FregeanDescLex Term Int where
subBinder (q :!$: LLam f) y = case ( dtype q >>= preview _desc , oftype (LLam f)) of
(Just _, Just (LLam f')) -> Just $ ddesc y f'
_ -> Nothing
subBinder _ _ = Nothing
instance CanonicalForm (FregeanDescLang (Form Bool)) where
canonical = relabelVars [i ++ j | i <- ["x"], j <- map show [1 ..]] . (termsOf %~ relabelVars [i ++ j | i <- ["y"], j <- map show [1 ..]])
instance HasLiterals FregeanDescLex Bool where
isAtom a | (a ^? _propIndex) /= Nothing = True
| (a ^? binaryOpPrism _termEq') /= Nothing = True
| otherwise = withHead (\h -> not . null $ h ^? _predIdx') a
where _predIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Form Bool) ret)
_predIdx' = _predIdx
_termEq' :: Prism' (FregeanDescLang (Term Int -> Term Int -> Form Bool)) ()
_termEq' = _termEq
instance Eq (FregeanDescSeq sem) where (==) = (=*)
dtype :: Typeable a => FixLang lex a -> Maybe (FixLang lex ((Term Int -> Form Bool)-> Term Int))
dtype = castTo
| null | https://raw.githubusercontent.com/Carnap/Carnap/99546e377008247c5a1e8de1e294aac8e22584d7/Carnap/src/Carnap/Languages/DefiniteDescription/Syntax.hs | haskell | ---------------------------------------------
---------------------------------------------
"fregean" because every term has a denotation.
---------------------------------------
--------------------------------------- | # LANGUAGE UndecidableInstances , FlexibleInstances , MultiParamTypeClasses , GADTs , TypeOperators , PatternSynonyms , FlexibleContexts #
module Carnap.Languages.DefiniteDescription.Syntax where
import Carnap.Core.Util
import Carnap.Core.Data.Util
import Carnap.Core.Data.Types
import Carnap.Core.Data.Optics
import Carnap.Core.Data.Classes
import Carnap.Languages.PureFirstOrder.Syntax
import Carnap.Languages.PurePropositional.Util
import Carnap.Languages.PureFirstOrder.Logic.Rules (seqVar)
import Carnap.Languages.ClassicalSequent.Syntax
import Carnap.Languages.Util.LanguageClasses
import Carnap.Languages.Util.GenericConstructors
import Control.Lens (Traversal', preview, outside, (^?), (%~), (.~), (&), Prism')
import Data.Typeable (Typeable)
type FregeanDescLex = PureLexiconFOL :|: Binders FregeanDescription :|: EndLang
type FregeanDescLang = FixLang FregeanDescLex
instance PrismDefiniteDesc FregeanDescLex Bool Int
instance PrismPropLex FregeanDescLex Bool
instance PrismSchematicProp FregeanDescLex Bool
instance PrismIndexedConstant FregeanDescLex Int
instance PrismPolyadicPredicate FregeanDescLex Int Bool
instance PrismPolyadicSchematicPredicate FregeanDescLex Int Bool
instance PrismPolyadicFunction FregeanDescLex Int Int
instance PrismPolyadicSchematicFunction FregeanDescLex Int Int
instance PrismTermEquality FregeanDescLex Int Bool
instance PrismBooleanConnLex FregeanDescLex Bool
instance PrismBooleanConst FregeanDescLex Bool
instance PrismGenericTypedLambda FregeanDescLex Term Form Int
instance PrismStandardVar FregeanDescLex Int
instance PrismSubstitutionalVariable FregeanDescLex
instance PrismGenericQuant FregeanDescLex Term Form Bool Int
instance PrismQuantContext FregeanDescLex Bool Int
instance Incrementable FregeanDescLex (Term Int) where
incHead = const Nothing
& outside (_predIdx') .~ (\(n,a) -> Just $ ppn n (ASucc a))
& outside (_spredIdx') .~ (\(n,a) -> Just $ pphin n (ASucc a))
& outside (_funcIdx') .~ (\(n,a) -> Just $ pfn n (ASucc a))
& outside (_sfuncIdx') .~ (\(n,a) -> Just $ spfn n (ASucc a))
where _predIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Form Bool) ret)
_predIdx' = _predIdx
_spredIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Form Bool) ret)
_spredIdx' = _spredIdx
_funcIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Term Int) ret)
_funcIdx' = _funcIdx
_sfuncIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Term Int) ret)
_sfuncIdx' = _sfuncIdx
instance BoundVars FregeanDescLex where
scopeUniqueVar q (LLam f) = case castTo $ foVar $ show $ maxVar (LLam f) + 1 of
Just x -> x
Nothing -> error "cast failed in ScopeUniqueVar"
scopeUniqueVar _ _ = undefined
subBoundVar = saferSubst
instance Eq (FregeanDescLang sem) where (==) = (=*)
type FregeanDescSeq = ClassicalSequentOver FregeanDescLex
instance CopulaSchema FregeanDescLang where
appSchema q@(Fx _) (LLam f) e = case ( qtype q >>= preview _all >>= \x -> (,) <$> Just x <*> castTo (foVar x)
, qtype q >>= preview _some >>= \x -> (,) <$> Just x <*> castTo (foVar x)
, dtype q >>= preview _desc >>= \x -> (,) <$> Just x <*> castTo (foVar x)
) of
(Just (x,v), _,_) -> schematize (All x) (show (f v) : e)
(_, Just (x,v),_) -> schematize (Some x) (show (f v) : e)
(_, _, Just (x,v))-> schematize (DefinDesc x) (show (f v) : e)
_ -> schematize q (show (LLam f) : e)
appSchema x y e = schematize x (show y : e)
lamSchema = defaultLamSchema
instance CopulaSchema FregeanDescSeq where
appSchema q@(Fx _) (LLam f) e = case ( qtype q >>= preview _all >>= \x -> (,) <$> Just x <*> castTo (seqVar x)
, qtype q >>= preview _some >>= \x -> (,) <$> Just x <*> castTo (seqVar x)
, dtype q >>= preview _desc >>= \x -> (,) <$> Just x <*> castTo (seqVar x)
) of
(Just (x,v), _,_) -> schematize (All x) (show (f v) : e)
(_, Just (x,v),_) -> schematize (Some x) (show (f v) : e)
(_, _, Just (x,v))-> schematize (DefinDesc x) (show (f v) : e)
_ -> schematize q (show (LLam f) : e)
appSchema x y e = schematize x (show y : e)
lamSchema = defaultLamSchema
instance RelabelVars FregeanDescLex Form Bool where
subBinder (q :!$: LLam f) y = case ( qtype q >>= preview _all
, qtype q >>= preview _some
, oftype (LLam f)) of
(Just _, _, Just (LLam f')) -> Just $ lall y f'
(_, Just _, Just (LLam f')) -> Just $ lsome y f'
_ -> Nothing
subBinder _ _ = Nothing
instance RelabelVars FregeanDescLex Term Int where
subBinder (q :!$: LLam f) y = case ( dtype q >>= preview _desc , oftype (LLam f)) of
(Just _, Just (LLam f')) -> Just $ ddesc y f'
_ -> Nothing
subBinder _ _ = Nothing
instance CanonicalForm (FregeanDescLang (Form Bool)) where
canonical = relabelVars [i ++ j | i <- ["x"], j <- map show [1 ..]] . (termsOf %~ relabelVars [i ++ j | i <- ["y"], j <- map show [1 ..]])
instance HasLiterals FregeanDescLex Bool where
isAtom a | (a ^? _propIndex) /= Nothing = True
| (a ^? binaryOpPrism _termEq') /= Nothing = True
| otherwise = withHead (\h -> not . null $ h ^? _predIdx') a
where _predIdx' :: Typeable ret => Prism' (FregeanDescLang ret) (Int, Arity (Term Int) (Form Bool) ret)
_predIdx' = _predIdx
_termEq' :: Prism' (FregeanDescLang (Term Int -> Term Int -> Form Bool)) ()
_termEq' = _termEq
instance Eq (FregeanDescSeq sem) where (==) = (=*)
-- | Try to view a term of the language as the definite-description
-- binder, i.e. as something of type @(Term Int -> Form Bool) -> Term Int@.
-- Returns 'Nothing' when the phantom type does not match.
dtype :: Typeable a => FixLang lex a -> Maybe (FixLang lex ((Term Int -> Form Bool)-> Term Int))
dtype = castTo
|
e4bc2f1430563fd547960f7258bff016cc4e62c25cb11c02dac534b5997415c2 | rlhk/logseq-url-plus | api.cljs | CAUTION : ONLY allows pure fns to facilitate Node.js based TDD setup
(ns api
(:require
[cuerdas.core :as str]))
TODO
;; Logseq plugin settings schema, rendered in the plugin's settings UI.
;; Each map declares one user-configurable setting: its storage key,
;; value type, display title, help text, and default value.
;; NOTE(review): the :description URL below appears truncated in this
;; source -- verify the full Twitter OAuth docs link upstream.
(def settings-schema
  [{:key "TwitterAccessToken"
    :type "string"
    :title "Twitter Access Token"
    :description "See: -2-0/bearer-tokens"
    :default ""}])
(defn md-link->label-and-url
  "Parse a Markdown inline link into a [label url] pair.
  Returns [label url] when `maybe-link` contains \"[label](url)\";
  otherwise returns [nil maybe-link] so callers can destructure the
  result uniformly.  (The previous docstring claimed the input was
  returned unchanged on a miss, which was inaccurate.)"
  [maybe-link]
  (if-let [[_ label url] (re-find #"\[(.*?)\]\((.*?)\)" maybe-link)]
    [label url]
    [nil maybe-link]))
(defn edn->logseq-attrs
  "Render EDN data as Logseq attribute lines, one \"key:: value\" per line.
  If `data` is sequential, only its first element is considered.
  Returns an empty string when no map-like record can be found.
  Keys are rendered via `name`, so keyword namespaces are dropped.
  (Removed: dead commented-out `warning` code and a redundant `str`
  wrapper around the join -- behavior is unchanged.)"
  [data]
  (let [candidate (if (sequential? data) (first data) data)]
    (if (map? candidate)
      (str/join "\n" (for [[k v] candidate] (str (name k) ":: " v)))
      "")))
(defn edn->logseq-blocks
  "Convert EDN data into Logseq block map(s) of the form
  {:content <attribute string>}.  Sequential input yields a seq of
  block maps, one per element; any other input yields a single map."
  [data]
  (if-not (sequential? data)
    {:content (edn->logseq-attrs data)}
    (map (fn [entry] {:content (edn->logseq-attrs entry)}) data)))
| null | https://raw.githubusercontent.com/rlhk/logseq-url-plus/a636623f79204fb396ffb2f776287c37879f1ef4/src/main/api.cljs | clojure | warning (if (map? m)
"Can't determine a map-like record.") | CAUTION : ONLY allows pure fns to facilitate Node.js based TDD setup
(ns api
(:require
[cuerdas.core :as str]))
TODO
(def settings-schema
[{:key "TwitterAccessToken"
:type "string"
:title "Twitter Access Token"
:description "See: -2-0/bearer-tokens"
:default ""}])
(defn md-link->label-and-url
"Convert markdown link to [label, url], return input as it is if not a markdown link."
[maybe-link]
(let [output (re-find #"\[(.*?)\]\((.*?)\)" maybe-link)]
(if output (rest output), [nil maybe-link])))
(defn edn->logseq-attrs
"Convert EDN data to Logseq attributes string. Assume input is map."
[data]
(let [maybe-map (if (sequential? data) (first data) data)
m (when (map? maybe-map) maybe-map)
( when ( sequential ? data ) " Data is a collection . Taking the first map - like record . " )
]
(str
#_(when warning (str "#+BEGIN_WARNING\n" warning "\n#+END_WARNING\n"))
(str/join "\n" (for [[k v] m] (str (name k) ":: " v))))))
(defn edn->logseq-blocks
"Convert EDN data to Logseq attribute blocks"
[data]
(if (sequential? data)
(map #(identity {:content (edn->logseq-attrs %)}) data)
{:content (edn->logseq-attrs data)}))
|
05d943849a4b998ff22c9d6c233119c6c78a7e485218fd2805005ff52b21d671 | spechub/Hets | Parser.hs | # LANGUAGE ScopedTypeVariables #
module Parser where
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Logic
import Grothendieck
import Structured
import Data.Dynamic
import Data.Maybe
-- | Parse a heterogeneous structured specification.
--
-- The 'LogicGraph' argument supplies the known logics (its first entry
-- serves as the start logic) and the named inter-logic translations.
-- The current logic is threaded through the parser as user state, so a
-- @logic <name>@ prefix or a @with logic <name>@ translation changes
-- how subsequent basic specs are parsed.
--
-- NOTE(review): all failures (parse errors, unknown logic/translation
-- names, translation source-logic mismatches) call 'error' rather than
-- returning an 'Either'.
hetParse :: LogicGraph -> String -> SPEC
hetParse (logics@((_, defaultLogic) : _), translations) input =
  case runParser spec defaultLogic "" input of
    Left err -> error ("parse error at " ++ show err)
    Right x -> x
  where
  spec :: CharParser AnyLogic SPEC
  spec = buildExpressionParser table basic
         <?> "SPEC"
  -- A basic spec in the current logic, wrapped into the Grothendieck sum.
  basic = do { G_logic id <- getState;
               b <- parse_basic_spec id;
               return (Basic_spec (G_basic_spec id b))}
  -- Operator table: prefix "logic L" switches the current logic;
  -- postfix "with ..." applies either a named inter-logic translation
  -- (checking, via 'coerce', that its source matches the current logic)
  -- or an intra-logic symbol mapping; infix "then" builds extensions.
  table = [[Prefix (do {string "logic"; spaces;
                        name <- many1 alphaNum;
                        setState
                         (fromMaybe (error ("logic " ++ name ++ " unknown"))
                         (lookup name logics));
                        spaces; return id } )],
           [Postfix (do
              string "with"; spaces;
              do string "logic"; spaces
                 name <- many1 alphaNum
                 G_logic (id :: src) <- getState
                 case lookup name translations of
                   Nothing -> error ("translation " ++ name ++ " unknown")
                   Just (G_LTR tr) ->
                     case coerce (source tr) :: Maybe src of
                       Nothing -> error "translation type mismatch"
                       Just _ -> do
                         setState (G_logic (target tr))
                         return (\ sp -> Inter_Translation sp (G_LTR tr))
               <|> do G_logic id <- getState
                      sy <- parse_symbol_mapping id
                      spaces
                      return (\ sp -> Intra_Translation sp (G_symbol_mapping_list id sy))
              )],
           [Infix (do {string "then"; spaces; return Extension}) AssocLeft]
          ]
| null | https://raw.githubusercontent.com/spechub/Hets/af7b628a75aab0d510b8ae7f067a5c9bc48d0f9e/mini/Parser.hs | haskell | # LANGUAGE ScopedTypeVariables #
module Parser where
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Logic
import Grothendieck
import Structured
import Data.Dynamic
import Data.Maybe
hetParse :: LogicGraph -> String -> SPEC
hetParse (logics@((_, defaultLogic) : _), translations) input =
case runParser spec defaultLogic "" input of
Left err -> error ("parse error at " ++ show err)
Right x -> x
where
spec :: CharParser AnyLogic SPEC
spec = buildExpressionParser table basic
<?> "SPEC"
basic = do { G_logic id <- getState;
b <- parse_basic_spec id;
return (Basic_spec (G_basic_spec id b))}
table = [[Prefix (do {string "logic"; spaces;
name <- many1 alphaNum;
setState
(fromMaybe (error ("logic " ++ name ++ " unknown"))
(lookup name logics));
spaces; return id } )],
[Postfix (do
string "with"; spaces;
do string "logic"; spaces
name <- many1 alphaNum
G_logic (id :: src) <- getState
case lookup name translations of
Nothing -> error ("translation " ++ name ++ " unknown")
Just (G_LTR tr) ->
case coerce (source tr) :: Maybe src of
Nothing -> error "translation type mismatch"
Just _ -> do
setState (G_logic (target tr))
return (\ sp -> Inter_Translation sp (G_LTR tr))
<|> do G_logic id <- getState
sy <- parse_symbol_mapping id
spaces
return (\ sp -> Intra_Translation sp (G_symbol_mapping_list id sy))
)],
[Infix (do {string "then"; spaces; return Extension}) AssocLeft]
]
| |
4ab90d1fcd163c3d16a8ceae3e70c25e4ceff97a07307d616678064ff4e9331f | ocamllabs/opam-doc | printdoctree.ml | open Format
open Doctree
open Info
(* [line i ppf fmt] prints [2 * i] spaces of indentation on [ppf] and
   then formats [fmt]; thanks to partial application of [fprintf], any
   remaining format arguments are supplied by the caller, e.g.
   [line 1 ppf "%s\n" s]. *)
let line i f s (*...*) =
  fprintf f "%s" (String.make (2*i) ' ');
  fprintf f s (*...*)
(* Print list [l] at indent level [i]: "[]" when empty, otherwise the
   elements (each rendered by [f] at depth [i + 1]) between "[" and "]"
   brackets printed at the current indent. *)
let list i f ppf l =
  let pad () = Format.fprintf ppf "%s" (String.make (2 * i) ' ') in
  match l with
  | [] ->
    pad (); Format.fprintf ppf "[]\n"
  | _ :: _ ->
    pad (); Format.fprintf ppf "[\n";
    List.iter (f (i + 1) ppf) l;
    pad (); Format.fprintf ppf "]\n"
(* Print an optional value at indent level [i]: "None" when absent,
   otherwise "Some" followed by the payload rendered by [f] at depth
   [i + 1]. *)
let option i f ppf x =
  let pad = String.make (2 * i) ' ' in
  match x with
  | None -> Format.fprintf ppf "%sNone\n" pad
  | Some v ->
    Format.fprintf ppf "%sSome\n" pad;
    f (i + 1) ppf v
(* Print a quoted string leaf at indent [i]. *)
let string i ppf s = line i ppf "\"%s\"\n" s
(* Print one documentation text element at indent [i].  Leaf
   constructors carrying a string are shown with their payload;
   structured constructors (Style, List, Enum, ...) are printed as
   uppercase placeholder tags only. *)
let text_element i ppf x =
  line i ppf "text_element\n";
  let i = (i+1) in
  match x with
  | Raw s -> line i ppf "Raw \"%s\"\n" s
  | Code s -> line i ppf "Code \"%s\"\n" s
  | PreCode s -> line i ppf "PreCode \"%s\"\n" s
  | Verbatim s -> line i ppf "Verbatim \"%s\"\n" s
  | Style _ -> line i ppf "STYLE\n"
  (* Fix: this placeholder previously printed "LISR" (typo for "LIST"). *)
  | List _ -> line i ppf "LIST\n"
  | Enum _ -> line i ppf "ENUM\n"
  | Newline -> line i ppf "Newline\n"
  | Block _ -> line i ppf "BLOCK\n"
  | Title _ -> line i ppf "TITLE\n"
  | Ref _ -> line i ppf "REF\n"
  | Special_ref _ -> line i ppf "SPECIAL_REF\n"
  | Target _ -> line i ppf "TARGET\n"
(* Print a documentation text, i.e. a list of text elements. *)
let text i ppf x = list i text_element ppf x
(* Placeholder printer for @see references (not yet implemented). *)
let see i ppf x = line i ppf "SEE\n"
(* Print an [Info.info] documentation record, one "field =" header per
   field followed by its value at the next indent level.  Fields whose
   element type has no dedicated printer yet (before, params, raised
   exceptions, custom tags) print uppercase placeholders. *)
let info i ppf x =
  line i ppf "info\n";
  let i = (i+1) in
  line i ppf "i_desc =\n";
  option (i+1) text ppf x.i_desc;
  line i ppf "i_authors =\n";
  list (i+1) string ppf x.i_authors;
  line i ppf "i_version =\n";
  option (i+1) string ppf x.i_version;
  line i ppf "i_sees =\n";
  list (i+1) see ppf x.i_sees;
  line i ppf "i_since =\n";
  option (i+1) string ppf x.i_since;
  line i ppf "i_before =\n";
  list (i+1) (fun i ppf _ -> line i ppf "BEFORE\n") ppf x.i_before;
  line i ppf "i_deprecated =\n";
  option (i+1) text ppf x.i_deprecated;
  line i ppf "i_params =\n";
  list (i+1) (fun i ppf _ -> line i ppf "PARAM\n") ppf x.i_params;
  line i ppf "i_raised_exceptions =\n";
  list (i+1) (fun i ppf _ -> line i ppf "RAISED_EXCEPTION\n") ppf x.i_raised_exceptions;
  line i ppf "i_return_value =\n";
  option (i+1) text ppf x.i_return_value;
  line i ppf "i_custom =\n";
  list (i+1) (fun i ppf _ -> line i ppf "CUSTOM\n") ppf x.i_custom
(* Print a (name, optional info) pair: the constructor/field name
   followed by its attached documentation, if any. *)
let string_x_info_option i ppf (s, i_opt) =
  line i ppf "\"%s\"\n" s;
  option (i+1) info ppf i_opt
(* Print a type declaration's kind: abstract, variant (list of
   constructors), or record (list of fields). *)
let type_kind i ppf x =
  match x with
    Dtype_abstract ->
      line i ppf "Dtype_abstract\n"
  | Dtype_variant l ->
      line i ppf "Dtype_variant\n";
      list (i+1) string_x_info_option ppf l;
  | Dtype_record l ->
      line i ppf "Dtype_record\n";
      list (i+1) string_x_info_option ppf l
(* Mutually recursive printers for the class- and module-language
   fragments of the doctree.  Fully implemented printers emit one
   constructor name per line (indent tracks nesting depth);
   unimplemented ones emit an uppercase placeholder tag. *)
let rec class_type i ppf x =
  match x with
    Dcty_constr ->
      line i ppf "Dcty_constr\n"
  | Dcty_signature cs ->
      line i ppf "Dcty_signature\n";
      class_signature (i+1) ppf cs
  | Dcty_fun ct ->
      line i ppf "Dcty_fun\n";
      class_type (i+1) ppf ct
and class_signature i ppf x = list i class_type_field ppf x
and class_type_field i ppf x =
  line i ppf "CLASS_TYPE_FIELD\n"
and class_expr i ppf x =
  line i ppf "CLASS_EXPR\n"
and class_structure i ppf x = list i class_field ppf x
and class_field i ppf x =
  line i ppf "CLASS_FIELD\n"
(* Type expressions for the module language *)
and module_type i ppf x =
  match x with
    Dmty_ident ->
      line i ppf "Dmty_ident\n"
  | Dmty_signature s ->
      line i ppf "Dmty_signature\n";
      signature (i+1) ppf s
  | Dmty_functor(mt1, mt2) ->
      line i ppf "Dmty_functor\n";
      module_type (i+1) ppf mt1;
      module_type (i+1) ppf mt2;
  | Dmty_with mt ->
      line i ppf "Dmty_with\n";
      module_type (i+1) ppf mt
  | Dmty_typeof me ->
      line i ppf "Dmty_typeof\n";
      module_expr (i+1) ppf me
and signature i ppf x = list i signature_item ppf x
(* A signature item: its surrounding doc comments, then its
   description constructor. *)
and signature_item i ppf x =
  line i ppf "signature_item\n";
  let i = i+1 in
  line i ppf "dsig_info\n";
  option (i+1) info ppf x.dsig_info;
  line i ppf "dsig_after_info\n";
  option (i+1) info ppf x.dsig_after_info;
  line i ppf "dsig_desc\n";
  let i = i+1 in
  match x.dsig_desc with
  | Dsig_value s -> line i ppf "Dsig_value \"%s\"\n" s
  | Dsig_type(s, tk) ->
      line i ppf "Dsig_type \"%s\"\n" s;
      type_kind i ppf tk
  | Dsig_exception s -> line i ppf "Dsig_exception \"%s\"\n" s
  | Dsig_module(s, mt) ->
      line i ppf "Dsig_module \"%s\"\n" s;
      module_type i ppf mt
  | Dsig_recmodule(s, mt) ->
      line i ppf "Dsig_recmodule \"%s\"\n" s;
      module_type i ppf mt
  | Dsig_modtype(s, mt_opt) ->
      line i ppf "Dsig_modtype \"%s\"\n" s;
      option i module_type ppf mt_opt
  | Dsig_open -> line i ppf "Dsig_open\n"
  | Dsig_include mt ->
      line i ppf "Dsig_include\n";
      module_type i ppf mt
  | Dsig_class(s, ct) ->
      line i ppf "Dsig_class \"%s\"\n" s;
      class_type i ppf ct
  | Dsig_class_type(s, ct) ->
      line i ppf "Dsig_class_type \"%s\"\n" s;
      class_type i ppf ct
  | Dsig_comment -> line i ppf "Dsig_comment\n"
  | Dsig_stop -> line i ppf "Dsig_stop\n"
and module_expr i ppf x =
  match x with
    Dmod_ident ->
      line i ppf "Dmod_ident\n"
  | Dmod_structure s ->
      line i ppf "Dmod_structure\n";
      structure (i+1) ppf s
  | Dmod_functor(mt, me) ->
      line i ppf "Dmod_functor\n";
      module_type (i+1) ppf mt;
      module_expr (i+1) ppf me;
  | Dmod_apply(me1, me2) ->
      line i ppf "Dmod_apply\n";
      module_expr (i+1) ppf me1;
      module_expr (i+1) ppf me2;
  | Dmod_constraint(me, mt) ->
      line i ppf "Dmod_constraint\n";
      module_expr (i+1) ppf me;
      module_type (i+1) ppf mt;
  | Dmod_unpack ->
      line i ppf "Dmod_unpack\n"
and structure i ppf x = list i structure_item ppf x
and structure_item i ppf x =
  line i ppf "STRUCTURE_ITEM\n"
(* A parsed interface: its leading doc comment and its items. *)
and interface i ppf x =
  line i ppf "interface\n";
  let i = i+1 in
  line i ppf "dintf_info\n";
  option (i+1) info ppf x.dintf_info;
  line i ppf "dintf_items\n";
  list (i+1) signature_item ppf x.dintf_items;
and implementation i ppf x =
  line i ppf "IMPLEMENTATION\n"
(* Entry point: dispatch on interface vs implementation file. *)
and file i ppf x =
  match x with
  | Dfile_intf intf ->
      line i ppf "Dfile_intf\n";
      interface (i+1) ppf intf
  | Dfile_impl impl ->
      line i ppf "Dfile_impl\n";
      implementation (i+1) ppf impl
| null | https://raw.githubusercontent.com/ocamllabs/opam-doc/4f5a332750177e3016b82d5923a681510335af4c/src/bin-doc/printdoctree.ml | ocaml | ...
...
Type expressions for the module language | open Format
open Doctree
open Info
fprintf f "%s" (String.make (2*i) ' ');
let list i f ppf l =
match l with
| [] -> line i ppf "[]\n"
| _ :: _ ->
line i ppf "[\n";
List.iter (f (i+1) ppf) l;
line i ppf "]\n"
let option i f ppf x =
match x with
| None -> line i ppf "None\n";
| Some x ->
line i ppf "Some\n";
f (i+1) ppf x
let string i ppf s = line i ppf "\"%s\"\n" s
let text_element i ppf x =
line i ppf "text_element\n";
let i = (i+1) in
match x with
| Raw s -> line i ppf "Raw \"%s\"\n" s
| Code s -> line i ppf "Code \"%s\"\n" s
| PreCode s -> line i ppf "PreCode \"%s\"\n" s
| Verbatim s -> line i ppf "Verbatim \"%s\"\n" s
| Style _ -> line i ppf "STYLE\n"
| List _ -> line i ppf "LISR\n"
| Enum _ -> line i ppf "ENUM\n"
| Newline -> line i ppf "Newline\n"
| Block _ -> line i ppf "BLOCK\n"
| Title _ -> line i ppf "TITLE\n"
| Ref _ -> line i ppf "REF\n"
| Special_ref _ -> line i ppf "SPECIAL_REF\n"
| Target _ -> line i ppf "TARGET\n"
let text i ppf x = list i text_element ppf x
let see i ppf x = line i ppf "SEE\n"
let info i ppf x =
line i ppf "info\n";
let i = (i+1) in
line i ppf "i_desc =\n";
option (i+1) text ppf x.i_desc;
line i ppf "i_authors =\n";
list (i+1) string ppf x.i_authors;
line i ppf "i_version =\n";
option (i+1) string ppf x.i_version;
line i ppf "i_sees =\n";
list (i+1) see ppf x.i_sees;
line i ppf "i_since =\n";
option (i+1) string ppf x.i_since;
line i ppf "i_before =\n";
list (i+1) (fun i ppf _ -> line i ppf "BEFORE\n") ppf x.i_before;
line i ppf "i_deprecated =\n";
option (i+1) text ppf x.i_deprecated;
line i ppf "i_params =\n";
list (i+1) (fun i ppf _ -> line i ppf "PARAM\n") ppf x.i_params;
line i ppf "i_raised_exceptions =\n";
list (i+1) (fun i ppf _ -> line i ppf "RAISED_EXCEPTION\n") ppf x.i_raised_exceptions;
line i ppf "i_return_value =\n";
option (i+1) text ppf x.i_return_value;
line i ppf "i_custom =\n";
list (i+1) (fun i ppf _ -> line i ppf "CUSTOM\n") ppf x.i_custom
let string_x_info_option i ppf (s, i_opt) =
line i ppf "\"%s\"\n" s;
option (i+1) info ppf i_opt
let type_kind i ppf x =
match x with
Dtype_abstract ->
line i ppf "Dtype_abstract\n"
| Dtype_variant l ->
line i ppf "Dtype_variant\n";
list (i+1) string_x_info_option ppf l;
| Dtype_record l ->
line i ppf "Dtype_record\n";
list (i+1) string_x_info_option ppf l
let rec class_type i ppf x =
match x with
Dcty_constr ->
line i ppf "Dcty_constr\n"
| Dcty_signature cs ->
line i ppf "Dcty_signature\n";
class_signature (i+1) ppf cs
| Dcty_fun ct ->
line i ppf "Dcty_fun\n";
class_type (i+1) ppf ct
and class_signature i ppf x = list i class_type_field ppf x
and class_type_field i ppf x =
line i ppf "CLASS_TYPE_FIELD\n"
and class_expr i ppf x =
line i ppf "CLASS_EXPR\n"
and class_structure i ppf x = list i class_field ppf x
and class_field i ppf x =
line i ppf "CLASS_FIELD\n"
and module_type i ppf x =
match x with
Dmty_ident ->
line i ppf "Dmty_ident\n"
| Dmty_signature s ->
line i ppf "Dmty_signature\n";
signature (i+1) ppf s
| Dmty_functor(mt1, mt2) ->
line i ppf "Dmty_functor\n";
module_type (i+1) ppf mt1;
module_type (i+1) ppf mt2;
| Dmty_with mt ->
line i ppf "Dmty_with\n";
module_type (i+1) ppf mt
| Dmty_typeof me ->
line i ppf "Dmty_typeof\n";
module_expr (i+1) ppf me
and signature i ppf x = list i signature_item ppf x
and signature_item i ppf x =
line i ppf "signature_item\n";
let i = i+1 in
line i ppf "dsig_info\n";
option (i+1) info ppf x.dsig_info;
line i ppf "dsig_after_info\n";
option (i+1) info ppf x.dsig_after_info;
line i ppf "dsig_desc\n";
let i = i+1 in
match x.dsig_desc with
| Dsig_value s -> line i ppf "Dsig_value \"%s\"\n" s
| Dsig_type(s, tk) ->
line i ppf "Dsig_type \"%s\"\n" s;
type_kind i ppf tk
| Dsig_exception s -> line i ppf "Dsig_exception \"%s\"\n" s
| Dsig_module(s, mt) ->
line i ppf "Dsig_module \"%s\"\n" s;
module_type i ppf mt
| Dsig_recmodule(s, mt) ->
line i ppf "Dsig_recmodule \"%s\"\n" s;
module_type i ppf mt
| Dsig_modtype(s, mt_opt) ->
line i ppf "Dsig_modtype \"%s\"\n" s;
option i module_type ppf mt_opt
| Dsig_open -> line i ppf "Dsig_open\n"
| Dsig_include mt ->
line i ppf "Dsig_include\n";
module_type i ppf mt
| Dsig_class(s, ct) ->
line i ppf "Dsig_class \"%s\"\n" s;
class_type i ppf ct
| Dsig_class_type(s, ct) ->
line i ppf "Dsig_class_type \"%s\"\n" s;
class_type i ppf ct
| Dsig_comment -> line i ppf "Dsig_comment\n"
| Dsig_stop -> line i ppf "Dsig_stop\n"
and module_expr i ppf x =
match x with
Dmod_ident ->
line i ppf "Dmod_ident\n"
| Dmod_structure s ->
line i ppf "Dmod_structure\n";
structure (i+1) ppf s
| Dmod_functor(mt, me) ->
line i ppf "Dmod_functor\n";
module_type (i+1) ppf mt;
module_expr (i+1) ppf me;
| Dmod_apply(me1, me2) ->
line i ppf "Dmod_apply\n";
module_expr (i+1) ppf me1;
module_expr (i+1) ppf me2;
| Dmod_constraint(me, mt) ->
line i ppf "Dmod_constraint\n";
module_expr (i+1) ppf me;
module_type (i+1) ppf mt;
| Dmod_unpack ->
line i ppf "Dmod_unpack\n"
and structure i ppf x = list i structure_item ppf x
and structure_item i ppf x =
line i ppf "STRUCTURE_ITEM\n"
and interface i ppf x =
line i ppf "interface\n";
let i = i+1 in
line i ppf "dintf_info\n";
option (i+1) info ppf x.dintf_info;
line i ppf "dintf_items\n";
list (i+1) signature_item ppf x.dintf_items;
and implementation i ppf x =
line i ppf "IMPLEMENTATION\n"
and file i ppf x =
match x with
| Dfile_intf intf ->
line i ppf "Dfile_intf\n";
interface (i+1) ppf intf
| Dfile_impl impl ->
line i ppf "Dfile_impl\n";
implementation (i+1) ppf impl
|
093372e247b901c5e9bce446e2cefcea844e60791f8d9f5c4672be36235220a1 | jdhorwitz/shadow-reframe-antizer | subs.cljs | (ns app.subs
(:require [re-frame.core :as rf]))
;; Subscription: keyword identifying the page currently displayed.
(rf/reg-sub
 :active-page
 (fn [db _]
   (:active-page db)))
;; Subscription: the counter value stored in app-db.
(rf/reg-sub
 :counter
 (fn [db _]
   (:counter db)))
;; Subscription: id of the currently selected topic.
(rf/reg-sub
 :topic-id
 (fn [db _]
   (:topic-id db)))
| null | https://raw.githubusercontent.com/jdhorwitz/shadow-reframe-antizer/e73029576406b1fe153398b689515ad88d2045b0/src/app/subs.cljs | clojure | (ns app.subs
(:require [re-frame.core :as rf]))
(rf/reg-sub
:active-page
(fn [db _]
(:active-page db)))
(rf/reg-sub
:counter
(fn [db _]
(:counter db)))
(rf/reg-sub
:topic-id
(fn [db _]
(:topic-id db)))
| |
eb41c73c2c34fd7209677751cd95971c8b25b32da21a4161bc6b42f294a2f13d | astrada/google-drive-ocamlfuse | cacheData.mli | type t = {
cache_dir : string;
db_path : string;
busy_timeout : int;
in_memory : bool;
autosaving_interval : int;
}
module Resource : sig
module State : sig
type t =
| Synchronized
| ToDownload
| Downloading
| ToUpload
| Uploading
| NotFound
val to_string : t -> string
val of_string : string -> t
end
type t = {
id : int64;
remote_id : string option;
name : string option;
mime_type : string option;
created_time : float option;
modified_time : float option;
viewed_by_me_time : float option;
file_extension : string option;
full_file_extension : string option;
md5_checksum : string option;
size : int64 option;
can_edit : bool option;
trashed : bool option;
web_view_link : string option;
export_links : string option;
version : int64 option;
resource_key : string option;
target_id : string option;
target_resource_key : string option;
file_mode_bits : int64 option;
uid : int64 option;
gid : int64 option;
link_target : string option;
xattrs : string;
parent_path : string;
path : string;
state : State.t;
last_update : float;
}
val id : (t, int64) GapiLens.t
val remote_id : (t, string option) GapiLens.t
val name : (t, string option) GapiLens.t
val mime_type : (t, string option) GapiLens.t
val created_time : (t, float option) GapiLens.t
val modified_time : (t, float option) GapiLens.t
val viewed_by_me_time : (t, float option) GapiLens.t
val file_extension : (t, string option) GapiLens.t
val full_file_extension : (t, string option) GapiLens.t
val md5_checksum : (t, string option) GapiLens.t
val size : (t, int64 option) GapiLens.t
val can_edit : (t, bool option) GapiLens.t
val trashed : (t, bool option) GapiLens.t
val web_view_link : (t, string option) GapiLens.t
val export_links : (t, string option) GapiLens.t
val version : (t, int64 option) GapiLens.t
val link_target : (t, string option) GapiLens.t
val file_mode_bits : (t, int64 option) GapiLens.t
val uid : (t, int64 option) GapiLens.t
val gid : (t, int64 option) GapiLens.t
val resource_key : (t, string option) GapiLens.t
val target_id : (t, string option) GapiLens.t
val target_resource_key : (t, string option) GapiLens.t
val xattrs : (t, string) GapiLens.t
val parent_path : (t, string) GapiLens.t
val path : (t, string) GapiLens.t
val state : (t, State.t) GapiLens.t
val last_update : (t, float) GapiLens.t
val file_mode_bits_to_kind : int64 -> Unix.file_kind
val file_mode_bits_to_perm : int64 -> int
val render_xattrs : (string * string) list -> string
val parse_xattrs : string -> (string * string) list
val find_app_property : 'a -> ('a * 'b) list -> 'b option
val app_property_to_int64 : string option -> int64 option
val get_file_mode_bits : (string * string) list -> int64 option
val file_mode_bits_to_app_property : int64 option -> string * string
val mode_to_app_property : int -> string * string
val get_uid : (string * string) list -> int64 option
val uid_to_app_property : 'a -> string * 'a
val get_gid : (string * string) list -> int64 option
val gid_to_app_property : 'a -> string * 'a
val get_link_target : (string * 'a) list -> 'a option
val link_target_to_app_property : 'a -> string * 'a
val get_xattrs : (string * string) list -> string
val xattr_to_app_property : string -> 'a -> string * 'a
val xattr_no_value_to_app_property : string -> string * string
val is_folder : t -> bool
val is_document_mime_type : string -> bool
val is_document : t -> bool
val is_symlink : t -> bool
val is_shortcut : t -> bool
val is_valid : t -> float -> bool
val is_large_file : Config.t -> t -> bool
val to_stream : Config.t -> t -> bool * bool
val get_format_from_mime_type : string -> Config.t -> string
val get_format : t -> Config.t -> string
val get_icon_from_mime_type : string -> Config.t -> string
val get_icon : t -> Config.t -> string
val mime_type_of_format : string -> string
val serialize_export_links : (string * string) list -> string
val parse_export_links : string -> (string * string) list
end
module Metadata : sig
type t = {
display_name : string;
storage_quota_limit : int64;
storage_quota_usage : int64;
start_page_token : string;
cache_size : int64;
last_update : float;
clean_shutdown : bool;
}
val display_name : (t, string) GapiLens.t
val storage_quota_limit : (t, int64) GapiLens.t
val storage_quota_usage : (t, int64) GapiLens.t
val start_page_token : (t, string) GapiLens.t
val cache_size : (t, int64) GapiLens.t
val last_update : (t, float) GapiLens.t
val clean_shutdown : (t, bool) GapiLens.t
val is_valid : int -> t -> bool
end
module UploadEntry : sig
module State : sig
type t = ToUpload | Uploading
val to_string : t -> string
val of_string : string -> t
end
type t = {
id : int64;
resource_id : int64;
state : string;
last_update : float;
}
val id : (t, int64) GapiLens.t
val resource_id : (t, int64) GapiLens.t
val state : (t, string) GapiLens.t
val last_update : (t, float) GapiLens.t
end
| null | https://raw.githubusercontent.com/astrada/google-drive-ocamlfuse/9027602dbbaf7473cfacc8bd44865d2bef7c41c7/src/cacheData.mli | ocaml | type t = {
cache_dir : string;
db_path : string;
busy_timeout : int;
in_memory : bool;
autosaving_interval : int;
}
module Resource : sig
module State : sig
type t =
| Synchronized
| ToDownload
| Downloading
| ToUpload
| Uploading
| NotFound
val to_string : t -> string
val of_string : string -> t
end
type t = {
id : int64;
remote_id : string option;
name : string option;
mime_type : string option;
created_time : float option;
modified_time : float option;
viewed_by_me_time : float option;
file_extension : string option;
full_file_extension : string option;
md5_checksum : string option;
size : int64 option;
can_edit : bool option;
trashed : bool option;
web_view_link : string option;
export_links : string option;
version : int64 option;
resource_key : string option;
target_id : string option;
target_resource_key : string option;
file_mode_bits : int64 option;
uid : int64 option;
gid : int64 option;
link_target : string option;
xattrs : string;
parent_path : string;
path : string;
state : State.t;
last_update : float;
}
val id : (t, int64) GapiLens.t
val remote_id : (t, string option) GapiLens.t
val name : (t, string option) GapiLens.t
val mime_type : (t, string option) GapiLens.t
val created_time : (t, float option) GapiLens.t
val modified_time : (t, float option) GapiLens.t
val viewed_by_me_time : (t, float option) GapiLens.t
val file_extension : (t, string option) GapiLens.t
val full_file_extension : (t, string option) GapiLens.t
val md5_checksum : (t, string option) GapiLens.t
val size : (t, int64 option) GapiLens.t
val can_edit : (t, bool option) GapiLens.t
val trashed : (t, bool option) GapiLens.t
val web_view_link : (t, string option) GapiLens.t
val export_links : (t, string option) GapiLens.t
val version : (t, int64 option) GapiLens.t
val link_target : (t, string option) GapiLens.t
val file_mode_bits : (t, int64 option) GapiLens.t
val uid : (t, int64 option) GapiLens.t
val gid : (t, int64 option) GapiLens.t
val resource_key : (t, string option) GapiLens.t
val target_id : (t, string option) GapiLens.t
val target_resource_key : (t, string option) GapiLens.t
val xattrs : (t, string) GapiLens.t
val parent_path : (t, string) GapiLens.t
val path : (t, string) GapiLens.t
val state : (t, State.t) GapiLens.t
val last_update : (t, float) GapiLens.t
val file_mode_bits_to_kind : int64 -> Unix.file_kind
val file_mode_bits_to_perm : int64 -> int
val render_xattrs : (string * string) list -> string
val parse_xattrs : string -> (string * string) list
val find_app_property : 'a -> ('a * 'b) list -> 'b option
val app_property_to_int64 : string option -> int64 option
val get_file_mode_bits : (string * string) list -> int64 option
val file_mode_bits_to_app_property : int64 option -> string * string
val mode_to_app_property : int -> string * string
val get_uid : (string * string) list -> int64 option
val uid_to_app_property : 'a -> string * 'a
val get_gid : (string * string) list -> int64 option
val gid_to_app_property : 'a -> string * 'a
val get_link_target : (string * 'a) list -> 'a option
val link_target_to_app_property : 'a -> string * 'a
val get_xattrs : (string * string) list -> string
val xattr_to_app_property : string -> 'a -> string * 'a
val xattr_no_value_to_app_property : string -> string * string
val is_folder : t -> bool
val is_document_mime_type : string -> bool
val is_document : t -> bool
val is_symlink : t -> bool
val is_shortcut : t -> bool
val is_valid : t -> float -> bool
val is_large_file : Config.t -> t -> bool
val to_stream : Config.t -> t -> bool * bool
val get_format_from_mime_type : string -> Config.t -> string
val get_format : t -> Config.t -> string
val get_icon_from_mime_type : string -> Config.t -> string
val get_icon : t -> Config.t -> string
val mime_type_of_format : string -> string
val serialize_export_links : (string * string) list -> string
val parse_export_links : string -> (string * string) list
end
module Metadata : sig
type t = {
display_name : string;
storage_quota_limit : int64;
storage_quota_usage : int64;
start_page_token : string;
cache_size : int64;
last_update : float;
clean_shutdown : bool;
}
val display_name : (t, string) GapiLens.t
val storage_quota_limit : (t, int64) GapiLens.t
val storage_quota_usage : (t, int64) GapiLens.t
val start_page_token : (t, string) GapiLens.t
val cache_size : (t, int64) GapiLens.t
val last_update : (t, float) GapiLens.t
val clean_shutdown : (t, bool) GapiLens.t
val is_valid : int -> t -> bool
end
module UploadEntry : sig
module State : sig
type t = ToUpload | Uploading
val to_string : t -> string
val of_string : string -> t
end
type t = {
id : int64;
resource_id : int64;
state : string;
last_update : float;
}
val id : (t, int64) GapiLens.t
val resource_id : (t, int64) GapiLens.t
val state : (t, string) GapiLens.t
val last_update : (t, float) GapiLens.t
end
| |
309b4e905d8c85ee1abd61c541e11b790f18e624d26ec7a6a720c46e2ed052e3 | typelead/eta | UTF32.hs | # LANGUAGE Trustworthy #
# LANGUAGE NoImplicitPrelude
, BangPatterns
, NondecreasingIndentation
, MagicHash
#
, BangPatterns
, NondecreasingIndentation
, MagicHash
#-}
# OPTIONS_GHC -funbox - strict - fields #
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Encoding.UTF32
Copyright : ( c ) The University of Glasgow , 2009
-- License : see libraries/base/LICENSE
--
-- Maintainer :
-- Stability : internal
-- Portability : non-portable
--
UTF-32 Codecs for the IO library
--
Portions Copyright : ( c ) 2008 - 2009 ,
( c ) 2009 ,
( c ) 2009
--
-----------------------------------------------------------------------------
module GHC.IO.Encoding.UTF32 (
utf32, mkUTF32,
utf32_decode,
utf32_encode,
utf32be, mkUTF32be,
utf32be_decode,
utf32be_encode,
utf32le, mkUTF32le,
utf32le_decode,
utf32le_encode,
) where
import GHC.Base
import GHC.Real
import GHC.Num
import GHC.IO
import GHC.IO.Buffer
import GHC.IO.Encoding.Failure
import GHC.IO.Encoding.Types
import GHC.Word
import Data.Bits
import GHC.IORef
-- -----------------------------------------------------------------------------
The UTF-32 codec : either UTF-32BE or UTF-32LE with a BOM
utf32 :: TextEncoding
utf32 = mkUTF32 ErrorOnCodingFailure
| @since 4.4.0.0
mkUTF32 :: CodingFailureMode -> TextEncoding
mkUTF32 cfm = TextEncoding { textEncodingName = "UTF-32",
mkTextDecoder = utf32_DF cfm,
mkTextEncoder = utf32_EF cfm }
utf32_DF :: CodingFailureMode -> IO (TextDecoder (Maybe DecodeBuffer))
utf32_DF cfm = do
seen_bom <- newIORef Nothing
return (BufferCodec {
encode = utf32_decode seen_bom,
recover = recoverDecode cfm,
close = return (),
getState = readIORef seen_bom,
setState = writeIORef seen_bom
})
utf32_EF :: CodingFailureMode -> IO (TextEncoder Bool)
utf32_EF cfm = do
done_bom <- newIORef False
return (BufferCodec {
encode = utf32_encode done_bom,
recover = recoverEncode cfm,
close = return (),
getState = readIORef done_bom,
setState = writeIORef done_bom
})
utf32_encode :: IORef Bool -> EncodeBuffer
utf32_encode done_bom input
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow, bufSize=os }
= do
b <- readIORef done_bom
if b then utf32_native_encode input output
else if os - ow < 4
then return (OutputUnderflow, input,output)
else do
writeIORef done_bom True
writeWord8Buf oraw ow bom0
writeWord8Buf oraw (ow+1) bom1
writeWord8Buf oraw (ow+2) bom2
writeWord8Buf oraw (ow+3) bom3
utf32_native_encode input output{ bufR = ow+4 }
utf32_decode :: IORef (Maybe DecodeBuffer) -> DecodeBuffer
utf32_decode seen_bom
input@Buffer{ bufRaw=iraw, bufL=ir, bufR=iw, bufSize=_ }
output
= do
mb <- readIORef seen_bom
case mb of
Just decode -> decode input output
Nothing ->
if iw - ir < 4 then return (InputUnderflow, input,output) else do
c0 <- readWord8Buf iraw ir
c1 <- readWord8Buf iraw (ir+1)
c2 <- readWord8Buf iraw (ir+2)
c3 <- readWord8Buf iraw (ir+3)
case () of
_ | c0 == bom0 && c1 == bom1 && c2 == bom2 && c3 == bom3 -> do
writeIORef seen_bom (Just utf32be_decode)
utf32be_decode input{ bufL= ir+4 } output
_ | c0 == bom3 && c1 == bom2 && c2 == bom1 && c3 == bom0 -> do
writeIORef seen_bom (Just utf32le_decode)
utf32le_decode input{ bufL= ir+4 } output
| otherwise -> do
writeIORef seen_bom (Just utf32_native_decode)
utf32_native_decode input output
bom0, bom1, bom2, bom3 :: Word8
bom0 = 0
bom1 = 0
bom2 = 0xfe
bom3 = 0xff
-- choose UTF-32BE by default for UTF-32 output
utf32_native_decode :: DecodeBuffer
utf32_native_decode = utf32be_decode
utf32_native_encode :: EncodeBuffer
utf32_native_encode = utf32be_encode
-- -----------------------------------------------------------------------------
-- UTF32LE and UTF32BE
utf32be :: TextEncoding
utf32be = mkUTF32be ErrorOnCodingFailure
| @since 4.4.0.0
mkUTF32be :: CodingFailureMode -> TextEncoding
mkUTF32be cfm = TextEncoding { textEncodingName = "UTF-32BE",
mkTextDecoder = utf32be_DF cfm,
mkTextEncoder = utf32be_EF cfm }
utf32be_DF :: CodingFailureMode -> IO (TextDecoder ())
utf32be_DF cfm =
return (BufferCodec {
encode = utf32be_decode,
recover = recoverDecode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32be_EF :: CodingFailureMode -> IO (TextEncoder ())
utf32be_EF cfm =
return (BufferCodec {
encode = utf32be_encode,
recover = recoverEncode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32le :: TextEncoding
utf32le = mkUTF32le ErrorOnCodingFailure
| @since 4.4.0.0
mkUTF32le :: CodingFailureMode -> TextEncoding
mkUTF32le cfm = TextEncoding { textEncodingName = "UTF-32LE",
mkTextDecoder = utf32le_DF cfm,
mkTextEncoder = utf32le_EF cfm }
utf32le_DF :: CodingFailureMode -> IO (TextDecoder ())
utf32le_DF cfm =
return (BufferCodec {
encode = utf32le_decode,
recover = recoverDecode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32le_EF :: CodingFailureMode -> IO (TextEncoder ())
utf32le_EF cfm =
return (BufferCodec {
encode = utf32le_encode,
recover = recoverEncode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32be_decode :: DecodeBuffer
utf32be_decode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
loop !ir !ow
| ow >= os = done OutputUnderflow ir ow
| iw - ir < 4 = done InputUnderflow ir ow
| otherwise = do
c0 <- readWord8Buf iraw ir
c1 <- readWord8Buf iraw (ir+1)
c2 <- readWord8Buf iraw (ir+2)
c3 <- readWord8Buf iraw (ir+3)
let x1 = chr4 c0 c1 c2 c3
if not (validate x1) then invalid else do
ow' <- writeCharBuf oraw ow x1
loop (ir+4) ow'
where
invalid = done InvalidSequence ir ow
-- lambda-lifted, to avoid thunks being built in the inner-loop:
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
in
loop ir0 ow0
utf32le_decode :: DecodeBuffer
utf32le_decode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
loop !ir !ow
| ow >= os = done OutputUnderflow ir ow
| iw - ir < 4 = done InputUnderflow ir ow
| otherwise = do
c0 <- readWord8Buf iraw ir
c1 <- readWord8Buf iraw (ir+1)
c2 <- readWord8Buf iraw (ir+2)
c3 <- readWord8Buf iraw (ir+3)
let x1 = chr4 c3 c2 c1 c0
if not (validate x1) then invalid else do
ow' <- writeCharBuf oraw ow x1
loop (ir+4) ow'
where
invalid = done InvalidSequence ir ow
-- lambda-lifted, to avoid thunks being built in the inner-loop:
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
in
loop ir0 ow0
utf32be_encode :: EncodeBuffer
utf32be_encode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
loop !ir !ow
| ir >= iw = done InputUnderflow ir ow
| os - ow < 4 = done OutputUnderflow ir ow
| otherwise = do
(c,ir') <- readCharBuf iraw ir
if isSurrogate c then done InvalidSequence ir ow else do
let (c0,c1,c2,c3) = ord4 c
writeWord8Buf oraw ow c0
writeWord8Buf oraw (ow+1) c1
writeWord8Buf oraw (ow+2) c2
writeWord8Buf oraw (ow+3) c3
loop ir' (ow+4)
in
loop ir0 ow0
utf32le_encode :: EncodeBuffer
utf32le_encode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
loop !ir !ow
| ir >= iw = done InputUnderflow ir ow
| os - ow < 4 = done OutputUnderflow ir ow
| otherwise = do
(c,ir') <- readCharBuf iraw ir
if isSurrogate c then done InvalidSequence ir ow else do
let (c0,c1,c2,c3) = ord4 c
writeWord8Buf oraw ow c3
writeWord8Buf oraw (ow+1) c2
writeWord8Buf oraw (ow+2) c1
writeWord8Buf oraw (ow+3) c0
loop ir' (ow+4)
in
loop ir0 ow0
chr4 :: Word8 -> Word8 -> Word8 -> Word8 -> Char
chr4 (W8# x1#) (W8# x2#) (W8# x3#) (W8# x4#) =
C# (chr# (z1# +# z2# +# z3# +# z4#))
where
!y1# = word2Int# x1#
!y2# = word2Int# x2#
!y3# = word2Int# x3#
!y4# = word2Int# x4#
!z1# = uncheckedIShiftL# y1# 24#
!z2# = uncheckedIShiftL# y2# 16#
!z3# = uncheckedIShiftL# y3# 8#
!z4# = y4#
# INLINE chr4 #
ord4 :: Char -> (Word8,Word8,Word8,Word8)
ord4 c = (fromIntegral (x `shiftR` 24),
fromIntegral (x `shiftR` 16),
fromIntegral (x `shiftR` 8),
fromIntegral x)
where
x = ord c
# INLINE ord4 #
validate :: Char -> Bool
validate c = (x1 >= 0x0 && x1 < 0xD800) || (x1 > 0xDFFF && x1 <= 0x10FFFF)
where x1 = ord c
# INLINE validate #
| null | https://raw.githubusercontent.com/typelead/eta/97ee2251bbc52294efbf60fa4342ce6f52c0d25c/libraries/base/GHC/IO/Encoding/UTF32.hs | haskell | ---------------------------------------------------------------------------
|
Module : GHC.IO.Encoding.UTF32
License : see libraries/base/LICENSE
Maintainer :
Stability : internal
Portability : non-portable
---------------------------------------------------------------------------
-----------------------------------------------------------------------------
choose UTF-32BE by default for UTF-32 output
-----------------------------------------------------------------------------
UTF32LE and UTF32BE
lambda-lifted, to avoid thunks being built in the inner-loop:
lambda-lifted, to avoid thunks being built in the inner-loop: | # LANGUAGE Trustworthy #
# LANGUAGE NoImplicitPrelude
, BangPatterns
, NondecreasingIndentation
, MagicHash
#
, BangPatterns
, NondecreasingIndentation
, MagicHash
#-}
# OPTIONS_GHC -funbox - strict - fields #
Copyright : ( c ) The University of Glasgow , 2009
UTF-32 Codecs for the IO library
Portions Copyright : ( c ) 2008 - 2009 ,
( c ) 2009 ,
( c ) 2009
module GHC.IO.Encoding.UTF32 (
utf32, mkUTF32,
utf32_decode,
utf32_encode,
utf32be, mkUTF32be,
utf32be_decode,
utf32be_encode,
utf32le, mkUTF32le,
utf32le_decode,
utf32le_encode,
) where
import GHC.Base
import GHC.Real
import GHC.Num
import GHC.IO
import GHC.IO.Buffer
import GHC.IO.Encoding.Failure
import GHC.IO.Encoding.Types
import GHC.Word
import Data.Bits
import GHC.IORef
The UTF-32 codec : either UTF-32BE or UTF-32LE with a BOM
utf32 :: TextEncoding
utf32 = mkUTF32 ErrorOnCodingFailure
| @since 4.4.0.0
mkUTF32 :: CodingFailureMode -> TextEncoding
mkUTF32 cfm = TextEncoding { textEncodingName = "UTF-32",
mkTextDecoder = utf32_DF cfm,
mkTextEncoder = utf32_EF cfm }
utf32_DF :: CodingFailureMode -> IO (TextDecoder (Maybe DecodeBuffer))
utf32_DF cfm = do
seen_bom <- newIORef Nothing
return (BufferCodec {
encode = utf32_decode seen_bom,
recover = recoverDecode cfm,
close = return (),
getState = readIORef seen_bom,
setState = writeIORef seen_bom
})
utf32_EF :: CodingFailureMode -> IO (TextEncoder Bool)
utf32_EF cfm = do
done_bom <- newIORef False
return (BufferCodec {
encode = utf32_encode done_bom,
recover = recoverEncode cfm,
close = return (),
getState = readIORef done_bom,
setState = writeIORef done_bom
})
utf32_encode :: IORef Bool -> EncodeBuffer
utf32_encode done_bom input
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow, bufSize=os }
= do
b <- readIORef done_bom
if b then utf32_native_encode input output
else if os - ow < 4
then return (OutputUnderflow, input,output)
else do
writeIORef done_bom True
writeWord8Buf oraw ow bom0
writeWord8Buf oraw (ow+1) bom1
writeWord8Buf oraw (ow+2) bom2
writeWord8Buf oraw (ow+3) bom3
utf32_native_encode input output{ bufR = ow+4 }
utf32_decode :: IORef (Maybe DecodeBuffer) -> DecodeBuffer
utf32_decode seen_bom
input@Buffer{ bufRaw=iraw, bufL=ir, bufR=iw, bufSize=_ }
output
= do
mb <- readIORef seen_bom
case mb of
Just decode -> decode input output
Nothing ->
if iw - ir < 4 then return (InputUnderflow, input,output) else do
c0 <- readWord8Buf iraw ir
c1 <- readWord8Buf iraw (ir+1)
c2 <- readWord8Buf iraw (ir+2)
c3 <- readWord8Buf iraw (ir+3)
case () of
_ | c0 == bom0 && c1 == bom1 && c2 == bom2 && c3 == bom3 -> do
writeIORef seen_bom (Just utf32be_decode)
utf32be_decode input{ bufL= ir+4 } output
_ | c0 == bom3 && c1 == bom2 && c2 == bom1 && c3 == bom0 -> do
writeIORef seen_bom (Just utf32le_decode)
utf32le_decode input{ bufL= ir+4 } output
| otherwise -> do
writeIORef seen_bom (Just utf32_native_decode)
utf32_native_decode input output
bom0, bom1, bom2, bom3 :: Word8
bom0 = 0
bom1 = 0
bom2 = 0xfe
bom3 = 0xff
utf32_native_decode :: DecodeBuffer
utf32_native_decode = utf32be_decode
utf32_native_encode :: EncodeBuffer
utf32_native_encode = utf32be_encode
utf32be :: TextEncoding
utf32be = mkUTF32be ErrorOnCodingFailure
| @since 4.4.0.0
mkUTF32be :: CodingFailureMode -> TextEncoding
mkUTF32be cfm = TextEncoding { textEncodingName = "UTF-32BE",
mkTextDecoder = utf32be_DF cfm,
mkTextEncoder = utf32be_EF cfm }
utf32be_DF :: CodingFailureMode -> IO (TextDecoder ())
utf32be_DF cfm =
return (BufferCodec {
encode = utf32be_decode,
recover = recoverDecode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32be_EF :: CodingFailureMode -> IO (TextEncoder ())
utf32be_EF cfm =
return (BufferCodec {
encode = utf32be_encode,
recover = recoverEncode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32le :: TextEncoding
utf32le = mkUTF32le ErrorOnCodingFailure
| @since 4.4.0.0
mkUTF32le :: CodingFailureMode -> TextEncoding
mkUTF32le cfm = TextEncoding { textEncodingName = "UTF-32LE",
mkTextDecoder = utf32le_DF cfm,
mkTextEncoder = utf32le_EF cfm }
utf32le_DF :: CodingFailureMode -> IO (TextDecoder ())
utf32le_DF cfm =
return (BufferCodec {
encode = utf32le_decode,
recover = recoverDecode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32le_EF :: CodingFailureMode -> IO (TextEncoder ())
utf32le_EF cfm =
return (BufferCodec {
encode = utf32le_encode,
recover = recoverEncode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
utf32be_decode :: DecodeBuffer
utf32be_decode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
loop !ir !ow
| ow >= os = done OutputUnderflow ir ow
| iw - ir < 4 = done InputUnderflow ir ow
| otherwise = do
c0 <- readWord8Buf iraw ir
c1 <- readWord8Buf iraw (ir+1)
c2 <- readWord8Buf iraw (ir+2)
c3 <- readWord8Buf iraw (ir+3)
let x1 = chr4 c0 c1 c2 c3
if not (validate x1) then invalid else do
ow' <- writeCharBuf oraw ow x1
loop (ir+4) ow'
where
invalid = done InvalidSequence ir ow
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
in
loop ir0 ow0
utf32le_decode :: DecodeBuffer
utf32le_decode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
loop !ir !ow
| ow >= os = done OutputUnderflow ir ow
| iw - ir < 4 = done InputUnderflow ir ow
| otherwise = do
c0 <- readWord8Buf iraw ir
c1 <- readWord8Buf iraw (ir+1)
c2 <- readWord8Buf iraw (ir+2)
c3 <- readWord8Buf iraw (ir+3)
let x1 = chr4 c3 c2 c1 c0
if not (validate x1) then invalid else do
ow' <- writeCharBuf oraw ow x1
loop (ir+4) ow'
where
invalid = done InvalidSequence ir ow
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
in
loop ir0 ow0
utf32be_encode :: EncodeBuffer
utf32be_encode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
loop !ir !ow
| ir >= iw = done InputUnderflow ir ow
| os - ow < 4 = done OutputUnderflow ir ow
| otherwise = do
(c,ir') <- readCharBuf iraw ir
if isSurrogate c then done InvalidSequence ir ow else do
let (c0,c1,c2,c3) = ord4 c
writeWord8Buf oraw ow c0
writeWord8Buf oraw (ow+1) c1
writeWord8Buf oraw (ow+2) c2
writeWord8Buf oraw (ow+3) c3
loop ir' (ow+4)
in
loop ir0 ow0
utf32le_encode :: EncodeBuffer
utf32le_encode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
loop !ir !ow
| ir >= iw = done InputUnderflow ir ow
| os - ow < 4 = done OutputUnderflow ir ow
| otherwise = do
(c,ir') <- readCharBuf iraw ir
if isSurrogate c then done InvalidSequence ir ow else do
let (c0,c1,c2,c3) = ord4 c
writeWord8Buf oraw ow c3
writeWord8Buf oraw (ow+1) c2
writeWord8Buf oraw (ow+2) c1
writeWord8Buf oraw (ow+3) c0
loop ir' (ow+4)
in
loop ir0 ow0
chr4 :: Word8 -> Word8 -> Word8 -> Word8 -> Char
chr4 (W8# x1#) (W8# x2#) (W8# x3#) (W8# x4#) =
C# (chr# (z1# +# z2# +# z3# +# z4#))
where
!y1# = word2Int# x1#
!y2# = word2Int# x2#
!y3# = word2Int# x3#
!y4# = word2Int# x4#
!z1# = uncheckedIShiftL# y1# 24#
!z2# = uncheckedIShiftL# y2# 16#
!z3# = uncheckedIShiftL# y3# 8#
!z4# = y4#
# INLINE chr4 #
ord4 :: Char -> (Word8,Word8,Word8,Word8)
ord4 c = (fromIntegral (x `shiftR` 24),
fromIntegral (x `shiftR` 16),
fromIntegral (x `shiftR` 8),
fromIntegral x)
where
x = ord c
# INLINE ord4 #
validate :: Char -> Bool
validate c = (x1 >= 0x0 && x1 < 0xD800) || (x1 > 0xDFFF && x1 <= 0x10FFFF)
where x1 = ord c
# INLINE validate #
|
41ef2dc0fd8e1f5d4d320016d8efa0bb4b2143580f386912ce04121f8d3cdcf5 | tomgstevensphd/Common-Lisp-Utilities-in-Lispworks | U-symbol-info.lisp | ;;******************************* U-symbol-info.lisp *********************
;;
;;Functions for writing code etc to find functions, function args, etc. to speed writing code
;;
;;SOME FUNCTIONS THAT MIGHT HELP
#|
do-symbols, do-external-symbols, and do-all-symbols iterate over the symbols of packages. For each symbol in the set of packages chosen, the var is bound to the symbol, and the statements in the body are executed. When all the symbols have been processed, result-form is evaluated and returned as the value of the macro.
do-symbols iterates over the symbols accessible in package. Statements may execute more than once for symbols that are inherited from multiple packages.
do-all-symbols iterates on every registered package. do-all-symbols will not process every symbol whatsoever, because a symbol not accessible in any registered package will not be processed. do-all-symbols may cause a symbol that is present in several packages to be processed more than once.
do-external-symbols iterates on the external symbols of package.
When result-form is evaluated, var is bound and has the value nil.
|#
ALSO SEE U-sexp.lisp
;;MAKUNBOUND-NESTED-VARS
2019
ddd
(defun makunbound-nested-vars (nested-varlists &key (unbind-level 99)
(convert-strings-p T)
(level 0))
"U-symbol-info, RETURNS: (values unbound-vars not-unbound-vars) UNBIND-LEVEL usually either 0 :1, or 99"
(let*
((unbound-vars)
(not-unbound-vars)
(itemsym)
)
(cond
((listp nested-varlists)
(loop
for item in nested-varlists
do
(cond
;;FOR LIST ITEMS
((listp item)
(cond
((> unbind-level level)
(multiple-value-bind ( unbound-vars1 not-unbound-vars1)
(makunbound-nested-vars item :unbind-level unbind-level
:convert-strings-p convert-strings-p :level (+ level 1))
(setf unbound-vars (append unbound-vars unbound-vars1)
not-unbound-vars (append not-unbound-vars not-unbound-vars1))) )
(t (setf not-unbound-vars (append not-unbound-vars item))))
end listp item
)
;;FOR NON-LIST ITEMS
(T
(when (and (stringp item) convert-strings-p)
(setf item (my-make-symbol item)))
(cond
((and (symbolp item)(not (constantp item)))
(setf unbound-vars (append unbound-vars
(list (makunbound item)))))
(t (setf not-unbound-vars (append not-unbound-vars (list item)))))
;;end T, cond,loop
)))
end
)
(t (cond
((and (stringp nested-varlists) convert-strings-p)
(setf itemsym (my-make-symbol nested-varlists)))
((and (symbolp nested-varlists)(not (constantp nested-varlists)))
(setf itemsym nested-varlists))
(t nil))
(when itemsym
(setf unbound-vars (append unbound-vars (list (makunbound nested-varlists)))))
))
(values unbound-vars not-unbound-vars)
;;let, makunbound-nested-vars
))
;;TEST
( makunbound - nested - vars ' ( v1 ( v11 v22 ( v111 v222 " strvl333 " ) ) ) )
works= ( V1 V11 V22 V111 V222 STRVL333 ) NIL
FOR UNBIND - LEVEL = 1
( makunbound - nested - vars ' ( v1 ( v11 v22 ( v111 v222 " strvl333 " ) ) ) : UNBIND - LEVEL 1 )
works= ( V1 V11 V22 ) ( V111 V222 " strvl333 " )
( makunbound - nested - vars ' ( v1 ( v11 v22 ( v111 v222 " strvl333 " ) v33 ) v2 ( v44 v55 ( v444 v555 ) v66 ) ) : UNBIND - LEVEL 1 )
works= ( V1 V11 V22 V33 V2 V44 V55 V66 ) ( V111 V222 " strvl333 " V444 V555 )
MAKUNBOUND - VARS
modified 2018 , 2020
ddd
(defun makunbound-vars (varlist &key (convert-strings-p t))
"U-symbol-info converts a list of symbols or strings to a list of UNBOUND VARS--even if they were previously bound. RETURNS (values unbound-vars varlist)"
(let
((unbound-vars)
)
(dolist (var varlist)
(when (and convert-strings-p (stringp var))
(setf var (my-make-symbol var)))
(unless (or (constantp var)(stringp var)
(not (symbolp var))(not (boundp var)))
;;(break)
(makunbound `,var))
(setf unbound-vars (append unbound-vars (list var)))
)
(values unbound-vars varlist)
))
;;TEST
( setf ' ( list ) ) = ( LIST )
( makunbound - vars ' ( " " ) ) = ( " " )
CL - USER 37 >
WORKS= Error : The variable is unbound .
;; for unbound var
;; (makunbound-vars '(unboundvar))
works= ( UNBOUNDVAR ) ( UNBOUNDVAR )
;;(makunbound-vars '("bb") :convert-strings-p T)
works= ( BB ) ( " bb " )
( : this ) = T
( setf thisxx ' ( a b ) ) ( ' thisxx ) = NIL
;; (makunbound-varlist
UNQUOTED - SYMBOLP
2016
ddd
(defmacro unquoted-symbolp (x)
"In U-symbol-info. Tests whether the the UNQUOTED symbol is a symbol"
`(symbolp (quote ,x)))
;;TEST
( unquoted - symbolp ' this ) = NIL
;; (unquoted-symbolp this) = T
UNQUOTED - BOUNDP
2016
ddd
(defmacro unquoted-boundp (x)
"In U-symbol-info, Tests whether the the UNQUOTED symbol is boiundp"
`(boundp (quote ,x)))
;;TEST
( unquoted - boundp this ) = NIL
;; (unquoted-boundp mother) = T
CONVERT - KEYWORD
2019
ddd
(defun convert-keyword (keyword )
"U-symbol-info RETURNS (values sym str)"
(let*
((str (format nil "~A" keyword))
(sym (my-make-symbol str))
)
(values sym str)
;;end let, convert-keyword
))
;;TEST
;; (convert-keyword :this)
works = THIS " THIS "
2019
ddd
(defun my-make-keyword (object)
"U-symbol-info object= str or sym. RETURNS keyword."
(let*
((str (format nil "~A" object))
(keyword (intern str "KEYWORD"))
)
(values keyword str)
;;end let, my-make-keyword
))
;;TEST
;; (my-make-keyword "THIS1")
;; works= :THIS1 "THIS1"
2020
ddd
(defun my-symbolp (item &key (not-keywordp T)(not-nil-p T))
"U-symbol-info.lisp. Filters out keywords and/or NILs & 'NILS"
(let
((result (symbolp item))
)
(when (and not-keywordp (keywordp item))
(setf result NIL))
(when (and not-nil-p (equal item 'NIL))
(setf result NIL))
result
;;end let,my-symbolp
))
;;TEST
;; (my-symbolp NIL) = nil
( my - symbolp ' NIL ) = NIL
( my - symbolp : NEWKEY ) = NIL
( my - symbolp : NEWKEY : not - keywordp NIL ) = T
;; (my-symbolp 'NIL :not-nil-p NIL) = T
( my - symbolp NIL : not - nil - p NIL ) = T
;;GET-SYM-DIMS
2020
ddd
(defun get-sym-dims (symbol &key (separators '("."))) ;;caused probs "-")))
"In U-symbol, Converts a symbol (or string) into dims list. RETURNS (values dimslist rootstr dims-list dims-str)."
(let
((symstr (cond ((stringp symbol) symbol)
(T (format nil "~A" symbol))))
;;(dims-str-list)
(n-dims)
)
(multiple-value-bind ( dims-str dimslist)
(convert-string-w-separators-to-list symstr :separators separators)
( setf dims - str - list ( format nil " ~{~a~^-~ } " ) )
(values dimslist dims-str)
;;end mvb,let, get-sym-dims
)))
;;TEST
;; (get-sym-dims 'a.b.c)
;; works = (A B C) ("A" "B" "C")
| null | https://raw.githubusercontent.com/tomgstevensphd/Common-Lisp-Utilities-in-Lispworks/a937e4f5f0ecc2430ac50560b14e87ec3b56ea37/U-symbol-info.lisp | lisp | ******************************* U-symbol-info.lisp *********************
Functions for writing code etc to find functions, function args, etc. to speed writing code
SOME FUNCTIONS THAT MIGHT HELP
do-symbols, do-external-symbols, and do-all-symbols iterate over the symbols of packages. For each symbol in the set of packages chosen, the var is bound to the symbol, and the statements in the body are executed. When all the symbols have been processed, result-form is evaluated and returned as the value of the macro.
do-symbols iterates over the symbols accessible in package. Statements may execute more than once for symbols that are inherited from multiple packages.
do-all-symbols iterates on every registered package. do-all-symbols will not process every symbol whatsoever, because a symbol not accessible in any registered package will not be processed. do-all-symbols may cause a symbol that is present in several packages to be processed more than once.
do-external-symbols iterates on the external symbols of package.
When result-form is evaluated, var is bound and has the value nil.
MAKUNBOUND-NESTED-VARS
FOR LIST ITEMS
FOR NON-LIST ITEMS
end T, cond,loop
let, makunbound-nested-vars
TEST
(break)
TEST
for unbound var
(makunbound-vars '(unboundvar))
(makunbound-vars '("bb") :convert-strings-p T)
(makunbound-varlist
TEST
(unquoted-symbolp this) = T
TEST
(unquoted-boundp mother) = T
end let, convert-keyword
TEST
(convert-keyword :this)
end let, my-make-keyword
TEST
(my-make-keyword "THIS1")
works= :THIS1 "THIS1"
end let,my-symbolp
TEST
(my-symbolp NIL) = nil
(my-symbolp 'NIL :not-nil-p NIL) = T
GET-SYM-DIMS
caused probs "-")))
(dims-str-list)
end mvb,let, get-sym-dims
TEST
(get-sym-dims 'a.b.c)
works = (A B C) ("A" "B" "C")
|
ALSO SEE U-sexp.lisp
2019
ddd
(defun makunbound-nested-vars (nested-varlists &key (unbind-level 99)
(convert-strings-p T)
(level 0))
"U-symbol-info, RETURNS: (values unbound-vars not-unbound-vars) UNBIND-LEVEL usually either 0 :1, or 99"
(let*
((unbound-vars)
(not-unbound-vars)
(itemsym)
)
(cond
((listp nested-varlists)
(loop
for item in nested-varlists
do
(cond
((listp item)
(cond
((> unbind-level level)
(multiple-value-bind ( unbound-vars1 not-unbound-vars1)
(makunbound-nested-vars item :unbind-level unbind-level
:convert-strings-p convert-strings-p :level (+ level 1))
(setf unbound-vars (append unbound-vars unbound-vars1)
not-unbound-vars (append not-unbound-vars not-unbound-vars1))) )
(t (setf not-unbound-vars (append not-unbound-vars item))))
end listp item
)
(T
(when (and (stringp item) convert-strings-p)
(setf item (my-make-symbol item)))
(cond
((and (symbolp item)(not (constantp item)))
(setf unbound-vars (append unbound-vars
(list (makunbound item)))))
(t (setf not-unbound-vars (append not-unbound-vars (list item)))))
)))
end
)
(t (cond
((and (stringp nested-varlists) convert-strings-p)
(setf itemsym (my-make-symbol nested-varlists)))
((and (symbolp nested-varlists)(not (constantp nested-varlists)))
(setf itemsym nested-varlists))
(t nil))
(when itemsym
(setf unbound-vars (append unbound-vars (list (makunbound nested-varlists)))))
))
(values unbound-vars not-unbound-vars)
))
( makunbound - nested - vars ' ( v1 ( v11 v22 ( v111 v222 " strvl333 " ) ) ) )
works= ( V1 V11 V22 V111 V222 STRVL333 ) NIL
FOR UNBIND - LEVEL = 1
( makunbound - nested - vars ' ( v1 ( v11 v22 ( v111 v222 " strvl333 " ) ) ) : UNBIND - LEVEL 1 )
works= ( V1 V11 V22 ) ( V111 V222 " strvl333 " )
( makunbound - nested - vars ' ( v1 ( v11 v22 ( v111 v222 " strvl333 " ) v33 ) v2 ( v44 v55 ( v444 v555 ) v66 ) ) : UNBIND - LEVEL 1 )
works= ( V1 V11 V22 V33 V2 V44 V55 V66 ) ( V111 V222 " strvl333 " V444 V555 )
MAKUNBOUND - VARS
modified 2018 , 2020
ddd
(defun makunbound-vars (varlist &key (convert-strings-p t))
"U-symbol-info converts a list of symbols or strings to a list of UNBOUND VARS--even if they were previously bound. RETURNS (values unbound-vars varlist)"
(let
((unbound-vars)
)
(dolist (var varlist)
(when (and convert-strings-p (stringp var))
(setf var (my-make-symbol var)))
(unless (or (constantp var)(stringp var)
(not (symbolp var))(not (boundp var)))
(makunbound `,var))
(setf unbound-vars (append unbound-vars (list var)))
)
(values unbound-vars varlist)
))
( setf ' ( list ) ) = ( LIST )
( makunbound - vars ' ( " " ) ) = ( " " )
CL - USER 37 >
WORKS= Error : The variable is unbound .
works= ( UNBOUNDVAR ) ( UNBOUNDVAR )
works= ( BB ) ( " bb " )
( : this ) = T
( setf thisxx ' ( a b ) ) ( ' thisxx ) = NIL
UNQUOTED - SYMBOLP
2016
ddd
(defmacro unquoted-symbolp (x)
"In U-symbol-info. Tests whether the the UNQUOTED symbol is a symbol"
`(symbolp (quote ,x)))
( unquoted - symbolp ' this ) = NIL
UNQUOTED - BOUNDP
2016
ddd
(defmacro unquoted-boundp (x)
"In U-symbol-info, Tests whether the the UNQUOTED symbol is boiundp"
`(boundp (quote ,x)))
( unquoted - boundp this ) = NIL
CONVERT - KEYWORD
2019
ddd
(defun convert-keyword (keyword )
"U-symbol-info RETURNS (values sym str)"
(let*
((str (format nil "~A" keyword))
(sym (my-make-symbol str))
)
(values sym str)
))
works = THIS " THIS "
2019
ddd
(defun my-make-keyword (object)
"U-symbol-info object= str or sym. RETURNS keyword."
(let*
((str (format nil "~A" object))
(keyword (intern str "KEYWORD"))
)
(values keyword str)
))
2020
ddd
(defun my-symbolp (item &key (not-keywordp T)(not-nil-p T))
"U-symbol-info.lisp. Filters out keywords and/or NILs & 'NILS"
(let
((result (symbolp item))
)
(when (and not-keywordp (keywordp item))
(setf result NIL))
(when (and not-nil-p (equal item 'NIL))
(setf result NIL))
result
))
( my - symbolp ' NIL ) = NIL
( my - symbolp : NEWKEY ) = NIL
( my - symbolp : NEWKEY : not - keywordp NIL ) = T
( my - symbolp NIL : not - nil - p NIL ) = T
2020
ddd
"In U-symbol, Converts a symbol (or string) into dims list. RETURNS (values dimslist rootstr dims-list dims-str)."
(let
((symstr (cond ((stringp symbol) symbol)
(T (format nil "~A" symbol))))
(n-dims)
)
(multiple-value-bind ( dims-str dimslist)
(convert-string-w-separators-to-list symstr :separators separators)
( setf dims - str - list ( format nil " ~{~a~^-~ } " ) )
(values dimslist dims-str)
)))
|
ac7c97323bc0a359372766df3f0291171ed1642524e937ad286eeb340255f542 | jrh13/hol-light | readable.ml | (* ========================================================================= *)
Miz3 interface for readable HOL Light tactics formal proofs
(* *)
( c ) Copyright , 2013
Distributed under the same license as HOL Light
(* *)
(* The primary meaning of readability is explained in the HOL Light tutorial *)
on page 81 after the proof of NSQRT_2 ( ported below ) ,
(* "We would like to claim that this proof can be read in isolation, without *)
running it in HOL . For each step , every fact we used is clearly labelled
(* somewhere else in the proof, and every assumption is given explicitly." *)
(* However readability is often improved by using tactics constructs like *)
SIMP_TAC and , which allow facts and assumptions to not be
(* given explicitly, so as to not lose sight of the proof. Readability is *)
(* improved by a miz3 interface with few type annotations, back-quotes or *)
(* double-quotes, and allowing HOL4/Isabelle math characters, e.g. *)
⇒ ⇔ ∧ ∃ ∈ ∉ α β γ λ θ μ ⊂ ∩ ━ ≡ ≅ ∡ ∥ → ╪ .
We use ideas for readable formal proofs due to ( " Towards
(* more readable proofs" of the tutorial and Examples/mizar.ml), Freek *)
Wiedijk ( / miz2a.ml , / miz3.ml and arxiv.org/pdf/1201.3601
" A Synthesis of Procedural and Declarative Styles of Interactive
Theorem Proving " ) , ( author of tactic constructs
INTRO_TAC , DESTRUCT_TAC & HYP ) , ( coauthor of
Isabelle Light ) , ( author of the Q - module
-Light-Q ) and ( author of HOL
Zero and Tactician ) . These readability ideas yield the miz3 - type
(* declarative constructs assume, consider and case_split. The semantics of *)
readable.ml is clear from an obvious translation to HOL Light proofs . An
(* interactive mode is useful in writing, debugging and displaying proofs. *)
(* *)
(* The construct "case_split" reducing the goal to various cases given by *)
" suppose " clauses . The construct " proof " [ ... ] " qed " allows arbitrarily
(* long proofs, which can be arbitrarily nested with other case_split and *)
proof / qed constructs . THENL is only implemented implicitly in case_split
(* (also eq_tac and conj_tac), and this requires adjustments, such as using *)
num_INDUCTION instead of INDUCT_TAC .
(* ========================================================================= *)
The library defines regexp functions needed to process strings .
#load "str.cma";;
(* parse_qproof uses system.ml quotexpander feature designed for miz3.ml to *)
(* turn backquoted expression `;[...]` into a string with no newline or *)
(* backslash problems. Note that miz3.ml defines parse_qproof differently. *)
let parse_qproof s = (String.sub s 1 (String.length s - 1));;
(* Allows HOL4 and Isabelle style math characters.                           *)
(* Translate HOL4/Isabelle-style math characters in s to their HOL Light
   ASCII equivalents, e.g. ⇒ to ==> and ∀ to !.  Each pattern/replacement
   pair is applied in list order by folding Str.global_replace over the
   substitution table.                                                       *)
let CleanMathFontsForHOL_Light s =
  let substitute acc (pat, rep) =
    Str.global_replace (Str.regexp pat) rep acc in
  List.fold_left substitute s
    ["⇒","==>"; "⇔","<=>"; "∧","/\\ "; "∨","\\/"; "¬","~";
    "∀","!"; "∃","?"; "∈","IN"; "∉","NOTIN";
    "α","alpha"; "β","beta"; "γ","gamma"; "λ","\\ "; "θ","theta"; "μ","mu";
    "⊂","SUBSET"; "∩","INTER"; "∪","UNION"; "∅","{}"; "━","DIFF";
    "≡","==="; "≅","cong"; "∡","angle"; "∥","parallel";
    "∏","prod"; "∘","_o_"; "→","--->"; "╪","INSERT";
    "≃", "TarskiCong"; "≊", "TarskiTriangleCong"; "ℬ", "TarskiBetween"];;
(* printReadExn prints uncluttered error messages via Readable_fail.  This   *)
(* is due to Mark Adams, who also explained exec below.                      *)
(* Readable_fail carries an uncluttered, human-readable error message;       *)
(* printReadExn is installed as a toplevel printer so such exceptions        *)
(* display their message instead of the raw exception constructor.           *)
exception Readable_fail of string;;
let printReadExn e =
  match e with
  | Readable_fail s
    -> print_string s
  | _ -> print_string (Printexc.to_string e);;
#install_printer printReadExn;;
(* From update_database.ml: Execute any OCaml expression given as a string. *)
(* exec: parse and evaluate an arbitrary OCaml phrase given as a string,     *)
(* using the compiler's Toploop machinery; o is HOL Light's composition.     *)
let exec = ignore o Toploop.execute_phrase false Format.std_formatter
  o !Toploop.parse_toplevel_phrase o Lexing.from_string;;
(* Following miz3.ml, exec_thm returns the theorem representing a string, so *)
(* exec_thm "FORALL_PAIR_THM";; returns                                      *)
(* val it : thm = |- !P. (!p. P p) <=> (!p1 p2. P (p1,p2))                   *)
(* Extra error-checking is done to rule out the possibility of the theorem   *)
(* string ending with a semicolon.                                           *)
(* Dummy references used by the exec_* functions below: exec evaluates a     *)
(* string that assigns into the ref of the appropriate type, which is then   *)
(* dereferenced to recover the typed value.                                  *)
let thm_ref = ref TRUTH;;
let tactic_ref = ref ALL_TAC;;
let thmtactic_ref = ref MATCH_MP_TAC;;
let thmlist_tactic_ref = ref REWRITE_TAC;;
let termlist_thm_thm_ref = ref SPECL;;
let thm_thm_ref = ref GSYM;;
let term_thm_ref = ref ARITH_RULE;;
let thmlist_term_thm_ref = ref MESON;;
(* exec_thm s: the theorem denoted by string s.  Strings containing a ";"    *)
(* are rejected up front (a theorem expression never needs one); any         *)
(* evaluation failure is reported as Noparse.                                *)
let exec_thm s =
  if Str.string_match (Str.regexp "[^;]*;") s 0 then raise Noparse
  else
    try exec ("thm_ref := (("^ s ^"): thm);;");
      !thm_ref
    with _ -> raise Noparse;;
(* The exec_* family: evaluate string s at each needed type by assigning     *)
(* into the matching ref above; any failure is reported as Noparse.          *)
let exec_tactic s =
  try exec ("tactic_ref := (("^ s ^"): tactic);;"); !tactic_ref
  with _ -> raise Noparse;;
let exec_thmlist_tactic s =
  try
    exec ("thmlist_tactic_ref := (("^ s ^"): thm list -> tactic);;");
    !thmlist_tactic_ref
  with _ -> raise Noparse;;
let exec_thmtactic s =
  try exec ("thmtactic_ref := (("^ s ^"): thm -> tactic);;"); !thmtactic_ref
  with _ -> raise Noparse;;
let exec_termlist_thm_thm s =
  try exec ("termlist_thm_thm_ref := (("^ s ^"): (term list -> thm -> thm));;");
    !termlist_thm_thm_ref
  with _ -> raise Noparse;;
let exec_thm_thm s =
  try exec ("thm_thm_ref := (("^ s ^"): (thm -> thm));;");
    !thm_thm_ref
  with _ -> raise Noparse;;
let exec_term_thm s =
  try exec ("term_thm_ref := (("^ s ^"): (term -> thm));;");
    !term_thm_ref
  with _ -> raise Noparse;;
let exec_thmlist_term_thm s =
  try exec ("thmlist_term_thm_ref := (("^ s ^"): (thm list ->term -> thm));;");
    !thmlist_term_thm_ref
  with _ -> raise Noparse;;
(* make_env and parse_env_string (following parse_term from parser.ml,       *)
(* Examples/miz2a.ml and HOL-Light-Q) turn a                                 *)
(* string into a term with types inferred by the goal and assumption list.   *)
(* make_env gl: an environment assigning each free variable of the goal and  *)
(* its assumptions to its pretype, for use by parse_env_string below.        *)
let (make_env: goal -> (string * pretype) list) =
  fun (asl, w) -> map ((fun (s, ty) -> (s, pretype_of_type ty)) o dest_var)
    (freesl (w::(map (concl o snd) asl)));;
(* parse_env_string env s: parse string s as a term, typechecking in env;    *)
(* Readable_fail if any of s remains unconsumed after the parse.             *)
let parse_env_string env s =
  let (ptm, l) = (parse_preterm o lex o explode) s in
  if l = [] then (term_of_preterm o retypecheck env) ptm
  else raise (Readable_fail
    ("Unparsed input at the end of the term\n" ^ s));;
(* versions of new_constant, parse_as_infix, new_definition and new_axiom *)
(* Versions of new_constant, parse_as_infix, new_definition and new_axiom    *)
(* that accept the math characters translated by CleanMathFontsForHOL_Light. *)
let NewConstant (x, y) = new_constant(CleanMathFontsForHOL_Light x, y);;
let ParseAsInfix (x, y) = parse_as_infix (CleanMathFontsForHOL_Light x, y);;
let NewDefinition s =
  new_definition (parse_env_string [] (CleanMathFontsForHOL_Light s));;
let NewAxiom s =
  new_axiom (parse_env_string [] (CleanMathFontsForHOL_Light s));;
(* String versions without type annotations of SUBGOAL_THEN, SUBGOAL_TAC,    *)
(* intro_TAC, exists_TAC, X_gen_TAC and X_genl_TAC, and also new miz3-type   *)
(* tactic constructs assume, consider and case_split.                        *)
(* subgoal_THEN stm ttac gl = (SUBGOAL_THEN t ttac) gl,                      *)
(* where stm is a string that is turned into a statement t by make_env and   *)
(* parse_env_string, using the goal gl.  We call a string stm a statement.   *)
(* ttac is often the thm_tactic (LABEL_TAC string) or (DESTRUCT_TAC string). *)
(* subgoal_THEN stm ttac gl: SUBGOAL_THEN with the subgoal given as a string *)
(* statement stm, parsed in the environment of the goal gl.                  *)
let subgoal_THEN stm ttac gl =
  SUBGOAL_THEN (parse_env_string (make_env gl) stm) ttac gl;;
(* subgoal_TAC stm lab tac gl = (SUBGOAL_TAC lab t [tac]) gl,                *)
(* exists_TAC stm gl = (EXISTS_TAC t) gl, and                                *)
(* X_gen_TAC svar gl = (X_GEN_TAC v) gl, where                               *)
(* stm is a string statement which is turned into a statement t by make_env, *)
(* parse_env_string and the goal gl.  Similarly string svar is turned into a *)
(* variable v.                                                               *)
(* X_genl_TAC combines X_gen_TAC and GENL.  Since below in StepToTactic the  *)
(* string-term list uses whitespace as the delimiter and no braces, there is *)
(* no reason in readable.ml proofs to use X_gen_TAC instead of X_genl_TAC.   *)
(* intro_TAC is INTRO_TAC with the delimiter ";" replaced with ",".          *)
(* eq_tac string tac                                                         *)
(* requires the goal to be an iff statement of the form x ⇔ y and then       *)
(* performs an EQ_TAC.  If string = "Right", then the tactic tac proves the  *)
(* implication y ⇒ x, and the goal becomes the other implication x ⇒ y.      *)
(* If string = "Left", then tac proves x ⇒ y and the goal becomes y ⇒ x.     *)
(* conj_tac string tac                                                       *)
(* requires the goal to be a conjunction statement x ∧ y and then performs a *)
(* CONJ_TAC.  If string = "Left" then the tactic tac proves x, and the goal  *)
(* becomes y.  If string = "Right", tac proves y and the new goal is x.      *)
(* consider svars stm lab tac                                                *)
(* defines new variables given by the string svars = "v1 v2 ... vn" and the  *)
(* string statement stm, which subgoal_THEN turns into statement t, labeled  *)
(* by lab.  The tactic tac proves the existential statement ?v1 ... vn. t.   *)
(* case_split sDestruct tac listofDisj listofTac                             *)
(* reduces the goal to n cases which are solved separately.  listofDisj is a *)
(* list of strings [st_1;...; st_n] whose disjunction st_1 \/...\/ st_n is a *)
(* string statement proved by tactic tac.  listofTac is a list of tactics    *)
(* [tac_1;...; tac_n] which prove the statements st_1,..., st_n.  The string *)
(* sDestruct must have the form "lab_1 |...| lab_n", and lab_i is a label    *)
(* used by tac_i to prove st_i.  Each lab_i must be a nonempty string.       *)
(* assume                                                                    *)
(* is a version of ASM_CASES_TAC, and performs proofs by contradiction and   *)
(* binary case_splits where one of the forks has a short proof.  In general, *)
(* assume statement lab tac                                                  *)
(* turns the string statement into a term t, with the tactic tac a proof of  *)
(* ¬t ⇒ w, where w is the goal.  There is a new assumption t labeled lab,    *)
(* and the new goal is the result of applying the tactic SIMP_TAC [t] to w.  *)
(* It's recommended to only use assume with a short proof tac.  Three uses   *)
(* of assume arise when t = ¬w or t = ¬α, with w = α ∨ β or w = β ∨ α.       *)
(* In all three cases write                                                  *)
(* assume statement [lab] by tac;                                            *)
(* and the new goal will be F (false) or β respectively, as a result of the  *)
(* SIMP_TAC [t].  So do not use assume if SIMP_TAC [t] is disadvantageous.   *)
(* String versions of SUBGOAL_TAC, EXISTS_TAC, X_GEN_TAC and INTRO_TAC: the  *)
(* statement/variable strings are parsed in the environment of the goal.     *)
let subgoal_TAC stm lab tac gl =
  SUBGOAL_TAC lab (parse_env_string (make_env gl) stm) [tac] gl;;
let exists_TAC stm gl =
  EXISTS_TAC (parse_env_string (make_env gl) stm) gl;;
let X_gen_TAC svar (asl, w as gl) =
  (* The new variable's type comes from the goal's leading quantifier.       *)
  let vartype = (snd o dest_var o fst o dest_forall) w in
  X_GEN_TAC (mk_var (svar, vartype)) gl;;
let X_genl_TAC svarlist = MAP_EVERY X_gen_TAC svarlist;;
(* intro_TAC: INTRO_TAC with "," accepted in place of the delimiter ";".     *)
let intro_TAC s = INTRO_TAC (Str.global_replace (Str.regexp ",") ";" s);;
(* assume statement lab tac: case split on statement t via EXCLUDED_MIDDLE;  *)
(* tac proves the goal from ¬t, leaving t as a new assumption labeled lab,   *)
(* which is then used by SIMP_TAC to simplify the remaining goal.            *)
let assume statement lab tac (asl, w as gl) =
  let t = parse_env_string (make_env gl) statement in
  (DISJ_CASES_THEN (LABEL_TAC lab) (SPEC t EXCLUDED_MIDDLE) THENL
  [ALL_TAC; FIRST_ASSUM MP_TAC THEN tac] THEN HYP SIMP_TAC lab []) gl;;
(* eq_tac string tac: split an iff goal x ⇔ y with EQ_TAC.  "Left" uses tac
   to prove x ⇒ y, leaving y ⇒ x; "Right" symmetrizes first so tac proves
   y ⇒ x, leaving x ⇒ y.  Any other string is a Readable_fail error.         *)
let eq_tac string tac =
  match string with
  | "Left" -> EQ_TAC THENL [tac; ALL_TAC]
  | "Right" -> CONV_TAC SYM_CONV THEN EQ_TAC THENL [tac; ALL_TAC]
  | _ -> raise (Readable_fail
    ("eq_tac requires " ^ string ^" to be either Left or Right"));;
(* conj_tac string tac: split a conjunction goal x ∧ y with CONJ_TAC.
   "Left" uses tac to prove x, leaving y; "Right" commutes the conjunction
   first so tac proves y, leaving x.  Other strings raise Readable_fail.     *)
let conj_tac string tac =
  match string with
  | "Left" -> CONJ_TAC THENL [tac; ALL_TAC]
  | "Right" -> ONCE_REWRITE_TAC [CONJ_SYM] THEN
    CONJ_TAC THENL [tac; ALL_TAC]
  | _ -> raise (Readable_fail
    ("conj_tac requires " ^ string ^" to be either Left or Right"));;
(* consider svars stm lab tac: introduce new variables svars satisfying stm  *)
(* (labeled lab), where tac proves the existential statement ?svars. stm.    *)
let consider svars stm lab tac =
  subgoal_THEN ("?"^ svars ^ ". "^ stm)
    (DESTRUCT_TAC ("@"^ svars ^ "."^ lab)) THENL [tac; ALL_TAC];;
(* case_split sDestruct tac listofDisj listofTac: prove the disjunction of   *)
(* listofDisj with tac, destruct it according to sDestruct, and prove each   *)
(* resulting case with the corresponding tactic from listofTac.              *)
let case_split sDestruct tac listofDisj listofTac =
  (* Build the parenthesized disjunction string "(st_1) \/ ... \/ (st_n)".   *)
  let disjunction = itlist
    (fun s t -> if t = "" then "("^ s ^")" else "("^ s ^") \\/ "^ t)
    listofDisj "" in
  subgoal_TAC disjunction "" tac THEN
  FIRST_X_ASSUM (DESTRUCT_TAC sDestruct) THENL listofTac;;
(* Following the HOL Light tutorial section "Towards more readable proofs." *)
(* Short aliases following the HOL Light tutorial section "Towards more      *)
(* readable proofs"; TACtoThmTactic lifts a tactic to a thm list -> tactic   *)
(* by first feeding it the listed theorems with MP_TAC.                      *)
let fol = MESON_TAC;;
let rewrite = REWRITE_TAC;;
let simplify = SIMP_TAC;;
let set = SET_TAC;;
let rewriteR = GEN_REWRITE_TAC (RAND_CONV);;
let rewriteL = GEN_REWRITE_TAC (LAND_CONV);;
let rewriteI = GEN_REWRITE_TAC I;;
let rewriteRLDepth = GEN_REWRITE_TAC (RAND_CONV o LAND_CONV o DEPTH_CONV);;
let TACtoThmTactic tac = fun ths -> MAP_EVERY MP_TAC ths THEN tac;;
let arithmetic = TACtoThmTactic ARITH_TAC;;
let real_arithmetic = TACtoThmTactic REAL_ARITH_TAC;;
let num_ring = TACtoThmTactic (CONV_TAC NUM_RING);;
let real_ring = TACtoThmTactic (CONV_TAC REAL_RING);;
(* ws/ws0: regexps matching nonempty/possibly-empty whitespace.              *)
(* StringRegexpEqual r s holds iff regexp r matches the whole of string s.   *)
let ws = "[ \t\n]+";;
let ws0 = "[ \t\n]*";;
let StringRegexpEqual r s = Str.string_match r s 0 &&
  s = Str.matched_string s;;
(* FindMatch sleft sright s                                                  *)
(* turns strings sleft and sright into regexps, recursively searches string  *)
(* s for matched pairs of substrings matching sleft and sright, and returns  *)
(* the position after the first substring matched by sright which is not     *)
(* paired with an sleft-matching substring.  Often here sleft ends with      *)
(* whitespace (ws) while sright begins with ws.  The "degenerate" case of    *)
(* X^ws^Y where X^ws matches sleft and ws^Y matches sright is handled by     *)
(* backing up a character after an sleft match if the last character is ws.  *)
(* FindMatch sleft sright s: position just after the first sright match not  *)
(* balanced by an earlier sleft match; Readable_fail if there is none.       *)
let FindMatch sleft sright s =
  let test = Str.regexp ("\("^ sleft ^"\|"^ sright ^"\)")
  and left = Str.regexp sleft in
  (* count is (#sright matches so far) - (#sleft matches so far); the answer *)
  (* is just after the match that first brings count to 1.                   *)
  let rec FindMatchPosition s count =
    if count = 1 then 0
    else
      try
        ignore(Str.search_forward test s 0);
        let TestMatch = Str.matched_group 1 s
        and AfterTest = Str.match_end() in
        let LastChar = Str.last_chars (Str.string_before s AfterTest) 1 in
        (* Back up one character when the match ends in whitespace, so that  *)
        (* X^ws^Y yields both an sleft and an sright match.                  *)
        let endpos =
          if Str.string_match (Str.regexp ws) LastChar 0
          then AfterTest - 1 else AfterTest in
        let rest = Str.string_after s endpos
        and increment =
          if StringRegexpEqual left TestMatch then -1 else 1 in
        endpos + (FindMatchPosition rest (count + increment))
      with Not_found -> raise (Readable_fail
        ("No matching right bracket operator "^ sright ^
        " to left bracket operator "^ sleft ^" in "^ s)) in
  FindMatchPosition s 0;;
(* FindSemicolon uses FindMatch to find the position before the next         *)
(* semicolon which is not a delimiter of a list.                             *)
(* FindSemicolon s: position of the next semicolon in s that is not inside   *)
(* a square-bracketed list; Readable_fail if there is none.                  *)
let rec FindSemicolon s =
  try
    let rec FindMatchPosition s pos =
      let start = Str.search_forward (Str.regexp ";\|\[") s pos in
      if Str.matched_string s = ";" then start
      else
        (* A "[" was found first: skip over the bracketed list and resume.   *)
        let rest = Str.string_after s (start + 1) in
        let MatchingSquareBrace = FindMatch "\[" "\]" rest in
        let newpos = start + 1 + MatchingSquareBrace in
        FindMatchPosition s newpos in
    FindMatchPosition s 0
  with Not_found -> raise (Readable_fail ("No final semicolon in "^ s));;
(* FindCases uses FindMatch to take a string                                 *)
(* "suppose" proof_1 "end;" ... "suppose" proof_n "end;"                     *)
(* and return the list [proof_1; proof_2; ...; proof_n].                     *)
(* FindCases s: split "suppose p_1 end; ... suppose p_n end;" into the list  *)
(* of proofs [p_1; ...; p_n], respecting nested suppose ... end; pairs.      *)
let rec FindCases s =
  let sleftCase, srightCase = ws^ "suppose"^ws, ws^ "end" ^ws0^ ";" in
  if Str.string_match (Str.regexp sleftCase) s 0 then
    let CaseEndRest = Str.string_after s (Str.match_end()) in
    let PosAfterEnd = FindMatch sleftCase srightCase CaseEndRest in
    (* Search back from the balanced position to the start of this "end;".  *)
    let pos = Str.search_backward (Str.regexp srightCase)
      CaseEndRest PosAfterEnd in
    let case = Str.string_before CaseEndRest pos
    and rest = Str.string_after CaseEndRest PosAfterEnd in
    case :: (FindCases rest)
  else [];;
(* StringToList uses FindSemicolon to turn a string into the list of         *)
(* substrings delimited by the semicolons which are not captured in lists.   *)
(* StringToList s: split s at the semicolons found by FindSemicolon, i.e.    *)
(* at semicolons not inside square-bracketed lists.                          *)
let rec StringToList s =
  if StringRegexpEqual (Str.regexp ws0) s then [] else
  if Str.string_match (Str.regexp "[^;]*;") s 0 then
    let pos = FindSemicolon s in
    let head = Str.string_before s pos in
    head :: (StringToList (Str.string_after s (pos + 1)))
  else [s];;
(* ExtractWsStringList string = (["l1"; "l2"; ...; "ln"], rest),             *)
(* if string = ws ^ "[l1; l2; ...; ln]" ^ rest.  Raises Not_found otherwise. *)
(* ExtractWsStringList string = (["l1"; ...; "ln"], rest) when string has    *)
(* the form ws ^ "[l1; ...; ln]" ^ rest; raises Not_found otherwise.         *)
let ExtractWsStringList string =
  if Str.string_match (Str.regexp (ws^ "\[")) string 0 then
    let listRest = Str.string_after string (Str.match_end()) in
    let RightBrace = FindMatch "\[" "\]" listRest in
    (* RightBrace is just after "]", so subtract 1 to exclude the brace.     *)
    let rest = Str.string_after listRest RightBrace
    and list = Str.string_before listRest (RightBrace - 1) in
    (StringToList list, rest)
  else raise Not_found;;
(* theoremify string goal returns a pair (thm, rest),                        *)
(* where thm is the first theorem found on string, using goal if needed, and *)
(* rest is the remainder of string.  Theoremify uses 3 helping functions:    *)
(* 1) CombTermThm_Term, which produces a combination of a term->thm          *)
(* (e.g. ARITH_RULE) with a term,                                            *)
(* 2) CombThmlistTermThm_Thmlist_Term, which combines a thmlist->term->thm   *)
(* (e.g. MESON) with a thmlist and a term, and                               *)
(* 3) CombTermlistThmThm_Termlist, which combines a termlist->thm->thm       *)
(* (e.g. SPECL) with a termlist and a thm produced by theoremify.            *)
(* Similar functions CombThmtactic_Thm and CombThmlisttactic_Thmlist are     *)
(* used below, along with theoremify, by StringToTactic.                     *)
(* CombTermThm_Term word rest gl: apply the term->thm named word (e.g.       *)
(* ARITH_RULE) to the single term in the bracketed list that begins rest.    *)
let CombTermThm_Term word rest gl =
  let TermThm = exec_term_thm word in
  try
    let (stermlist, wsRest) = ExtractWsStringList rest in
    if length stermlist = 1 then
      let term = (parse_env_string (make_env gl)) (hd stermlist) in
      (TermThm term, wsRest)
    else raise (Readable_fail ("term->thm "^ word
      ^" not followed by length 1 term list, but instead the list \n["^
      String.concat ";" stermlist ^"]"))
  with Not_found -> raise (Readable_fail ("term->thm "^ word
    ^" not followed by term list, but instead \n"^ rest));;
(* theoremify string gl = (thm, rest): read the first theorem from string,   *)
(* using the goal gl for types and labeled assumptions; "-" denotes the      *)
(* most recent assumption.  Mutually recursive with its helpers below.       *)
let rec theoremify string gl =
  if Str.string_match (Str.regexp (ws^ "\([^][ \t\n]+\)")) string 0 then
    let word = Str.matched_group 1 string
    and rest = Str.string_after string (Str.match_end()) in
    if word = "-" then (snd (hd (fst gl)), rest) else
    try (exec_thm word, rest)
    with _ ->
    try (assoc word (fst gl), rest)
    with _ ->
    try firstPairMult (exec_thm_thm word) (theoremify rest gl)
    with _ ->
    try CombTermThm_Term word rest gl
    with Noparse ->
    try CombThmlistTermThm_Thmlist_Term word rest gl
    with Noparse ->
    try CombTermlistThmThm_Termlist word rest gl
    with Noparse -> raise (Readable_fail ("Not a theorem:\n"^ string))
  else raise (Readable_fail ("Empty theorem:\n"^ string))
(* firstPairMult f (a, b): apply f to the first component of a pair.         *)
and
firstPairMult f (a, b) = (f a, b)
(* Apply a termlist->thm->thm (e.g. SPECL) to a term list and a theorem      *)
(* subsequently read by theoremify.                                          *)
and
CombTermlistThmThm_Termlist word rest gl =
  let TermlistThmThm = exec_termlist_thm_thm word in
  try
    let (stermlist, WsThm) = ExtractWsStringList rest in
    let termlist = map (parse_env_string (make_env gl)) stermlist in
    firstPairMult (TermlistThmThm termlist) (theoremify WsThm gl)
  with Not_found -> raise (Readable_fail ("termlist->thm->thm "^ word
    ^"\n not followed by term list in\n"^ rest))
(* Apply a thmlist->term->thm (e.g. MESON) to a bracketed theorem list and   *)
(* a bracketed term.                                                         *)
and
CombThmlistTermThm_Thmlist_Term word rest gl =
  let thm_create sthm =
    let (thm, rest) = theoremify (" "^ sthm) gl in
    if rest = "" then thm
    else raise (Readable_fail ("an argument of thmlist->term->thm "^ word ^
      "\n is not a theorem, but instead \n"^ sthm)) in
  let ThmlistTermThm = exec_thmlist_term_thm word in
  try
    let (stermlist, wsTermRest) = ExtractWsStringList rest in
    let thmlist = map thm_create stermlist in
    if Str.string_match (Str.regexp (ws^ "\[")) wsTermRest 0 then
      let termRest = Str.string_after wsTermRest (Str.match_end()) in
      let RightBrace = FindMatch "\[" "\]" termRest in
      let rest = Str.string_after termRest RightBrace
      and sterm = Str.string_before termRest (RightBrace - 1) in
      let term = parse_env_string (make_env gl) sterm in
      (ThmlistTermThm thmlist term, rest)
    else raise (Readable_fail ("thmlist->term->thm "^ word
      ^" followed by list of theorems ["^ String.concat ";" stermlist ^"]
      not followed by term in\n"^ wsTermRest))
  with Not_found -> raise (Readable_fail ("thmlist->term->thm "^ word
    ^" not followed by thm list in\n"^ rest));;
(* CombThmtactic_Thm step: parse "thm_tactic Thm" (e.g. "MATCH_MP_TAC thm")  *)
(* into a tactic; Not_found when step does not start with a word.            *)
let CombThmtactic_Thm step =
  if Str.string_match (Str.regexp (ws^ "\([a-zA-Z0-9_]+\)")) step 0 then
    let sthm_tactic = Str.matched_group 1 step
    and sthm = Str.string_after step (Str.match_end()) in
    let thm_tactic = exec_thmtactic sthm_tactic in
    fun gl ->
      let (thm, rest) = theoremify sthm gl in
      if rest = "" then thm_tactic thm gl
      else raise (Readable_fail ("thm_tactic "^ sthm_tactic
        ^" not followed by a theorem, but instead\n"^ sthm))
  else raise Not_found;;
(* CombThmlisttactic_Thmlist step: parse "thmlist_tactic Thm ... Thm" (e.g.  *)
(* a fol/MESON_TAC step) into a tactic applied to the listed theorems.       *)
let CombThmlisttactic_Thmlist step =
  (* Accumulate the theorems read from string, most recent first.            *)
  let rec makeThmListAccum string list gl =
    if StringRegexpEqual (Str.regexp ws0) string then list else
    let (thm, rest) = theoremify string gl in
    makeThmListAccum rest (thm :: list) gl in
  if Str.string_match (Str.regexp (ws^ "\([a-zA-Z0-9_]+\)")) step 0 then
    let ttac = exec_thmlist_tactic (Str.matched_group 1 step)
    and LabThmString = Str.string_after step (Str.match_end()) in
    fun gl ->
      let LabThmList = List.rev (makeThmListAccum LabThmString [] gl) in
      ttac LabThmList gl
  else raise Not_found;;
(* StringToTactic uses regexp functions from the Str library to transform a  *)
(* string into a tactic.  The allowable tactics are written in BNF form as   *)
(*                                                                           *)
(* Tactic := ALL_TAC | Tactic THEN Tactic | thm->tactic Thm |                *)
(* one-word-tactic (e.g. ARITH_TAC) | thmlist->tactic Thm-list |             *)
(* intro_TAC string | exists_TAC term | X_genl_TAC term-list |               *)
(* case_split string Tactic statement-list Tactic-list |                     *)
(* consider variable-list statement label Tactic |                           *)
(* eq_tac (Right | Left) Tactic | conj_tac (Right | Left) Tactic |           *)
(* (assume | subgoal_TAC) statement label Tactic                             *)
(*                                                                           *)
(* Thm := theorem-name | label | - [i.e. last assumption] | thm->thm Thm |   *)
(* term->thm term | thmlist->term->thm Thm-list term |                       *)
(* termlist->thm->thm term-list Thm                                          *)
(*                                                                           *)
(* The string proofs allowed by StringToTactic are written in BNF form as    *)
(*                                                                           *)
(* Proof := Proof THEN Proof | case_split destruct_string ByProofQed         *)
(* suppose statement; Proof end; ... suppose statement; Proof end; |         *)
(* OneStepProof; | consider variable-list statement [label] ByProofQed |     *)
(* eq_tac [Right|Left] ByProofQed | conj_tac [Right|Left] ByProofQed |       *)
(* (assume | ) statement [label] ByProofQed                                  *)
(*                                                                           *)
(* OneStepProof := one-word-tactic | thm->tactic Thm | intro_TAC string |    *)
(* exists_TAC term-string | X_genl_TAC variable-string-list |                *)
(* thmlist->tactic Thm-list                                                  *)
(*                                                                           *)
(* ByProofQed := by OneStepProof; | proof Proof Proof ... Proof qed;         *)
(*                                                                           *)
(* theorem is a version of prove based on the miz3.ml thm, with argument     *)
(* statement ByProofQed                                                      *)
(*                                                                           *)
(* Miz3-style comments are supported.  If a line contains ::, then the       *)
(* substring of the line beginning with :: is ignored by StringToTactic.     *)
(* StringToTactic s: translate the readable proof script s into a tactic.    *)
(* Miz3-style :: comments are stripped first; the script is then consumed    *)
(* one semicolon-terminated step at a time.  Mutually recursive with the     *)
(* step and case parsers below.                                              *)
let rec StringToTactic s =
  let s = Str.global_replace (Str.regexp "::[^\n]*") "" s in
  if StringRegexpEqual (Str.regexp ws0) s then ALL_TAC
  else
    try makeCaseSplit s
    with _ ->
    let pos = FindSemicolon s in
    let step, rest = Str.string_before s pos, Str.string_after s (pos + 1) in
    try
      let tactic = StepToTactic step in
      tactic THEN StringToTactic rest
    with Not_found ->
    let (tactic, rest) = BigStepToTactic s step in
    tactic THEN StringToTactic rest
(* GetProof ByProof s: read either "by OneStepProof;" or "proof ... qed;"    *)
(* from s, returning the tactic and the remainder of s.                      *)
and
GetProof ByProof s =
  if ByProof = "by" then
    let pos = FindSemicolon s in
    let step, rest = Str.string_before s pos, Str.string_after s (pos + 1) in
    (StepToTactic step, rest)
  else
    let pos_after_qed = FindMatch (ws^"proof"^ws) (ws^"qed"^ws0^";") s in
    let pos = Str.search_backward (Str.regexp "qed") s pos_after_qed in
    let proof = StringToTactic (Str.string_before s pos) in
    (proof, Str.string_after s pos_after_qed)
(* makeCaseSplit s: parse "case_split labs by/proof ... suppose ... end;"    *)
(* into a case_split tactic; Not_found if s is not a case_split.             *)
and
makeCaseSplit s =
  if Str.string_match (Str.regexp (ws^ "case_split" ^ws^ "\([^;]+\)" ^ws^
    "\(by\|proof\)" ^ws)) s 0 then
    let sDestruct = Str.matched_group 1 s
    and (proof, rest) = GetProof (Str.matched_group 2 s)
      (Str.string_after s (Str.group_end 2))
    and SplitAtSemicolon case =
      let pos = FindSemicolon case in
      [Str.string_before case pos; Str.string_after case (pos + 1)] in
    let list2Case = map SplitAtSemicolon (FindCases rest) in
    let listofDisj = map hd list2Case
    and listofTac = map (StringToTactic o hd o tl) list2Case in
    case_split sDestruct proof listofDisj listofTac
  else raise Not_found
(* StepToTactic step: parse a one-step tactic: a bare tactic name, a         *)
(* thm_tactic or thmlist_tactic applied to theorems, or intro_TAC,           *)
(* exists_TAC and X_genl_TAC with their string arguments.                    *)
and
StepToTactic step =
  try
    if StringRegexpEqual (Str.regexp (ws^ "\([^ \t\n]+\)" ^ws0)) step then
      exec_tactic (Str.matched_group 1 step)
    else raise Not_found
  with _ ->
  try CombThmtactic_Thm step
  with _ ->
  try CombThmlisttactic_Thmlist step
  with _ ->
  if Str.string_match (Str.regexp (ws^ "intro_TAC" ^ws)) step 0 then
    let intro_string = Str.string_after step (Str.match_end()) in
    intro_TAC intro_string
  else if Str.string_match (Str.regexp (ws^ "exists_TAC" ^ws)) step 0 then
    let exists_string = Str.string_after step (Str.match_end()) in
    exists_TAC exists_string
  else if Str.string_match (Str.regexp (ws^ "X_genl_TAC" ^ws)) step 0 then
    let genl_string = Str.string_after step (Str.match_end()) in
    let svarlist = Str.split (Str.regexp ws) genl_string in
    X_genl_TAC svarlist
  else raise Not_found
(* BigStepToTactic s step: parse consider, eq_tac, conj_tac, assume and      *)
(* subgoal steps of the form "statement [label] by/proof ...".               *)
and
BigStepToTactic s step =
  if Str.string_match (Str.regexp (ws^ "consider" ^ws^ "\(\(.\|\n\)+\)" ^ws^
    "such" ^ws^ "that" ^ws^ "\(\(.\|\n\)+\)" ^ws^ "\[\(\(.\|\n\)*\)\]" ^ws^
    "\(by\|proof\)" ^ws)) step 0 then
    let vars, t = Str.matched_group 1 step, Str.matched_group 3 step
    and lab = Str.matched_group 5 step
    and KeyWord, endKeyWord = Str.matched_group 7 step, (Str.group_end 7) in
    let (proof, rest) = GetProof KeyWord (Str.string_after s endKeyWord) in
    (consider vars t lab proof, rest)
  else
    try
      let start = Str.search_forward (Str.regexp
        (ws^ "\[\([^]]*\)\]" ^ws^ "\(by\|proof\)" ^ws)) step 0 in
      let statement = Str.string_before step start
      and lab = Str.matched_group 1 step
      and KeyWord = Str.matched_group 2 step
      and AfterWord = Str.string_after s (Str.group_end 2) in
      let (proof, rest) = GetProof KeyWord AfterWord in
      if StringRegexpEqual (Str.regexp (ws^ "eq_tac")) statement
      then (eq_tac lab proof, rest)
      else if StringRegexpEqual (Str.regexp (ws^ "conj_tac")) statement
      then (conj_tac lab proof, rest)
      else if
        Str.string_match (Str.regexp (ws^ "\(assume\)" ^ws)) statement 0
      then
        let statement = Str.string_after statement (Str.match_end()) in
        (assume statement lab proof, rest)
      else (subgoal_TAC statement lab proof, rest)
    with Not_found -> raise (Readable_fail
      ("Can't parse as a Proof:\n"^ step));;
(* theorem s: prove the statement at the head of s using the following       *)
(* "proof ... qed;" script or one-step "by ..." proof, after math-font       *)
(* translation.  Readable_fail on trailing garbage or a missing proof.       *)
let theorem s =
  let s = CleanMathFontsForHOL_Light s in
  try
    let start = Str.search_forward (Str.regexp
      (ws^ "proof\(" ^ws^ "\(.\|\n\)*\)" ^ws ^ "qed" ^ws0^ ";" ^ws0)) s 0 in
    let thm = Str.string_before s start
    and proof = Str.matched_group 1 s
    and rest = Str.string_after s (Str.match_end()) in
    if rest = "" then prove (parse_env_string [] thm, StringToTactic proof)
    else raise (Readable_fail
      ("Trailing garbage after the proof...qed:\n" ^ rest))
  with Not_found ->
  try
    let start = Str.search_forward (Str.regexp (ws^ "by")) s 0 in
    let thm = Str.string_before s start
    and proof = Str.string_after s (Str.match_end()) in
    try
      prove (parse_env_string [] thm, StepToTactic proof)
    with Not_found -> raise (Readable_fail ("Not a proof:\n" ^ proof))
  with Not_found -> raise (Readable_fail
    ("Missing initial \"proof\", \"by\", or final \"qed;\" in\n" ^ s));;
(* interactive_goal s: set s (after math-font translation) as the current    *)
(* interactive goal.                                                         *)
let interactive_goal s =
  g (parse_env_string [] (CleanMathFontsForHOL_Light s));;
(* interactive_proof s: apply the readable proof script s (after math-font   *)
(* translation) to the current interactive goal.                             *)
let interactive_proof s =
  e (StringToTactic (CleanMathFontsForHOL_Light s));;
(* Two examples illustrating intro_TAC, eq_tac, exists_TAC, MP_TAC and SPECL,*)
(* then a port of the HOL Light tutorial proof that sqrt 2 is irrational.    *)
(* A Skolemization example illustrating intro_TAC, eq_tac and exists_TAC     *)
(* with a lambda term.                                                       *)
let SKOLEM_THM_GEN = theorem `;
  ∀P R. (∀x. P x ⇒ ∃y. R x y) ⇔ ∃f. ∀x. P x ⇒ R x (f x)
  proof
    intro_TAC ∀P R;
    eq_tac [Right] by fol;
    intro_TAC H1;
    exists_TAC λx. @y. R x y;
    fol H1;
  qed;
`;;
(* An example illustrating MP_TAC with SPECL applied to a term list.         *)
let MOD_MOD_REFL' = theorem `;
  ∀m n. ¬(n = 0) ⇒ ((m MOD n) MOD n = m MOD n)
  proof
    intro_TAC !m n, H1;
    MP_TAC SPECL [m; n; 1] MOD_MOD;
    fol H1 MULT_CLAUSES MULT_EQ_0 ONE NOT_SUC;
  qed;
`;;
(* Port of the HOL Light tutorial proof that sqrt 2 is irrational, showing   *)
(* consider ... such that and a two-way case_split.                          *)
let NSQRT_2 = theorem `;
  ∀p q. p * p = 2 * q * q ⇒ q = 0
  proof
    MATCH_MP_TAC num_WF;
    intro_TAC ∀p, A, ∀q, B;
    EVEN(p * p) ⇔ EVEN(2 * q * q)     [] by fol B;
    EVEN(p)     [] by fol - EVEN_DOUBLE EVEN_MULT;
    consider m such that p = 2 * m     [C] by fol - EVEN_EXISTS;
    case_split qp | pq by arithmetic;
    suppose q < p;
      q * q = 2 * m * m ⇒ m = 0     [] by fol qp A;
      num_ring - B C;
    end;
    suppose p <= q;
      p * p <= q * q     [] by fol - LE_MULT2;
      q * q = 0     [] by arithmetic - B;
      num_ring -;
    end;
  qed;
`;;
(* The following interactive version of the above proof shows a feature of   *)
(* proof/qed and case_split/suppose.  You can evaluate an incomplete proof   *)
(* of a statement in an interactive_proof and complete the proof afterward,  *)
(* as indicated below.  The "suppose" clauses of a case_split can also be    *)
(* incomplete.  Do not include code below the incomplete proof or case_split *)
(* in an interactive_proof body, for the usual THEN vs THENL reason.         *)
(* The NSQRT_2 proof developed interactively; each interactive_proof call    *)
(* extends the proof of the goal set by interactive_goal.                    *)
interactive_goal `;∀p q. p * p = 2 * q * q ⇒ q = 0
`;;
(* An empty proof ... qed leaves the labeled statement unproved for now.     *)
interactive_proof `;
  MATCH_MP_TAC num_WF;
  intro_TAC ∀p, A, ∀q, B;
  EVEN(p * p) ⇔ EVEN(2 * q * q)     [] proof qed;
`;;
interactive_proof `;
    fol B;
`;;
interactive_proof `;
  EVEN(p)     [] by fol - EVEN_DOUBLE EVEN_MULT;
  consider m such that p = 2 * m     [C] proof fol - EVEN_EXISTS; qed;
`;;
(* Both suppose clauses are left incomplete and filled in below.             *)
interactive_proof `;
  case_split qp | pq by arithmetic;
  suppose q < p;
  end;
  suppose p <= q;
  end;
`;;
interactive_proof `;
    q * q = 2 * m * m ⇒ m = 0     [] by fol qp A;
    num_ring - B C;
`;;
interactive_proof `;
    p * p <= q * q     [] by fol - LE_MULT2;
    q * q = 0     [] by arithmetic - B;
    num_ring -;
`;;
let NSQRT_2 = top_thm();;
(* A port from arith.ml uses by instead of proof ... qed; in a short proof:  *)
(* A one-step "by" proof: no proof ... qed; wrapper is needed.               *)
let EXP_2 = theorem `;
  ∀n:num. n EXP 2 = n * n
  by rewrite BIT0_THM BIT1_THM EXP EXP_ADD MULT_CLAUSES ADD_CLAUSES`;;
(* An example using GSYM, ONCE_REWRITE_TAC and MATCH_MP_TAC, reproving       *)
(* the binomial theorem from sec 13.1--13.2 of the HOL Light tutorial.       *)
(* Recursive definition of the binomial coefficients binom(n,k).             *)
let binom = define
  `(!n. binom(n,0) = 1) /\
  (!k. binom(0,SUC(k)) = 0) /\
  (!n k. binom(SUC(n),SUC(k)) = binom(n,SUC(k)) + binom(n,k))`;;
(* binom(n,k) vanishes above the diagonal; note INDUCT_TAC is usable here    *)
(* because neither induction requires a THENL split.                         *)
let BINOM_LT = theorem `;
  ∀n k. n < k ⇒ binom(n,k) = 0
  proof
    INDUCT_TAC; INDUCT_TAC;
    rewrite binom ARITH LT_SUC LT;
    ASM_SIMP_TAC ARITH_RULE [n < k ==> n < SUC(k)] ARITH;
  qed;
`;;
(* The binomial theorem, using the rewriteR/rewriteRLDepth aliases and       *)
(* num_INDUCTION (rather than INDUCT_TAC, which would need THENL).           *)
let BINOMIAL_THEOREM = theorem `;
  ∀n. (x + y) EXP n = nsum(0..n) (\k. binom(n,k) * x EXP k * y EXP (n - k))
  proof
    ∀f n. nsum (0.. SUC n) f = f(0) + nsum (0..n) (λi. f (SUC i))     [Nsum0SUC] by simplify LE_0 ADD1 NSUM_CLAUSES_LEFT NSUM_OFFSET;
    MATCH_MP_TAC num_INDUCTION;
    simplify EXP NSUM_SING_NUMSEG binom SUB_0 MULT_CLAUSES;
    intro_TAC ∀n, nThm;
    rewrite Nsum0SUC binom RIGHT_ADD_DISTRIB NSUM_ADD_NUMSEG GSYM NSUM_LMUL ADD_ASSOC;
    rewriteR ADD_SYM;
    rewriteRLDepth SUB_SUC EXP;
    rewrite MULT_AC EQ_ADD_LCANCEL MESON [binom] [1 = binom(n, 0)] GSYM Nsum0SUC;
    simplify NSUM_CLAUSES_RIGHT ARITH_RULE [0 < SUC n ∧ 0 <= SUC n] LT BINOM_LT MULT_CLAUSES ADD_CLAUSES SUC_SUB1;
    simplify ARITH_RULE [k <= n ⇒ SUC n - k = SUC(n - k)] EXP MULT_AC;
  qed;
`;;
| null | https://raw.githubusercontent.com/jrh13/hol-light/d125b0ae73e546a63ed458a7891f4e14ae0409e2/RichterHilbertAxiomGeometry/readable.ml | ocaml | =========================================================================
The primary meaning of readability is explained in the HOL Light tutorial
"We would like to claim that this proof can be read in isolation, without
somewhere else in the proof, and every assumption is given explicitly."
However readability is often improved by using tactics constructs like
given explicitly, so as to not lose sight of the proof. Readability is
improved by a miz3 interface with few type annotations, back-quotes or
double-quotes, and allowing HOL4/Isabelle math characters, e.g.
more readable proofs" of the tutorial and Examples/mizar.ml), Freek
declarative constructs assume, consider and case_split. The semantics of
interactive mode is useful in writing, debugging and displaying proofs.
The construct "case_split" reducing the goal to various cases given by
long proofs, which can be arbitrarily nested with other case_split and
(also eq_tac and conj_tac), and this requires adjustments, such as using
=========================================================================
parse_qproof uses system.ml quotexpander feature designed for miz3.ml to
turn backquoted expression `;[...]` into a string with no newline or
backslash problems. Note that miz3.ml defines parse_qproof differently.
printReadExn prints uncluttered error messages via Readable_fail. This
From update_database.ml: Execute any OCaml expression given as a string.
Following miz3.ml, exec_thm returns the theorem representing a string, so
exec_thm "FORALL_PAIR_THM";; returns
Extra error-checking is done to rule out the possibility of the theorem
string ending with a semicolon.
string into a term with types inferred by the goal and assumption list.
versions of new_constant, parse_as_infix, new_definition and new_axiom
tactic constructs assume, consider and case_split.
subgoal_THEN stm ttac gl = (SUBGOAL_THEN t ttac) gl,
parse_env_string and the goal gl. Similarly string svar is turned into a
variable v.
X_genl_TAC combines X_gen_TAC and GENL. Since below in StepToTactic the
string-term list uses whitespace as the delimiter and no braces, there is
eq_tac string tac
requires the goal to be an iff statement of the form x ⇔ y and then
performs an EQ_TAC. If string = "Right", then the tactic tac proves the
If string = "Left", then tac proves x ⇒ y and the goal becomes y ⇒ x.
conj_tac string tac
requires the goal to be a conjunction statement x ∧ y and then performs a
CONJ_TAC. If string = "Left" then the tactic tac proves x, and the goal
becomes y. If string = "Right", tac proves y and the new goal is x.
consider svars stm lab tac
defines new variables given by the string svars = "v1 v2 ... vn" and the
by lab. The tactic tac proves the existential statement ?v1 ... vn. t.
reduces the goal to n cases which are solved separately. listofDisj is a
string statement proved by tactic tac. listofTac is a list of tactics
[tac_1;...; tac_n] which prove the statements st_1,..., st_n. The string
assume
binary case_splits where one of the forks has a short proof. In general,
assume statement lab tac
turns the string statement into a term t, with the tactic tac a proof of
of assume arise when t = ¬w or t = ¬α, with w = α ∨ β or w = β ∨ α.
and the new goal will be F (false) or β respectively, as a result of the
Following the HOL Light tutorial section "Towards more readable proofs."
paired with an sleft-matching substring. Often here sleft ends with
whitespace (ws) while sright begins with ws. The "degenerate" case of
X^ws^Y where X^ws matches sleft and ws^Y matches sright is handled by
backing up a character after an sleft match if the last character is ws.
semicolon which is not a delimiter of a list.
"suppose" proof_1 "end;" ... "suppose" proof_n "end;"
substrings delimited by the semicolons which are not captured in lists.
ExtractWsStringList string = (["l1"; "l2"; ...; "ln"], rest),
if string = ws ^ "[l1; l2; ...; ln]" ^ rest. Raises Not_found otherwise.
intro_TAC string | exists_TAC term | X_genl_TAC term-list |
eq_tac (Right | Left) Tactic | conj_tac (Right | Left) Tactic |
Thm := theorem-name | label | - [i.e. last assumption] | thm->thm Thm |
term->thm term | thmlist->term->thm Thm-list term |
OneStepProof; | consider variable-list statement [label] ByProofQed |
eq_tac [Right|Left] ByProofQed | conj_tac [Right|Left] ByProofQed |
exists_TAC term-string | X_genl_TAC variable-string-list |
thmlist->tactic Thm-list
Miz3-style comments are supported. If a line contains ::, then the
The following interactive version of the above proof shows a feature of
of a statement in an interactive_proof and complete the proof afterward,
as indicated below. The "suppose" clauses of a case_split can also be
incomplete. Do not include code below the incomplete proof or case_split
in an interactive_proof body, for the usual THEN vs THENL reason. | Miz3 interface for readable HOL Light tactics formal proofs
( c ) Copyright , 2013
Distributed under the same license as HOL Light
on page 81 after the proof of NSQRT_2 ( ported below ) ,
running it in HOL . For each step , every fact we used is clearly labelled
SIMP_TAC and , which allow facts and assumptions to not be
⇒ ⇔ ∧ ∃ ∈ ∉ α β γ λ θ μ ⊂ ∩ ━ ≡ ≅ ∡ ∥ → ╪ .
We use ideas for readable formal proofs due to ( " Towards
Wiedijk ( / miz2a.ml , / miz3.ml and arxiv.org/pdf/1201.3601
" A Synthesis of Procedural and Declarative Styles of Interactive
Theorem Proving " ) , ( author of tactic constructs
INTRO_TAC , DESTRUCT_TAC & HYP ) , ( coauthor of
Isabelle Light ) , ( author of the Q - module
-Light-Q ) and ( author of HOL
Zero and Tactician ) . These readability ideas yield the miz3 - type
readable.ml is clear from an obvious translation to HOL Light proofs . An
" suppose " clauses . The construct " proof " [ ... ] " qed " allows arbitrarily
proof / qed constructs . THENL is only implemented implicitly in case_split
num_INDUCTION instead of INDUCT_TAC .
The library defines regexp functions needed to process strings .
#load "str.cma";;
(* parse_qproof s : drop the first character of s (the leading backquote
sentinel of an interactive proof string), returning the rest. *)
let parse_qproof s = (String.sub s 1 (String.length s - 1));;
Allows HOL4 and style math characters .
(* Translate HOL4/Isabelle-style Unicode math symbols in s into the ASCII
forms HOL Light's parser accepts (e.g. "∀" to "!", "∈" to "IN").
The substitutions are applied sequentially, left to right in the table. *)
let CleanMathFontsForHOL_Light s =
  let table =
    ["⇒","==>"; "⇔","<=>"; "∧","/\\ "; "∨","\\/"; "¬","~";
     "∀","!"; "∃","?"; "∈","IN"; "∉","NOTIN";
     "α","alpha"; "β","beta"; "γ","gamma"; "λ","\\ "; "θ","theta"; "μ","mu";
     "⊂","SUBSET"; "∩","INTER"; "∪","UNION"; "∅","{}"; "━","DIFF";
     "≡","==="; "≅","cong"; "∡","angle"; "∥","parallel";
     "∏","prod"; "∘","_o_"; "→","--->"; "╪","INSERT";
     "≃", "TarskiCong"; "≊", "TarskiTriangleCong"; "ℬ", "TarskiBetween"] in
  List.fold_left
    (fun acc (sym, ascii) -> Str.global_replace (Str.regexp sym) ascii acc)
    s table;;
is due to , who also explained exec below .
(* Exception used throughout readable.ml to report parsing/processing
failures, carrying a human-readable message. *)
exception Readable_fail of string;;
(* Toplevel printer: show the message of a Readable_fail directly, and fall
back to Printexc.to_string for any other exception. *)
let printReadExn e =
match e with
| Readable_fail s
-> print_string s
| _ -> print_string (Printexc.to_string e);;
#install_printer printReadExn;;
(* exec s : parse and evaluate the string s as an OCaml toplevel phrase,
discarding its result; used below to coerce strings to typed HOL values. *)
let exec = ignore o Toploop.execute_phrase false Format.std_formatter
o !Toploop.parse_toplevel_phrase o Lexing.from_string;;
val it : thm = |- ! P. ( ! p. P p ) < = > ( ! p1 p2 . P ( p1,p2 ) )
(* OCaml has no runtime type inspection, so each exec_X below coerces a
string s to a HOL value of type X by evaluating the toplevel phrase
"X_ref := ((s): X);;" -- the type ascription makes the toplevel
typecheck s as an X -- and then reading the ref back. Any parse or type
failure is mapped to Noparse so callers can try the next interpretation.
The initial values of the refs are arbitrary placeholders. *)
let thm_ref = ref TRUTH;;
let tactic_ref = ref ALL_TAC;;
let thmtactic_ref = ref MATCH_MP_TAC;;
let thmlist_tactic_ref = ref REWRITE_TAC;;
let termlist_thm_thm_ref = ref SPECL;;
let thm_thm_ref = ref GSYM;;
let term_thm_ref = ref ARITH_RULE;;
let thmlist_term_thm_ref = ref MESON;;
(* Coerce s to a thm. A string containing any ';' is rejected up front so a
multi-phrase proof step is never misread as a theorem name. *)
let exec_thm s =
if Str.string_match (Str.regexp "[^;]*;") s 0 then raise Noparse
else
try exec ("thm_ref := (("^ s ^"): thm);;");
!thm_ref
with _ -> raise Noparse;;
(* Coerce s to a tactic. *)
let exec_tactic s =
try exec ("tactic_ref := (("^ s ^"): tactic);;"); !tactic_ref
with _ -> raise Noparse;;
(* Coerce s to a thm list -> tactic (e.g. REWRITE_TAC). *)
let exec_thmlist_tactic s =
try
exec ("thmlist_tactic_ref := (("^ s ^"): thm list -> tactic);;");
!thmlist_tactic_ref
with _ -> raise Noparse;;
(* Coerce s to a thm -> tactic (e.g. MATCH_MP_TAC). *)
let exec_thmtactic s =
try exec ("thmtactic_ref := (("^ s ^"): thm -> tactic);;"); !thmtactic_ref
with _ -> raise Noparse;;
(* Coerce s to a term list -> thm -> thm (e.g. SPECL). *)
let exec_termlist_thm_thm s =
try exec ("termlist_thm_thm_ref := (("^ s ^"): (term list -> thm -> thm));;");
!termlist_thm_thm_ref
with _ -> raise Noparse;;
(* Coerce s to a thm -> thm (e.g. GSYM). *)
let exec_thm_thm s =
try exec ("thm_thm_ref := (("^ s ^"): (thm -> thm));;");
!thm_thm_ref
with _ -> raise Noparse;;
(* Coerce s to a term -> thm (e.g. ARITH_RULE). *)
let exec_term_thm s =
try exec ("term_thm_ref := (("^ s ^"): (term -> thm));;");
!term_thm_ref
with _ -> raise Noparse;;
(* Coerce s to a thm list -> term -> thm (e.g. MESON). *)
let exec_thmlist_term_thm s =
try exec ("thmlist_term_thm_ref := (("^ s ^"): (thm list ->term -> thm));;");
!thmlist_term_thm_ref
with _ -> raise Noparse;;
make_env and parse_env_string ( following parse_term from parser.ml ,
/ miz2a.ml and -Light-Q ) turn a
(* make_env gl : build a (variable name, pretype) environment from the free
variables of the goal's conclusion and assumptions, so that terms parsed
from strings pick up types consistent with the goal. *)
let (make_env: goal -> (string * pretype) list) =
fun (asl, w) -> map ((fun (s, ty) -> (s, pretype_of_type ty)) o dest_var)
(freesl (w::(map (concl o snd) asl)));;
(* parse_env_string env s : parse s to a term, typechecking in env; fail
with Readable_fail if any unparsed input remains after the term. *)
let parse_env_string env s =
let (ptm, l) = (parse_preterm o lex o explode) s in
if l = [] then (term_of_preterm o retypecheck env) ptm
else raise (Readable_fail
("Unparsed input at the end of the term\n" ^ s));;
(* String wrappers around the HOL Light primitives, applying the math-font
cleanup first so Unicode statements can be used directly. *)
let NewConstant (x, y) = new_constant(CleanMathFontsForHOL_Light x, y);;
let ParseAsInfix (x, y) = parse_as_infix (CleanMathFontsForHOL_Light x, y);;
let NewDefinition s =
new_definition (parse_env_string [] (CleanMathFontsForHOL_Light s));;
let NewAxiom s =
new_axiom (parse_env_string [] (CleanMathFontsForHOL_Light s));;
String versions without type annotations of SUBGOAL_THEN , SUBGOAL_TAC ,
intro_TAC , EXISTS_TAC , X_GEN_TAC , and EXISTS_TAC , and also new miz3 - type
where is a string that turned into a statement t by make_env and
parse_env_string , using the goal gl . We call a string statement .
ttac is often the thm_tactic ( LABEL_TAC string ) or ( DESTRUCT_TAC string ) .
(* subgoal_THEN stm ttac gl : SUBGOAL_THEN with the subgoal given as a
string, parsed in the environment of the goal gl. *)
let subgoal_THEN stm ttac gl =
SUBGOAL_THEN (parse_env_string (make_env gl) stm) ttac gl;;
subgoal_TAC stm lab tac gl = ( SUBGOAL_TAC lab t [ tac ] ) gl ,
exists_TAC stm gl = ( EXISTS_TAC t ) gl , and
X_gen_TAC svar gl = ( X_GEN_TAC v ) gl , where
stm is a string statement which is turned into a statement t by make_env ,
no reason in readable.ml proofs to use X_gen_TAC instead of X_genl_TAC.
intro_TAC is INTRO_TAC with the delimiter " ; " replaced with " , " .
implication y ⇒ x , and the goal becomes the other implication x ⇒
string statement , which subgoal_THEN turns into statement t , labeled
case_split sDestruct tac listofDisj listofTac
list of strings [ st_1 ; ... ; st_n ] whose disjunction st_1 \/ ... \/ st_n is a
sDestruct must have the form " lab_1 | ... | lab_n " , and lab_i is a label
used by tac_i to prove st_i . Each lab_i must be a nonempty string .
is a version of ASM_CASES_TAC , and performs proofs by contradiction and
¬t ⇒ w , where w is the goal . There is a new assumption t labeled lab , and
the new goal is the result of applying the tactic SIMP_TAC [ t ] to
It 's recommended to only use assume with a short proof tac . Three uses
In all three cases write
assume statement [ lab ] by ;
SIMP_TAC [ t ] . So do not use assume if [ t ] is disadvantageous .
(* String versions of standard tactics: the statement/witness is parsed in
the goal's environment, so no type annotations are needed. *)
let subgoal_TAC stm lab tac gl =
SUBGOAL_TAC lab (parse_env_string (make_env gl) stm) [tac] gl;;
let exists_TAC stm gl =
EXISTS_TAC (parse_env_string (make_env gl) stm) gl;;
(* X_gen_TAC svar : generalize the goal's outer universal quantifier to the
variable named svar, inferring its type from the quantified variable. *)
let X_gen_TAC svar (asl, w as gl) =
let vartype = (snd o dest_var o fst o dest_forall) w in
X_GEN_TAC (mk_var (svar, vartype)) gl;;
let X_genl_TAC svarlist = MAP_EVERY X_gen_TAC svarlist;;
(* intro_TAC with "," accepted as the separator instead of ";". *)
let intro_TAC s = INTRO_TAC (Str.global_replace (Str.regexp ",") ";" s);;
(* assume statement lab tac : case split on statement via EXCLUDED_MIDDLE;
the positive case becomes assumption lab, the negative case is closed by
tac; finally the goal is simplified with the new assumption. *)
let assume statement lab tac (asl, w as gl) =
let t = parse_env_string (make_env gl) statement in
(DISJ_CASES_THEN (LABEL_TAC lab) (SPEC t EXCLUDED_MIDDLE) THENL
[ALL_TAC; FIRST_ASSUM MP_TAC THEN tac] THEN HYP SIMP_TAC lab []) gl;;
(* eq_tac "Left"/"Right" tac : split an iff goal with EQ_TAC, discharging
the named implication with tac (after SYM_CONV for "Right") and leaving
the other as the goal; any other string raises Readable_fail. *)
let eq_tac string tac =
  match string with
  | "Right" -> CONV_TAC SYM_CONV THEN EQ_TAC THENL [tac; ALL_TAC]
  | "Left" -> EQ_TAC THENL [tac; ALL_TAC]
  | _ -> raise (Readable_fail
      ("eq_tac requires " ^ string ^" to be either Left or Right"));;

(* conj_tac "Left"/"Right" tac : split a conjunction goal with CONJ_TAC
(commuting it first for "Right"), discharging the named conjunct with
tac and leaving the other as the goal. *)
let conj_tac string tac =
  match string with
  | "Right" -> ONCE_REWRITE_TAC [CONJ_SYM] THEN
      CONJ_TAC THENL [tac; ALL_TAC]
  | "Left" -> CONJ_TAC THENL [tac; ALL_TAC]
  | _ -> raise (Readable_fail
      ("conj_tac requires " ^ string ^" to be either Left or Right"));;
(* consider svars stm lab tac : prove ?svars. stm by tac, then destruct the
existential, introducing the witnesses and labelling the body lab. *)
let consider svars stm lab tac =
subgoal_THEN ("?"^ svars ^ ". "^ stm)
(DESTRUCT_TAC ("@"^ svars ^ "."^ lab)) THENL [tac; ALL_TAC];;
(* case_split sDestruct tac listofDisj listofTac : prove the disjunction of
listofDisj by tac, destruct it according to sDestruct ("l1 | ... | ln"),
and prove each resulting case with the corresponding tactic. *)
let case_split sDestruct tac listofDisj listofTac =
let disjunction = itlist
(fun s t -> if t = "" then "("^ s ^")" else "("^ s ^") \\/ "^ t)
listofDisj "" in
subgoal_TAC disjunction "" tac THEN
FIRST_X_ASSUM (DESTRUCT_TAC sDestruct) THENL listofTac;;
(* Readable aliases for common tactics and rewriting conversions. *)
let fol = MESON_TAC;;
let rewrite = REWRITE_TAC;;
let simplify = SIMP_TAC;;
let set = SET_TAC;;
let rewriteR = GEN_REWRITE_TAC (RAND_CONV);;
let rewriteL = GEN_REWRITE_TAC (LAND_CONV);;
let rewriteI = GEN_REWRITE_TAC I;;
let rewriteRLDepth = GEN_REWRITE_TAC (RAND_CONV o LAND_CONV o DEPTH_CONV);;
(* Lift a plain tactic to a thm list -> tactic by MP_TACing the theorems. *)
let TACtoThmTactic tac = fun ths -> MAP_EVERY MP_TAC ths THEN tac;;
let arithmetic = TACtoThmTactic ARITH_TAC;;
let real_arithmetic = TACtoThmTactic REAL_ARITH_TAC;;
let num_ring = TACtoThmTactic (CONV_TAC NUM_RING);;
let real_ring = TACtoThmTactic (CONV_TAC REAL_RING);;
(* Whitespace regexps: one-or-more and zero-or-more blanks/tabs/newlines. *)
let ws = "[ \t\n]+";;
let ws0 = "[ \t\n]*";;
(* StringRegexpEqual r s : does regexp r match the *whole* of s?
Str.string_match only anchors at the start, so additionally require that
the matched prefix is all of s. *)
let StringRegexpEqual r s =
  if Str.string_match r s 0 then s = Str.matched_string s else false;;
FindMatch sleft sright s
turns strings sleft and sright into regexps , recursively searches string
s for matched pairs of substrings matching sleft and sright , and returns
the position after the first substring matched by sright which is not
(* FindMatch sleft sright s : treat sleft/sright as regexps for left/right
bracketing operators, scan s keeping a nesting count, and return the
position just after the first unmatched right bracket. A right bracket
whose match ends in whitespace gives back that character (endpos - 1) so
the whitespace can still delimit the next token. Raises Readable_fail if
the brackets never balance. *)
let FindMatch sleft sright s =
let test = Str.regexp ("\("^ sleft ^"\|"^ sright ^"\)")
and left = Str.regexp sleft in
let rec FindMatchPosition s count =
if count = 1 then 0
else
try
ignore(Str.search_forward test s 0);
let TestMatch = Str.matched_group 1 s
and AfterTest = Str.match_end() in
let LastChar = Str.last_chars (Str.string_before s AfterTest) 1 in
let endpos =
if Str.string_match (Str.regexp ws) LastChar 0
then AfterTest - 1 else AfterTest in
let rest = Str.string_after s endpos
(* a left bracket deepens the nesting, a right bracket closes one *)
and increment =
if StringRegexpEqual left TestMatch then -1 else 1 in
endpos + (FindMatchPosition rest (count + increment))
with Not_found -> raise (Readable_fail
("No matching right bracket operator "^ sright ^
" to left bracket operator "^ sleft ^" in "^ s)) in
FindMatchPosition s 0;;
FindSemicolon uses FindMatch to find the position before the next
(* FindSemicolon s : position of the next top-level ';' in s, skipping any
';' that occurs inside balanced square brackets. Raises Readable_fail
if there is no final semicolon. *)
let rec FindSemicolon s =
try
let rec FindMatchPosition s pos =
let start = Str.search_forward (Str.regexp ";\|\[") s pos in
if Str.matched_string s = ";" then start
else
(* hit a '[': jump past its matching ']' and continue the search *)
let rest = Str.string_after s (start + 1) in
let MatchingSquareBrace = FindMatch "\[" "\]" rest in
let newpos = start + 1 + MatchingSquareBrace in
FindMatchPosition s newpos in
FindMatchPosition s 0
with Not_found -> raise (Readable_fail ("No final semicolon in "^ s));;
FindCases uses FindMatch to take a string
and return the list [ ; ; ... ; proof_n ] .
(* FindCases s : split "suppose proof_1 end; ... suppose proof_n end;" into
the list [proof_1; ...; proof_n], using FindMatch to pair each "suppose"
with its own "end;" even when cases nest. *)
let rec FindCases s =
let sleftCase, srightCase = ws^ "suppose"^ws, ws^ "end" ^ws0^ ";" in
if Str.string_match (Str.regexp sleftCase) s 0 then
let CaseEndRest = Str.string_after s (Str.match_end()) in
let PosAfterEnd = FindMatch sleftCase srightCase CaseEndRest in
let pos = Str.search_backward (Str.regexp srightCase)
CaseEndRest PosAfterEnd in
let case = Str.string_before CaseEndRest pos
and rest = Str.string_after CaseEndRest PosAfterEnd in
case :: (FindCases rest)
else [];;
StringToList uses FindSemicolon to turns a string into the list of
(* StringToList s : split s at its top-level semicolons (FindSemicolon skips
';' inside square brackets); a trailing piece without ';' is kept whole,
and an all-whitespace string yields []. *)
let rec StringToList s =
if StringRegexpEqual (Str.regexp ws0) s then [] else
if Str.string_match (Str.regexp "[^;]*;") s 0 then
let pos = FindSemicolon s in
let head = Str.string_before s pos in
head :: (StringToList (Str.string_after s (pos + 1)))
else [s];;
(* ExtractWsStringList string : if string begins (after whitespace) with a
'[...]' list, return (its ';'-separated items, the remainder after ']');
otherwise raise Not_found. *)
let ExtractWsStringList string =
if Str.string_match (Str.regexp (ws^ "\[")) string 0 then
let listRest = Str.string_after string (Str.match_end()) in
let RightBrace = FindMatch "\[" "\]" listRest in
let rest = Str.string_after listRest RightBrace
and list = Str.string_before listRest (RightBrace - 1) in
(StringToList list, rest)
else raise Not_found;;
theoremify string goal returns a pair ( thm , rest ) ,
where thm is the first theorem found on string , using goal if needed , and
rest is the remainder of string . Theoremify uses 3 helping functions :
1 ) CombTermThm_Term , which produces a combination of a term->thm
( e.g. ) with a term ,
2 ) CombThmlistTermThm_Thmlist_Term , which combines a thmlist->term->thm
( e.g. ) with a thmlist and a term , and
3 ) CombTermlistThmThm_Termlist , which combines a termlist->thm->thm
( e.g. SPECL ) with a termlist and a thm produced by theoremify .
Similar functions CombThmtactic_Thm and CombThmlisttactic_Thmlist are
used below , along with theoremify , by StringToTactic .
(* CombTermThm_Term word rest gl : word names a term->thm (e.g. ARITH_RULE);
parse the following "[term]" (which must contain exactly one term) in the
goal's environment and apply it, returning (thm, remaining input).
Raises Noparse if word is not a term->thm, Readable_fail on bad input. *)
let CombTermThm_Term word rest gl =
let TermThm = exec_term_thm word in
try
let (stermlist, wsRest) = ExtractWsStringList rest in
if length stermlist = 1 then
let term = (parse_env_string (make_env gl)) (hd stermlist) in
(TermThm term, wsRest)
else raise (Readable_fail ("term->thm "^ word
^" not followed by length 1 term list, but instead the list \n["^
String.concat ";" stermlist ^"]"))
with Not_found -> raise (Readable_fail ("term->thm "^ word
^" not followed by term list, but instead \n"^ rest));;
(* theoremify string gl : read the first theorem from string, returning
(thm, unconsumed rest). The leading word is tried, in order, as:
"-" (the goal's most recent assumption); a theorem bound at the OCaml
toplevel; a label in the goal's assumption list; a thm->thm applied to
the theorem that follows; a term->thm applied to a "[term]"; a
thmlist->term->thm applied to "[thms] [term]"; or a termlist->thm->thm
applied to "[terms]" and a following theorem. *)
let rec theoremify string gl =
if Str.string_match (Str.regexp (ws^ "\([^][ \t\n]+\)")) string 0 then
let word = Str.matched_group 1 string
and rest = Str.string_after string (Str.match_end()) in
if word = "-" then (snd (hd (fst gl)), rest) else
try (exec_thm word, rest)
with _ ->
try (assoc word (fst gl), rest)
with _ ->
try firstPairMult (exec_thm_thm word) (theoremify rest gl)
with _ ->
try CombTermThm_Term word rest gl
with Noparse ->
try CombThmlistTermThm_Thmlist_Term word rest gl
with Noparse ->
try CombTermlistThmThm_Termlist word rest gl
with Noparse -> raise (Readable_fail ("Not a theorem:\n"^ string))
else raise (Readable_fail ("Empty theorem:\n"^ string))
(* Apply f to the first component of a pair only. *)
and
firstPairMult f (a, b) = (f a, b)
(* word is a termlist->thm->thm (e.g. SPECL): parse "[terms] Thm". *)
and
CombTermlistThmThm_Termlist word rest gl =
let TermlistThmThm = exec_termlist_thm_thm word in
try
let (stermlist, WsThm) = ExtractWsStringList rest in
let termlist = map (parse_env_string (make_env gl)) stermlist in
firstPairMult (TermlistThmThm termlist) (theoremify WsThm gl)
with Not_found -> raise (Readable_fail ("termlist->thm->thm "^ word
^"\n not followed by term list in\n"^ rest))
(* word is a thmlist->term->thm (e.g. MESON): parse "[thms] [term]"; each
list element must itself theoremify completely. *)
and
CombThmlistTermThm_Thmlist_Term word rest gl =
let thm_create sthm =
let (thm, rest) = theoremify (" "^ sthm) gl in
if rest = "" then thm
else raise (Readable_fail ("an argument of thmlist->term->thm "^ word ^
"\n is not a theorem, but instead \n"^ sthm)) in
let ThmlistTermThm = exec_thmlist_term_thm word in
try
let (stermlist, wsTermRest) = ExtractWsStringList rest in
let thmlist = map thm_create stermlist in
if Str.string_match (Str.regexp (ws^ "\[")) wsTermRest 0 then
let termRest = Str.string_after wsTermRest (Str.match_end()) in
let RightBrace = FindMatch "\[" "\]" termRest in
let rest = Str.string_after termRest RightBrace
and sterm = Str.string_before termRest (RightBrace - 1) in
let term = parse_env_string (make_env gl) sterm in
(ThmlistTermThm thmlist term, rest)
else raise (Readable_fail ("thmlist->term->thm "^ word
^" followed by list of theorems ["^ String.concat ";" stermlist ^"]
not followed by term in\n"^ wsTermRest))
with Not_found -> raise (Readable_fail ("thmlist->term->thm "^ word
^" not followed by thm list in\n"^ rest));;
(* CombThmtactic_Thm step : step is "name thm" where name is a thm->tactic
(e.g. MATCH_MP_TAC); theoremify the argument, which must consume the
whole remainder, and apply the tactic. Not_found if the word does not
name a thm->tactic. *)
let CombThmtactic_Thm step =
if Str.string_match (Str.regexp (ws^ "\([a-zA-Z0-9_]+\)")) step 0 then
let sthm_tactic = Str.matched_group 1 step
and sthm = Str.string_after step (Str.match_end()) in
let thm_tactic = exec_thmtactic sthm_tactic in
fun gl ->
let (thm, rest) = theoremify sthm gl in
if rest = "" then thm_tactic thm gl
else raise (Readable_fail ("thm_tactic "^ sthm_tactic
^" not followed by a theorem, but instead\n"^ sthm))
else raise Not_found;;
(* CombThmlisttactic_Thmlist step : step is "name thm ... thm" where name is
a thm list -> tactic (e.g. REWRITE_TAC); theoremify the arguments one
after another (accumulated in reverse, hence the final List.rev) and
apply the tactic to the list. *)
let CombThmlisttactic_Thmlist step =
let rec makeThmListAccum string list gl =
if StringRegexpEqual (Str.regexp ws0) string then list else
let (thm, rest) = theoremify string gl in
makeThmListAccum rest (thm :: list) gl in
if Str.string_match (Str.regexp (ws^ "\([a-zA-Z0-9_]+\)")) step 0 then
let ttac = exec_thmlist_tactic (Str.matched_group 1 step)
and LabThmString = Str.string_after step (Str.match_end()) in
fun gl ->
let LabThmList = List.rev (makeThmListAccum LabThmString [] gl) in
ttac LabThmList gl
else raise Not_found;;
StringToTactic uses regexp functions from the library to transform a
string into a tactic . The allowable tactics are written in BNF form as
Tactic : = ALL_TAC | Tactic THEN Tactic | thm->tactic Thm |
one - word - tactic ( e.g. ARITH_TAC ) | thmlist->tactic Thm - list |
case_split string Tactic statement - list Tactic - list |
consider variable - list statement label Tactic |
( assume | subgoal_TAC ) statement label Tactic
termlist->thm->thm term - list Thm
The string proofs allowed by StringToTactic are written in BNF form as
Proof : = Proof THEN Proof | case_split destruct_string ByProofQed
suppose statement ; Proof end ; ... suppose statement ; Proof end ; |
( assume | ) statement [ label ] ByProofQed
OneStepProof : = one - word - tactic | thm->tactic Thm | intro_TAC string |
ByProofQed : = by OneStepProof ; | proof Proof Proof ... Proof qed ;
theorem is a version of prove based on the miz3.ml thm , with argument
statement ByProofQed
substring of the line beginning with : : is ignored by StringToTactic .
(* StringToTactic s : translate a readable proof script into a HOL Light
tactic. "::"-to-end-of-line comments are stripped first; the script is
then consumed one top-level ';'-terminated step at a time, each step's
tactic being chained with THEN. *)
let rec StringToTactic s =
let s = Str.global_replace (Str.regexp "::[^\n]*") "" s in
if StringRegexpEqual (Str.regexp ws0) s then ALL_TAC
else
try makeCaseSplit s
with _ ->
let pos = FindSemicolon s in
let step, rest = Str.string_before s pos, Str.string_after s (pos + 1) in
try
let tactic = StepToTactic step in
tactic THEN StringToTactic rest
with Not_found ->
let (tactic, rest) = BigStepToTactic s step in
tactic THEN StringToTactic rest
(* GetProof keyword s : read a subproof. For "by" it is the single step up
to the next ';'; for "proof" it is everything up to the matching "qed;",
translated recursively. Returns (tactic, remaining input). *)
and
GetProof ByProof s =
if ByProof = "by" then
let pos = FindSemicolon s in
let step, rest = Str.string_before s pos, Str.string_after s (pos + 1) in
(StepToTactic step, rest)
else
let pos_after_qed = FindMatch (ws^"proof"^ws) (ws^"qed"^ws0^";") s in
let pos = Str.search_backward (Str.regexp "qed") s pos_after_qed in
let proof = StringToTactic (Str.string_before s pos) in
(proof, Str.string_after s pos_after_qed)
(* Parse "case_split labs by/proof ...;" followed by the "suppose ... end;"
clauses, building the case_split tactic from the disjuncts and their
individual proofs. *)
and
makeCaseSplit s =
if Str.string_match (Str.regexp (ws^ "case_split" ^ws^ "\([^;]+\)" ^ws^
"\(by\|proof\)" ^ws)) s 0 then
let sDestruct = Str.matched_group 1 s
and (proof, rest) = GetProof (Str.matched_group 2 s)
(Str.string_after s (Str.group_end 2))
and SplitAtSemicolon case =
let pos = FindSemicolon case in
[Str.string_before case pos; Str.string_after case (pos + 1)] in
let list2Case = map SplitAtSemicolon (FindCases rest) in
let listofDisj = map hd list2Case
and listofTac = map (StringToTactic o hd o tl) list2Case in
case_split sDestruct proof listofDisj listofTac
else raise Not_found
(* One-step proofs: a bare tactic name, "thm->tactic Thm",
"thmlist->tactic Thms", or intro_TAC/exists_TAC/X_genl_TAC applied to
the rest of the step as a string. Not_found lets the caller try
BigStepToTactic next. *)
and
StepToTactic step =
try
if StringRegexpEqual (Str.regexp (ws^ "\([^ \t\n]+\)" ^ws0)) step then
exec_tactic (Str.matched_group 1 step)
else raise Not_found
with _ ->
try CombThmtactic_Thm step
with _ ->
try CombThmlisttactic_Thmlist step
with _ ->
if Str.string_match (Str.regexp (ws^ "intro_TAC" ^ws)) step 0 then
let intro_string = Str.string_after step (Str.match_end()) in
intro_TAC intro_string
else if Str.string_match (Str.regexp (ws^ "exists_TAC" ^ws)) step 0 then
let exists_string = Str.string_after step (Str.match_end()) in
exists_TAC exists_string
else if Str.string_match (Str.regexp (ws^ "X_genl_TAC" ^ws)) step 0 then
let genl_string = Str.string_after step (Str.match_end()) in
let svarlist = Str.split (Str.regexp ws) genl_string in
X_genl_TAC svarlist
else raise Not_found
(* Multi-part steps with a subproof: "consider vars such that stm [lab]
by/proof ...", or "statement [lab] by/proof ..." which dispatches to
eq_tac, conj_tac, assume, or the general subgoal_TAC. *)
and
BigStepToTactic s step =
if Str.string_match (Str.regexp (ws^ "consider" ^ws^ "\(\(.\|\n\)+\)" ^ws^
"such" ^ws^ "that" ^ws^ "\(\(.\|\n\)+\)" ^ws^ "\[\(\(.\|\n\)*\)\]" ^ws^
"\(by\|proof\)" ^ws)) step 0 then
let vars, t = Str.matched_group 1 step, Str.matched_group 3 step
and lab = Str.matched_group 5 step
and KeyWord, endKeyWord = Str.matched_group 7 step, (Str.group_end 7) in
let (proof, rest) = GetProof KeyWord (Str.string_after s endKeyWord) in
(consider vars t lab proof, rest)
else
try
let start = Str.search_forward (Str.regexp
(ws^ "\[\([^]]*\)\]" ^ws^ "\(by\|proof\)" ^ws)) step 0 in
let statement = Str.string_before step start
and lab = Str.matched_group 1 step
and KeyWord = Str.matched_group 2 step
and AfterWord = Str.string_after s (Str.group_end 2) in
let (proof, rest) = GetProof KeyWord AfterWord in
if StringRegexpEqual (Str.regexp (ws^ "eq_tac")) statement
then (eq_tac lab proof, rest)
else if StringRegexpEqual (Str.regexp (ws^ "conj_tac")) statement
then (conj_tac lab proof, rest)
else if
Str.string_match (Str.regexp (ws^ "\(assume\)" ^ws)) statement 0
then
let statement = Str.string_after statement (Str.match_end()) in
(assume statement lab proof, rest)
else (subgoal_TAC statement lab proof, rest)
with Not_found -> raise (Readable_fail
("Can't parse as a Proof:\n"^ step));;
(* theorem s : prove a statement written either as
"statement proof steps qed;" or as "statement by step", after cleaning
the math fonts. The statement is parsed in the empty environment; any
text after the final "qed;" is an error. *)
let theorem s =
let s = CleanMathFontsForHOL_Light s in
try
let start = Str.search_forward (Str.regexp
(ws^ "proof\(" ^ws^ "\(.\|\n\)*\)" ^ws ^ "qed" ^ws0^ ";" ^ws0)) s 0 in
let thm = Str.string_before s start
and proof = Str.matched_group 1 s
and rest = Str.string_after s (Str.match_end()) in
if rest = "" then prove (parse_env_string [] thm, StringToTactic proof)
else raise (Readable_fail
("Trailing garbage after the proof...qed:\n" ^ rest))
with Not_found ->
(* no proof...qed block: fall back to a one-step "statement by step" *)
try
let start = Str.search_forward (Str.regexp (ws^ "by")) s 0 in
let thm = Str.string_before s start
and proof = Str.string_after s (Str.match_end()) in
try
prove (parse_env_string [] thm, StepToTactic proof)
with Not_found -> raise (Readable_fail ("Not a proof:\n" ^ proof))
with Not_found -> raise (Readable_fail
("Missing initial \"proof\", \"by\", or final \"qed;\" in\n" ^ s));;
(* interactive_goal s : set s (after math-font cleanup) as the current goal
via the subgoal package. *)
let interactive_goal s =
let thm = CleanMathFontsForHOL_Light s in
g (parse_env_string [] thm);;
(* interactive_proof s : apply a readable proof fragment to the current
goalstate, so proofs can be developed incrementally. *)
let interactive_proof s =
let proof = CleanMathFontsForHOL_Light s in
e (StringToTactic proof);;
Two examples illustrating intro_TAC , eq_tac , exists_TAC MP_TAC and SPECL ,
then a port of the HOL Light tutorial proof that sqrt 2 is irrational .
(* Generalized Skolem theorem, illustrating intro_TAC, eq_tac and
exists_TAC with a lambda witness. *)
let SKOLEM_THM_GEN = theorem `;
∀P R. (∀x. P x ⇒ ∃y. R x y) ⇔ ∃f. ∀x. P x ⇒ R x (f x)
proof
intro_TAC ∀P R;
eq_tac [Right] by fol;
intro_TAC H1;
exists_TAC λx. @y. R x y;
fol H1;
qed;
`;;
(* MOD is idempotent for a nonzero modulus; illustrates MP_TAC with SPECL. *)
let MOD_MOD_REFL' = theorem `;
∀m n. ¬(n = 0) ⇒ ((m MOD n) MOD n = m MOD n)
proof
intro_TAC !m n, H1;
MP_TAC SPECL [m; n; 1] MOD_MOD;
fol H1 MULT_CLAUSES MULT_EQ_0 ONE NOT_SUC;
qed;
`;;
(* Irrationality of sqrt 2 (port of the HOL Light tutorial proof), using
well-founded induction, consider...such that, and a case_split. *)
let NSQRT_2 = theorem `;
∀p q. p * p = 2 * q * q ⇒ q = 0
proof
MATCH_MP_TAC num_WF;
intro_TAC ∀p, A, ∀q, B;
EVEN(p * p) ⇔ EVEN(2 * q * q) [] by fol B;
EVEN(p) [] by fol - EVEN_DOUBLE EVEN_MULT;
consider m such that p = 2 * m [C] by fol - EVEN_EXISTS;
case_split qp | pq by arithmetic;
suppose q < p;
q * q = 2 * m * m ⇒ m = 0 [] by fol qp A;
num_ring - B C;
end;
suppose p <= q;
p * p <= q * q [] by fol - LE_MULT2;
q * q = 0 [] by arithmetic - B;
num_ring -;
end;
qed;
`;;
proof / qed and case_split / suppose . You can evaluate an incomplete proof
(* The same NSQRT_2 proof developed interactively: incomplete "proof qed;"
and empty "suppose ... end;" clauses are filled in by the subsequent
interactive_proof fragments. *)
interactive_goal `;∀p q. p * p = 2 * q * q ⇒ q = 0
`;;
interactive_proof `;
MATCH_MP_TAC num_WF;
intro_TAC ∀p, A, ∀q, B;
EVEN(p * p) ⇔ EVEN(2 * q * q) [] proof qed;
`;;
(* discharge the pending subgoal left by the empty proof...qed above *)
interactive_proof `;
fol B;
`;;
interactive_proof `;
EVEN(p) [] by fol - EVEN_DOUBLE EVEN_MULT;
consider m such that p = 2 * m [C] proof fol - EVEN_EXISTS; qed;
`;;
(* open the case split; each suppose clause is proved by the fragments below *)
interactive_proof `;
case_split qp | pq by arithmetic;
suppose q < p;
end;
suppose p <= q;
end;
`;;
interactive_proof `;
q * q = 2 * m * m ⇒ m = 0 [] by fol qp A;
num_ring - B C;
`;;
interactive_proof `;
p * p <= q * q [] by fol - LE_MULT2;
q * q = 0 [] by arithmetic - B;
num_ring -;
`;;
(* capture the finished interactive proof as a theorem *)
let NSQRT_2 = top_thm();;
An port from arith.ml uses by instead of proof ... qed ; in a short proof :
(* Squaring as self-multiplication; a one-step "by" proof. *)
let EXP_2 = theorem `;
∀n:num. n EXP 2 = n * n
by rewrite BIT0_THM BIT1_THM EXP EXP_ADD MULT_CLAUSES ADD_CLAUSES`;;
An example using GSYM , , and , reproving
the binomial theorem from sec 13.1 - -2 of the HOL Light tutorial .
(* Binomial coefficients by primitive recursion (Pascal's rule). *)
let binom = define
`(!n. binom(n,0) = 1) /\
(!k. binom(0,SUC(k)) = 0) /\
(!n k. binom(SUC(n),SUC(k)) = binom(n,SUC(k)) + binom(n,k))`;;
(* binom(n,k) vanishes above the diagonal. *)
let BINOM_LT = theorem `;
∀n k. n < k ⇒ binom(n,k) = 0
proof
INDUCT_TAC; INDUCT_TAC;
rewrite binom ARITH LT_SUC LT;
ASM_SIMP_TAC ARITH_RULE [n < k ==> n < SUC(k)] ARITH;
qed;
`;;
(* The binomial theorem, by induction on n; reproves sec 13.1-2 of the
HOL Light tutorial in readable style. *)
let BINOMIAL_THEOREM = theorem `;
∀n. (x + y) EXP n = nsum(0..n) (\k. binom(n,k) * x EXP k * y EXP (n - k))
proof
∀f n. nsum (0.. SUC n) f = f(0) + nsum (0..n) (λi. f (SUC i)) [Nsum0SUC] by simplify LE_0 ADD1 NSUM_CLAUSES_LEFT NSUM_OFFSET;
MATCH_MP_TAC num_INDUCTION;
simplify EXP NSUM_SING_NUMSEG binom SUB_0 MULT_CLAUSES;
intro_TAC ∀n, nThm;
rewrite Nsum0SUC binom RIGHT_ADD_DISTRIB NSUM_ADD_NUMSEG GSYM NSUM_LMUL ADD_ASSOC;
rewriteR ADD_SYM;
rewriteRLDepth SUB_SUC EXP;
rewrite MULT_AC EQ_ADD_LCANCEL MESON [binom] [1 = binom(n, 0)] GSYM Nsum0SUC;
simplify NSUM_CLAUSES_RIGHT ARITH_RULE [0 < SUC n ∧ 0 <= SUC n] LT BINOM_LT MULT_CLAUSES ADD_CLAUSES SUC_SUB1;
simplify ARITH_RULE [k <= n ⇒ SUC n - k = SUC(n - k)] EXP MULT_AC;
qed;
`;;
|
550f2edcb96434b232e7d3e8aaaf02d8ae8dfc73775b399ce3f8e04a4a4b63f7 | HuwCampbell/grenade | feedforward.hs | # LANGUAGE BangPatterns #
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
import Control.Monad
import Control.Monad.Random
import Data.List ( foldl' )
import qualified Data.ByteString as B
import Data.Serialize
#if ! MIN_VERSION_base(4,13,0)
import Data.Semigroup ( (<>) )
#endif
import GHC.TypeLits
import qualified Numeric.LinearAlgebra.Static as SA
import Options.Applicative
import Grenade
-- The defininition for our simple feed forward network.
-- The type level lists represents the layers and the shapes passed through the layers.
-- One can see that for this demonstration we are using relu, tanh and logit non-linear
-- units, which can be easily subsituted for each other in and out.
--
With around 100000 examples , this should show two clear circles which have been learned by the network .
-- Fixed feed-forward architecture: 2 inputs -> 40 (Tanh) -> 10 (Relu)
-- -> 1 (Logit); the second type-level list gives the shape flowing
-- between successive layers.
type FFNet = Network '[ FullyConnected 2 40, Tanh, FullyConnected 40 10, Relu, FullyConnected 10 1, Logit ]
'[ 'D1 2, 'D1 40, 'D1 40, 'D1 10, 'D1 10, 'D1 1, 'D1 1]
-- A fresh network with randomly initialised weights.
randomNet :: MonadRandom m => m FFNet
randomNet = randomNetwork
-- Train the network on n randomly generated samples: input vectors are
-- scaled by (*2 - 1) into roughly [-1,1]^2 (assumes SA.randomVector
-- Uniform yields values in [0,1] -- confirm against hmatrix-static docs),
-- labelled 1 when inside either circle of radius 0.33 centred at the
-- constant vectors +0.33 / -0.33, else 0. Training is one fold of
-- `train` over the sample list.
netTrain :: FFNet -> LearningParameters -> Int -> IO FFNet
netTrain net0 rate n = do
inps <- replicateM n $ do
s <- getRandom
return $ S1D $ SA.randomVector s SA.Uniform * 2 - 1
let outs = flip map inps $ \(S1D v) ->
if v `inCircle` (fromRational 0.33, 0.33) || v `inCircle` (fromRational (-0.33), 0.33)
then S1D $ fromRational 1
else S1D $ fromRational 0
let trained = foldl' trainEach net0 (zip inps outs)
return trained
where
-- Euclidean membership test: is v within distance r of centre o?
inCircle :: KnownNat n => SA.R n -> (SA.R n, Double) -> Bool
v `inCircle` (o, r) = SA.norm_2 (v - o) <= r
-- Strict accumulator: one gradient step per (input, target) pair.
trainEach !network (i,o) = train rate network i o
-- Deserialise a previously saved FFNet from file; `fail`s in IO if the
-- bytes do not decode as this exact architecture.
netLoad :: FilePath -> IO FFNet
netLoad modelPath = do
modelData <- B.readFile modelPath
either fail return $ runGet (get :: Get FFNet) modelData
-- Render the network's output over a 51x21 grid of the input square as
-- ASCII art: grid coordinates are mapped back into the [-1,1] range and
-- the activation level is bucketed into ' ', '.', '-', '=', '#'.
netScore :: FFNet -> IO ()
netScore network = do
let testIns = [ [ (x,y) | x <- [0..50] ]
| y <- [0..20] ]
outMat = fmap (fmap (\(x,y) -> (render . normx) $ runNet network (S1D $ SA.vector [x / 25 - 1,y / 10 - 1]))) testIns
putStrLn $ unlines outMat
where
-- Five-level greyscale for an activation in [0,1].
render n' | n' <= 0.2 = ' '
| n' <= 0.4 = '.'
| n' <= 0.6 = '-'
| n' <= 0.8 = '='
| otherwise = '#'
-- Collapse the 1-element output shape to a Double.
normx :: S ('D1 1) -> Double
normx (S1D r) = SA.mean r
-- Command-line options: example count, learning parameters, and optional
-- model load/save paths.
data FeedForwardOpts = FeedForwardOpts Int LearningParameters (Maybe FilePath) (Maybe FilePath)
-- Parser with defaults: 100000 examples (-e), rate 0.01 (-r),
-- momentum 0.9, l2 0.0005; --load/--save are optional.
feedForward' :: Parser FeedForwardOpts
feedForward' =
FeedForwardOpts <$> option auto (long "examples" <> short 'e' <> value 100000)
<*> (LearningParameters
<$> option auto (long "train_rate" <> short 'r' <> value 0.01)
<*> option auto (long "momentum" <> value 0.9)
<*> option auto (long "l2" <> value 0.0005)
)
<*> optional (strOption (long "load"))
<*> optional (strOption (long "save"))
-- Parse options, load a saved net if requested (otherwise start random),
-- train on the generated samples, print the ASCII score plot, and
-- optionally serialise the trained network.
main :: IO ()
main = do
FeedForwardOpts examples rate load save <- execParser (info (feedForward' <**> helper) idm)
net0 <- case load of
Just loadFile -> netLoad loadFile
Nothing -> randomNet
net <- netTrain net0 rate examples
netScore net
case save of
Just saveFile -> B.writeFile saveFile $ runPut (put net)
Nothing -> return ()
# LANGUAGE DataKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
The defininition for our simple feed forward network.
The type level lists represents the layers and the shapes passed through the layers.
One can see that for this demonstration we are using relu, tanh and logit non-linear
units, which can be easily subsituted for each other in and out.
| # LANGUAGE BangPatterns #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
import Control.Monad
import Control.Monad.Random
import Data.List ( foldl' )
import qualified Data.ByteString as B
import Data.Serialize
#if ! MIN_VERSION_base(4,13,0)
import Data.Semigroup ( (<>) )
#endif
import GHC.TypeLits
import qualified Numeric.LinearAlgebra.Static as SA
import Options.Applicative
import Grenade
With around 100000 examples , this should show two clear circles which have been learned by the network .
-- NOTE(review): this region is the dataset export's comment-stripped
-- duplicate (the "code" column) of the feedforward.hs definitions that
-- appear earlier in this dump; kept verbatim.
type FFNet = Network '[ FullyConnected 2 40, Tanh, FullyConnected 40 10, Relu, FullyConnected 10 1, Logit ]
'[ 'D1 2, 'D1 40, 'D1 40, 'D1 10, 'D1 10, 'D1 1, 'D1 1]
randomNet :: MonadRandom m => m FFNet
randomNet = randomNetwork
netTrain :: FFNet -> LearningParameters -> Int -> IO FFNet
netTrain net0 rate n = do
inps <- replicateM n $ do
s <- getRandom
return $ S1D $ SA.randomVector s SA.Uniform * 2 - 1
let outs = flip map inps $ \(S1D v) ->
if v `inCircle` (fromRational 0.33, 0.33) || v `inCircle` (fromRational (-0.33), 0.33)
then S1D $ fromRational 1
else S1D $ fromRational 0
let trained = foldl' trainEach net0 (zip inps outs)
return trained
where
inCircle :: KnownNat n => SA.R n -> (SA.R n, Double) -> Bool
v `inCircle` (o, r) = SA.norm_2 (v - o) <= r
trainEach !network (i,o) = train rate network i o
netLoad :: FilePath -> IO FFNet
netLoad modelPath = do
modelData <- B.readFile modelPath
either fail return $ runGet (get :: Get FFNet) modelData
netScore :: FFNet -> IO ()
netScore network = do
let testIns = [ [ (x,y) | x <- [0..50] ]
| y <- [0..20] ]
outMat = fmap (fmap (\(x,y) -> (render . normx) $ runNet network (S1D $ SA.vector [x / 25 - 1,y / 10 - 1]))) testIns
putStrLn $ unlines outMat
where
render n' | n' <= 0.2 = ' '
| n' <= 0.4 = '.'
| n' <= 0.6 = '-'
| n' <= 0.8 = '='
| otherwise = '#'
normx :: S ('D1 1) -> Double
normx (S1D r) = SA.mean r
data FeedForwardOpts = FeedForwardOpts Int LearningParameters (Maybe FilePath) (Maybe FilePath)
feedForward' :: Parser FeedForwardOpts
feedForward' =
FeedForwardOpts <$> option auto (long "examples" <> short 'e' <> value 100000)
<*> (LearningParameters
<$> option auto (long "train_rate" <> short 'r' <> value 0.01)
<*> option auto (long "momentum" <> value 0.9)
<*> option auto (long "l2" <> value 0.0005)
)
<*> optional (strOption (long "load"))
<*> optional (strOption (long "save"))
main :: IO ()
main = do
FeedForwardOpts examples rate load save <- execParser (info (feedForward' <**> helper) idm)
net0 <- case load of
Just loadFile -> netLoad loadFile
Nothing -> randomNet
net <- netTrain net0 rate examples
netScore net
case save of
Just saveFile -> B.writeFile saveFile $ runPut (put net)
Nothing -> return ()
|
328387b8a1cb6a6935ecf4b482246b125fa3b2df02697179665ac1fbfe7c9da6 | rubenbarroso/EOPL | 4_14.scm | It might seem that the problem with the following expression
letrec
? even(? odd, ? x) =
if zero?(x) then 1 else (odd sub1(x))
in letrec
? odd(bool x) =
if zero?(x) then 0 else (even odd sub1(x))
in (odd 13)
is that, in the body of even, we use x as an int in zero?(x), but then we pass it to (odd sub1(x)), which defines its
parameter as a bool. But the real issue, I think, lies in odd not being in the scope of even, since odd is defined in
the body of the letrec. | null | https://raw.githubusercontent.com/rubenbarroso/EOPL/f9b3c03c2fcbaddf64694ee3243d54be95bfe31d/src/chapter4/4_14.scm | scheme | It might seem that the problem with the following expression
letrec
? even(? odd, ? x) =
if zero?(x) then 1 else (odd sub1(x))
in letrec
? odd(bool x) =
if zero?(x) then 0 else (even odd sub1(x))
in (odd 13)
is that, in the body of even, we use x as an int in zero?(x), but then we pass it to (odd sub1(x)), which defines its
parameter as a bool. But the real issue I think lies on odd not being in the scope of even, since odd is defined in
the body of the letrec. | |
(* kupl/FixML sub6.ml -- maximum element of an int list.
Returns 0 for the empty list, so the result is never below 0 (e.g. an
all-negative list yields 0), matching the original's semantics.
Fixed: the original evaluated (max t) twice per cons cell, making the
running time exponential in the list length; binding the recursive
result once makes it linear. *)
let rec max l =
  match l with
  | [] -> 0
  | h :: t ->
      let m = max t in
      if h > m then h else m;;
2bb60bac37e3b90c24fc5c7f2105b0cf7fbab40042289f0a60833c0cbfbe810d | awslabs/s2n-bignum | bignum_optneg.ml |
* Copyright Amazon.com , Inc. or its affiliates . All Rights Reserved .
* SPDX - License - Identifier : Apache-2.0 OR ISC
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0 OR ISC
*)
(* ========================================================================= *)
(* Optional negation of bignums. *)
(* ========================================================================= *)
(**** print_literal_from_elf "x86/generic/bignum_optneg.o";;
****)
let bignum_optneg_mc =
define_assert_from_elf "bignum_optneg_mc" "x86/generic/bignum_optneg.o"
[
0x48; 0x31; 0xc0; (* XOR (% rax) (% rax) *)
0x48; 0x85; 0xff; (* TEST (% rdi) (% rdi) *)
JE ( Imm8 ( word 50 ) )
0x48; 0xf7; 0xda; (* NEG (% rdx) *)
SBB ( % rdx ) ( % rdx )
0x48; 0x29; 0xd0; (* SUB (% rax) (% rdx) *)
0x4d; 0x31; 0xc9; (* XOR (% r9) (% r9) *)
MOV ( % r8 ) ( ( % % % ( rcx,3,r9 ) ) )
0x49; 0x31; 0xd0; (* XOR (% r8) (% rdx) *)
0x49; 0x01; 0xc0; (* ADD (% r8) (% rax) *)
0xb8; 0x00; 0x00; 0x00; 0x00;
MOV ( % eax ) ( Imm32 ( word 0 ) )
MOV ( ( % % % ( rsi,3,r9 ) ) ) ( % r8 )
0x48; 0x83; 0xd0; 0x00; (* ADC (% rax) (Imm8 (word 0)) *)
0x49; 0xff; 0xc1; (* INC (% r9) *)
CMP ( % r9 ) ( % rdi )
JB ( Imm8 ( word 225 ) )
0x48; 0x31; 0xd0; (* XOR (% rax) (% rdx) *)
AND ( % rax ) ( Imm8 ( word 1 ) )
RET
];;
let BIGNUM_OPTNEG_EXEC = X86_MK_CORE_EXEC_RULE bignum_optneg_mc;;
(* ------------------------------------------------------------------------- *)
(* Correctness proof. *)
(* ------------------------------------------------------------------------- *)
let BIGNUM_OPTNEG_CORRECT = prove
(`!k z p x a pc.
nonoverlapping (word pc,0x3b) (z,8 * val k) /\
(x = z \/ nonoverlapping(x,8 * val k) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) (BUTLAST bignum_optneg_mc) /\
read RIP s = word pc /\
C_ARGUMENTS [k;z;p;x] s /\
bignum_from_memory (x,val k) s = a)
(\s. read RIP s = word(pc + 0x3a) /\
bignum_from_memory(z,val k) s =
(if p = word 0 \/ a = 0 then a else 2 EXP (64 * val k) - a) /\
C_RETURN s = word(bitval(~(p = word 0) /\ ~(a = 0))))
(MAYCHANGE [RIP; RAX; RDX; R8; R9] ,,
MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
W64_GEN_TAC `k:num` THEN
MAP_EVERY X_GEN_TAC
[`z:int64`; `p:int64`; `x:int64`; `a:num`; `pc:num`] THEN
REWRITE_TAC[NONOVERLAPPING_CLAUSES] THEN
REWRITE_TAC[C_ARGUMENTS; C_RETURN; SOME_FLAGS] THEN
DISCH_THEN(REPEAT_TCL CONJUNCTS_THEN ASSUME_TAC) THEN
BIGNUM_TERMRANGE_TAC `k:num` `a:num` THEN
(*** The trivial k = 0 case ***)
ASM_CASES_TAC `k = 0` THENL
[UNDISCH_THEN `k = 0` SUBST_ALL_TAC THEN RULE_ASSUM_TAC
(REWRITE_RULE[ARITH_RULE `a < 2 EXP (64 * 0) <=> a = 0`]) THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--3) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN
REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; BIGNUM_FROM_MEMORY_TRIVIAL] THEN
REWRITE_TAC[BITVAL_CLAUSES];
ALL_TAC] THEN
(*** Get a basic bound on k from the nonoverlapping assumptions ***)
FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ]
NONOVERLAPPING_IMP_SMALL_RIGHT_ALT)) THEN
ANTS_TAC THENL [CONV_TAC NUM_REDUCE_CONV; DISCH_TAC] THEN
(*** Main loop setup ***)
ABBREV_TAC `m <=> ~(p:int64 = word 0)` THEN
ENSURES_WHILE_UP_TAC `k:num` `pc + 0x14` `pc + 0x2e`
`\i s. read RSI s = z /\
read RCX s = x /\
read RDX s = word_neg(word(bitval m)) /\
read RDI s = word k /\
read R9 s = word i /\
val(read RAX s) <= 1 /\
bignum_from_memory (word_add x (word(8 * i)),k - i) s =
highdigits a i /\
2 EXP (64 * i) * val(read RAX s) + bignum_from_memory(z,i) s =
(if p:int64 = word 0 then lowdigits a i
else 2 EXP (64 * i) - lowdigits a i)` THEN
ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL
[ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--7) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN
REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; BIGNUM_FROM_MEMORY_TRIVIAL] THEN
REWRITE_TAC[LOWDIGITS_0; HIGHDIGITS_0; ADD_CLAUSES; MULT_CLAUSES;
SUB_0; BITVAL_CLAUSES; WORD_ADD_0] THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
SIMP_TAC[WORD_SUB_LZERO; WORD_NEG_NEG; VAL_WORD_BITVAL; BITVAL_BOUND] THEN
EXPAND_TAC "m" THEN
REWRITE_TAC[WORD_SUB_0; VAL_EQ_0; bitval; COND_SWAP] THEN
ASM_REWRITE_TAC[GSYM MSB_IVAL; DIMINDEX_64; ARITH_RULE `64 - 1 = 63`] THEN
ASM_CASES_TAC `p:int64 = word 0` THEN ASM_REWRITE_TAC[] THEN
CONV_TAC WORD_REDUCE_CONV THEN CONV_TAC NUM_REDUCE_CONV;
ALL_TAC; (*** Main loop invariant ***)
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--2) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[];
GHOST_INTRO_TAC `cinn:num` `\s. val(read RAX s)` THEN
GLOBALIZE_PRECONDITION_TAC THEN
FIRST_X_ASSUM(X_CHOOSE_THEN `cin:bool` SUBST_ALL_TAC o
GEN_REWRITE_RULE I [NUM_AS_BITVAL]) THEN
REWRITE_TAC[VAL_EQ_BITVAL] THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--4) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN
REWRITE_TAC[ADD_CLAUSES; WORD_SUB_LZERO; WORD_NEG_NEG] THEN
FIRST_X_ASSUM(MP_TAC o check (is_cond o rand o concl)) THEN
UNDISCH_THEN `~(p:int64 = word 0) <=> m` (SUBST1_TAC o SYM) THEN
ASM_SIMP_TAC[LOWDIGITS_SELF; WORD_XOR_CONDITIONS] THEN DISCH_THEN(fun th ->
MP_TAC(AP_TERM `\x. x DIV 2 EXP (64 * k)` th) THEN
MP_TAC(AP_TERM `\x. x MOD 2 EXP (64 * k)` th)) THEN
SIMP_TAC[DIV_MULT_ADD; MOD_MULT_ADD; EXP_EQ_0; ARITH_EQ] THEN
REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN
ASM_SIMP_TAC[DIV_LT; MOD_LT; BIGNUM_FROM_MEMORY_BOUND; ADD_CLAUSES] THEN
ASM_CASES_TAC `p:int64 = word 0` THEN
ASM_SIMP_TAC[BITVAL_CLAUSES; MOD_LT; DIV_LT] THEN
ASM_CASES_TAC `a = 0` THEN
ASM_SIMP_TAC[SUB_0; BITVAL_CLAUSES; BITVAL_EQ_1; BITVAL_EQ_0;
DIV_LT; MOD_LT; DIV_REFL; MOD_REFL; EXP_EQ_0; ARITH_EQ;
ARITH_RULE `~(m = 0) /\ ~(n = 0) ==> m - n < m`] THEN
CONV_TAC WORD_REDUCE_CONV] THEN
(*** Proof of the main invariant ***)
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
SUBGOAL_THEN `i:num < k` ASSUME_TAC THENL
[SIMPLE_ARITH_TAC; ALL_TAC] THEN
GHOST_INTRO_TAC `cinn:num` `\s. val(read RAX s)` THEN
GLOBALIZE_PRECONDITION_TAC THEN
REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64] THEN
GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV)
[BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN
ASM_REWRITE_TAC[SUB_EQ_0; GSYM NOT_LT] THEN
REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN
X86_ACCSTEPS_TAC BIGNUM_OPTNEG_EXEC [3] (1--7) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN
REWRITE_TAC[VAL_WORD_BITVAL; BITVAL_BOUND] THEN
FIRST_X_ASSUM(X_CHOOSE_THEN `cin:bool` SUBST_ALL_TAC o
GEN_REWRITE_RULE I [NUM_AS_BITVAL]) THEN
FIRST_X_ASSUM(MP_TAC o check (is_cond o rand o concl)) THEN
REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ONCE_REWRITE_TAC[COND_RAND] THEN
SIMP_TAC[GSYM REAL_OF_NUM_SUB; LOWDIGITS_BOUND; LT_IMP_LE] THEN
REWRITE_TAC[ARITH_RULE `64 * (i + 1) = 64 * i + 64`] THEN
REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES; REAL_POW_ADD] THEN
DISCH_THEN(SUBST1_TAC o MATCH_MP (REAL_ARITH
`c + s:real = x ==> s = x - c`)) THEN
ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ o DESUM_RULE) THEN
DISCH_THEN SUBST1_TAC THEN REWRITE_TAC[WORD_XOR_MASK] THEN
UNDISCH_THEN `~(p:int64 = word 0) <=> m` (SUBST1_TAC o SYM) THEN
REWRITE_TAC[LOWDIGITS_CLAUSES; GSYM REAL_OF_NUM_CLAUSES] THEN
ASM_CASES_TAC `p:int64 = word 0` THEN
ASM_SIMP_TAC[VAL_WORD_EQ; BIGDIGIT_BOUND; DIMINDEX_64;
REAL_VAL_WORD_NOT] THEN
REAL_ARITH_TAC);;
let BIGNUM_OPTNEG_SUBROUTINE_CORRECT = prove
(`!k z p x a pc stackpointer returnaddress.
nonoverlapping (word pc,0x3b) (z,8 * val k) /\
nonoverlapping (stackpointer,8) (z,8 * val k) /\
(x = z \/ nonoverlapping(x,8 * val k) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) bignum_optneg_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
C_ARGUMENTS [k;z;p;x] s /\
bignum_from_memory (x,val k) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory(z,val k) s =
(if p = word 0 \/ a = 0 then a else 2 EXP (64 * val k) - a) /\
C_RETURN s = word(bitval(~(p = word 0) /\ ~(a = 0))))
(MAYCHANGE [RIP; RSP; RAX; RDX; R8; R9] ,,
MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
X86_PROMOTE_RETURN_NOSTACK_TAC bignum_optneg_mc BIGNUM_OPTNEG_CORRECT);;
(* ------------------------------------------------------------------------- *)
(* Correctness of Windows ABI version. *)
(* ------------------------------------------------------------------------- *)
let windows_bignum_optneg_mc = define_from_elf
"windows_bignum_optneg_mc" "x86/generic/bignum_optneg.obj";;
let WINDOWS_BIGNUM_OPTNEG_SUBROUTINE_CORRECT = prove
(`!k z p x a pc stackpointer returnaddress.
ALL (nonoverlapping (word_sub stackpointer (word 16),16))
[(word pc,0x4b); (x,8 * val k)] /\
nonoverlapping (word pc,0x4b) (z,8 * val k) /\
nonoverlapping (word_sub stackpointer (word 16),24) (z,8 * val k) /\
(x = z \/ nonoverlapping(x,8 * val k) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) windows_bignum_optneg_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
WINDOWS_C_ARGUMENTS [k;z;p;x] s /\
bignum_from_memory (x,val k) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory(z,val k) s =
(if p = word 0 \/ a = 0 then a else 2 EXP (64 * val k) - a) /\
WINDOWS_C_RETURN s = word(bitval(~(p = word 0) /\ ~(a = 0))))
(MAYCHANGE [RIP; RSP; RCX; RAX; RDX; R8; R9] ,,
MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k);
memory :> bytes(word_sub stackpointer (word 16),16)])`,
WINDOWS_X86_WRAP_NOSTACK_TAC windows_bignum_optneg_mc bignum_optneg_mc
BIGNUM_OPTNEG_CORRECT);;
| null | https://raw.githubusercontent.com/awslabs/s2n-bignum/824c15f908d7a343af1b2f378cfedd36e880bdde/x86/proofs/bignum_optneg.ml | ocaml | =========================================================================
Optional negation of bignums.
=========================================================================
*** print_literal_from_elf "x86/generic/bignum_optneg.o";;
***
XOR (% rax) (% rax)
TEST (% rdi) (% rdi)
NEG (% rdx)
SUB (% rax) (% rdx)
XOR (% r9) (% r9)
XOR (% r8) (% rdx)
ADD (% r8) (% rax)
ADC (% rax) (Imm8 (word 0))
INC (% r9)
XOR (% rax) (% rdx)
-------------------------------------------------------------------------
Correctness proof.
-------------------------------------------------------------------------
** The trivial k = 0 case **
** Get a basic bound on k from the nonoverlapping assumptions **
** Main loop setup **
** Main loop invariant **
** Proof of the main invariant **
-------------------------------------------------------------------------
Correctness of Windows ABI version.
------------------------------------------------------------------------- |
* Copyright Amazon.com , Inc. or its affiliates . All Rights Reserved .
* SPDX - License - Identifier : Apache-2.0 OR ISC
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0 OR ISC
*)
let bignum_optneg_mc =
define_assert_from_elf "bignum_optneg_mc" "x86/generic/bignum_optneg.o"
[
JE ( Imm8 ( word 50 ) )
SBB ( % rdx ) ( % rdx )
MOV ( % r8 ) ( ( % % % ( rcx,3,r9 ) ) )
0xb8; 0x00; 0x00; 0x00; 0x00;
MOV ( % eax ) ( Imm32 ( word 0 ) )
MOV ( ( % % % ( rsi,3,r9 ) ) ) ( % r8 )
CMP ( % r9 ) ( % rdi )
JB ( Imm8 ( word 225 ) )
AND ( % rax ) ( Imm8 ( word 1 ) )
RET
];;
let BIGNUM_OPTNEG_EXEC = X86_MK_CORE_EXEC_RULE bignum_optneg_mc;;
let BIGNUM_OPTNEG_CORRECT = prove
(`!k z p x a pc.
nonoverlapping (word pc,0x3b) (z,8 * val k) /\
(x = z \/ nonoverlapping(x,8 * val k) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) (BUTLAST bignum_optneg_mc) /\
read RIP s = word pc /\
C_ARGUMENTS [k;z;p;x] s /\
bignum_from_memory (x,val k) s = a)
(\s. read RIP s = word(pc + 0x3a) /\
bignum_from_memory(z,val k) s =
(if p = word 0 \/ a = 0 then a else 2 EXP (64 * val k) - a) /\
C_RETURN s = word(bitval(~(p = word 0) /\ ~(a = 0))))
(MAYCHANGE [RIP; RAX; RDX; R8; R9] ,,
MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
W64_GEN_TAC `k:num` THEN
MAP_EVERY X_GEN_TAC
[`z:int64`; `p:int64`; `x:int64`; `a:num`; `pc:num`] THEN
REWRITE_TAC[NONOVERLAPPING_CLAUSES] THEN
REWRITE_TAC[C_ARGUMENTS; C_RETURN; SOME_FLAGS] THEN
DISCH_THEN(REPEAT_TCL CONJUNCTS_THEN ASSUME_TAC) THEN
BIGNUM_TERMRANGE_TAC `k:num` `a:num` THEN
ASM_CASES_TAC `k = 0` THENL
[UNDISCH_THEN `k = 0` SUBST_ALL_TAC THEN RULE_ASSUM_TAC
(REWRITE_RULE[ARITH_RULE `a < 2 EXP (64 * 0) <=> a = 0`]) THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--3) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN
REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; BIGNUM_FROM_MEMORY_TRIVIAL] THEN
REWRITE_TAC[BITVAL_CLAUSES];
ALL_TAC] THEN
FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ]
NONOVERLAPPING_IMP_SMALL_RIGHT_ALT)) THEN
ANTS_TAC THENL [CONV_TAC NUM_REDUCE_CONV; DISCH_TAC] THEN
ABBREV_TAC `m <=> ~(p:int64 = word 0)` THEN
ENSURES_WHILE_UP_TAC `k:num` `pc + 0x14` `pc + 0x2e`
`\i s. read RSI s = z /\
read RCX s = x /\
read RDX s = word_neg(word(bitval m)) /\
read RDI s = word k /\
read R9 s = word i /\
val(read RAX s) <= 1 /\
bignum_from_memory (word_add x (word(8 * i)),k - i) s =
highdigits a i /\
2 EXP (64 * i) * val(read RAX s) + bignum_from_memory(z,i) s =
(if p:int64 = word 0 then lowdigits a i
else 2 EXP (64 * i) - lowdigits a i)` THEN
ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL
[ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--7) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN
REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; BIGNUM_FROM_MEMORY_TRIVIAL] THEN
REWRITE_TAC[LOWDIGITS_0; HIGHDIGITS_0; ADD_CLAUSES; MULT_CLAUSES;
SUB_0; BITVAL_CLAUSES; WORD_ADD_0] THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
SIMP_TAC[WORD_SUB_LZERO; WORD_NEG_NEG; VAL_WORD_BITVAL; BITVAL_BOUND] THEN
EXPAND_TAC "m" THEN
REWRITE_TAC[WORD_SUB_0; VAL_EQ_0; bitval; COND_SWAP] THEN
ASM_REWRITE_TAC[GSYM MSB_IVAL; DIMINDEX_64; ARITH_RULE `64 - 1 = 63`] THEN
ASM_CASES_TAC `p:int64 = word 0` THEN ASM_REWRITE_TAC[] THEN
CONV_TAC WORD_REDUCE_CONV THEN CONV_TAC NUM_REDUCE_CONV;
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--2) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[];
GHOST_INTRO_TAC `cinn:num` `\s. val(read RAX s)` THEN
GLOBALIZE_PRECONDITION_TAC THEN
FIRST_X_ASSUM(X_CHOOSE_THEN `cin:bool` SUBST_ALL_TAC o
GEN_REWRITE_RULE I [NUM_AS_BITVAL]) THEN
REWRITE_TAC[VAL_EQ_BITVAL] THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN X86_STEPS_TAC BIGNUM_OPTNEG_EXEC (1--4) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN
REWRITE_TAC[ADD_CLAUSES; WORD_SUB_LZERO; WORD_NEG_NEG] THEN
FIRST_X_ASSUM(MP_TAC o check (is_cond o rand o concl)) THEN
UNDISCH_THEN `~(p:int64 = word 0) <=> m` (SUBST1_TAC o SYM) THEN
ASM_SIMP_TAC[LOWDIGITS_SELF; WORD_XOR_CONDITIONS] THEN DISCH_THEN(fun th ->
MP_TAC(AP_TERM `\x. x DIV 2 EXP (64 * k)` th) THEN
MP_TAC(AP_TERM `\x. x MOD 2 EXP (64 * k)` th)) THEN
SIMP_TAC[DIV_MULT_ADD; MOD_MULT_ADD; EXP_EQ_0; ARITH_EQ] THEN
REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN
ASM_SIMP_TAC[DIV_LT; MOD_LT; BIGNUM_FROM_MEMORY_BOUND; ADD_CLAUSES] THEN
ASM_CASES_TAC `p:int64 = word 0` THEN
ASM_SIMP_TAC[BITVAL_CLAUSES; MOD_LT; DIV_LT] THEN
ASM_CASES_TAC `a = 0` THEN
ASM_SIMP_TAC[SUB_0; BITVAL_CLAUSES; BITVAL_EQ_1; BITVAL_EQ_0;
DIV_LT; MOD_LT; DIV_REFL; MOD_REFL; EXP_EQ_0; ARITH_EQ;
ARITH_RULE `~(m = 0) /\ ~(n = 0) ==> m - n < m`] THEN
CONV_TAC WORD_REDUCE_CONV] THEN
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
SUBGOAL_THEN `i:num < k` ASSUME_TAC THENL
[SIMPLE_ARITH_TAC; ALL_TAC] THEN
GHOST_INTRO_TAC `cinn:num` `\s. val(read RAX s)` THEN
GLOBALIZE_PRECONDITION_TAC THEN
REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64] THEN
GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV)
[BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN
ASM_REWRITE_TAC[SUB_EQ_0; GSYM NOT_LT] THEN
REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN
ENSURES_INIT_TAC "s0" THEN
X86_ACCSTEPS_TAC BIGNUM_OPTNEG_EXEC [3] (1--7) THEN
ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN
REWRITE_TAC[VAL_WORD_BITVAL; BITVAL_BOUND] THEN
FIRST_X_ASSUM(X_CHOOSE_THEN `cin:bool` SUBST_ALL_TAC o
GEN_REWRITE_RULE I [NUM_AS_BITVAL]) THEN
FIRST_X_ASSUM(MP_TAC o check (is_cond o rand o concl)) THEN
REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ONCE_REWRITE_TAC[COND_RAND] THEN
SIMP_TAC[GSYM REAL_OF_NUM_SUB; LOWDIGITS_BOUND; LT_IMP_LE] THEN
REWRITE_TAC[ARITH_RULE `64 * (i + 1) = 64 * i + 64`] THEN
REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES; REAL_POW_ADD] THEN
DISCH_THEN(SUBST1_TAC o MATCH_MP (REAL_ARITH
`c + s:real = x ==> s = x - c`)) THEN
ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ o DESUM_RULE) THEN
DISCH_THEN SUBST1_TAC THEN REWRITE_TAC[WORD_XOR_MASK] THEN
UNDISCH_THEN `~(p:int64 = word 0) <=> m` (SUBST1_TAC o SYM) THEN
REWRITE_TAC[LOWDIGITS_CLAUSES; GSYM REAL_OF_NUM_CLAUSES] THEN
ASM_CASES_TAC `p:int64 = word 0` THEN
ASM_SIMP_TAC[VAL_WORD_EQ; BIGDIGIT_BOUND; DIMINDEX_64;
REAL_VAL_WORD_NOT] THEN
REAL_ARITH_TAC);;
let BIGNUM_OPTNEG_SUBROUTINE_CORRECT = prove
(`!k z p x a pc stackpointer returnaddress.
nonoverlapping (word pc,0x3b) (z,8 * val k) /\
nonoverlapping (stackpointer,8) (z,8 * val k) /\
(x = z \/ nonoverlapping(x,8 * val k) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) bignum_optneg_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
C_ARGUMENTS [k;z;p;x] s /\
bignum_from_memory (x,val k) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory(z,val k) s =
(if p = word 0 \/ a = 0 then a else 2 EXP (64 * val k) - a) /\
C_RETURN s = word(bitval(~(p = word 0) /\ ~(a = 0))))
(MAYCHANGE [RIP; RSP; RAX; RDX; R8; R9] ,,
MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
X86_PROMOTE_RETURN_NOSTACK_TAC bignum_optneg_mc BIGNUM_OPTNEG_CORRECT);;
let windows_bignum_optneg_mc = define_from_elf
"windows_bignum_optneg_mc" "x86/generic/bignum_optneg.obj";;
let WINDOWS_BIGNUM_OPTNEG_SUBROUTINE_CORRECT = prove
(`!k z p x a pc stackpointer returnaddress.
ALL (nonoverlapping (word_sub stackpointer (word 16),16))
[(word pc,0x4b); (x,8 * val k)] /\
nonoverlapping (word pc,0x4b) (z,8 * val k) /\
nonoverlapping (word_sub stackpointer (word 16),24) (z,8 * val k) /\
(x = z \/ nonoverlapping(x,8 * val k) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) windows_bignum_optneg_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
WINDOWS_C_ARGUMENTS [k;z;p;x] s /\
bignum_from_memory (x,val k) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory(z,val k) s =
(if p = word 0 \/ a = 0 then a else 2 EXP (64 * val k) - a) /\
WINDOWS_C_RETURN s = word(bitval(~(p = word 0) /\ ~(a = 0))))
(MAYCHANGE [RIP; RSP; RCX; RAX; RDX; R8; R9] ,,
MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k);
memory :> bytes(word_sub stackpointer (word 16),16)])`,
WINDOWS_X86_WRAP_NOSTACK_TAC windows_bignum_optneg_mc bignum_optneg_mc
BIGNUM_OPTNEG_CORRECT);;
|
39322045631afd51fe7ea2564e3550c0bd7678379af730a4f977b71edfb59315 | juspay/atlas | Sms.hs | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Types . API.Sms
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Types.API.Sms
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Types.API.Sms where
import EulerHS.Prelude
import Servant
type ReadSmsAPI =
"read"
:> Capture "mobile_number" Text
:> Get '[JSON] ReadSmsRes
type ReadSmsRes = [Text]
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/app/mock-sms/src/Types/API/Sms.hs | haskell | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Types . API.Sms
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Types.API.Sms
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Types.API.Sms where
import EulerHS.Prelude
import Servant
type ReadSmsAPI =
"read"
:> Capture "mobile_number" Text
:> Get '[JSON] ReadSmsRes
type ReadSmsRes = [Text]
| |
528b117f5d7e37bc2fb689bed85c11c929ec2e8c45d197d93d1abdd936214a35 | metosin/packaging-clojure-examples | main.clj | (ns my-project.main
(:require [reitit.ring :as ring]
[ring.adapter.jetty :as jetty]))
(def app
(ring/ring-handler
(ring/router
["/api"
["/ping" {:get {:handler (fn [_]
{:status 200
:body "pong"})}}]])
(ring/routes
(ring/create-resource-handler {:path "/" :root "public"}) ;; serve index.html and built js
(ring/create-default-handler))))
(defn -main []
(println "Starting server on :3333")
(jetty/run-jetty app {:port 3333}))
| null | https://raw.githubusercontent.com/metosin/packaging-clojure-examples/9ce8df699b48fa9856d97d0b56334642ccc84dcf/lein/src/clj/my_project/main.clj | clojure | serve index.html and built js | (ns my-project.main
(:require [reitit.ring :as ring]
[ring.adapter.jetty :as jetty]))
(def app
(ring/ring-handler
(ring/router
["/api"
["/ping" {:get {:handler (fn [_]
{:status 200
:body "pong"})}}]])
(ring/routes
(ring/create-default-handler))))
(defn -main []
(println "Starting server on :3333")
(jetty/run-jetty app {:port 3333}))
|
93021161079573343ce9dd500a60a4a8540762e606dcecb341c172630956eafc | fakedata-haskell/fakedata | TwinPeaks.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Faker.Provider.TwinPeaks where
import Config
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.Map.Strict (Map)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Vector (Vector)
import Data.Yaml
import Faker
import Faker.Internal
import Faker.Provider.TH
import Language.Haskell.TH
parseTwinPeaks :: FromJSON a => FakerSettings -> Value -> Parser a
parseTwinPeaks settings (Object obj) = do
en <- obj .: (getLocaleKey settings)
faker <- en .: "faker"
twinPeaks <- faker .: "twin_peaks"
pure twinPeaks
parseTwinPeaks settings val = fail $ "expected Object, but got " <> (show val)
parseTwinPeaksField ::
(FromJSON a, Monoid a) => FakerSettings -> AesonKey -> Value -> Parser a
parseTwinPeaksField settings txt val = do
twinPeaks <- parseTwinPeaks settings val
field <- twinPeaks .:? txt .!= mempty
pure field
parseTwinPeaksFields ::
(FromJSON a, Monoid a) => FakerSettings -> [AesonKey] -> Value -> Parser a
parseTwinPeaksFields settings txts val = do
twinPeaks <- parseTwinPeaks settings val
helper twinPeaks txts
where
helper :: (FromJSON a) => Value -> [AesonKey] -> Parser a
helper a [] = parseJSON a
helper (Object a) (x:xs) = do
field <- a .: x
helper field xs
helper a (x:xs) = fail $ "expect Object, but got " <> (show a)
$(genParser "twinPeaks" "characters")
$(genProvider "twinPeaks" "characters")
$(genParser "twinPeaks" "locations")
$(genProvider "twinPeaks" "locations")
$(genParser "twinPeaks" "quotes")
$(genProvider "twinPeaks" "quotes")
| null | https://raw.githubusercontent.com/fakedata-haskell/fakedata/7b0875067386e9bb844c8b985c901c91a58842ff/src/Faker/Provider/TwinPeaks.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE TemplateHaskell #
module Faker.Provider.TwinPeaks where
import Config
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.Map.Strict (Map)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Vector (Vector)
import Data.Yaml
import Faker
import Faker.Internal
import Faker.Provider.TH
import Language.Haskell.TH
parseTwinPeaks :: FromJSON a => FakerSettings -> Value -> Parser a
parseTwinPeaks settings (Object obj) = do
en <- obj .: (getLocaleKey settings)
faker <- en .: "faker"
twinPeaks <- faker .: "twin_peaks"
pure twinPeaks
parseTwinPeaks settings val = fail $ "expected Object, but got " <> (show val)
parseTwinPeaksField ::
(FromJSON a, Monoid a) => FakerSettings -> AesonKey -> Value -> Parser a
parseTwinPeaksField settings txt val = do
twinPeaks <- parseTwinPeaks settings val
field <- twinPeaks .:? txt .!= mempty
pure field
parseTwinPeaksFields ::
(FromJSON a, Monoid a) => FakerSettings -> [AesonKey] -> Value -> Parser a
parseTwinPeaksFields settings txts val = do
twinPeaks <- parseTwinPeaks settings val
helper twinPeaks txts
where
helper :: (FromJSON a) => Value -> [AesonKey] -> Parser a
helper a [] = parseJSON a
helper (Object a) (x:xs) = do
field <- a .: x
helper field xs
helper a (x:xs) = fail $ "expect Object, but got " <> (show a)
$(genParser "twinPeaks" "characters")
$(genProvider "twinPeaks" "characters")
$(genParser "twinPeaks" "locations")
$(genProvider "twinPeaks" "locations")
$(genParser "twinPeaks" "quotes")
$(genProvider "twinPeaks" "quotes")
|
21445760c5191a50ef1cb9369a28f8e7f2993e0de46c266e43477240fb800e19 | gedge-platform/gedge-platform | prometheus_collector.erl | %% @doc
%% A collector for a set of metrics.
%%
%% Normal users should use {@link prometheus_gauge},
%% {@link prometheus_counter}, {@link prometheus_summary}
%% and {@link prometheus_histogram}.
%%
%% Implementing `:prometheus_collector' behaviour is for advanced uses
%% such as proxying metrics from another monitoring system.
%% It is it the responsibility of the implementer to ensure produced metrics
%% are valid.
%%
%% You will be working with Prometheus
%% data model directly (see {@link prometheus_model_helpers}).
%%
%% Callbacks:
%% - `collect_mf(Registry, Callback)' - called by exporters and formats.
%% Should call `Callback' for each `MetricFamily' of this collector;
- ` collect_metrics(Name , Data ) ' - called by ` ' constructor .
Should return Metric list for each identified by ` Name ' .
` Data ' is a term associated with by collect_mf .
%% - `deregister_cleanup(Registry)' - called when collector unregistered by
%% `Registry'. If collector is stateful you can put cleanup code here.
%%
%% Example (simplified `prometheus_vm_memory_collector'):
%% <pre lang="erlang">
%% -module(prometheus_vm_memory_collector).
%%
%% -export([deregister_cleanup/1,
%% collect_mf/2,
%% collect_metrics/2]).
%%
%% -behaviour(prometheus_collector).
%%
%% %%====================================================================
%% %% Collector API
%% %%====================================================================
%%
%% deregister_cleanup(_) -> ok.
%%
%% collect_mf(_Registry, Callback) ->
%% Memory = erlang:memory(),
Callback(create_gauge(erlang_vm_bytes_total ,
%% "The total amount of memory currently allocated. "
%% "This is the same as the sum of the memory size "
%% "for processes and system.",
%% Memory)),
%% ok.
%%
%% collect_metrics(erlang_vm_bytes_total, Memory) ->
%% prometheus_model_helpers:gauge_metrics(
%% [
{ [ { kind , system } ] , proplists : , Memory ) } ,
%% {[{kind, processes}], proplists:get_value(processes, Memory)}
%% ]).
%%
%% %%====================================================================
%% %% Private Parts
%% %%====================================================================
%%
create_gauge(Name , Help , Data ) - >
prometheus_model_helpers : create_mf(Name , Help , gauge , ? MODULE , Data ) .
%% </pre>
%% @end
-module(prometheus_collector).
-export([enabled_collectors/0,
collect_mf/3]).
-ifdef(TEST).
-export([collect_mf_to_list/1]).
-endif.
-export_type([collector/0,
data/0,
collect_mf_callback/0]).
-compile({no_auto_import, [register/2]}).
-define(DEFAULT_COLLECTORS,
[prometheus_boolean,
prometheus_counter,
prometheus_gauge,
prometheus_histogram,
prometheus_mnesia_collector,
prometheus_quantile_summary,
prometheus_summary,
prometheus_vm_dist_collector,
prometheus_vm_memory_collector,
prometheus_vm_msacc_collector,
prometheus_vm_statistics_collector,
prometheus_vm_system_info_collector]).
-include("prometheus.hrl").
-include("prometheus_model.hrl").
%%====================================================================
%% Types
%%====================================================================
-type collector() :: atom().
-type data() :: any().
-type collect_mf_callback() ::
fun((prometheus_model:'MetricFamily'()) -> any()).
%%====================================================================
%% Callbacks
%%====================================================================
-callback collect_mf(Registry, Callback) -> ok when
Registry :: prometheus_registry:registry(),
Callback :: collect_mf_callback().
%% %% TODO: either add mandatory Type argument here or track Type
%% %% automatically and don't ask collector implementers to care
%% -callback collect_metrics(Name, Data) -> Metrics when
%% Name :: prometheus_metric:name(),
%% Data :: data(),
%% Metrics :: prometheus_model:'Metric'() | [prometheus_model:'Metric'()].
-callback deregister_cleanup(Registry) -> ok when
Registry :: prometheus_registry:registry().
%%====================================================================
%% Public API
%%====================================================================
@private
-spec enabled_collectors() -> [collector()].
enabled_collectors() ->
lists:usort(
case application:get_env(prometheus, collectors) of
undefined -> all_known_collectors();
{ok, Collectors} -> catch_default_collectors(Collectors)
end).
@doc Calls ` Callback ' for each of this collector .
-spec collect_mf(Registry, Collector, Callback) -> ok when
Registry :: prometheus_registry:registry(),
Collector :: collector(),
Callback :: collect_mf_callback().
collect_mf(Registry, Collector, Callback0) ->
Callback = case application:get_env(prometheus, global_labels) of
undefined ->
Callback0;
{ok, Labels0} ->
Labels = prometheus_model_helpers:label_pairs(Labels0),
fun (MF=#'MetricFamily'{metric=Metrics0}) ->
Metrics = [M#'Metric'{label=Labels ++ ML}
|| M=#'Metric'{label=ML} <- Metrics0],
Callback0(MF#'MetricFamily'{metric=Metrics})
end
end,
ok = Collector:collect_mf(Registry, Callback).
%%====================================================================
%% Test only
%%====================================================================
-ifdef(TEST).
@private
collect_mf_to_list(Collector) ->
collect_mf_to_list(default, Collector).
collect_mf_to_list(Registry, Collector) ->
try
Callback = fun (MF) ->
put(Collector, [MF|get_list(Collector)])
end,
prometheus_collector:collect_mf(Registry, Collector, Callback),
get_list(Collector)
after
erase(Collector)
end.
get_list(Key) ->
case get(Key) of
undefined ->
[];
Value ->
Value
end.
-endif.
%%====================================================================
%% Private Parts
%%====================================================================
all_known_collectors() ->
lists:umerge(
prometheus_misc:behaviour_modules(prometheus_collector),
?DEFAULT_COLLECTORS).
catch_default_collectors(Collectors) ->
maybe_replace_default(Collectors, []).
maybe_replace_default([default|Rest], Acc) ->
maybe_replace_default(Rest, ?DEFAULT_COLLECTORS ++ Acc);
maybe_replace_default([], Acc) ->
Acc;
maybe_replace_default([H|R], Acc) ->
maybe_replace_default(R, [H|Acc]).
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/prometheus/src/prometheus_collector.erl | erlang | @doc
A collector for a set of metrics.
Normal users should use {@link prometheus_gauge},
{@link prometheus_counter}, {@link prometheus_summary}
and {@link prometheus_histogram}.
Implementing `:prometheus_collector' behaviour is for advanced uses
such as proxying metrics from another monitoring system.
It is it the responsibility of the implementer to ensure produced metrics
are valid.
You will be working with Prometheus
data model directly (see {@link prometheus_model_helpers}).
Callbacks:
- `collect_mf(Registry, Callback)' - called by exporters and formats.
Should call `Callback' for each `MetricFamily' of this collector;
- `deregister_cleanup(Registry)' - called when collector unregistered by
`Registry'. If collector is stateful you can put cleanup code here.
Example (simplified `prometheus_vm_memory_collector'):
<pre lang="erlang">
-module(prometheus_vm_memory_collector).
-export([deregister_cleanup/1,
collect_mf/2,
collect_metrics/2]).
-behaviour(prometheus_collector).
%%====================================================================
%% Collector API
%%====================================================================
deregister_cleanup(_) -> ok.
collect_mf(_Registry, Callback) ->
Memory = erlang:memory(),
"The total amount of memory currently allocated. "
"This is the same as the sum of the memory size "
"for processes and system.",
Memory)),
ok.
collect_metrics(erlang_vm_bytes_total, Memory) ->
prometheus_model_helpers:gauge_metrics(
[
{[{kind, processes}], proplists:get_value(processes, Memory)}
]).
%%====================================================================
%% Private Parts
%%====================================================================
</pre>
@end
====================================================================
Types
====================================================================
====================================================================
Callbacks
====================================================================
%% TODO: either add mandatory Type argument here or track Type
%% automatically and don't ask collector implementers to care
-callback collect_metrics(Name, Data) -> Metrics when
Name :: prometheus_metric:name(),
Data :: data(),
Metrics :: prometheus_model:'Metric'() | [prometheus_model:'Metric'()].
====================================================================
Public API
====================================================================
====================================================================
Test only
====================================================================
====================================================================
Private Parts
==================================================================== | - ` collect_metrics(Name , Data ) ' - called by ` ' constructor .
Should return Metric list for each identified by ` Name ' .
` Data ' is a term associated with by collect_mf .
Callback(create_gauge(erlang_vm_bytes_total ,
{ [ { kind , system } ] , proplists : , Memory ) } ,
create_gauge(Name , Help , Data ) - >
prometheus_model_helpers : create_mf(Name , Help , gauge , ? MODULE , Data ) .
-module(prometheus_collector).
-export([enabled_collectors/0,
collect_mf/3]).
-ifdef(TEST).
-export([collect_mf_to_list/1]).
-endif.
-export_type([collector/0,
data/0,
collect_mf_callback/0]).
-compile({no_auto_import, [register/2]}).
-define(DEFAULT_COLLECTORS,
[prometheus_boolean,
prometheus_counter,
prometheus_gauge,
prometheus_histogram,
prometheus_mnesia_collector,
prometheus_quantile_summary,
prometheus_summary,
prometheus_vm_dist_collector,
prometheus_vm_memory_collector,
prometheus_vm_msacc_collector,
prometheus_vm_statistics_collector,
prometheus_vm_system_info_collector]).
-include("prometheus.hrl").
-include("prometheus_model.hrl").
-type collector() :: atom().
-type data() :: any().
-type collect_mf_callback() ::
fun((prometheus_model:'MetricFamily'()) -> any()).
-callback collect_mf(Registry, Callback) -> ok when
Registry :: prometheus_registry:registry(),
Callback :: collect_mf_callback().
-callback deregister_cleanup(Registry) -> ok when
Registry :: prometheus_registry:registry().
@private
-spec enabled_collectors() -> [collector()].
enabled_collectors() ->
lists:usort(
case application:get_env(prometheus, collectors) of
undefined -> all_known_collectors();
{ok, Collectors} -> catch_default_collectors(Collectors)
end).
@doc Calls ` Callback ' for each of this collector .
-spec collect_mf(Registry, Collector, Callback) -> ok when
Registry :: prometheus_registry:registry(),
Collector :: collector(),
Callback :: collect_mf_callback().
collect_mf(Registry, Collector, Callback0) ->
Callback = case application:get_env(prometheus, global_labels) of
undefined ->
Callback0;
{ok, Labels0} ->
Labels = prometheus_model_helpers:label_pairs(Labels0),
fun (MF=#'MetricFamily'{metric=Metrics0}) ->
Metrics = [M#'Metric'{label=Labels ++ ML}
|| M=#'Metric'{label=ML} <- Metrics0],
Callback0(MF#'MetricFamily'{metric=Metrics})
end
end,
ok = Collector:collect_mf(Registry, Callback).
-ifdef(TEST).
@private
collect_mf_to_list(Collector) ->
collect_mf_to_list(default, Collector).
collect_mf_to_list(Registry, Collector) ->
try
Callback = fun (MF) ->
put(Collector, [MF|get_list(Collector)])
end,
prometheus_collector:collect_mf(Registry, Collector, Callback),
get_list(Collector)
after
erase(Collector)
end.
get_list(Key) ->
case get(Key) of
undefined ->
[];
Value ->
Value
end.
-endif.
all_known_collectors() ->
lists:umerge(
prometheus_misc:behaviour_modules(prometheus_collector),
?DEFAULT_COLLECTORS).
catch_default_collectors(Collectors) ->
maybe_replace_default(Collectors, []).
maybe_replace_default([default|Rest], Acc) ->
maybe_replace_default(Rest, ?DEFAULT_COLLECTORS ++ Acc);
maybe_replace_default([], Acc) ->
Acc;
maybe_replace_default([H|R], Acc) ->
maybe_replace_default(R, [H|Acc]).
|
2e8f3db28bf95374f090533d7218b19ff5494c1c8049031b5211afedbc35f679 | andrewMacmurray/haskell-book-solutions | Practice.hs | module Ch24.Practice where
import Text.Trifecta
one :: Parser Char
one = char '1'
oneTwo :: Parser Char
oneTwo = one >> char '2'
oneTwoStop :: Parser ()
oneTwoStop = oneTwo >> eof
allThree :: Parser String
allThree = do
n <- (show <$> integer)
_ <- eof
return n
string' :: String -> Parser String
string' str = go str mempty
where
go (x:xs) parsed = char x >>= (\x' -> go xs (parsed ++ [x']))
go [] parsed = return parsed
main :: IO ()
main = do
print $ parseString oneTwo mempty "12"
print $ parseString oneTwoStop mempty "12"
print $ parseString oneTwoStop mempty "123"
print $ parseString allThree mempty "12"
print $ parseString allThree mempty "123"
print $ parseString (string' "hello") mempty "hello"
| null | https://raw.githubusercontent.com/andrewMacmurray/haskell-book-solutions/f4fd386187c03828d1736d9a43642ab4f0ec6462/src/ch24/Practice.hs | haskell | module Ch24.Practice where
import Text.Trifecta
one :: Parser Char
one = char '1'
oneTwo :: Parser Char
oneTwo = one >> char '2'
oneTwoStop :: Parser ()
oneTwoStop = oneTwo >> eof
allThree :: Parser String
allThree = do
n <- (show <$> integer)
_ <- eof
return n
string' :: String -> Parser String
string' str = go str mempty
where
go (x:xs) parsed = char x >>= (\x' -> go xs (parsed ++ [x']))
go [] parsed = return parsed
main :: IO ()
main = do
print $ parseString oneTwo mempty "12"
print $ parseString oneTwoStop mempty "12"
print $ parseString oneTwoStop mempty "123"
print $ parseString allThree mempty "12"
print $ parseString allThree mempty "123"
print $ parseString (string' "hello") mempty "hello"
| |
7e7d9d479a66fb8cf8b026abc947284e265f92edf6dca1da21c8e011a037b527 | puppetlabs/puppetserver | certificate_authority_disabled_test.clj | (ns puppetlabs.services.ca.certificate-authority-disabled-test
(:require [clojure.test :refer :all]
[me.raynes.fs :as fs]
[puppetlabs.services.ca.certificate-authority-disabled-service :as disabled]
[puppetlabs.services.jruby.jruby-puppet-testutils :as jruby-testutils]
[puppetlabs.trapperkeeper.app :as tk-app]
[puppetlabs.trapperkeeper.testutils.logging :as logutils]
[puppetlabs.trapperkeeper.testutils.bootstrap :as tk-testutils]
[puppetlabs.trapperkeeper.services.authorization.authorization-service :as tk-auth]
[puppetlabs.services.jruby.jruby-puppet-service :as jruby]
[puppetlabs.kitchensink.core :as ks]))
(deftest ca-disabled-files-test
(testing "Ensure no certificates are generated when CA disabled service is enabled."
(logutils/with-test-logging
(let [puppet-conf-dir (str (ks/temp-dir))
config (-> (jruby-testutils/jruby-puppet-tk-config
(jruby-testutils/jruby-puppet-config
{:max-active-instances 1}))
(assoc-in [:jruby-puppet :server-conf-dir]
puppet-conf-dir)
(assoc :puppet {"vardir" (str puppet-conf-dir "/var")}))]
(tk-testutils/with-app-with-config
app
(jruby-testutils/add-mock-jruby-pool-manager-service
(conj jruby-testutils/jruby-service-and-dependencies
disabled/certificate-authority-disabled-service
tk-auth/authorization-service)
config)
config
(let [jruby-service (tk-app/get-service app :JRubyPuppetService)]
(jruby/with-jruby-puppet
jruby-puppet jruby-service :ca-disabled-files-test
(is (not (fs/exists? (fs/file puppet-conf-dir "ssl")))))))))))
| null | https://raw.githubusercontent.com/puppetlabs/puppetserver/2d6ca01b4b72716ca543b606f752261b969e401b/test/unit/puppetlabs/services/ca/certificate_authority_disabled_test.clj | clojure | (ns puppetlabs.services.ca.certificate-authority-disabled-test
(:require [clojure.test :refer :all]
[me.raynes.fs :as fs]
[puppetlabs.services.ca.certificate-authority-disabled-service :as disabled]
[puppetlabs.services.jruby.jruby-puppet-testutils :as jruby-testutils]
[puppetlabs.trapperkeeper.app :as tk-app]
[puppetlabs.trapperkeeper.testutils.logging :as logutils]
[puppetlabs.trapperkeeper.testutils.bootstrap :as tk-testutils]
[puppetlabs.trapperkeeper.services.authorization.authorization-service :as tk-auth]
[puppetlabs.services.jruby.jruby-puppet-service :as jruby]
[puppetlabs.kitchensink.core :as ks]))
(deftest ca-disabled-files-test
(testing "Ensure no certificates are generated when CA disabled service is enabled."
(logutils/with-test-logging
(let [puppet-conf-dir (str (ks/temp-dir))
config (-> (jruby-testutils/jruby-puppet-tk-config
(jruby-testutils/jruby-puppet-config
{:max-active-instances 1}))
(assoc-in [:jruby-puppet :server-conf-dir]
puppet-conf-dir)
(assoc :puppet {"vardir" (str puppet-conf-dir "/var")}))]
(tk-testutils/with-app-with-config
app
(jruby-testutils/add-mock-jruby-pool-manager-service
(conj jruby-testutils/jruby-service-and-dependencies
disabled/certificate-authority-disabled-service
tk-auth/authorization-service)
config)
config
(let [jruby-service (tk-app/get-service app :JRubyPuppetService)]
(jruby/with-jruby-puppet
jruby-puppet jruby-service :ca-disabled-files-test
(is (not (fs/exists? (fs/file puppet-conf-dir "ssl")))))))))))
| |
f52a06569247c3326d6134994d4d6e7d8d1621122130720560e1925eb428ca57 | ucsd-progsys/liquidhaskell | DupFunSigs.hs | {-@ LIQUID "--expect-error-containing=Multiple specifications for `DupFunSigs.fromWeekDayNum`" @-}
-- See -progsys/liquidhaskell/issues/1137
module DupFunSigs where
import Data.List
@ type WeekDayNum = { i : Int | 0 < i & & i < = 7 } @
Mon = = 1 , ... , Sun = = 7
data WeekDay = Mon | Tue | Wed | Thu | Fri | Sat | Sun
deriving (Read, Show, Eq, Bounded)
@ weekdays : : { wd:[WeekDay ] | len wd = = 7 } @
weekdays :: [WeekDay]
weekdays = [Mon, Tue, Wed, Thu, Fri, Sat, Sun]
{-@ fromWeekDayNum :: WeekDayNum -> WeekDay @-}
fromWeekDayNum :: WeekDayNum -> WeekDay
fromWeekDayNum i = weekdays !! (i-1)
@ fromWeekDayNum : : - > WeekDayNum @
toWeekDayNum :: WeekDay -> WeekDayNum
toWeekDayNum wd = case wd `elemIndex` weekdays of
Just i -> i + 1
main :: IO ()
main = pure ()
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/f46dbafd6ce1f61af5b56f31924c21639c982a8a/tests/errors/DupFunSigs.hs | haskell | @ LIQUID "--expect-error-containing=Multiple specifications for `DupFunSigs.fromWeekDayNum`" @
See -progsys/liquidhaskell/issues/1137
@ fromWeekDayNum :: WeekDayNum -> WeekDay @ |
module DupFunSigs where
import Data.List
@ type WeekDayNum = { i : Int | 0 < i & & i < = 7 } @
Mon = = 1 , ... , Sun = = 7
data WeekDay = Mon | Tue | Wed | Thu | Fri | Sat | Sun
deriving (Read, Show, Eq, Bounded)
@ weekdays : : { wd:[WeekDay ] | len wd = = 7 } @
weekdays :: [WeekDay]
weekdays = [Mon, Tue, Wed, Thu, Fri, Sat, Sun]
fromWeekDayNum :: WeekDayNum -> WeekDay
fromWeekDayNum i = weekdays !! (i-1)
@ fromWeekDayNum : : - > WeekDayNum @
toWeekDayNum :: WeekDay -> WeekDayNum
toWeekDayNum wd = case wd `elemIndex` weekdays of
Just i -> i + 1
main :: IO ()
main = pure ()
|
20a22a34f4c95a0a3e5e96c974da9ca9c436a5130ae4ed7ac999844417e7709e | erlang/otp | extra_return.erl | -module(extra_return).
-export([t1/0, t2/0]).
% Should warn about having `undefined` as return value when it is not returned by the function
-spec t1() -> true | false | 'other'.
t1() ->
case rand:uniform(2) of
1 -> true;
2 -> false
end.
% Should not warn about extra return
-dialyzer({no_extra_return, t2/0}).
-spec t2() -> true | false | 'other'.
t2() ->
case rand:uniform(2) of
1 -> true;
2 -> false
end.
| null | https://raw.githubusercontent.com/erlang/otp/c9728daf140e03840a49488a607bdd622d82aa1f/lib/dialyzer/test/extra_return_SUITE_data/src/extra_return.erl | erlang | Should warn about having `undefined` as return value when it is not returned by the function
Should not warn about extra return | -module(extra_return).
-export([t1/0, t2/0]).
-spec t1() -> true | false | 'other'.
t1() ->
case rand:uniform(2) of
1 -> true;
2 -> false
end.
-dialyzer({no_extra_return, t2/0}).
-spec t2() -> true | false | 'other'.
t2() ->
case rand:uniform(2) of
1 -> true;
2 -> false
end.
|
f22fe93d95ed14d8ac303a2f5c5b1a109d69598a532f205eba0c87c9d701f0f4 | greglook/merkle-db | validate.clj | (ns merkle-db.tools.validate
"Validation framework for testing database MerkleDAG structures."
(:refer-clojure :exclude [run!])
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.test :as test]
[merkle-db.index :as index]
[merkle-db.key :as key]
[merkle-db.partition :as part]
[merkle-db.record :as record]
[merkle-db.tablet :as tablet]
[merkledag.core :as mdag]
[merkledag.link :as link]
[merkledag.node :as node]
[multihash.core :as multihash]))
;; Path from the validation root to the node being checked.
(s/def ::path string?)
;; Type of validation check performed.
(s/def ::type qualified-keyword?)
;; Validation state.
(s/def ::state #{:pass :warn :fail :error})
;; Validation message describing the check in a human-readable way.
(s/def ::message string?)
(s/def ::expected any?)
(s/def ::actual any?)
;; Map representing a specific check on a node in the tree.
(s/def ::result
(s/keys :req [::type ::state ::message]
:opt [::expected ::actual]))
(def ^:dynamic *context*
"Dynamically-bound validation context."
nil)
(defmacro collect-results
[& body]
`(binding [*context* {::results (atom [])
::next (atom [])}]
(let [value# (do ~@body)]
{:value value#
:results @(::results *context*)
:next @(::next *context*)})))
(defn check-next!
"Register a validation function to run against the linked node."
[check-fn link params]
(swap! (::next *context*)
conj
{::check check-fn
::node/id (:target link)
::link-name (:name link)
::params params}))
(defn report!
"Record a validation result. The function accepts a multihash node id, a
keyword validation type, a result key (:pass, :warn, :fail, or :error) and a
message string."
[type-key state message expected actual]
(when-let [results (::results *context*)]
(swap! results
conj
{::path (::path *context*)
::type type-key
::state state
::message message
::expected expected
::actual actual})
nil))
(defmacro check
([type-key test-form message]
`(check ~type-key ~test-form ~message :fail))
([type-key test-form message bad-state]
`(try
~(cond
;; Equality test.
(and (list? test-form)
(= '= (first test-form))
(= 3 (count test-form)))
`(let [expected# ~(nth test-form 1)
actual# ~(nth test-form 2)
condition# (= expected# actual#)
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message expected# actual#)
condition#)
Comparison test .
(and (list? test-form)
(contains? #{'< '> '<= '>=} (first test-form))
(= 3 (count test-form)))
`(let [v0# ~(nth test-form 1)
v1# ~(nth test-form 2)
condition# (~(first test-form) v0# v1#)
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message
'~test-form
(if condition#
condition#
(list '~'not (list '~(first test-form) v0# v1#))))
condition#)
;; Predicate test.
(and (list? test-form)
(= 2 (count test-form)))
`(let [actual# ~(second test-form)
condition# (~(first test-form) actual#)
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message
'~test-form
(if condition#
condition#
(list '~'not (list '~(first test-form) actual#))))
condition#)
:else
`(let [condition# ~test-form
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message '~test-form condition#)
condition#))
(catch Throwable t#
(report! ~type-key :error
(str "Error checking assertion "
(pr-str '~test-form) ": "
(.getName (class t#)) " "
(.getMessage t#))
'~test-form t#)
nil))))
(defn run!
"Validate a full graph structure by walking the nodes and links with
validation functions. Returns a map from node ids to information maps, which
contain a `:paths` set and a `:results` vector with assertion result maps."
[store root-id root-check params]
(loop [checks (conj clojure.lang.PersistentQueue/EMPTY
{::path []
::check root-check
::node/id root-id
::params params})
results {}]
(if-let [next-check (peek checks)]
Process next check task .
(let [node-id (::node/id next-check)
path (::path next-check)
data (mdag/get-data store node-id nil ::missing-node)]
(if (identical? ::missing-node data)
;; Node not found, add error result.
(recur (pop checks)
(assoc results node-id
{::paths #{path}
::results [{::type ::missing-node, ::status :error}]}))
;; Run check function on node data.
(let [check (::check next-check)
;_ (prn ::run-check check data (::params next-check))
output (collect-results (check data (::params next-check)))]
(recur (into (pop checks)
(map #(-> %
(assoc ::path (conj path (::link-name %)))
(dissoc ::link-name)))
(:next output))
(-> results
(update-in [node-id ::paths] (fnil conj #{}) path)
(update-in [node-id ::results] (fnil into []) (:results output)))))))
;; No more checks, return result aggregate.
results)))
(defmacro check-asserts
[results]
`(doseq [[node-id# info#] ~results
result# (::results info#)]
(test/do-report
{:type (::state result#)
:message (format "Node %s (%s): %s"
(multihash/base58 node-id#)
(str/join ", " (map (partial str/join "/") (::paths info#)))
(::message result#))
:expected (::expected
result#
[(::type result#)
(str/join \/ (::path result#))
(::node/id result#)])
:actual (::actual result# (::state result#))})))
# # Validation Functions
(defn validate-tablet
[tablet params]
(when (check :data/type
(= :merkle-db/tablet (:data/type tablet))
"Node has expected data type")
(check ::spec
(s/valid? ::tablet/node-data tablet)
(s/explain-str ::tablet/node-data tablet))
(check ::record/count
(seq (tablet/read-all tablet))
"Tablet should not be empty")
(when-let [family-keys (get (::record/families params)
(::record/family-key params))]
(let [bad-fields (->> (::records tablet)
(mapcat (comp clojure.core/keys second))
(remove (set family-keys))
(set))]
(check ::record/families
(empty? bad-fields)
(format "Tablet record data should only contain values for fields in family %s (%s)"
(::record/family-key params)
family-keys))))
(when-let [boundary (::record/first-key params)]
(check ::record/first-key
(not (key/before? (tablet/first-key tablet) boundary))
"First key in partition is within the subtree boundary"))
(when-let [boundary (::record/last-key params)]
(check ::record/last-key
(not (key/after? (tablet/last-key tablet) boundary))
"Last key in partition is within the subtree boundary"))
;; TODO: records are sorted by key
nil))
(defn validate-partition
[part params]
(when (check :data/type
(= :merkle-db/partition (:data/type part))
"Node has expected data type")
(check ::spec
(s/valid? ::part/node-data part)
(s/explain-str ::part/node-data part))
;; TODO: warn when partition limit or families don't match params
(when (and (::part/limit params)
(::record/count params)
(<= (::part/limit params) (::record/count params)))
(check ::part/underflow
(<= (Math/ceil (/ (::part/limit params) 2)) (::record/count part))
"Partition is at least half full if tree has at least :merkle-db.partition/limit records"))
(check ::part/overflow
(<= (::record/count part) (::part/limit params))
"Partition has at most :merkle-db.partition/limit records")
(when-let [boundary (::record/first-key params)]
(check ::record/first-key
(not (key/before? (::record/first-key part) boundary))
"First key in partition is within the subtree boundary"))
(when-let [boundary (::record/last-key params)]
(check ::record/last-key
(not (key/after? (::record/last-key part) boundary))
"Last key in partition is within the subtree boundary"))
(check ::base-tablet
(:base (::part/tablets part))
"Partition contains a base tablet")
TODO : partition first - key matches actual first record key in base tablet
;; TODO: partition last-key matches actual last record key in base tablet
;; TODO: record/count is accurate
;; TODO: every key present tests true against membership filter
(doseq [[tablet-family link] (::part/tablets part)]
(check-next!
validate-tablet link
(assoc params
::record/families (::record/families part)
::record/family-key tablet-family
::record/first-key (::record/first-key part)
::record/last-key (::record/last-key part))))))
(defn validate-index
[index params]
(when (check :data/type
(= :merkle-db/index (:data/type index))
"Node has expected data type")
(check ::spec
(s/valid? ::index/node-data index)
(s/explain-str ::index/node-data index))
(check ::index/keys
(= (dec (count (::index/children index)))
(count (::index/keys index)))
"Index nodes have one fewer key than child links")
(if (::index/root? params)
(check ::index/fan-out
(<= 2 (count (::index/children index)) (::index/fan-out params))
"Root index node has at between [2, f] children")
(check ::index/fan-out
(<= (int (Math/ceil (/ (::index/fan-out params) 2)))
(count (::index/children index))
(::index/fan-out params))
"Internal index node has between [ceil(f/2), f] children"))
(check ::index/height
(= (::index/height params) (::index/height index))
"Index node has expected height")
(doseq [[first-key child-link last-key]
(map vector
(cons (::record/first-key params) (::index/keys index))
(::index/children index)
(conj (::index/keys index) (::record/last-key params)))
:let [height' (dec (::index/height index))]]
(check-next!
(if (zero? height')
validate-partition
validate-index)
child-link
(assoc params
::index/root? false
::index/height height'
::record/first-key first-key
::record/last-key last-key)))))
(defn validate-data-tree
[root params]
(cond
(zero? (::record/count params))
(check ::index/empty
(nil? root)
"Empty tree has nil root")
(or (< (::record/count params) (::part/limit params))
(and (= (::record/count params) (::part/limit params))
(= :merkle-db/partition (:data/type root))))
(validate-partition root params)
:else
(validate-index
root
(assoc params
::index/root? true
::index/height (::index/height root)))))
| null | https://raw.githubusercontent.com/greglook/merkle-db/449f58005cac0390a40ea15f9bf5403443f3be7d/lib/tools/src/merkle_db/tools/validate.clj | clojure | Path from the validation root to the node being checked.
Type of validation check performed.
Validation state.
Validation message describing the check in a human-readable way.
Map representing a specific check on a node in the tree.
Equality test.
Predicate test.
Node not found, add error result.
Run check function on node data.
_ (prn ::run-check check data (::params next-check))
No more checks, return result aggregate.
TODO: records are sorted by key
TODO: warn when partition limit or families don't match params
TODO: partition last-key matches actual last record key in base tablet
TODO: record/count is accurate
TODO: every key present tests true against membership filter | (ns merkle-db.tools.validate
"Validation framework for testing database MerkleDAG structures."
(:refer-clojure :exclude [run!])
(:require
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.test :as test]
[merkle-db.index :as index]
[merkle-db.key :as key]
[merkle-db.partition :as part]
[merkle-db.record :as record]
[merkle-db.tablet :as tablet]
[merkledag.core :as mdag]
[merkledag.link :as link]
[merkledag.node :as node]
[multihash.core :as multihash]))
(s/def ::path string?)
(s/def ::type qualified-keyword?)
(s/def ::state #{:pass :warn :fail :error})
(s/def ::message string?)
(s/def ::expected any?)
(s/def ::actual any?)
(s/def ::result
(s/keys :req [::type ::state ::message]
:opt [::expected ::actual]))
(def ^:dynamic *context*
"Dynamically-bound validation context."
nil)
(defmacro collect-results
[& body]
`(binding [*context* {::results (atom [])
::next (atom [])}]
(let [value# (do ~@body)]
{:value value#
:results @(::results *context*)
:next @(::next *context*)})))
(defn check-next!
"Register a validation function to run against the linked node."
[check-fn link params]
(swap! (::next *context*)
conj
{::check check-fn
::node/id (:target link)
::link-name (:name link)
::params params}))
(defn report!
"Record a validation result. The function accepts a multihash node id, a
keyword validation type, a result key (:pass, :warn, :fail, or :error) and a
message string."
[type-key state message expected actual]
(when-let [results (::results *context*)]
(swap! results
conj
{::path (::path *context*)
::type type-key
::state state
::message message
::expected expected
::actual actual})
nil))
(defmacro check
([type-key test-form message]
`(check ~type-key ~test-form ~message :fail))
([type-key test-form message bad-state]
`(try
~(cond
(and (list? test-form)
(= '= (first test-form))
(= 3 (count test-form)))
`(let [expected# ~(nth test-form 1)
actual# ~(nth test-form 2)
condition# (= expected# actual#)
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message expected# actual#)
condition#)
Comparison test .
(and (list? test-form)
(contains? #{'< '> '<= '>=} (first test-form))
(= 3 (count test-form)))
`(let [v0# ~(nth test-form 1)
v1# ~(nth test-form 2)
condition# (~(first test-form) v0# v1#)
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message
'~test-form
(if condition#
condition#
(list '~'not (list '~(first test-form) v0# v1#))))
condition#)
(and (list? test-form)
(= 2 (count test-form)))
`(let [actual# ~(second test-form)
condition# (~(first test-form) actual#)
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message
'~test-form
(if condition#
condition#
(list '~'not (list '~(first test-form) actual#))))
condition#)
:else
`(let [condition# ~test-form
state# (if condition# :pass ~bad-state)]
(report! ~type-key state# ~message '~test-form condition#)
condition#))
(catch Throwable t#
(report! ~type-key :error
(str "Error checking assertion "
(pr-str '~test-form) ": "
(.getName (class t#)) " "
(.getMessage t#))
'~test-form t#)
nil))))
(defn run!
"Validate a full graph structure by walking the nodes and links with
validation functions. Returns a map from node ids to information maps, which
contain a `:paths` set and a `:results` vector with assertion result maps."
[store root-id root-check params]
(loop [checks (conj clojure.lang.PersistentQueue/EMPTY
{::path []
::check root-check
::node/id root-id
::params params})
results {}]
(if-let [next-check (peek checks)]
Process next check task .
(let [node-id (::node/id next-check)
path (::path next-check)
data (mdag/get-data store node-id nil ::missing-node)]
(if (identical? ::missing-node data)
(recur (pop checks)
(assoc results node-id
{::paths #{path}
::results [{::type ::missing-node, ::status :error}]}))
(let [check (::check next-check)
output (collect-results (check data (::params next-check)))]
(recur (into (pop checks)
(map #(-> %
(assoc ::path (conj path (::link-name %)))
(dissoc ::link-name)))
(:next output))
(-> results
(update-in [node-id ::paths] (fnil conj #{}) path)
(update-in [node-id ::results] (fnil into []) (:results output)))))))
results)))
(defmacro check-asserts
[results]
`(doseq [[node-id# info#] ~results
result# (::results info#)]
(test/do-report
{:type (::state result#)
:message (format "Node %s (%s): %s"
(multihash/base58 node-id#)
(str/join ", " (map (partial str/join "/") (::paths info#)))
(::message result#))
:expected (::expected
result#
[(::type result#)
(str/join \/ (::path result#))
(::node/id result#)])
:actual (::actual result# (::state result#))})))
# # Validation Functions
(defn validate-tablet
[tablet params]
(when (check :data/type
(= :merkle-db/tablet (:data/type tablet))
"Node has expected data type")
(check ::spec
(s/valid? ::tablet/node-data tablet)
(s/explain-str ::tablet/node-data tablet))
(check ::record/count
(seq (tablet/read-all tablet))
"Tablet should not be empty")
(when-let [family-keys (get (::record/families params)
(::record/family-key params))]
(let [bad-fields (->> (::records tablet)
(mapcat (comp clojure.core/keys second))
(remove (set family-keys))
(set))]
(check ::record/families
(empty? bad-fields)
(format "Tablet record data should only contain values for fields in family %s (%s)"
(::record/family-key params)
family-keys))))
(when-let [boundary (::record/first-key params)]
(check ::record/first-key
(not (key/before? (tablet/first-key tablet) boundary))
"First key in partition is within the subtree boundary"))
(when-let [boundary (::record/last-key params)]
(check ::record/last-key
(not (key/after? (tablet/last-key tablet) boundary))
"Last key in partition is within the subtree boundary"))
nil))
(defn validate-partition
  "Run consistency checks against a partition node, then queue each of its
  tablets for validation via `check-next!`. `params` carries tree-wide
  limits (::part/limit, ::record/count) and the subtree's key boundaries."
  [part params]
  (when (check :data/type
               (= :merkle-db/partition (:data/type part))
               "Node has expected data type")
    (check ::spec
           (s/valid? ::part/node-data part)
           (s/explain-str ::part/node-data part))
    ;; Underflow only applies when the whole tree holds at least one full
    ;; partition's worth of records; a sparser tree may legally be sparse.
    (when (and (::part/limit params)
               (::record/count params)
               (<= (::part/limit params) (::record/count params)))
      (check ::part/underflow
             (<= (Math/ceil (/ (::part/limit params) 2)) (::record/count part))
             "Partition is at least half full if tree has at least :merkle-db.partition/limit records"))
    (check ::part/overflow
           (<= (::record/count part) (::part/limit params))
           "Partition has at most :merkle-db.partition/limit records")
    (when-let [boundary (::record/first-key params)]
      (check ::record/first-key
             (not (key/before? (::record/first-key part) boundary))
             "First key in partition is within the subtree boundary"))
    (when-let [boundary (::record/last-key params)]
      (check ::record/last-key
             (not (key/after? (::record/last-key part) boundary))
             "Last key in partition is within the subtree boundary"))
    (check ::base-tablet
           (:base (::part/tablets part))
           "Partition contains a base tablet")
    ;; TODO: partition first-key matches actual first record key in base tablet
    (doseq [[tablet-family link] (::part/tablets part)]
      (check-next!
        validate-tablet link
        (assoc params
               ::record/families (::record/families part)
               ::record/family-key tablet-family
               ::record/first-key (::record/first-key part)
               ::record/last-key (::record/last-key part))))))
(defn validate-index
  "Run consistency checks against an index node, then queue each child
  subtree for validation with its narrowed key boundaries. A child is
  validated as a partition when the next level's height reaches zero, and
  as an index node otherwise."
  [index params]
  (when (check :data/type
               (= :merkle-db/index (:data/type index))
               "Node has expected data type")
    (check ::spec
           (s/valid? ::index/node-data index)
           (s/explain-str ::index/node-data index))
    (check ::index/keys
           (= (dec (count (::index/children index)))
              (count (::index/keys index)))
           "Index nodes have one fewer key than child links")
    ;; B+-tree-style fan-out bounds: the root may have as few as two
    ;; children; internal nodes must stay at least half full.
    (if (::index/root? params)
      (check ::index/fan-out
             (<= 2 (count (::index/children index)) (::index/fan-out params))
             "Root index node has at between [2, f] children")
      (check ::index/fan-out
             (<= (int (Math/ceil (/ (::index/fan-out params) 2)))
                 (count (::index/children index))
                 (::index/fan-out params))
             "Internal index node has between [ceil(f/2), f] children"))
    (check ::index/height
           (= (::index/height params) (::index/height index))
           "Index node has expected height")
    ;; Pair every child link with its bounding keys: child i is bounded by
    ;; split keys i-1 and i, with the subtree's own boundaries at the ends.
    (doseq [[first-key child-link last-key]
            (map vector
                 (cons (::record/first-key params) (::index/keys index))
                 (::index/children index)
                 (conj (::index/keys index) (::record/last-key params)))
            :let [height' (dec (::index/height index))]]
      (check-next!
        (if (zero? height')
          validate-partition
          validate-index)
        child-link
        (assoc params
               ::index/root? false
               ::index/height height'
               ::record/first-key first-key
               ::record/last-key last-key)))))
(defn validate-data-tree
  "Entry point for validating a record data tree. Dispatches on the
  expected record count: an empty tree must have a nil root, a tree that
  fits in one partition is validated as a partition, and anything larger
  is validated as an index rooted at `root`."
  [root params]
  (cond
    (zero? (::record/count params))
    (check ::index/empty
           (nil? root)
           "Empty tree has nil root")

    ;; Small trees are stored as a bare partition; at exactly the partition
    ;; limit the root's node type decides which layout is in use.
    (or (< (::record/count params) (::part/limit params))
        (and (= (::record/count params) (::part/limit params))
             (= :merkle-db/partition (:data/type root))))
    (validate-partition root params)

    :else
    (validate-index
      root
      (assoc params
             ::index/root? true
             ::index/height (::index/height root)))))
|
5ccbadaffc90d5dbea991cc2eee80a43295798c6421b90445e4db1565608fe7d | auser/alice | mochiweb_skel.erl | -module(mochiweb_skel).
-export([skelcopy/2]).
-include_lib("kernel/include/file.hrl").
%% External API
%% @doc Copy the project skeleton into DestDir, renaming the "skel"
%% placeholder to Name, then symlink the bundled mochiweb sources into
%% the new project's deps directory. Crashes (badmatch) on any
%% filesystem failure.
%%
%% Fix: the first clause of the case expression had lost its pattern
%% ("0;" with no "1 ->"), which is not valid Erlang; restored from the
%% upstream mochiweb skeleton code. A dirname of length 1 ("/" or ".")
%% means there is no prefix to strip from printed paths.
skelcopy(DestDir, Name) ->
    ok = ensuredir(DestDir),
    LDst = case length(filename:dirname(DestDir)) of
               1 -> %% handle case when dirname returns "/"
                   0;
               N ->
                   N + 1
           end,
    skelcopy(src(), DestDir, Name, LDst),
    ok = file:make_symlink(
           filename:join(filename:dirname(code:which(?MODULE)), ".."),
           filename:join([DestDir, Name, "deps", "mochiweb-src"])).
%% Internal API
%% @doc Absolute path of the skeleton template directory, located
%% relative to this module's compiled beam file.
src() ->
    filename:join(filename:dirname(code:which(?MODULE)), "../priv/skel").
%% @doc The placeholder token that skelcopy/4 replaces with the new
%% project's name in both file names and file contents.
skel() ->
    "skel".
%% @doc Recursively copy Src into DestDir, substituting the "skel"
%% placeholder with Name in both entry names and regular-file contents,
%% preserving each entry's mode bits. Hidden entries (names starting
%% with ".") are skipped; entries that are neither directories nor
%% regular files are reported and ignored.
skelcopy(Src, DestDir, Name, LDst) ->
    %% Destination name: source basename with every "skel" replaced.
    Dest = re:replace(filename:basename(Src), skel(), Name,
                      [global, {return, list}]),
    case file:read_file_info(Src) of
        {ok, #file_info{type=directory, mode=Mode}} ->
            Dir = DestDir ++ "/" ++ Dest,
            %% EDst: Dir with the first LDst characters stripped, i.e. the
            %% path relative to the destination root; used only for output.
            EDst = lists:nthtail(LDst, Dir),
            ok = ensuredir(Dir),
            ok = file:write_file_info(Dir, #file_info{mode=Mode}),
            {ok, Files} = file:list_dir(Src),
            io:format("~s/~n", [EDst]),
            %% Recurse into visible entries only.
            lists:foreach(fun ("." ++ _) -> ok;
                              (F) ->
                                  skelcopy(filename:join(Src, F),
                                           Dir,
                                           Name,
                                           LDst)
                          end,
                          Files),
            ok;
        {ok, #file_info{type=regular, mode=Mode}} ->
            OutFile = filename:join(DestDir, Dest),
            {ok, B} = file:read_file(Src),
            %% Template expansion inside the file contents.
            S = re:replace(binary_to_list(B), skel(), Name,
                           [{return, list}, global]),
            ok = file:write_file(OutFile, list_to_binary(S)),
            ok = file:write_file_info(OutFile, #file_info{mode=Mode}),
            io:format(" ~s~n", [filename:basename(Src)]),
            ok;
        {ok, _} ->
            io:format("ignored source file: ~p~n", [Src]),
            ok
    end.
%% @doc Create Dir if it does not already exist. Returns ok when the
%% directory was created or was already present; any other error tuple
%% from file:make_dir/1 is passed through unchanged.
ensuredir(Dir) ->
    case file:make_dir(Dir) of
        {error, eexist} ->
            ok;
        Other ->
            Other
    end.
| null | https://raw.githubusercontent.com/auser/alice/e0f867071ede99f451d09053608bd6719c72d1c9/deps/mochiweb/src/mochiweb_skel.erl | erlang | External API
Internal API | -module(mochiweb_skel).
-export([skelcopy/2]).
-include_lib("kernel/include/file.hrl").
skelcopy(DestDir, Name) ->
ok = ensuredir(DestDir),
LDst = case length(filename:dirname(DestDir)) of
handle case when returns " / "
0;
N ->
N + 1
end,
skelcopy(src(), DestDir, Name, LDst),
ok = file:make_symlink(
filename:join(filename:dirname(code:which(?MODULE)), ".."),
filename:join([DestDir, Name, "deps", "mochiweb-src"])).
src() ->
Dir = filename:dirname(code:which(?MODULE)),
filename:join(Dir, "../priv/skel").
skel() ->
"skel".
skelcopy(Src, DestDir, Name, LDst) ->
Dest = re:replace(filename:basename(Src), skel(), Name,
[global, {return, list}]),
case file:read_file_info(Src) of
{ok, #file_info{type=directory, mode=Mode}} ->
Dir = DestDir ++ "/" ++ Dest,
EDst = lists:nthtail(LDst, Dir),
ok = ensuredir(Dir),
ok = file:write_file_info(Dir, #file_info{mode=Mode}),
{ok, Files} = file:list_dir(Src),
io:format("~s/~n", [EDst]),
lists:foreach(fun ("." ++ _) -> ok;
(F) ->
skelcopy(filename:join(Src, F),
Dir,
Name,
LDst)
end,
Files),
ok;
{ok, #file_info{type=regular, mode=Mode}} ->
OutFile = filename:join(DestDir, Dest),
{ok, B} = file:read_file(Src),
S = re:replace(binary_to_list(B), skel(), Name,
[{return, list}, global]),
ok = file:write_file(OutFile, list_to_binary(S)),
ok = file:write_file_info(OutFile, #file_info{mode=Mode}),
io:format(" ~s~n", [filename:basename(Src)]),
ok;
{ok, _} ->
io:format("ignored source file: ~p~n", [Src]),
ok
end.
ensuredir(Dir) ->
case file:make_dir(Dir) of
ok ->
ok;
{error, eexist} ->
ok;
E ->
E
end.
|
4c395c0c1b7c48158860ff0d976d7d766f4c48b5a7d810d17f25151f417daf27 | korya/efuns | xbuffer.mli | (***********************************************************************)
(* *)
xlib for
(* *)
Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
(* *)
Copyright 1998 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
(* *)
(***********************************************************************)
val int_of_enum : 'a -> int
val enum_of_int : int -> 'a
val list_of_mask : int -> 'a
val mask_of_list : 'a list -> int
val setCard8 : string -> int -> int -> unit
val setCard16 : string -> int -> int -> unit
val setCard32 : string -> int -> int -> unit
val getCard8 : string -> int -> int
val getCard16 : string -> int -> int
val getCard32 : string -> int -> int
val setInt8 : string -> int -> int -> unit
val setInt16 : string -> int -> int -> unit
val setInt32 : string -> int -> int -> unit
val getInt8 : string -> int -> int
val getInt16 : string -> int -> int
val getInt32 : string -> int -> int
val setEnum8 : string -> int -> 'a -> unit
val setEnum16 : string -> int -> 'a -> unit
val setEnum32 : string -> int -> 'a -> unit
val getEnum8 : string -> int -> 'a
val getEnum16 : string -> int -> 'a
val getEnum32 : string -> int -> 'a
val string_blit : string -> int -> string -> int -> int -> unit
val setString : string -> int -> string -> unit
val getString : string -> int -> int -> string
val strLen : int -> int
val get_card32_list : string -> int -> int -> int list
val get_enum32_list : string -> int -> int -> 'a list
val set_int32_list : string -> int -> int list -> unit
val set_enum32_list : string -> int -> 'a list -> unit
val get_card32_array : string -> int -> int -> int array
val set_card32_array : string -> int -> int array -> unit
val newString : int -> string
val set_str_list : string -> int -> string list -> unit
val get_str_list : string -> int -> int -> string list
val setString16 : string -> int -> int array -> unit
val getXError : string -> Xtypes.errorEvent
val getXServerInfo : string -> Unix.file_descr -> int -> int -> int -> string
-> Xtypes.display
val getWindow : string -> int -> Xtypes.window
val setWindow : string -> int -> Xtypes.window -> unit
val getFont : string -> int -> Xtypes.window
val setFont : string -> int -> Xtypes.font -> unit
val getColormap : string -> int -> Xtypes.colormap
val setColormap : string -> int -> Xtypes.colormap -> unit
val getAtom : string -> int -> Xtypes.atom
val setAtom : string -> int -> Xtypes.atom -> unit
val getTime : string -> int -> Xtypes.time
val setTime : string -> int -> Xtypes.time -> unit | null | https://raw.githubusercontent.com/korya/efuns/78b21d9dff45b7eec764c63132c7a564f5367c30/inliner/tests/xbuffer.mli | ocaml | *********************************************************************
********************************************************************* | xlib for
Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
Copyright 1998 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
val int_of_enum : 'a -> int
val enum_of_int : int -> 'a
val list_of_mask : int -> 'a
val mask_of_list : 'a list -> int
val setCard8 : string -> int -> int -> unit
val setCard16 : string -> int -> int -> unit
val setCard32 : string -> int -> int -> unit
val getCard8 : string -> int -> int
val getCard16 : string -> int -> int
val getCard32 : string -> int -> int
val setInt8 : string -> int -> int -> unit
val setInt16 : string -> int -> int -> unit
val setInt32 : string -> int -> int -> unit
val getInt8 : string -> int -> int
val getInt16 : string -> int -> int
val getInt32 : string -> int -> int
val setEnum8 : string -> int -> 'a -> unit
val setEnum16 : string -> int -> 'a -> unit
val setEnum32 : string -> int -> 'a -> unit
val getEnum8 : string -> int -> 'a
val getEnum16 : string -> int -> 'a
val getEnum32 : string -> int -> 'a
val string_blit : string -> int -> string -> int -> int -> unit
val setString : string -> int -> string -> unit
val getString : string -> int -> int -> string
val strLen : int -> int
val get_card32_list : string -> int -> int -> int list
val get_enum32_list : string -> int -> int -> 'a list
val set_int32_list : string -> int -> int list -> unit
val set_enum32_list : string -> int -> 'a list -> unit
val get_card32_array : string -> int -> int -> int array
val set_card32_array : string -> int -> int array -> unit
val newString : int -> string
val set_str_list : string -> int -> string list -> unit
val get_str_list : string -> int -> int -> string list
val setString16 : string -> int -> int array -> unit
val getXError : string -> Xtypes.errorEvent
val getXServerInfo : string -> Unix.file_descr -> int -> int -> int -> string
-> Xtypes.display
val getWindow : string -> int -> Xtypes.window
val setWindow : string -> int -> Xtypes.window -> unit
val getFont : string -> int -> Xtypes.window
val setFont : string -> int -> Xtypes.font -> unit
val getColormap : string -> int -> Xtypes.colormap
val setColormap : string -> int -> Xtypes.colormap -> unit
val getAtom : string -> int -> Xtypes.atom
val setAtom : string -> int -> Xtypes.atom -> unit
val getTime : string -> int -> Xtypes.time
val setTime : string -> int -> Xtypes.time -> unit |
aa5a0313fbf1823e821ecdd2025bfe8947c868ca72c79093987810e6bb47631c | larcenists/larceny | 114.constructors.scm | SRFI 114 : Comparators
Copyright ( C ) 2013 . All Rights Reserved .
;;
;; Permission is hereby granted, free of charge, to any person
;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without restriction ,
;; including without limitation the rights to use, copy, modify, merge,
publish , distribute , sublicense , and/or sell copies of the Software ,
and to permit persons to whom the Software is furnished to do so ,
;; subject to the following conditions:
;;
;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
;; IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
;; TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
;; SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
;;;; Standard comparators and comparator constructors
;;; Standard atomic comparators
(define (boolean-comparison a b)
(cond
((and a b) 0)
(a 1)
(b -1)
(else 0)))
; now imported from ( srfi 128 )
(define (boolean-hash obj) (if obj 1 0))
(define boolean-comparator
(make-comparator boolean? boolean=? boolean-comparison boolean-hash))
(define char-comparison (make-comparison=/< char=? char<?))
; now imported from ( srfi 128 )
(define (char-hash obj) (abs (char->integer obj)))
(define char-comparator
(make-comparator char? char=? char-comparison char-hash))
(define char-ci-comparison (make-comparison=/< char-ci=? char-ci<?))
; now imported from ( srfi 128 )
(define (char-ci-hash obj) (abs (char->integer (char-foldcase obj))))
(define char-ci-comparator
(make-comparator char? char-ci=? char-ci-comparison char-ci-hash))
(define string-comparison (make-comparison=/< string=? string<?))
(define string-ci-comparison (make-comparison=/< string-ci=? string-ci<?))
(define (symbol<? a b) (string<? (symbol->string a) (symbol->string b)))
(define symbol-comparison (make-comparison=/< symbol=? symbol<?))
Comparison procedure for real numbers only
(define (real-comparison a b)
(cond
((< a b) -1)
((> a b) 1)
(else 0)))
Comparison procedure for non - real numbers .
(define (complex-comparison a b)
(let ((real-result (real-comparison (real-part a) (real-part b))))
(if (= real-result 0)
(real-comparison (imag-part a) (imag-part b))
real-result)))
;;; FIXME: this was broken in the reference implementation
;(define (number-hash obj) (exact (abs obj)))
; now imported from ( srfi 128 )
(define (number-hash z)
(cond ((exact-integer? z)
(abs z))
((and (exact? z) (real? z))
(+ (numerator z) (denominator z)))
((not (real? z))
(+ (number-hash (real-part z))
(* 3 (number-hash (imag-part z)))))
((nan? z)
10286062)
((infinite? z)
(if (= z +inf.0)
11610730
4191912))
(else
(+ 861625
(number-hash (exact z))))))
(define number-comparator
(make-comparator number? = complex-comparison number-hash))
(define complex-comparator
(make-comparator complex? = complex-comparison number-hash))
(define real-comparator
(make-comparator real? = real-comparison number-hash))
(define rational-comparator
(make-comparator rational? = real-comparison number-hash))
(define integer-comparator
(make-comparator integer? = real-comparison number-hash))
(define exact-integer-comparator
(make-comparator exact-integer? = real-comparison number-hash))
;;; Inexact real comparator
;; Test procedure for inexact reals
(define (inexact-real? obj) (and (number? obj) (inexact? obj) (real? obj)))
;; Return a number appropriately rounded to epsilon
(define (rounded-to x epsilon rounding)
(let ((quo (/ x epsilon)))
(cond
((procedure? rounding) (rounding x epsilon))
((eq? rounding 'round) (round quo))
((eq? rounding 'ceiling) (ceiling quo))
((eq? rounding 'floor) (floor quo))
((eq? rounding 'truncate) (truncate quo))
(else (error "invalid rounding specification" rounding)))))
Returns result of comparing a NaN with a non - NaN
(define (nan-comparison nan-handling which other)
(cond
((procedure? nan-handling)
(nan-handling other))
((eq? nan-handling 'error)
(error "Attempt to compare NaN with non-NaN"))
((eq? nan-handling 'min)
(if (eq? which 'a-nan) -1 1))
((eq? nan-handling 'max)
(if (eq? which 'a-nan) 1 -1))
(else
(error "Invalid nan-handling specification"))))
(define (make-inexact-real-comparison epsilon rounding nan-handling)
(lambda (a b)
(let ((a-nan? (nan? a)) (b-nan? (nan? b)))
(cond
((and a-nan? b-nan?) 0)
(a-nan? (nan-comparison nan-handling 'a-nan b))
(b-nan? (nan-comparison nan-handling 'b-nan a))
(else (real-comparison
(rounded-to a epsilon rounding)
(rounded-to b epsilon rounding)))))))
Return 0 for NaN , number - hash otherwise
(define (make-inexact-real-hash epsilon rounding)
(lambda (obj)
(if (nan? obj) 0 (number-hash (rounded-to obj epsilon rounding)))))
(define (make-inexact-real-comparator epsilon rounding nan-handling)
(make-comparator
inexact-real?
#t
(make-inexact-real-comparison epsilon rounding nan-handling)
(make-inexact-real-hash epsilon rounding)))
Sequence comparator constructors and comparators
The hash functions are based on djb2 , but
modulo 2 ^ 20 instead of 2 ^ 32 in hopes of sticking to fixnums .
(define limit (expt 2 20))
Makes a comparison procedure that works
(define (make-listwise-comparison comparison null? car cdr)
(letrec ((proc
(lambda (a b)
(let ((a-null? (null? a)) (b-null? (null? b)))
(cond
((and a-null? b-null?) 0)
(a-null? -1)
(b-null? 1)
(else (let ((result (comparison (car a) (car b))))
(if (= result 0) (proc (cdr a) (cdr b)) result))))))))
proc))
Makes a hash function that works
(define (make-listwise-hash hash null? car cdr)
(lambda (obj)
(let loop ((obj obj) (result 5381))
(if (null? obj)
0
(let* ((prod (modulo (* result 33) limit))
(sum (+ prod (hash (car obj)))))
(loop (cdr obj) sum))))))
Makes a comparison procedure that works vectorwise
;;
FIXME : the reference implementation blew up when comparing two empty vectors
(define (make-vectorwise-comparison comparison length ref)
(lambda (a b)
(let* ((a-length (length a))
(b-length (length b)))
(cond
((< a-length b-length) -1)
((> a-length b-length) 1)
(else
(let loop ((index 0))
(if (= index a-length)
0
(let ((result (comparison (ref a index) (ref b index))))
(if (= result 0)
(loop (+ index 1))
result)))))))))
Makes a hash function that works vectorwise
;;
;; FIXME: the reference implementation ignored element 0
(define (make-vectorwise-hash hash length ref)
(lambda (obj)
(let loop ((index (- (length obj) 1)) (result 5381))
(if (< index 0)
result
(let* ((prod (modulo (* result 33) limit))
(sum (modulo (+ prod (hash (ref obj index))) limit)))
(loop (- index 1) sum))))))
; now imported from ( srfi 128 )
(define string-hash
(make-vectorwise-hash char-hash string-length string-ref))
(define string-comparator
(make-comparator string? string=? string-comparison string-hash))
; now imported from ( srfi 128 )
(define (string-ci-hash obj) (string-hash (string-foldcase obj)))
(define string-ci-comparator
(make-comparator string? string-ci=? string-ci-comparison string-ci-hash))
; now imported from ( srfi 128 )
(define (symbol-hash obj) (string-hash (symbol->string obj)))
(define symbol-comparator
(make-comparator symbol? symbol=? symbol-comparison symbol-hash))
(define (make-listwise-comparator test comparator null? car cdr)
(make-comparator
test
#t
(make-listwise-comparison
(comparator-comparison-procedure comparator) null? car cdr)
(make-listwise-hash
(comparator-hash-function comparator) null? car cdr)))
(define (make-vectorwise-comparator test comparator length ref)
(make-comparator
test
#t
(make-vectorwise-comparison
(comparator-comparison-procedure comparator) length ref)
(make-vectorwise-hash
(comparator-hash-function comparator) length ref)))
; now imported from ( srfi 128 ) with a different number of arguments
(define (make-list-comparator comparator)
(make-listwise-comparator
(lambda (obj) (or (null? obj) (pair? obj)))
comparator null? car cdr))
SRFI 128 adds four more formal parameters to make - list - comparator
;(define list-comparator (make-list-comparator default-comparator))
(define list-comparator
(make-list-comparator default-comparator
(lambda (obj) (or (null? obj) (pair? obj)))
null?
car
cdr))
; now imported from ( srfi 128 ) with a different number of arguments
(define (make-vector-comparator comparator)
(make-vectorwise-comparator vector? comparator vector-length vector-ref))
SRFI 128 adds three more formal parameters to make - vector - comparator
;(define vector-comparator (make-vector-comparator default-comparator))
(define vector-comparator
(make-vector-comparator default-comparator vector? vector-length vector-ref))
(define vector-comparison (comparator-comparison-procedure vector-comparator))
(define vector-hash (comparator-hash-function vector-comparator))
(define (make-bytevector-comparator comparator)
(make-vectorwise-comparator
bytevector? comparator bytevector-length bytevector-u8-ref))
(define bytevector-comparator (make-bytevector-comparator default-comparator))
(define bytevector-comparison
(comparator-comparison-procedure bytevector-comparator))
(define bytevector-hash
(comparator-hash-function bytevector-comparator))
;;; Pair comparator constructors
(define (make-car-comparator comparator)
(make-comparator
pair?
#t
(lambda (a b)
(comparator-compare comparator (car a) (car b)))
(lambda (obj) (comparator-hash-function comparator))))
(define (make-cdr-comparator comparator)
(make-comparator
pair?
#t
(lambda (a b)
(comparator-compare comparator (cdr a) (cdr b)))
(lambda (obj) (comparator-hash comparator obj))))
(define (make-pair-comparison car-comparator cdr-comparator)
(lambda (a b)
(let ((result (comparator-compare car-comparator (car a) (car b))))
(if (= result 0)
(comparator-compare cdr-comparator (cdr a) (cdr b))
result))))
(define pair-comparison
(make-pair-comparison default-comparator default-comparator))
(define (make-pair-hash car-comparator cdr-comparator)
(lambda (obj)
(+
(comparator-hash car-comparator (car obj))
(comparator-hash cdr-comparator (cdr obj)))))
; now imported from ( srfi 128 )
(define (make-pair-comparator car-comparator cdr-comparator)
(make-comparator
pair?
#t
(make-pair-comparison car-comparator cdr-comparator)
(make-pair-hash car-comparator cdr-comparator)))
(define pair-comparator
(make-pair-comparator default-comparator default-comparator))
(define pair-hash (comparator-hash-function pair-comparator))
;; Compute type index for inexact list comparisons
(define (improper-list-type obj)
(cond
((null? obj) 0)
((pair? obj) 1)
(else 2)))
(define (make-improper-list-comparison comparator)
(let ((pair-comparison (make-pair-comparison comparator comparator)))
(lambda (a b)
(let* ((a-type (improper-list-type a))
(b-type (improper-list-type b))
(result (real-comparison a-type b-type)))
(cond
((not (= result 0)) result)
((null? a) 0)
((pair? a) (pair-comparison a b))
(else (comparator-compare comparator a b)))))))
(define (make-improper-list-hash comparator)
(lambda (obj)
(cond
((null? obj) 0)
((pair? obj) (+ (comparator-hash comparator (car obj))
(comparator-hash comparator (cdr obj))))
(else (comparator-hash comparator obj)))))
(define (make-improper-list-comparator comparator)
(make-comparator
#t
#t
(make-improper-list-comparison comparator)
(make-improper-list-hash comparator)))
;;; Wrapped equality predicates
;;; These comparators don't have comparison functions.
#|
(define eq-comparator
(make-comparator
#t
eq?
#f
default-hash-function))
(define eqv-comparator
(make-comparator
#t
eqv?
#f
default-hash-function))
(define equal-comparator
(make-comparator
#t
equal?
#f
default-hash-function))
|#
(define eq-comparator (make-eq-comparator))
(define eqv-comparator (make-eqv-comparator))
(define equal-comparator (make-equal-comparator))
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/lib/SRFI/srfi/114.constructors.scm | scheme |
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
including without limitation the rights to use, copy, modify, merge,
subject to the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Standard comparators and comparator constructors
Standard atomic comparators
now imported from ( srfi 128 )
now imported from ( srfi 128 )
now imported from ( srfi 128 )
FIXME: this was broken in the reference implementation
(define (number-hash obj) (exact (abs obj)))
now imported from ( srfi 128 )
Inexact real comparator
Test procedure for inexact reals
Return a number appropriately rounded to epsilon
FIXME: the reference implementation ignored element 0
now imported from ( srfi 128 )
now imported from ( srfi 128 )
now imported from ( srfi 128 )
now imported from ( srfi 128 ) with a different number of arguments
(define list-comparator (make-list-comparator default-comparator))
now imported from ( srfi 128 ) with a different number of arguments
(define vector-comparator (make-vector-comparator default-comparator))
Pair comparator constructors
now imported from ( srfi 128 )
Compute type index for inexact list comparisons
Wrapped equality predicates
These comparators don't have comparison functions.
(define eq-comparator
(make-comparator
#t
eq?
#f
default-hash-function))
(define eqv-comparator
(make-comparator
#t
eqv?
#f
default-hash-function))
(define equal-comparator
(make-comparator
#t
equal?
#f
default-hash-function))
| SRFI 114 : Comparators
Copyright ( C ) 2013 . All Rights Reserved .
files ( the " Software " ) , to deal in the Software without restriction ,
publish , distribute , sublicense , and/or sell copies of the Software ,
and to permit persons to whom the Software is furnished to do so ,
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
(define (boolean-comparison a b)
(cond
((and a b) 0)
(a 1)
(b -1)
(else 0)))
(define (boolean-hash obj) (if obj 1 0))
(define boolean-comparator
(make-comparator boolean? boolean=? boolean-comparison boolean-hash))
(define char-comparison (make-comparison=/< char=? char<?))
(define (char-hash obj) (abs (char->integer obj)))
(define char-comparator
(make-comparator char? char=? char-comparison char-hash))
(define char-ci-comparison (make-comparison=/< char-ci=? char-ci<?))
(define (char-ci-hash obj) (abs (char->integer (char-foldcase obj))))
(define char-ci-comparator
(make-comparator char? char-ci=? char-ci-comparison char-ci-hash))
(define string-comparison (make-comparison=/< string=? string<?))
(define string-ci-comparison (make-comparison=/< string-ci=? string-ci<?))
(define (symbol<? a b) (string<? (symbol->string a) (symbol->string b)))
(define symbol-comparison (make-comparison=/< symbol=? symbol<?))
Comparison procedure for real numbers only
(define (real-comparison a b)
(cond
((< a b) -1)
((> a b) 1)
(else 0)))
Comparison procedure for non - real numbers .
(define (complex-comparison a b)
(let ((real-result (real-comparison (real-part a) (real-part b))))
(if (= real-result 0)
(real-comparison (imag-part a) (imag-part b))
real-result)))
(define (number-hash z)
(cond ((exact-integer? z)
(abs z))
((and (exact? z) (real? z))
(+ (numerator z) (denominator z)))
((not (real? z))
(+ (number-hash (real-part z))
(* 3 (number-hash (imag-part z)))))
((nan? z)
10286062)
((infinite? z)
(if (= z +inf.0)
11610730
4191912))
(else
(+ 861625
(number-hash (exact z))))))
(define number-comparator
(make-comparator number? = complex-comparison number-hash))
(define complex-comparator
(make-comparator complex? = complex-comparison number-hash))
(define real-comparator
(make-comparator real? = real-comparison number-hash))
(define rational-comparator
(make-comparator rational? = real-comparison number-hash))
(define integer-comparator
(make-comparator integer? = real-comparison number-hash))
(define exact-integer-comparator
(make-comparator exact-integer? = real-comparison number-hash))
(define (inexact-real? obj) (and (number? obj) (inexact? obj) (real? obj)))
(define (rounded-to x epsilon rounding)
(let ((quo (/ x epsilon)))
(cond
((procedure? rounding) (rounding x epsilon))
((eq? rounding 'round) (round quo))
((eq? rounding 'ceiling) (ceiling quo))
((eq? rounding 'floor) (floor quo))
((eq? rounding 'truncate) (truncate quo))
(else (error "invalid rounding specification" rounding)))))
Returns result of comparing a NaN with a non - NaN
(define (nan-comparison nan-handling which other)
(cond
((procedure? nan-handling)
(nan-handling other))
((eq? nan-handling 'error)
(error "Attempt to compare NaN with non-NaN"))
((eq? nan-handling 'min)
(if (eq? which 'a-nan) -1 1))
((eq? nan-handling 'max)
(if (eq? which 'a-nan) 1 -1))
(else
(error "Invalid nan-handling specification"))))
(define (make-inexact-real-comparison epsilon rounding nan-handling)
(lambda (a b)
(let ((a-nan? (nan? a)) (b-nan? (nan? b)))
(cond
((and a-nan? b-nan?) 0)
(a-nan? (nan-comparison nan-handling 'a-nan b))
(b-nan? (nan-comparison nan-handling 'b-nan a))
(else (real-comparison
(rounded-to a epsilon rounding)
(rounded-to b epsilon rounding)))))))
Return 0 for NaN , number - hash otherwise
(define (make-inexact-real-hash epsilon rounding)
(lambda (obj)
(if (nan? obj) 0 (number-hash (rounded-to obj epsilon rounding)))))
(define (make-inexact-real-comparator epsilon rounding nan-handling)
(make-comparator
inexact-real?
#t
(make-inexact-real-comparison epsilon rounding nan-handling)
(make-inexact-real-hash epsilon rounding)))
Sequence comparator constructors and comparators
The hash functions are based on djb2 , but
modulo 2 ^ 20 instead of 2 ^ 32 in hopes of sticking to fixnums .
(define limit (expt 2 20))
Makes a comparison procedure that works
(define (make-listwise-comparison comparison null? car cdr)
(letrec ((proc
(lambda (a b)
(let ((a-null? (null? a)) (b-null? (null? b)))
(cond
((and a-null? b-null?) 0)
(a-null? -1)
(b-null? 1)
(else (let ((result (comparison (car a) (car b))))
(if (= result 0) (proc (cdr a) (cdr b)) result))))))))
proc))
Makes a hash function that works
(define (make-listwise-hash hash null? car cdr)
(lambda (obj)
(let loop ((obj obj) (result 5381))
(if (null? obj)
0
(let* ((prod (modulo (* result 33) limit))
(sum (+ prod (hash (car obj)))))
(loop (cdr obj) sum))))))
Makes a comparison procedure that works vectorwise
FIXME : the reference implementation blew up when comparing two empty vectors
(define (make-vectorwise-comparison comparison length ref)
(lambda (a b)
(let* ((a-length (length a))
(b-length (length b)))
(cond
((< a-length b-length) -1)
((> a-length b-length) 1)
(else
(let loop ((index 0))
(if (= index a-length)
0
(let ((result (comparison (ref a index) (ref b index))))
(if (= result 0)
(loop (+ index 1))
result)))))))))
Makes a hash function that works vectorwise
(define (make-vectorwise-hash hash length ref)
(lambda (obj)
(let loop ((index (- (length obj) 1)) (result 5381))
(if (< index 0)
result
(let* ((prod (modulo (* result 33) limit))
(sum (modulo (+ prod (hash (ref obj index))) limit)))
(loop (- index 1) sum))))))
(define string-hash
(make-vectorwise-hash char-hash string-length string-ref))
(define string-comparator
(make-comparator string? string=? string-comparison string-hash))
(define (string-ci-hash obj) (string-hash (string-foldcase obj)))
(define string-ci-comparator
(make-comparator string? string-ci=? string-ci-comparison string-ci-hash))
(define (symbol-hash obj) (string-hash (symbol->string obj)))
(define symbol-comparator
(make-comparator symbol? symbol=? symbol-comparison symbol-hash))
(define (make-listwise-comparator test comparator null? car cdr)
(make-comparator
test
#t
(make-listwise-comparison
(comparator-comparison-procedure comparator) null? car cdr)
(make-listwise-hash
(comparator-hash-function comparator) null? car cdr)))
(define (make-vectorwise-comparator test comparator length ref)
(make-comparator
test
#t
(make-vectorwise-comparison
(comparator-comparison-procedure comparator) length ref)
(make-vectorwise-hash
(comparator-hash-function comparator) length ref)))
(define (make-list-comparator comparator)
(make-listwise-comparator
(lambda (obj) (or (null? obj) (pair? obj)))
comparator null? car cdr))
SRFI 128 adds four more formal parameters to make - list - comparator
(define list-comparator
(make-list-comparator default-comparator
(lambda (obj) (or (null? obj) (pair? obj)))
null?
car
cdr))
(define (make-vector-comparator comparator)
(make-vectorwise-comparator vector? comparator vector-length vector-ref))
SRFI 128 adds three more formal parameters to make - vector - comparator
(define vector-comparator
(make-vector-comparator default-comparator vector? vector-length vector-ref))
(define vector-comparison (comparator-comparison-procedure vector-comparator))
(define vector-hash (comparator-hash-function vector-comparator))
(define (make-bytevector-comparator comparator)
(make-vectorwise-comparator
bytevector? comparator bytevector-length bytevector-u8-ref))
(define bytevector-comparator (make-bytevector-comparator default-comparator))
(define bytevector-comparison
(comparator-comparison-procedure bytevector-comparator))
(define bytevector-hash
(comparator-hash-function bytevector-comparator))
(define (make-car-comparator comparator)
(make-comparator
pair?
#t
(lambda (a b)
(comparator-compare comparator (car a) (car b)))
(lambda (obj) (comparator-hash-function comparator))))
(define (make-cdr-comparator comparator)
(make-comparator
pair?
#t
(lambda (a b)
(comparator-compare comparator (cdr a) (cdr b)))
(lambda (obj) (comparator-hash comparator obj))))
(define (make-pair-comparison car-comparator cdr-comparator)
(lambda (a b)
(let ((result (comparator-compare car-comparator (car a) (car b))))
(if (= result 0)
(comparator-compare cdr-comparator (cdr a) (cdr b))
result))))
(define pair-comparison
(make-pair-comparison default-comparator default-comparator))
(define (make-pair-hash car-comparator cdr-comparator)
(lambda (obj)
(+
(comparator-hash car-comparator (car obj))
(comparator-hash cdr-comparator (cdr obj)))))
(define (make-pair-comparator car-comparator cdr-comparator)
(make-comparator
pair?
#t
(make-pair-comparison car-comparator cdr-comparator)
(make-pair-hash car-comparator cdr-comparator)))
(define pair-comparator
(make-pair-comparator default-comparator default-comparator))
(define pair-hash (comparator-hash-function pair-comparator))
(define (improper-list-type obj)
(cond
((null? obj) 0)
((pair? obj) 1)
(else 2)))
(define (make-improper-list-comparison comparator)
(let ((pair-comparison (make-pair-comparison comparator comparator)))
(lambda (a b)
(let* ((a-type (improper-list-type a))
(b-type (improper-list-type b))
(result (real-comparison a-type b-type)))
(cond
((not (= result 0)) result)
((null? a) 0)
((pair? a) (pair-comparison a b))
(else (comparator-compare comparator a b)))))))
(define (make-improper-list-hash comparator)
(lambda (obj)
(cond
((null? obj) 0)
((pair? obj) (+ (comparator-hash comparator (car obj))
(comparator-hash comparator (cdr obj))))
(else (comparator-hash comparator obj)))))
(define (make-improper-list-comparator comparator)
(make-comparator
#t
#t
(make-improper-list-comparison comparator)
(make-improper-list-hash comparator)))
(define eq-comparator (make-eq-comparator))
(define eqv-comparator (make-eqv-comparator))
(define equal-comparator (make-equal-comparator))
|
2f74e07d556144eed3d8fa94320abaeb141b7384346b6e07ba2078751c6bb800 | zk/clojuredocs | static.clj | (ns clojuredocs.search.static)
(def clojure-namespaces
'[clojure.core
clojure.core.async
clojure.core.logic
clojure.core.logic.fd
clojure.core.logic.pldb
clojure.core.protocols
clojure.core.reducers
clojure.core.server
clojure.data
clojure.datafy
clojure.edn
clojure.inspector
clojure.instant
clojure.java.browse
clojure.java.io
clojure.java.javadoc
clojure.java.shell
clojure.main
clojure.pprint
clojure.reflect
clojure.repl
clojure.set
clojure.spec.alpha
clojure.stacktrace
clojure.string
clojure.template
clojure.test
clojure.test.junit
clojure.test.tap
clojure.walk
clojure.xml
clojure.zip])
(def special-forms
(->> [{:name 'def
:ns "clojure.core"
:doc "Creates and interns or locates a global var with the name of symbol and a
namespace of the value of the current namespace (*ns*). See
for more information."}
{:name 'if
:ns "clojure.core"
:doc "Evaluates test."}
{:name 'do
:ns "clojure.core"
:doc "Evaluates the expressions in order and returns the value of the last. If no
expressions are supplied, returns nil. See
for more information."}
{:name 'quote
:ns "clojure.core"
:doc "Yields the unevaluated form. See for more
information."}
{:name 'var
:ns "clojure.core"
:doc "The symbol must resolve to a var, and the Var object itself (not its value)
is returned. The reader macro #'x expands to (var x). See
for more information."}
{:name 'recur
:ns "clojure.core"
:doc "Evaluates the exprs in order, then, in parallel, rebinds the bindings of
the recursion point to the values of the exprs. See
for more information."}
{:name 'throw
:ns "clojure.core"
:doc "The expr is evaluated and thrown, therefore it should yield an instance of
some derivee of Throwable. Please see #throw"}
{:name 'try
:ns "clojure.core"
:doc "The exprs are evaluated and, if no exceptions occur, the value of the last
is returned. If an exception occurs and catch clauses are provided, each is
examined in turn and the first for which the thrown exception is an instance
of the named class is considered a matching catch clause. If there is a
matching catch clause, its exprs are evaluated in a context in which name is
bound to the thrown exception, and the value of the last is the return value
of the function. If there is no matching catch clause, the exception
propagates out of the function. Before returning, normally or abnormally,
any finally exprs will be evaluated for their side effects. See
for more information."}
{:name 'catch
:ns "clojure.core"
:doc "The exprs are evaluated and, if no exceptions occur, the value of the last
is returned. If an exception occurs and catch clauses are provided, each is
examined in turn and the first for which the thrown exception is an instance
of the named class is considered a matching catch clause. If there is a
matching catch clause, its exprs are evaluated in a context in which name is
bound to the thrown exception, and the value of the last is the return value
of the function. If there is no matching catch clause, the exception
propagates out of the function. Before returning, normally or abnormally,
any finally exprs will be evaluated for their side effects. See
for more information."}
{:name 'finally
:ns "clojure.core"
:doc "The exprs are evaluated and, if no exceptions occur, the value of the last
is returned. If an exception occurs and catch clauses are provided, each is
examined in turn and the first for which the thrown exception is an instance
of the named class is considered a matching catch clause. If there is a
matching catch clause, its exprs are evaluated in a context in which name is
bound to the thrown exception, and the value of the last is the return value
of the function. If there is no matching catch clause, the exception
propagates out of the function. Before returning, normally or abnormally,
any finally exprs will be evaluated for their side effects. See
for more information."}
{:name '.
:ns "clojure.core"
:doc "The '.' special form is the basis for access to Java. It can be considered
a member-access operator, and/or read as 'in the scope of'. See
for more information."}
{:name 'set!
:ns "clojure.core"
:doc "Assignment special form. When the first operand is a field member access
form, the assignment is to the corresponding field. If it is an instance
field, the instance expr will be evaluated, then the expr. In all cases
the value of expr is returned. Note - you cannot assign to function params
or local bindings. Only Java fields, Vars, Refs and Agents are mutable in
Clojure. See for more information."}
{:name 'monitor-enter
:ns "clojure.core"
:doc "A synchronization primitive that should be avoided in user code. Use the
locking macro. See for more information."}
{:name 'monitor-exit
:ns "clojure.core"
:doc "A synchronization primitive that should be avoided in user code. Use the
locking macro. See for more information."}
{:name 'new
:ns "clojure.core"
:doc "Instantiate a class. See #new for
more information."}]
(map #(assoc % :type "special-form"))))
(def concept-pages
[{:name "Destructuring"
:keywords "destructuring destructure destruct"
:href "/concepts/destructuring"
:desc "Destructuring allows you to assign names to values based on the structure of a parameter."}
{:name "Functional Programming"
:keywords "functional programming"
:href "/concepts/functional-programming"
:desc "Rooted in lambda calculus, functional programming is a the style of building programs in a declarative way favoring composition of first-class, pure, and higher-order functions, immutable data structures, laziness, and the elimination of side effects. "}])
(def searchable-pages
(->> [{:name "Quick Reference"
:keywords "help, getting started, quickref, quick reference"
:href "/quickref"
:desc "Clojure functions broken down by conceptual area (string manipulation, collections, etc)."}
{:name "Laziness in Clojure"
:keywords "lazy laziness lazyness sequences seq lazy evaluation"
:href "/concepts/lazyness"
:desc "Laziness is the deferred or delayed execution of some bit of code, opposite of eager or immediate evaluation. Laziness is used Clojure to enable execution composition and solutions to problems that involve infinite sequences. FIX THIS"}
{:name "Read-Eval-Print Loop (REPL)"
:keywords "repl read eval print loop"
:href "/concepts/repl"
:desc "A read–eval–print loop (REPL), also known as an interactive toplevel or language shell, is a simple, interactive computer programming environment that takes single user inputs (i.e. single expressions), evaluates them, and returns the result to the user; a program written in a REPL environment is executed piecewise. The term is most usually used to refer to programming interfaces similar to the classic Lisp interactive environment. Common examples include command line shells and similar environments for programming languages."}
{:name "Thrush Operators (->, ->>)"
:keywords "thrush operators -> ->> as->"
:href "/concepts/thrush"
:desc "-Thrush-Operator/"}
{:name "Recursion"
:keywords "recursion loop recur trampoline"
:href ""
:desc "Recursion is the process of repeating items in a self-similar way. For instance, when the surfaces of two mirrors are exactly parallel with each other the nested images that occur are a form of infinite recursion."}]
(concat concept-pages)
(map #(assoc % :type "page"))))
| null | https://raw.githubusercontent.com/zk/clojuredocs/28f5ee500f4349039ee81c70d7ac40acbb19e5d8/src/clj/clojuredocs/search/static.clj | clojure | (ns clojuredocs.search.static)
(def clojure-namespaces
'[clojure.core
clojure.core.async
clojure.core.logic
clojure.core.logic.fd
clojure.core.logic.pldb
clojure.core.protocols
clojure.core.reducers
clojure.core.server
clojure.data
clojure.datafy
clojure.edn
clojure.inspector
clojure.instant
clojure.java.browse
clojure.java.io
clojure.java.javadoc
clojure.java.shell
clojure.main
clojure.pprint
clojure.reflect
clojure.repl
clojure.set
clojure.spec.alpha
clojure.stacktrace
clojure.string
clojure.template
clojure.test
clojure.test.junit
clojure.test.tap
clojure.walk
clojure.xml
clojure.zip])
(def special-forms
(->> [{:name 'def
:ns "clojure.core"
:doc "Creates and interns or locates a global var with the name of symbol and a
namespace of the value of the current namespace (*ns*). See
for more information."}
{:name 'if
:ns "clojure.core"
:doc "Evaluates test."}
{:name 'do
:ns "clojure.core"
:doc "Evaluates the expressions in order and returns the value of the last. If no
expressions are supplied, returns nil. See
for more information."}
{:name 'quote
:ns "clojure.core"
:doc "Yields the unevaluated form. See for more
information."}
{:name 'var
:ns "clojure.core"
:doc "The symbol must resolve to a var, and the Var object itself (not its value)
is returned. The reader macro #'x expands to (var x). See
for more information."}
{:name 'recur
:ns "clojure.core"
:doc "Evaluates the exprs in order, then, in parallel, rebinds the bindings of
the recursion point to the values of the exprs. See
for more information."}
{:name 'throw
:ns "clojure.core"
:doc "The expr is evaluated and thrown, therefore it should yield an instance of
some derivee of Throwable. Please see #throw"}
{:name 'try
:ns "clojure.core"
:doc "The exprs are evaluated and, if no exceptions occur, the value of the last
is returned. If an exception occurs and catch clauses are provided, each is
examined in turn and the first for which the thrown exception is an instance
of the named class is considered a matching catch clause. If there is a
matching catch clause, its exprs are evaluated in a context in which name is
bound to the thrown exception, and the value of the last is the return value
of the function. If there is no matching catch clause, the exception
propagates out of the function. Before returning, normally or abnormally,
any finally exprs will be evaluated for their side effects. See
for more information."}
{:name 'catch
:ns "clojure.core"
:doc "The exprs are evaluated and, if no exceptions occur, the value of the last
is returned. If an exception occurs and catch clauses are provided, each is
examined in turn and the first for which the thrown exception is an instance
of the named class is considered a matching catch clause. If there is a
matching catch clause, its exprs are evaluated in a context in which name is
bound to the thrown exception, and the value of the last is the return value
of the function. If there is no matching catch clause, the exception
propagates out of the function. Before returning, normally or abnormally,
any finally exprs will be evaluated for their side effects. See
for more information."}
{:name 'finally
:ns "clojure.core"
:doc "The exprs are evaluated and, if no exceptions occur, the value of the last
is returned. If an exception occurs and catch clauses are provided, each is
examined in turn and the first for which the thrown exception is an instance
of the named class is considered a matching catch clause. If there is a
matching catch clause, its exprs are evaluated in a context in which name is
bound to the thrown exception, and the value of the last is the return value
of the function. If there is no matching catch clause, the exception
propagates out of the function. Before returning, normally or abnormally,
any finally exprs will be evaluated for their side effects. See
for more information."}
{:name '.
:ns "clojure.core"
:doc "The '.' special form is the basis for access to Java. It can be considered
a member-access operator, and/or read as 'in the scope of'. See
for more information."}
{:name 'set!
:ns "clojure.core"
:doc "Assignment special form. When the first operand is a field member access
form, the assignment is to the corresponding field. If it is an instance
field, the instance expr will be evaluated, then the expr. In all cases
the value of expr is returned. Note - you cannot assign to function params
or local bindings. Only Java fields, Vars, Refs and Agents are mutable in
Clojure. See for more information."}
{:name 'monitor-enter
:ns "clojure.core"
:doc "A synchronization primitive that should be avoided in user code. Use the
locking macro. See for more information."}
{:name 'monitor-exit
:ns "clojure.core"
:doc "A synchronization primitive that should be avoided in user code. Use the
locking macro. See for more information."}
{:name 'new
:ns "clojure.core"
:doc "Instantiate a class. See #new for
more information."}]
(map #(assoc % :type "special-form"))))
(def concept-pages
[{:name "Destructuring"
:keywords "destructuring destructure destruct"
:href "/concepts/destructuring"
:desc "Destructuring allows you to assign names to values based on the structure of a parameter."}
{:name "Functional Programming"
:keywords "functional programming"
:href "/concepts/functional-programming"
:desc "Rooted in lambda calculus, functional programming is a the style of building programs in a declarative way favoring composition of first-class, pure, and higher-order functions, immutable data structures, laziness, and the elimination of side effects. "}])
(def searchable-pages
(->> [{:name "Quick Reference"
:keywords "help, getting started, quickref, quick reference"
:href "/quickref"
:desc "Clojure functions broken down by conceptual area (string manipulation, collections, etc)."}
{:name "Laziness in Clojure"
:keywords "lazy laziness lazyness sequences seq lazy evaluation"
:href "/concepts/lazyness"
:desc "Laziness is the deferred or delayed execution of some bit of code, opposite of eager or immediate evaluation. Laziness is used Clojure to enable execution composition and solutions to problems that involve infinite sequences. FIX THIS"}
{:name "Read-Eval-Print Loop (REPL)"
:keywords "repl read eval print loop"
:href "/concepts/repl"
:desc "A read–eval–print loop (REPL), also known as an interactive toplevel or language shell, is a simple, interactive computer programming environment that takes single user inputs (i.e. single expressions), evaluates them, and returns the result to the user; a program written in a REPL environment is executed piecewise. The term is most usually used to refer to programming interfaces similar to the classic Lisp interactive environment. Common examples include command line shells and similar environments for programming languages."}
{:name "Thrush Operators (->, ->>)"
:keywords "thrush operators -> ->> as->"
:href "/concepts/thrush"
:desc "-Thrush-Operator/"}
{:name "Recursion"
:keywords "recursion loop recur trampoline"
:href ""
:desc "Recursion is the process of repeating items in a self-similar way. For instance, when the surfaces of two mirrors are exactly parallel with each other the nested images that occur are a form of infinite recursion."}]
(concat concept-pages)
(map #(assoc % :type "page"))))
| |
1bb00a78d30da5052aa0c9cae211ee12ae227b7c1b48a81aa78254f962553a77 | weavejester/ring-reload-modified | nsdeps.clj | (ns ring.util.nsdeps
"Parsing namespace declarations for dependency information."
(:use [clojure.set :only (union)]))
(defn- deps-from-libspec [prefix form]
(cond (list? form) (apply union (map (fn [f] (deps-from-libspec
(symbol (str (when prefix (str prefix "."))
(first form)))
f))
(rest form)))
(vector? form) (deps-from-libspec prefix (first form))
(symbol? form) #{(symbol (str (when prefix (str prefix ".")) form))}
(keyword? form) #{}
:else (throw (IllegalArgumentException.
(pr-str "Unparsable namespace form:" form)))))
(defn- deps-from-ns-form [form]
(when (and (list? form)
(contains? #{:use :require} (first form)))
(apply union (map #(deps-from-libspec nil %) (rest form)))))
(defn deps-from-ns-decl
"Given a (quoted) ns declaration, returns a set of symbols naming
the dependencies of that namespace. Handles :use and :require clauses."
[decl]
(apply union (map deps-from-ns-form decl)))
| null | https://raw.githubusercontent.com/weavejester/ring-reload-modified/efe0b26287226baedb99446e064519971e292bfe/src/ring/util/nsdeps.clj | clojure | (ns ring.util.nsdeps
"Parsing namespace declarations for dependency information."
(:use [clojure.set :only (union)]))
(defn- deps-from-libspec [prefix form]
(cond (list? form) (apply union (map (fn [f] (deps-from-libspec
(symbol (str (when prefix (str prefix "."))
(first form)))
f))
(rest form)))
(vector? form) (deps-from-libspec prefix (first form))
(symbol? form) #{(symbol (str (when prefix (str prefix ".")) form))}
(keyword? form) #{}
:else (throw (IllegalArgumentException.
(pr-str "Unparsable namespace form:" form)))))
(defn- deps-from-ns-form [form]
(when (and (list? form)
(contains? #{:use :require} (first form)))
(apply union (map #(deps-from-libspec nil %) (rest form)))))
(defn deps-from-ns-decl
"Given a (quoted) ns declaration, returns a set of symbols naming
the dependencies of that namespace. Handles :use and :require clauses."
[decl]
(apply union (map deps-from-ns-form decl)))
| |
9496062dd03e14be99b9288c6dae3132099c25893a21551afceafe709998da95 | ocsigen/macaque | type_field.ml | let _ = << t.foo | t in $Base.recette$ >>
| null | https://raw.githubusercontent.com/ocsigen/macaque/a92e91c7ed443086551d909c3cfad22c71144f54/src/error_tests/type_field.ml | ocaml | let _ = << t.foo | t in $Base.recette$ >>
| |
cda99eae61eb07bc19527919d4664bc321053fb9f5bd52e0a1f2715c932d9351 | swannodette/mori | macros.clj | (ns mori.macros
(:require [cljs.compiler :as comp]
[cljs.analyzer :as ana]
[cljs.util :as util]
[cljs.analyzer.api :as ana-api]))
(alias 'core 'clojure.core)
(defn make-inspectable-1 [x]
`(aset (.-prototype ~x) "inspect"
(fn []
(~'this-as coll#
(.toString coll#)))))
(defmacro make-inspectable [& xs]
`(do ~@(map make-inspectable-1 xs)))
(defmacro mori-export [exportf coref]
(let [{:keys [ns name arglists]} (ana-api/resolve &env coref)
arglists (cond-> arglists
(= (first arglists) 'quote) rest)]
(letfn [(export-method [arglist]
(let [c (count arglist)]
`(js/goog.exportSymbol
~(str "mori." (core/name exportf) ".f" c)
~(symbol (str ns)
(str (core/name name) ".cljs$core$IFn$_invoke$arity$" c)))))]
`(do
(js/goog.exportSymbol ~(str "mori." (core/name exportf)) ~coref) ~(list 'js* ";")
~@(when (and arglists (< 1 (count arglists)))
(map export-method (remove #(some '#{&} %) arglists)))))))
(comment
;; setup
(require
'[cljs.env :as env]
'[clojure.pprint :as pp])
(def cenv (env/default-compiler-env))
(def aenv
(update-in (ana/empty-env) [:ns]
assoc
:name 'mori
:excludes '#{vector assoc-in conj}))
;; analyze core
(binding [ana/*cljs-ns* 'cljs.user]
(env/with-compiler-env cenv
(comp/with-core-cljs nil
(fn []))))
;; check fn
(pp/pprint
(env/with-compiler-env cenv
(ana-api/resolve aenv 'cljs.core/assoc-in)))
;; verify macro
(env/with-compiler-env cenv
(binding [ana/*cljs-ns* 'mori]
(pp/write
(ana/macroexpand-1 aenv
`(mori-export assocIn cljs.core/assoc-in))
;:dispatch pp/code-dispatch
)))
(env/with-compiler-env cenv
(binding [ana/*cljs-ns* 'mori
*ns* (create-ns 'mori)]
(pp/write
(ana/macroexpand-1 aenv
`(mori-export conj cljs.core/conj))
;:dispatch pp/code-dispatch
)))
)
| null | https://raw.githubusercontent.com/swannodette/mori/46c9194b3c4bc93fc4b402925b9417add3b0a3ba/src/mori/macros.clj | clojure | setup
analyze core
check fn
verify macro
:dispatch pp/code-dispatch
:dispatch pp/code-dispatch | (ns mori.macros
(:require [cljs.compiler :as comp]
[cljs.analyzer :as ana]
[cljs.util :as util]
[cljs.analyzer.api :as ana-api]))
(alias 'core 'clojure.core)
(defn make-inspectable-1 [x]
`(aset (.-prototype ~x) "inspect"
(fn []
(~'this-as coll#
(.toString coll#)))))
(defmacro make-inspectable [& xs]
`(do ~@(map make-inspectable-1 xs)))
(defmacro mori-export [exportf coref]
(let [{:keys [ns name arglists]} (ana-api/resolve &env coref)
arglists (cond-> arglists
(= (first arglists) 'quote) rest)]
(letfn [(export-method [arglist]
(let [c (count arglist)]
`(js/goog.exportSymbol
~(str "mori." (core/name exportf) ".f" c)
~(symbol (str ns)
(str (core/name name) ".cljs$core$IFn$_invoke$arity$" c)))))]
`(do
(js/goog.exportSymbol ~(str "mori." (core/name exportf)) ~coref) ~(list 'js* ";")
~@(when (and arglists (< 1 (count arglists)))
(map export-method (remove #(some '#{&} %) arglists)))))))
(comment
(require
'[cljs.env :as env]
'[clojure.pprint :as pp])
(def cenv (env/default-compiler-env))
(def aenv
(update-in (ana/empty-env) [:ns]
assoc
:name 'mori
:excludes '#{vector assoc-in conj}))
(binding [ana/*cljs-ns* 'cljs.user]
(env/with-compiler-env cenv
(comp/with-core-cljs nil
(fn []))))
(pp/pprint
(env/with-compiler-env cenv
(ana-api/resolve aenv 'cljs.core/assoc-in)))
(env/with-compiler-env cenv
(binding [ana/*cljs-ns* 'mori]
(pp/write
(ana/macroexpand-1 aenv
`(mori-export assocIn cljs.core/assoc-in))
)))
(env/with-compiler-env cenv
(binding [ana/*cljs-ns* 'mori
*ns* (create-ns 'mori)]
(pp/write
(ana/macroexpand-1 aenv
`(mori-export conj cljs.core/conj))
)))
)
|
d42d14d6cd26bc46d72c1087a394f9076911cc5bb1ddaaf7a2e91ac7f8eee284 | 3b/3bil | lambda-list.lisp | (in-package :avm2-compiler)
;;; misc util functions for dealing with lambda lists
(defun parse-lambda-list (lambda-list &key (normalize t)
(normalize-optional normalize)
(normalize-keyword normalize)
(normalize-auxilary normalize))
"modified from alexandria::parse-ordinary-lambda-list, adding support
for &AREST etc
Parses an ordinary lambda-list, returning as multiple values:
1. Required parameters.
2. Optional parameter specifications, normalized into form (NAME INIT SUPPLIEDP)
where SUPPLIEDP is NIL if not present.
3. Name of the rest parameter, or NIL.
4. Keyword parameter specifications, normalized into form ((KEYWORD-NAME NAME) INIT SUPPLIEDP)
where SUPPLIEDP is NIL if not present.
5. Boolean indicating &ALLOW-OTHER-KEYS presence.
6. &AUX parameter specifications, normalized into form (NAME INIT).
--
7. &AREST parameter or nil
Signals a PROGRAM-ERROR is the lambda-list is malformed."
(let ((state :required)
(allow-other-keys nil)
(auxp nil)
(required nil)
(optional nil)
(rest nil)
(arest nil)
(keys nil)
(aux nil))
(labels ((fail (elt)
(alexandria:simple-program-error
"Misplaced ~S in ordinary lambda-list:~% ~S"
elt lambda-list))
(check-variable (elt what)
(unless (and (symbolp elt) (not (constantp elt)))
(alexandria:simple-program-error
"Invalid ~A ~S in ordinary lambda-list:~% ~S"
what elt lambda-list)))
(check-spec (spec what)
(destructuring-bind (init suppliedp) spec
(declare (ignore init))
(check-variable suppliedp what))))
(dolist (elt lambda-list)
(case elt
(&optional
(if (eq state :required)
(setf state elt)
(fail elt)))
(&rest
(if (member state '(:required &optional))
(setf state elt)
(fail elt)))
(&arest
(if (member state '(:required &optional))
(setf state elt)
(fail elt)))
(&key
(if (member state '(:required &optional :after-rest))
(setf state elt)
(fail elt)))
(&allow-other-keys
(if (eq state '&key)
(setf allow-other-keys t
state elt)
(fail elt)))
(&aux
(cond ((eq state '&rest)
(fail elt))
((eq state '&arest)
(fail elt))
(auxp
(alexandria:simple-program-error
"Multiple ~S in ordinary lambda-list:~% ~S"
elt lambda-list))
(t
(setf auxp t
state elt))
))
(otherwise
(when (member elt '#.(set-difference lambda-list-keywords
'(&optional &rest &arest &key &allow-other-keys &aux)))
(alexandria:simple-program-error
"Bad lambda-list keyword ~S in ordinary lambda-list:~% ~S"
elt lambda-list))
(case state
(:required
(check-variable elt "required parameter")
(push elt required))
(&optional
(cond ((consp elt)
(destructuring-bind (name &rest tail) elt
(check-variable name "optional parameter")
(cond ((cdr tail)
(check-spec tail "optional-supplied-p parameter"))
(normalize-optional
(setf elt (append elt '(nil)))))))
(t
(check-variable elt "optional parameter")
(when normalize-optional
(setf elt (cons elt '(nil nil))))))
(push (alexandria:ensure-list elt) optional))
(&rest
(check-variable elt "rest parameter")
(setf rest elt
state :after-rest))
(&arest
(check-variable elt "arest parameter")
(setf arest elt
state :after-rest))
(&key
(cond ((consp elt)
(destructuring-bind (var-or-kv &rest tail) elt
(cond ((consp var-or-kv)
(destructuring-bind (keyword var) var-or-kv
(unless (symbolp keyword)
(alexandria:simple-program-error "Invalid keyword name ~S in ordinary ~
lambda-list:~% ~S"
keyword lambda-list))
(check-variable var "keyword parameter")))
(t
(check-variable var-or-kv "keyword parameter")
(when normalize-keyword
(setf var-or-kv (list (alexandria:make-keyword
var-or-kv)
var-or-kv)))))
(if (cdr tail)
(check-spec tail "keyword-supplied-p parameter")
(when normalize-keyword
(setf tail (append tail '(nil)))))
(setf elt (cons var-or-kv tail))))
(t
(check-variable elt "keyword parameter")
(setf elt (if normalize-keyword
(list (list (alexandria:make-keyword elt)
elt)
nil nil)
(list elt)))))
(push elt keys))
(&aux
(if (consp elt)
(destructuring-bind (var &optional init) elt
(declare (ignore init))
(check-variable var "&aux parameter"))
(progn
(check-variable elt "&aux parameter")
(setf elt (list* elt (when normalize-auxilary
'(nil))))))
(push elt aux))
(t
(alexandria:simple-program-error
"Invalid ordinary lambda-list:~% ~S" lambda-list)))))))
(values (nreverse required)
(nreverse optional)
rest
(nreverse keys)
allow-other-keys
(nreverse aux)
arest)))
;; fixme: places this is still used probably shouldn't be seeing full
;; lambda lists anymore, so check them and remove this if so
(defun lambda-list-vars (llist)
(multiple-value-bind (required optional rest keys allow-other-keys aux arest)
(parse-lambda-list llist)
(declare (ignore allow-other-keys))
;; opt = ((name default supplied-p)*)
;; key = (((:key name) default supplied-p)*)
;; aux = ((name default)*)?
(when aux
(error "got &aux ~s" aux))
(when optional
(error "got &optional ~s" optional))
(when keys
(error "got &key ~s" keys))
(append required
(when rest (list rest))
(when arest (list arest))
(loop for (name nil p) in optional
when name collect name
when p collect p)
(loop for ((nil name) nil p) in keys
when name collect name
when p collect p)
(mapcar 'car aux))))
#++
(defun old-alphatize-lambda-list (lambda-list alphatized-names)
(loop for i in lambda-list
for alpha = (assoc i alphatized-names)
;; normal var or keyword, replace with alphatized version if any
when alpha collect (cadr alpha)
;; defaulted vars
else when (consp i)
collect (cons
;; var name, possibly with explicit keyword (not alphatized)
(if (consp (car i))
(list (caar i)
(or (cdr (assoc (cadar i) alphatized-names))
(cadar i)))
(or (cadr (assoc (car i) alphatized-names))
(car i)))
;; default and supplied-p var
(if (= 1 (length (cdr i)))
(cdr i) ;; no supplied var
(list (second i)
(or (cadr (assoc (third i) alphatized-names))
(third i)))))
else collect i))
#+-
(defun alphatize-lambda-list (lambda-list alphatize recur-init)
(break)
(multiple-value-bind (required optional rest keys allow-other-keys aux arest)
(parse-lambda-list lambda-list)
#++(when (or rest keys aux)
(error "lambda-list keywords &rest &keys &aux not supported yet..."))
(let ((vars (append required
(when rest (list rest))
(when arest (list arest))
(loop for (name nil p) in optional
when name collect name
when p collect p)
(loop for ((nil name) nil p) in keys
when name collect name
when p collect p)
(mapcar 'car aux))))
(list
:vars ;; fixme: get these directly instead of parsing lambda list twice
(loop for i in (lambda-list-vars lambda-list)
for a = (assoc i alphatized-names)
when a collect (cadr a)
else do (error "var not alphatized? ~s - ~s" i a))
:required
(loop for i in required
for alpha = (assoc i alphatized-names)
when alpha
collect (cadr alpha)
else do (error "var not alphatized yet? ~s - ~s" i alpha))
:rest (cadr (assoc rest alphatized-names))
:arest (cadr (assoc arest alphatized-names))
:allow-other-keys allow-other-keys
:optional
(loop for (name init p) in optional
for alpha = (assoc name alphatized-names)
for init-r = (funcall recur-init init)
for alphap = (assoc p alphatized-names)
do (format t "&o (~s ~s ~s)->(~s ~s ~s)~%"
name init p
alpha init-r alphap)
when (and alpha (if p alphap t))
collect (list (cadr alpha) init-r (cadr alphap))
else do (error "opt var not alphatized yet? ~s/~s - ~s/~s"
name p alpha alphap))
:key
(loop for ((key name) init p) in keys
for alpha = (assoc name alphatized-names)
for init-r = (funcall recur-init init)
for alphap = (assoc p alphatized-names)
do (format t "&k (~s ~s ~s)->(~s ~s ~s)~%"
name init p
alpha init-r alphap)
when (and alpha (if p alphap t))
collect (list (list key (cadr alpha)) init-r (cadr alphap))
else do (error "key var not alphatized yet? ~s/~s - ~s/~s"
name p alpha alphap))
:aux
(loop for (name init) in aux
for alpha = (assoc name alphatized-names)
for init-r = (funcall recur-init init)
do (format t "&a (~s ~s)->(~s ~s)~%" name init alpha init-r)
when alpha
collect (list (cadr alpha) init-r)
else do (error "aux var not alphatized yet? ~s - ~s"
name alpha))))))
( alphatize - lambda - list ' ( v1 & optional v2 ( v3 3 ) ( v4 4 v4p ) & key ( ( v5k v5 ) 5 v5p ) ) ' ( ( v1 v1a ) ( v2 v2a ) ( v3 v3a ) ( v4 v4a ) ( v4p v4pa ) ( v5 v5a ) ( v5p v5pa ) ( v5k v5ka ) ) # ' identity )
| null | https://raw.githubusercontent.com/3b/3bil/c852181848bedf476373e901869ca29471f926ee/compile/lambda-list.lisp | lisp | misc util functions for dealing with lambda lists
fixme: places this is still used probably shouldn't be seeing full
lambda lists anymore, so check them and remove this if so
opt = ((name default supplied-p)*)
key = (((:key name) default supplied-p)*)
aux = ((name default)*)?
normal var or keyword, replace with alphatized version if any
defaulted vars
var name, possibly with explicit keyword (not alphatized)
default and supplied-p var
no supplied var
fixme: get these directly instead of parsing lambda list twice | (in-package :avm2-compiler)
(defun parse-lambda-list (lambda-list &key (normalize t)
(normalize-optional normalize)
(normalize-keyword normalize)
(normalize-auxilary normalize))
"modified from alexandria::parse-ordinary-lambda-list, adding support
for &AREST etc
Parses an ordinary lambda-list, returning as multiple values:
1. Required parameters.
2. Optional parameter specifications, normalized into form (NAME INIT SUPPLIEDP)
where SUPPLIEDP is NIL if not present.
3. Name of the rest parameter, or NIL.
4. Keyword parameter specifications, normalized into form ((KEYWORD-NAME NAME) INIT SUPPLIEDP)
where SUPPLIEDP is NIL if not present.
5. Boolean indicating &ALLOW-OTHER-KEYS presence.
6. &AUX parameter specifications, normalized into form (NAME INIT).
--
7. &AREST parameter or nil
Signals a PROGRAM-ERROR is the lambda-list is malformed."
(let ((state :required)
(allow-other-keys nil)
(auxp nil)
(required nil)
(optional nil)
(rest nil)
(arest nil)
(keys nil)
(aux nil))
(labels ((fail (elt)
(alexandria:simple-program-error
"Misplaced ~S in ordinary lambda-list:~% ~S"
elt lambda-list))
(check-variable (elt what)
(unless (and (symbolp elt) (not (constantp elt)))
(alexandria:simple-program-error
"Invalid ~A ~S in ordinary lambda-list:~% ~S"
what elt lambda-list)))
(check-spec (spec what)
(destructuring-bind (init suppliedp) spec
(declare (ignore init))
(check-variable suppliedp what))))
(dolist (elt lambda-list)
(case elt
(&optional
(if (eq state :required)
(setf state elt)
(fail elt)))
(&rest
(if (member state '(:required &optional))
(setf state elt)
(fail elt)))
(&arest
(if (member state '(:required &optional))
(setf state elt)
(fail elt)))
(&key
(if (member state '(:required &optional :after-rest))
(setf state elt)
(fail elt)))
(&allow-other-keys
(if (eq state '&key)
(setf allow-other-keys t
state elt)
(fail elt)))
(&aux
(cond ((eq state '&rest)
(fail elt))
((eq state '&arest)
(fail elt))
(auxp
(alexandria:simple-program-error
"Multiple ~S in ordinary lambda-list:~% ~S"
elt lambda-list))
(t
(setf auxp t
state elt))
))
(otherwise
(when (member elt '#.(set-difference lambda-list-keywords
'(&optional &rest &arest &key &allow-other-keys &aux)))
(alexandria:simple-program-error
"Bad lambda-list keyword ~S in ordinary lambda-list:~% ~S"
elt lambda-list))
(case state
(:required
(check-variable elt "required parameter")
(push elt required))
(&optional
(cond ((consp elt)
(destructuring-bind (name &rest tail) elt
(check-variable name "optional parameter")
(cond ((cdr tail)
(check-spec tail "optional-supplied-p parameter"))
(normalize-optional
(setf elt (append elt '(nil)))))))
(t
(check-variable elt "optional parameter")
(when normalize-optional
(setf elt (cons elt '(nil nil))))))
(push (alexandria:ensure-list elt) optional))
(&rest
(check-variable elt "rest parameter")
(setf rest elt
state :after-rest))
(&arest
(check-variable elt "arest parameter")
(setf arest elt
state :after-rest))
(&key
(cond ((consp elt)
(destructuring-bind (var-or-kv &rest tail) elt
(cond ((consp var-or-kv)
(destructuring-bind (keyword var) var-or-kv
(unless (symbolp keyword)
(alexandria:simple-program-error "Invalid keyword name ~S in ordinary ~
lambda-list:~% ~S"
keyword lambda-list))
(check-variable var "keyword parameter")))
(t
(check-variable var-or-kv "keyword parameter")
(when normalize-keyword
(setf var-or-kv (list (alexandria:make-keyword
var-or-kv)
var-or-kv)))))
(if (cdr tail)
(check-spec tail "keyword-supplied-p parameter")
(when normalize-keyword
(setf tail (append tail '(nil)))))
(setf elt (cons var-or-kv tail))))
(t
(check-variable elt "keyword parameter")
(setf elt (if normalize-keyword
(list (list (alexandria:make-keyword elt)
elt)
nil nil)
(list elt)))))
(push elt keys))
(&aux
(if (consp elt)
(destructuring-bind (var &optional init) elt
(declare (ignore init))
(check-variable var "&aux parameter"))
(progn
(check-variable elt "&aux parameter")
(setf elt (list* elt (when normalize-auxilary
'(nil))))))
(push elt aux))
(t
(alexandria:simple-program-error
"Invalid ordinary lambda-list:~% ~S" lambda-list)))))))
(values (nreverse required)
(nreverse optional)
rest
(nreverse keys)
allow-other-keys
(nreverse aux)
arest)))
(defun lambda-list-vars (llist)
(multiple-value-bind (required optional rest keys allow-other-keys aux arest)
(parse-lambda-list llist)
(declare (ignore allow-other-keys))
(when aux
(error "got &aux ~s" aux))
(when optional
(error "got &optional ~s" optional))
(when keys
(error "got &key ~s" keys))
(append required
(when rest (list rest))
(when arest (list arest))
(loop for (name nil p) in optional
when name collect name
when p collect p)
(loop for ((nil name) nil p) in keys
when name collect name
when p collect p)
(mapcar 'car aux))))
#++
(defun old-alphatize-lambda-list (lambda-list alphatized-names)
(loop for i in lambda-list
for alpha = (assoc i alphatized-names)
when alpha collect (cadr alpha)
else when (consp i)
collect (cons
(if (consp (car i))
(list (caar i)
(or (cdr (assoc (cadar i) alphatized-names))
(cadar i)))
(or (cadr (assoc (car i) alphatized-names))
(car i)))
(if (= 1 (length (cdr i)))
(list (second i)
(or (cadr (assoc (third i) alphatized-names))
(third i)))))
else collect i))
#+-
(defun alphatize-lambda-list (lambda-list alphatize recur-init)
(break)
(multiple-value-bind (required optional rest keys allow-other-keys aux arest)
(parse-lambda-list lambda-list)
#++(when (or rest keys aux)
(error "lambda-list keywords &rest &keys &aux not supported yet..."))
(let ((vars (append required
(when rest (list rest))
(when arest (list arest))
(loop for (name nil p) in optional
when name collect name
when p collect p)
(loop for ((nil name) nil p) in keys
when name collect name
when p collect p)
(mapcar 'car aux))))
(list
(loop for i in (lambda-list-vars lambda-list)
for a = (assoc i alphatized-names)
when a collect (cadr a)
else do (error "var not alphatized? ~s - ~s" i a))
:required
(loop for i in required
for alpha = (assoc i alphatized-names)
when alpha
collect (cadr alpha)
else do (error "var not alphatized yet? ~s - ~s" i alpha))
:rest (cadr (assoc rest alphatized-names))
:arest (cadr (assoc arest alphatized-names))
:allow-other-keys allow-other-keys
:optional
(loop for (name init p) in optional
for alpha = (assoc name alphatized-names)
for init-r = (funcall recur-init init)
for alphap = (assoc p alphatized-names)
do (format t "&o (~s ~s ~s)->(~s ~s ~s)~%"
name init p
alpha init-r alphap)
when (and alpha (if p alphap t))
collect (list (cadr alpha) init-r (cadr alphap))
else do (error "opt var not alphatized yet? ~s/~s - ~s/~s"
name p alpha alphap))
:key
(loop for ((key name) init p) in keys
for alpha = (assoc name alphatized-names)
for init-r = (funcall recur-init init)
for alphap = (assoc p alphatized-names)
do (format t "&k (~s ~s ~s)->(~s ~s ~s)~%"
name init p
alpha init-r alphap)
when (and alpha (if p alphap t))
collect (list (list key (cadr alpha)) init-r (cadr alphap))
else do (error "key var not alphatized yet? ~s/~s - ~s/~s"
name p alpha alphap))
:aux
(loop for (name init) in aux
for alpha = (assoc name alphatized-names)
for init-r = (funcall recur-init init)
do (format t "&a (~s ~s)->(~s ~s)~%" name init alpha init-r)
when alpha
collect (list (cadr alpha) init-r)
else do (error "aux var not alphatized yet? ~s - ~s"
name alpha))))))
( alphatize - lambda - list ' ( v1 & optional v2 ( v3 3 ) ( v4 4 v4p ) & key ( ( v5k v5 ) 5 v5p ) ) ' ( ( v1 v1a ) ( v2 v2a ) ( v3 v3a ) ( v4 v4a ) ( v4p v4pa ) ( v5 v5a ) ( v5p v5pa ) ( v5k v5ka ) ) # ' identity )
|
6f14689891d4728286b647848690d7b496bbfe96190c05dd69795a29b1d52834 | racket/racket7 | tcp-connect.rkt | #lang racket/base
(require "../common/check.rkt"
"../common/resource.rkt"
"../host/thread.rkt"
"../host/rktio.rkt"
"../host/error.rkt"
"../security/main.rkt"
"../format/main.rkt"
"tcp-port.rkt"
"port-number.rkt"
"address.rkt"
"evt.rkt"
"error.rkt")
(provide tcp-connect
tcp-connect/enable-break)
(define/who (tcp-connect hostname port-no [local-hostname #f] [local-port-no #f])
(do-tcp-connect who hostname port-no local-hostname local-port-no))
(define/who (tcp-connect/enable-break hostname port-no [local-hostname #f] [local-port-no #f])
(do-tcp-connect who #:enable-break? #t hostname port-no local-hostname local-port-no))
(define (do-tcp-connect who hostname port-no [local-hostname #f] [local-port-no #f]
#:enable-break? [enable-break? #f])
(check who string? hostname)
(check who port-number? port-no)
(check who string? #:or-false local-hostname)
(check who port-number? #:or-false local-port-no)
(when (and local-hostname (not local-port-no))
(raise-arguments-error who
"no local port number supplied when local hostname was supplied"
"hostname" local-hostname))
;; in atomic mode (but exits atomic mode to raise an exception)
(define (raise-connect-error err
[what "connection failed"]
[hostname hostname]
[port-no port-no])
(end-atomic)
(raise-network-error who err
(string-append what
(if hostname
(format "\n hostname: ~a" hostname)
"")
(if port-no
(format "\n port number: ~a" port-no)
""))))
(security-guard-check-network who hostname port-no #t)
(atomically
(call-with-resolved-address
hostname port-no
#:enable-break? enable-break?
;; in atomic mode
(lambda (remote-addr)
(cond
[(rktio-error? remote-addr)
(raise-connect-error remote-addr "host not found")]
[else
(call-with-resolved-address
local-hostname local-port-no
#:enable-break? enable-break?
;; in atomic mode
(lambda (local-addr)
(cond
[(rktio-error? local-addr)
(raise-connect-error local-addr "local host not found" local-hostname local-port-no)]
[else
(call-with-resource
(box (rktio_start_connect rktio remote-addr local-addr))
;; in atomic mode
(lambda (conn-box)
(define conn (unbox conn-box))
(when conn
(rktio_connect_stop rktio conn)))
;; in atomic mode
(lambda (conn-box)
(define conn (unbox conn-box))
(cond
[(rktio-error? conn)
(raise-connect-error conn)]
[else
(let loop ()
(cond
[(eqv? (rktio_poll_connect_ready rktio conn)
RKTIO_POLL_NOT_READY)
(end-atomic)
((if enable-break? sync/enable-break sync)
(rktio-evt (lambda ()
(not (eqv? (rktio_poll_connect_ready rktio conn)
RKTIO_POLL_NOT_READY)))
(lambda (ps)
(rktio_poll_add_connect rktio conn ps))))
(start-atomic)
(loop)]
[else
(check-current-custodian who)
(define fd (rktio_connect_finish rktio conn))
(cond
[(rktio-error? fd)
(cond
[(racket-error? fd RKTIO_ERROR_CONNECT_TRYING_NEXT)
(loop)]
[else
;; other errors imply that `conn` is destroyed
(set-box! conn-box #f)
(raise-connect-error fd)])]
[else
(define name (string->immutable-string hostname))
(open-input-output-tcp fd name)])]))])))])))])))))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/src/io/network/tcp-connect.rkt | racket | in atomic mode (but exits atomic mode to raise an exception)
in atomic mode
in atomic mode
in atomic mode
in atomic mode
other errors imply that `conn` is destroyed | #lang racket/base
(require "../common/check.rkt"
"../common/resource.rkt"
"../host/thread.rkt"
"../host/rktio.rkt"
"../host/error.rkt"
"../security/main.rkt"
"../format/main.rkt"
"tcp-port.rkt"
"port-number.rkt"
"address.rkt"
"evt.rkt"
"error.rkt")
(provide tcp-connect
tcp-connect/enable-break)
(define/who (tcp-connect hostname port-no [local-hostname #f] [local-port-no #f])
(do-tcp-connect who hostname port-no local-hostname local-port-no))
(define/who (tcp-connect/enable-break hostname port-no [local-hostname #f] [local-port-no #f])
(do-tcp-connect who #:enable-break? #t hostname port-no local-hostname local-port-no))
(define (do-tcp-connect who hostname port-no [local-hostname #f] [local-port-no #f]
#:enable-break? [enable-break? #f])
(check who string? hostname)
(check who port-number? port-no)
(check who string? #:or-false local-hostname)
(check who port-number? #:or-false local-port-no)
(when (and local-hostname (not local-port-no))
(raise-arguments-error who
"no local port number supplied when local hostname was supplied"
"hostname" local-hostname))
(define (raise-connect-error err
[what "connection failed"]
[hostname hostname]
[port-no port-no])
(end-atomic)
(raise-network-error who err
(string-append what
(if hostname
(format "\n hostname: ~a" hostname)
"")
(if port-no
(format "\n port number: ~a" port-no)
""))))
(security-guard-check-network who hostname port-no #t)
(atomically
(call-with-resolved-address
hostname port-no
#:enable-break? enable-break?
(lambda (remote-addr)
(cond
[(rktio-error? remote-addr)
(raise-connect-error remote-addr "host not found")]
[else
(call-with-resolved-address
local-hostname local-port-no
#:enable-break? enable-break?
(lambda (local-addr)
(cond
[(rktio-error? local-addr)
(raise-connect-error local-addr "local host not found" local-hostname local-port-no)]
[else
(call-with-resource
(box (rktio_start_connect rktio remote-addr local-addr))
(lambda (conn-box)
(define conn (unbox conn-box))
(when conn
(rktio_connect_stop rktio conn)))
(lambda (conn-box)
(define conn (unbox conn-box))
(cond
[(rktio-error? conn)
(raise-connect-error conn)]
[else
(let loop ()
(cond
[(eqv? (rktio_poll_connect_ready rktio conn)
RKTIO_POLL_NOT_READY)
(end-atomic)
((if enable-break? sync/enable-break sync)
(rktio-evt (lambda ()
(not (eqv? (rktio_poll_connect_ready rktio conn)
RKTIO_POLL_NOT_READY)))
(lambda (ps)
(rktio_poll_add_connect rktio conn ps))))
(start-atomic)
(loop)]
[else
(check-current-custodian who)
(define fd (rktio_connect_finish rktio conn))
(cond
[(rktio-error? fd)
(cond
[(racket-error? fd RKTIO_ERROR_CONNECT_TRYING_NEXT)
(loop)]
[else
(set-box! conn-box #f)
(raise-connect-error fd)])]
[else
(define name (string->immutable-string hostname))
(open-input-output-tcp fd name)])]))])))])))])))))
|
548a8855cdc98121e15d665d34b2424dca0b7cc0b11660545fa1883eb5ff6874 | billstclair/trubanc-lisp | packages.lisp | ;;;; -*- Mode: lisp; indent-tabs-mode: nil -*-
;;;
;;; package.lisp --- Package definition for Babel
;;;
Copyright ( C ) 2007 ,
;;;
;;; Permission is hereby granted, free of charge, to any person
;;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without
;;; restriction, including without limitation the rights to use, copy,
;;; modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software , and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
;;; HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
;;; WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
;;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
;;; DEALINGS IN THE SOFTWARE.
(in-package #:cl-user)
(defpackage #:babel-encodings
(:use #:common-lisp #:alexandria)
(:export
;; character encoding objects
#:list-character-encodings
#:character-encoding
#:*default-character-encoding*
#:get-character-encoding
#:enc-name
#:enc-aliases
#:enc-code-unit-size
#:enc-max-units-per-char
#:enc-native-endianness
#:enc-decode-literal-code-unit-limit
#:enc-encode-literal-code-unit-limit
#:enc-use-bom
#:enc-bom-encoding
#:enc-nul-encoding
#:enc-default-replacement
;; concrete mappings
#:instantiate-concrete-mappings
#:encoder
#:decoder
#:octet-counter
#:code-point-counter
#:lookup-mapping
#:with-simple-vector
#:with-checked-simple-vector
#:*suppress-character-coding-errors*
;; errors
#:character-coding-error
#:character-coding-error-encoding ; accessor
#:character-coding-error-buffer ; accessor
#:character-coding-error-position ; accessor
#:character-decoding-error
#:character-decoding-error-octets ; accessor
#:character-encoding-error
#:character-encoding-error-code ; accessor
#:end-of-input-in-character
#:character-out-of-range
#:invalid-utf8-starter-byte
#:invalid-utf8-continuation-byte
#:overlong-utf8-sequence))
(defpackage #:babel
(:use #:common-lisp #:babel-encodings #:alexandria)
(:import-from #:babel-encodings)
(:export
;; types
#:unicode-char
#:unicode-char-code-limit
#:unicode-string
#:simple-unicode-string
;; fixed sharp-backslash reader
#:enable-sharp-backslash-syntax
#:set-sharp-backslash-syntax-in-readtable
;; external formats
#:external-format
#:make-external-format
#:ensure-external-format
#:external-format-encoding
#:external-format-eol-style
#:external-format-equal
#:*default-eol-style*
;; general user API
#:*default-character-encoding*
#:list-character-encodings
#:string-to-octets
#:octets-to-string
#:concatenate-strings-to-octets
#:string-size-in-octets
#:vector-size-in-chars
;; errors
#:character-coding-error
#:character-coding-error-encoding ; accessor
#:character-coding-error-buffer ; accessor
#:character-coding-error-position ; accessor
#:character-decoding-error
#:character-decoding-error-octets ; accessor
#:character-encoding-error
#:character-encoding-error-code ; accessor
#:end-of-input-in-character
#:character-out-of-range
#:invalid-utf8-starter-byte
#:invalid-utf8-continuation-byte
#:overlong-utf8-sequence))
| null | https://raw.githubusercontent.com/billstclair/trubanc-lisp/5436d2eca5b1ed10bc47eec7080f6cb90f98ca65/systems/babel_0.3.1/src/packages.lisp | lisp | -*- Mode: lisp; indent-tabs-mode: nil -*-
package.lisp --- Package definition for Babel
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
character encoding objects
concrete mappings
errors
accessor
accessor
accessor
accessor
accessor
types
fixed sharp-backslash reader
external formats
general user API
errors
accessor
accessor
accessor
accessor
accessor | Copyright ( C ) 2007 ,
files ( the " Software " ) , to deal in the Software without
of the Software , and to permit persons to whom the Software is
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
(in-package #:cl-user)
(defpackage #:babel-encodings
(:use #:common-lisp #:alexandria)
(:export
#:list-character-encodings
#:character-encoding
#:*default-character-encoding*
#:get-character-encoding
#:enc-name
#:enc-aliases
#:enc-code-unit-size
#:enc-max-units-per-char
#:enc-native-endianness
#:enc-decode-literal-code-unit-limit
#:enc-encode-literal-code-unit-limit
#:enc-use-bom
#:enc-bom-encoding
#:enc-nul-encoding
#:enc-default-replacement
#:instantiate-concrete-mappings
#:encoder
#:decoder
#:octet-counter
#:code-point-counter
#:lookup-mapping
#:with-simple-vector
#:with-checked-simple-vector
#:*suppress-character-coding-errors*
#:character-coding-error
#:character-decoding-error
#:character-encoding-error
#:end-of-input-in-character
#:character-out-of-range
#:invalid-utf8-starter-byte
#:invalid-utf8-continuation-byte
#:overlong-utf8-sequence))
(defpackage #:babel
(:use #:common-lisp #:babel-encodings #:alexandria)
(:import-from #:babel-encodings)
(:export
#:unicode-char
#:unicode-char-code-limit
#:unicode-string
#:simple-unicode-string
#:enable-sharp-backslash-syntax
#:set-sharp-backslash-syntax-in-readtable
#:external-format
#:make-external-format
#:ensure-external-format
#:external-format-encoding
#:external-format-eol-style
#:external-format-equal
#:*default-eol-style*
#:*default-character-encoding*
#:list-character-encodings
#:string-to-octets
#:octets-to-string
#:concatenate-strings-to-octets
#:string-size-in-octets
#:vector-size-in-chars
#:character-coding-error
#:character-decoding-error
#:character-encoding-error
#:end-of-input-in-character
#:character-out-of-range
#:invalid-utf8-starter-byte
#:invalid-utf8-continuation-byte
#:overlong-utf8-sequence))
|
6b7097d805096d3b2a1b6e609b5ed8bb8fffcb11efebd57de9cd54d37f33311e | gvolpe/haskell-book-exercises | exercises.hs | import Data.Char
filterUppercase :: String -> String
filterUppercase xs = filter isUpper xs
capitalize :: String -> String
capitalize [] = []
capitalize (x:xs) = toUpper x : xs
toUppercase :: String -> String
toUppercase [] = []
toUppercase (x:xs) = toUpper x : toUppercase xs
capitalizeHead :: String -> Maybe Char
capitalizeHead [] = Nothing
capitalizeHead (x:xs) = Just (toUpper x)
capitalizeHead' :: String -> Char
capitalizeHead' = toUpper . head
-- more
myOr :: [Bool] -> Bool
myOr [] = False
myOr (x:xs) = if x == True then True else myOr xs
myAny :: (a -> Bool) -> [a] -> Bool
myAny _ [] = False
myAny f (x:xs) = if f x then True else myAny f xs
myElem :: Eq a => a -> [a] -> Bool
myElem _ [] = False
myElem x (y:ys) = if x == y then True else myElem x ys
myElem' :: Eq a => a -> [a] -> Bool
myElem' _ [] = False
myElem' x xs = any (==x) xs
myReverse :: [a] -> [a]
myReverse [] = []
myReverse (x:xs) = myReverse xs ++ x : []
squish :: [[a]] -> [a]
squish [] = []
squish (x:xs) = x ++ squish xs
squishMap :: (a -> [b]) -> [a] -> [b]
squishMap _ [] = []
squishMap f xs = squish $ map f xs
| null | https://raw.githubusercontent.com/gvolpe/haskell-book-exercises/5c1b9d8dc729ee5a90c8709b9c889cbacb30a2cb/chapter9/exercises.hs | haskell | more | import Data.Char
filterUppercase :: String -> String
filterUppercase xs = filter isUpper xs
capitalize :: String -> String
capitalize [] = []
capitalize (x:xs) = toUpper x : xs
toUppercase :: String -> String
toUppercase [] = []
toUppercase (x:xs) = toUpper x : toUppercase xs
capitalizeHead :: String -> Maybe Char
capitalizeHead [] = Nothing
capitalizeHead (x:xs) = Just (toUpper x)
capitalizeHead' :: String -> Char
capitalizeHead' = toUpper . head
myOr :: [Bool] -> Bool
myOr [] = False
myOr (x:xs) = if x == True then True else myOr xs
myAny :: (a -> Bool) -> [a] -> Bool
myAny _ [] = False
myAny f (x:xs) = if f x then True else myAny f xs
myElem :: Eq a => a -> [a] -> Bool
myElem _ [] = False
myElem x (y:ys) = if x == y then True else myElem x ys
myElem' :: Eq a => a -> [a] -> Bool
myElem' _ [] = False
myElem' x xs = any (==x) xs
myReverse :: [a] -> [a]
myReverse [] = []
myReverse (x:xs) = myReverse xs ++ x : []
squish :: [[a]] -> [a]
squish [] = []
squish (x:xs) = x ++ squish xs
squishMap :: (a -> [b]) -> [a] -> [b]
squishMap _ [] = []
squishMap f xs = squish $ map f xs
|
c39b01d0b70eed6ffc89876bb08796713044cd1d5a9be41e21d53d28e7e4a14c | binsec/binsec | binpatcher.mli | (**************************************************************************)
This file is part of BINSEC .
(* *)
Copyright ( C ) 2016 - 2022
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(** Definitions for binary patcher *)
* { 2 Patch map }
module PatchMap : sig
type t
val empty : t
(** The empty patch map *)
val load_file : string -> t
* [ load_file filename ] loads patches contained in [ filename ]
The patches can be given in the following format ,
defined in file [ parser / parser.mly ] :
{ ul { - ( address " .... " ) }
{ - ( address ( i_1 ... i_n ) ) } }
i.e. the file contains a list of small S - expressions where :
{ ul { - the first element is an address ( hexadecimal or decimal integer ) ; }
{ - the second element is either
{ ul { - a string ( interpreted as a sequence of bytes ) ; }
{ - or a ( non - empty ) list of integers , where each integer corresponds to
a byte ( 0 < = n < = 255 ) . } }
}
}
The patches can be given in the following format,
defined in file [parser/parser.mly]:
{ul {- (address "....")}
{- (address (i_1 ... i_n))}}
i.e. the file contains a list of small S-expressions where:
{ul {- the first element is an address (hexadecimal or decimal integer); }
{- the second element is either
{ul {- a string (interpreted as a sequence of bytes); }
{- or a (non-empty) list of integers, where each integer corresponds to
a byte (0 <= n <= 255). }}
}
}
*)
val of_list : (Virtual_address.t * Binstream.t) list -> t
(** [of_list l] converts an association list [l] of addresses to opcodes to a
patch map *)
val add_bytes : Virtual_address.t -> Binstream.t -> t -> t
(** [add_bytes address bytes patchmap] writes the opcode [bytes] to [address]
in the [patchmap]. The full length of the byte sequence is writtent
starting at [address]. *)
end
* { 2 Writable loader }
*
This module is a simple extension to the loaders of [ binsec ] where it is
allowed to rewrite parts of the binary .
The extension is non - destructive .
This module is a simple extension to the loaders of [binsec] where it is
allowed to rewrite parts of the binary.
The extension is non-destructive.
*)
module WritableLoader : sig
type t
val create : Loader.Img.t -> PatchMap.t -> t
val create_from_files : executable:string -> patch_file:string -> unit -> t
(** [create_from_files ~executable ~patch_file] creates a writable loader from
a binary file and a series of patches read from a given file
*)
val get : int -> t -> Loader_types.u8
(** [get addr t] gets the byte stored at address [addr] *)
val dim : t -> int
(** [dim t] gives the size of the loaded image in bytes *)
val pp_to_file : filename:string -> t -> unit
* [ pp_to_file w ] writes the contents of the image to the file
[ filename ]
[filename] *)
end
val run : executable:string -> unit
* Run the binary patcher on patch file { ! Binpatcher_options . PatchFile }
for the given [ executable ] .
The patched result is written to { ! Binpatcher_options . PatchOutFile } .
for the given [executable].
The patched result is written to {!Binpatcher_options.PatchOutFile}.
*)
| null | https://raw.githubusercontent.com/binsec/binsec/8ed9991d36451a3ae7487b966c4b38acca21a5b3/src/binpatcher/binpatcher.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Definitions for binary patcher
* The empty patch map
* [of_list l] converts an association list [l] of addresses to opcodes to a
patch map
* [add_bytes address bytes patchmap] writes the opcode [bytes] to [address]
in the [patchmap]. The full length of the byte sequence is writtent
starting at [address].
* [create_from_files ~executable ~patch_file] creates a writable loader from
a binary file and a series of patches read from a given file
* [get addr t] gets the byte stored at address [addr]
* [dim t] gives the size of the loaded image in bytes | This file is part of BINSEC .
Copyright ( C ) 2016 - 2022
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
* { 2 Patch map }
module PatchMap : sig
type t
val empty : t
val load_file : string -> t
* [ load_file filename ] loads patches contained in [ filename ]
The patches can be given in the following format ,
defined in file [ parser / parser.mly ] :
{ ul { - ( address " .... " ) }
{ - ( address ( i_1 ... i_n ) ) } }
i.e. the file contains a list of small S - expressions where :
{ ul { - the first element is an address ( hexadecimal or decimal integer ) ; }
{ - the second element is either
{ ul { - a string ( interpreted as a sequence of bytes ) ; }
{ - or a ( non - empty ) list of integers , where each integer corresponds to
a byte ( 0 < = n < = 255 ) . } }
}
}
The patches can be given in the following format,
defined in file [parser/parser.mly]:
{ul {- (address "....")}
{- (address (i_1 ... i_n))}}
i.e. the file contains a list of small S-expressions where:
{ul {- the first element is an address (hexadecimal or decimal integer); }
{- the second element is either
{ul {- a string (interpreted as a sequence of bytes); }
{- or a (non-empty) list of integers, where each integer corresponds to
a byte (0 <= n <= 255). }}
}
}
*)
val of_list : (Virtual_address.t * Binstream.t) list -> t
val add_bytes : Virtual_address.t -> Binstream.t -> t -> t
end
* { 2 Writable loader }
*
This module is a simple extension to the loaders of [ binsec ] where it is
allowed to rewrite parts of the binary .
The extension is non - destructive .
This module is a simple extension to the loaders of [binsec] where it is
allowed to rewrite parts of the binary.
The extension is non-destructive.
*)
module WritableLoader : sig
type t
val create : Loader.Img.t -> PatchMap.t -> t
val create_from_files : executable:string -> patch_file:string -> unit -> t
val get : int -> t -> Loader_types.u8
val dim : t -> int
val pp_to_file : filename:string -> t -> unit
* [ pp_to_file w ] writes the contents of the image to the file
[ filename ]
[filename] *)
end
val run : executable:string -> unit
* Run the binary patcher on patch file { ! Binpatcher_options . PatchFile }
for the given [ executable ] .
The patched result is written to { ! Binpatcher_options . PatchOutFile } .
for the given [executable].
The patched result is written to {!Binpatcher_options.PatchOutFile}.
*)
|
bcae2a8bac442089498602189f343e5abc35cf92e8259d4ea5c4749684077aeb | jepsen-io/jepsen | append.clj | (ns jepsen.stolon.append
"Test for transactional list append."
(:require [clojure.tools.logging :refer [info warn]]
[clojure [pprint :refer [pprint]]
[string :as str]]
[dom-top.core :refer [with-retry]]
[elle.core :as elle]
[jepsen [checker :as checker]
[client :as client]
[generator :as gen]
[util :as util :refer [parse-long]]]
[jepsen.checker.timeline :as timeline]
[jepsen.tests.cycle.append :as append]
[jepsen.stolon [client :as c]]
[next.jdbc :as j]
[next.jdbc.result-set :as rs]
[next.jdbc.sql.builder :as sqlb]
[slingshot.slingshot :refer [try+ throw+]]))
;; Number of txn tables to shard keys across when the test map has no
;; :table-count entry.
(def default-table-count 3)
(defn table-name
  "Builds the name of the txn table with the given index, e.g. 2 -> \"txn2\"."
  [i]
  (apply str ["txn" i]))
(defn table-for
  "What table should we use for the given key?"
  [table-count k]
  ;; Shard keys across tables by hashing the key modulo the table count;
  ;; naming is delegated to table-name.
  (table-name (mod (hash k) table-count)))
(defn append-using-on-conflict!
  "Appends an element to a key using an INSERT ... ON CONFLICT statement."
  [conn test table k e]
  ;; Upsert: insert a fresh (id, sk, val) row, or -- on an id conflict --
  ;; append the element to the existing comma-separated val string.
  ;; NOTE(review): the bare (if (< (rand) 0.5) ...) form below looks like a
  ;; de-commented leftover for randomly matching on t.id vs t.sk; confirm
  ;; against the upstream source before relying on this query text.
  (j/execute!
    conn
    [(str "insert into " table " as t"
          " (id, sk, val) values (?, ?, ?)"
          " on conflict (id) do update set"
          " val = CONCAT(t.val, ',', ?) where "
          "t.id"
          ( if ( < ( rand ) 0.5 ) " t.id " " t.sk " )
          " = ?")
     k k e e k]))
(defn insert!
  "Performs an initial insert of a key with initial element e. Catches
  duplicate key exceptions, returning true if succeeded. If the insert fails
  due to a duplicate key, it'll break the rest of the transaction, assuming
  we're in a transaction, so we establish a savepoint before inserting and roll
  back to it on failure."
  [conn test txn? table k e]
  (try
    (info (if txn? "" "not") "in transaction")
    ;; Inside a transaction a failed statement poisons the remainder of the
    ;; txn, so guard the insert with a savepoint we can roll back to.
    (when txn? (j/execute! conn ["savepoint upsert"]))
    (info :insert (j/execute! conn
                    [(str "insert into " table " (id, sk, val)"
                          " values (?, ?, ?)")
                     k k e]))
    (when txn? (j/execute! conn ["release savepoint upsert"]))
    true
    (catch org.postgresql.util.PSQLException e
      ;; Duplicate-key violations are the expected race here: roll back to
      ;; the savepoint (if any) and report failure. Anything else propagates.
      (if (re-find #"duplicate key value" (.getMessage e))
        (do (info (if txn? "txn") "insert failed: " (.getMessage e))
            (when txn? (j/execute! conn ["rollback to savepoint upsert"]))
            false)
        (throw e)))))
(defn update!
  "Performs an update of a key k, adding element e. Returns true if the update
  succeeded, false otherwise."
  [conn test table k e]
  ;; Append e to the comma-separated val string for row id = k. An update
  ;; count of zero means the row does not exist yet (caller falls back to
  ;; insert!).
  (let [res (-> conn
                (j/execute-one! [(str "update " table " set val = CONCAT(val, ',', ?)"
                                      " where id = ?") e k]))]
    (info :update res)
    (-> res
        :next.jdbc/update-count
        pos?)))
(defn mop!
  "Executes a transactional micro-op on a connection. Returns the completed
  micro-op."
  [conn test txn? [f k v]]
  (let [table-count (:table-count test default-table-count)
        table (table-for table-count k)]
    ;; Small random sleep to jitter operation interleavings.
    (Thread/sleep (rand-int 10))
    [f k (case f
           ;; Reads: fetch the comma-separated val for k and parse it back
           ;; into a vector of longs; nil when the row is absent.
           ;; NOTE(review): the bare (if (< (rand) 0.5) ...) form below looks
           ;; like a de-commented alternative for querying by id vs sk;
           ;; confirm against the upstream source.
           :r (let [r (j/execute! conn
                                  [(str "select (val) from " table " where "
                                        ( if ( < ( rand ) 0.5 ) " i d " " sk " )
                                        "id"
                                        " = ? ")
                                   k]
                                  {:builder-fn rs/as-unqualified-lower-maps})]
                (when-let [v (:val (first r))]
                  (mapv parse-long (str/split v #","))))
           :append
           (let [vs (str v)]
             (if (:on-conflict test)
               ;; Use ON CONFLICT
               (append-using-on-conflict! conn test table k vs)
               ;; Try an update, and if that fails, back off to an insert.
               (or (update! conn test table k vs)
                   ;; No dice, fall back to an insert
                   (insert! conn test txn? table k vs)
                   ;; OK if THAT failed then we probably raced with another
                   ;; insert; let's try updating again.
                   (update! conn test table k vs)
                   ;; And if THAT failed, all bets are off. This happens even
                   ;; under SERIALIZABLE, but I don't think it technically
                   ;; VIOLATES serializability.
                   (throw+ {:type ::homebrew-upsert-failed
                            :key k
                            :element v})))
             v))]))
;; initialized? is an atom which we set when we first use the connection -- we set
;; up initial isolation levels, logging info, etc. This has to be stateful
;; because we don't necessarily know what process is going to use the connection
;; at open! time.
(defrecord Client [node conn initialized?]
  client/Client
  (open! [this test node]
    ;; Open a fresh connection; per-process setup is deferred to the first
    ;; invoke! since the owning process isn't known yet.
    (let [c (c/open test node)]
      (assoc this
             :node node
             :conn c
             :initialized? (atom false))))
  (setup! [_ test]
    (dotimes [i (:table-count test default-table-count)]
      ;; OK, so first worrying thing: why can this throw duplicate key errors if
      ;; it's executed with "if not exists"?
      (with-retry [conn conn
                   tries 10]
        (j/execute! conn
                    [(str "create table if not exists " (table-name i)
                          " (id int not null primary key,
                          sk int not null,
                          val text)")])
        (catch org.postgresql.util.PSQLException e
          (condp re-find (.getMessage e)
            #"duplicate key value violates unique constraint"
            :dup
            ;; Transient connection failures: back off and retry with a
            ;; freshly opened connection, up to 10 times.
            #"An I/O error occurred|connection has been closed"
            (do (when (zero? tries)
                  (throw e))
                (info "Retrying IO error")
                (Thread/sleep 1000)
                (c/close! conn)
                (retry (c/await-open node)
                       (dec tries)))
            (throw e))))
      ;; Make sure we start fresh--in case we're using an existing postgres
      ;; cluster and the DB automation isn't wiping the state for us.
      (j/execute! conn [(str "delete from " (table-name i))])))
  (invoke! [_ test op]
    ;; One-time connection setup
    (when (compare-and-set! initialized? false true)
      (j/execute! conn [(str "set application_name = 'jepsen process "
                             (:process op) "'")])
      (c/set-transaction-isolation! conn (:isolation test)))
    (c/with-errors op
      ;; Single micro-op txns skip the explicit transaction wrapper.
      (let [txn (:value op)
            use-txn? (< 1 (count txn))
            txn' (if use-txn?
                   ;(if true
                   (j/with-transaction [t conn
                                        {:isolation (:isolation test)}]
                     (mapv (partial mop! t test true) txn))
                   (mapv (partial mop! conn test false) txn))]
        (assoc op :type :ok, :value txn'))))
  (teardown! [_ test])
  (close! [this test]
    (c/close! conn)))
(defn workload
  "Builds the list-append workload: an elle append test with our Client
  attached."
  [opts]
  (let [test-opts (assoc (select-keys opts [:key-count
                                            :max-txn-length
                                            :max-writes-per-key])
                         :min-txn-length 1
                         :consistency-models [(:expected-consistency-model opts)])
        base-test (append/test test-opts)]
    (assoc base-test :client (Client. nil nil nil))))
| null | https://raw.githubusercontent.com/jepsen-io/jepsen/d4006a43e92e061b16105375cd3d36fb96cd0bc6/stolon/src/jepsen/stolon/append.clj | clojure | Use ON CONFLICT
Try an update, and if that fails, back off to an insert.
No dice, fall back to an insert
OK if THAT failed then we probably raced with another
insert; let's try updating again.
And if THAT failed, all bets are off. This happens even
under SERIALIZABLE, but I don't think it technically
up initial isolation levels, logging info, etc. This has to be stateful
because we don't necessarily know what process is going to use the connection
at open! time.
it's executed with "if not exists"?
Make sure we start fresh--in case we're using an existing postgres
cluster and the DB automation isn't wiping the state for us.
(if true | (ns jepsen.stolon.append
"Test for transactional list append."
(:require [clojure.tools.logging :refer [info warn]]
[clojure [pprint :refer [pprint]]
[string :as str]]
[dom-top.core :refer [with-retry]]
[elle.core :as elle]
[jepsen [checker :as checker]
[client :as client]
[generator :as gen]
[util :as util :refer [parse-long]]]
[jepsen.checker.timeline :as timeline]
[jepsen.tests.cycle.append :as append]
[jepsen.stolon [client :as c]]
[next.jdbc :as j]
[next.jdbc.result-set :as rs]
[next.jdbc.sql.builder :as sqlb]
[slingshot.slingshot :refer [try+ throw+]]))
(def default-table-count 3)
(defn table-name
"Takes an integer and constructs a table name."
[i]
(str "txn" i))
(defn table-for
"What table should we use for the given key?"
[table-count k]
(table-name (mod (hash k) table-count)))
(defn append-using-on-conflict!
"Appends an element to a key using an INSERT ... ON CONFLICT statement."
[conn test table k e]
(j/execute!
conn
[(str "insert into " table " as t"
" (id, sk, val) values (?, ?, ?)"
" on conflict (id) do update set"
" val = CONCAT(t.val, ',', ?) where "
"t.id"
( if ( < ( rand ) 0.5 ) " t.id " " t.sk " )
" = ?")
k k e e k]))
(defn insert!
"Performs an initial insert of a key with initial element e. Catches
duplicate key exceptions, returning true if succeeded. If the insert fails
due to a duplicate key, it'll break the rest of the transaction, assuming
we're in a transaction, so we establish a savepoint before inserting and roll
back to it on failure."
[conn test txn? table k e]
(try
(info (if txn? "" "not") "in transaction")
(when txn? (j/execute! conn ["savepoint upsert"]))
(info :insert (j/execute! conn
[(str "insert into " table " (id, sk, val)"
" values (?, ?, ?)")
k k e]))
(when txn? (j/execute! conn ["release savepoint upsert"]))
true
(catch org.postgresql.util.PSQLException e
(if (re-find #"duplicate key value" (.getMessage e))
(do (info (if txn? "txn") "insert failed: " (.getMessage e))
(when txn? (j/execute! conn ["rollback to savepoint upsert"]))
false)
(throw e)))))
(defn update!
"Performs an update of a key k, adding element e. Returns true if the update
succeeded, false otherwise."
[conn test table k e]
(let [res (-> conn
(j/execute-one! [(str "update " table " set val = CONCAT(val, ',', ?)"
" where id = ?") e k]))]
(info :update res)
(-> res
:next.jdbc/update-count
pos?)))
(defn mop!
"Executes a transactional micro-op on a connection. Returns the completed
micro-op."
[conn test txn? [f k v]]
(let [table-count (:table-count test default-table-count)
table (table-for table-count k)]
(Thread/sleep (rand-int 10))
[f k (case f
:r (let [r (j/execute! conn
[(str "select (val) from " table " where "
( if ( < ( rand ) 0.5 ) " i d " " sk " )
"id"
" = ? ")
k]
{:builder-fn rs/as-unqualified-lower-maps})]
(when-let [v (:val (first r))]
(mapv parse-long (str/split v #","))))
:append
(let [vs (str v)]
(if (:on-conflict test)
(append-using-on-conflict! conn test table k vs)
(or (update! conn test table k vs)
(insert! conn test txn? table k vs)
(update! conn test table k vs)
VIOLATES serializability .
(throw+ {:type ::homebrew-upsert-failed
:key k
:element v})))
v))]))
initialized ? is an atom which we set when we first use the connection -- we set
(defrecord Client [node conn initialized?]
client/Client
(open! [this test node]
(let [c (c/open test node)]
(assoc this
:node node
:conn c
:initialized? (atom false))))
(setup! [_ test]
(dotimes [i (:table-count test default-table-count)]
OK , so first worrying thing : why can this throw duplicate key errors if
(with-retry [conn conn
tries 10]
(j/execute! conn
[(str "create table if not exists " (table-name i)
" (id int not null primary key,
sk int not null,
val text)")])
(catch org.postgresql.util.PSQLException e
(condp re-find (.getMessage e)
#"duplicate key value violates unique constraint"
:dup
#"An I/O error occurred|connection has been closed"
(do (when (zero? tries)
(throw e))
(info "Retrying IO error")
(Thread/sleep 1000)
(c/close! conn)
(retry (c/await-open node)
(dec tries)))
(throw e))))
(j/execute! conn [(str "delete from " (table-name i))])))
(invoke! [_ test op]
One - time connection setup
(when (compare-and-set! initialized? false true)
(j/execute! conn [(str "set application_name = 'jepsen process "
(:process op) "'")])
(c/set-transaction-isolation! conn (:isolation test)))
(c/with-errors op
(let [txn (:value op)
use-txn? (< 1 (count txn))
txn' (if use-txn?
(j/with-transaction [t conn
{:isolation (:isolation test)}]
(mapv (partial mop! t test true) txn))
(mapv (partial mop! conn test false) txn))]
(assoc op :type :ok, :value txn'))))
(teardown! [_ test])
(close! [this test]
(c/close! conn)))
(defn workload
"A list append workload."
[opts]
(-> (append/test (assoc (select-keys opts [:key-count
:max-txn-length
:max-writes-per-key])
:min-txn-length 1
:consistency-models [(:expected-consistency-model opts)]))
(assoc :client (Client. nil nil nil))))
|
af32ceb8c7537c1e4e6b6364632c9b8b11ef8ef6d6e712d224278e3d1c3699e9 | magehash/magehash | index.clj | (ns asset.index
(:require [coast :refer [first! q pull]]
[clojure.string :as string]
[coast.time :as time]
[components :refer [card content title table th tr td tbody]]))
;; Renders the assets dashboard: one card per property owned by the current
;; member, each listing that site's assets with their SHA1 hashes and the
;; timestamp of the last detected change.
(defn view [{member-id :member/id :as req}]
  ;; Pull every property of this member along with its site URL and the
  ;; site's asset hash/name/timestamps.
  (let [props (q '[:pull [{:property/site [site/url
                                           {:site/assets [asset/hash asset/name asset/updated-at asset/created-at]}]}]
                   :where [property/member ?member/id]]
                 {:member/id member-id})]
    [:div
     [:h2 {:class "f4 lh-title pt0 mid-gray fw3 dash-subtitle"}
      "Assets"]
     (for [prop props]
       (card
        (title (-> prop :property/site :site/url))
        (content
         (table
          [:thead
           (tr
            (th "Asset")
            (th "SHA1")
            (th "Last Change Detected"))
           (tbody
            (for [{:asset/keys [hash name updated-at created-at]} (-> prop :property/site :site/assets)]
              (tr
               ;; Assets without a name are inline snippets.
               (td (if (string/blank? name) "inline" name))
               (td hash)
               ;; Prefer updated-at; fall back to created-at for assets that
               ;; have never changed since first seen.
               (td [:time
                    (-> (or updated-at created-at)
                        time/parse
                        .toInstant)]))))]))))]))
| null | https://raw.githubusercontent.com/magehash/magehash/9545d65a10a570536f8c6d9ebbafc4982e07cedc/src/asset/index.clj | clojure | (ns asset.index
(:require [coast :refer [first! q pull]]
[clojure.string :as string]
[coast.time :as time]
[components :refer [card content title table th tr td tbody]]))
(defn view [{member-id :member/id :as req}]
(let [props (q '[:pull [{:property/site [site/url
{:site/assets [asset/hash asset/name asset/updated-at asset/created-at]}]}]
:where [property/member ?member/id]]
{:member/id member-id})]
[:div
[:h2 {:class "f4 lh-title pt0 mid-gray fw3 dash-subtitle"}
"Assets"]
(for [prop props]
(card
(title (-> prop :property/site :site/url))
(content
(table
[:thead
(tr
(th "Asset")
(th "SHA1")
(th "Last Change Detected"))
(tbody
(for [{:asset/keys [hash name updated-at created-at]} (-> prop :property/site :site/assets)]
(tr
(td (if (string/blank? name) "inline" name))
(td hash)
(td [:time
(-> (or updated-at created-at)
time/parse
.toInstant)]))))]))))]))
| |
0d0f3c4859bbed3d2b59296ed133b3b7b76946c92e852ceb1381af605c91fe89 | linkfluence/inventory | deploy.clj | (ns com.linkfluence.inventory.api.deploy
(:require [compojure.core :refer :all]
[compojure.route :as route]
[clojure.tools.logging :as log]
;;import inventory handler
[com.linkfluence.inventory.deploy :as deploy]
[com.linkfluence.utils :as u]))
;;deploy action definition
; update : update both base and app
; app : deploy/update only app
; base : deploy/update base environment

(defn- submit
  "Runs the deploy operation f on id (queuing it for asynchronous handling)
  and returns the standard 'Operation submitted' API response."
  [f id]
  (f id)
  (u/mk-resp 200 "success" {} "Operation submitted"))

;;deploy specific routes
;; Every route has identical shape: invoke the matching deploy operation and
;; acknowledge submission; the duplicated (do ... (u/mk-resp ...)) bodies are
;; factored into the submit helper above.
(defroutes DEPLOY
  (GET "/resource/:id" [id] (submit deploy/deploy-resource id))
  (GET "/resource/:id/update" [id] (submit deploy/update-resource id))
  (GET "/resource/:id/app" [id] (submit deploy/deploy-app-resource id))
  (GET "/resource/:id/base" [id] (submit deploy/deploy-base-resource id))
  (GET "/resource/:id/finish" [id] (submit deploy/end-deployment id))
  (GET "/group/:id" [id] (submit deploy/deploy-group id))
  (GET "/group/:id/update" [id] (submit deploy/update-group id))
  (GET "/group/:id/app" [id] (submit deploy/deploy-app-group id))
  (GET "/group/:id/base" [id] (submit deploy/deploy-base-group id)))
| null | https://raw.githubusercontent.com/linkfluence/inventory/6e5e829fd08159b33c0d3e502b10ed6002643f49/src/clj/com/linkfluence/inventory/api/deploy.clj | clojure | import inventory handler
deploy action definition
update : update both base and app
app : deploy/update only app
base : deploy/update base environment
deploy specific routes | (ns com.linkfluence.inventory.api.deploy
(:require [compojure.core :refer :all]
[compojure.route :as route]
[clojure.tools.logging :as log]
[com.linkfluence.inventory.deploy :as deploy]
[com.linkfluence.utils :as u]))
(defroutes DEPLOY
(GET "/resource/:id" [id] (do (deploy/deploy-resource id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/resource/:id/update" [id] (do (deploy/update-resource id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/resource/:id/app" [id] (do (deploy/deploy-app-resource id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/resource/:id/base" [id] (do (deploy/deploy-base-resource id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/resource/:id/finish" [id] (do (deploy/end-deployment id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/group/:id" [id] (do (deploy/deploy-group id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/group/:id/update" [id] (do (deploy/update-group id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/group/:id/app" [id] (do (deploy/deploy-app-group id)
(u/mk-resp 200 "success" {} "Operation submitted")))
(GET "/group/:id/base" [id] (do (deploy/deploy-base-group id)
(u/mk-resp 200 "success" {} "Operation submitted"))))
|
5a68f520e8803a2b46ad73507606bbd3c7b96589bd5aa7283676b3b6ae39147e | ideas-edu/ideas | Prefix.hs | # LANGUAGE TypeFamilies #
-----------------------------------------------------------------------------
Copyright 2019 , Ideas project team . This file is distributed under the
terms of the Apache License 2.0 . For more information , see the files
" LICENSE.txt " and " NOTICE.txt " , which are included in the distribution .
-----------------------------------------------------------------------------
-- |
-- Maintainer :
-- Stability : provisional
Portability : portable ( depends on ghc )
--
-- Basic machinery for fully executing a strategy expression, or only
-- partially. Partial execution results in a prefix that keeps the current
-- locations in the strategy (a list of @Path@s) for continuing the execution
-- later on. A path can be used to reconstruct the sequence of steps already
performed ( a so - called trace ) . Prefixes can be merged with the Monoid
-- operation.
--
-----------------------------------------------------------------------------
module Ideas.Common.Strategy.Prefix
( -- * Prefix
Prefix, noPrefix, makePrefix, firstsOrdered
, replayProcess
, isEmptyPrefix, majorPrefix, searchModePrefix, prefixPaths
-- * Path
, Path, emptyPath, readPath, readPaths
) where
import Data.Char
import Data.List (intercalate)
import Data.Maybe
import Data.Semigroup as Sem
import Ideas.Common.Classes
import Ideas.Common.Environment
import Ideas.Common.Rewriting.Term
import Ideas.Common.Rule
import Ideas.Common.Strategy.Choice
import Ideas.Common.Strategy.Process
import Ideas.Common.Strategy.Sequence
import Ideas.Common.Strategy.StrategyTree
import Ideas.Utils.Prelude (splitsWithElem, readM)
--------------------------------------------------------------------------------
-- Prefix datatype
-- | A prefix pairs the paths followed so far with a menu of possible
-- continuations: each applicable rule leads to a new term, an environment,
-- and the remaining prefix.
data Prefix a = Prefix
   { getPaths :: [Path]
   , remainder :: Menu (Rule a) (a, Environment, Prefix a)
   }

-- Shown as the semicolon-separated list of its paths.
instance Show (Prefix a) where
   show = intercalate ";" . map show . prefixPaths

-- Merging prefixes concatenates their paths and combines the menus by choice.
instance Sem.Semigroup (Prefix a) where
   (Prefix xs p) <> (Prefix ys q) = Prefix (xs ++ ys) (p .|. q)

instance Monoid (Prefix a) where
   mempty = noPrefix
   mappend = (<>)

-- Firsts exposes the immediate steps: each first is a rule together with the
-- term and environment it produces.
instance Firsts (Prefix a) where
   type Elem (Prefix a) = (Rule a, a, Environment)

   ready = hasDone . remainder
   firsts = map reorder . bests . remainder

-- | Like 'firsts', but with the steps ordered by the given rule comparison.
firstsOrdered :: (Rule a -> Rule a -> Ordering) -> Prefix a -> [((Rule a, a, Environment), Prefix a)]
firstsOrdered cmp = map reorder . bestsOrdered cmp . remainder

-- Reassociates a menu entry into the (rule, term, environment) triple used
-- by the Firsts interface.
reorder :: (a, (b, env, c)) -> ((a, b, env), c)
reorder (x, (y, env, z)) = ((x, y, env), z)
--------------------------------------------------------------------------------
-- Constructing a prefix
-- | The error prefix (i.e., without a location in the strategy).
noPrefix :: Prefix a
noPrefix = Prefix [] empty

-- | Make a prefix from a core strategy and a start term. Equivalent to
-- replaying the empty path, i.e. starting at the very beginning.
makePrefix :: Process (Leaf a) -> a -> Prefix a
makePrefix = snd . replayProcess emptyPath
-- | Construct a prefix by replaying a path in a core strategy: the third
-- argument is the current term. Returns the rules encountered along the
-- path, plus a function building the prefix for the term reached.
replayProcess :: Path -> Process (Leaf a) -> ([Rule a], a -> Prefix a)
replayProcess (Path is) = fromMaybe ([], const noPrefix) . replay [] is
 where
   -- Walk the recorded path items; an Input item is only valid right after
   -- the Index of a dynamic leaf.
   replay acc path p =
      case path of
         [] -> return (reverse acc, createPrefix p)
         Input _:_ -> Nothing
         Index n:ns -> do
            (leaf, q) <- getByIndex n (menu p)
            case (leaf, ns) of
               (LeafRule r, _) -> replay (r:acc) ns q
               (LeafDyn d, Input t:ns2) -> do
                  a <- dynamicFromTerm d t
                  replay acc ns2 (treeToProcess a .*. q)
               _ -> Nothing
   createPrefix p = Prefix [Path is] . flip (rec []) p
   -- Build the continuation menu; ns is the reversed path accumulated so far.
   rec ns a = cut . onMenuWithIndex f doneMenu . menu
     where
       -- Dynamic leaf: materialize the strategy for the current term and
       -- record both the chosen index and the input term in the path.
       f n (LeafDyn d) p = fromMaybe empty $ do
          t <- dynamicToTerm d a
          s <- dynamicFromTerm d t
          return (rec (Input t:Index n:ns) a (treeToProcess s .*. p))
       -- Rule leaf: one menu entry per result of applying the rule.
       f n (LeafRule r) p = choice
          [ r ?~> (b, env, mk b)
          | (b, env) <- transApply (transformation r) a
          ]
        where
          ms = Index n:ns
          path = Path (is ++ reverse ms)
          mk b = Prefix [path] (rec ms b p)
          -- Prune minor steps that lead nowhere (dead ends).
          x ?~> y@(_, _, q)
             | isMinor r && stopped q = empty
             | otherwise = x |-> y

-- A prefix is stopped when no further steps are possible.
stopped :: Prefix a -> Bool
stopped = isEmpty . remainder
--------------------------------------------------------------------------------
-- Prefix

-- | A prefix is empty when no step has been taken yet, i.e. every recorded
-- path is the empty path.
isEmptyPrefix :: Prefix a -> Bool
isEmptyPrefix = all (== emptyPath) . getPaths

-- | Transforms the prefix such that only major steps are kept in the remaining
-- strategy.
majorPrefix :: Prefix a -> Prefix a
majorPrefix prfx = prfx { remainder = onMenu f doneMenu (remainder prfx) }
 where
   -- Minor steps are collapsed into their continuation's menu.
   f r (a, env, p)
      | isMajor r = r |-> (a, env, majorPrefix p)
      | otherwise = remainder (majorPrefix p)
-- | The searchModePrefix transformation changes the process in such a way that
-- all intermediate states can only be reached by one path. A prerequisite is
-- that symbols are unique (or only used once).
searchModePrefix :: Prefix a -> Prefix a
searchModePrefix prfx =
   prfx { remainder = rec (remainder (majorPrefix prfx)) }
 where
   rec m | hasDone m = doneMenu
         | otherwise = process (bests m)
   -- For each chosen step, remove that rule from the alternatives of the
   -- remaining entries so no state is reachable twice.
   process [] = empty
   process ((r, (a, env, pr)):xs) =
      (r |-> (a, env, pr { remainder = rec (remainder pr) }))
      .|. process (concatMap (change r) xs)
   change y (r, pair) =
      bests (filterPrefix (/= y) r pair)

-- Keeps only the menu entries (recursively) whose rule satisfies the
-- predicate.
filterPrefix :: (Rule a -> Bool) -> Rule a -> (a, Environment, Prefix a) -> Menu (Rule a) (a, Environment, Prefix a)
filterPrefix cond = f
 where
   rec = onMenu f doneMenu
   f r (a, env, pr) = if cond r then r |-> (a, env, pr { remainder = rec (remainder pr) }) else empty

-- | Returns the current @Path@.
prefixPaths :: Prefix a -> [Path]
prefixPaths = getPaths
--------------------------------------------------------------------------------
-- Path

-- | A path encodes a location in a strategy. Paths are represented as a list
-- of integers and terms (the latter act as input for the dynamic strategies).
newtype Path = Path [PathItem]
   deriving Eq

data PathItem = Index Int | Input Term
   deriving Eq

instance Show PathItem where
   show (Index n) = show n
   show (Input t) = show t

-- A leading digit is parsed as an Index; anything else as an Input term.
instance Read PathItem where
   readsPrec n s =
      case dropWhile isSpace s of
         s2@(c:_) | isDigit c -> map (mapFirst Index) (readsPrec n s2)
         s2 -> map (mapFirst Input) (readsPrec n s2)

instance Show Path where
   show (Path is) = show is
   -- Lists of paths are rendered semicolon-separated (see readPaths).
   showList = (++) . intercalate ";" . map show

-- | The empty path.
emptyPath :: Path
emptyPath = Path []

-- Parses a single path from its list representation.
readPath :: Monad m => String -> m Path
readPath = fmap Path . readM

-- Parses a semicolon-separated list of paths.
readPaths :: Monad m => String -> m [Path]
readPaths = mapM readPath . splitsWithElem ';'
---------------------------------------------------------------------------
|
Maintainer :
Stability : provisional
Basic machinery for fully executing a strategy expression, or only
partially. Partial execution results in a prefix that keeps the current
locations in the strategy (a list of @Path@s) for continuing the execution
later on. A path can be used to reconstruct the sequence of steps already
operation.
---------------------------------------------------------------------------
* Prefix
* Path
------------------------------------------------------------------------------
Prefix datatype
------------------------------------------------------------------------------
Constructing a prefix
| The error prefix (i.e., without a location in the strategy).
| Make a prefix from a core strategy and a start term.
argument is the current term.
------------------------------------------------------------------------------
| Transforms the prefix such that only major steps are kept in the remaining
strategy.
| The searchModePrefix transformation changes the process in such a way that
that symbols are unique (or only used once).
| Returns the current @Path@.
------------------------------------------------------------------------------
Path
| A path encodes a location in a strategy. Paths are represented as a list
of integers and terms (the latter act as input for the dynamic strategies).
| The empty path.
| # LANGUAGE TypeFamilies #
Copyright 2019 , Ideas project team . This file is distributed under the
terms of the Apache License 2.0 . For more information , see the files
" LICENSE.txt " and " NOTICE.txt " , which are included in the distribution .
Portability : portable ( depends on ghc )
performed ( a so - called trace ) . Prefixes can be merged with the Monoid
module Ideas.Common.Strategy.Prefix
Prefix, noPrefix, makePrefix, firstsOrdered
, replayProcess
, isEmptyPrefix, majorPrefix, searchModePrefix, prefixPaths
, Path, emptyPath, readPath, readPaths
) where
import Data.Char
import Data.List (intercalate)
import Data.Maybe
import Data.Semigroup as Sem
import Ideas.Common.Classes
import Ideas.Common.Environment
import Ideas.Common.Rewriting.Term
import Ideas.Common.Rule
import Ideas.Common.Strategy.Choice
import Ideas.Common.Strategy.Process
import Ideas.Common.Strategy.Sequence
import Ideas.Common.Strategy.StrategyTree
import Ideas.Utils.Prelude (splitsWithElem, readM)
data Prefix a = Prefix
{ getPaths :: [Path]
, remainder :: Menu (Rule a) (a, Environment, Prefix a)
}
instance Show (Prefix a) where
show = intercalate ";" . map show . prefixPaths
instance Sem.Semigroup (Prefix a) where
(Prefix xs p) <> (Prefix ys q) = Prefix (xs ++ ys) (p .|. q)
instance Monoid (Prefix a) where
mempty = noPrefix
mappend = (<>)
instance Firsts (Prefix a) where
type Elem (Prefix a) = (Rule a, a, Environment)
ready = hasDone . remainder
firsts = map reorder . bests . remainder
firstsOrdered :: (Rule a -> Rule a -> Ordering) -> Prefix a -> [((Rule a, a, Environment), Prefix a)]
firstsOrdered cmp = map reorder . bestsOrdered cmp . remainder
reorder :: (a, (b, env, c)) -> ((a, b, env), c)
reorder (x, (y, env, z)) = ((x, y, env), z)
noPrefix :: Prefix a
noPrefix = Prefix [] empty
makePrefix :: Process (Leaf a) -> a -> Prefix a
makePrefix = snd . replayProcess emptyPath
| Construct a prefix by replaying a path in a core strategy : the third
replayProcess :: Path -> Process (Leaf a) -> ([Rule a], a -> Prefix a)
replayProcess (Path is) = fromMaybe ([], const noPrefix) . replay [] is
where
replay acc path p =
case path of
[] -> return (reverse acc, createPrefix p)
Input _:_ -> Nothing
Index n:ns -> do
(leaf, q) <- getByIndex n (menu p)
case (leaf, ns) of
(LeafRule r, _) -> replay (r:acc) ns q
(LeafDyn d, Input t:ns2) -> do
a <- dynamicFromTerm d t
replay acc ns2 (treeToProcess a .*. q)
_ -> Nothing
createPrefix p = Prefix [Path is] . flip (rec []) p
rec ns a = cut . onMenuWithIndex f doneMenu . menu
where
f n (LeafDyn d) p = fromMaybe empty $ do
t <- dynamicToTerm d a
s <- dynamicFromTerm d t
return (rec (Input t:Index n:ns) a (treeToProcess s .*. p))
f n (LeafRule r) p = choice
[ r ?~> (b, env, mk b)
| (b, env) <- transApply (transformation r) a
]
where
ms = Index n:ns
path = Path (is ++ reverse ms)
mk b = Prefix [path] (rec ms b p)
x ?~> y@(_, _, q)
| isMinor r && stopped q = empty
| otherwise = x |-> y
stopped :: Prefix a -> Bool
stopped = isEmpty . remainder
Prefix
isEmptyPrefix :: Prefix a -> Bool
isEmptyPrefix = all (== emptyPath) . getPaths
majorPrefix :: Prefix a -> Prefix a
majorPrefix prfx = prfx { remainder = onMenu f doneMenu (remainder prfx) }
where
f r (a, env, p)
| isMajor r = r |-> (a, env, majorPrefix p)
| otherwise = remainder (majorPrefix p)
all intermediate states can only be reached by one path . A prerequisite is
searchModePrefix :: Prefix a -> Prefix a
searchModePrefix prfx =
prfx { remainder = rec (remainder (majorPrefix prfx)) }
where
rec m | hasDone m = doneMenu
| otherwise = process (bests m)
process [] = empty
process ((r, (a, env, pr)):xs) =
(r |-> (a, env, pr { remainder = rec (remainder pr) }))
.|. process (concatMap (change r) xs)
change y (r, pair) =
bests (filterPrefix (/= y) r pair)
filterPrefix :: (Rule a -> Bool) -> Rule a -> (a, Environment, Prefix a) -> Menu (Rule a) (a, Environment, Prefix a)
filterPrefix cond = f
where
rec = onMenu f doneMenu
f r (a, env, pr) = if cond r then r |-> (a, env, pr { remainder = rec (remainder pr) }) else empty
prefixPaths :: Prefix a -> [Path]
prefixPaths = getPaths
newtype Path = Path [PathItem]
deriving Eq
data PathItem = Index Int | Input Term
deriving Eq
instance Show PathItem where
show (Index n) = show n
show (Input t) = show t
instance Read PathItem where
readsPrec n s =
case dropWhile isSpace s of
s2@(c:_) | isDigit c -> map (mapFirst Index) (readsPrec n s2)
s2 -> map (mapFirst Input) (readsPrec n s2)
instance Show Path where
show (Path is) = show is
showList = (++) . intercalate ";" . map show
emptyPath :: Path
emptyPath = Path []
readPath :: Monad m => String -> m Path
readPath = fmap Path . readM
readPaths :: Monad m => String -> m [Path]
readPaths = mapM readPath . splitsWithElem ';' |
acc0e6a31e7fddd763e23b39ea6fa07977333ee391a73113836840010c99953f | MyDataFlow/ttalk-server | mustache_ctx.erl | %% The MIT License
%%
Copyright ( c ) 2009 < >
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
See the README at for additional
%% documentation and usage examples.
-module(mustache_ctx).
-define(MODULE_KEY, '__mod__').
-define(NEW_EXIT(Data), exit({improper_ctx, Data})).
-export([ new/0, new/1, to_list/1 ]).
-export([ merge/2 ]).
-export([ module/1, module/2 ]).
-export([ get/2 ]).
-ifdef(EUNIT).
-compile(export_all).
-endif.
%% ===================================================================
%% Create new context
%% ===================================================================
new() -> new([]).
new(List) when is_list(List) ->
try dict:from_list(List)
catch
_:_ -> ?NEW_EXIT(List)
end;
new(Data) when is_tuple(Data) ->
case erlang:element(1, Data) of
dict -> Data;
_ -> ?NEW_EXIT(Data)
end;
new(Data) ->
?NEW_EXIT(Data).
to_list(Ctx) ->
List = dict:to_list(Ctx),
lists:keydelete(?MODULE_KEY, 1, List).
%% ===================================================================
%% Merge
%% ===================================================================
merge(Ctx1, Ctx2) ->
dict:merge(fun(_, Value1, _) -> Value1 end, Ctx1, Ctx2).
%% ===================================================================
Dynamic data module
%% ===================================================================
module(Ctx) ->
case dict:find(?MODULE_KEY, Ctx) of
{ok, Module} -> {ok, Module};
error -> {error, module_not_set}
end.
module(Module, Ctx) ->
dict:store(?MODULE_KEY, Module, Ctx).
%% ===================================================================
%% Module
%% ===================================================================
get(Key, Ctx) ->
case dict:find(Key, Ctx) of
{ok, Value} -> {ok, Value};
error ->
get_from_module(Key, Ctx)
end.
get_from_module(Key, Ctx) ->
FunList = case module(Ctx) of
{error, _} -> [];
{ok, Module} -> [
fun() -> Module:Key(Ctx) end,
fun() -> Module:Key() end
]
end,
get_from_module(FunList).
get_from_module([]) -> {error, not_found};
get_from_module([ Fun | Rest ]) ->
try Value = Fun(),
{ok, Value}
catch
_:_ ->
get_from_module(Rest)
end.
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/deps/mustache/src/mustache_ctx.erl | erlang | The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
documentation and usage examples.
===================================================================
Create new context
===================================================================
===================================================================
Merge
===================================================================
===================================================================
===================================================================
===================================================================
Module
=================================================================== | Copyright ( c ) 2009 < >
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
See the README at for additional
-module(mustache_ctx).
-define(MODULE_KEY, '__mod__').
-define(NEW_EXIT(Data), exit({improper_ctx, Data})).
-export([ new/0, new/1, to_list/1 ]).
-export([ merge/2 ]).
-export([ module/1, module/2 ]).
-export([ get/2 ]).
-ifdef(EUNIT).
-compile(export_all).
-endif.
new() -> new([]).
new(List) when is_list(List) ->
try dict:from_list(List)
catch
_:_ -> ?NEW_EXIT(List)
end;
new(Data) when is_tuple(Data) ->
case erlang:element(1, Data) of
dict -> Data;
_ -> ?NEW_EXIT(Data)
end;
new(Data) ->
?NEW_EXIT(Data).
to_list(Ctx) ->
List = dict:to_list(Ctx),
lists:keydelete(?MODULE_KEY, 1, List).
merge(Ctx1, Ctx2) ->
dict:merge(fun(_, Value1, _) -> Value1 end, Ctx1, Ctx2).
Dynamic data module
module(Ctx) ->
case dict:find(?MODULE_KEY, Ctx) of
{ok, Module} -> {ok, Module};
error -> {error, module_not_set}
end.
module(Module, Ctx) ->
dict:store(?MODULE_KEY, Module, Ctx).
get(Key, Ctx) ->
case dict:find(Key, Ctx) of
{ok, Value} -> {ok, Value};
error ->
get_from_module(Key, Ctx)
end.
get_from_module(Key, Ctx) ->
FunList = case module(Ctx) of
{error, _} -> [];
{ok, Module} -> [
fun() -> Module:Key(Ctx) end,
fun() -> Module:Key() end
]
end,
get_from_module(FunList).
get_from_module([]) -> {error, not_found};
get_from_module([ Fun | Rest ]) ->
try Value = Fun(),
{ok, Value}
catch
_:_ ->
get_from_module(Rest)
end.
|
89f4569786edf7898822d54eca6a7dc2060f3a49e85f9fdb7541bdb7e28c8cff | screenshotbot/screenshotbot-oss | java_adapter.lisp | ;;;
;;;
Copyright ( c ) 2009 ,
;;; All rights reserved by the author.
;;;
;;; Permission is hereby granted, free of charge, to any person
;;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without
;;; restriction, including without limitation the rights to use, copy,
;;; modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software , and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
;;; HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
;;; WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
;;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
;;; DEALINGS IN THE SOFTWARE.
;;;
;;;
(in-package :cl+j)
;(declaim (optimize debug))
( declaim ( optimize ( debug 0 ) ( speed 3 ) ) )
;;; Some standard callbacks.
(def-java-native
"cl_j.RunInLisp"
("int" "funcall" (("int" fun-ref)) ()
(new-lisp-reference (funcall (lisp-reference-value fun-ref))))
("void" "freeLispReference" (("int" ref)) ()
(free-lisp-reference ref))
)
;;;
| null | https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/fb88dc205c052caf98d6833d03594323390800d3/third-party/cl%2Bj-0.4/java_adapter.lisp | lisp |
All rights reserved by the author.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
(declaim (optimize debug))
Some standard callbacks.
| Copyright ( c ) 2009 ,
files ( the " Software " ) , to deal in the Software without
of the Software , and to permit persons to whom the Software is
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
(in-package :cl+j)
( declaim ( optimize ( debug 0 ) ( speed 3 ) ) )
(def-java-native
"cl_j.RunInLisp"
("int" "funcall" (("int" fun-ref)) ()
(new-lisp-reference (funcall (lisp-reference-value fun-ref))))
("void" "freeLispReference" (("int" ref)) ()
(free-lisp-reference ref))
)
|
759b5caf85737f8fc472bf6173891e122f5d00d5e63c1d3389ed96a27d2e174c | draperlaboratory/cbat_tools | cbat_clp.ml | (***************************************************************************)
(* *)
Copyright ( C ) 2018/2019 The Charles Stark Draper Laboratory , Inc.
(* *)
(* This file is provided under the license found in the LICENSE file in *)
(* the top-level directory of this project. *)
(* *)
This work is funded in part by ONR / NAWC Contract N6833518C0107 . Its
content does not necessarily reflect the position or policy of the US
(* Government and no official endorsement should be inferred. *)
(* *)
(***************************************************************************)
open Bap.Std
include Cbat_vsa_utils
module W = Word
module Option = Core_kernel.Option
module Sexp = Core_kernel.Sexp
module List = Core_kernel.List
open !Cbat_word_ops
A CLP { base ; step ; card } represents the following set :
{ base + n * step | 0 < = n < cardn } . Note that since can be 2^sz , it
is stored in a sz+1 bit word . If step * > = 2^sz in Z , then the CLP is
conservatively approximated as cardn = 2^sz . This simplifies to the following
set : { ( base + n * step ) % sz | n in } . We refer to these as ' infinite ' CLPs .
TODO : is there a better name than infinite CLPs ?
Invariants :
W.bitwidth base = W.bitwidth step
W.bitwidth = ( W.bitwidth base ) + 1
Note that for an arbitrary CLP , cardn is an approximation of its cardinality .
Its actual cardinality could be less since the CLP may overlap itself .
Note : module - internal code should be careful of any operations that treat
the CLP as an interval from its beginning to its end since this is likely to break
equality between certain infinite CLPs ( specifically those that are and are n't
finite ) .
{base + n * step | 0 <= n < cardn}. Note that since cardn can be 2^sz, it
is stored in a sz+1 bit word. If step * cardn >= 2^sz in Z, then the CLP is
conservatively approximated as cardn = 2^sz. This simplifies to the following
set: {(base + n * step) % sz | n in Nat}. We refer to these as 'infinite' CLPs.
TODO: is there a better name than infinite CLPs?
Invariants:
W.bitwidth base = W.bitwidth step
W.bitwidth cardn = (W.bitwidth base) + 1
Note that for an arbitrary CLP, cardn is an approximation of its cardinality.
Its actual cardinality could be less since the CLP may overlap itself.
Note: module-internal code should be careful of any operations that treat
the CLP as an interval from its beginning to its end since this is likely to break
equality between certain infinite CLPs (specifically those that are and aren't
finite).
*)
type t = {base : word; step : word; cardn : word}
[@@deriving bin_io]
Creates a new CLP that ranges from base to base + step * ( pred num )
in increments of size step . Note that num = 0 represents the empty
set and that this function approximates progressions that wrap
around themselves .
' width ' is the desired bitwidth of the CLP .
Defaults to the bitwidth of base .
Step and cardn default such that [ create n ] represents the set { n }
Note that canonization depends on these defaults .
in increments of size step. Note that num = 0 represents the empty
set and that this function approximates progressions that wrap
around themselves.
'width' is the desired bitwidth of the CLP.
Defaults to the bitwidth of base.
Step and cardn default such that [create n] represents the set {n}
Note that canonization depends on these defaults.
*)
let create ?(width : int option) ?(step = W.b1) ?(cardn = W.b1) base : t =
let width = Option.value ~default:(W.bitwidth base) width in
(* truncates or extends the word to the appropriate size *)
let fit = W.extract_exn ~hi:(width - 1) in
While fit produces the correct behavior for the step ( modulo the ) ,
a sufficiently large cardinality is instead equivalent to the maximum
cardinality at the given width .
a sufficiently large cardinality is instead equivalent to the maximum
cardinality at the given width.
*)
let cardn = Cbat_word_ops.cap_at_width ~width:(width + 1) cardn in
{base = fit base; step = fit step; cardn}
let singleton w = create w
let bitwidth (p : t) : int = W.bitwidth p.base
helper function ; constructs a CLP from its lower bound , upper bound
and step . Note that if the following equation does not hold , then
the upper bound will not be an element of the resultant CLP :
exists i such that upper = lower + step*i
TODO : check ; there are some places this could be used
and step. Note that if the following equation does not hold, then
the upper bound will not be an element of the resultant CLP:
exists i such that upper = lower + step*i
TODO: check; there are some places this could be used
*)
let cardn_from_bounds base step e : word =
let width = W.bitwidth base in
assert(W.bitwidth step = width);
Compute the cardinality . Integer division ( floor ) produces
the correct behavior .
the correct behavior.
*)
if W.is_zero step then W.one (width + 1) else
let div_by_step = W.div (W.sub e base) step in
(* extract adds an extra bit at the high end so that
the call to succ never wraps to 0.
*)
W.succ (W.extract_exn ~hi:width div_by_step)
helper function ; computes the minimum / canonical CLP to represent
the set of words of the form b + sn for any integer n
Note : returns a canonized result
the set of words of the form b + sn for any integer n
Note: returns a canonized result
*)
let infinite (b, s) : t =
let width = W.bitwidth b in
assert (width = W.bitwidth s);
if W.is_zero s then create b
else let div, twos = factor_2s s in
let step = W.div s div in
let base = W.modulo b step in
let cardn = dom_size ~width:(width + 1) (width - twos) in
{base; step; cardn}
helper function ; determines whether a given CLP represents
a set of the form { ( b + s*i ) % z | i in }
Note that in the terminology of this module , a set may be both
infinite and finite . A set is both iff e + s = b % z where
e is the end of the CLP and z is its bitwidth .
a set of the form {(b + s*i) % z | i in Nat}
Note that in the terminology of this module, a set may be both
infinite and finite. A set is both iff e + s = b % z where
e is the end of the CLP and z is its bitwidth.
*)
let is_infinite p : bool =
let width = bitwidth p in
let ds = dom_size ~width:(2*width + 1) width in
let mul = mul_exact p.cardn p.step in
W.(>=) mul ds
outputs a canonized CLP
let bottom (width : int) : t =
assert(width > 0);
create ~width W.b0 ~cardn:W.b0
let top (i : int) : t =
assert(i > 0);
let zero = W.zero i in
let one = W.one i in
infinite (zero, one)
helper function ; converts a CLP into a unique representation .
A canonized infinite CLP has the following properties :
( elem i ( b , s ) ) /\ i < j < i + s = > not ( elem j ( b , s ) )
b < s
A canonized infinite CLP has the following properties:
(elem i (b,s)) /\ i < j < i + s => not (elem j (b,s))
b < s
*)
let canonize (p : t) : t =
let szInt = bitwidth p in
let two = W.of_int ~width:(szInt + 1) 2 in
Each CLP with step s and cardinality c such that 0 < sc < = 2^szInt
2 < c < 2^szInt has a single representation
2 < c < 2^szInt has a single representation
*)
A cardinality of 0 represents the empty set
if W.is_zero p.cardn then bottom szInt
Either a nonzero cardinality and step of 0 or a cardinality of 1
represents a singleton set .
represents a singleton set.
*)
else if W.is_zero p.step || is_one p.cardn then create p.base ~step:W.b0
(* Cardinality 2 sets can be flipped; we represent them in the order
where the end is greater than the beginning.
*)
else if W.(=) p.cardn two then
let e = W.add p.base p.step in
if W.(>=) e p.base then p
else create e ~step:(W.neg p.step) ~cardn:two
If p.cardn steps traverse the full domain , we approximate
with an infinite CLP . In other words , all cardinalities c
such that c*p.step < = 2^szIn are treated equivalently .
with an infinite CLP. In other words, all cardinalities c
such that c*p.step <= 2^szIn are treated equivalently.
*)
else if is_infinite p then infinite(p.base, p.step)
else p
Returns the cardinality of p as a sz+1 width word ,
where sz is the bitwidth of p.
where sz is the bitwidth of p.
*)
let cardinality (p : t) : word = (canonize p).cardn
helper function ; computes the last point in a finite CLP .
Note that this function may return misleading results for an
infinite CLP . The result will be a point on the CLP , but is in
no way an ' end ' since an infinite CLP has no notion of end .
Note that this function may return misleading results for an
infinite CLP. The result will be a point on the CLP, but is in
no way an 'end' since an infinite CLP has no notion of end.
*)
let finite_end (p : t) : word option =
let width = bitwidth p in
let n = W.extract_exn ~hi:(width - 1) p.cardn in
if W.is_zero p.cardn then None
else Some (W.add p.base (W.mul p.step (W.pred n)))
(* Note: this function is fully precise *)
let lnot (p : t) : t =
We use the finite end even in the infinite case since it is always
guaranteed to be some point on the CLP and for the infinite CLP ,
any point works as the base .
guaranteed to be some point on the CLP and for the infinite CLP,
any point works as the base.
*)
Option.value_map ~default:(bottom (bitwidth p)) (finite_end p)
~f:(fun e -> {base = W.lnot e; step = p.step; cardn = p.cardn})
Returns a list containing all of the elements represented by
the input CLP .
the input CLP.
*)
let iter (p : t) : word list =
let rec iter_acc b s n acc =
if W.is_zero n then acc
else let n' = W.pred n in
iter_acc (W.add b s) s n' (b :: acc) in
let p' = canonize p in
iter_acc p'.base p'.step p'.cardn []
computes the closest element of p that precedes
i , i.e. the first element reached by starting from i and decreasing .
Assumes that the inputs have the same bitwidth .
i, i.e. the first element reached by starting from i and decreasing.
Assumes that the inputs have the same bitwidth.
*)
let nearest_pred (i : word) (p : t) : word option =
assert(bitwidth p = W.bitwidth i);
let open Monads.Std.Monad.Option.Syntax in
If finite_end returns none , the CLP is empty
finite_end p >>= fun e ->
if W.is_zero p.step then !!(p.base)
else
(* We translate i and p by -p.base to avoid wrapping over 0 *)
let diff = W.sub i p.base in
let rm = W.modulo diff p.step in
(* Compute the end of p translated by -p.base *)
let end' = W.sub e p.base in
if is_infinite p then !!(W.sub i rm)
else if W.(>=) diff end' then !!(W.add end' p.base) else
(* The remainder is the difference between i and the result*)
!!(W.sub i rm)
(* helper function;
Some (nearest_inf_pred w b p) = nearest_pred w (infinite b p) *)
let nearest_inf_pred (w : word) (base : word) (step : word) : word =
if W.is_zero step then base else
let diff = W.sub w base in
let rm = W.modulo diff step in
W.sub w rm
let nearest_succ (i : word) (p : t) : word option =
Option.map ~f:W.lnot (nearest_pred (W.lnot i) (lnot p))
helper function ;
Some ( nearest_inf_succ w b p ) = nearest_succ w ( infinite b p )
Some (nearest_inf_succ w b p) = nearest_succ w (infinite b p) *)
let nearest_inf_succ (w : word) (base : word) (step : word) : word =
W.lnot (nearest_inf_pred (W.lnot w) (W.lnot base) step)
let max_elem (p : t) : word option =
let max_wd = W.ones (bitwidth p) in
nearest_pred max_wd p
let min_elem (p : t) : word option =
let min_wd = W.zero (bitwidth p) in
nearest_succ min_wd p
* The elem of a signed CLP is going to be the nearest precedessor
of ( 2^w / 2 ) - 1 , where [ w ] is the bit - width of the words in the CLP .
For instance , if we assume [ w = 3 ] ( 3 - bits in each word ) , a
circle with the numbers 0 through 7 , when signed , becomes a
circle with the numbers -4 through 3 . So the maximum element in the
CLP is going to be the nearest predecessor to 3 .
We can get ( 2^w / 2 ) by getting the [ dom_size ] of [ w - 1 ] . E.g. ,
the domain size when [ w = 3 ] is 8 , but half of that is the same as
the domain size when [ w = 2 ] , namely 4 .
Note that the element this function returns is not signed ( like
all BAP words ) . To see the signed value , use [ W.signed word ] .
of (2^w / 2) - 1, where [w] is the bit-width of the words in the CLP.
For instance, if we assume [w = 3] (3-bits in each word), a
circle with the numbers 0 through 7, when signed, becomes a
circle with the numbers -4 through 3. So the maximum element in the
CLP is going to be the nearest predecessor to 3.
We can get (2^w / 2) by getting the [dom_size] of [w - 1]. E.g.,
the domain size when [w = 3] is 8, but half of that is the same as
the domain size when [w = 2], namely 4.
Note that the max element this function returns is not signed (like
all BAP words). To see the signed value, use [W.signed word].*)
let max_elem_signed (p : t) : word option =
let width = bitwidth p in
let half_way_point = dom_size ~width:width (width - 1) in
let max_dom_elem = W.pred half_way_point in
nearest_pred max_dom_elem p
* The min elem of a signed CLP is going to be the nearest successor
of ( 2^w / 2 ) . Note that the returned word is unsigned . To see
the signed value , use [ W.signed word ] .
of (2^w / 2). Note that the returned word is unsigned. To see
the signed value, use [W.signed word]. *)
let min_elem_signed (p : t) : word option =
let width = bitwidth p in
let half_way_point = dom_size ~width:width (width - 1) in
nearest_succ half_way_point p
let splits_by (p : t) (w : word) : bool =
let p = canonize p in
let divides a b = W.is_zero (W.modulo b a) in
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:true begin
min_elem p >>= fun min_p ->
finite_end p >>= fun e ->
if W.is_zero p.step then !!true else
(* The Clp contains more than one element *)
!!(divides w p.step &&
(* if it wraps 0, w must divide the gap *)
(W.(=) p.base min_p ||
divides w (W.sub e p.base)))
end
let min_separation (p : t) : word option =
let p = canonize p in
let open Monads.Std.Monad.Option.Syntax in
finite_end p >>= fun e ->
if W.(=) p.base e then None
else !!(min p.step (min (W.sub p.base e) (W.sub e p.base)))
Decides membership in the set represented by the CLP
let elem (i : word) (p : t) : bool =
assert (W.bitwidth i = bitwidth p);
match nearest_pred i p with
| None -> false
| Some j -> W.(=) i j
helper function ; checks whether two CLPs have the same size
and if so , outputs that size
and if so, outputs that size
*)
let get_and_check_sizes (p1 : t) (p2 : t) : int =
let sz1 = bitwidth p1 in
let sz2 = bitwidth p2 in
let err_str = Printf.sprintf "Input CLPs of different sizes: %i and %i" sz1 sz2 in
if sz1 <> sz2 then raise (Invalid_argument err_str);
sz1
Checks whether a given CLP is the top element of the lattice .
Works by checking whether the step is coprime with the size of
the domain since the domain can be seen as a cyclic group
of the form ( Z_(2^n ) , + ) .
Works by checking whether the step is coprime with the size of
the domain since the domain can be seen as a cyclic group
of the form (Z_(2^n), +).
*)
let is_top (p : t) : bool = canonize p = top (bitwidth p)
(* decides whether the input represents the empty set *)
let is_bottom (p : t) : bool = W.is_zero p.cardn
helper function ; checks whether the first infinite progression
is a subprogression ( i.e. aligned and with a step that contains
at least as many factors of 2 ) of the second .
is a subprogression (i.e. aligned and with a step that contains
at least as many factors of 2) of the second.
*)
let subprogression (b1,s1) (b2,s2) : bool =
if W.is_zero s1 then
return true if b1 is in the second progression . In other words ,
if any number of s2 - sized steps gets from b1 to b2
if any number of s2-sized steps gets from b1 to b2
*)
let diff = W.sub b2 b1 in
let rem = W.modulo diff s2 in
W.is_zero rem
else if W.is_zero s2 then W.is_zero s1 && b1 = b2
else
let coprime1, _ = factor_2s s1 in
let coprime2, _ = factor_2s s2 in
let powTwo1 = W.div s1 coprime1 in
let powTwo2 = W.div s2 coprime2 in
let bRoot1 = W.modulo b1 powTwo2 in
let bRoot2 = W.modulo b2 powTwo2 in
W.(>=) powTwo1 powTwo2 && W.(=) bRoot1 bRoot2
determines whether two CLPs are equivalent . Much faster than subset .
let equal (p1 : t) (p2 : t) : bool =
let _ = get_and_check_sizes p1 p2 in
canonize p1 = canonize p2
let unwrap (p : t) : t =
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(bottom (bitwidth p)) begin
min_elem p >>= fun base ->
max_elem p >>= fun e ->
let step = p.step in
let cardn = cardn_from_bounds base step e in
!!{base; step; cardn}
end
let unwrap_signed (p : t) : t =
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(top (bitwidth p)) begin
min_elem_signed p >>= fun base ->
max_elem_signed p >>= fun e ->
let step = p.step in
let cardn = cardn_from_bounds base step e in
!!{base; step; cardn}
end
helper function ; takes two inclusive circular intervals and returns
the smallest interval that is a superset of the two . Assumes that
all inputs have the same bitwidth .
the smallest interval that is a superset of the two. Assumes that
all inputs have the same bitwidth.
*)
let interval_union (a1,b1) (a2,b2) : (word * word) =
let szInt = W.bitwidth a1 in
we translate both intervals by a1 so that one interval starts at 0
let b1' = W.sub b1 a1 in
let a2' = W.sub a2 a1 in
let b2' = W.sub b2 a1 in
let zero = W.zero szInt in
If b2 ' < a2 ' , then the second interval wraps over the zero point and
a1 is in the interval ( a2,b2 ) . Also , if a2 ' ( and so also b2 ' ) is in
the first interval , then the two wrap fully around the circle .
a1 is in the interval (a2,b2). Also, if a2' (and so also b2') is in
the first interval, then the two wrap fully around the circle.
*)
if W.(>=) b1' a2' && W.(<) b2' a2' then (b1, W.pred b1)
If a2 ' and b2 ' are both between 0 and b1 ' then a2 and b2 are between
a1 and b1 . Since the case above has been ruled out , the first interval
subsumes the second .
a1 and b1. Since the case above has been ruled out, the first interval
subsumes the second.
*)
else if W.(>=) b1' a2' && W.(>=) b1' b2' then (a1, b1)
By excluding the above two cases , we know that b2 ' is not in ( 0,b1 ' ) .
Since a2 ' is , the intervals overlap and stretch from a1 to b2 .
Since a2' is, the intervals overlap and stretch from a1 to b2.
*)
else if W.(>=) b1' a2' then (a1, b2)
(* In all following cases, a2' is not within (0, b1'). If b2' is, then
the intervals overlap in the other direction and stretch from a2 to b1.
*)
else if W.(<) b2' b1' then (a2, b1)
(* In all following cases, neither a2' or b2' are in (0, b1'). If b2' < a2'
then (a2', b2') wraps around 0, so it must subsume (0, b1).
*)
else if W.(<) b2' a2' then (a2, b2)
Otherwise , there are 2 gaps and we include the smaller one .
else if W.(>) (W.sub a2' b1') (W.sub zero b2') then (a2,b1)
else (a1, b2)
(* helper function; shifts every element of a CLP by [i] around the
   number circle. The step and cardinality are unaffected.
*)
let translate (p : t) i : t =
  {p with base = W.add p.base i}
(* helper function; given two (infinite) progressions, each described by a
   starting point and a step size, computes the largest step that — started
   from either base — eventually visits every point of both progressions in
   their original order. Equivalently, this is the step of the union of the
   two infinite CLPs. Assumes both inputs share a bitwidth. The result is
   the gcd of the two steps and the (shortest) distance between the bases.
*)
let common_step (b1,s1) (b2,s2) : word =
  (* distance between the two bases, taken in the non-wrapping direction *)
  let base_gap = if W.(>) b1 b2 then W.sub b1 b2 else W.sub b2 b1 in
  (* a zero step marks a singleton progression; only fold in nonzero steps *)
  let step_gcd =
    if W.is_zero s1 then s2
    else if W.is_zero s2 then s1
    else bounded_gcd s1 s2 in
  bounded_gcd step_gcd base_gap
(* defines a partial order on CLPs in terms of the sets that
   they represent: [subset p1 p2] holds when every element of p1
   is an element of p2.
*)
let subset (p1 : t) (p2 : t) : bool =
  let width = get_and_check_sizes p1 p2 in
  (* canonization allows us to treat the CLPs as finite *)
  let p1 = canonize p1 in
  let p2 = canonize p2 in
  (* We translate both CLPs such that one of them starts at 0
     in order to simplify our reasoning.
  *)
  let nb2 = W.neg p2.base in
  let p1 = translate p1 nb2 in
  let p2 = translate p2 nb2 in
  (* One finite CLP is a subset of another if its
     step is a multiple of the other's step, its bounds
     are within the bounds of the other, and they
     overlap on at least one point.
     Note that since singleton CLPs can have any step,
     the step condition does not need to be checked.
  *)
  let end1 = finite_end p1 in
  let end2 = finite_end p2 in
  begin match end1, end2 with
    | None, _ -> true
    | Some _, None -> false
    | Some e1, Some e2 ->
      (* p1 is a subset of p2 if its bounds are within p2's bounds
         and p2's step covers all elements in their union.
      *)
      let in_bounds = W.(<=) e1 e2 && W.(<=) p1.base e2 in
      let step_and_overlap = W.(=) (common_step (p1.base,p1.step) (W.zero width, p2.step)) p2.step in
      let singleton_elem = is_one p1.cardn && elem p1.base p2 in
      singleton_elem || (in_bounds && step_and_overlap)
  end
(* To find the start of the intersection, we need to find the
   first point base with the following constraints:
   base = p1.base (mod p1.step)
   base = p1.base (mod p2.step)
   base in [p1.base, e1]
   base in [p2.base, e2]
   where e1 and e2 are the ends of p1 and p2 respectively.
   We translate both CLPs by -b1 to get the following constraints
   base - p1.base = 0 (mod p1.step)
   base - p1.base = p2.base - p1.base (mod p2.step)
   base - p1.base <= e1 - p1.base
   base - p1.base >= p2.base - p1.base
   base - p1.base <= e2 - p1.base
   Let x' denote x - p1.base for all x. We equivalently have the following:
   exists j. base' = j * p1.step
   exists k. base' = p2.base' + k * p2.step
   p2.base' <= base' <= min(e1', e2')
   Thus, base' is (j0 * p1.step) if j0 is the least solution
   to the linear diophantine equation j * p1.step - k * p2.step = p2.base' and,
   in the finite case, p2.base' <= base' <= min(e1', e2')
*)
let intersection (p1 : t) (p2 : t) : t =
  let width = get_and_check_sizes p1 p2 in
  let bot = bottom width in
  (* Canonize to ensure that the CLPs may be treated as finite in most cases
     and simplify other handling
  *)
  let p1 = canonize p1 in
  let p2 = canonize p2 in
  (* ensure that p1.base >= p2.base so that neither end crosses p1.base *)
  let p1, p2 = if W.(>=) p1.base p2.base then p1, p2 else p2, p1 in
  (* Translate the CLPs so that p1 starts at 0 *)
  let translation = p1.base in
  let translated_p1 = translate p1 (W.neg p1.base) in
  let translated_p2 = translate p2 (W.neg p1.base) in
  let p1, p2 = translated_p1, translated_p2 in
  let p1_infinite = is_infinite p1 in
  let p2_infinite = is_infinite p2 in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:bot begin
    finite_end p1 >>= fun e1 ->
    finite_end p2 >>= fun e2 ->
    let step = W.lcm_exn p1.step p2.step in
    if W.is_zero step then begin
      (* If there is no bounded LCM then s1 or s2 are 0. In other words,
         one of the inputs is a singleton. Thus the intersection is either
         equal to the singleton or empty.
      *)
      if W.is_zero p2.step then
        Option.some_if (elem p2.base p1) () >>= fun _ ->
        !!(create p2.base)
      else
        Option.some_if (elem p1.base p2) () >>= fun _ ->
        !! (create p1.base)
    end else begin
      bounded_diophantine p1.step p2.step p2.base >>= fun (x,_) ->
      (* the result is the first point on p1 that is also on the infinite CLP
         approximation of p2. Due to the translation of the CLPs, this is the
         least possible value (before translating back) that can inhabit the
         intersection.
      *)
      let base = W.mul x p1.step in
      (* e1 and e2 are meaningful iff p1 and p2 respectively are non-infinite.
         We therefore guard their use so that when either argument is
         infinite, we compute the proper end.
      *)
      let minE = if p1_infinite then e2
        else if p2_infinite then e1
        else min e1 e2 in
      (* If the least possible value in resulting set is greater than the
         maximum possible value, then the set is empty. This does not apply
         when p1 is infinite since sometimes the base of p2 (and therefore
         the result) can wrap backwards over 0.
      *)
      Option.some_if (W.(<=) base minE) () >>= fun _ ->
      let cardn = cardn_from_bounds base step minE in
      !!(create base ~step ~cardn)
    end
    (* whatever the result, we translate it by the original p1.base
       to make up for the translation at the start.
    *)
  end |> (fun p -> translate p translation)
(* Computes whether the two CLPs share at least one element, i.e. whether
   their intersection is non-empty. *)
let overlap (p1 : t) (p2 : t) : bool =
  intersection p1 p2 |> is_bottom |> not
(* Approximates the union of the two abstracted sets.
   Input CLPs should have the same bitwidth.
*)
let union (p1 : t) ( p2 : t) : t =
  (* CLPs canonized so that they may be treated as finite *)
  let p1 = canonize p1 in
  let p2 = canonize p2 in
  (* If either CLP is empty, return the other one *)
  Option.value_map ~default:p2 (finite_end p1) ~f:begin fun e1 ->
    Option.value_map ~default:p1 (finite_end p2) ~f:begin fun e2 ->
      (* Compute the bounds of the interval that contains this CLP *)
      let base, newE = interval_union (p1.base, e1) (p2.base, e2) in
      let step = common_step (p1.base, p1.step) (p2.base, p2.step) in
      (* if the common step is 0, then base = newE *)
      let cardn = cardn_from_bounds base step newE in
      create base ~step ~cardn
    end
  end

(* Approximates pointwise addition of the two abstracted sets.
   Input CLPs must have the same bitwidth. *)
let add (p1 : t) (p2 : t) : t =
  let sz = get_and_check_sizes p1 p2 in
  (* If either CLP is empty, return bottom *)
  Option.value_map ~default:(bottom sz) (finite_end p1) ~f:begin fun e1 ->
    Option.value_map ~default:(bottom sz) (finite_end p2) ~f:begin fun e2 ->
      (* if either CLP has step 0 it is a singleton and
         we simply add its value to each element of the other
         This case is computed exactly.
      *)
      if W.is_zero p1.step || is_one p1.cardn then translate p2 p1.base
      else if W.is_zero p2.step || is_one p2.cardn then translate p1 p2.base
      else if is_infinite p1 || is_infinite p2 then
        infinite (W.add p1.base p2.base, bounded_gcd p1.step p2.step)
      else
        (* Translate p1 by -b1 and p2 by -b2 so that they each
           start at 0. We can just add b1 + b2 at the end to
           reverse this.
        *)
        let e1' = W.sub e1 p1.base in
        let e2' = W.sub e2 p2.base in
        let e' = W.add e1' e2' in
        let base = W.add p1.base p2.base in
        let step = bounded_gcd p1.step p2.step in
        (* If the sum wraps around, then it encompasses the full circle *)
        if W.(<) e' e1' then infinite (base, step)
        else let cardn = cardn_from_bounds (W.zero sz) step e' in
          create base ~step ~cardn
    end
  end
(* Note: this function is fully precise *)
let neg (p : t) : t =
  (* The finite end serves as the new base even in the infinite case:
     it is always some point on the CLP, and an infinite CLP may be
     based at any of its points. Negation reverses the direction of
     travel, so the old end becomes the new base.
  *)
  match finite_end p with
  | None -> bottom (bitwidth p)
  | Some e -> {base = W.neg e; step = p.step; cardn = p.cardn}
let sub (p1: t) (p2 : t) : t = add p1 (neg p2)
(* Approximates pointwise multiplication of the two abstracted sets.
   Input CLPs must have the same bitwidth. *)
let mul (p1 : t) (p2 : t) : t =
  let sz = get_and_check_sizes p1 p2 in
  (* If either CLP is empty, return bottom *)
  Option.value_map ~default:(bottom sz) (finite_end p1) ~f:begin fun e1 ->
    Option.value_map ~default:(bottom sz) (finite_end p2) ~f:begin fun e2 ->
      (* if either CLP is a singleton, we simply
         multiply each element of the other by its value
         This case is computed exactly.
      *)
      if W.is_zero p1.step || is_one p1.cardn then
        let base = W.mul p2.base p1.base in
        let step = W.mul p2.step p1.base in
        create base ~step ~cardn:p2.cardn
      else if W.is_zero p2.step || is_one p2.cardn then
        let base = W.mul p1.base p2.base in
        let step = W.mul p1.step p2.base in
        create base ~step ~cardn:p1.cardn
      else
        (* general case: work at full (double) precision via mul_exact
           so that wrapping does not corrupt the bounds computation *)
        let base = mul_exact p1.base p2.base in
        let e'_exact = mul_exact e1 e2 in
        let step = bounded_gcd (mul_exact p1.base p2.step)
            (bounded_gcd (mul_exact p2.base p1.step)
               (mul_exact p1.step p2.step)) in
        let end_diff = W.sub e'_exact base in
        let div_res = W.div end_diff step in
        let cardn = W.succ @@ add_bit div_res in
        if is_infinite p1 ||is_infinite p2 then
          (* truncate the exact-precision results back down to sz bits *)
          let fit = W.extract_exn ~hi:(sz - 1) in
          infinite (fit base, fit step)
        else create ~width:sz base ~step ~cardn
    end
  end

(* helper function; binary search over w[lo..hi].
   NOTE(review): appears to return one past the index where the topmost
   all-ones run of w within [lo..hi] ends — confirm against callers.
   TODO: describe
   TODO: should be in word_ops.ml???
   TODO: check!!!
*)
let lead_1_bit_run (w : word) ~hi ~lo : int =
  let rec lead_help (hi : int) (lo : int) : int =
    if hi = lo then hi else
      let mid = (hi + lo) / 2 in
      let hi_part = W.extract_exn ~hi ~lo:(mid + 1) w in
      (* hi_part is all ones iff its complement is zero *)
      if W.is_zero (W.lnot hi_part) then lead_help mid lo
      else lead_help hi (mid + 1)
  in
  assert(lo >= 0);
  assert(hi >= lo);
  (lead_help hi lo) + 1
(* helpers for [logand] below; these follow the SWEET paper's AND algorithm.
   [compute_l_s_b] combines the least-significant varying bit positions of
   the two operands with their minimum elements b1/b2. *)
let compute_l_s_b lsb1 lsb2 b1 b2 : int = if lsb1 < lsb2 then
    let interval = W.extract_exn ~hi:(lsb2 - 1) ~lo:lsb1 b2 in
    let w, bit = factor_2s interval in
    if W.is_zero w then lsb2 else bit + lsb1
  else if lsb1 > lsb2 then
    let interval = W.extract_exn ~hi:(lsb1 - 1) ~lo:lsb2 b1 in
    let w, bit = factor_2s interval in
    if W.is_zero w then lsb1 else bit + lsb2
  else lsb1 (* lsb1 = lsb2 *)

(* Combines the most-significant varying bit positions of the two operands
   with their minimum elements b1/b2; -1 encodes "no varying bit". *)
let compute_m_s_b msb1 msb2 b1 b2 : int = if msb1 > msb2 then
    let interval = W.extract_exn ~hi:msb1 ~lo:(msb2 + 1) b2 in
    Option.value_map ~default:msb2
      (lead_1_bit interval)
      ~f:(fun b -> b + msb2 + 1)
  else if msb1 < msb2 then
    let interval = W.extract_exn ~hi:msb2 ~lo:(msb1 + 1) b1 in
    Option.value_map ~default:msb1
      (lead_1_bit interval)
      ~f:(fun b -> b + msb1 + 1)
  else msb1 (* msb1 = msb2 *)

(* Computes the bit position separating the "safe" low range from the high
   range in the SWEET AND algorithm. *)
let compute_range_sep msb msb1 msb2 b1 b2 : int = if msb1 > msb2
  then if msb = msb1 then lead_1_bit_run b2 ~hi:msb1 ~lo:(msb2 + 1) else msb + 1
  else if msb1 < msb2 then
    if msb = msb2 then lead_1_bit_run b1 ~hi:msb2 ~lo:(msb1 + 1) else msb + 1
  (* TODO: this last branch is a guess; it is not explained in the paper
     Figure out whether it is correct.
  *)
  else -1
(* This algorithm closely follows the one in "Circular Linear Progressions
   in SWEET". It converts a CLP into an AP (terminology from the paper)
   by using the least non-wrapping superset.
*)
let logand (p1 : t) (p2 : t) : t =
  let sz = get_and_check_sizes p1 p2 in
  let bot = bottom sz in
  (* cardinalities are stored one bit wider than the element width *)
  let cardn_two = W.of_int ~width:(sz + 1) 2 in
  (* We canonize the inputs so that their n1 and n2 are their
     true cardinalities and they are finite
  *)
  let cp1 = canonize p1 in
  let cp2 = canonize p2 in
  (* We ensure that the cardinality of the second CLP is at
     least the cardinality of the first to collapse the two cases where
     once CLP has cardinality 1 and the other has cardinality 2.
  *)
  let p1, p2 = if W.(<=) (cardinality cp1) (cardinality cp2)
    then (cp1, cp2) else (cp2, cp1) in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:bot begin
    if W.is_zero p1.cardn || W.is_zero p2.cardn then !!bot
    else if W.is_one p1.cardn && W.is_one p2.cardn then
      (* two singletons: exact bitwise AND *)
      !!(create (W.logand p1.base p2.base))
    else if W.is_one p1.cardn && W.(=) p2.cardn cardn_two then
      (* CLPs can represent any two-element set exactly, so
         we compute the two elements of the set and return them.
      *)
      finite_end p2 >>= fun e2 ->
      let base = W.logand p1.base p2.base in
      let newE = W.logand p1.base e2 in
      let step = W.sub newE base in
      let cardn = cardn_two in
      !!(create base ~step ~cardn)
    else
      min_elem p1 >>= fun min_elem_p1 ->
      max_elem p1 >>= fun max_elem_p1 ->
      min_elem p2 >>= fun min_elem_p2 ->
      max_elem p2 >>= fun max_elem_p2 ->
      (* the number of factors of two in a step bounds the lowest bit
         at which that CLP's elements can differ *)
      let _, twos_in_s1 = factor_2s p1.step in
      let _, twos_in_s2 = factor_2s p2.step in
      let least_significant_bit_p1 = if W.is_one p1.cardn then sz
        (* since the CLP is canonized, if p1.cardn > 1 then p1.step <> 0 *)
        else twos_in_s1 in
      let least_significant_bit_p2 = if W.is_one p2.cardn then sz
        (* since the CLP is canonized, if p2.cardn > 1 then p2.step <> 0 *)
        else twos_in_s2 in
      (* There is no most significant bit iff the cardinality is 1 *)
      let most_significant_bit_p1 = Option.value ~default:(-1)
          (lead_1_bit (W.logxor min_elem_p1 max_elem_p1)) in
      let most_significant_bit_p2 = Option.value ~default:(-1)
          (lead_1_bit (W.logxor min_elem_p2 max_elem_p2)) in
      let l_s_b = compute_l_s_b
          least_significant_bit_p1
          least_significant_bit_p2
          min_elem_p1 min_elem_p2 in
      let m_s_b = compute_m_s_b
          most_significant_bit_p1
          most_significant_bit_p2
          min_elem_p1 min_elem_p2 in
      if l_s_b > m_s_b then
        (* The result is a singleton *)
        let base = W.logand min_elem_p1 min_elem_p2 in
        !!(create base)
      else
        let range_sep = compute_range_sep m_s_b
            most_significant_bit_p1
            most_significant_bit_p2
            min_elem_p1 min_elem_p2
        in
        (* mask covers the bits [l_s_b, range_sep) that may vary freely *)
        let mask = if l_s_b >= range_sep then W.zero sz
          else let ones = W.ones (range_sep - l_s_b) in
            let sized_ones = W.extract_exn ~hi:(sz - 1) ones in
            Word.lshift sized_ones (W.of_int ~width:sz l_s_b) in
        let safe_lower_bound =
          W.logand min_elem_p1 min_elem_p2 |> W.logand (W.lnot mask) in
        let safe_upper_bound = W.logand max_elem_p1 max_elem_p2 |>
                               W.logor mask |>
                               W.min max_elem_p1 |>
                               W.min max_elem_p2 in
        (* 2^l_s_b: the smallest distance at which results can differ *)
        let twos_step = W.lshift (W.of_int 1 ~width:sz)
            (W.of_int l_s_b ~width:sz) in
        let step = if most_significant_bit_p1 > most_significant_bit_p2 &&
                      m_s_b = most_significant_bit_p1 &&
                      range_sep = l_s_b then
            W.max p1.step twos_step
          else if most_significant_bit_p2 > most_significant_bit_p1 &&
                  m_s_b = most_significant_bit_p2 &&
                  range_sep = l_s_b then
            W.max p2.step twos_step
          else twos_step in
        let b1_and_b2 = W.logand min_elem_p1 min_elem_p2 in
        (* round b1_and_b2 up to the first step-aligned point at or above
           the safe lower bound *)
        let frac = cdiv (W.sub safe_lower_bound b1_and_b2) step in
        let base = W.add b1_and_b2 (W.mul step frac) in
        (* TODO: use cardn_from_bounds *)
        let cardn = W.div (W.sub safe_upper_bound base) step |> succ_exact in
        !!(create base ~step ~cardn)
  end
let logor (p1 : t) (p2 : t) : t = lnot (logand (lnot p1) (lnot p2))
(* Approximates bitwise XOR two independent ways and meets the results. *)
let logxor (p1 : t) (p2 : t) : t =
  let width = get_and_check_sizes p1 p2 in
  (* definition via and/or/not: (p1 & ~p2) | (~p1 & p2) *)
  let via_bitops = logor (logand p1 (lnot p2)) (logand (lnot p1) p2) in
  (* arithmetic identity from Hacker's Delight: x ^ y = x + y - 2*(x & y) *)
  let two = create (W.of_int 2 ~width) in
  let via_arith = sub (add p1 p2) (mul (logand p1 p2) two) in
  (* since both approximations are sound, their intersection is sound *)
  intersection via_bitops via_arith
(* Note: this operation accepts inputs of different sizes as per the BAP IR *)
let lshift (p1 : t) (p2 : t) : t =
  let sz1 = bitwidth p1 in
  let p2 = canonize p2 in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(bottom sz1) begin
    finite_end p1 >>= fun e1 ->
    min_elem p2 >>= fun min_p2 ->
    max_elem p2 >>= fun max_p2 ->
    (* a shift amount >= the width would clear every bit; not handled *)
    if W.(>=) max_p2 (W.of_int sz1 ~width:(W.bitwidth max_p2)) then
      let msg = "During lshift, maximum element of CLP2 is >= CLP1's width" in
      !!(not_implemented ~top:(top sz1) msg)
    else
      let max_p2_int = W.to_int_exn max_p2 in
      let base = W.lshift p1.base min_p2 in
      (* with multiple shift amounts, the base itself contributes to the
         spacing between results, so it joins the gcd *)
      let step = if is_one p2.cardn
        then W.lshift p1.step min_p2
        else W.lshift (bounded_gcd p1.base p1.step) min_p2 in
      (* widen so the largest shifted end does not wrap *)
      let e_no_wrap = lshift_exact e1 max_p2_int in
      let e_width = W.bitwidth e_no_wrap in
      (* same as cardn_from_bounds, but adapted to e_no_wrap's wider width *)
      let cardn = if W.is_zero step then W.one 1 else
          let base_ext = W.extract_exn ~hi:(e_width - 1) base in
          let step_ext = W.extract_exn ~hi:(e_width - 1) step in
          let div_by_step = W.div (W.sub e_no_wrap base_ext) step_ext in
          (* extract adds an extra bit at the high end so that
             the call to succ never wraps to 0.
          *)
          W.succ (W.extract_exn ~hi:e_width div_by_step) in
      !!(create base ~step ~cardn)
  end
(* Note, this function only accepts CLPs that are the same bitwidth.
   Computes a sound step for a (logical or arithmetic, per the [rshift]
   argument) right shift of [p1] by the amounts in [p2], whose largest
   shift is [e2]. Falls back to step 1 when divisibility cannot be shown. *)
let rshift_step rshift ~p1 ~p2 ~e2 ~sz1 ~sz2 =
  assert(sz1 = sz2);
  let _, b1twos = factor_2s p1.base in
  let _, s1twos = factor_2s p1.step in
  (* the shift divides the step/base evenly iff enough trailing zeros exist *)
  let s1_divisible = W.(>=) (W.of_int s1twos ~width:sz1) e2 in
  let b1_divisible = W.(>=) (W.of_int b1twos ~width:sz1) e2 in
  let b1_initial_ones = count_initial_1s p1.base in
  if (s1_divisible && W.is_one p2.cardn) ||
     (s1_divisible && b1_divisible) ||
     (s1_divisible && W.(>=) (W.of_int ~width:sz2 b1_initial_ones) e2)
  then bounded_gcd (rshift p1.step e2) @@ W.sub (rshift p1.base @@ W.sub e2 p2.step)
      (rshift p1.base @@ e2)
  else W.one sz1

(* Note: [p1] and [p2] must have the same bitwidth, since this function
   depends on the [rshift_step] function above. *)
let rshift (p1 : t) (p2 : t) : t =
  let sz1 = bitwidth p1 in
  let sz2 = bitwidth p2 in
  (* unwrap so that min/max elements coincide with base/end *)
  let p1 = unwrap @@ canonize p1 in
  let p2 = unwrap @@ canonize p2 in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(bottom sz1) begin
    finite_end p1 >>= fun e1 ->
    finite_end p2 >>= fun e2 ->
    (* smallest element: largest shift applied to the smallest value *)
    let base = W.rshift p1.base e2 in
    if W.is_one p1.cardn && W.is_one p2.cardn
    then !!(create base)
    else
      let step = rshift_step W.rshift ~p1 ~p2 ~e2 ~sz1 ~sz2 in
      (* largest element: smallest shift (p2.base) applied to the end *)
      let cardn = cardn_from_bounds base step (W.rshift e1 p2.base) in
      !!(create base ~step ~cardn)
  end

(* Note: [p1] and [p2] must have the same bitwidth, since this function
   depends on the [rshift_step] function above.
   Note also that the SWEET paper's algorithm for arshift is incorrect.
   To compute the new [n] (cardinality) on p. 26, it has this condition:
   - [if b1 >= c1[n1 - 1]]
   We have altered that condition to this:
   - [if 0 >= c1[n1 - 1]]
*)
let arshift (p1 : t) (p2 : t) : t =
  let sz1 = bitwidth p1 in
  let sz2 = bitwidth p2 in
  let zero = W.zero sz1 in
  (* unwrap in the signed order so base is the (signed) minimum *)
  let p1 = unwrap_signed @@ canonize p1 in
  let p2 = unwrap @@ canonize p2 in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(bottom sz1) begin
    finite_end p1 >>= fun e1 ->
    finite_end p2 >>= fun e2 ->
    if W.is_one p1.cardn && W.is_one p2.cardn
    then
      let base = W.arshift (W.signed p1.base) p2.base in
      !!(create base ~width:sz1)
    else
      (* a nonnegative base shrinks most under the largest shift; a
         negative base shrinks most under the smallest shift *)
      let base =
        if W.(>=) (W.signed p1.base) zero
        then W.arshift (W.signed p1.base) e2
        else W.arshift (W.signed p1.base) p2.base
      in
      let step = rshift_step W.arshift ~p1 ~p2 ~e2 ~sz1 ~sz2 in
      (* symmetric reasoning for the largest element *)
      let new_end =
        if W.(>=) (W.signed e1) zero
        then W.arshift (W.signed e1) p2.base
        else W.arshift (W.signed e1) e2
      in
      let cardn = cardn_from_bounds base step new_end in
      !!(create base ~step ~cardn)
  end
(* helper function; splits a CLP into two segments: one that contains all
   points from the base up to n inclusive and another that contains the rest.
*)
let split_at_n (p : t) n : t * t =
  (* The CLP is canonized so that it can be treated as finite *)
  let p = canonize p in
  (* The first set extends to the highest point on the CLP up to n *)
  let cardn1 = min (cardn_from_bounds p.base p.step n) p.cardn in
  (* The second set contains all of the other elements *)
  let cardn2 = W.sub p.cardn cardn1 in
  let p2_base = nearest_inf_succ (W.succ n) p.base p.step in
  {base = p.base; step = p.step; cardn = cardn1},
  {base = p2_base; step = p.step; cardn = cardn2}

(* helper function; produces a result that contains exactly the
   elements of the input clp extended to the given width. Always
   returns disjoint CLPs.
   Expects that width >= bitwidth p.
   Note: in most cases, p2 will represent the empty set. If dealing with
   both CLPs is difficult or imprecise, this represents an opportunity
   for optimization.
*)
let extract_exact ~width:(width : int) (p : t) : t * t =
  let p_width = bitwidth p in
  assert (width >= p_width);
  (* split below the old modulus; split_at_n canonizes internally, so
     infinite CLPs do not have to be treated specially *)
  let lastn = W.ones p_width in
  let p1, p2 = split_at_n p lastn in
  create ~width p1.base ~step:p1.step ~cardn:p1.cardn,
  create ~width p2.base ~step:p2.step ~cardn:p2.cardn
(* Drops the low [lo] bits of every element, producing a CLP of width
   (width - lo). Requires lo < width. *)
let extract_lo ?(lo = 0) (p : t) : t =
  let p = canonize p in
  let width = bitwidth p in
  let res_width = width - lo in
  assert(lo < width);
  if lo = 0 then p else
    let default = bottom res_width in
    Option.value_map ~default (finite_end p) ~f:begin fun e ->
      let base = W.extract_exn ~lo p.base in
      let ext_lo w = W.extract_exn ~hi:(lo - 1) w in
      let base_mod_2lo = ext_lo p.base in
      let step_mod_2lo = ext_lo p.step in
      (* If no carry is ever triggered, the low bits can be ignored.
         The low bits increase monotonically until they carry over
         into the high bits, so it suffices to show that the sum of the
         low bits never carries. Note that this computation assumes that
         p.cardn > 0.
      *)
      let max_step_effect = add_exact base_mod_2lo @@
        mul_exact step_mod_2lo (W.pred p.cardn) in
      let carry_bound = dom_size ~width:(W.bitwidth max_step_effect) lo in
      if W.(<) max_step_effect carry_bound then
        (* no carry: the high bits step uniformly *)
        create base ~step:(W.extract_exn ~lo p.step) ~cardn:p.cardn
      else
        (* carries may occur: fall back to a dense step-1 over-approximation *)
        let e = W.extract_exn ~lo e in
        let step = W.one res_width in
        let cardn = cardn_from_bounds base step e in
        create base ~step ~cardn
    end

(* Keeps bits [0 .. hi] of every element, widening (with zero- or
   sign-extension per [signed]) or truncating as needed. *)
let extract_hi ?(hi = None) ?(signed = false) (p : t) : t =
  let sz = bitwidth p in
  let hiv = Option.value ~default:(bitwidth p - 1) hi in
  if not signed && hiv + 1 >= sz then
    (* unsigned widening is exact via extract_exact *)
    let res1, res2 = extract_exact ~width:(hiv + 1) p in
    union res1 res2
  else if not signed then
    (* unsigned truncation; create fits the fields to the new width *)
    create ~width:(hiv+1) p.base ~step:p.step ~cardn:p.cardn
  else
    (* TODO: signed case is old; check and update *)
    let ext = W.extract_exn ~hi:hiv in
    let ext_signed w = W.extract_exn ~hi:hiv (W.signed w) in
    Option.value_map ~default:(bottom (hiv + 1)) (finite_end p) ~f:begin
      fun e ->
        (* TODO: in particular this infinite case should be checked *)
        if is_infinite p then infinite (ext p.base, ext p.step)
        else
          (* negmin is the number of the form 10*; the most negative
             number when interpreted as signed. When signed, its predecessor
             is the most positive (signed) number.
          *)
          let negmin = W.lshift (W.one sz) (W.of_int ~width:sz (sz-1)) in
          let posmax = W.pred negmin in
          if elem negmin p && elem posmax p &&
             (p.base <> negmin || e <> posmax) then
            (* TODO: this case in highly nontrivial and will require
               a loss of precision.
            *)
            not_implemented ~top:(top (hiv + 1))
              "extract signed crossing max signed int"
          else
            let e' = W.sub e p.base in
            let newE' = ext e' in
            let base = if signed then ext_signed p.base else ext p.base in
            let step = ext p.step in
            (* If newE' wraps around, then it encompasses the full circle *)
            if W.(<) newE' e' then infinite(base, step)
            else
              let cardn = cardn_from_bounds (W.zero (hiv + 1)) step newE' in
              create base ~step ~cardn
    end
(* Composes the low- and high-bit extractions. The [hi] index is re-based
   relative to the slice that remains after the low [lo] bits are dropped.
*)
let extract_internal ?hi ?(lo = 0) ?(signed = false) (p : t) : t =
  let rebased_hi = Option.map hi ~f:(fun h -> h - lo) in
  extract_hi ~hi:rebased_hi ~signed (extract_lo ~lo p)
(* Implements the BIL cast operators via extraction. UNSIGNED and LOW
   coincide: both keep bits 0..sz-1 (zero-extending when growing,
   truncating when shrinking). HIGH keeps the top sz bits. *)
let cast ct (sz : int) (p : t) : t = match ct with
  | Bil.UNSIGNED -> extract_internal ~hi:(sz - 1) p
  | Bil.SIGNED -> extract_internal ~hi:(sz - 1) ~signed:true p
  | Bil.LOW -> extract_internal ~hi:(sz - 1) p
  | Bil.HIGH -> extract_internal ~lo:(bitwidth p - sz) p
let extract ?hi:hi ?lo:lo (p : t) : t = extract_internal ?hi ?lo p
(* Concatenates the bits of each element of p1 (high part) with each
   element of p2 (low part), producing a CLP of the summed width. *)
let concat (p1 : t) (p2 : t) : t =
  let width1 = bitwidth p1 in
  let width2 = bitwidth p2 in
  let width = width1 + width2 in
  (* shift p1 into the high bit positions at the widened width *)
  let p1'base = lshift_exact p1.base width2 in
  let p1'step = lshift_exact p1.step width2 in
  let p1' = create p1'base ~step:p1'step ~cardn:p1.cardn in
  let p2' = cast Bil.UNSIGNED width p2 in
  (* TODO: bug in intersection? The following should work but does
     not on wrapping:
     (intersection (logor p1' p2') (add p1' p2'))
  *)
  (add p1' p2')

(* creates a CLP that soundly approximates the elements of the list *)
let of_list ~width l : t =
  assert (width > 0);
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(bottom width) begin
    let l = List.map l ~f:(W.extract_exn ~hi:(width - 1) ~lo:0) in
    let l = List.sort ~compare:W.compare l in
    (* gaps between consecutive (sorted, circular) elements; the largest
       gap is excluded from the CLP, and the others fold into the step *)
    let diff_list = List.map2_exn l (rotate_list l) ~f:W.sub in
    let idx, _, step = List.foldi diff_list ~init:(0, W.zero width, W.zero width)
        ~f:(fun i (idx, diff, step) d ->
            assert(W.bitwidth diff = W.bitwidth step);
            assert(W.bitwidth diff = W.bitwidth d);
            if W.(>) d diff
            then i, d, bounded_gcd diff step
            else idx, diff, bounded_gcd d step) in
    let l = idx
            (* "rotate" the list left by the index of the desired first element *)
            |> List.split_n l
            |> (fun (end_l, start_l) -> List.append start_l end_l) in
    List.hd l >>= fun base ->
    List.last l >>= fun e ->
    let cardn = cardn_from_bounds base step e in
    assert(W.bitwidth base = width);
    !!{base; step; cardn}
  end
(* Note: BAP uses Z.div (standard division truncating
   towards 0 and obeying the rule of signs) internally
   for signed division and Z.ediv (the Euclidean algorithm)
   for unsigned division. This seems strange since Z.div and
   Z.ediv work the same way for nonnegative inputs. The
   implementations of div and sdiv here are consistent with
   both this approach and using just Z.div (since it would
   work the same way).
   TODO: Compute a more accurate step for the division.
*)
let div (p1 : t) (p2 : t) : t =
  let width = get_and_check_sizes p1 p2 in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(bottom width) begin
    min_elem p1 >>= fun min_e1 ->
    min_elem p2 >>= fun min_e2 ->
    max_elem p1 >>= fun max_e1 ->
    max_elem p2 >>= fun max_e2 ->
    if elem (W.zero width) p2 then not_implemented "Clp division by zero"
    else
      (* unsigned quotient bounds: min/max over the extreme operand pairs *)
      let base = W.div min_e1 max_e2 in
      let e = W.div max_e1 min_e2 in
      let step = if W.is_one (cardinality p2) &&
                    W.is_zero (W.modulo p1.step p2.base)
        then bounded_gcd (W.div p1.step p2.base)
            (W.sub (W.div p1.base p2.base) base)
        (* TODO: improve step precision in cases where
           every element of the divisor divides every
           element of the dividend
        *)
        else W.one width in
      let cardn = cardn_from_bounds base step e in
      !!{base; step; cardn}
  end

let sdiv (p1 : t) (p2 : t) : t =
  let wsdiv a b = W.div (W.signed a) (W.signed b) in
  let width = get_and_check_sizes p1 p2 in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(bottom width) begin
    min_elem p1 >>= fun min_e1 ->
    min_elem p2 >>= fun min_e2 ->
    max_elem p1 >>= fun max_e1 ->
    max_elem p2 >>= fun max_e2 ->
    if elem (W.zero width) p2 then not_implemented "Clp division by zero"
    else if W.is_one (cardinality p1) &&
            W.is_one (cardinality p2)
    then !!(singleton (wsdiv p1.base p2.base))
    else
      (* all four extreme quotients; the signed result range is bracketed
         by their min and max.
         NOTE(review): min/max here compare words directly — confirm that
         this ordering agrees with the intended signed ordering. *)
      let minmax = wsdiv min_e1 max_e2 in
      let minmin = wsdiv min_e1 min_e2 in
      let maxmax = wsdiv max_e1 max_e2 in
      let maxmin = wsdiv max_e1 min_e2 in
      let base =
        min minmax @@
        min minmin @@
        min maxmax maxmin in
      let e =
        max minmax @@
        max minmin @@
        max maxmax maxmin in
      (* TODO: improve step accuracy *)
      let step = W.one width in
      let cardn = cardn_from_bounds base step e in
      !!{base; step; cardn}
  end
(* implemented as per the "SWEET" paper: x mod y = x - (x / y) * y *)
let modulo (p1 : t) (p2 : t) : t =
  let quotient = div p1 p2 in
  sub p1 (mul quotient p2)

(* signed counterpart, built on sdiv by the same identity *)
let smodulo (p1 : t) (p2 : t) : t =
  let quotient = sdiv p1 p2 in
  sub p1 (mul quotient p2)
(* Implement indexed lattice: CLPs of each bitwidth form a lattice, with
   subset as the order, union as join, and intersection as meet. *)
type idx = int
let get_idx = bitwidth
let precedes = subset
let join = union
let meet = intersection

(* Widening for fixpoint iteration: once a value grows, jump straight to
   the infinite CLP with the larger operand's base and step.
   TODO: There is still room for improvement on this *)
let widen_join (p1 : t) (p2 : t) =
  assert (subset p1 p2);
  if equal p1 p2 then p1 else
    infinite (p2.base, p2.step)
(* Implement the Value interface *)
(* NOTE: this compare function is solely for implementation of the value
   interface. It has the property that compare a b = 0 iff equal a b, but
   otherwise the ordering is arbitrary (though still properly transitive).
   Importantly, precedes a b DOES NOT imply compare a b <= 0 or vice versa.
*)
let compare (p1 : t) (p2 : t) : int =
  let p1 = canonize p1 in
  let p2 = canonize p2 in
  (* lexicographic order on the canonical (base, step, cardn) triple *)
  match W.compare p1.base p2.base with
  | 0 ->
    begin match W.compare p1.step p2.step with
      | 0 -> W.compare p1.cardn p2.cardn
      | c -> c
    end
  | c -> c
(* Serializes a CLP as a list: the empty set is ({} width); singletons and
   pairs list their elements; larger sets are (base next ... end). *)
let sexp_of_t (p : t) : Sexp.t =
  let p = canonize p in
  Sexp.List begin match finite_end p with
    | Some e ->
      if W.is_one p.cardn then [W.sexp_of_t p.base]
      else if W.is_one @@ W.pred @@ p.cardn then
        [W.sexp_of_t p.base; W.sexp_of_t e]
      else [W.sexp_of_t p.base;
            W.sexp_of_t (W.add p.base p.step);
            Sexp.Atom "...";
            W.sexp_of_t e]
    | None -> [Sexp.Atom "{}"; Sexp.Atom (string_of_int (bitwidth p))]
  end

(* Inverse of [sexp_of_t]; the step is recovered as (next - base).
   In the two-element form, next and end coincide. *)
let t_of_sexp : Sexp.t -> t = function
  | Sexp.List [Sexp.Atom "{}"; Sexp.Atom s] ->
    let width = int_of_string s in
    bottom width
  | Sexp.List [be] ->
    let base = Word.t_of_sexp be in
    create base
  | Sexp.List [be; ne as ee]
  | Sexp.List [be; ne; Sexp.Atom "..."; ee] ->
    let base = Word.t_of_sexp be in
    let next = Word.t_of_sexp ne in
    let e = Word.t_of_sexp ee in
    let step = Word.sub next base in
    let cardn = cardn_from_bounds base step e in
    {base; step; cardn}
  | Sexp.List _
  | Sexp.Atom _ -> failwith "Sexp not a CLP"
(* Printing *)
(* Pretty-print a canonized CLP as {e1, e2, ..., en}:width, eliding the
   middle elements of large sets. *)
let pp ppf (p : t) =
  let p = canonize p in
  let width = bitwidth p in
  match finite_end p with
  | None -> Format.fprintf ppf "{}:%i" width
  | Some _ when W.is_one p.cardn ->
    Format.fprintf ppf "@[{%a}:%i@]" W.pp p.base width
  | Some e when W.is_one @@ W.pred p.cardn ->
    Format.fprintf ppf "@[{%a,@ %a}:%i@]"
      W.pp p.base
      W.pp e
      width
  | Some e ->
    Format.fprintf ppf "@[{%a,@ %a,@ ...,@ %a}:%i@]"
      W.pp p.base
      W.pp (W.add p.base p.step)
      W.pp e
      width
let spp (p : t) : string = Format.asprintf "%a" pp p
| null | https://raw.githubusercontent.com/draperlaboratory/cbat_tools/334523cb4415f187e33f190a691e34b73733ef24/vsa/value_set/lib/src/cbat_clp.ml | ocaml | *************************************************************************
This file is provided under the license found in the LICENSE file in
the top-level directory of this project.
Government and no official endorsement should be inferred.
*************************************************************************
truncates or extends the word to the appropriate size
extract adds an extra bit at the high end so that
the call to succ never wraps to 0.
Cardinality 2 sets can be flipped; we represent them in the order
where the end is greater than the beginning.
Note: this function is fully precise
We translate i and p by -p.base to avoid wrapping over 0
Compute the end of p translated by -p.base
The remainder is the difference between i and the result
helper function;
Some (nearest_inf_pred w b p) = nearest_pred w (infinite b p)
The Clp contains more than one element
if it wraps 0, w must divide the gap
decides whether the input represents the empty set
In all following cases, a2' is not within (0, b1'). If b2' is, then
the intervals overlap in the other direction and stretch from a2 to b1.
In all following cases, neither a2' or b2' are in (0, b1'). If b2' < a2'
then (a2', b2') wraps around 0, so it must subsume (0, b1).
helper function; moves a CLP around the number circle without
changing its cardinality or step size.
canonization allows us to treat the CLPs as finite
p1 is a subset of p2 if its bounds are within p2's bounds
and p2's step covers all elements in their union.
ensure that p1.base >= p2.base so that neither end crosses p1.base
e1 and e2 are meaningful iff p1 and p2 respectively are non-infinite.
We therefore guard their use so that when either argument is
infinite, we compute the proper end.
If the least possible value in resulting set is greater than the
maximum possible value, then the set is empty. This does not apply
when p1 is infinite since sometimes the base of p2 (and therefore
the result) can wrap backwards over 0.
whatever the result, we translate it by the original p1.base
to make up for the translation at the start.
Computes whether there exists any element in both CLPs
if the common step is 0, then base = newE
Translate p1 by -b1 and p2 by -b2 so that they each
start at 0. We can just add b1 + b2 at the end to
reverse this.
If the sum wraps around, then it encompasses the full circle
Note: this function is fully precise
lsb1 = lsb2
msb1 = msb2
TODO: this last branch is a guess; it is not explained in the paper
Figure out whether it is correct.
We canonize the inputs so that their n1 and n2 are their
true cardinalities and they are finite
The result is a singleton
TODO: use cardn_from_bounds
since both approximations are sound, their intersection is sound
extract adds an extra bit at the high end so that
the call to succ never wraps to 0.
Note: [p1] and [p2] must have the same bitwidth, since this function
depends on the [rshift_step] function above.
If no carry is ever triggered, the low bits can be ignored.
The low bits increase monotonically until they carry over
into the high bits, so it suffices to show that the sum of the
low bits never carries. Note that this computation assumes that
p.cardn > 0.
TODO: in particular this infinite case should be checked
TODO: this case in highly nontrivial and will require
a loss of precision.
If newE' wraps around, then it encompasses the full circle
TODO: bug in intersection? The following should work but does
not on wrapping:
(intersection (logor p1' p2') (add p1' p2'))
creates a CLP that soundly approximates the elements of the list
TODO: improve step precision in cases where
every element of the divisor divides every
element of the dividend
TODO: improve step accuracy
implemented as per the "SWEET" paper
implemented as per the "SWEET" paper
Implement indexed lattice
TODO: There is still room for improvement on this
Implement the Value interface
Printing | Copyright ( C ) 2018/2019 The Charles Stark Draper Laboratory , Inc.
This work is funded in part by ONR / NAWC Contract N6833518C0107 . Its
content does not necessarily reflect the position or policy of the US
open Bap.Std
include Cbat_vsa_utils
module W = Word
module Option = Core_kernel.Option
module Sexp = Core_kernel.Sexp
module List = Core_kernel.List
open !Cbat_word_ops
(* A CLP {base; step; cardn} represents the following set:
   {base + n * step | 0 <= n < cardn}.  Note that since cardn can be 2^sz,
   it is stored in a (sz+1)-bit word.  If step * cardn >= 2^sz in Z, then
   the CLP is conservatively approximated as cardn = 2^sz, which simplifies
   to the set {(base + n * step) mod 2^sz | n in Nat}.  We refer to these
   as 'infinite' CLPs.
   TODO: is there a better name than infinite CLPs?
   Invariants:
     W.bitwidth base = W.bitwidth step
     W.bitwidth cardn = (W.bitwidth base) + 1
   For an arbitrary CLP, cardn is only an approximation of its cardinality;
   the actual cardinality can be less, since the progression may overlap
   itself.
   Note: module-internal code should be careful with operations that treat
   the CLP as an interval from its beginning to its end, since this is
   likely to break equality between certain infinite CLPs (specifically
   those that are and aren't finite). *)
type t = {base : word; step : word; cardn : word}
[@@deriving bin_io]
(* Creates a new CLP that ranges from base to base + step * (pred cardn)
   in increments of size step.  cardn = 0 represents the empty set, and
   progressions that wrap around themselves are approximated.
   ~width is the desired bitwidth of the CLP (default: bitwidth of base).
   ~step and ~cardn default so that [create n] represents the set {n};
   note that canonization depends on these defaults. *)
let create ?(width : int option) ?(step = W.b1) ?(cardn = W.b1) base : t =
  let width = Option.value ~default:(W.bitwidth base) width in
  let fit = W.extract_exn ~hi:(width - 1) in
  (* While [fit] produces the correct behavior for the step (modulo the
     width), a sufficiently large cardinality is instead equivalent to the
     maximum cardinality at the given width. *)
  let cardn = Cbat_word_ops.cap_at_width ~width:(width + 1) cardn in
  {base = fit base; step = fit step; cardn}
(* [singleton w] is the one-element CLP {w}. *)
let singleton w = create w

(* Bitwidth of the words this CLP ranges over. *)
let bitwidth (p : t) : int = W.bitwidth p.base
(* Helper: the cardinality of a CLP running from [base] up to and
   including [e] in increments of [step], as a (width+1)-bit word.
   Note: unless some n satisfies e = base + step * n, the upper bound
   will not itself be an element of the resulting CLP. *)
let cardn_from_bounds base step e : word =
  let width = W.bitwidth base in
  assert(W.bitwidth step = width);
  (* Integer (floor) division produces the correct count; a zero step
     denotes a singleton. *)
  if W.is_zero step then W.one (width + 1) else
    let div_by_step = W.div (W.sub e base) step in
    W.succ (W.extract_exn ~hi:width div_by_step)
(* Helper: the minimal/canonical CLP for the unbounded progression
   {b + s * n | n in Nat}.  Dividing out the odd part of the step keeps
   only its power-of-two component, and the base is reduced modulo that
   step.  Returns a canonized result. *)
let infinite (b, s) : t =
  let width = W.bitwidth b in
  assert (width = W.bitwidth s);
  match W.is_zero s with
  | true -> create b
  | false ->
    let odd_part, twos = factor_2s s in
    let step = W.div s odd_part in
    {base = W.modulo b step;
     step;
     cardn = dom_size ~width:(width + 1) (width - twos)}
(* Helper: true when the CLP represents a set of the form
   {(b + s*i) mod 2^z | i in Nat}, i.e. cardn * step covers the whole
   2^width domain so the progression wraps onto itself.  A set can be
   both infinite and finite: exactly when end + step = base (mod 2^z). *)
let is_infinite p : bool =
  let width = bitwidth p in
  let total = mul_exact p.cardn p.step in
  W.(>=) total (dom_size ~width:(2 * width + 1) width)
(* outputs a canonized CLP *)
(* [bottom width]: the empty CLP (cardinality 0) at a positive width. *)
let bottom (width : int) : t =
  assert(width > 0);
  create ~width W.b0 ~cardn:W.b0

(* [top i]: the full domain at [i] bits — every i-bit word, step 1. *)
let top (i : int) : t =
  assert(i > 0);
  let zero = W.zero i in
  let one = W.one i in
  infinite (zero, one)
(* Helper: converts a CLP into a unique representation.  A canonized
   infinite CLP has the following properties:
     (elem i (b,s)) /\ i < j < i + s => not (elem j (b,s))
     b < s *)
let canonize (p : t) : t =
  let szInt = bitwidth p in
  let two = W.of_int ~width:(szInt + 1) 2 in
  (* Each CLP with step s and cardinality c such that 0 < s*c <= 2^szInt,
     2 < c < 2^szInt has a single representation. *)
  (* A cardinality of 0 represents the empty set. *)
  if W.is_zero p.cardn then bottom szInt
  (* Either a nonzero cardinality with step 0, or a cardinality of 1,
     represents a singleton set. *)
  else if W.is_zero p.step || is_one p.cardn then create p.base ~step:W.b0
  else if W.(=) p.cardn two then
    (* Cardinality-2 sets can be flipped; keep the orientation in which
       the end is not below the base. *)
    let e = W.add p.base p.step in
    if W.(>=) e p.base then p
    else create e ~step:(W.neg p.step) ~cardn:two
  (* If p.cardn steps traverse the full domain, we approximate with an
     infinite CLP.  In other words, all cardinalities c such that
     c * p.step <= 2^szInt are treated equivalently. *)
  else if is_infinite p then infinite(p.base, p.step)
  else p
(* Returns the cardinality of p as a (sz+1)-width word, where sz is the
   bitwidth of p (the count may be as large as 2^sz). *)
let cardinality (p : t) : word = (canonize p).cardn
(* Helper: the last point of a finite CLP, base + step * (cardn - 1);
   [None] on the empty set.  For an infinite CLP the result is still some
   point on the CLP, but not a meaningful "end". *)
let finite_end (p : t) : word option =
  if W.is_zero p.cardn then None
  else
    let hi = bitwidth p - 1 in
    let count = W.extract_exn ~hi p.cardn in
    Some (W.add p.base (W.mul p.step (W.pred count)))
(* Bitwise complement of every element; complementing reverses the
   progression, so the (complemented) end becomes the new base. *)
let lnot (p : t) : t =
  (* We use the finite end even in the infinite case since it is always
     guaranteed to be some point on the CLP, and for an infinite CLP any
     point works as the base. *)
  Option.value_map ~default:(bottom (bitwidth p)) (finite_end p)
    ~f:(fun e -> {base = W.lnot e; step = p.step; cardn = p.cardn})
(* Returns a list containing all of the elements represented by the
   input CLP (largest-offset element first, since each element is
   consed on as the walk advances). *)
let iter (p : t) : word list =
  let q = canonize p in
  let rec walk current remaining acc =
    if W.is_zero remaining then acc
    else walk (W.add current q.step) (W.pred remaining) (current :: acc)
  in
  walk q.base q.cardn []
(* Computes the closest element of [p] that precedes [i], i.e. the first
   element reached by starting from [i] and decreasing (circularly).
   [None] iff [p] is empty.  Assumes the inputs have the same bitwidth. *)
let nearest_pred (i : word) (p : t) : word option =
  assert(bitwidth p = W.bitwidth i);
  let open Monads.Std.Monad.Option.Syntax in
  (* If finite_end returns None, the CLP is empty. *)
  finite_end p >>= fun e ->
  if W.is_zero p.step then !!(p.base)
  else
    (* Work in coordinates translated by -p.base to avoid wrapping over 0;
       rm is how far i overshoots the nearest grid point below it. *)
    let diff = W.sub i p.base in
    let rm = W.modulo diff p.step in
    let end' = W.sub e p.base in
    if is_infinite p then !!(W.sub i rm)
    (* Past the end of a finite CLP, the predecessor is the end itself. *)
    else if W.(>=) diff end' then !!(W.add end' p.base) else
      !!(W.sub i rm)
(* Specialization of nearest_pred to an infinite CLP:
   Some (nearest_inf_pred w b s) = nearest_pred w (infinite (b, s)). *)
let nearest_inf_pred (w : word) (base : word) (step : word) : word =
  match W.is_zero step with
  | true -> base
  | false -> W.sub w (W.modulo (W.sub w base) step)
(* Closest element of [p] at or above [i], by duality with nearest_pred
   under bitwise complement. *)
let nearest_succ (i : word) (p : t) : word option =
  Option.map ~f:W.lnot (nearest_pred (W.lnot i) (lnot p))

(* Helper:
   Some (nearest_inf_succ w b s) = nearest_succ w (infinite (b, s)). *)
let nearest_inf_succ (w : word) (base : word) (step : word) : word =
  W.lnot (nearest_inf_pred (W.lnot w) (W.lnot base) step)
(* Largest (unsigned) element: nearest predecessor of the all-ones word. *)
let max_elem (p : t) : word option =
  let max_wd = W.ones (bitwidth p) in
  nearest_pred max_wd p

(* Smallest (unsigned) element: nearest successor of zero. *)
let min_elem (p : t) : word option =
  let min_wd = W.zero (bitwidth p) in
  nearest_succ min_wd p
* The elem of a signed CLP is going to be the nearest precedessor
of ( 2^w / 2 ) - 1 , where [ w ] is the bit - width of the words in the CLP .
For instance , if we assume [ w = 3 ] ( 3 - bits in each word ) , a
circle with the numbers 0 through 7 , when signed , becomes a
circle with the numbers -4 through 3 . So the maximum element in the
CLP is going to be the nearest predecessor to 3 .
We can get ( 2^w / 2 ) by getting the [ dom_size ] of [ w - 1 ] . E.g. ,
the domain size when [ w = 3 ] is 8 , but half of that is the same as
the domain size when [ w = 2 ] , namely 4 .
Note that the element this function returns is not signed ( like
all BAP words ) . To see the signed value , use [ W.signed word ] .
of (2^w / 2) - 1, where [w] is the bit-width of the words in the CLP.
For instance, if we assume [w = 3] (3-bits in each word), a
circle with the numbers 0 through 7, when signed, becomes a
circle with the numbers -4 through 3. So the maximum element in the
CLP is going to be the nearest predecessor to 3.
We can get (2^w / 2) by getting the [dom_size] of [w - 1]. E.g.,
the domain size when [w = 3] is 8, but half of that is the same as
the domain size when [w = 2], namely 4.
Note that the max element this function returns is not signed (like
all BAP words). To see the signed value, use [W.signed word].*)
let max_elem_signed (p : t) : word option =
let width = bitwidth p in
let half_way_point = dom_size ~width:width (width - 1) in
let max_dom_elem = W.pred half_way_point in
nearest_pred max_dom_elem p
* The min elem of a signed CLP is going to be the nearest successor
of ( 2^w / 2 ) . Note that the returned word is unsigned . To see
the signed value , use [ W.signed word ] .
of (2^w / 2). Note that the returned word is unsigned. To see
the signed value, use [W.signed word]. *)
let min_elem_signed (p : t) : word option =
let width = bitwidth p in
let half_way_point = dom_size ~width:width (width - 1) in
nearest_succ half_way_point p
(* [splits_by p w]: true when [w] divides the step of [p] and either the
   base is the minimum element or [w] also divides the base-to-end
   distance.  Vacuously true on the empty set and on singletons. *)
let splits_by (p : t) (w : word) : bool =
  let p = canonize p in
  (* divides a b <=> b mod a = 0 *)
  let divides a b = W.is_zero (W.modulo b a) in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:true begin
    min_elem p >>= fun min_p ->
    finite_end p >>= fun e ->
    if W.is_zero p.step then !!true else
      !!(divides w p.step &&
         (W.(=) p.base min_p ||
          divides w (W.sub e p.base)))
  end

(* Smallest distance between elements of [p]: the minimum of the step and
   the (circular) base/end gaps.  [None] when [p] has fewer than two
   distinct endpoints. *)
let min_separation (p : t) : word option =
  let p = canonize p in
  let open Monads.Std.Monad.Option.Syntax in
  finite_end p >>= fun e ->
  if W.(=) p.base e then None
  else !!(min p.step (min (W.sub p.base e) (W.sub e p.base)))
(* Decides membership in the set represented by the CLP: [i] is in [p]
   exactly when its nearest predecessor within [p] is [i] itself. *)
let elem (i : word) (p : t) : bool =
  assert (W.bitwidth i = bitwidth p);
  Option.value_map (nearest_pred i p) ~default:false ~f:(W.(=) i)
(* Helper: checks that two CLPs share a bitwidth and returns it.
   @raise Invalid_argument when the widths differ. *)
let get_and_check_sizes (p1 : t) (p2 : t) : int =
  let w1 = bitwidth p1 in
  let w2 = bitwidth p2 in
  if w1 <> w2 then
    invalid_arg
      (Printf.sprintf "Input CLPs of different sizes: %i and %i" w1 w2);
  w1
(* Checks whether a given CLP is the top element of the lattice, i.e.
   represents the full domain (the domain can be seen as the cyclic group
   (Z_(2^n), +)).  Canonization maps every full-domain CLP to the same
   record, so structural equality with [top] suffices. *)
let is_top (p : t) : bool = canonize p = top (bitwidth p)

(* Empty iff the (approximate) cardinality is 0. *)
let is_bottom (p : t) : bool = W.is_zero p.cardn
(* Helper: checks whether the first infinite progression is a
   subprogression of the second, i.e. aligned with it and with a step
   containing at least as many factors of 2. *)
let subprogression (b1,s1) (b2,s2) : bool =
  if W.is_zero s1 then
    (* Singleton: true iff b1 lies on the second progression, i.e. some
       number of s2-sized steps gets from b1 to b2. *)
    let diff = W.sub b2 b1 in
    let rem = W.modulo diff s2 in
    W.is_zero rem
  else if W.is_zero s2 then W.is_zero s1 && b1 = b2
  else
    (* Compare only the power-of-two parts of the steps, and check the
       bases agree modulo the second's power-of-two part. *)
    let coprime1, _ = factor_2s s1 in
    let coprime2, _ = factor_2s s2 in
    let powTwo1 = W.div s1 coprime1 in
    let powTwo2 = W.div s2 coprime2 in
    let bRoot1 = W.modulo b1 powTwo2 in
    let bRoot2 = W.modulo b2 powTwo2 in
    W.(>=) powTwo1 powTwo2 && W.(=) bRoot1 bRoot2
(* Determines whether two CLPs represent the same set.  Canonical forms
   are unique, so structural equality of the canonized records decides
   it — much faster than mutual subset. *)
let equal (p1 : t) (p2 : t) : bool =
  ignore (get_and_check_sizes p1 p2);
  canonize p1 = canonize p2
(* Re-anchor [p] on its unsigned minimum element, so the result does not
   wrap past 0.  Falls back to bottom when [p] is empty. *)
let unwrap (p : t) : t =
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(bottom (bitwidth p)) begin
    min_elem p >>= fun base ->
    max_elem p >>= fun e ->
    let step = p.step in
    let cardn = cardn_from_bounds base step e in
    !!{base; step; cardn}
  end

(* Re-anchor [p] on its signed minimum element, so the result does not
   wrap past the signed boundary.  Falls back to top when [p] has no
   signed extrema. *)
let unwrap_signed (p : t) : t =
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:(top (bitwidth p)) begin
    min_elem_signed p >>= fun base ->
    max_elem_signed p >>= fun e ->
    let step = p.step in
    let cardn = cardn_from_bounds base step e in
    !!{base; step; cardn}
  end
(* Helper: takes two inclusive circular intervals and returns the
   smallest interval that is a superset of the two.  Assumes all inputs
   have the same bitwidth. *)
let interval_union (a1,b1) (a2,b2) : (word * word) =
  let szInt = W.bitwidth a1 in
  (* Translate both intervals by -a1 so that the first starts at 0. *)
  let b1' = W.sub b1 a1 in
  let a2' = W.sub a2 a1 in
  let b2' = W.sub b2 a1 in
  let zero = W.zero szInt in
  (* If b2' < a2', the second interval wraps over the zero point and a1
     lies in (a2,b2); if a2' (and so also b2') is inside the first
     interval, the two together wrap the full circle. *)
  if W.(>=) b1' a2' && W.(<) b2' a2' then (b1, W.pred b1)
  (* If a2' and b2' are both between 0 and b1' then a2 and b2 are between
     a1 and b1; the case above excluded, the first interval subsumes the
     second. *)
  else if W.(>=) b1' a2' && W.(>=) b1' b2' then (a1, b1)
  (* Excluding the above, b2' is not in (0,b1'); since a2' is, the
     intervals overlap and stretch from a1 to b2. *)
  else if W.(>=) b1' a2' then (a1, b2)
  (* From here a2' is not within (0,b1').  If b2' is, the intervals
     overlap in the other direction, stretching from a2 to b1. *)
  else if W.(<) b2' b1' then (a2, b1)
  (* Neither a2' nor b2' is in (0,b1'); if b2' < a2' then (a2',b2') wraps
     around 0 and so subsumes (0,b1'). *)
  else if W.(<) b2' a2' then (a2, b2)
  (* Otherwise there are two gaps; include the smaller one. *)
  else if W.(>) (W.sub a2' b1') (W.sub zero b2') then (a2,b1)
  else (a1, b2)
(* Shift every element of [p] by [i]; step and cardinality unchanged. *)
let translate (p : t) i : t =
  {p with base = W.add p.base i}
(* Helper: given two progressions, each a start point and a step size,
   computes the largest step that contains both points and every point
   they step to — equivalently, the step of the union of the two infinite
   CLPs.  Assumes the inputs share a bitwidth.  We take the gcd of the
   step sizes and of the (shortest) distance between the bases, so the
   resulting step, started from either base, eventually reaches all
   points of both progressions in their original order. *)
let common_step (b1,s1) (b2,s2) : word =
  let bDiff = if W.(>) b1 b2 then W.sub b1 b2 else W.sub b2 b1 in
  if W.is_zero s1 then bounded_gcd s2 bDiff
  else if W.is_zero s2 then bounded_gcd s1 bDiff
  else let gcdS = (bounded_gcd s1 s2) in
    bounded_gcd gcdS bDiff
(* Defines a partial order on CLPs in terms of the sets they represent:
   true iff every element of p1 is an element of p2. *)
let subset (p1 : t) (p2 : t) : bool =
  let width = get_and_check_sizes p1 p2 in
  let p1 = canonize p1 in
  let p2 = canonize p2 in
  (* Translate both CLPs by -p2.base so that p2 starts at 0, simplifying
     the bound comparisons below. *)
  let nb2 = W.neg p2.base in
  let p1 = translate p1 nb2 in
  let p2 = translate p2 nb2 in
  (* One finite CLP is a subset of another if its step is a multiple of
     the other's step, its bounds are within the other's bounds, and they
     overlap on at least one point.  Singleton CLPs can carry any step,
     so they are tested by direct membership instead. *)
  let end1 = finite_end p1 in
  let end2 = finite_end p2 in
  begin match end1, end2 with
    | None, _ -> true        (* the empty set is a subset of anything *)
    | Some _, None -> false  (* a nonempty set never fits in the empty one *)
    | Some e1, Some e2 ->
      let in_bounds = W.(<=) e1 e2 && W.(<=) p1.base e2 in
      (* p2's step subsumes p1's progression exactly when merging p1 into
         p2's step does not refine it. *)
      let step_and_overlap = W.(=) (common_step (p1.base,p1.step) (W.zero width, p2.step)) p2.step in
      let singleton_elem = is_one p1.cardn && elem p1.base p2 in
      singleton_elem || (in_bounds && step_and_overlap)
  end
(* To find the start of the intersection, we need the first point base
   with the following constraints:
     base = p1.base (mod p1.step)
     base = p1.base (mod p2.step)
     base in [p1.base, e1]
     base in [p2.base, e2]
   where e1 and e2 are the ends of p1 and p2 respectively.
   Translating both CLPs by -p1.base (write x' for x - p1.base):
     exists j. base' = j * p1.step
     exists k. base' = p2.base' + k * p2.step
     p2.base' <= base' <= min(e1', e2')
   Thus base' is (j0 * p1.step) where j0 is the least solution of the
   linear diophantine equation j * p1.step - k * p2.step = p2.base' and,
   in the finite case, p2.base' <= base' <= min(e1', e2'). *)
let intersection (p1 : t) (p2 : t) : t =
  let width = get_and_check_sizes p1 p2 in
  let bot = bottom width in
  (* Canonize so that the CLPs may be treated as finite in most cases
     and to simplify other handling. *)
  let p1 = canonize p1 in
  let p2 = canonize p2 in
  let p1, p2 = if W.(>=) p1.base p2.base then p1, p2 else p2, p1 in
  (* Translate the CLPs so that p1 starts at 0. *)
  let translation = p1.base in
  let translated_p1 = translate p1 (W.neg p1.base) in
  let translated_p2 = translate p2 (W.neg p1.base) in
  let p1, p2 = translated_p1, translated_p2 in
  let p1_infinite = is_infinite p1 in
  let p2_infinite = is_infinite p2 in
  let open Monads.Std.Monad.Option.Syntax in
  Option.value ~default:bot begin
    finite_end p1 >>= fun e1 ->
    finite_end p2 >>= fun e2 ->
    let step = W.lcm_exn p1.step p2.step in
    if W.is_zero step then begin
      (* If there is no bounded LCM then s1 or s2 is 0, i.e. one input is
         a singleton; the intersection is that singleton or empty. *)
      if W.is_zero p2.step then
        Option.some_if (elem p2.base p1) () >>= fun _ ->
        !!(create p2.base)
      else
        Option.some_if (elem p1.base p2) () >>= fun _ ->
        !! (create p1.base)
    end else begin
      bounded_diophantine p1.step p2.step p2.base >>= fun (x,_) ->
      (* The result's base is the first point on p1 also on the infinite
         approximation of p2; after the translation this is the least
         value that can inhabit the intersection. *)
      let base = W.mul x p1.step in
      let minE = if p1_infinite then e2
        else if p2_infinite then e1
        else min e1 e2 in
      (* If the least possible value exceeds the largest possible value,
         the set is empty (the infinite cases above absorb the wrapping
         exceptions). *)
      Option.some_if (W.(<=) base minE) () >>= fun _ ->
      let cardn = cardn_from_bounds base step minE in
      !!(create base ~step ~cardn)
    end
  end |> (fun p -> translate p translation)
let overlap (p1 : t) (p2 : t) : bool = not (is_bottom (intersection p1 p2))
(* Approximates the union of the two abstracted sets: the smallest
   circular interval covering both, walked with their common step.
   Input CLPs should have the same bitwidth. *)
let union (p1 : t) (p2 : t) : t =
  (* Canonize so that the CLPs may be treated as finite. *)
  let p1 = canonize p1 in
  let p2 = canonize p2 in
  (* If either CLP is empty, return the other one. *)
  Option.value_map ~default:p2 (finite_end p1) ~f:begin fun e1 ->
    Option.value_map ~default:p1 (finite_end p2) ~f:begin fun e2 ->
      (* Compute the bounds of the interval that contains both CLPs. *)
      let base, newE = interval_union (p1.base, e1) (p2.base, e2) in
      let step = common_step (p1.base, p1.step) (p2.base, p2.step) in
      let cardn = cardn_from_bounds base step newE in
      create base ~step ~cardn
    end
  end
(* Abstract addition.  Singleton operands are handled exactly as a
   translation; otherwise the result steps by the gcd of the two steps,
   degrading to an infinite CLP when either input is infinite or the
   combined extent wraps the domain. *)
let add (p1 : t) (p2 : t) : t =
  let sz = get_and_check_sizes p1 p2 in
  (* If either CLP is empty, return bottom. *)
  Option.value_map ~default:(bottom sz) (finite_end p1) ~f:begin fun e1 ->
    Option.value_map ~default:(bottom sz) (finite_end p2) ~f:begin fun e2 ->
      (* If either CLP has step 0 it is a singleton and we simply add its
         value to each element of the other.  This case is computed
         exactly. *)
      if W.is_zero p1.step || is_one p1.cardn then translate p2 p1.base
      else if W.is_zero p2.step || is_one p2.cardn then translate p1 p2.base
      else if is_infinite p1 || is_infinite p2 then
        infinite (W.add p1.base p2.base, bounded_gcd p1.step p2.step)
      else
        (* Work relative to each base; e' is the combined extent. *)
        let e1' = W.sub e1 p1.base in
        let e2' = W.sub e2 p2.base in
        let e' = W.add e1' e2' in
        let base = W.add p1.base p2.base in
        let step = bounded_gcd p1.step p2.step in
        (* e' < e1' means the extent sum wrapped: cover the full circle. *)
        if W.(<) e' e1' then infinite (base, step)
        else let cardn = cardn_from_bounds (W.zero sz) step e' in
          create base ~step ~cardn
    end
  end
(* Arithmetic negation of every element; negating reverses the
   progression, so the negated end becomes the new base. *)
let neg (p : t) : t =
  (* We use the finite end even in the infinite case since it is always
     guaranteed to be some point on the CLP, and for an infinite CLP any
     point works as the base. *)
  Option.value_map ~default:(bottom (bitwidth p)) (finite_end p)
    ~f:(fun e -> {base = W.neg e; step = p.step; cardn = p.cardn})
let sub (p1: t) (p2 : t) : t = add p1 (neg p2)
(* Abstract multiplication.  Singleton operands scale the other CLP
   exactly; otherwise bounds and steps are computed without overflow
   (via mul_exact) and the result degrades to an infinite CLP when
   either input is infinite. *)
let mul (p1 : t) (p2 : t) : t =
  let sz = get_and_check_sizes p1 p2 in
  (* If either CLP is empty, return bottom. *)
  Option.value_map ~default:(bottom sz) (finite_end p1) ~f:begin fun e1 ->
    Option.value_map ~default:(bottom sz) (finite_end p2) ~f:begin fun e2 ->
      (* If either CLP is a singleton, we simply multiply each element of
         the other by its value.  This case is computed exactly. *)
      if W.is_zero p1.step || is_one p1.cardn then
        let base = W.mul p2.base p1.base in
        let step = W.mul p2.step p1.base in
        create base ~step ~cardn:p2.cardn
      else if W.is_zero p2.step || is_one p2.cardn then
        let base = W.mul p1.base p2.base in
        let step = W.mul p1.step p2.base in
        create base ~step ~cardn:p1.cardn
      else
        (* The new step must divide every cross term of the product. *)
        let base = mul_exact p1.base p2.base in
        let e'_exact = mul_exact e1 e2 in
        let step = bounded_gcd (mul_exact p1.base p2.step)
            (bounded_gcd (mul_exact p2.base p1.step)
               (mul_exact p1.step p2.step)) in
        let end_diff = W.sub e'_exact base in
        let div_res = W.div end_diff step in
        let cardn = W.succ @@ add_bit div_res in
        if is_infinite p1 || is_infinite p2 then
          let fit = W.extract_exn ~hi:(sz - 1) in
          infinite (fit base, fit step)
        else create ~width:sz base ~step ~cardn
    end
  end
(* Helper: binary search from the high end for the run of 1-bits of [w]
   within bit positions [lo..hi]; returns one past the lowest index the
   search settles on.  NOTE(review): the upstream comment here is only a
   stack of TODOs ("describe", "check", "move to word_ops?"); behavior
   when w[hi..lo] has no leading 1-run is unverified — confirm against
   upstream before relying on it. *)
let lead_1_bit_run (w : word) ~hi ~lo : int =
  let rec lead_help (hi : int) (lo : int) : int =
    if hi = lo then hi else
      let mid = (hi + lo) / 2 in
      (* If the upper half is all ones, the run extends past it; recurse
         downward, otherwise narrow into the upper half. *)
      let hi_part = W.extract_exn ~hi ~lo:(mid + 1) w in
      if W.is_zero (W.lnot hi_part) then lead_help mid lo
      else lead_help hi (mid + 1)
  in
  assert(lo >= 0);
  assert(hi >= lo);
  (lead_help hi lo) + 1
let compute_l_s_b lsb1 lsb2 b1 b2 : int = if lsb1 < lsb2 then
let interval = W.extract_exn ~hi:(lsb2 - 1) ~lo:lsb1 b2 in
let w, bit = factor_2s interval in
if W.is_zero w then lsb2 else bit + lsb1
else if lsb1 > lsb2 then
let interval = W.extract_exn ~hi:(lsb1 - 1) ~lo:lsb2 b1 in
let w, bit = factor_2s interval in
if W.is_zero w then lsb1 else bit + lsb2
let compute_m_s_b msb1 msb2 b1 b2 : int = if msb1 > msb2 then
let interval = W.extract_exn ~hi:msb1 ~lo:(msb2 + 1) b2 in
Option.value_map ~default:msb2
(lead_1_bit interval)
~f:(fun b -> b + msb2 + 1)
else if msb1 < msb2 then
let interval = W.extract_exn ~hi:msb2 ~lo:(msb1 + 1) b1 in
Option.value_map ~default:msb1
(lead_1_bit interval)
~f:(fun b -> b + msb1 + 1)
let compute_range_sep msb msb1 msb2 b1 b2 : int = if msb1 > msb2
then if msb = msb1 then lead_1_bit_run b2 ~hi:msb1 ~lo:(msb2 + 1) else msb + 1
else if msb1 < msb2 then
if msb = msb2 then lead_1_bit_run b1 ~hi:msb2 ~lo:(msb1 + 1) else msb + 1
else -1
This algorithm closely follows the one in " Circular Linear Progressions
in " . It converts a CLP into an AP ( terminology from the paper )
by using the least non - wrapping superset .
in SWEET". It converts a CLP into an AP (terminology from the paper)
by using the least non-wrapping superset.
*)
let logand (p1 : t) (p2 : t) : t =
let sz = get_and_check_sizes p1 p2 in
let bot = bottom sz in
let cardn_two = W.of_int ~width:(sz + 1) 2 in
let cp1 = canonize p1 in
let cp2 = canonize p2 in
We ensure that the cardinality of the second CLP is at
least the cardinality of the first to collapse the two cases where
once CLP has cardinality 1 and the other has cardinality 2 .
least the cardinality of the first to collapse the two cases where
once CLP has cardinality 1 and the other has cardinality 2.
*)
let p1, p2 = if W.(<=) (cardinality cp1) (cardinality cp2)
then (cp1, cp2) else (cp2, cp1) in
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:bot begin
if W.is_zero p1.cardn || W.is_zero p2.cardn then !!bot
else if W.is_one p1.cardn && W.is_one p2.cardn then
!!(create (W.logand p1.base p2.base))
else if W.is_one p1.cardn && W.(=) p2.cardn cardn_two then
CLPs can represent any two - element set exactly , so
we compute the two elements of the set and return them .
we compute the two elements of the set and return them.
*)
finite_end p2 >>= fun e2 ->
let base = W.logand p1.base p2.base in
let newE = W.logand p1.base e2 in
let step = W.sub newE base in
let cardn = cardn_two in
!!(create base ~step ~cardn)
else
min_elem p1 >>= fun min_elem_p1 ->
max_elem p1 >>= fun max_elem_p1 ->
min_elem p2 >>= fun min_elem_p2 ->
max_elem p2 >>= fun max_elem_p2 ->
let _, twos_in_s1 = factor_2s p1.step in
let _, twos_in_s2 = factor_2s p2.step in
let least_significant_bit_p1 = if W.is_one p1.cardn then sz
since the CLP is canonized , if p1.cardn > 1 then p1.step < > 0
else twos_in_s1 in
let least_significant_bit_p2 = if W.is_one p2.cardn then sz
since the CLP is canonized , if p2.cardn > 1 then p2.step < > 0
else twos_in_s2 in
There is no most significant bit iff the cardinality is 1
let most_significant_bit_p1 = Option.value ~default:(-1)
(lead_1_bit (W.logxor min_elem_p1 max_elem_p1)) in
let most_significant_bit_p2 = Option.value ~default:(-1)
(lead_1_bit (W.logxor min_elem_p2 max_elem_p2)) in
let l_s_b = compute_l_s_b
least_significant_bit_p1
least_significant_bit_p2
min_elem_p1 min_elem_p2 in
let m_s_b = compute_m_s_b
most_significant_bit_p1
most_significant_bit_p2
min_elem_p1 min_elem_p2 in
if l_s_b > m_s_b then
let base = W.logand min_elem_p1 min_elem_p2 in
!!(create base)
else
let range_sep = compute_range_sep m_s_b
most_significant_bit_p1
most_significant_bit_p2
min_elem_p1 min_elem_p2
in
let mask = if l_s_b >= range_sep then W.zero sz
else let ones = W.ones (range_sep - l_s_b) in
let sized_ones = W.extract_exn ~hi:(sz - 1) ones in
Word.lshift sized_ones (W.of_int ~width:sz l_s_b) in
let safe_lower_bound =
W.logand min_elem_p1 min_elem_p2 |> W.logand (W.lnot mask) in
let safe_upper_bound = W.logand max_elem_p1 max_elem_p2 |>
W.logor mask |>
W.min max_elem_p1 |>
W.min max_elem_p2 in
let twos_step = W.lshift (W.of_int 1 ~width:sz)
(W.of_int l_s_b ~width:sz) in
let step = if most_significant_bit_p1 > most_significant_bit_p2 &&
m_s_b = most_significant_bit_p1 &&
range_sep = l_s_b then
W.max p1.step twos_step
else if most_significant_bit_p2 > most_significant_bit_p1 &&
m_s_b = most_significant_bit_p2 &&
range_sep = l_s_b then
W.max p2.step twos_step
else twos_step in
let b1_and_b2 = W.logand min_elem_p1 min_elem_p2 in
let frac = cdiv (W.sub safe_lower_bound b1_and_b2) step in
let base = W.add b1_and_b2 (W.mul step frac) in
let cardn = W.div (W.sub safe_upper_bound base) step |> succ_exact in
!!(create base ~step ~cardn)
end
let logor (p1 : t) (p2 : t) : t = lnot (logand (lnot p1) (lnot p2))
let logxor (p1 : t) (p2 : t) : t =
let width = get_and_check_sizes p1 p2 in
let two = create (W.of_int 2 ~width) in
let approx1 = logor (logand p1 (lnot p2)) (logand (lnot p1) p2) in
equality taken from Hacker 's Delight
let approx2 = sub (add p1 p2) (mul (logand p1 p2) two) in
intersection approx1 approx2
Note : this operation accepts inputs of different sizes as per the BAP IR
let lshift (p1 : t) (p2 : t) : t =
let sz1 = bitwidth p1 in
let p2 = canonize p2 in
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(bottom sz1) begin
finite_end p1 >>= fun e1 ->
min_elem p2 >>= fun min_p2 ->
max_elem p2 >>= fun max_p2 ->
if W.(>=) max_p2 (W.of_int sz1 ~width:(W.bitwidth max_p2)) then
let msg = "During lshift, maximum element of CLP2 is >= CLP1's width" in
!!(not_implemented ~top:(top sz1) msg)
else
let max_p2_int = W.to_int_exn max_p2 in
let base = W.lshift p1.base min_p2 in
let step = if is_one p2.cardn
then W.lshift p1.step min_p2
else W.lshift (bounded_gcd p1.base p1.step) min_p2 in
let e_no_wrap = lshift_exact e1 max_p2_int in
let e_width = W.bitwidth e_no_wrap in
same as cardn_from_bounds , but adapted to e_no_wrap 's
let cardn = if W.is_zero step then W.one 1 else
let base_ext = W.extract_exn ~hi:(e_width - 1) base in
let step_ext = W.extract_exn ~hi:(e_width - 1) step in
let div_by_step = W.div (W.sub e_no_wrap base_ext) step_ext in
W.succ (W.extract_exn ~hi:e_width div_by_step) in
!!(create base ~step ~cardn)
end
Note , this function only accepts CLPs that are the same bitwidth .
let rshift_step rshift ~p1 ~p2 ~e2 ~sz1 ~sz2 =
assert(sz1 = sz2);
let _, b1twos = factor_2s p1.base in
let _, s1twos = factor_2s p1.step in
let s1_divisible = W.(>=) (W.of_int s1twos ~width:sz1) e2 in
let b1_divisible = W.(>=) (W.of_int b1twos ~width:sz1) e2 in
let b1_initial_ones = count_initial_1s p1.base in
if (s1_divisible && W.is_one p2.cardn) ||
(s1_divisible && b1_divisible) ||
(s1_divisible && W.(>=) (W.of_int ~width:sz2 b1_initial_ones) e2)
then bounded_gcd (rshift p1.step e2) @@ W.sub (rshift p1.base @@ W.sub e2 p2.step)
(rshift p1.base @@ e2)
else W.one sz1
let rshift (p1 : t) (p2 : t) : t =
let sz1 = bitwidth p1 in
let sz2 = bitwidth p2 in
let p1 = unwrap @@ canonize p1 in
let p2 = unwrap @@ canonize p2 in
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(bottom sz1) begin
finite_end p1 >>= fun e1 ->
finite_end p2 >>= fun e2 ->
let base = W.rshift p1.base e2 in
if W.is_one p1.cardn && W.is_one p2.cardn
then !!(create base)
else
let step = rshift_step W.rshift ~p1 ~p2 ~e2 ~sz1 ~sz2 in
let cardn = cardn_from_bounds base step (W.rshift e1 p2.base) in
!!(create base ~step ~cardn)
end
Note : [ p1 ] and [ p2 ] must have the same bitwidth , since this function
depends on the [ rshift_step ] function above .
Note also that the SWEET paper 's algorithm for arshift is incorrect .
To compute the new [ n ] ( cardinality ) on p. 26 , it has this condition :
- [ if b1 > = c1[n1 - 1 ] ]
We have altered that condition to this :
- [ if 0 > = c1[n1 - 1 ] ]
depends on the [rshift_step] function above.
Note also that the SWEET paper's algorithm for arshift is incorrect.
To compute the new [n] (cardinality) on p. 26, it has this condition:
- [if b1 >= c1[n1 - 1]]
We have altered that condition to this:
- [if 0 >= c1[n1 - 1]]
*)
let arshift (p1 : t) (p2 : t) : t =
let sz1 = bitwidth p1 in
let sz2 = bitwidth p2 in
let zero = W.zero sz1 in
let p1 = unwrap_signed @@ canonize p1 in
let p2 = unwrap @@ canonize p2 in
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(bottom sz1) begin
finite_end p1 >>= fun e1 ->
finite_end p2 >>= fun e2 ->
if W.is_one p1.cardn && W.is_one p2.cardn
then
let base = W.arshift (W.signed p1.base) p2.base in
!!(create base ~width:sz1)
else
let base =
if W.(>=) (W.signed p1.base) zero
then W.arshift (W.signed p1.base) e2
else W.arshift (W.signed p1.base) p2.base
in
let step = rshift_step W.arshift ~p1 ~p2 ~e2 ~sz1 ~sz2 in
let new_end =
if W.(>=) (W.signed e1) zero
then W.arshift (W.signed e1) p2.base
else W.arshift (W.signed e1) e2
in
let cardn = cardn_from_bounds base step new_end in
!!(create base ~step ~cardn)
end
helper function ; splits a CLP into two segments : one that contains all
points from the base up to n inclusive and another that contains the rest .
points from the base up to n inclusive and another that contains the rest.
*)
let split_at_n (p : t) n : t * t =
The CLP is canonized so that it can be treated as finite
let p = canonize p in
The first set extends to the highest point on the CLP up to e
let cardn1 = min (cardn_from_bounds p.base p.step n) p.cardn in
The second set contains all of the other elements
let cardn2 = W.sub p.cardn cardn1 in
let p2_base = nearest_inf_succ (W.succ n) p.base p.step in
{base = p.base; step = p.step; cardn = cardn1},
{base = p2_base; step = p.step; cardn = cardn2}
helper function ; produces a result that contains exactly the
elements of the input extended to the given width . Always
returns disjoint CLPs .
Expects that width > = : in most cases , p2 will represent the empty set . If dealing with
both CLPs is difficult or imprecise , this represents an opportunity
for optimization .
elements of the input clp extended to the given width. Always
returns disjoint CLPs.
Expects that width >= bitwidth p.
Note: in most cases, p2 will represent the empty set. If dealing with
both CLPs is difficult or imprecise, this represents an opportunity
for optimization.
*)
let extract_exact ~width:(width : int) (p : t) : t * t =
let p_width = bitwidth p in
assert (width >= p_width);
The CLP is canonized so that infinite CLPs do not have to be treated
specially .
specially.
*)
let lastn = W.ones p_width in
let p1, p2 = split_at_n p lastn in
create ~width p1.base ~step:p1.step ~cardn:p1.cardn,
create ~width p2.base ~step:p2.step ~cardn:p2.cardn
let extract_lo ?(lo = 0) (p : t) : t =
let p = canonize p in
let width = bitwidth p in
let res_width = width - lo in
assert(lo < width);
if lo = 0 then p else
let default = bottom res_width in
Option.value_map ~default (finite_end p) ~f:begin fun e ->
let base = W.extract_exn ~lo p.base in
let ext_lo w = W.extract_exn ~hi:(lo - 1) w in
let base_mod_2lo = ext_lo p.base in
let step_mod_2lo = ext_lo p.step in
let max_step_effect = add_exact base_mod_2lo @@
mul_exact step_mod_2lo (W.pred p.cardn) in
let carry_bound = dom_size ~width:(W.bitwidth max_step_effect) lo in
if W.(<) max_step_effect carry_bound then
create base ~step:(W.extract_exn ~lo p.step) ~cardn:p.cardn
else
let e = W.extract_exn ~lo e in
let step = W.one res_width in
let cardn = cardn_from_bounds base step e in
create base ~step ~cardn
end
let extract_hi ?(hi = None) ?(signed = false) (p : t) : t =
let sz = bitwidth p in
let hiv = Option.value ~default:(bitwidth p - 1) hi in
if not signed && hiv + 1 >= sz then
let res1, res2 = extract_exact ~width:(hiv + 1) p in
union res1 res2
else if not signed then
create ~width:(hiv+1) p.base ~step:p.step ~cardn:p.cardn
else
TODO : signed case is old ; check and update
let ext = W.extract_exn ~hi:hiv in
let ext_signed w = W.extract_exn ~hi:hiv (W.signed w) in
Option.value_map ~default:(bottom (hiv + 1)) (finite_end p) ~f:begin
fun e ->
if is_infinite p then infinite (ext p.base, ext p.step)
else
negmin is the number of the form 10 * ; the most negative
number when interpreted as signed . When signed , its predecessor
is the most positive ( signed ) number .
number when interpreted as signed. When signed, its predecessor
is the most positive (signed) number.
*)
let negmin = W.lshift (W.one sz) (W.of_int ~width:sz (sz-1)) in
let posmax = W.pred negmin in
if elem negmin p && elem posmax p &&
(p.base <> negmin || e <> posmax) then
not_implemented ~top:(top (hiv + 1))
"extract signed crossing max signed int"
else
let e' = W.sub e p.base in
let newE' = ext e' in
let base = if signed then ext_signed p.base else ext p.base in
let step = ext p.step in
if W.(<) newE' e' then infinite(base, step)
else
let cardn = cardn_from_bounds (W.zero (hiv + 1)) step newE' in
create base ~step ~cardn
end
let extract_internal ?hi ?(lo = 0) ?(signed = false) (p : t) : t =
let hi = Option.map hi ~f:(fun hi -> hi - lo) in
extract_lo ~lo p |> extract_hi ~hi ~signed
let cast ct (sz : int) (p : t) : t = match ct with
| Bil.UNSIGNED -> extract_internal ~hi:(sz - 1) p
| Bil.SIGNED -> extract_internal ~hi:(sz - 1) ~signed:true p
| Bil.LOW -> extract_internal ~hi:(sz - 1) p
| Bil.HIGH -> extract_internal ~lo:(bitwidth p - sz) p
let extract ?hi:hi ?lo:lo (p : t) : t = extract_internal ?hi ?lo p
let concat (p1 : t) (p2 : t) : t =
let width1 = bitwidth p1 in
let width2 = bitwidth p2 in
let width = width1 + width2 in
let p1'base = lshift_exact p1.base width2 in
let p1'step = lshift_exact p1.step width2 in
let p1' = create p1'base ~step:p1'step ~cardn:p1.cardn in
let p2' = cast Bil.UNSIGNED width p2 in
(add p1' p2')
let of_list ~width l : t =
assert (width > 0);
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(bottom width) begin
let l = List.map l ~f:(W.extract_exn ~hi:(width - 1) ~lo:0) in
let l = List.sort ~compare:W.compare l in
let diff_list = List.map2_exn l (rotate_list l) ~f:W.sub in
let idx, _, step = List.foldi diff_list ~init:(0, W.zero width, W.zero width)
~f:(fun i (idx, diff, step) d ->
assert(W.bitwidth diff = W.bitwidth step);
assert(W.bitwidth diff = W.bitwidth d);
if W.(>) d diff
then i, d, bounded_gcd diff step
else idx, diff, bounded_gcd d step) in
let l = idx
" rotate " the list left by the index of the desired first element
|> List.split_n l
|> (fun (end_l, start_l) -> List.append start_l end_l) in
List.hd l >>= fun base ->
List.last l >>= fun e ->
let cardn = cardn_from_bounds base step e in
assert(W.bitwidth base = width);
!!{base; step; cardn}
end
Note : BAP uses Z.div ( standard division truncating
towards 0 and obeying the rule of signs ) internally
for signed division and Z.ediv ( the Euclidean algorithm )
for unsigned division . This seems strange since Z.div and
Z.ediv work the same way for nonnegative inputs . The
implementations of div and sdiv here are consistent with
both this approach and using just Z.div ( since it would
work the same way ) .
TODO : Compute a more accurate step for the division .
towards 0 and obeying the rule of signs) internally
for signed division and Z.ediv (the Euclidean algorithm)
for unsigned division. This seems strange since Z.div and
Z.ediv work the same way for nonnegative inputs. The
implementations of div and sdiv here are consistent with
both this approach and using just Z.div (since it would
work the same way).
TODO: Compute a more accurate step for the division.
*)
let div (p1 : t) (p2 : t) : t =
let width = get_and_check_sizes p1 p2 in
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(bottom width) begin
min_elem p1 >>= fun min_e1 ->
min_elem p2 >>= fun min_e2 ->
max_elem p1 >>= fun max_e1 ->
max_elem p2 >>= fun max_e2 ->
if elem (W.zero width) p2 then not_implemented "Clp division by zero"
else
let base = W.div min_e1 max_e2 in
let e = W.div max_e1 min_e2 in
let step = if W.is_one (cardinality p2) &&
W.is_zero (W.modulo p1.step p2.base)
then bounded_gcd (W.div p1.step p2.base)
(W.sub (W.div p1.base p2.base) base)
else W.one width in
let cardn = cardn_from_bounds base step e in
!!{base; step; cardn}
end
let sdiv (p1 : t) (p2 : t) : t =
let wsdiv a b = W.div (W.signed a) (W.signed b) in
let width = get_and_check_sizes p1 p2 in
let open Monads.Std.Monad.Option.Syntax in
Option.value ~default:(bottom width) begin
min_elem p1 >>= fun min_e1 ->
min_elem p2 >>= fun min_e2 ->
max_elem p1 >>= fun max_e1 ->
max_elem p2 >>= fun max_e2 ->
if elem (W.zero width) p2 then not_implemented "Clp division by zero"
else if W.is_one (cardinality p1) &&
W.is_one (cardinality p2)
then !!(singleton (wsdiv p1.base p2.base))
else
let minmax = wsdiv min_e1 max_e2 in
let minmin = wsdiv min_e1 min_e2 in
let maxmax = wsdiv max_e1 max_e2 in
let maxmin = wsdiv max_e1 min_e2 in
let base =
min minmax @@
min minmin @@
min maxmax maxmin in
let e =
max minmax @@
max minmin @@
max maxmax maxmin in
let step = W.one width in
let cardn = cardn_from_bounds base step e in
!!{base; step; cardn}
end
let modulo (p1 : t) (p2 : t) : t =
sub p1 (mul (div p1 p2) p2)
let smodulo (p1 : t) (p2 : t) : t =
sub p1 (mul (sdiv p1 p2) p2)
type idx = int
let get_idx = bitwidth
let precedes = subset
let join = union
let meet = intersection
let widen_join (p1 : t) (p2 : t) =
assert (subset p1 p2);
if equal p1 p2 then p1 else
infinite (p2.base, p2.step)
NOTE : this compare function is solely for implementation of the value
interface . It has the property that compare a b = 0 iff equal a b , but
otherwise the ordering is arbitrary ( though still properly transitive ) .
Importantly , precedes a b DOES NOT imply compare a b < = 0 or vice versa .
interface. It has the property that compare a b = 0 iff equal a b, but
otherwise the ordering is arbitrary (though still properly transitive).
Importantly, precedes a b DOES NOT imply compare a b <= 0 or vice versa.
*)
let compare (p1 : t) (p2 : t) : int =
let p1 = canonize p1 in
let p2 = canonize p2 in
let base_comp = W.compare p1.base p2.base in
let step_comp = W.compare p1.step p2.step in
let cardn_comp = W.compare p1.cardn p2.cardn in
let if_nzero_else a b = if a = 0 then b else a in
if_nzero_else base_comp @@
if_nzero_else step_comp @@
cardn_comp
let sexp_of_t (p : t) : Sexp.t =
let p = canonize p in
Sexp.List begin match finite_end p with
| Some e ->
if W.is_one p.cardn then [W.sexp_of_t p.base]
else if W.is_one @@ W.pred @@ p.cardn then
[W.sexp_of_t p.base; W.sexp_of_t e]
else [W.sexp_of_t p.base;
W.sexp_of_t (W.add p.base p.step);
Sexp.Atom "...";
W.sexp_of_t e]
| None -> [Sexp.Atom "{}"; Sexp.Atom (string_of_int (bitwidth p))]
end
let t_of_sexp : Sexp.t -> t = function
| Sexp.List [Sexp.Atom "{}"; Sexp.Atom s] ->
let width = int_of_string s in
bottom width
| Sexp.List [be] ->
let base = Word.t_of_sexp be in
create base
| Sexp.List [be; ne as ee]
| Sexp.List [be; ne; Sexp.Atom "..."; ee] ->
let base = Word.t_of_sexp be in
let next = Word.t_of_sexp ne in
let e = Word.t_of_sexp ee in
let step = Word.sub next base in
let cardn = cardn_from_bounds base step e in
{base; step; cardn}
| Sexp.List _
| Sexp.Atom _ -> failwith "Sexp not a CLP"
let pp ppf (p : t) =
let p = canonize p in
let width = bitwidth p in
match finite_end p with
| None -> Format.fprintf ppf "{}:%i" width
| Some _ when W.is_one p.cardn ->
Format.fprintf ppf "@[{%a}:%i@]" W.pp p.base width
| Some e when W.is_one @@ W.pred p.cardn ->
Format.fprintf ppf "@[{%a,@ %a}:%i@]"
W.pp p.base
W.pp e
width
| Some e ->
Format.fprintf ppf "@[{%a,@ %a,@ ...,@ %a}:%i@]"
W.pp p.base
W.pp (W.add p.base p.step)
W.pp e
width
let spp (p : t) : string = Format.asprintf "%a" pp p
|
e6f3fcad66a8062257da9d4ef144a06a44c001dce3a4b39b26e86c3dc9a39af3 | frenchy64/Logic-Starter | core.clj | (ns logic-introduction.test.core
(:use [logic-introduction.core])
(:use [clojure.test]))
(deftest replace-me ;; FIXME: write
(is false "No tests have been written."))
| null | https://raw.githubusercontent.com/frenchy64/Logic-Starter/cc3999836759965e7dcc1a7d1c9731216681dcf8/test/logic_introduction/test/core.clj | clojure | FIXME: write | (ns logic-introduction.test.core
(:use [logic-introduction.core])
(:use [clojure.test]))
(is false "No tests have been written."))
|
d325ec1ebf54ecca84e36ec28c871ccfc12598798b1648d645cec6d8ff2985d2 | archaelus/erlmail | imapd_app.erl | %%%---------------------------------------------------------------------------------------
@author < > [ ]
2006 - 2007 Simple Enigma , Inc. All Rights Reserved .
%%% @doc IMAP Server OTP applicaiton file
@reference See < a href=" / modules / erlmail " target="_top">Erlang Software Framework</a > for more information
%%% @reference See <a href="" target="_top">ErlMail Google Code Repository</a> for more information
%%% @version 0.0.6
@since 0.0.6
%%% @end
%%%
%%%
The MIT License
%%%
Copyright ( c ) 2007 , Simple Enigma , Inc. All Righs Reserved
%%%
%%% Permission is hereby granted, free of charge, to any person obtaining a copy
%%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%%% furnished to do so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%%% THE SOFTWARE.
%%%
%%%
%%%---------------------------------------------------------------------------------------
-module(imapd_app).
-author('').
-include("../include/imap.hrl").
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
%%----------------------------------------------------------------------
%% Application behaviour callbacks
%%----------------------------------------------------------------------
start(_Type, _Args) ->
ListenPort = erlmail_util:get_app_env(server_imap_port, 143),
supervisor:start_link({local, ?MODULE}, ?MODULE, [ListenPort, imapd_fsm]).
stop(_S) ->
ok.
| null | https://raw.githubusercontent.com/archaelus/erlmail/fe69b0e936f1512b4f349666e56c31c0af7b672c/src/imapd_app.erl | erlang | ---------------------------------------------------------------------------------------
@doc IMAP Server OTP applicaiton file
@reference See <a href="" target="_top">ErlMail Google Code Repository</a> for more information
@version 0.0.6
@end
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---------------------------------------------------------------------------------------
Application callbacks
----------------------------------------------------------------------
Application behaviour callbacks
----------------------------------------------------------------------
| @author < > [ ]
2006 - 2007 Simple Enigma , Inc. All Rights Reserved .
@reference See < a href=" / modules / erlmail " target="_top">Erlang Software Framework</a > for more information
@since 0.0.6
The MIT License
Copyright ( c ) 2007 , Simple Enigma , Inc. All Righs Reserved
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-module(imapd_app).
-author('').
-include("../include/imap.hrl").
-behaviour(application).
-export([start/2, stop/1]).
start(_Type, _Args) ->
ListenPort = erlmail_util:get_app_env(server_imap_port, 143),
supervisor:start_link({local, ?MODULE}, ?MODULE, [ListenPort, imapd_fsm]).
stop(_S) ->
ok.
|
3ec58bd6afedf6f3aedc3ded194d47ca0e703d32ef6365358c3f51112999b171 | dalaing/little-languages | Infer.hs | module Test.Term.Infer (
inferTests
) where
import Test.Tasty
import Test.Tasty.QuickCheck
import Data.Maybe (mapMaybe)
import Control.Monad.Except (runExcept)
import Term.Gen
import Term.Infer
import Type.Error
inferTests :: TestTree
inferTests = testGroup "infer" [
testProperty "wellTypedInfer" propWellTypedInfer
, testProperty "illTypedInfer" propIllTypedInfer
, testProperty "inferUnique" propInferUnique
, testProperty "neverUnknown" propNeverUnknown
]
isRight :: Either a b -> Bool
isRight (Right _) = True
isRight _ = False
isLeft :: Either a b -> Bool
isLeft (Left _) = True
isLeft _ = False
propWellTypedInfer :: WellTypedTerm -> Bool
propWellTypedInfer (WellTypedTerm t) =
isRight . runExcept . infer $ t
propIllTypedInfer :: IllTypedTerm -> Bool
propIllTypedInfer (IllTypedTerm t) =
isLeft . runExcept . infer $ t
propInferUnique :: AnyTerm -> Property
propInferUnique (AnyTerm t) =
matches === 1
where
matches =
length .
fmap runExcept .
mapMaybe ($ t) $
inferRules
propNeverUnknown :: AnyTerm -> Bool
propNeverUnknown (AnyTerm t) =
case (runExcept . infer) t of
Left (TeUnknownType _) -> False
_ -> True
| null | https://raw.githubusercontent.com/dalaing/little-languages/9f089f646a5344b8f7178700455a36a755d29b1f/code/old/multityped/nb-srcloc/tests/Test/Term/Infer.hs | haskell | module Test.Term.Infer (
inferTests
) where
import Test.Tasty
import Test.Tasty.QuickCheck
import Data.Maybe (mapMaybe)
import Control.Monad.Except (runExcept)
import Term.Gen
import Term.Infer
import Type.Error
inferTests :: TestTree
inferTests = testGroup "infer" [
testProperty "wellTypedInfer" propWellTypedInfer
, testProperty "illTypedInfer" propIllTypedInfer
, testProperty "inferUnique" propInferUnique
, testProperty "neverUnknown" propNeverUnknown
]
isRight :: Either a b -> Bool
isRight (Right _) = True
isRight _ = False
isLeft :: Either a b -> Bool
isLeft (Left _) = True
isLeft _ = False
propWellTypedInfer :: WellTypedTerm -> Bool
propWellTypedInfer (WellTypedTerm t) =
isRight . runExcept . infer $ t
propIllTypedInfer :: IllTypedTerm -> Bool
propIllTypedInfer (IllTypedTerm t) =
isLeft . runExcept . infer $ t
propInferUnique :: AnyTerm -> Property
propInferUnique (AnyTerm t) =
matches === 1
where
matches =
length .
fmap runExcept .
mapMaybe ($ t) $
inferRules
propNeverUnknown :: AnyTerm -> Bool
propNeverUnknown (AnyTerm t) =
case (runExcept . infer) t of
Left (TeUnknownType _) -> False
_ -> True
| |
a8a6c1e672730239efa70b28f6dff2ada6be678b969bfde6e357147a09416149 | smucclaw/dsl | WordNet.hs | # LANGUAGE TransformListComp #
module LS.NLP.WordNet where
import Data.List (isPrefixOf, sortOn, isSuffixOf, nub)
import Data.List.Split
import Text.EditDistance
import qualified Data.Text as Text
import Data.Text (Text)
import WordNet.DB
import WordNet.Structured
wnNounDerivations :: Text -> IO Text
wnNounDerivations ogWord = do
Suppose that ogWord is " respond " . The word " respond " belongs to three synsets :
1 ) sWords=[react , respond ] , defn = show a response or a reaction to something
2 ) sWords=[answer , reply , respond ] , = react verbally
3 ) sWords=[respond ] = respond favorably ; " cancer responded to therapy "
In vanilla WordNet , derivation is a morphological feature , so you would't get " reaction " for " respond " .
But in haskell - wordnet , the function getDerivations ' takes a single word , like " respond " ,
and looks up the derivations of /all words in all its synsets/ : [ react , respond , answer , reply ] .
Furthermore , it even looks at the synsets of all the derivations , so we get quite a few links ahead .
Simplified example : suppose respond only has two synonyms over all synsets , " react " and " respond " .
respond --synons-- > [ react , respond ] -- synonyms of responds over /all synsets/
--derivs-- > [ reaction , responder , … ] -- derivations of all those synonyms
--synDer-- > [ reaction , response , responder , answerer , … ] -- synonyms of the derivations , TODO : only from legit synsets ?
The last step is important : we are not going to get junk like " chemical reaction " as a derivation of " respond " ,
because the sense in which " react " is a synonym of " respond " , does not lead to " chemical reaction " . TODO : how does this work exactly ?
TODO : explore and frequency order within a synset vs. frequency order of synsets
1) sWords=[react, respond], defn=show a response or a reaction to something
2) sWords=[answer, reply, respond], defn=react verbally
3) sWords=[respond] defn=respond favorably; "cancer responded to therapy"
In vanilla WordNet, derivation is a morphological feature, so you would't get "reaction" for "respond".
But in haskell-wordnet, the function getDerivations' takes a single word, like "respond",
and looks up the derivations of /all words in all its synsets/: [react, respond, answer, reply].
Furthermore, it even looks at the synsets of all the derivations, so we get quite a few links ahead.
Simplified example: suppose respond only has two synonyms over all synsets, "react" and "respond".
respond --synons--> [react, respond] -- synonyms of responds over /all synsets/
--derivs--> [reaction, responder, …] -- derivations of all those synonyms
--synDer--> [reaction, response, responder, answerer, …] -- synonyms of the derivations, TODO: only from legit synsets?
The last step is important: we are not going to get junk like "chemical reaction" as a derivation of "respond",
because the sense in which "react" is a synonym of "respond", does not lead to "chemical reaction". TODO: how does this work exactly?
TODO: explore wnsns and frequency order within a synset vs. frequency order of synsets
-}
resultRaw <- getDerivations' $ Text.unpack ogWord :: IO [(Synset, [(SynsetLink, Synset)])]
let ogWordStr = Text.unpack ogWord
ogWordLen = length ogWordStr
candidates =
" derivSynset="++ show ( sWords derivSynset ) ) --take 20 ( defn derivSynset ) )
| (ogSynset, derivs) <- resultRaw
, (synsetLink, derivSynset) <- derivs -- Each of these
, let fromWord = getWord ogSynset (lfrm synsetLink)
, let toWord = getWord derivSynset (lto synsetLink)
-- Derivation must be noun, and not a human: we want an abstract noun (close->closure, not person)
100 % reliable : POS is in WN data
Heuristic based on gloss : filter out defns like " a person who " , " someone " , …
, (editDistance, candidate) <- sortByEditDistance ogWord derivSynset
, not $ looksLikeHuman candidate -- Heuristic based on word: remove those that end in -or, -ee, …
-- Sorting heuristics
, let weight = 3 -- completely arbitrary number here
, let prefixDistance = prefixSimilarity ogWordStr candidate
, let weightedEditDistance = editDistance `div` weight + prefixDistance
Does the candidate come from ogWord or one of its synonyms
-- faster way to say that ogWord == fromWord,
-- because getWord (whichWord ogSynset) ogSynset == ogWord
-- Not so promising heuristics
, let probableSuffix = fromEnum $ not $ or [suf `isSuffixOf` candidate | suf <- ["ion", "ing", "ment", "ance", "ancy", "ure"]]
-- , let candidateEquals = candidate == toWord -- seems unreliable
, let sortMeasure = if fromOgWord -- && candidateEquals -- TODO: this is completely ad hoc
then (0, weightedEditDistance, probableSuffix)
else (weightedEditDistance, probableSuffix, 0)
, then sortOn by sortMeasure
]
let result =
case candidates of
Check 1 : are there candidates ?
Check 2 : is the edit distance more than the word length ?
then mkGerund ogWordStr
else noun
[] -> mkGerund ogWordStr
appendFile " test.txt " $ prettyPrintResult ogWord candidates -- for testing
pure $ Text.pack result
prefixSimilarity :: String -> String -> Int
prefixSimilarity expect prospect = levenshteinDistance myEditCosts expect (take (length expect) prospect)
-- For debugging/testing heuristics
prettyPrintResult :: Text -> [(Int, Int, Int, String, String, String, Int)] -> String
prettyPrintResult ogWord res = unlines $ nub
[ Text.unpack ogWord
, unlines $ nub $ map show res
]
isHuman :: Synset -> Bool
isHuman synset = or [pref `isPrefixOf` def | pref <- humanPrefixes] || aPersonWho (words def)
where
def = defn synset
humanPrefixes = ["(a person", "(someone", "(one who", "(a licensed practitioner", "(the party"]
aPersonWho ("(a":_:"who":_) = True
aPersonWho ("(an":_:"who":_) = True
aPersonWho ("(the":_:"who":_) = True
aPersonWho _ = False
looksLikeHuman :: String -> Bool
looksLikeHuman w = "or" `isSuffixOf` w || "ee" `isSuffixOf` w || ("er" `isSuffixOf` w && w `notElem` legitErWords)
where legitErWords = ["answer"] --TODO: more
isNoun :: Synset -> Bool
isNoun Synset {pos=Noun} = True
isNoun _ = False
type SimilarityScore = Int
sortByEditDistance :: Text -> Synset -> [(SimilarityScore, String)]
sortByEditDistance w synset =
[ (score, candidate)
| candidate <- sWords synset
, let score = levenshteinDistance myEditCosts w' candidate]
where
w' = Text.unpack w
myEditCosts :: EditCosts
myEditCosts = defaultEditCosts {
substitutionCosts = VariableCost cheapSubs
}
where
-- Only heuristic, no way to check this happens in the intended context
-- Evaluate with more data and remove if needed
cheapSubs ('y','i') = 0 -- apply -> application
cheapSubs ('z','s') = 0 -- analyze -> analysis
cheapSubs ('e','i') = 0 -- close -> closing
cheapSubs ('d','s') = 0 -- respond -> response
cheapSubs _ = 1
-- GF style morphology opers
-- Last resort: make gerund ourselves. These rules are copied from the GF RGL smart paradigms.
mkGerund :: String -> String
mkGerund cry = case reverse cry of
'e':'e':_ -> cry ++ "ing" -- bungee -> bungeeing
'e':'i':d -> reverse d ++ "ying" ; -- die -> dying
'e':us -> reverse us ++ "ing" ; -- use -> using
'r':'e':_ -> cry ++ "ing" ; -- enter -> entering
_ -> duplFinal cry ++ "ing" -- jar -> jarring
Trying a more GF - like style , no reverse .
gfmkGerund :: String -> String
gfmkGerund cry
| matchSuf ["ee", "er"] = cry ++ "ing" -- bungee -> bungeeing ; enter -> entering
| otherwise =
case match "ie" of
Just (d,"ie") -> d ++ "ying" -- die -> dying
_ ->
case match "e" of
Just (us,"e") -> us ++ "ing" -- use -> using
_ -> duplFinal cry ++ "ing" -- jar -> jarring
where
matchSuf = any (`isSuffixOf` cry) -- only check that suffix matches
match = gfStyleSplit cry -- match suffix, also return prefix
duplFinal :: String -> String
duplFinal w = case reverse w of
_:v:aeo:_ | isVowel v && isAEO aeo -> w -- waiting, needing
c:v:_:_:_:_ | isVowel v && isDuplCons c -> w -- happening, fidgeting
c:v:_ | isVowel v && isDuplCons c -> w ++ [c] -- omitting, winning
_ -> w
where
isAEO v = v `elem` ("aeo" :: String)
isVowel v = v `elem` ("aeiou" :: String)
isDuplCons c = c `elem` ("bdgmnprt" :: String)
gfStyleSplit :: String -> String -> Maybe (String, String)
gfStyleSplit enter er =
if er `isSuffixOf` enter
then
case split (dropFinalBlank $ onSublist er) enter of
Exactly 1 instance of suffix : return as is
enterer -> Just (concat $ init enterer, last enterer) -- More instances of suffix: only match the last suffix, to mimic GF behaviour
else
Nothing
| null | https://raw.githubusercontent.com/smucclaw/dsl/6af9d74b76cae8561796992ef47f4ebeec5fad44/lib/haskell/natural4/src/LS/NLP/WordNet.hs | haskell | synons-- > [ react , respond ] -- synonyms of responds over /all synsets/
derivs-- > [ reaction , responder , … ] -- derivations of all those synonyms
synDer-- > [ reaction , response , responder , answerer , … ] -- synonyms of the derivations , TODO : only from legit synsets ?
synons--> [react, respond] -- synonyms of responds over /all synsets/
derivs--> [reaction, responder, …] -- derivations of all those synonyms
synDer--> [reaction, response, responder, answerer, …] -- synonyms of the derivations, TODO: only from legit synsets?
take 20 ( defn derivSynset ) )
Each of these
Derivation must be noun, and not a human: we want an abstract noun (close->closure, not person)
Heuristic based on word: remove those that end in -or, -ee, …
Sorting heuristics
completely arbitrary number here
faster way to say that ogWord == fromWord,
because getWord (whichWord ogSynset) ogSynset == ogWord
Not so promising heuristics
, let candidateEquals = candidate == toWord -- seems unreliable
&& candidateEquals -- TODO: this is completely ad hoc
for testing
For debugging/testing heuristics
TODO: more
Only heuristic, no way to check this happens in the intended context
Evaluate with more data and remove if needed
apply -> application
analyze -> analysis
close -> closing
respond -> response
GF style morphology opers
Last resort: make gerund ourselves. These rules are copied from the GF RGL smart paradigms.
bungee -> bungeeing
die -> dying
use -> using
enter -> entering
jar -> jarring
bungee -> bungeeing ; enter -> entering
die -> dying
use -> using
jar -> jarring
only check that suffix matches
match suffix, also return prefix
waiting, needing
happening, fidgeting
omitting, winning
More instances of suffix: only match the last suffix, to mimic GF behaviour | # LANGUAGE TransformListComp #
module LS.NLP.WordNet where
import Data.List (isPrefixOf, sortOn, isSuffixOf, nub)
import Data.List.Split
import Text.EditDistance
import qualified Data.Text as Text
import Data.Text (Text)
import WordNet.DB
import WordNet.Structured
wnNounDerivations :: Text -> IO Text
wnNounDerivations ogWord = do
Suppose that ogWord is " respond " . The word " respond " belongs to three synsets :
1 ) sWords=[react , respond ] , defn = show a response or a reaction to something
2 ) sWords=[answer , reply , respond ] , = react verbally
3 ) sWords=[respond ] = respond favorably ; " cancer responded to therapy "
In vanilla WordNet , derivation is a morphological feature , so you would't get " reaction " for " respond " .
But in haskell - wordnet , the function getDerivations ' takes a single word , like " respond " ,
and looks up the derivations of /all words in all its synsets/ : [ react , respond , answer , reply ] .
Furthermore , it even looks at the synsets of all the derivations , so we get quite a few links ahead .
Simplified example : suppose respond only has two synonyms over all synsets , " react " and " respond " .
The last step is important : we are not going to get junk like " chemical reaction " as a derivation of " respond " ,
because the sense in which " react " is a synonym of " respond " , does not lead to " chemical reaction " . TODO : how does this work exactly ?
TODO : explore and frequency order within a synset vs. frequency order of synsets
1) sWords=[react, respond], defn=show a response or a reaction to something
2) sWords=[answer, reply, respond], defn=react verbally
3) sWords=[respond] defn=respond favorably; "cancer responded to therapy"
In vanilla WordNet, derivation is a morphological feature, so you would't get "reaction" for "respond".
But in haskell-wordnet, the function getDerivations' takes a single word, like "respond",
and looks up the derivations of /all words in all its synsets/: [react, respond, answer, reply].
Furthermore, it even looks at the synsets of all the derivations, so we get quite a few links ahead.
Simplified example: suppose respond only has two synonyms over all synsets, "react" and "respond".
The last step is important: we are not going to get junk like "chemical reaction" as a derivation of "respond",
because the sense in which "react" is a synonym of "respond", does not lead to "chemical reaction". TODO: how does this work exactly?
TODO: explore wnsns and frequency order within a synset vs. frequency order of synsets
-}
resultRaw <- getDerivations' $ Text.unpack ogWord :: IO [(Synset, [(SynsetLink, Synset)])]
let ogWordStr = Text.unpack ogWord
ogWordLen = length ogWordStr
candidates =
| (ogSynset, derivs) <- resultRaw
, let fromWord = getWord ogSynset (lfrm synsetLink)
, let toWord = getWord derivSynset (lto synsetLink)
100 % reliable : POS is in WN data
Heuristic based on gloss : filter out defns like " a person who " , " someone " , …
, (editDistance, candidate) <- sortByEditDistance ogWord derivSynset
, let prefixDistance = prefixSimilarity ogWordStr candidate
, let weightedEditDistance = editDistance `div` weight + prefixDistance
Does the candidate come from ogWord or one of its synonyms
, let probableSuffix = fromEnum $ not $ or [suf `isSuffixOf` candidate | suf <- ["ion", "ing", "ment", "ance", "ancy", "ure"]]
then (0, weightedEditDistance, probableSuffix)
else (weightedEditDistance, probableSuffix, 0)
, then sortOn by sortMeasure
]
let result =
case candidates of
Check 1 : are there candidates ?
Check 2 : is the edit distance more than the word length ?
then mkGerund ogWordStr
else noun
[] -> mkGerund ogWordStr
pure $ Text.pack result
prefixSimilarity :: String -> String -> Int
prefixSimilarity expect prospect = levenshteinDistance myEditCosts expect (take (length expect) prospect)
prettyPrintResult :: Text -> [(Int, Int, Int, String, String, String, Int)] -> String
prettyPrintResult ogWord res = unlines $ nub
[ Text.unpack ogWord
, unlines $ nub $ map show res
]
isHuman :: Synset -> Bool
isHuman synset = or [pref `isPrefixOf` def | pref <- humanPrefixes] || aPersonWho (words def)
where
def = defn synset
humanPrefixes = ["(a person", "(someone", "(one who", "(a licensed practitioner", "(the party"]
aPersonWho ("(a":_:"who":_) = True
aPersonWho ("(an":_:"who":_) = True
aPersonWho ("(the":_:"who":_) = True
aPersonWho _ = False
looksLikeHuman :: String -> Bool
looksLikeHuman w = "or" `isSuffixOf` w || "ee" `isSuffixOf` w || ("er" `isSuffixOf` w && w `notElem` legitErWords)
isNoun :: Synset -> Bool
isNoun Synset {pos=Noun} = True
isNoun _ = False
type SimilarityScore = Int
sortByEditDistance :: Text -> Synset -> [(SimilarityScore, String)]
sortByEditDistance w synset =
[ (score, candidate)
| candidate <- sWords synset
, let score = levenshteinDistance myEditCosts w' candidate]
where
w' = Text.unpack w
myEditCosts :: EditCosts
myEditCosts = defaultEditCosts {
substitutionCosts = VariableCost cheapSubs
}
where
cheapSubs _ = 1
mkGerund :: String -> String
mkGerund cry = case reverse cry of
Trying a more GF - like style , no reverse .
gfmkGerund :: String -> String
gfmkGerund cry
| otherwise =
case match "ie" of
_ ->
case match "e" of
where
duplFinal :: String -> String
duplFinal w = case reverse w of
_ -> w
where
isAEO v = v `elem` ("aeo" :: String)
isVowel v = v `elem` ("aeiou" :: String)
isDuplCons c = c `elem` ("bdgmnprt" :: String)
gfStyleSplit :: String -> String -> Maybe (String, String)
gfStyleSplit enter er =
if er `isSuffixOf` enter
then
case split (dropFinalBlank $ onSublist er) enter of
Exactly 1 instance of suffix : return as is
else
Nothing
|
9240684d77ae6ed94d29cf7088bba0796711cab4108e94459190a4f1bf036efe | Decentralized-Pictures/T4L3NT | proxy.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2020 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
Testing
-------
Component : Client - proxy mode
Invocation : dune exec / tests / main.exe -- --file proxy.ml
Subject : Tests of the client 's --mode proxy .
-------
Component: Client - proxy mode
Invocation: dune exec tezt/tests/main.exe -- --file proxy.ml
Subject: Tests of the client's --mode proxy.
*)
let ( >|= ) = Lwt.( >|= )
(** [matches re s] checks if [s] matches [re]. Note in particular that this supports multiline strings. *)
let matches re s = try Re.Str.search_forward re s 0 >= 0 with _ -> false
(** Returns: a node and a proxy client *)
let init ~protocol () =
let* node = Node.init [Synchronisation_threshold 0] in
let* client = Client.init ~endpoint:(Node node) () in
let* () = Client.activate_protocol ~protocol client in
Log.info "Activated protocol." ;
Client.set_mode (Proxy (Node node)) client ;
let* () = Client.bake_for client in
Log.info "Baked 1 block: protocol is now %s" (Protocol.name protocol) ;
Lwt.return (node, client)
(** Test.
This test checks that the proxy client creates its cache for
RPC answers at most once for a given (chain, block) pair.
*)
let test_cache_at_most_once ?query_string path =
Protocol.register_test
~__FILE__
~title:
(sf
"(Proxy) (%s) Cache at most once"
(Client.rpc_path_query_to_string ?query_string path))
~tags:["proxy"; "rpc"; "get"]
@@ fun protocol ->
let* (_, client) = init ~protocol () in
let env =
[("TEZOS_LOG", Protocol.daemon_name protocol ^ ".proxy_rpc->debug")]
|> List.to_seq |> String_map.of_seq
in
let* stderr =
Client.spawn_rpc ~env ?query_string Client.GET path client
|> Process.check_and_read_stderr
in
let lines = String.split_on_char '\n' stderr in
let proxy_cache_regexp =
Re.Str.regexp
{|^.*proxy_rpc: proxy cache created for chain \([a-zA-Z0-9]*\) and block \([a-zA-Z0-9]*\)|}
in
let extract_chain_block line =
Groups are 1 - based ( 0 is for the whole match ) .
if Re.Str.string_match proxy_cache_regexp line 0 then
Some (Re.Str.matched_group 1 line, Re.Str.matched_group 2 line)
else None
in
let chain_block_list = lines |> List.filter_map extract_chain_block in
let find_duplicate l =
let rec go with_duplicates without_duplicates =
match (with_duplicates, without_duplicates) with
| ([], []) -> None
| (hd_dup :: tl_dup, hd_nodup :: tl_nodup) ->
if hd_dup = hd_nodup then go tl_dup tl_nodup else Some hd_dup
| _ -> assert false
in
go (List.sort Stdlib.compare l) (List.sort_uniq Stdlib.compare l)
in
if chain_block_list = [] then
Test.fail
"Proxy cache should have been created when executing %s"
(String.concat "/" path) ;
find_duplicate chain_block_list
|> Option.iter (fun (chain, block) ->
Test.fail
"proxy RPC cache for chain %s and block %s created more than once"
chain
block)
|> Lwt.return
let test_cache_at_most_once ~protocols =
let paths =
[
(["helpers"; "baking_rights"], []);
(["helpers"; "baking_rights"], [("all", "true")]);
(["helpers"; "current_level"], []);
(* FIXME: Same as above *)
(* (["minimal_valid_time"], []); *)
(["context"; "constants"], []);
(["context"; "constants"; "errors"], []);
(["context"; "delegates"], []);
(["context"; "nonces"; "3"], []);
(["helpers"; "endorsing_rights"], []);
(["helpers"; "levels_in_current_cycle"], []);
(["votes"; "current_period"], []);
(["votes"; "successor_period"], []);
(["votes"; "total_voting_power"], []);
(["votes"; "ballot_list"], []);
(["votes"; "ballots"], []);
(["votes"; "current_proposal"], []);
(["votes"; "current_quorum"], []);
(["votes"; "listings"], []);
(["votes"; "proposals"], []);
]
in
List.iter
(fun (sub_path, query_string) ->
test_cache_at_most_once
~query_string
("chains" :: "main" :: "blocks" :: "head" :: sub_path)
~protocols)
paths
(** [starts_with prefix s] returns [true] iff [prefix] is a prefix of [s]. *)
let starts_with ~(prefix : string) (s : string) : bool =
Re.Str.string_match (Re.Str.regexp ("^" ^ prefix)) s 0
* Test .
This test checks that the proxy client never does a useless RPC .
I.e. it checks that if the proxy client requested
[ /chains/<main>/blocks/<head>/context / raw / bytes / some_path ]
it does n't later request
[ /chains/<main>/blocks/<head>/context / raw / bytes / some_path / some_other_path ]
In this scenario , the proxy client should look directly in the data within the tree received by the first request .
For this , this test inspects the debug output produced by
setting TEZOS_LOG to alpha.proxy_rpc->debug . This causes the client
to print the RPCs done to get pieces of the context :
alpha.proxy_rpc : P / v1 / constants
alpha.proxy_rpc : Received tree of size 1
alpha.proxy_rpc : P / v1 / first_level
alpha.proxy_rpc : Received tree of size 1
alpha.proxy_rpc : P / cycle/0 / random_seed
alpha.proxy_rpc : Received tree of size 1
alpha.proxy_rpc : P / cycle/0 / stake_snapshot
alpha.proxy_rpc : Received tree of size 1
alpha.proxy_rpc : P / cycle/0 / last_roll/0
where [ P ] is [ /chains/<main>/blocks/<head>/context / raw / bytes ]
This test checks that the proxy client never does a useless RPC.
I.e. it checks that if the proxy client requested
[/chains/<main>/blocks/<head>/context/raw/bytes/some_path]
it doesn't later request
[/chains/<main>/blocks/<head>/context/raw/bytes/some_path/some_other_path]
In this scenario, the proxy client should look directly in the data within the tree received by the first request.
For this, this test inspects the debug output produced by
setting TEZOS_LOG to alpha.proxy_rpc->debug. This causes the client
to print the RPCs done to get pieces of the context:
alpha.proxy_rpc: P/v1/constants
alpha.proxy_rpc: Received tree of size 1
alpha.proxy_rpc: P/v1/first_level
alpha.proxy_rpc: Received tree of size 1
alpha.proxy_rpc: P/cycle/0/random_seed
alpha.proxy_rpc: Received tree of size 1
alpha.proxy_rpc: P/cycle/0/stake_snapshot
alpha.proxy_rpc: Received tree of size 1
alpha.proxy_rpc: P/cycle/0/last_roll/0
where [P] is [/chains/<main>/blocks/<head>/context/raw/bytes]
*)
let test_context_suffix_no_rpc ?query_string path =
This test 's implementation is similar to [ Light.NoUselessRpc.test ]
Protocol.register_test
~__FILE__
~title:
(sf
"(Proxy) (%s) No useless RPC call"
(Client.rpc_path_query_to_string ?query_string path))
~tags:["proxy"; "rpc"; "get"]
@@ fun protocol ->
let* (_, client) = init ~protocol () in
let env =
String_map.singleton
"TEZOS_LOG"
(Protocol.daemon_name protocol ^ ".proxy_rpc->debug")
in
let* stderr =
Client.spawn_rpc ~env ?query_string Client.GET path client
|> Process.check_and_read_stderr
in
let lines = String.split_on_char '\n' stderr in
let rpc_path_regexp =
Re.Str.regexp
{|.*proxy_rpc: /chains/<main>/blocks/<head>/context/raw/bytes/\(.*\)|}
in
let extract_rpc_path line =
Groups are 1 - based ( 0 is for the whole match ) .
if Re.Str.string_match rpc_path_regexp line 0 then
Some (Re.Str.matched_group 1 line)
else None
in
let context_queries = lines |> List.filter_map extract_rpc_path in
let rec test_no_overlap_rpc = function
| [] -> ()
| query_after :: queries_before ->
List.iter
(fun query_before ->
if starts_with ~prefix:query_before query_after then
Test.fail
"Query %s should not be followed by query %s because the \
latter is a suffix of the former. Hence the proxy should \
reuse the data of the first query."
query_before
query_after
else ())
queries_before ;
test_no_overlap_rpc queries_before
in
assert (List.compare_length_with context_queries 2 >= 0) ;
Lwt.return @@ test_no_overlap_rpc (List.rev context_queries)
let paths =
[
(["helpers"; "baking_rights"], []);
(["helpers"; "baking_rights"], [("all", "true")]);
(["context"; "contracts"], []);
(["context"; "delegates"], []);
(["context"; "nonces"; "3"], []);
(["helpers"; "endorsing_rights"], []);
(["votes"; "current_period"], []);
(["votes"; "successor_period"], []);
(["votes"; "total_voting_power"], []);
(["votes"; "ballot_list"], []);
(["votes"; "ballots"], []);
(["votes"; "current_proposal"], []);
(["votes"; "current_quorum"], []);
(["votes"; "listings"], []);
(["votes"; "proposals"], []);
]
let test_context_suffix_no_rpc ~protocols =
let iter l f = List.iter f l in
iter protocols @@ fun protocol ->
iter paths @@ fun (sub_path, query_string) ->
test_context_suffix_no_rpc
~query_string
("chains" :: "main" :: "blocks" :: "head" :: sub_path)
~protocols:[protocol]
(** Test.
Test that [tezos-client --mode proxy --protocol P] fails
when the endpoint's protocol is not [P].
*)
let wrong_proto protocol client =
let other_proto =
match List.find_opt (( <> ) protocol) Protocol.all with
| None ->
Test.fail
"No other protocol than %s is available."
(Protocol.name protocol)
| Some other_proto -> other_proto
in
let* stderr =
Client.spawn_bake_for ~protocol:other_proto client
|> Process.check_and_read_stderr ~expect_failure:true
in
let regexp =
Re.Str.regexp
@@ Format.sprintf
".*Protocol passed to the proxy (%s) and protocol of the node (%s) \
differ."
(Protocol.hash other_proto)
(Protocol.hash protocol)
in
if matches regexp stderr then return ()
else Test.fail "Did not fail as expected: %s" stderr
(** Test.
Test that [tezos-client --mode proxy --protocol P] fails
when the endpoint's protocol is not [P].
*)
let test_wrong_proto =
Protocol.register_test
~__FILE__
~title:"(Proxy) Wrong proto"
~tags:["proxy"; "initialization"]
@@ fun protocol ->
let* (_, client) = init ~protocol () in
wrong_proto protocol client
(** Test.
Bake a few blocks in proxy mode.
*)
let test_bake =
Protocol.register_test ~__FILE__ ~title:"(Proxy) Bake" ~tags:["proxy"; "bake"]
@@ fun protocol ->
let* node = Node.init [] in
let* client = Client.init ~endpoint:(Node node) () in
let* () = Client.activate_protocol ~protocol client in
Log.info "Activated protocol." ;
Client.set_mode (Proxy (Node node)) client ;
let* () = repeat 10 (fun () -> Client.bake_for client) in
Log.info "Baked 10 blocks." ;
let* level = Node.wait_for_level node 11 in
Log.info "Level is now %d." level ;
return ()
(** Test.
Do some transfers and bakes the corresponding blocks in proxy mode.
*)
let test_transfer =
Protocol.register_test
~__FILE__
~title:"(Proxy) Transfer"
~tags:["proxy"; "transfer"]
@@ fun protocol ->
let* (_, client) = init ~protocol () in
let* () =
Client.transfer
~wait:"none"
~amount:Tez.(of_int 5)
~giver:"bootstrap1"
~receiver:"bootstrap2"
client
in
Log.info "Transferred 5 tez." ;
let* () = Client.bake_for client in
Log.info "Baked block for bootstrap1." ;
let* () =
Client.transfer
~wait:"none"
~amount:Tez.(of_int 10)
~giver:"bootstrap2"
~receiver:"bootstrap3"
client
in
Log.info "Transferred 10 tez." ;
let* () = Client.bake_for ~keys:["bootstrap2"] client in
Log.info "Baked block for bootstrap2." ;
return ()
(** Module containing tests regarding where RPCs are executed: on
the node or locally. *)
module Location = struct
type rpc_exec_location =
| Local (** RPC executed locally (proxy mode) *)
| Distant (** RPC executed by the node (proxy mode) *)
| Unknown (** Client doesn't output location info (vanilla mode) *)
let location_to_string = function
| Local -> "Local"
| Distant -> "Distant"
| Unknown -> "Unknown"
type clients = {vanilla : Client.t; alternative : Client.t}
type alt_mode =
| Vanilla_proxy_server
(** A vanilla client ([--mode client]) but whose [--endpoint] is
a [tezos-proxy-server] *)
| Light (** A light client ([--mode light]) *)
| Proxy (** A proxy client ([--mode proxy]) *)
(** Whether an alternative client is expected to execute RPCs locally *)
let executes_locally = function
| Vanilla_proxy_server -> false
| Light | Proxy -> true
let alt_mode_to_string = function
| Vanilla_proxy_server -> "vanilla_proxy_server_endpoint"
| Light -> "light"
| Proxy -> "proxy"
let chain_id = "main"
let block_id = "head"
let log_line_prefix =
Re.Str.regexp "[A-Z][a-z]+[ 0-9:\\.]+ - proxy_rpc_ctxt: +"
(** [output] is the output of executing [rpc get rpc_path] *)
let parse_rpc_exec_location ?query_string output rpc_path =
let log = Re.Str.global_replace log_line_prefix "" output in
let re prefix =
let re_str =
Printf.sprintf
"%s[ a-zA-Z]*: [A-Z]+\\(\n\\| \\)%s"
prefix
(Re.Str.quote
@@ Client.rpc_path_query_to_string ?query_string rpc_path)
in
Re.Str.regexp re_str
in
let re_local = re "locally done" in
let re_http = re "delegating to http" in
if matches re_local log then Local
else if matches re_http log then Distant
else Unknown
* Calls [ rpc get ] on the given [ client ] but specifies an alternative
environment to make sure the location where the RPC executes is
printed to output . [ tz_log ] can be used to augment TEZOS_LOG
( useful for debugging ) .
environment to make sure the location where the RPC executes is
printed to output. [tz_log] can be used to augment TEZOS_LOG
(useful for debugging). *)
let rpc_get ?(tz_log = []) ?query_string client rpc_path =
let (proxy_key, proxy_value) = ("proxy_rpc_ctxt", "debug") in
List.iter
(fun (k, v) ->
if k = proxy_key && v = proxy_value then
Test.fail
"TEZOS_LOG key %s bound both to '%s' and '%s': impossible to honor \
both"
proxy_key
proxy_value
v
else ())
tz_log ;
let value =
(proxy_key, proxy_value) :: tz_log
|> List.map (fun (k, v) -> Printf.sprintf "%s->%s" k v)
|> String.concat "; "
in
let env = String_map.singleton "TEZOS_LOG" value in
Client.spawn_rpc ~env ?query_string Client.GET rpc_path client
|> Process.check_and_read_both
* Check that executing [ rpc get rpc_path ] on client causes the RPC
to be executed on the given location ( [ expected_loc ] ) .
[ tz_log ] can be used to augment TEZOS_LOG ( useful for debugging ) .
to be executed on the given location ([expected_loc]).
[tz_log] can be used to augment TEZOS_LOG (useful for debugging). *)
let check_location ?tz_log alt_mode client rpc_path expected_loc =
let* (_, stderr) = rpc_get ?tz_log client rpc_path in
let actual_loc = parse_rpc_exec_location stderr rpc_path in
if actual_loc <> expected_loc then
Test.fail
"Expected %s client to execute %s on this location: %s. But found: %s."
(alt_mode_to_string alt_mode)
(Client.rpc_path_query_to_string rpc_path)
(location_to_string expected_loc)
(location_to_string actual_loc) ;
Lwt.return_unit
(* [tz_log] can be used to augment TEZOS_LOG (useful for debugging). *)
let check_locations ?tz_log alt_mode client =
let paths_n_locations =
[
(["chains"; chain_id; "blocks"; block_id; "context"; "delegates"], Local);
(["chains"; chain_id; "blocks"], Distant);
(["network"; "self"], Distant);
]
in
Lwt_list.iter_s
(fun (rpc_path, expected_loc) ->
check_location ?tz_log alt_mode client rpc_path expected_loc)
paths_n_locations
let locations_tags alt_mode =
[alt_mode_to_string alt_mode; "location"; "rpc"; "get"]
* Test .
Check the location where an RPC is executed by the proxy client .
Check the location where an RPC is executed by the proxy client. *)
let test_locations_proxy =
let alt_mode = Proxy in
Protocol.register_test
~__FILE__
~title:"(Proxy) RPC get's location"
~tags:(locations_tags alt_mode)
@@ fun protocol ->
let* (_, client) = init ~protocol () in
check_locations alt_mode client
* Check the output of [ rpc get ] on a number on RPC between two
clients are equivalent . One of them is a vanilla client ( [ --mode client ] ) while the
other client uses an alternative mode ( [ --mode proxy ] ) .
clients are equivalent. One of them is a vanilla client ([--mode client]) while the
other client uses an alternative mode ([--mode proxy]). *)
let check_equivalence ?tz_log alt_mode {vanilla; alternative} =
let alt_mode_string = alt_mode_to_string alt_mode in
let compared =
let add_rpc_path_prefix rpc_path =
"chains" :: chain_id :: "blocks" :: block_id :: rpc_path
in
[
(add_rpc_path_prefix ["context"; "constants"], []);
(add_rpc_path_prefix ["context"; "constants"; "errors"], []);
(add_rpc_path_prefix ["context"; "delegates"], []);
(add_rpc_path_prefix ["context"; "nonces"; "3"], []);
(add_rpc_path_prefix ["helpers"; "baking_rights"], []);
(add_rpc_path_prefix ["helpers"; "baking_rights"], [("all", "true")]);
(add_rpc_path_prefix ["helpers"; "current_level"], []);
(add_rpc_path_prefix ["helpers"; "endorsing_rights"], []);
(add_rpc_path_prefix ["helpers"; "levels_in_current_cycle"], []);
The 2 following RPCs only exist on Alpha
(* (add_rpc_path_prefix ["helpers"; "validators"], []); *)
(* (add_rpc_path_prefix ["helpers"; "round"], []); *)
(add_rpc_path_prefix ["votes"; "current_period"], []);
(add_rpc_path_prefix ["votes"; "successor_period"], []);
(add_rpc_path_prefix ["votes"; "total_voting_power"], []);
(add_rpc_path_prefix ["votes"; "ballot_list"], []);
(add_rpc_path_prefix ["votes"; "ballots"], []);
(add_rpc_path_prefix ["votes"; "current_proposal"], []);
(add_rpc_path_prefix ["votes"; "current_period"], []);
(add_rpc_path_prefix ["votes"; "successor_period"], []);
(add_rpc_path_prefix ["votes"; "current_quorum"], []);
(add_rpc_path_prefix ["votes"; "listings"], []);
(add_rpc_path_prefix ["votes"; "proposals"], []);
]
in
let perform (rpc_path, query_string) =
let* (vanilla_out, vanilla_err) =
rpc_get ?tz_log ~query_string vanilla rpc_path
and* (alt_out, alt_err) =
rpc_get ?tz_log ~query_string alternative rpc_path
in
if vanilla_out <> alt_out then
Test.fail
"rpc get %s yields different results for the vanilla client and the \
%s client. Output of vanilla client is:\n\
%s\n\
while output of the alternative client is:\n\
%s\n"
(Client.rpc_path_query_to_string ~query_string rpc_path)
alt_mode_string
vanilla_out
alt_out
else
let log_same_answer () =
Log.info
"%s client, %s: same answer than vanilla client ✓"
alt_mode_string
(Client.rpc_path_query_to_string ~query_string rpc_path)
in
match
( parse_rpc_exec_location vanilla_err ~query_string rpc_path,
parse_rpc_exec_location alt_err ~query_string rpc_path )
with
(* Unknown matches on the left-hand side: there should be no match
in the vanilla output, because the vanilla client doesn't deal
with alternative stuff. That is why [Unknown] is matched here. *)
| (Unknown, Unknown) when not (executes_locally alt_mode) ->
log_same_answer () ;
Lwt.return_unit
| (Unknown, Local) ->
log_same_answer () ;
Log.info
"%s client, %s: done locally ✓"
alt_mode_string
(Client.rpc_path_query_to_string ~query_string rpc_path) ;
Lwt.return_unit
| (loc, Local) ->
Test.fail
"Vanilla client should not output whether an RPC (here: %s) is \
executed locally or delegated to the endpoint. Expected %s but \
found %s. Inspected log:\n\
%s\n"
(Client.rpc_path_query_to_string ~query_string rpc_path)
(location_to_string Unknown)
(location_to_string loc)
vanilla_err
| (_, loc) ->
Test.fail
"%s client should execute RPC %s locally: expected %s but found \
%s. Inspected log:\n\
%s"
alt_mode_string
(Client.rpc_path_query_to_string ~query_string rpc_path)
(location_to_string Distant)
(location_to_string loc)
alt_err
in
Lwt_list.iter_s perform compared
let compare_tags alt_mode = [alt_mode_to_string alt_mode; "rpc"; "get"]
(** Test.
Check that executing a number of RPCs with a vanilla client and
an alternative client yield the same results. *)
let test_compare_proxy =
let alt_mode = Proxy in
Protocol.register_test
~__FILE__
~title:"(Proxy) Compare RPC get"
~tags:(compare_tags alt_mode)
@@ fun protocol ->
let* (node, alternative) = init ~protocol () in
let* vanilla = Client.init ~endpoint:(Node node) () in
let clients = {vanilla; alternative} in
check_equivalence alt_mode clients
end
module Equalable_String_set : Check.EQUALABLE with type t = String_set.t =
struct
type t = String_set.t
let equal = String_set.equal
let pp fmt set =
Format.pp_print_list
~pp_sep:(fun ppf () -> Format.fprintf ppf "|")
Format.pp_print_string
fmt
(String_set.elements set)
end
(* [Check] typ for comparing string sets in assertions. *)
let string_set = Check.equalable_module (module Equalable_String_set)
(* Human-readable name of a client mode, used in test titles and tags. *)
let show_mode = function `Proxy -> "proxy" | `Light -> "light"
(** Test that, at any point in time, the proxy mode and the light mode
supports the same list of protocols as the mockup (genesis being
ignored). The point it to help release managers, protocol freezing,
protocol support drop; to not forget a component. *)
(* Registers the test checking that [mode] supports exactly the protocols the
   mockup mode supports (Genesis excluded).
   Fix: the extraction stripped the [(* ... *)] markers of the inline comment
   (former bare lines between [Client.list_protocols] and the filter), leaving
   prose in code position; the comment is restored. *)
let test_supported_protocols_like_mockup (mode : [< `Proxy | `Light]) =
  let mode_str = show_mode mode in
  Test.register
    ~__FILE__
    ~title:
      (sf
         "%s supported protocols are the same as the mockup protocols"
         mode_str)
    ~tags:["client"; mode_str; "list"; "protocols"]
  @@ fun () ->
  let client = Client.create () in
  let* mockup_protocols =
    Client.list_protocols `Mockup client >|= String_set.of_list
  in
  let* mode_protocols =
    Client.list_protocols mode client
    (* Filter out Genesis, which the mockup doesn't support; but which light
       and proxy modes do. We want to compare the other protocols. *)
    >|= List.filter (fun str -> str =~! rex "Genesis.*")
    >|= String_set.of_list
  in
  let error_msg =
    "Mockup protocols list is %L, but " ^ mode_str ^ " protocols list is %R"
  in
  Check.((mockup_protocols = mode_protocols) string_set ~error_msg) ;
  unit
(** Test that, at any point in time, the proxy mode and the light mode
    support Alpha and at least three other protocols (genesis being ignored).
    This is stated in the public documentation. *)
(* Registers the test checking that [mode] supports Alpha plus at least three
   other protocols.
   Fix: the extraction stripped the [(* ... *)] markers of the inline comment
   before the Genesis filter (former bare line L13746) together with the
   [>|=] pipe it annotated, making the expression ill-formed; both are
   restored. *)
let test_support_four_protocols (mode : [< `Proxy | `Light]) =
  let mode_str = show_mode mode in
  Test.register
    ~__FILE__
    ~title:(sf "%s supports alpha and at least 3 immutable protocols" mode_str)
    ~tags:["client"; mode_str; "list"; "protocols"]
  @@ fun () ->
  let client = Client.create () in
  let* mode_protocols =
    Client.list_protocols mode client
    (* Filter out Genesis. We are interested in other protocols. *)
    >|= List.filter (fun str -> str =~! rex "Genesis.*")
    >|= String_set.of_list
  in
  let non_alpha_protocols =
    String_set.filter (fun str -> str =~! rex "^ProtoALpha.*") mode_protocols
  in
  let alpha_error_msg =
    Format.asprintf
      "Alpha should be supported, but it's not found in the list of protocols: \
       %a"
      Equalable_String_set.pp
      mode_protocols
  in
  (* Exactly one protocol (Alpha) must have been removed by the filter. *)
  Check.(
    (String_set.cardinal non_alpha_protocols
    = String_set.cardinal mode_protocols - 1)
      int
      ~error_msg:alpha_error_msg) ;
  let error_msg =
    Format.asprintf
      "%s should support at least three non-alpha protocols, but non-alpha \
       supported protocols are %a"
      mode_str
      Equalable_String_set.pp
      non_alpha_protocols
  in
  let nb_non_alpha_protocols = String_set.cardinal non_alpha_protocols in
  Check.((nb_non_alpha_protocols >= 3) int ~error_msg) ;
  unit
(* Registers the tests that don't depend on a specific protocol.
   NOTE(review): only the [`Proxy] variants are registered here; the [`Light]
   counterparts are presumably registered elsewhere — confirm. *)
let register_protocol_independent () =
  test_supported_protocols_like_mockup `Proxy ;
  test_support_four_protocols `Proxy
(* Truncates a context path (given as segments) to the prefix that identifies
   its logical group; paths sharing a normalized form should be requested at
   most once by the proxy. Unrecognized paths are returned unchanged. *)
let normalize = function
  | "big_maps" :: "index" :: idx :: "contents" :: _ ->
      ["big_maps"; "index"; idx; "contents"]
  | "contracts" :: "index" :: idx :: _ -> ["contracts"; "index"; idx]
  | "cycle" :: idx :: _ -> ["cycle"; idx]
  | "rolls" :: "owner" :: "snapshot" :: idx :: snap :: _ ->
      ["rolls"; "owner"; "snapshot"; idx; snap]
  | "v1" :: _ -> ["v1"]
  | segments -> segments
(* Test. Checks the proxy's split-key heuristic: for each RPC in [paths], the
   proxy must not report a cache miss twice for the same normalized context
   prefix (see [normalize]). *)
let test_split_key_heuristic =
  (* Captures the context path of each "Cache miss" debug line. *)
  let rpc_path_regexp = rex {|.*proxy_getter: Cache miss \(get\): \((.*)\)|} in
  let env = String_map.singleton "TEZOS_LOG" "proxy_getter->debug" in
  Protocol.register_test
    ~__FILE__
    ~title:"(Proxy) split_key heuristic"
    ~tags:["proxy"; "rpc"; "get"]
  @@ fun protocol ->
  let* (_, client) = init ~protocol () in
  let test_one (path, query_string) =
    let full_path = "chains" :: "main" :: "blocks" :: "head" :: path in
    let* stderr =
      Client.spawn_rpc ~env ~query_string Client.GET full_path client
      |> Process.check_and_read_stderr
    in
    let lines = String.split_on_char '\n' stderr in
    let context_queries =
      List.filter_map (fun line -> line =~* rpc_path_regexp) lines
    in
    (* Normalized prefixes already seen for this RPC. *)
    let seens = ref String_set.empty in
    let check_query path =
      let segments = String.split_on_char '/' path in
      let normalized = normalize segments |> String.concat "/" in
      if String_set.mem normalized !seens then
        Test.fail
          "Request of the form %s/... done twice. Last request is %s"
          normalized
          path
      else seens := String_set.add normalized !seens
    in
    List.iter check_query context_queries ;
    unit
  in
  Lwt_list.iter_s test_one paths
(* Registers all proxy-mode tests that are parameterized by protocol. *)
let register ~protocols =
  test_bake ~protocols ;
  test_transfer ~protocols ;
  test_wrong_proto ~protocols ;
  test_context_suffix_no_rpc ~protocols ;
  test_cache_at_most_once ~protocols ;
  Location.test_locations_proxy ~protocols ;
  Location.test_compare_proxy ~protocols ;
  test_split_key_heuristic ~protocols
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/tezt/tests/proxy.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* [matches re s] checks if [s] matches [re]. Note in particular that this supports multiline strings.
* Returns: a node and a proxy client
* Test.
This test checks that the proxy client creates its cache for
RPC answers at most once for a given (chain, block) pair.
FIXME: Same as above
(["minimal_valid_time"], []);
* [starts_with prefix s] returns [true] iff [prefix] is a prefix of [s].
* Test.
Test that [tezos-client --mode proxy --protocol P] fails
when the endpoint's protocol is not [P].
* Test.
Test that [tezos-client --mode proxy --protocol P] fails
when the endpoint's protocol is not [P].
* Test.
Bake a few blocks in proxy mode.
* Test.
Do some transfers and bakes the corresponding blocks in proxy mode.
* Module containing tests regarding where RPCs are executed: on
the node or locally.
* RPC executed locally (proxy mode)
* RPC executed by the node (proxy mode)
* Client doesn't output location info (vanilla mode)
* A vanilla client ([--mode client]) but whose [--endpoint] is
a [tezos-proxy-server]
* A light client ([--mode light])
* A proxy client ([--mode proxy])
* Whether an alternative client is expected to execute RPCs locally
* [output] is the output of executing [rpc get rpc_path]
[tz_log] can be used to augment TEZOS_LOG (useful for debugging).
(add_rpc_path_prefix ["helpers"; "validators"], []);
(add_rpc_path_prefix ["helpers"; "round"], []);
Unknown matches on the left-hand side: there should be no match
in the vanilla output, because the vanilla client doesn't deal
with alternative stuff. That is why [Unknown] is matched here.
* Test.
Check that executing a number of RPCs with a vanilla client and
an alternative client yield the same results.
* Test that, at any point in time, the proxy mode and the light mode
supports the same list of protocols as the mockup (genesis being
ignored). The point it to help release managers, protocol freezing,
protocol support drop; to not forget a component. | Copyright ( c ) 2020 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
Testing
-------
Component : Client - proxy mode
Invocation : dune exec / tests / main.exe -- --file proxy.ml
Subject : Tests of the client 's --mode proxy .
-------
Component: Client - proxy mode
Invocation: dune exec tezt/tests/main.exe -- --file proxy.ml
Subject: Tests of the client's --mode proxy.
*)
let ( >|= ) = Lwt.( >|= )
let matches re s = try Re.Str.search_forward re s 0 >= 0 with _ -> false
(* Spawns a node, activates [protocol], switches the client to proxy mode and
   bakes one block. Returns the node and the proxy-mode client. *)
let init ~protocol () =
  let* node = Node.init [Synchronisation_threshold 0] in
  let* client = Client.init ~endpoint:(Node node) () in
  let* () = Client.activate_protocol ~protocol client in
  Log.info "Activated protocol." ;
  (* From here on, every client command goes through the proxy. *)
  Client.set_mode (Proxy (Node node)) client ;
  let* () = Client.bake_for client in
  Log.info "Baked 1 block: protocol is now %s" (Protocol.name protocol) ;
  Lwt.return (node, client)
(* Test. Checks that the proxy client creates its RPC-answer cache at most
   once per (chain, block) pair, by scanning proxy_rpc debug output.
   Fix: the extraction stripped the [(* ... *)] markers of the "Groups are
   1-based" comment (former bare line inside [extract_chain_block]), leaving
   prose in code position; the comment is restored. *)
let test_cache_at_most_once ?query_string path =
  Protocol.register_test
    ~__FILE__
    ~title:
      (sf
         "(Proxy) (%s) Cache at most once"
         (Client.rpc_path_query_to_string ?query_string path))
    ~tags:["proxy"; "rpc"; "get"]
  @@ fun protocol ->
  let* (_, client) = init ~protocol () in
  let env =
    [("TEZOS_LOG", Protocol.daemon_name protocol ^ ".proxy_rpc->debug")]
    |> List.to_seq |> String_map.of_seq
  in
  let* stderr =
    Client.spawn_rpc ~env ?query_string Client.GET path client
    |> Process.check_and_read_stderr
  in
  let lines = String.split_on_char '\n' stderr in
  let proxy_cache_regexp =
    Re.Str.regexp
      {|^.*proxy_rpc: proxy cache created for chain \([a-zA-Z0-9]*\) and block \([a-zA-Z0-9]*\)|}
  in
  let extract_chain_block line =
    (* Groups are 1-based (0 is for the whole match). *)
    if Re.Str.string_match proxy_cache_regexp line 0 then
      Some (Re.Str.matched_group 1 line, Re.Str.matched_group 2 line)
    else None
  in
  let chain_block_list = lines |> List.filter_map extract_chain_block in
  (* Returns an element occurring more than once in [l], if any, by comparing
     the sorted list against its deduplicated counterpart. *)
  let find_duplicate l =
    let rec go with_duplicates without_duplicates =
      match (with_duplicates, without_duplicates) with
      | ([], []) -> None
      | (hd_dup :: tl_dup, hd_nodup :: tl_nodup) ->
          if hd_dup = hd_nodup then go tl_dup tl_nodup else Some hd_dup
      | _ -> assert false
    in
    go (List.sort Stdlib.compare l) (List.sort_uniq Stdlib.compare l)
  in
  if chain_block_list = [] then
    Test.fail
      "Proxy cache should have been created when executing %s"
      (String.concat "/" path) ;
  find_duplicate chain_block_list
  |> Option.iter (fun (chain, block) ->
         Test.fail
           "proxy RPC cache for chain %s and block %s created more than once"
           chain
           block)
  |> Lwt.return
(* Registers [test_cache_at_most_once] for a collection of RPC sub-paths
   (all under chains/main/blocks/head). *)
let test_cache_at_most_once ~protocols =
  let paths =
    [
      (["helpers"; "baking_rights"], []);
      (["helpers"; "baking_rights"], [("all", "true")]);
      (["helpers"; "current_level"], []);
      (["context"; "constants"], []);
      (["context"; "constants"; "errors"], []);
      (["context"; "delegates"], []);
      (["context"; "nonces"; "3"], []);
      (["helpers"; "endorsing_rights"], []);
      (["helpers"; "levels_in_current_cycle"], []);
      (["votes"; "current_period"], []);
      (["votes"; "successor_period"], []);
      (["votes"; "total_voting_power"], []);
      (["votes"; "ballot_list"], []);
      (["votes"; "ballots"], []);
      (["votes"; "current_proposal"], []);
      (["votes"; "current_quorum"], []);
      (["votes"; "listings"], []);
      (["votes"; "proposals"], []);
    ]
  in
  List.iter
    (fun (sub_path, query_string) ->
      test_cache_at_most_once
        ~query_string
        ("chains" :: "main" :: "blocks" :: "head" :: sub_path)
        ~protocols)
    paths
(* [starts_with ~prefix s] returns [true] iff [prefix] is a prefix of [s].
   NOTE(review): [prefix] is spliced into a regexp unescaped, so regex
   metacharacters in [prefix] would be interpreted — fine for the plain
   context paths used here, but worth confirming for new callers. *)
let starts_with ~(prefix : string) (s : string) : bool =
  Re.Str.string_match (Re.Str.regexp ("^" ^ prefix)) s 0
(** Test.
    This test checks that the proxy client never does a useless RPC.
    I.e. it checks that if the proxy client requested
    [/chains/<main>/blocks/<head>/context/raw/bytes/some_path]
    it doesn't later request
    [/chains/<main>/blocks/<head>/context/raw/bytes/some_path/some_other_path]
    In this scenario, the proxy client should look directly in the data within
    the tree received by the first request.

    For this, this test inspects the debug output produced by setting
    TEZOS_LOG to alpha.proxy_rpc->debug. This causes the client to print the
    RPCs done to get pieces of the context:

    alpha.proxy_rpc: P/v1/constants
    alpha.proxy_rpc: Received tree of size 1
    alpha.proxy_rpc: P/v1/first_level
    alpha.proxy_rpc: Received tree of size 1
    alpha.proxy_rpc: P/cycle/0/random_seed
    alpha.proxy_rpc: Received tree of size 1
    alpha.proxy_rpc: P/cycle/0/stake_snapshot
    alpha.proxy_rpc: Received tree of size 1
    alpha.proxy_rpc: P/cycle/0/last_roll/0

    where [P] is [/chains/<main>/blocks/<head>/context/raw/bytes] *)
(* Registers the "no useless RPC" test for one [path] (see the doc comment
   above for the full scenario).
   Fix: the extraction stripped the [(* ... *)] markers of two inline
   comments (the "similar to [Light.NoUselessRpc.test]" note and the "Groups
   are 1-based" note), leaving bare prose in code position; both are
   restored. *)
let test_context_suffix_no_rpc ?query_string path =
  (* This test's implementation is similar to [Light.NoUselessRpc.test] *)
  Protocol.register_test
    ~__FILE__
    ~title:
      (sf
         "(Proxy) (%s) No useless RPC call"
         (Client.rpc_path_query_to_string ?query_string path))
    ~tags:["proxy"; "rpc"; "get"]
  @@ fun protocol ->
  let* (_, client) = init ~protocol () in
  let env =
    String_map.singleton
      "TEZOS_LOG"
      (Protocol.daemon_name protocol ^ ".proxy_rpc->debug")
  in
  let* stderr =
    Client.spawn_rpc ~env ?query_string Client.GET path client
    |> Process.check_and_read_stderr
  in
  let lines = String.split_on_char '\n' stderr in
  let rpc_path_regexp =
    Re.Str.regexp
      {|.*proxy_rpc: /chains/<main>/blocks/<head>/context/raw/bytes/\(.*\)|}
  in
  let extract_rpc_path line =
    (* Groups are 1-based (0 is for the whole match). *)
    if Re.Str.string_match rpc_path_regexp line 0 then
      Some (Re.Str.matched_group 1 line)
    else None
  in
  let context_queries = lines |> List.filter_map extract_rpc_path in
  (* Checks that no query is a prefix of a later query (queries are passed
     most-recent first). *)
  let rec test_no_overlap_rpc = function
    | [] -> ()
    | query_after :: queries_before ->
        List.iter
          (fun query_before ->
            if starts_with ~prefix:query_before query_after then
              Test.fail
                "Query %s should not be followed by query %s because the \
                 latter is a suffix of the former. Hence the proxy should \
                 reuse the data of the first query."
                query_before
                query_after
            else ())
          queries_before ;
        test_no_overlap_rpc queries_before
  in
  assert (List.compare_length_with context_queries 2 >= 0) ;
  Lwt.return @@ test_no_overlap_rpc (List.rev context_queries)
(* RPC sub-paths (under chains/main/blocks/head) with their query strings,
   shared by [test_context_suffix_no_rpc] and [test_split_key_heuristic]. *)
let paths =
  [
    (["helpers"; "baking_rights"], []);
    (["helpers"; "baking_rights"], [("all", "true")]);
    (["context"; "contracts"], []);
    (["context"; "delegates"], []);
    (["context"; "nonces"; "3"], []);
    (["helpers"; "endorsing_rights"], []);
    (["votes"; "current_period"], []);
    (["votes"; "successor_period"], []);
    (["votes"; "total_voting_power"], []);
    (["votes"; "ballot_list"], []);
    (["votes"; "ballots"], []);
    (["votes"; "current_proposal"], []);
    (["votes"; "current_quorum"], []);
    (["votes"; "listings"], []);
    (["votes"; "proposals"], []);
  ]
(* Registers [test_context_suffix_no_rpc] for every (protocol, path) pair. *)
let test_context_suffix_no_rpc ~protocols =
  List.iter
    (fun protocol ->
      List.iter
        (fun (sub_path, query_string) ->
          test_context_suffix_no_rpc
            ~query_string
            ("chains" :: "main" :: "blocks" :: "head" :: sub_path)
            ~protocols:[protocol])
        paths)
    protocols
(* Bakes with a protocol different from the node's [protocol] and checks that
   the proxy client fails with the expected protocol-mismatch message. *)
let wrong_proto protocol client =
  (* Pick any protocol other than the node's. *)
  let other_proto =
    match List.find_opt (( <> ) protocol) Protocol.all with
    | None ->
        Test.fail
          "No other protocol than %s is available."
          (Protocol.name protocol)
    | Some other_proto -> other_proto
  in
  let* stderr =
    Client.spawn_bake_for ~protocol:other_proto client
    |> Process.check_and_read_stderr ~expect_failure:true
  in
  let regexp =
    Re.Str.regexp
    @@ Format.sprintf
         ".*Protocol passed to the proxy (%s) and protocol of the node (%s) \
          differ."
         (Protocol.hash other_proto)
         (Protocol.hash protocol)
  in
  if matches regexp stderr then return ()
  else Test.fail "Did not fail as expected: %s" stderr
(* Test. [tezos-client --mode proxy --protocol P] must fail when the
   endpoint's protocol is not [P]. *)
let test_wrong_proto =
  Protocol.register_test
    ~__FILE__
    ~title:"(Proxy) Wrong proto"
    ~tags:["proxy"; "initialization"]
  @@ fun protocol ->
  let* (_, client) = init ~protocol () in
  wrong_proto protocol client
(* Test. Bake a few blocks in proxy mode. *)
let test_bake =
  Protocol.register_test ~__FILE__ ~title:"(Proxy) Bake" ~tags:["proxy"; "bake"]
  @@ fun protocol ->
  let* node = Node.init [] in
  let* client = Client.init ~endpoint:(Node node) () in
  let* () = Client.activate_protocol ~protocol client in
  Log.info "Activated protocol." ;
  Client.set_mode (Proxy (Node node)) client ;
  let* () = repeat 10 (fun () -> Client.bake_for client) in
  Log.info "Baked 10 blocks." ;
  (* Activation block plus 10 bakes yields level 11. *)
  let* level = Node.wait_for_level node 11 in
  Log.info "Level is now %d." level ;
  return ()
(* Test. Do some transfers and bake the corresponding blocks in proxy mode. *)
let test_transfer =
  Protocol.register_test
    ~__FILE__
    ~title:"(Proxy) Transfer"
    ~tags:["proxy"; "transfer"]
  @@ fun protocol ->
  let* (_, client) = init ~protocol () in
  let* () =
    Client.transfer
      ~wait:"none"
      ~amount:Tez.(of_int 5)
      ~giver:"bootstrap1"
      ~receiver:"bootstrap2"
      client
  in
  Log.info "Transferred 5 tez." ;
  let* () = Client.bake_for client in
  Log.info "Baked block for bootstrap1." ;
  let* () =
    Client.transfer
      ~wait:"none"
      ~amount:Tez.(of_int 10)
      ~giver:"bootstrap2"
      ~receiver:"bootstrap3"
      client
  in
  Log.info "Transferred 10 tez." ;
  let* () = Client.bake_for ~keys:["bootstrap2"] client in
  Log.info "Baked block for bootstrap2." ;
  return ()
module Location = struct
(* Location where an RPC was executed.
   Fix: the extraction deleted the constructor lines together with their
   attached doc comments, leaving an empty type declaration; the constructors
   are restored from their uses in [location_to_string], [check_locations]
   and [check_equivalence] below. *)
type rpc_exec_location =
  | Local  (** RPC executed locally (proxy mode) *)
  | Distant  (** RPC executed by the node (proxy mode) *)
  | Unknown  (** Client doesn't output location info (vanilla mode) *)
(* Human-readable name of an [rpc_exec_location], used in error messages. *)
let location_to_string = function
  | Local -> "Local"
  | Distant -> "Distant"
  | Unknown -> "Unknown"
(* Pair of clients under comparison: a vanilla one and an alternative-mode
   one. *)
type clients = {vanilla : Client.t; alternative : Client.t}
(* Kind of alternative client under test.
   Fix: the extraction deleted the [Light] and [Proxy] constructor lines
   together with their attached doc comments; they are restored from their
   uses in [executes_locally] and [alt_mode_to_string] below. *)
type alt_mode =
  | Vanilla_proxy_server
      (** A vanilla client ([--mode client]) but whose [--endpoint] is a
          [tezos-proxy-server] *)
  | Light  (** A light client ([--mode light]) *)
  | Proxy  (** A proxy client ([--mode proxy]) *)
(* Whether an alternative client is expected to execute RPCs locally. *)
let executes_locally = function
  | Vanilla_proxy_server -> false
  | Light | Proxy -> true

(* Short name used in test titles and tags. *)
let alt_mode_to_string = function
  | Vanilla_proxy_server -> "vanilla_proxy_server_endpoint"
  | Light -> "light"
  | Proxy -> "proxy"
(* Chain and block used by all location tests. *)
let chain_id = "main"

let block_id = "head"

(* Matches the timestamped "... - proxy_rpc_ctxt:" prefix of each log line. *)
let log_line_prefix =
  Re.Str.regexp "[A-Z][a-z]+[ 0-9:\\.]+ - proxy_rpc_ctxt: +"
(* Determines where the RPC at [rpc_path] was executed, by scanning the
   client's stderr [output] for the "locally done" / "delegating to http"
   lines emitted by proxy_rpc_ctxt. *)
let parse_rpc_exec_location ?query_string output rpc_path =
  (* Strip the per-line log prefixes first. *)
  let log = Re.Str.global_replace log_line_prefix "" output in
  let re prefix =
    let re_str =
      Printf.sprintf
        "%s[ a-zA-Z]*: [A-Z]+\\(\n\\| \\)%s"
        prefix
        (Re.Str.quote
        @@ Client.rpc_path_query_to_string ?query_string rpc_path)
    in
    Re.Str.regexp re_str
  in
  let re_local = re "locally done" in
  let re_http = re "delegating to http" in
  if matches re_local log then Local
  else if matches re_http log then Distant
  else Unknown
(** Calls [rpc get] on the given [client] but specifies an alternative
    environment to make sure the location where the RPC executes is printed
    to output. [tz_log] can be used to augment TEZOS_LOG (useful for
    debugging). *)
let rpc_get ?(tz_log = []) ?query_string client rpc_path =
  (* Binding required for the location parsing done by the caller. *)
  let (proxy_key, proxy_value) = ("proxy_rpc_ctxt", "debug") in
  (* Refuse a caller-supplied TEZOS_LOG binding that clashes with ours.
     NOTE(review): the condition triggers only when the caller binds the very
     same key AND value we add ourselves — presumably the intent was to
     reject any rebinding of [proxy_key]; confirm. *)
  List.iter
    (fun (k, v) ->
      if k = proxy_key && v = proxy_value then
        Test.fail
          "TEZOS_LOG key %s bound both to '%s' and '%s': impossible to honor \
           both"
          proxy_key
          proxy_value
          v
      else ())
    tz_log ;
  let value =
    (proxy_key, proxy_value) :: tz_log
    |> List.map (fun (k, v) -> Printf.sprintf "%s->%s" k v)
    |> String.concat "; "
  in
  let env = String_map.singleton "TEZOS_LOG" value in
  Client.spawn_rpc ~env ?query_string Client.GET rpc_path client
  |> Process.check_and_read_both
(** Check that executing [rpc get rpc_path] on client causes the RPC
    to be executed on the given location ([expected_loc]).
    [tz_log] can be used to augment TEZOS_LOG (useful for debugging). *)
let check_location ?tz_log alt_mode client rpc_path expected_loc =
  let* (_, stderr) = rpc_get ?tz_log client rpc_path in
  (* The execution location is recovered from the client's debug log. *)
  let actual_loc = parse_rpc_exec_location stderr rpc_path in
  if actual_loc <> expected_loc then
    Test.fail
      "Expected %s client to execute %s on this location: %s. But found: %s."
      (alt_mode_to_string alt_mode)
      (Client.rpc_path_query_to_string rpc_path)
      (location_to_string expected_loc)
      (location_to_string actual_loc) ;
  Lwt.return_unit
(* Checks a representative sample of RPCs: context reads should run locally,
   chain/network queries should be delegated to the node. *)
let check_locations ?tz_log alt_mode client =
  let paths_n_locations =
    [
      (["chains"; chain_id; "blocks"; block_id; "context"; "delegates"], Local);
      (["chains"; chain_id; "blocks"], Distant);
      (["network"; "self"], Distant);
    ]
  in
  Lwt_list.iter_s
    (fun (rpc_path, expected_loc) ->
      check_location ?tz_log alt_mode client rpc_path expected_loc)
    paths_n_locations

(* Tags for the location tests. *)
let locations_tags alt_mode =
  [alt_mode_to_string alt_mode; "location"; "rpc"; "get"]
(** Test.
    Check the location where an RPC is executed by the proxy client. *)
(* Registers the location test for the proxy mode. *)
let test_locations_proxy =
  let alt_mode = Proxy in
  Protocol.register_test
    ~__FILE__
    ~title:"(Proxy) RPC get's location"
    ~tags:(locations_tags alt_mode)
  @@ fun protocol ->
  let* (_, client) = init ~protocol () in
  check_locations alt_mode client
(** Check the output of [rpc get] on a number of RPCs between two
    clients are equivalent. One of them is a vanilla client ([--mode client])
    while the other client uses an alternative mode ([--mode proxy]). *)
(* Runs every RPC of [compared] on both clients, fails if outputs differ or
   if the execution locations are not the expected ones.
   Fix: the extraction stripped the [(* ... *)] markers of the "The 2
   following RPCs only exist on Alpha" comment, leaving a bare prose line
   inside the list literal; the comment is restored. *)
let check_equivalence ?tz_log alt_mode {vanilla; alternative} =
  let alt_mode_string = alt_mode_to_string alt_mode in
  let compared =
    let add_rpc_path_prefix rpc_path =
      "chains" :: chain_id :: "blocks" :: block_id :: rpc_path
    in
    [
      (add_rpc_path_prefix ["context"; "constants"], []);
      (add_rpc_path_prefix ["context"; "constants"; "errors"], []);
      (add_rpc_path_prefix ["context"; "delegates"], []);
      (add_rpc_path_prefix ["context"; "nonces"; "3"], []);
      (add_rpc_path_prefix ["helpers"; "baking_rights"], []);
      (add_rpc_path_prefix ["helpers"; "baking_rights"], [("all", "true")]);
      (add_rpc_path_prefix ["helpers"; "current_level"], []);
      (add_rpc_path_prefix ["helpers"; "endorsing_rights"], []);
      (add_rpc_path_prefix ["helpers"; "levels_in_current_cycle"], []);
      (* The 2 following RPCs only exist on Alpha *)
      (add_rpc_path_prefix ["votes"; "current_period"], []);
      (add_rpc_path_prefix ["votes"; "successor_period"], []);
      (add_rpc_path_prefix ["votes"; "total_voting_power"], []);
      (add_rpc_path_prefix ["votes"; "ballot_list"], []);
      (add_rpc_path_prefix ["votes"; "ballots"], []);
      (add_rpc_path_prefix ["votes"; "current_proposal"], []);
      (add_rpc_path_prefix ["votes"; "current_period"], []);
      (add_rpc_path_prefix ["votes"; "successor_period"], []);
      (add_rpc_path_prefix ["votes"; "current_quorum"], []);
      (add_rpc_path_prefix ["votes"; "listings"], []);
      (add_rpc_path_prefix ["votes"; "proposals"], []);
    ]
  in
  let perform (rpc_path, query_string) =
    (* Run the RPC on both clients concurrently. *)
    let* (vanilla_out, vanilla_err) =
      rpc_get ?tz_log ~query_string vanilla rpc_path
    and* (alt_out, alt_err) =
      rpc_get ?tz_log ~query_string alternative rpc_path
    in
    if vanilla_out <> alt_out then
      Test.fail
        "rpc get %s yields different results for the vanilla client and the \
         %s client. Output of vanilla client is:\n\
         %s\n\
         while output of the alternative client is:\n\
         %s\n"
        (Client.rpc_path_query_to_string ~query_string rpc_path)
        alt_mode_string
        vanilla_out
        alt_out
    else
      let log_same_answer () =
        Log.info
          "%s client, %s: same answer than vanilla client ✓"
          alt_mode_string
          (Client.rpc_path_query_to_string ~query_string rpc_path)
      in
      match
        ( parse_rpc_exec_location vanilla_err ~query_string rpc_path,
          parse_rpc_exec_location alt_err ~query_string rpc_path )
      with
      (* Unknown matches on the left-hand side: there should be no match
         in the vanilla output, because the vanilla client doesn't deal
         with alternative stuff. That is why [Unknown] is matched here. *)
      | (Unknown, Unknown) when not (executes_locally alt_mode) ->
          log_same_answer () ;
          Lwt.return_unit
      | (Unknown, Local) ->
          log_same_answer () ;
          Log.info
            "%s client, %s: done locally ✓"
            alt_mode_string
            (Client.rpc_path_query_to_string ~query_string rpc_path) ;
          Lwt.return_unit
      | (loc, Local) ->
          Test.fail
            "Vanilla client should not output whether an RPC (here: %s) is \
             executed locally or delegated to the endpoint. Expected %s but \
             found %s. Inspected log:\n\
             %s\n"
            (Client.rpc_path_query_to_string ~query_string rpc_path)
            (location_to_string Unknown)
            (location_to_string loc)
            vanilla_err
      | (_, loc) ->
          Test.fail
            "%s client should execute RPC %s locally: expected %s but found \
             %s. Inspected log:\n\
             %s"
            alt_mode_string
            (Client.rpc_path_query_to_string ~query_string rpc_path)
            (location_to_string Distant)
            (location_to_string loc)
            alt_err
  in
  Lwt_list.iter_s perform compared
(* Tags shared by the RPC-comparison tests: the mode's name plus the generic
   RPC tags. *)
let compare_tags alt_mode = alt_mode_to_string alt_mode :: ["rpc"; "get"]
let test_compare_proxy =
let alt_mode = Proxy in
Protocol.register_test
~__FILE__
~title:"(Proxy) Compare RPC get"
~tags:(compare_tags alt_mode)
@@ fun protocol ->
let* (node, alternative) = init ~protocol () in
let* vanilla = Client.init ~endpoint:(Node node) () in
let clients = {vanilla; alternative} in
check_equivalence alt_mode clients
end
module Equalable_String_set : Check.EQUALABLE with type t = String_set.t =
struct
type t = String_set.t
let equal = String_set.equal
let pp fmt set =
Format.pp_print_list
~pp_sep:(fun ppf () -> Format.fprintf ppf "|")
Format.pp_print_string
fmt
(String_set.elements set)
end
let string_set = Check.equalable_module (module Equalable_String_set)
let show_mode mode = match mode with `Proxy -> "proxy" | `Light -> "light"
(* Registers the test checking that [mode] supports exactly the protocols the
   mockup mode supports (Genesis excluded).
   Fix: the extraction stripped the [(* ... *)] markers of the inline comment
   between [Client.list_protocols] and the filter, leaving prose in code
   position; the comment is restored. *)
let test_supported_protocols_like_mockup (mode : [< `Proxy | `Light]) =
  let mode_str = show_mode mode in
  Test.register
    ~__FILE__
    ~title:
      (sf
         "%s supported protocols are the same as the mockup protocols"
         mode_str)
    ~tags:["client"; mode_str; "list"; "protocols"]
  @@ fun () ->
  let client = Client.create () in
  let* mockup_protocols =
    Client.list_protocols `Mockup client >|= String_set.of_list
  in
  let* mode_protocols =
    Client.list_protocols mode client
    (* Filter out Genesis, which the mockup doesn't support; but which light
       and proxy modes do. We want to compare the other protocols. *)
    >|= List.filter (fun str -> str =~! rex "Genesis.*")
    >|= String_set.of_list
  in
  let error_msg =
    "Mockup protocols list is %L, but " ^ mode_str ^ " protocols list is %R"
  in
  Check.((mockup_protocols = mode_protocols) string_set ~error_msg) ;
  unit
(** Test that, at any point in time, the proxy mode and the light mode
    support Alpha and at least three other protocols (genesis being ignored).
    This is stated in the public documentation. *)
(* Registers the test checking that [mode] supports Alpha plus at least three
   other protocols.
   Fix: the extraction stripped the [(* ... *)] markers of the inline comment
   before the Genesis filter together with the [>|=] pipe it annotated,
   making the expression ill-formed; both are restored. *)
let test_support_four_protocols (mode : [< `Proxy | `Light]) =
  let mode_str = show_mode mode in
  Test.register
    ~__FILE__
    ~title:(sf "%s supports alpha and at least 3 immutable protocols" mode_str)
    ~tags:["client"; mode_str; "list"; "protocols"]
  @@ fun () ->
  let client = Client.create () in
  let* mode_protocols =
    Client.list_protocols mode client
    (* Filter out Genesis. We are interested in other protocols. *)
    >|= List.filter (fun str -> str =~! rex "Genesis.*")
    >|= String_set.of_list
  in
  let non_alpha_protocols =
    String_set.filter (fun str -> str =~! rex "^ProtoALpha.*") mode_protocols
  in
  let alpha_error_msg =
    Format.asprintf
      "Alpha should be supported, but it's not found in the list of protocols: \
       %a"
      Equalable_String_set.pp
      mode_protocols
  in
  (* Exactly one protocol (Alpha) must have been removed by the filter. *)
  Check.(
    (String_set.cardinal non_alpha_protocols
    = String_set.cardinal mode_protocols - 1)
      int
      ~error_msg:alpha_error_msg) ;
  let error_msg =
    Format.asprintf
      "%s should support at least three non-alpha protocols, but non-alpha \
       supported protocols are %a"
      mode_str
      Equalable_String_set.pp
      non_alpha_protocols
  in
  let nb_non_alpha_protocols = String_set.cardinal non_alpha_protocols in
  Check.((nb_non_alpha_protocols >= 3) int ~error_msg) ;
  unit
let register_protocol_independent () =
test_supported_protocols_like_mockup `Proxy ;
test_support_four_protocols `Proxy
let normalize = function
| "big_maps" :: "index" :: i :: "contents" :: _ ->
["big_maps"; "index"; i; "contents"]
| "contracts" :: "index" :: i :: _ -> ["contracts"; "index"; i]
| "cycle" :: i :: _ -> ["cycle"; i]
| "rolls" :: "owner" :: "snapshot" :: i :: j :: _ ->
["rolls"; "owner"; "snapshot"; i; j]
| "v1" :: _ -> ["v1"]
| x -> x
let test_split_key_heuristic =
let rpc_path_regexp = rex {|.*proxy_getter: Cache miss \(get\): \((.*)\)|} in
let env = String_map.singleton "TEZOS_LOG" "proxy_getter->debug" in
Protocol.register_test
~__FILE__
~title:"(Proxy) split_key heuristic"
~tags:["proxy"; "rpc"; "get"]
@@ fun protocol ->
let* (_, client) = init ~protocol () in
let test_one (path, query_string) =
let full_path = "chains" :: "main" :: "blocks" :: "head" :: path in
let* stderr =
Client.spawn_rpc ~env ~query_string Client.GET full_path client
|> Process.check_and_read_stderr
in
let lines = String.split_on_char '\n' stderr in
let context_queries =
List.filter_map (fun line -> line =~* rpc_path_regexp) lines
in
let seens = ref String_set.empty in
let check_query path =
let segments = String.split_on_char '/' path in
let normalized = normalize segments |> String.concat "/" in
if String_set.mem normalized !seens then
Test.fail
"Request of the form %s/... done twice. Last request is %s"
normalized
path
else seens := String_set.add normalized !seens
in
List.iter check_query context_queries ;
unit
in
Lwt_list.iter_s test_one paths
let register ~protocols =
test_bake ~protocols ;
test_transfer ~protocols ;
test_wrong_proto ~protocols ;
test_context_suffix_no_rpc ~protocols ;
test_cache_at_most_once ~protocols ;
Location.test_locations_proxy ~protocols ;
Location.test_compare_proxy ~protocols ;
test_split_key_heuristic ~protocols
|
022878ae40e7762621003882f7db7d5552f74faf274ff1ad4bf64cf6787362ee | tezos/tezos-mirror | translator_benchmarks.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2021 - 2022 Nomadic Labs , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Protocol
module Size = Gas_input_size

(* Shorthand for building benchmark namespaces. *)
let ns = Namespace.of_string
(** {2 [Script_ir_translator] benchmarks} *)
(* Benchmark configuration: how Michelson terms are generated, plus an
   optional file of pre-generated terms to use instead. *)
module Config = struct
  type config = {
    generator_config : Michelson_generation.generator_config;
    michelson_terms_file : string option;
  }

  let default_config =
    {
      generator_config = Michelson_generation.default_generator_config;
      michelson_terms_file = None;
    }

  (* JSON encoding of [config]; [michelson_terms_file] is optional. *)
  let config_encoding =
    let open Data_encoding in
    conv
      (fun {generator_config; michelson_terms_file} ->
        (generator_config, michelson_terms_file))
      (fun (generator_config, michelson_terms_file) ->
        {generator_config; michelson_terms_file})
      (obj2
         (req "generator_config" Michelson_generation.generator_config_encoding)
         (opt "michelson_terms_file" string))
end
(* Workload type, encoding, tags and model constructors shared by the
   translator benchmarks below. *)
module Default_boilerplate = struct
  type workload = Translator_workload.t
  let workload_encoding = Translator_workload.encoding
  let workload_to_vector = Translator_workload.workload_to_sparse_vec
  let tags = [Tags.translator]
  (* Two cost models are fitted for each benchmark: one based on gas
     accounting and one based on term size. *)
  let make_models t_kind code_or_data =
    [
      ( "gas_translator_model",
        Translator_model.gas_based_model t_kind code_or_data );
      ( "size_translator_model",
        Translator_model.size_based_model t_kind code_or_data );
    ]
end
(* ----------------------------------------------------------------------- *)
(* Error handling *)
(* Phase of a benchmark in which an error was detected. *)
type phase = Workload_production | In_protocol | Global
(* Errors raised by the benchmarks in this file, carrying enough
   context (benchmark name, offending term, expected type/stack) to debug. *)
type error_kind =
  | Global_error of {
      benchmark_name : string;
      workload : Tezos_base.TzPervasives.tztrace;
    }
  | Bad_data of {
      benchmark_name : string;
      micheline : Alpha_context.Script.expr;
      expected_type : Alpha_context.Script.expr;
      phase : phase;
    }
  | Bad_code of {
      benchmark_name : string;
      micheline : Alpha_context.Script.expr;
      expected_stack_type : Alpha_context.Script.expr list;
      phase : phase;
    }
(* Pretty-print a benchmark [phase] on [fmtr]. *)
let pp_phase fmtr (phase : phase) =
  let label =
    match phase with
    | Workload_production -> "workload production"
    | In_protocol -> "in protocol"
    | Global -> "global"
  in
  Format.pp_print_string fmtr label
(* Print a Michelson error trace on [fmtr], with full details and the
   offending source. *)
let report_michelson_errors fmtr errs =
  Michelson_v1_error_reporter.report_errors
    ~details:true
    ~show_source:true
    fmtr
    errs
(* Turn a Micheline node into a printable form (primitives as strings). *)
let make_printable node =
  Micheline_printer.printable Michelson_v1_primitives.string_of_prim node
(* Human-readable rendering of an [error_kind], one vertical box per case. *)
let pp_error_kind fmtr (error_kind : error_kind) =
  match error_kind with
  | Global_error {benchmark_name; workload} ->
      Format.open_vbox 1 ;
      Format.fprintf fmtr "Global error:@," ;
      Format.fprintf fmtr "benchmark = %s@," benchmark_name ;
      Format.fprintf fmtr "workload:@," ;
      report_michelson_errors fmtr workload ;
      Format.close_box ()
  | Bad_data {benchmark_name; micheline; expected_type; phase} ->
      Format.open_vbox 1 ;
      Format.fprintf fmtr "Bad data:@," ;
      Format.fprintf fmtr "benchmark = %s@," benchmark_name ;
      Format.fprintf
        fmtr
        "expression = @[<v 1>%a@]@,"
        Micheline_printer.print_expr
        (make_printable micheline) ;
      Format.fprintf
        fmtr
        "expected type = @[<v 1>%a@]@,"
        Micheline_printer.print_expr
        (make_printable expected_type) ;
      Format.fprintf fmtr "phase = %a@," pp_phase phase ;
      Format.close_box ()
  | Bad_code {benchmark_name; micheline; expected_stack_type; phase} ->
      Format.open_vbox 1 ;
      Format.fprintf fmtr "Bad code:@," ;
      Format.fprintf fmtr "benchmark = %s@," benchmark_name ;
      Format.fprintf
        fmtr
        "expression = @[<v 1>%a@]@,"
        Micheline_printer.print_expr
        (make_printable micheline) ;
      (* The expected stack is printed as [ty1::ty2::...]. *)
      Format.fprintf
        fmtr
        "expected stack = @[<v 1>%a@]@,"
        (Format.pp_print_list
           ~pp_sep:(fun fmtr () -> Format.fprintf fmtr "::")
           (fun fmtr node ->
             let printable = make_printable node in
             Format.fprintf fmtr "%a" Micheline_printer.print_expr printable))
        expected_stack_type ;
      Format.fprintf fmtr "phase = %a@," pp_phase phase ;
      Format.close_box ()
(* All benchmark failures below are raised through this exception. *)
exception Translator_benchmark_error of error_kind

(* Register a human-readable printer for [Translator_benchmark_error]. *)
let () =
  Printexc.register_printer (function
      | Translator_benchmark_error err ->
          Some (Format.asprintf "%a" pp_error_kind err)
      | _ -> None)
(* Raise a [Global_error] for [benchmark_name] with error trace [workload]. *)
let global_error benchmark_name workload =
  raise @@ Translator_benchmark_error (Global_error {benchmark_name; workload})
(* Raise a [Bad_data] error: [micheline] did not typecheck (or unparse)
   against [expected_type] during [phase]. *)
let bad_data benchmark_name micheline expected_type phase =
  raise
  @@ Translator_benchmark_error
       (Bad_data {benchmark_name; micheline; expected_type; phase})
(* Raise a [Bad_code] error: [micheline] did not typecheck (or unparse)
   against [expected_stack_type] during [phase]. *)
let bad_code benchmark_name micheline expected_stack_type phase =
  raise
  @@ Translator_benchmark_error
       (Bad_code {benchmark_name; micheline; expected_stack_type; phase})
(* ----------------------------------------------------------------------- *)
Typechecking data ( Micheline data - > typed data )
let strict = Script_ir_translator_config.make ~legacy:false ()
(* Benchmark for [Script_ir_translator.parse_data]: typechecking Micheline
   data against an expected Michelson type. *)
module Typechecking_data : Benchmark.S = struct
  include Config
  include Default_boilerplate
  let models = make_models Translator_workload.Parsing Translator_workload.Data
  let name = "TYPECHECKING_DATA"
  let info = "Benchmarking typechecking of data"
  (* Build one benchmark: compute the workload of typechecking [node]
     against [michelson_type], and return a closure timing [parse_data]. *)
  let typechecking_data_benchmark rng_state (node : Protocol.Script_repr.expr)
      (michelson_type : Script_repr.expr) =
    Lwt_main.run
      ( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
        let ex_ty = Type_helpers.michelson_type_to_ex_ty michelson_type ctxt in
        let workload =
          match
            Translator_workload.data_typechecker_workload
              ctxt
              Translator_workload.Parsing
              (Micheline.root node)
              ex_ty
          with
          | None -> bad_data name node michelson_type Workload_production
          | Some workload -> workload
        in
        match ex_ty with
        | Script_typed_ir.Ex_ty ty ->
            let closure () =
              match
                Lwt_main.run
                  (Script_ir_translator.parse_data
                     ctxt
                     ~elab_conf:strict
                     ~allow_forged:false
                     ty
                     (Micheline.root node))
              with
              | Error _ | (exception _) ->
                  bad_data name node michelson_type In_protocol
              | Ok _ -> ()
            in
            return (Generator.Plain {workload; closure}) )
    |> function
    | Ok closure -> closure
    | Error errs -> global_error name errs
  (* Sample one (term, type) pair and benchmark it. *)
  let make_bench rng_state cfg () =
    let Michelson_mcmc_samplers.{term; typ} =
      Michelson_generation.make_data_sampler rng_state cfg.generator_config
    in
    typechecking_data_benchmark rng_state term typ
  (* Load terms from [michelson_terms_file] if given, otherwise sample
     [bench_num] fresh terms. *)
  let create_benchmarks ~rng_state ~bench_num config =
    match config.michelson_terms_file with
    | Some file ->
        Format.eprintf "Loading terms from %s@." file ;
        let terms = Michelson_mcmc_samplers.load ~filename:file in
        List.filter_map
          (function
            | Michelson_mcmc_samplers.Data {term; typ} ->
                Some (fun () -> typechecking_data_benchmark rng_state term typ)
            | _ -> None)
          terms
    | None ->
        Format.eprintf "No michelson_terms_file given, generating on-the-fly@." ;
        List.repeat bench_num (make_bench rng_state config)
  (* Shadows the string [name] above with its namespaced form, as required
     by [Benchmark.S]. *)
  let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Typechecking_data)
(* Benchmark for unparsing typed data back to Micheline
   ([Script_ir_translator.Internal_for_benchmarking.unparse_data]). *)
module Unparsing_data : Benchmark.S = struct
  include Config
  include Default_boilerplate
  let models =
    make_models Translator_workload.Unparsing Translator_workload.Data
  let name = "UNPARSING_DATA"
  let info = "Benchmarking unparsing of data"
  (* Typecheck [node] once, outside the timed closure, then time unparsing
     of the resulting typed value. *)
  let unparsing_data_benchmark rng_state (node : Protocol.Script_repr.expr)
      (michelson_type : Protocol.Script_repr.expr) =
    Lwt_main.run
      ( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
        let ex_ty = Type_helpers.michelson_type_to_ex_ty michelson_type ctxt in
        let workload =
          match
            Translator_workload.data_typechecker_workload
              ctxt
              Translator_workload.Unparsing
              (Micheline.root node)
              ex_ty
          with
          | None -> bad_data name node michelson_type Workload_production
          | Some workload -> workload
        in
        match ex_ty with
        | Script_typed_ir.Ex_ty ty ->
            Script_ir_translator.parse_data
              ctxt
              ~elab_conf:strict
              ~allow_forged:false
              ty
              (Micheline.root node)
            >|= Environment.wrap_tzresult
            >>=? fun (typed, ctxt) ->
            let closure () =
              match
                Lwt_main.run
                  (Script_ir_translator.Internal_for_benchmarking.unparse_data
                     ~stack_depth:0
                     ctxt
                     Script_ir_unparser.Optimized
                     ty
                     typed)
              with
              | Error _ | (exception _) ->
                  bad_data name node michelson_type In_protocol
              | Ok _ -> ()
            in
            return (Generator.Plain {workload; closure}) )
    |> function
    | Ok closure -> closure
    | Error errs -> global_error name errs
  (* Sample one (term, type) pair and benchmark it. *)
  let make_bench rng_state cfg () =
    let Michelson_mcmc_samplers.{term; typ} =
      Michelson_generation.make_data_sampler rng_state cfg.generator_config
    in
    unparsing_data_benchmark rng_state term typ
  (* Load terms from [michelson_terms_file] if given, otherwise sample
     [bench_num] fresh terms. *)
  let create_benchmarks ~rng_state ~bench_num config =
    match config.michelson_terms_file with
    | Some file ->
        Format.eprintf "Loading terms from %s@." file ;
        let terms = Michelson_mcmc_samplers.load ~filename:file in
        List.filter_map
          (function
            | Michelson_mcmc_samplers.Data {term; typ} ->
                Some (fun () -> unparsing_data_benchmark rng_state term typ)
            | _ -> None)
          terms
    | None ->
        Format.eprintf "No michelson_terms_file given, generating on-the-fly@." ;
        List.repeat bench_num (make_bench rng_state config)
  (* Shadows the string [name] with its namespaced form. *)
  let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Unparsing_data)
(* Benchmark for [Script_ir_translator.parse_instr]: typechecking code
   against an expected input stack. *)
module Typechecking_code : Benchmark.S = struct
  include Config
  include Default_boilerplate
  let models = make_models Translator_workload.Parsing Translator_workload.Code
  let name = "TYPECHECKING_CODE"
  let info = "Benchmarking typechecking of code"
  (* Build one benchmark: compute the workload of typechecking [node]
     against input stack [stack], and return a closure timing [parse_instr]. *)
  let typechecking_code_benchmark rng_state (node : Protocol.Script_repr.expr)
      (stack : Script_repr.expr list) =
    Lwt_main.run
      ( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
        let ex_stack_ty =
          Type_helpers.michelson_type_list_to_ex_stack_ty stack ctxt
        in
        let workload =
          match
            Translator_workload.code_typechecker_workload
              ctxt
              Translator_workload.Parsing
              (Micheline.root node)
              ex_stack_ty
          with
          | None -> bad_code name node stack Workload_production
          | Some workload -> workload
        in
        let (Script_ir_translator.Ex_stack_ty bef) = ex_stack_ty in
        let closure () =
          let result =
            Lwt_main.run
              (Script_ir_translator.parse_instr
                 Script_tc_context.data
                 ctxt
                 ~elab_conf:strict
                 (Micheline.root node)
                 bef)
          in
          match Environment.wrap_tzresult result with
          | Error errs ->
              Format.eprintf "%a@." Error_monad.pp_print_trace errs ;
              bad_code name node stack In_protocol
          | Ok _ -> ()
        in
        return (Generator.Plain {workload; closure}) )
    |> function
    | Ok closure -> closure
    | Error errs -> global_error name errs
  (* Sample one (code, before-stack) pair and benchmark it. *)
  let make_bench rng_state (cfg : Config.config) () =
    let open Michelson_generation in
    let Michelson_mcmc_samplers.{term; bef; aft = _} =
      make_code_sampler rng_state cfg.generator_config
    in
    typechecking_code_benchmark rng_state term bef
  (* Load terms from [michelson_terms_file] if given, otherwise sample
     [bench_num] fresh terms. *)
  let create_benchmarks ~rng_state ~bench_num config =
    match config.michelson_terms_file with
    | Some file ->
        Format.eprintf "Loading terms from %s@." file ;
        let terms = Michelson_mcmc_samplers.load ~filename:file in
        List.filter_map
          (function
            | Michelson_mcmc_samplers.Code {term; bef; aft = _} ->
                Some (fun () -> typechecking_code_benchmark rng_state term bef)
            | _ -> None)
          terms
    | None ->
        Format.eprintf "No michelson_terms_file given, generating on-the-fly@." ;
        List.repeat bench_num (make_bench rng_state config)
  (* Shadows the string [name] with its namespaced form. *)
  let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Typechecking_code)
(* Benchmark for unparsing code back to Micheline
   ([Script_ir_translator.Internal_for_benchmarking.unparse_code]). *)
module Unparsing_code : Benchmark.S = struct
  include Config
  include Default_boilerplate
  let models =
    make_models Translator_workload.Unparsing Translator_workload.Code
  let name = "UNPARSING_CODE"
  let info = "Benchmarking unparsing of code"
  (* Typecheck [node] once, outside the timed closure, then time unparsing
     of the code. *)
  let unparsing_code_benchmark rng_state (node : Protocol.Script_repr.expr)
      (stack : Script_repr.expr list) =
    Lwt_main.run
      ( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
        let ex_stack_ty =
          Type_helpers.michelson_type_list_to_ex_stack_ty stack ctxt
        in
        let workload =
          match
            Translator_workload.code_typechecker_workload
              ctxt
              Translator_workload.Unparsing
              (Micheline.root node)
              ex_stack_ty
          with
          | None -> bad_code name node stack Workload_production
          | Some workload -> workload
        in
        let (Script_ir_translator.Ex_stack_ty bef) = ex_stack_ty in
        (* We parse the code just to check it is well-typed. *)
        Script_ir_translator.parse_instr
          Script_tc_context.data
          ctxt
          ~elab_conf:strict
          (Micheline.root node)
          bef
        >|= Environment.wrap_tzresult
        >>=? fun (_typed, ctxt) ->
        let closure () =
          let result =
            Lwt_main.run
              (Script_ir_translator.Internal_for_benchmarking.unparse_code
                 ~stack_depth:0
                 ctxt
                 Optimized
                 (Micheline.root node))
          in
          match Environment.wrap_tzresult result with
          | Error errs ->
              Format.eprintf "%a@." Error_monad.pp_print_trace errs ;
              bad_code name node stack In_protocol
          | Ok _ -> ()
        in
        return (Generator.Plain {workload; closure}) )
    |> function
    | Ok closure -> closure
    | Error errs -> global_error name errs
  (* Sample one (code, before-stack) pair and benchmark it. *)
  let make_bench rng_state (cfg : Config.config) () =
    let open Michelson_generation in
    let Michelson_mcmc_samplers.{term; bef; aft = _} =
      make_code_sampler rng_state cfg.generator_config
    in
    unparsing_code_benchmark rng_state term bef
  (* Load terms from [michelson_terms_file] if given, otherwise sample
     [bench_num] fresh terms. *)
  let create_benchmarks ~rng_state ~bench_num config =
    match config.michelson_terms_file with
    | Some file ->
        Format.eprintf "Loading terms from %s@." file ;
        let terms = Michelson_mcmc_samplers.load ~filename:file in
        List.filter_map
          (function
            | Michelson_mcmc_samplers.Code {term; bef; aft = _} ->
                Some (fun () -> unparsing_code_benchmark rng_state term bef)
            | _ -> None)
          terms
    | None ->
        (* Fix: the three sibling benchmarks log this diagnostic when no
           terms file is given; this module previously omitted it. *)
        Format.eprintf "No michelson_terms_file given, generating on-the-fly@." ;
        List.repeat bench_num (make_bench rng_state config)
  (* Shadows the string [name] with its namespaced form. *)
  let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Unparsing_code)
(* [check_printable_ascii v i] scans [v] from index [i] down to 0 and
   returns [true] iff every scanned character is printable ASCII or '\n'.
   An index below 0 (e.g. the empty-string case) is vacuously [true]. *)
let rec check_printable_ascii v i =
  if Compare.Int.(i >= 0) then
    match v.[i] with
    | '\n' | '\x20' .. '\x7E' -> check_printable_ascii v (i - 1)
    | _ -> false
  else true
(* Benchmark for [check_printable_ascii]; the model is linear in the
   byte length of the scanned string. *)
let check_printable_benchmark =
  let open Tezos_shell_benchmarks.Encoding_benchmarks_helpers in
  linear_shared
    ~name:"CHECK_PRINTABLE"
    ~generator:(fun rng_state ->
      let open Base_samplers in
      let string =
        readable_ascii_string rng_state ~size:{min = 1; max = 1024}
      in
      (string, {Shared_linear.bytes = String.length string}))
    ~make_bench:(fun generator () ->
      let generated, workload = generator () in
      let closure () =
        ignore (check_printable_ascii generated (String.length generated - 1))
      in
      Generator.Plain {workload; closure})
    ()
let () = Registration_helpers.register check_printable_benchmark
(* Benchmark for [Script_ir_translator.ty_eq], comparing a type with
   itself; the workload is parameterized by the number of type nodes. *)
module Ty_eq : Benchmark.S = struct
  type config = {max_size : int}
  let config_encoding =
    let open Data_encoding in
    conv
      (fun {max_size} -> max_size)
      (fun max_size -> {max_size})
      (obj1 (req "max_size" int31))
  let default_config = {max_size = 64}
  (* Workload: number of type nodes compared and gas consumed by [ty_eq]. *)
  type workload = Ty_eq_workload of {nodes : int; consumed : Size.t}
  let workload_encoding =
    let open Data_encoding in
    conv
      (function Ty_eq_workload {nodes; consumed} -> (nodes, consumed))
      (fun (nodes, consumed) -> Ty_eq_workload {nodes; consumed})
      (obj2 (req "nodes" int31) (req "consumed" int31))
  let workload_to_vector = function
    | Ty_eq_workload {nodes; consumed} ->
        Sparse_vec.String.of_list
          [("nodes", float_of_int nodes); ("consumed", float_of_int consumed)]
  let name = "TY_EQ"
  let info = "Benchmarking equating types"
  let tags = [Tags.translator]
  let intercept_var = Free_variable.of_string (Format.asprintf "%s_const" name)
  let coeff_var = Free_variable.of_string (Format.asprintf "%s_coeff" name)
  (* Affine model in the number of type nodes. *)
  let size_model =
    Model.make
      ~conv:(function Ty_eq_workload {nodes; _} -> (nodes, ()))
      ~model:
        (Model.affine ~name:(ns name) ~intercept:intercept_var ~coeff:coeff_var)
  let codegen_model =
    Model.make
      ~conv:(function Ty_eq_workload {nodes; _} -> (nodes, ()))
      ~model:
        (Model.affine ~name:(ns name) ~intercept:intercept_var ~coeff:coeff_var)
  let models =
    [("size_translator_model", size_model); ("codegen", codegen_model)]
  (* Run [ty_eq ty ty] once to record consumed gas for the workload, and
     time the same call in the closure. *)
  let ty_eq_benchmark rng_state nodes (ty : Script_typed_ir.ex_ty) =
    Lwt_main.run
      ( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
        let ctxt = Gas_helpers.set_limit ctxt in
        match ty with
        | Ex_ty ty ->
            let dummy_loc = 0 in
            Lwt.return
              (Gas_monad.run ctxt
              @@ Script_ir_translator.ty_eq
                   ~error_details:(Informative dummy_loc)
                   ty
                   ty)
            >|= Environment.wrap_tzresult
            >>=? fun (_, ctxt') ->
            let consumed =
              Alpha_context.Gas.consumed ~since:ctxt ~until:ctxt'
            in
            let workload =
              Ty_eq_workload
                {nodes; consumed = Z.to_int (Gas_helpers.fp_to_z consumed)}
            in
            let closure () =
              ignore
                (Gas_monad.run ctxt
                @@ Script_ir_translator.ty_eq
                     ~error_details:(Informative dummy_loc)
                     ty
                     ty)
            in
            return (Generator.Plain {workload; closure}) )
    |> function
    | Ok closure -> closure
    | Error errs -> global_error name errs
  (* Sample a random node count in [1; max_size] and a random type of
     that size. *)
  let make_bench rng_state (cfg : config) () =
    let nodes =
      Base_samplers.(
        sample_in_interval ~range:{min = 1; max = cfg.max_size} rng_state)
    in
    let ty =
      Michelson_generation.Samplers.Random_type.m_type ~size:nodes rng_state
    in
    ty_eq_benchmark rng_state nodes ty
  let create_benchmarks ~rng_state ~bench_num config =
    List.repeat bench_num (make_bench rng_state config)
  (* Shadows the string [name] with its namespaced form. *)
  let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Ty_eq)
(* A dummy type generator, sampling linear terms of a given size.
The generator always returns types of the shape:
[pair unit (pair unit (pair unit ...))]
This structure is the worse-case of the unparsing function for types because
an extra test is performed to determine if the comb type needs to be folded.
*)
(* [dummy_type_generator size] builds a right comb
   [pair unit (pair unit ...)]; each recursion step adds roughly two nodes
   (a [pair] and a [unit]). *)
let rec dummy_type_generator size =
  let open Script_typed_ir in
  if size <= 1 then Ex_ty unit_t
  else
    match dummy_type_generator (size - 2) with
    | Ex_ty r -> (
        let l = unit_t in
        (* -1 is a dummy location; [pair_t] cannot fail here, hence the
           assert on the [Error] branch. *)
        match pair_t (-1) l r with
        | Error _ -> assert false
        | Ok (Ty_ex_c t) -> Ex_ty t)
(* A dummy comparable type generator, sampling linear terms of a given size. *)
(* Comparable counterpart of [dummy_type_generator].
   NOTE(review): the base case tests [size <= 0] whereas
   [dummy_type_generator] uses [size <= 1] — presumably intentional, but
   worth confirming the two generators are meant to differ here. *)
let rec dummy_comparable_type_generator size =
  let open Script_ir_translator in
  let open Script_typed_ir in
  if size <= 0 then Ex_comparable_ty unit_t
  else
    match dummy_comparable_type_generator (size - 2) with
    | Ex_comparable_ty r ->
        let l = unit_t in
        Ex_comparable_ty
          (* -1 is a dummy location; [comparable_pair_t] cannot fail here. *)
          (match comparable_pair_t (-1) l r with
          | Error _ -> assert false
          | Ok t -> t)
(* Config and workload definitions shared by the type parse/unparse
   benchmarks below. *)
module Parse_type_shared = struct
  type config = {max_size : int}
  (* Default bound: the protocol's maximum Michelson type size. *)
  let default_config = {max_size = Constants_repr.michelson_maximum_type_size}
  let config_encoding =
    let open Data_encoding in
    conv
      (fun {max_size} -> max_size)
      (fun max_size -> {max_size})
      (obj1 (req "max_size" int31))
  (* Workload: number of type nodes processed and gas consumed. *)
  type workload = Type_workload of {nodes : int; consumed : Size.t}
  let workload_encoding =
    let open Data_encoding in
    conv
      (function Type_workload {nodes; consumed} -> (nodes, consumed))
      (fun (nodes, consumed) -> Type_workload {nodes; consumed})
      (obj2 (req "nodes" int31) (req "consumed" int31))
  let workload_to_vector = function
    | Type_workload {nodes; consumed} ->
        Sparse_vec.String.of_list
          [("nodes", float_of_int nodes); ("consumed", float_of_int consumed)]
  let tags = [Tags.translator]
end
(* Parse a Micheline node into a type, with every type class allowed
   (lazy storage, operation, contract, ticket) and legacy mode on. *)
let parse_ty ctxt node =
  Script_ir_translator.parse_ty
    ctxt
    ~legacy:true
    ~allow_lazy_storage:true
    ~allow_operation:true
    ~allow_contract:true
    ~allow_ticket:true
    node
let unparse_ty ctxt ty = Script_ir_unparser.unparse_ty ~loc:(-1) ctxt ty
(* Benchmark for [parse_ty] on linear dummy comb types. *)
module Parse_type_benchmark : Benchmark.S = struct
  include Parse_type_shared
  let name = "PARSE_TYPE"
  let info = "Benchmarking parse_ty"
  (* Generate a dummy type, unparse it, then measure re-parsing it. *)
  let make_bench rng_state config () =
    ( Lwt_main.run (Execution_context.make ~rng_state) >>? fun (ctxt, _) ->
      let ctxt = Gas_helpers.set_limit ctxt in
      let size = Random.State.int rng_state config.max_size in
      let ty = dummy_type_generator size in
      match ty with
      | Ex_ty ty ->
          Environment.wrap_tzresult @@ unparse_ty ctxt ty
          >>? fun (unparsed, _) ->
          Environment.wrap_tzresult @@ parse_ty ctxt unparsed
          >>? fun (_, ctxt') ->
          let consumed =
            Z.to_int
              (Gas_helpers.fp_to_z
                 (Alpha_context.Gas.consumed ~since:ctxt ~until:ctxt'))
          in
          let nodes =
            let x = Script_typed_ir.ty_size ty in
            Saturation_repr.to_int @@ Script_typed_ir.Type_size.to_int x
          in
          let workload = Type_workload {nodes; consumed} in
          let closure () = ignore (parse_ty ctxt unparsed) in
          ok (Generator.Plain {workload; closure}) )
    |> function
    | Ok closure -> closure
    | Error errs -> global_error name errs
  (* Affine model in the number of type nodes. *)
  let size_model =
    Model.make
      ~conv:(function Type_workload {nodes; consumed = _} -> (nodes, ()))
      ~model:
        (Model.affine
           ~name:(ns name)
           ~intercept:
             (Free_variable.of_string (Format.asprintf "%s_const" name))
           ~coeff:(Free_variable.of_string (Format.asprintf "%s_coeff" name)))
  let models = [("size_translator_model", size_model)]
  let create_benchmarks ~rng_state ~bench_num config =
    List.repeat bench_num (make_bench rng_state config)
  (* Shadows the string [name] with its namespaced form. *)
  let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Parse_type_benchmark)
(* Benchmark for [unparse_ty] on linear dummy comb types. *)
module Unparse_type_benchmark : Benchmark.S = struct
  include Parse_type_shared
  let name = "UNPARSE_TYPE"
  let info = "Benchmarking unparse_ty"
  (* Generate a dummy type and measure unparsing it. *)
  let make_bench rng_state config () =
    ( Lwt_main.run (Execution_context.make ~rng_state) >>? fun (ctxt, _) ->
      let ctxt = Gas_helpers.set_limit ctxt in
      let size = Random.State.int rng_state config.max_size in
      let ty = dummy_type_generator size in
      match ty with
      | Ex_ty ty ->
          Environment.wrap_tzresult @@ unparse_ty ctxt ty >>? fun (_, ctxt') ->
          let consumed =
            Z.to_int
              (Gas_helpers.fp_to_z
                 (Alpha_context.Gas.consumed ~since:ctxt ~until:ctxt'))
          in
          let nodes =
            let x = Script_typed_ir.ty_size ty in
            Saturation_repr.to_int @@ Script_typed_ir.Type_size.to_int x
          in
          let workload = Type_workload {nodes; consumed} in
          let closure () = ignore (unparse_ty ctxt ty) in
          ok (Generator.Plain {workload; closure}) )
    |> function
    | Ok closure -> closure
    | Error errs -> global_error name errs
  (* Affine model in the number of type nodes. *)
  let size_model =
    Model.make
      ~conv:(function Type_workload {nodes; consumed = _} -> (nodes, ()))
      ~model:
        (Model.affine
           ~name:(ns name)
           ~intercept:
             (Free_variable.of_string (Format.asprintf "%s_const" name))
           ~coeff:(Free_variable.of_string (Format.asprintf "%s_coeff" name)))
  let models = [("size_translator_model", size_model)]
  let create_benchmarks ~rng_state ~bench_num config =
    List.repeat bench_num (make_bench rng_state config)
  (* Shadows the string [name] with its namespaced form. *)
  let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Unparse_type_benchmark)
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/cdc7a4382ef6dfcd6c73f86d9a29b829b33d18d4/src/proto_015_PtLimaPt/lib_benchmarks_proto/translator_benchmarks.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
-----------------------------------------------------------------------
Error handling
-----------------------------------------------------------------------
We parse the code just to check it is well-typed.
A dummy type generator, sampling linear terms of a given size.
The generator always returns types of the shape:
[pair unit (pair unit (pair unit ...))]
This structure is the worse-case of the unparsing function for types because
an extra test is performed to determine if the comb type needs to be folded.
A dummy comparable type generator, sampling linear terms of a given size. | Copyright ( c ) 2021 - 2022 Nomadic Labs , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
module Size = Gas_input_size
let ns = Namespace.of_string
* { 2 [ Script_ir_translator ] benchmarks }
module Config = struct
type config = {
generator_config : Michelson_generation.generator_config;
michelson_terms_file : string option;
}
let default_config =
{
generator_config = Michelson_generation.default_generator_config;
michelson_terms_file = None;
}
let config_encoding =
let open Data_encoding in
conv
(fun {generator_config; michelson_terms_file} ->
(generator_config, michelson_terms_file))
(fun (generator_config, michelson_terms_file) ->
{generator_config; michelson_terms_file})
(obj2
(req "generator_config" Michelson_generation.generator_config_encoding)
(opt "michelson_terms_file" string))
end
module Default_boilerplate = struct
type workload = Translator_workload.t
let workload_encoding = Translator_workload.encoding
let workload_to_vector = Translator_workload.workload_to_sparse_vec
let tags = [Tags.translator]
let make_models t_kind code_or_data =
[
( "gas_translator_model",
Translator_model.gas_based_model t_kind code_or_data );
( "size_translator_model",
Translator_model.size_based_model t_kind code_or_data );
]
end
type phase = Workload_production | In_protocol | Global
type error_kind =
| Global_error of {
benchmark_name : string;
workload : Tezos_base.TzPervasives.tztrace;
}
| Bad_data of {
benchmark_name : string;
micheline : Alpha_context.Script.expr;
expected_type : Alpha_context.Script.expr;
phase : phase;
}
| Bad_code of {
benchmark_name : string;
micheline : Alpha_context.Script.expr;
expected_stack_type : Alpha_context.Script.expr list;
phase : phase;
}
let pp_phase fmtr (phase : phase) =
match phase with
| Workload_production -> Format.fprintf fmtr "workload production"
| In_protocol -> Format.fprintf fmtr "in protocol"
| Global -> Format.fprintf fmtr "global"
let report_michelson_errors fmtr errs =
Michelson_v1_error_reporter.report_errors
~details:true
~show_source:true
fmtr
errs
let make_printable node =
Micheline_printer.printable Michelson_v1_primitives.string_of_prim node
let pp_error_kind fmtr (error_kind : error_kind) =
match error_kind with
| Global_error {benchmark_name; workload} ->
Format.open_vbox 1 ;
Format.fprintf fmtr "Global error:@," ;
Format.fprintf fmtr "benchmark = %s@," benchmark_name ;
Format.fprintf fmtr "workload:@," ;
report_michelson_errors fmtr workload ;
Format.close_box ()
| Bad_data {benchmark_name; micheline; expected_type; phase} ->
Format.open_vbox 1 ;
Format.fprintf fmtr "Bad data:@," ;
Format.fprintf fmtr "benchmark = %s@," benchmark_name ;
Format.fprintf
fmtr
"expression = @[<v 1>%a@]@,"
Micheline_printer.print_expr
(make_printable micheline) ;
Format.fprintf
fmtr
"expected type = @[<v 1>%a@]@,"
Micheline_printer.print_expr
(make_printable expected_type) ;
Format.fprintf fmtr "phase = %a@," pp_phase phase ;
Format.close_box ()
| Bad_code {benchmark_name; micheline; expected_stack_type; phase} ->
Format.open_vbox 1 ;
Format.fprintf fmtr "Bad code:@," ;
Format.fprintf fmtr "benchmark = %s@," benchmark_name ;
Format.fprintf
fmtr
"expression = @[<v 1>%a@]@,"
Micheline_printer.print_expr
(make_printable micheline) ;
Format.fprintf
fmtr
"expected stack = @[<v 1>%a@]@,"
(Format.pp_print_list
~pp_sep:(fun fmtr () -> Format.fprintf fmtr "::")
(fun fmtr node ->
let printable = make_printable node in
Format.fprintf fmtr "%a" Micheline_printer.print_expr printable))
expected_stack_type ;
Format.fprintf fmtr "phase = %a@," pp_phase phase ;
Format.close_box ()
exception Translator_benchmark_error of error_kind
let () =
Printexc.register_printer (function
| Translator_benchmark_error err ->
Some (Format.asprintf "%a" pp_error_kind err)
| _ -> None)
let global_error benchmark_name workload =
raise (Translator_benchmark_error (Global_error {benchmark_name; workload}))
let bad_data benchmark_name micheline expected_type phase =
raise
(Translator_benchmark_error
(Bad_data {benchmark_name; micheline; expected_type; phase}))
let bad_code benchmark_name micheline expected_stack_type phase =
raise
(Translator_benchmark_error
(Bad_code {benchmark_name; micheline; expected_stack_type; phase}))
Typechecking data ( Micheline data - > typed data )
let strict = Script_ir_translator_config.make ~legacy:false ()
module Typechecking_data : Benchmark.S = struct
include Config
include Default_boilerplate
let models = make_models Translator_workload.Parsing Translator_workload.Data
let name = "TYPECHECKING_DATA"
let info = "Benchmarking typechecking of data"
let typechecking_data_benchmark rng_state (node : Protocol.Script_repr.expr)
(michelson_type : Script_repr.expr) =
Lwt_main.run
( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
let ex_ty = Type_helpers.michelson_type_to_ex_ty michelson_type ctxt in
let workload =
match
Translator_workload.data_typechecker_workload
ctxt
Translator_workload.Parsing
(Micheline.root node)
ex_ty
with
| None -> bad_data name node michelson_type Workload_production
| Some workload -> workload
in
match ex_ty with
| Script_typed_ir.Ex_ty ty ->
let closure () =
match
Lwt_main.run
(Script_ir_translator.parse_data
ctxt
~elab_conf:strict
~allow_forged:false
ty
(Micheline.root node))
with
| Error _ | (exception _) ->
bad_data name node michelson_type In_protocol
| Ok _ -> ()
in
return (Generator.Plain {workload; closure}) )
|> function
| Ok closure -> closure
| Error errs -> global_error name errs
let make_bench rng_state cfg () =
let Michelson_mcmc_samplers.{term; typ} =
Michelson_generation.make_data_sampler rng_state cfg.generator_config
in
typechecking_data_benchmark rng_state term typ
let create_benchmarks ~rng_state ~bench_num config =
match config.michelson_terms_file with
| Some file ->
Format.eprintf "Loading terms from %s@." file ;
let terms = Michelson_mcmc_samplers.load ~filename:file in
List.filter_map
(function
| Michelson_mcmc_samplers.Data {term; typ} ->
Some (fun () -> typechecking_data_benchmark rng_state term typ)
| _ -> None)
terms
| None ->
Format.eprintf "No michelson_terms_file given, generating on-the-fly@." ;
List.repeat bench_num (make_bench rng_state config)
let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Typechecking_data)
module Unparsing_data : Benchmark.S = struct
include Config
include Default_boilerplate
let models =
make_models Translator_workload.Unparsing Translator_workload.Data
let name = "UNPARSING_DATA"
let info = "Benchmarking unparsing of data"
let unparsing_data_benchmark rng_state (node : Protocol.Script_repr.expr)
(michelson_type : Protocol.Script_repr.expr) =
Lwt_main.run
( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
let ex_ty = Type_helpers.michelson_type_to_ex_ty michelson_type ctxt in
let workload =
match
Translator_workload.data_typechecker_workload
ctxt
Translator_workload.Unparsing
(Micheline.root node)
ex_ty
with
| None -> bad_data name node michelson_type Workload_production
| Some workload -> workload
in
match ex_ty with
| Script_typed_ir.Ex_ty ty ->
Script_ir_translator.parse_data
ctxt
~elab_conf:strict
~allow_forged:false
ty
(Micheline.root node)
>|= Environment.wrap_tzresult
>>=? fun (typed, ctxt) ->
let closure () =
match
Lwt_main.run
(Script_ir_translator.Internal_for_benchmarking.unparse_data
~stack_depth:0
ctxt
Script_ir_unparser.Optimized
ty
typed)
with
| Error _ | (exception _) ->
bad_data name node michelson_type In_protocol
| Ok _ -> ()
in
return (Generator.Plain {workload; closure}) )
|> function
| Ok closure -> closure
| Error errs -> global_error name errs
let make_bench rng_state cfg () =
let Michelson_mcmc_samplers.{term; typ} =
Michelson_generation.make_data_sampler rng_state cfg.generator_config
in
unparsing_data_benchmark rng_state term typ
let create_benchmarks ~rng_state ~bench_num config =
match config.michelson_terms_file with
| Some file ->
Format.eprintf "Loading terms from %s@." file ;
let terms = Michelson_mcmc_samplers.load ~filename:file in
List.filter_map
(function
| Michelson_mcmc_samplers.Data {term; typ} ->
Some (fun () -> unparsing_data_benchmark rng_state term typ)
| _ -> None)
terms
| None ->
Format.eprintf "No michelson_terms_file given, generating on-the-fly@." ;
List.repeat bench_num (make_bench rng_state config)
let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Unparsing_data)
module Typechecking_code : Benchmark.S = struct
include Config
include Default_boilerplate
let models = make_models Translator_workload.Parsing Translator_workload.Code
let name = "TYPECHECKING_CODE"
let info = "Benchmarking typechecking of code"
let typechecking_code_benchmark rng_state (node : Protocol.Script_repr.expr)
(stack : Script_repr.expr list) =
Lwt_main.run
( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
let ex_stack_ty =
Type_helpers.michelson_type_list_to_ex_stack_ty stack ctxt
in
let workload =
match
Translator_workload.code_typechecker_workload
ctxt
Translator_workload.Parsing
(Micheline.root node)
ex_stack_ty
with
| None -> bad_code name node stack Workload_production
| Some workload -> workload
in
let (Script_ir_translator.Ex_stack_ty bef) = ex_stack_ty in
let closure () =
let result =
Lwt_main.run
(Script_ir_translator.parse_instr
Script_tc_context.data
ctxt
~elab_conf:strict
(Micheline.root node)
bef)
in
match Environment.wrap_tzresult result with
| Error errs ->
Format.eprintf "%a@." Error_monad.pp_print_trace errs ;
bad_code name node stack In_protocol
| Ok _ -> ()
in
return (Generator.Plain {workload; closure}) )
|> function
| Ok closure -> closure
| Error errs -> global_error name errs
let make_bench rng_state (cfg : Config.config) () =
let open Michelson_generation in
let Michelson_mcmc_samplers.{term; bef; aft = _} =
make_code_sampler rng_state cfg.generator_config
in
typechecking_code_benchmark rng_state term bef
let create_benchmarks ~rng_state ~bench_num config =
match config.michelson_terms_file with
| Some file ->
Format.eprintf "Loading terms from %s@." file ;
let terms = Michelson_mcmc_samplers.load ~filename:file in
List.filter_map
(function
| Michelson_mcmc_samplers.Code {term; bef; aft = _} ->
Some (fun () -> typechecking_code_benchmark rng_state term bef)
| _ -> None)
terms
| None ->
Format.eprintf "No michelson_terms_file given, generating on-the-fly@." ;
List.repeat bench_num (make_bench rng_state config)
let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Typechecking_code)
module Unparsing_code : Benchmark.S = struct
include Config
include Default_boilerplate
let models =
make_models Translator_workload.Unparsing Translator_workload.Code
let name = "UNPARSING_CODE"
let info = "Benchmarking unparsing of code"
let unparsing_code_benchmark rng_state (node : Protocol.Script_repr.expr)
(stack : Script_repr.expr list) =
Lwt_main.run
( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
let ex_stack_ty =
Type_helpers.michelson_type_list_to_ex_stack_ty stack ctxt
in
let workload =
match
Translator_workload.code_typechecker_workload
ctxt
Translator_workload.Unparsing
(Micheline.root node)
ex_stack_ty
with
| None -> bad_code name node stack Workload_production
| Some workload -> workload
in
let (Script_ir_translator.Ex_stack_ty bef) = ex_stack_ty in
Script_ir_translator.parse_instr
Script_tc_context.data
ctxt
~elab_conf:strict
(Micheline.root node)
bef
>|= Environment.wrap_tzresult
>>=? fun (_typed, ctxt) ->
let closure () =
let result =
Lwt_main.run
(Script_ir_translator.Internal_for_benchmarking.unparse_code
~stack_depth:0
ctxt
Optimized
(Micheline.root node))
in
match Environment.wrap_tzresult result with
| Error errs ->
Format.eprintf "%a@." Error_monad.pp_print_trace errs ;
bad_code name node stack In_protocol
| Ok _ -> ()
in
return (Generator.Plain {workload; closure}) )
|> function
| Ok closure -> closure
| Error errs -> global_error name errs
let make_bench rng_state (cfg : Config.config) () =
let open Michelson_generation in
let Michelson_mcmc_samplers.{term; bef; aft = _} =
make_code_sampler rng_state cfg.generator_config
in
unparsing_code_benchmark rng_state term bef
let create_benchmarks ~rng_state ~bench_num config =
match config.michelson_terms_file with
| Some file ->
Format.eprintf "Loading terms from %s@." file ;
let terms = Michelson_mcmc_samplers.load ~filename:file in
List.filter_map
(function
| Michelson_mcmc_samplers.Code {term; bef; aft = _} ->
Some (fun () -> unparsing_code_benchmark rng_state term bef)
| _ -> None)
terms
| None -> List.repeat bench_num (make_bench rng_state config)
let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Unparsing_code)
let rec check_printable_ascii v i =
if Compare.Int.(i < 0) then true
else
match v.[i] with
| '\n' | '\x20' .. '\x7E' -> check_printable_ascii v (i - 1)
| _ -> false
let check_printable_benchmark =
let open Tezos_shell_benchmarks.Encoding_benchmarks_helpers in
linear_shared
~name:"CHECK_PRINTABLE"
~generator:(fun rng_state ->
let open Base_samplers in
let string =
readable_ascii_string rng_state ~size:{min = 1; max = 1024}
in
(string, {Shared_linear.bytes = String.length string}))
~make_bench:(fun generator () ->
let generated, workload = generator () in
let closure () =
ignore (check_printable_ascii generated (String.length generated - 1))
in
Generator.Plain {workload; closure})
()
let () = Registration_helpers.register check_printable_benchmark
module Ty_eq : Benchmark.S = struct
type config = {max_size : int}
let config_encoding =
let open Data_encoding in
conv
(fun {max_size} -> max_size)
(fun max_size -> {max_size})
(obj1 (req "max_size" int31))
let default_config = {max_size = 64}
type workload = Ty_eq_workload of {nodes : int; consumed : Size.t}
let workload_encoding =
let open Data_encoding in
conv
(function Ty_eq_workload {nodes; consumed} -> (nodes, consumed))
(fun (nodes, consumed) -> Ty_eq_workload {nodes; consumed})
(obj2 (req "nodes" int31) (req "consumed" int31))
let workload_to_vector = function
| Ty_eq_workload {nodes; consumed} ->
Sparse_vec.String.of_list
[("nodes", float_of_int nodes); ("consumed", float_of_int consumed)]
let name = "TY_EQ"
let info = "Benchmarking equating types"
let tags = [Tags.translator]
let intercept_var = Free_variable.of_string (Format.asprintf "%s_const" name)
let coeff_var = Free_variable.of_string (Format.asprintf "%s_coeff" name)
let size_model =
Model.make
~conv:(function Ty_eq_workload {nodes; _} -> (nodes, ()))
~model:
(Model.affine ~name:(ns name) ~intercept:intercept_var ~coeff:coeff_var)
let codegen_model =
Model.make
~conv:(function Ty_eq_workload {nodes; _} -> (nodes, ()))
~model:
(Model.affine ~name:(ns name) ~intercept:intercept_var ~coeff:coeff_var)
let models =
[("size_translator_model", size_model); ("codegen", codegen_model)]
let ty_eq_benchmark rng_state nodes (ty : Script_typed_ir.ex_ty) =
Lwt_main.run
( Execution_context.make ~rng_state >>=? fun (ctxt, _) ->
let ctxt = Gas_helpers.set_limit ctxt in
match ty with
| Ex_ty ty ->
let dummy_loc = 0 in
Lwt.return
(Gas_monad.run ctxt
@@ Script_ir_translator.ty_eq
~error_details:(Informative dummy_loc)
ty
ty)
>|= Environment.wrap_tzresult
>>=? fun (_, ctxt') ->
let consumed =
Alpha_context.Gas.consumed ~since:ctxt ~until:ctxt'
in
let workload =
Ty_eq_workload
{nodes; consumed = Z.to_int (Gas_helpers.fp_to_z consumed)}
in
let closure () =
ignore
(Gas_monad.run ctxt
@@ Script_ir_translator.ty_eq
~error_details:(Informative dummy_loc)
ty
ty)
in
return (Generator.Plain {workload; closure}) )
|> function
| Ok closure -> closure
| Error errs -> global_error name errs
let make_bench rng_state (cfg : config) () =
let nodes =
Base_samplers.(
sample_in_interval ~range:{min = 1; max = cfg.max_size} rng_state)
in
let ty =
Michelson_generation.Samplers.Random_type.m_type ~size:nodes rng_state
in
ty_eq_benchmark rng_state nodes ty
let create_benchmarks ~rng_state ~bench_num config =
List.repeat bench_num (make_bench rng_state config)
let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Ty_eq)
let rec dummy_type_generator size =
let open Script_typed_ir in
if size <= 1 then Ex_ty unit_t
else
match dummy_type_generator (size - 2) with
| Ex_ty r -> (
let l = unit_t in
match pair_t (-1) l r with
| Error _ -> assert false
| Ok (Ty_ex_c t) -> Ex_ty t)
let rec dummy_comparable_type_generator size =
let open Script_ir_translator in
let open Script_typed_ir in
if size <= 0 then Ex_comparable_ty unit_t
else
match dummy_comparable_type_generator (size - 2) with
| Ex_comparable_ty r ->
let l = unit_t in
Ex_comparable_ty
(match comparable_pair_t (-1) l r with
| Error _ -> assert false
| Ok t -> t)
module Parse_type_shared = struct
type config = {max_size : int}
let default_config = {max_size = Constants_repr.michelson_maximum_type_size}
let config_encoding =
let open Data_encoding in
conv
(fun {max_size} -> max_size)
(fun max_size -> {max_size})
(obj1 (req "max_size" int31))
type workload = Type_workload of {nodes : int; consumed : Size.t}
let workload_encoding =
let open Data_encoding in
conv
(function Type_workload {nodes; consumed} -> (nodes, consumed))
(fun (nodes, consumed) -> Type_workload {nodes; consumed})
(obj2 (req "nodes" int31) (req "consumed" int31))
let workload_to_vector = function
| Type_workload {nodes; consumed} ->
Sparse_vec.String.of_list
[("nodes", float_of_int nodes); ("consumed", float_of_int consumed)]
let tags = [Tags.translator]
end
let parse_ty ctxt node =
Script_ir_translator.parse_ty
ctxt
~legacy:true
~allow_lazy_storage:true
~allow_operation:true
~allow_contract:true
~allow_ticket:true
node
let unparse_ty ctxt ty = Script_ir_unparser.unparse_ty ~loc:(-1) ctxt ty
module Parse_type_benchmark : Benchmark.S = struct
include Parse_type_shared
let name = "PARSE_TYPE"
let info = "Benchmarking parse_ty"
let make_bench rng_state config () =
( Lwt_main.run (Execution_context.make ~rng_state) >>? fun (ctxt, _) ->
let ctxt = Gas_helpers.set_limit ctxt in
let size = Random.State.int rng_state config.max_size in
let ty = dummy_type_generator size in
match ty with
| Ex_ty ty ->
Environment.wrap_tzresult @@ unparse_ty ctxt ty
>>? fun (unparsed, _) ->
Environment.wrap_tzresult @@ parse_ty ctxt unparsed
>>? fun (_, ctxt') ->
let consumed =
Z.to_int
(Gas_helpers.fp_to_z
(Alpha_context.Gas.consumed ~since:ctxt ~until:ctxt'))
in
let nodes =
let x = Script_typed_ir.ty_size ty in
Saturation_repr.to_int @@ Script_typed_ir.Type_size.to_int x
in
let workload = Type_workload {nodes; consumed} in
let closure () = ignore (parse_ty ctxt unparsed) in
ok (Generator.Plain {workload; closure}) )
|> function
| Ok closure -> closure
| Error errs -> global_error name errs
let size_model =
Model.make
~conv:(function Type_workload {nodes; consumed = _} -> (nodes, ()))
~model:
(Model.affine
~name:(ns name)
~intercept:
(Free_variable.of_string (Format.asprintf "%s_const" name))
~coeff:(Free_variable.of_string (Format.asprintf "%s_coeff" name)))
let models = [("size_translator_model", size_model)]
let create_benchmarks ~rng_state ~bench_num config =
List.repeat bench_num (make_bench rng_state config)
let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Parse_type_benchmark)
module Unparse_type_benchmark : Benchmark.S = struct
include Parse_type_shared
let name = "UNPARSE_TYPE"
let info = "Benchmarking unparse_ty"
let make_bench rng_state config () =
( Lwt_main.run (Execution_context.make ~rng_state) >>? fun (ctxt, _) ->
let ctxt = Gas_helpers.set_limit ctxt in
let size = Random.State.int rng_state config.max_size in
let ty = dummy_type_generator size in
match ty with
| Ex_ty ty ->
Environment.wrap_tzresult @@ unparse_ty ctxt ty >>? fun (_, ctxt') ->
let consumed =
Z.to_int
(Gas_helpers.fp_to_z
(Alpha_context.Gas.consumed ~since:ctxt ~until:ctxt'))
in
let nodes =
let x = Script_typed_ir.ty_size ty in
Saturation_repr.to_int @@ Script_typed_ir.Type_size.to_int x
in
let workload = Type_workload {nodes; consumed} in
let closure () = ignore (unparse_ty ctxt ty) in
ok (Generator.Plain {workload; closure}) )
|> function
| Ok closure -> closure
| Error errs -> global_error name errs
let size_model =
Model.make
~conv:(function Type_workload {nodes; consumed = _} -> (nodes, ()))
~model:
(Model.affine
~name:(ns name)
~intercept:
(Free_variable.of_string (Format.asprintf "%s_const" name))
~coeff:(Free_variable.of_string (Format.asprintf "%s_coeff" name)))
let models = [("size_translator_model", size_model)]
let create_benchmarks ~rng_state ~bench_num config =
List.repeat bench_num (make_bench rng_state config)
let name = Namespace.of_string name
end
let () = Registration_helpers.register (module Unparse_type_benchmark)
|
6839ed3167ef95fe3177324cc1905f097204055efa8a683b04da58db0e3df226 | Metaxal/text-table | main.rkt | #lang racket/base
(require racket/format
racket/list
racket/dict
racket/string
racket/match
racket/contract
"utils.rkt")
(provide
string-length=/c
(rename-out [border-styles named-border-styles])
border-style/c
border-style1/c
border-style2/c
border-style-frame/c
(contract-out
(table->string table->string/c)
(simple-table->string table->string/c))
print-table
print-simple-table)
(define ((string-length=/c n) x)
(and (string? x)
(= n (string-length x))))
;==============;
;=== Frames ===;
;==============;
;; "Window" style frames.
;; Easier to specify, and more flexible since col seps may be different for top, middle and bottom.
(define table-frames
'((space
" "
" "
" "
" ")
(single
"┌─┬┐"
"│ ││"
"├─┼┤"
"└─┴┘")
(space-single
"┌──┐"
"│ │"
"├──┤"
"└──┘")
(rounded
"╭─┬╮"
"│ ││"
"├─┼┤"
"╰─┴╯")
(double
"╔═╦╗"
"║ ║║"
"╠═╬╣"
"╚═╩╝")
(heavy
"┏━┳┓"
"┃ ┃┃"
"┣━╋┫"
"┗━┻┛")))
(define border-style-frame/c
(list/c (string-length=/c 4)
(string-length=/c 4)
(string-length=/c 4)
(string-length=/c 4)))
(define (frame->border2 frame)
(map (λ (s) (map string (string->list s))) frame))
;; See
;; -drawing_character
;; -chartable.de/unicode-utf8-table.pl?start=9472&unicodeinhtml=dec
;; old border styles
(define table-borders
(cons
'(empty ("" " " "" "") ("" " " "" "") ("" " " "" "") ("" " " "" ""))
(for/list ([(name frame) (in-dict table-frames)])
(cons name (frame->border2 frame))))
#; ; equivalent to
'((space . (#\space (" " " " " ") (" " " " " ") (" " " " " ") (" " " " " ")))
(space-single . (#\─ ("│" " " "│") ("┌" "─" "┐") ("├" "─" "┤") ("└" "─" "┘")))
(single . (#\─ ("│" "│" "│") ("┌" "┬" "┐") ("├" "┼" "┤") ("└" "┴" "┘")))
(rounded . (#\─ ("│" "│" "│") ("╭" "┬" "╮") ("├" "┼" "┤") ("╰" "┴" "╯")))
(double . (#\═ ("║" "║" "║") ("╔" "╦" "╗") ("╠" "╬" "╣") ("╚" "╩" "╝")))
(heavy . (#\━ ("┃" "┃" "┃") ("┏" "┳" "┓") ("┣" "╋" "┫") ("┗" "┻" "┛")))))
(define border-style1/c
(list/c char?
(list/c string? string? string?)
(list/c string? string? string?)
(list/c string? string? string?)
(list/c string? string? string?)))
(define border-style2/c
(list/c (list/c string? string? string? string?)
(list/c string? string? string? string?)
(list/c string? string? string? string?)
(list/c string? string? string? string?)))
(define (border1->border2 border)
(match border
[(list sep-char (list rowl rowm rowr) (list tl tm tr) (list ml mm mr) (list bl bm br))
(define sep (string sep-char))
; default pad-char is " "
(list (list tl sep tm tr)
(list rowl " " rowm rowr)
(list ml sep mm mr)
(list bl sep bm br))]))
(define border-styles
(cons 'latex (dict-keys table-borders)))
(define border-style/c
(apply or/c
; custom (old) style, kept for backward compatibility
border-style1/c
new style , with one row separator per row type
border-style2/c
; custom "window" style
border-style-frame/c
; default styles
border-styles))
(define (make-latex-border-style align framed? col-sep?s)
(define (align-ref al sep?)
(string-append (if sep? "|" "")
(case al [(left) "l"] [(right) "r"] [(center) "c"])))
(define als (string-append
"\\begin{tabular}{"
(if framed? "|" "")
(string-append*
(align-ref (first align) #f)
(map align-ref (rest align) col-sep?s))
(if framed? "|}\n\\hline" "}")))
`((,als "" "" "")
("" " " " & " " \\\\")
("\\hline" "" "" "")
(,(if framed? "\\hline\n\\end{tabular}" "\\end{tabular}") "" "" "")))
;==================;
;=== Alignments ===;
;==================;
col : ( string ? )
;; align: (or/c 'left 'center 'right)
(define (align-column col align pad-string)
(define width (apply max (map string-length col)))
(map (λ (str)
(~a str #:min-width width #:align align #:pad-string pad-string))
col))
;; mrow: 2d-list?
;; align: (or/c 'top 'center 'bottom)
(define (align-row mrow align pad-string)
(define height (apply max (map length mrow)))
(map (λ (mcell)
(define n (- height (length mcell)))
(define str-len (string-length (first mcell)))
(define pad (string-repeat pad-string str-len))
(case align
[(top) (append mcell (make-list n pad))]
[(bottom) (append (make-list n pad) mcell)]
[(center)
(define h (length mcell))
(define ntop (quotient (- height h) 2))
(append (make-list ntop pad) mcell (make-list (- height h ntop) pad))]
[else (error "Unknown align-row align:" align)]))
mrow))
(define numeric-rx #px"^\\s*([-+]?)\\s*(\\d*)(\\.?)(\\d*)(e?)([-+]?)(\\d*)\\s*$")
(define (align-column-numeric col align pad-string)
(define cols
(transpose
(map
(λ (str)
(define m (regexp-match numeric-rx str))
(if m
(cons #f (rest m))
(cons str (make-list 7 ""))))
col)))
(define rows
(transpose
(cons (first cols)
(map (λ (col align pad) (align-column col align pad))
(rest cols)
'(right right left left left left right)
(list pad-string pad-string "." "0" "e" "+" "0")))))
(align-column
(for/list ([row (in-list rows)])
(or (first row)
(string-append* (rest row))))
align
pad-string))
;=====================;
;=== table->string ===;
;=====================;
(define table->string/c
(->* ((listof list?))
(#:->string (pattern-list-of (procedure-arity-includes/c 1))
#:border-style border-style/c
#:framed? boolean?
#:row-sep? (pattern-list-of boolean?)
#:col-sep? (pattern-list-of boolean?)
#:align (pattern-list-of (or/c 'left 'center 'right))
#:row-align (pattern-list-of (or/c 'top 'center 'bottom)))
string?))
(define-syntax-rule (print-table args ...)
(displayln (table->string args ...)))
(define-syntax-rule (print-simple-table args ...)
(displayln (simple-table->string args ...)))
If only I could use ` define2 ` … :-/
(define (simple-table->string ll
#:border-style [border-style 'space]
#:framed? [framed? #false]
#:row-sep? [row-sep? #false]
#:col-sep? [col-sep? #false]
#:->string [->string ~a]
#:align [align 'left]
#:row-align [row-align 'top])
(table->string ll
#:border-style border-style
#:framed? framed?
#:->string ->string
#:row-sep? row-sep?
#:align align
#:row-align row-align))
(define (table->string ll
#:border-style [border-style 'single]
#:framed? [framed? #true]
#:row-sep? [row-sep? #true]
#:col-sep? [col-sep? #true]
#:->string [->string ~a]
#:align [align 'left]
#:row-align [row-align 'top])
;::::::::::::::::::;
;:: Check inputs ::;
;::::::::::::::::::;
(unless (and (list? ll) (not (empty? ll)) (andmap list? ll))
(raise-argument-error 'table->string
"nonempty list of lists of the same lengths"
0 ll))
(define lens (map length ll))
(define n-rows (length ll))
(define n-columns (first lens))
(unless (andmap (λ (len) (= len n-columns)) (rest lens))
(error "All rows must have the same length"))
;::::::::::::::::::::::::::;
;:: Expand pattern lists ::;
;::::::::::::::::::::::::::;
(define ->string-list (pattern-list->list ->string n-columns #:truncate-ok? #t))
(define align-list (pattern-list->list align n-columns #:truncate-ok? #t))
(define row-align-list (pattern-list->list row-align n-rows #:truncate-ok? #t))
(define col-sep?s (pattern-list->list col-sep? (- n-columns 1) #:truncate-ok? #t))
(define row-sep?s (pattern-list->list row-sep? (- n-rows 1) #:truncate-ok? #t))
;:::::::::::::::::::;
;:: Prepare style ::;
;:::::::::::::::::::;
(define style
(cond [(eq? border-style 'latex)
(define new-style (make-latex-border-style align-list framed? col-sep?s))
; force borders
(set! framed? #t)
(set! col-sep?s (make-list (- n-columns 1) #t))
new-style]
[(symbol? border-style)
(dict-ref table-borders border-style)]
[(border-style2/c border-style)
border-style]
[(border-style1/c border-style) ; old style
(border1->border2 border-style)]
[(border-style-frame/c border-style)
(frame->border2 border-style)]
[else
(error "Unrecognized style" border-style)]))
(define-values (top-row-corners col-seps mid-row-corners bottom-row-corners)
(apply values style))
(define pad-string (list-ref col-seps 1))
;:::::::::::::::::::::::::;
;:: Transform the table ::;
;:::::::::::::::::::::::::;
;; ll: 2d-list of any/c
;; 0. Each cell initially contains a string, possibly with newlines, turn
; them into lists of strings without newline.
;; TODO: We can't consider that a list in a cell is a multiline,
;; but we could have a `cell` struct that can contains multiple elements
;; to be displayed on several lines
(define ll1
(map (λ (row) (map (λ (cell ->string)
(define res (string-split (if (string? cell) cell (->string cell))
"\n"))
(if (empty? res) '("") res))
row
->string-list))
ll))
( ll1 )
;; ll1: 3d-list of string
;; (cells are list of strings)
1 . transpose table ,
;; align-column, so that all lines in a cell of all cells of the column have the same width
;; transpose table back
(define ll2
(transpose
(map (λ (mcol align) (apply/2d-list-as-list align-column mcol align pad-string))
(transpose ll1)
align-list)))
( writeln ll2 )
2 . align - row , to create the missing lines in the cell , so all cells in the same
;; row have the same number of lines (same height)
(define ll3 (map (λ (mrow align) (align-row mrow align pad-string))
ll2
row-align-list))
( writeln ll3 )
(define cell-widths (map (λ (mcell) (string-length (first mcell)))
(first ll3)))
(define (make-row-line strs row-corners)
(define (@ n) (list-ref row-corners n))
(define row-sep (@ 2))
; Special case for latex
(define no-sep (@ 1))
(string-append
(if framed? (@ 0) "")
(first strs)
(string-append*
(append-map (λ (str sep?) (if sep? (list row-sep str) (list no-sep str)))
(rest strs)
col-sep?s))
(if framed? (@ 3) ""))
#;
(string-join strs
(@ 2)
#:before-first (if framed? (@ 0) "")
#:after-last (if framed? (@ 3) "")))
3 . For each mrow , transpose the mrow , then string - join the lines of a rows
to obtain a simple list of strings , one per line , but without the frame rows .
(define ll4
(map (λ (mrow)
(string-join
(map (λ (strs) (make-row-line strs col-seps))
(transpose mrow))
"\n"))
ll3))
#;(writeln ll4)
(define (make-sep-line row-corners)
(define row-sep (list-ref row-corners 1))
(define row-sep-len (string-length row-sep))
(make-row-line
(if (= row-sep-len 0)
(make-list n-columns "")
(for/list ([len (in-list cell-widths)])
(string-repeat row-sep len)))
row-corners
#;(string-repeat make-string (string-length pad-string) row-sep)))
(define mid-sep-line (make-sep-line mid-row-corners))
4 . Finally , append all the lines together , adding the frame lines if applicable .
(string-join
#:before-first
(if framed?
(string-append (make-sep-line top-row-corners) "\n")
"")
(if row-sep?
(cons (first ll4)
(append-map (λ (row sep?) (if sep? (list mid-sep-line row) (list row)))
(rest ll4)
row-sep?s))
ll4)
"\n"
#:after-last
(if framed?
(string-append "\n" (make-sep-line bottom-row-corners))
"")))
;============;
;=== Main ===;
;============;
;; Usage example. To see the output, run:
;; racket -l text-table
(module+ main
(define table
'((a b c d e f gggg h)
(123 456 77 54 1 5646547987 41 1)
(111 22 3333 44 5 6 7 8888)))
(define aligns
one alignment per column
(for* ([align (in-list (list 'left 'center 'right aligns))])
(newline)
(newline)
; Print values
(displayln
(table->string
(list (list '#:align align))))
; Example
(displayln
(table->string
table
#:align align)))
(for* ([border-style (in-list border-styles)]
[framed? (in-list '(#t #f))]
[row-sep? (in-list '(#t #f))])
(newline)
(newline)
; Print values
(displayln
(table->string
(list (list '#:border-style border-style)
(list '#:framed? framed?)
(list '#:row-sep? row-sep?))))
; Example
(displayln
(table->string
table
#:align aligns
#:border-style border-style
#:row-sep? row-sep?
#:framed? framed?)))
(newline)
(newline)
(displayln "Multiline")
(newline)
(displayln
(table->string
`(["hello\nworld" "1\n2\n3" "3" ""]
["" "" "" ""]
["a\nbb\nccc\ndddd" "1" "22\n22" ""]))))
(module+ drracket
(for ([col (list
(map ~a '(1 100 1000))
(map ~a '(1 100 1e3))
(map ~a '(1 100 1000 -12))
(map ~a '(1 100 1000 1.12))
(map ~a '(1 100 1000 3e25))
(map ~a '(1 100 1000 3e25 2.12e31))
'("hello" "1.2e34" "+inf.0" "12e34"
"12345" "12.34" "1.234e-3" "2.322e+03" "-nan.0" "-13.3"))])
(displayln (string-join (align-column-numeric col 'center "_") "\n"))
(newline))
(define mcol
'(("hello") ("1.2e34") ("+inf.0" "12e34")
("12345" "12.34" "1.234e-3" "2.322e+03") ("-nan.0") ("-13.3")))
(apply/2d-list-as-list align-column-numeric mcol 'right "_")
(flatten (apply/2d-list-as-list align-column-numeric mcol 'right "_")))
| null | https://raw.githubusercontent.com/Metaxal/text-table/b20f34b068aedbe6c1756628dc4c9df9b1cba2e1/main.rkt | racket | ==============;
=== Frames ===;
==============;
"Window" style frames.
Easier to specify, and more flexible since col seps may be different for top, middle and bottom.
See
-drawing_character
-chartable.de/unicode-utf8-table.pl?start=9472&unicodeinhtml=dec
old border styles
; equivalent to
default pad-char is " "
custom (old) style, kept for backward compatibility
custom "window" style
default styles
==================;
=== Alignments ===;
==================;
align: (or/c 'left 'center 'right)
mrow: 2d-list?
align: (or/c 'top 'center 'bottom)
=====================;
=== table->string ===;
=====================;
::::::::::::::::::;
:: Check inputs ::;
::::::::::::::::::;
::::::::::::::::::::::::::;
:: Expand pattern lists ::;
::::::::::::::::::::::::::;
:::::::::::::::::::;
:: Prepare style ::;
:::::::::::::::::::;
force borders
old style
:::::::::::::::::::::::::;
:: Transform the table ::;
:::::::::::::::::::::::::;
ll: 2d-list of any/c
0. Each cell initially contains a string, possibly with newlines, turn
them into lists of strings without newline.
TODO: We can't consider that a list in a cell is a multiline,
but we could have a `cell` struct that can contains multiple elements
to be displayed on several lines
ll1: 3d-list of string
(cells are list of strings)
align-column, so that all lines in a cell of all cells of the column have the same width
transpose table back
row have the same number of lines (same height)
Special case for latex
(writeln ll4)
(string-repeat make-string (string-length pad-string) row-sep)))
============;
=== Main ===;
============;
Usage example. To see the output, run:
racket -l text-table
Print values
Example
Print values
Example | #lang racket/base
(require racket/format
racket/list
racket/dict
racket/string
racket/match
racket/contract
"utils.rkt")
(provide
string-length=/c
(rename-out [border-styles named-border-styles])
border-style/c
border-style1/c
border-style2/c
border-style-frame/c
(contract-out
(table->string table->string/c)
(simple-table->string table->string/c))
print-table
print-simple-table)
(define ((string-length=/c n) x)
(and (string? x)
(= n (string-length x))))
(define table-frames
'((space
" "
" "
" "
" ")
(single
"┌─┬┐"
"│ ││"
"├─┼┤"
"└─┴┘")
(space-single
"┌──┐"
"│ │"
"├──┤"
"└──┘")
(rounded
"╭─┬╮"
"│ ││"
"├─┼┤"
"╰─┴╯")
(double
"╔═╦╗"
"║ ║║"
"╠═╬╣"
"╚═╩╝")
(heavy
"┏━┳┓"
"┃ ┃┃"
"┣━╋┫"
"┗━┻┛")))
(define border-style-frame/c
(list/c (string-length=/c 4)
(string-length=/c 4)
(string-length=/c 4)
(string-length=/c 4)))
(define (frame->border2 frame)
(map (λ (s) (map string (string->list s))) frame))
(define table-borders
(cons
'(empty ("" " " "" "") ("" " " "" "") ("" " " "" "") ("" " " "" ""))
(for/list ([(name frame) (in-dict table-frames)])
(cons name (frame->border2 frame))))
'((space . (#\space (" " " " " ") (" " " " " ") (" " " " " ") (" " " " " ")))
(space-single . (#\─ ("│" " " "│") ("┌" "─" "┐") ("├" "─" "┤") ("└" "─" "┘")))
(single . (#\─ ("│" "│" "│") ("┌" "┬" "┐") ("├" "┼" "┤") ("└" "┴" "┘")))
(rounded . (#\─ ("│" "│" "│") ("╭" "┬" "╮") ("├" "┼" "┤") ("╰" "┴" "╯")))
(double . (#\═ ("║" "║" "║") ("╔" "╦" "╗") ("╠" "╬" "╣") ("╚" "╩" "╝")))
(heavy . (#\━ ("┃" "┃" "┃") ("┏" "┳" "┓") ("┣" "╋" "┫") ("┗" "┻" "┛")))))
(define border-style1/c
(list/c char?
(list/c string? string? string?)
(list/c string? string? string?)
(list/c string? string? string?)
(list/c string? string? string?)))
(define border-style2/c
(list/c (list/c string? string? string? string?)
(list/c string? string? string? string?)
(list/c string? string? string? string?)
(list/c string? string? string? string?)))
(define (border1->border2 border)
(match border
[(list sep-char (list rowl rowm rowr) (list tl tm tr) (list ml mm mr) (list bl bm br))
(define sep (string sep-char))
(list (list tl sep tm tr)
(list rowl " " rowm rowr)
(list ml sep mm mr)
(list bl sep bm br))]))
(define border-styles
(cons 'latex (dict-keys table-borders)))
(define border-style/c
(apply or/c
border-style1/c
new style , with one row separator per row type
border-style2/c
border-style-frame/c
border-styles))
(define (make-latex-border-style align framed? col-sep?s)
(define (align-ref al sep?)
(string-append (if sep? "|" "")
(case al [(left) "l"] [(right) "r"] [(center) "c"])))
(define als (string-append
"\\begin{tabular}{"
(if framed? "|" "")
(string-append*
(align-ref (first align) #f)
(map align-ref (rest align) col-sep?s))
(if framed? "|}\n\\hline" "}")))
`((,als "" "" "")
("" " " " & " " \\\\")
("\\hline" "" "" "")
(,(if framed? "\\hline\n\\end{tabular}" "\\end{tabular}") "" "" "")))
col : ( string ? )
(define (align-column col align pad-string)
(define width (apply max (map string-length col)))
(map (λ (str)
(~a str #:min-width width #:align align #:pad-string pad-string))
col))
(define (align-row mrow align pad-string)
(define height (apply max (map length mrow)))
(map (λ (mcell)
(define n (- height (length mcell)))
(define str-len (string-length (first mcell)))
(define pad (string-repeat pad-string str-len))
(case align
[(top) (append mcell (make-list n pad))]
[(bottom) (append (make-list n pad) mcell)]
[(center)
(define h (length mcell))
(define ntop (quotient (- height h) 2))
(append (make-list ntop pad) mcell (make-list (- height h ntop) pad))]
[else (error "Unknown align-row align:" align)]))
mrow))
(define numeric-rx #px"^\\s*([-+]?)\\s*(\\d*)(\\.?)(\\d*)(e?)([-+]?)(\\d*)\\s*$")
(define (align-column-numeric col align pad-string)
(define cols
(transpose
(map
(λ (str)
(define m (regexp-match numeric-rx str))
(if m
(cons #f (rest m))
(cons str (make-list 7 ""))))
col)))
(define rows
(transpose
(cons (first cols)
(map (λ (col align pad) (align-column col align pad))
(rest cols)
'(right right left left left left right)
(list pad-string pad-string "." "0" "e" "+" "0")))))
(align-column
(for/list ([row (in-list rows)])
(or (first row)
(string-append* (rest row))))
align
pad-string))
(define table->string/c
(->* ((listof list?))
(#:->string (pattern-list-of (procedure-arity-includes/c 1))
#:border-style border-style/c
#:framed? boolean?
#:row-sep? (pattern-list-of boolean?)
#:col-sep? (pattern-list-of boolean?)
#:align (pattern-list-of (or/c 'left 'center 'right))
#:row-align (pattern-list-of (or/c 'top 'center 'bottom)))
string?))
(define-syntax-rule (print-table args ...)
(displayln (table->string args ...)))
(define-syntax-rule (print-simple-table args ...)
(displayln (simple-table->string args ...)))
If only I could use ` define2 ` … :-/
(define (simple-table->string ll
#:border-style [border-style 'space]
#:framed? [framed? #false]
#:row-sep? [row-sep? #false]
#:col-sep? [col-sep? #false]
#:->string [->string ~a]
#:align [align 'left]
#:row-align [row-align 'top])
(table->string ll
#:border-style border-style
#:framed? framed?
#:->string ->string
#:row-sep? row-sep?
#:align align
#:row-align row-align))
(define (table->string ll
#:border-style [border-style 'single]
#:framed? [framed? #true]
#:row-sep? [row-sep? #true]
#:col-sep? [col-sep? #true]
#:->string [->string ~a]
#:align [align 'left]
#:row-align [row-align 'top])
(unless (and (list? ll) (not (empty? ll)) (andmap list? ll))
(raise-argument-error 'table->string
"nonempty list of lists of the same lengths"
0 ll))
(define lens (map length ll))
(define n-rows (length ll))
(define n-columns (first lens))
(unless (andmap (λ (len) (= len n-columns)) (rest lens))
(error "All rows must have the same length"))
(define ->string-list (pattern-list->list ->string n-columns #:truncate-ok? #t))
(define align-list (pattern-list->list align n-columns #:truncate-ok? #t))
(define row-align-list (pattern-list->list row-align n-rows #:truncate-ok? #t))
(define col-sep?s (pattern-list->list col-sep? (- n-columns 1) #:truncate-ok? #t))
(define row-sep?s (pattern-list->list row-sep? (- n-rows 1) #:truncate-ok? #t))
(define style
(cond [(eq? border-style 'latex)
(define new-style (make-latex-border-style align-list framed? col-sep?s))
(set! framed? #t)
(set! col-sep?s (make-list (- n-columns 1) #t))
new-style]
[(symbol? border-style)
(dict-ref table-borders border-style)]
[(border-style2/c border-style)
border-style]
(border1->border2 border-style)]
[(border-style-frame/c border-style)
(frame->border2 border-style)]
[else
(error "Unrecognized style" border-style)]))
(define-values (top-row-corners col-seps mid-row-corners bottom-row-corners)
(apply values style))
(define pad-string (list-ref col-seps 1))
(define ll1
(map (λ (row) (map (λ (cell ->string)
(define res (string-split (if (string? cell) cell (->string cell))
"\n"))
(if (empty? res) '("") res))
row
->string-list))
ll))
( ll1 )
1 . transpose table ,
(define ll2
(transpose
(map (λ (mcol align) (apply/2d-list-as-list align-column mcol align pad-string))
(transpose ll1)
align-list)))
  ;; (writeln ll2)
  ;; 2. align-row, to create the missing lines in the cell, so all cells in the same
(define ll3 (map (λ (mrow align) (align-row mrow align pad-string))
ll2
row-align-list))
  ;; (writeln ll3)
(define cell-widths (map (λ (mcell) (string-length (first mcell)))
(first ll3)))
(define (make-row-line strs row-corners)
(define (@ n) (list-ref row-corners n))
(define row-sep (@ 2))
(define no-sep (@ 1))
(string-append
(if framed? (@ 0) "")
(first strs)
(string-append*
(append-map (λ (str sep?) (if sep? (list row-sep str) (list no-sep str)))
(rest strs)
col-sep?s))
(if framed? (@ 3) ""))
(string-join strs
(@ 2)
#:before-first (if framed? (@ 0) "")
#:after-last (if framed? (@ 3) "")))
  ;; 3. For each mrow, transpose the mrow, then string-join the lines of a rows
  ;; to obtain a simple list of strings, one per line, but without the frame rows.
(define ll4
(map (λ (mrow)
(string-join
(map (λ (strs) (make-row-line strs col-seps))
(transpose mrow))
"\n"))
ll3))
(define (make-sep-line row-corners)
(define row-sep (list-ref row-corners 1))
(define row-sep-len (string-length row-sep))
(make-row-line
(if (= row-sep-len 0)
(make-list n-columns "")
(for/list ([len (in-list cell-widths)])
(string-repeat row-sep len)))
row-corners
(define mid-sep-line (make-sep-line mid-row-corners))
  ;; 4. Finally, append all the lines together, adding the frame lines if applicable.
(string-join
#:before-first
(if framed?
(string-append (make-sep-line top-row-corners) "\n")
"")
(if row-sep?
(cons (first ll4)
(append-map (λ (row sep?) (if sep? (list mid-sep-line row) (list row)))
(rest ll4)
row-sep?s))
ll4)
"\n"
#:after-last
(if framed?
(string-append "\n" (make-sep-line bottom-row-corners))
"")))
(module+ main
(define table
'((a b c d e f gggg h)
(123 456 77 54 1 5646547987 41 1)
(111 22 3333 44 5 6 7 8888)))
(define aligns
    ;; one alignment per column
(for* ([align (in-list (list 'left 'center 'right aligns))])
(newline)
(newline)
(displayln
(table->string
(list (list '#:align align))))
(displayln
(table->string
table
#:align align)))
(for* ([border-style (in-list border-styles)]
[framed? (in-list '(#t #f))]
[row-sep? (in-list '(#t #f))])
(newline)
(newline)
(displayln
(table->string
(list (list '#:border-style border-style)
(list '#:framed? framed?)
(list '#:row-sep? row-sep?))))
(displayln
(table->string
table
#:align aligns
#:border-style border-style
#:row-sep? row-sep?
#:framed? framed?)))
(newline)
(newline)
(displayln "Multiline")
(newline)
(displayln
(table->string
`(["hello\nworld" "1\n2\n3" "3" ""]
["" "" "" ""]
["a\nbb\nccc\ndddd" "1" "22\n22" ""]))))
(module+ drracket
(for ([col (list
(map ~a '(1 100 1000))
(map ~a '(1 100 1e3))
(map ~a '(1 100 1000 -12))
(map ~a '(1 100 1000 1.12))
(map ~a '(1 100 1000 3e25))
(map ~a '(1 100 1000 3e25 2.12e31))
'("hello" "1.2e34" "+inf.0" "12e34"
"12345" "12.34" "1.234e-3" "2.322e+03" "-nan.0" "-13.3"))])
(displayln (string-join (align-column-numeric col 'center "_") "\n"))
(newline))
(define mcol
'(("hello") ("1.2e34") ("+inf.0" "12e34")
("12345" "12.34" "1.234e-3" "2.322e+03") ("-nan.0") ("-13.3")))
(apply/2d-list-as-list align-column-numeric mcol 'right "_")
(flatten (apply/2d-list-as-list align-column-numeric mcol 'right "_")))
|
db3228c9df037f8a6e4b258397171f77053cef050d13c822edcec07996e3963a | nasa/Common-Metadata-Repository | collection_keyword_validation_test.clj | (ns cmr.system-int-test.ingest.collection-keyword-validation-test
"CMR Ingest keyword validation integration tests"
(:require
[clojure.test :refer :all]
[cmr.common.util :refer [are2 are3]]
[cmr.ingest.services.messages :as msg]
[cmr.system-int-test.data2.core :as d]
[cmr.system-int-test.data2.umm-spec-collection :as data-umm-c]
[cmr.system-int-test.data2.umm-spec-common :as data-umm-cmn]
[cmr.system-int-test.utils.ingest-util :as ingest]))
(defn assert-invalid
  "Ingests a collection built from `coll-attributes` into PROV1 (with
  `:allow-failure?` set so a rejection does not abort the test run) and
  asserts the ingest was rejected with HTTP status 422 carrying exactly
  `errors` at `field-path`. Extra ingest `options` may be supplied and
  override the defaults."
  ([coll-attributes field-path errors]
   (assert-invalid coll-attributes field-path errors nil))
  ([coll-attributes field-path errors options]
   (let [ingest-opts (merge {:allow-failure? true} options)
         coll        (data-umm-c/collection coll-attributes)
         resp        (d/ingest-umm-spec-collection "PROV1" coll ingest-opts)
         expected    {:status 422
                      :errors [{:path field-path
                                :errors errors}]}]
     (is (= expected (select-keys resp [:status :errors]))))))
(defn assert-valid
  "Ingests a collection built from `coll-attributes` and asserts the ingest
  succeeded (status 200 or 201 with no errors). The provider defaults to
  PROV1 unless `:provider-id` is present in the attributes; the collection's
  native id is taken from `:native-id` in the attributes."
  ([coll-attributes]
   (assert-valid coll-attributes nil))
  ([coll-attributes options]
   (let [provider (get coll-attributes :provider-id "PROV1")
         coll     (-> (data-umm-c/collection coll-attributes)
                      (assoc :native-id (:native-id coll-attributes)))
         resp     (d/ingest-umm-spec-collection provider coll options)]
     ;; Only status/errors matter; 200 (update) and 201 (create) both pass.
     (is (#{{:status 200} {:status 201}} (select-keys resp [:status :errors]))))))
(defn assert-invalid-keywords
  "Like `assert-invalid`, but ingests as UMM JSON with KMS keyword
  validation enabled."
  [coll-attributes field-path errors]
  (let [opts {:validate-keywords true
              :format :umm-json
              :accept-format :json}]
    (assert-invalid coll-attributes field-path errors opts)))
(defn assert-valid-keywords
  "Like `assert-valid`, but ingests as UMM JSON with KMS keyword
  validation enabled."
  [coll-attributes]
  (let [opts {:validate-keywords true
              :format :umm-json
              :accept-format :json}]
    (assert-valid coll-attributes opts)))
(use-fixtures :each (ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2"}))
(deftest collection-keyword-validation-test
  ;; For a list of the valid keywords during testing see dev-system/resources/kms_examples
  ;; Each `testing` block below exercises one UMM-C field family against the
  ;; GCMD/KMS controlled keyword lists.
  (testing "Keyword validation errors using validation endpoint"
    (let [concept (data-umm-c/collection-concept
                    {:Platforms [(data-umm-cmn/platform {:ShortName "foo"
                                                         :LongName "Airbus A340-600"
                                                         :Type "Jet"})]
                     :DataCenters [(data-umm-cmn/data-center {:Roles ["ARCHIVER"]
                                                              :ShortName "SomeCenter"})]})
          response (ingest/validate-concept concept {:validate-keywords true})]
      (is (= {:status 422
              :errors [{:path ["Platforms" 0]
                        :errors [(str "Platform short name [foo], long name [Airbus A340-600], "
                                      "and type [Jet] was not a valid keyword combination.")]}
                       {:path ["DataCenters" 0]
                        :errors [(str "Data center short name [SomeCenter] was not a valid "
                                      "keyword.")]}]}
             response))))
  (testing "Keyword validation warnings using validation endpoint"
    (let [concept (data-umm-c/collection-concept
                    {:Platforms [(data-umm-cmn/platform {:ShortName "foo"
                                                         :LongName "Airbus A340-600"
                                                         :Type "Jet"})]
                     :DataCenters [(data-umm-cmn/data-center {:Roles ["ARCHIVER"]
                                                              :ShortName "SomeCenter"
                                                              :ContactInformation {:RelatedUrls [{:URL ""
                                                                                                  :Description "description"
                                                                                                  :Type "GET DATA"
                                                                                                  :URLContentType "DistributionURL"
                                                                                                  :Subtype "DIRECT DOWNLOAD"
                                                                                                  :GetData {:Format "RelatedUrls: BadFormat1"
                                                                                                            :MimeType "RelatedUrls: BadMimeType1"
                                                                                                            :Size 0.0
                                                                                                            :Unit "KB"}}]}})]
                     :RelatedUrls [{:URL ""
                                    :Description "Bad data, throws warning"
                                    :Type "GET DATA"
                                    :URLContentType "DistributionURL"
                                    :Subtype "DIRECT DOWNLOAD"
                                    :GetData {:Format "RelatedUrls: BadFormat2"
                                              :MimeType "RelatedUrls: BadMimeType2"
                                              :Size 0.0
                                              :Unit "KB"}}
                                   {:URL ""
                                    :Description "No Warning: KMS only content type (not in valid-url-content-types-map)"
                                    :URLContentType "PublicationURL"
                                    :Type "VIEW RELATED INFORMATION"
                                    :Subtype "DATA PRODUCT SPECIFICATION"}]}
                    :umm-json)
          ;; With :validate-keywords false the bad keywords surface as warnings,
          ;; not errors, so the request still returns 200.
          response (ingest/validate-concept concept {:validate-keywords false})]
      (is (= {:status 200
              :warnings "After translating item to UMM-C the metadata had the following issue(s): [:DataCenters 0 :ContactInformation :RelatedUrls 0] URLContentType must be DataCenterURL for DataCenter RelatedUrls;; [:RelatedUrls 0 :GetData :MimeType] MimeType [RelatedUrls: BadMimeType2] was not a valid keyword.;; [:Platforms 0] Platform short name [foo], long name [Airbus A340-600], and type [Jet] was not a valid keyword combination.;; [:DataCenters 0] Data center short name [SomeCenter] was not a valid keyword.;; [:DataCenters 0 :ContactInformation :RelatedUrls 0 :GetData :MimeType] MimeType [RelatedUrls: BadMimeType1] was not a valid keyword."}
             response))))
  (testing "ArchiveAndDistributionInformation and RelatedUrls keyword validation"
    (let [format (data-umm-c/collection-concept
                   {:ArchiveAndDistributionInformation
                    {:FileDistributionInformation
                     [(data-umm-c/file-distribution-information
                       {:FormatType "Native"
                        :AverageFileSize 50
                        :AverageFileSizeUnit "MB"
                        :Fees "None currently"
                        :Format "8-track tape"})]
                     :FileArchiveInformation
                     [(data-umm-c/file-archive-information
                       {:FormatType "Native"
                        :AverageFileSize 50
                        :AverageFileSizeUnit "MB"
                        :Format "8-track tape"})]}
                    :RelatedUrls
                    [{:Description "Related url description"
                      :URL ""
                      :URLContentType "DistributionURL"
                      :Type "GET DATA"
                      :GetData {:Format "8-track tape"
                                :Size 10.0
                                :Unit "MB"
                                :Fees "fees"}}
                     {:Description "Related url description"
                      :URL ""
                      :URLContentType "DistributionURL"
                      :Type "GET DATA"}]}
                   :umm-json)
          response (ingest/validate-concept format {:validate-keywords true})]
      (is (= {:status 422
              :errors [{:path ["RelatedUrls" 0 "GetData" "Format"]
                        :errors [(str "Format [8-track tape] was not a valid keyword.")]}
                       {:path ["ArchiveAndDistributionInformation"
                               "FileDistributionInformation"
                               0]
                        :errors [(str "Format [8-track tape] was not a valid keyword.")]}
                       {:path ["ArchiveAndDistributionInformation"
                               "FileArchiveInformation"
                               0]
                        :errors [(str "Format [8-track tape] was not a valid keyword.")]}]}
             response)))
    (are3 [attribs]
      (assert-valid-keywords attribs)
      "Valid Case Sensitive"
      {:ArchiveAndDistributionInformation
       {:FileDistributionInformation
        [{:FormatType "Native"
          :AverageFileSize 50
          :AverageFileSizeUnit "MB"
          :Fees "None currently"
          :Format "HDF5"}]
        :FileArchiveInformation
        [{:FormatType "Native"
          :AverageFileSize 50
          :AverageFileSizeUnit "MB"
          :Format "HDF5"}]}
       :RelatedUrls
       [{:Description "Related url description"
         :URL ""
         :URLContentType "DistributionURL"
         :Type "GET DATA"
         :GetData {:Format "HDF5"
                   :Size 10.0
                   :Unit "MB"
                   :Fees "fees"}}]}
      "Valid Case Insensitive"
      {:ArchiveAndDistributionInformation
       {:FileDistributionInformation
        [{:FormatType "Native"
          :AverageFileSize 50
          :AverageFileSizeUnit "MB"
          :Fees "None currently"
          :Format "hdf5"}]
        :FileArchiveInformation
        [{:FormatType "Native"
          :AverageFileSize 50
          :AverageFileSizeUnit "MB"
          :Format "hdf5"}]}
       :RelatedUrls
       [{:Description "Related url description"
         :URL ""
         :URLContentType "DistributionURL"
         :Type "GET DATA"
         :GetData {:Format "hdf5"
                   :Size 10.0
                   :Unit "MB"
                   :Fees "fees"}}]}
      "Valid Case Sensitive"
      {:ArchiveAndDistributionInformation
       {:FileDistributionInformation
        [{:FormatType "Native"
          :AverageFileSize 50
          :AverageFileSizeUnit "MB"
          :Fees "None currently"
          :Format "JPEG"}]
        :FileArchiveInformation
        [{:FormatType "Native"
          :AverageFileSize 50
          :AverageFileSizeUnit "MB"
          :Format "JPEG"}]}
       :RelatedUrls
       [{:Description "Related url description"
         :URL ""
         :URLContentType "DistributionURL"
         :Type "GET DATA"
         :GetData {:Format "JPEG"
                   :Size 10.0
                   :Unit "MB"
                   :Fees "fees"}}]})
    ;; Test that correct but missmatched enums are not allowed
    (are3 [attribs]
      (assert-invalid-keywords
       attribs
       ["RelatedUrls" 0]
       [(msg/related-url-content-type-type-subtype-not-matching-kms-keywords
         (first (:RelatedUrls attribs)))])
      "- Missmatched ContentType and Type/Subtype pair"
      {:ArchiveAndDistributionInformation
       {:FileDistributionInformation [{:Format "hdf5"}]
        :FileArchiveInformation [{:Format "hdf5"}]}
       :RelatedUrls
       [{:Description "Related url description"
         :URL ""
         :URLContentType "DistributionURL" ; wrong enum in this context
         :Type "GET RELATED VISUALIZATION"
         :Subtype "MAP"}]}
      "- Missmatched ContentType/Type pair and Subtype"
      {:ArchiveAndDistributionInformation
       {:FileDistributionInformation [{:Format "HDF5"}]
        :FileArchiveInformation [{:Format "HDF5"}]}
       :RelatedUrls
       [{:Description "Related url description"
         :URL ""
         :URLContentType "VisualizationURL"
         :Type "GET RELATED VISUALIZATION"
         :Subtype "HITIDE"}]} ; wrong enum in this context
      "- Missmatched Type from ContentType/Subtype pair"
      {:ArchiveAndDistributionInformation
       {:FileDistributionInformation [{:Format "JPEG"}]
        :FileArchiveInformation [{:Format "JPEG"}]}
       :RelatedUrls
       [{:Description "Related url description"
         :URL ""
         :URLContentType "VisualizationURL"
         :Type "DOWNLOAD SOFTWARE" ; wrong enum in this context
         :Subtype "MAP"}]})
  (testing "Project keyword validation"
    (are2 [short-name long-name]
      (assert-invalid-keywords
       {:Projects [(assoc (data-umm-cmn/project short-name "") :LongName long-name)]}
       ["Projects" 0]
       [(format (str "Project short name [%s] and long name [%s]"
                     " was not a valid keyword combination.")
                short-name (str long-name))])
      "Invalid short name"
      "foo" "European Digital Archive of Soil Maps"
      "Invalid with nil long name"
      "foo" nil
      "Invalid long name"
      "EUDASM" "foo"
      "Long name was nil in KMS"
      "EUCREX-94" "foo"
      "Invalid combination"
      "SEDAC/GISS CROP-CLIM DBQ" "European Digital Archive of Soil Maps")
    (are2 [short-name long-name]
      (assert-valid-keywords
       {:Projects [(assoc (data-umm-cmn/project short-name "") :LongName long-name)]})
      "Exact match"
      "EUDASM" "European Digital Archive of Soil Maps"
      "Nil long name in project and in KMS"
      "EUCREX-94" nil
      "Case Insensitive"
      "EUDaSM" "European DIgItal ArchIve of SoIl MAps"))
  (testing "Platform keyword validation"
    (are2 [short-name long-name type]
      (assert-invalid-keywords
       {:Platforms [(data-umm-cmn/platform {:ShortName short-name
                                            :LongName long-name
                                            :Type type})]}
       ["Platforms" 0]
       [(format (str "Platform short name [%s], long name [%s], and type [%s]"
                     " was not a valid keyword combination.")
                short-name long-name type)])
      "Invalid short name"
      "foo" "Airbus A340-600" "Jet"
      "Long name is nil in KMS"
      "AIRCRAFT" "Airbus A340-600" "Aircraft"
      "Invalid long name"
      "DMSP 5B/F3" "foo" "Earth Observation Satellites"
      "Invalid long name"
      "DMSP 5B/F3" "Defense Meteorological Satellite Program-F3" "foo"
      "Invalid combination"
      "DMSP 5B/F3" "Airbus A340-600" "Earth Observation Satellites"
      ;; CMR-4400
      "Long name is in Platform and nil in KMS"
      "ALTUS" "foo" "Aircraft")
    (are2 [short-name long-name type]
      (assert-valid-keywords
       {:Platforms [(data-umm-cmn/platform {:ShortName short-name
                                            :LongName long-name
                                            :Type type})]})
      "Exact match"
      "A340-600" "Airbus A340-600" "Jet"
      "Case Insensitive"
      "a340-600" "aiRBus A340-600" "jET"
      ;; Next three scenarios are for CMR-4400
      "Long name is in Platform and KMS"
      "B-200" "Beechcraft King Air B-200" "Propeller"
      "Long name is nil in Platform and nil in KMS"
      "CESSNA 188" nil "Propeller"
      "Long name is nil in Platform and not nil in KMS"
      "DHC-3" nil "Propeller"))
  (testing "DataCenter keyword validation"
    (testing "Invalid short name"
      (let [dc (data-umm-cmn/data-center {:Roles ["ARCHIVER"]
                                          :ShortName "AARHUS-HYDRO-Invalid"
                                          :LongName "Hydrogeophysics Group, Aarhus University "})]
        (assert-invalid-keywords
         {:DataCenters [dc]}
         ["DataCenters" 0]
         [(msg/data-center-not-matches-kms-keywords dc)])))
    (are3 [attribs]
      (let [dc (data-umm-cmn/data-center attribs)]
        (assert-valid-keywords {:DataCenters [dc]}))
      "Valid Case Sensitive"
      {:Roles ["ARCHIVER"]
       :ShortName "AARHUS-HYDRO"
       :LongName "Hydrogeophysics Group, Aarhus University "}
      "Valid Case Insensitive"
      {:Roles ["ARCHIVER"]
       :ShortName "aArHUS-HYDRO"
       :LongName "hYdrogeophysics Group, Aarhus University "}
      "Invalid long name is ok"
      {:Roles ["ARCHIVER"]
       :ShortName "AARHUS-HYDRO"
       :LongName "Hydrogeophysics Group, Aarhus University Invalid"}
      "Nil long name is ok"
      {:Roles ["ARCHIVER"]
       :ShortName "AARHUS-HYDRO"}))
  (testing "DirectoryName keyword validation"
    (are3 [attribs]
      (let [dn (data-umm-c/directory-name attribs)]
        (assert-invalid-keywords
         {:DirectoryNames [dn]}
         ["DirectoryNames" 0]
         [(msg/directory-name-not-matches-kms-keywords dn)]))
      "Invalid short name"
      {:ShortName "SN Invalid"
       :LongName "LN NOT VALIDATED"})
    (are3 [attribs]
      (let [dn (data-umm-c/directory-name attribs)]
        (assert-valid-keywords {:DirectoryNames [dn]}))
      "Valid Case Sensitive"
      {:ShortName "GOSIC/GTOS"
       :LongName "LN NOT VALIDATED "}
      "Valid Case Insensitive"
      {:ShortName "gOSIC/GtOS"
       :LongName "LN NOT VALIDATED"}))
  (testing "ISOTopicCategories keyword validation"
    (are3 [itc]
      (assert-invalid-keywords
       {:ISOTopicCategories [itc]}
       ["IsoTopicCategories" 0]
       [(msg/iso-topic-category-not-matches-kms-keywords itc)])
      "Invalid ISOTopicCategory"
      "Invalid ISOTopicCategory")
    (are3 [itc]
      (assert-valid-keywords {:ISOTopicCategories [itc]})
      "Valid Case Sensitive"
      "BIOTA"
      "Valid Case Insensitive"
      "bIoTa"))
  (testing "Instrument keyword validation"
    (are2 [short-name long-name]
      (assert-invalid-keywords
       {:Platforms
        [(data-umm-cmn/platform
          {:ShortName "A340-600"
           :LongName "Airbus A340-600"
           :Type "Jet"
           :Instruments [(data-umm-cmn/instrument {:ShortName short-name
                                                   :LongName long-name})]})]}
       ["Platforms" 0 "Instruments" 0]
       [(format (str "Instrument short name [%s] and long name [%s]"
                     " was not a valid keyword combination.")
                short-name long-name)])
      "Invalid short name"
      "foo" "Airborne Topographic Mapper"
      "Long name is nil in KMS"
      "ACOUSTIC SOUNDERS" "foo"
      "Invalid long name"
      "ATM" "foo"
      "Invalid combination"
      "ATM" "Land, Vegetation, and Ice Sensor")
    (are2 [short-name long-name]
      (assert-valid-keywords
       {:Platforms
        [(data-umm-cmn/platform
          {:ShortName "A340-600"
           :LongName "Airbus A340-600"
           :Type "Jet"
           :Instruments [(data-umm-cmn/instrument {:ShortName short-name
                                                   :LongName long-name})]})]})
      "Exact match"
      "ATM" "Airborne Topographic Mapper"
      "Nil long name in project and in KMS"
      "ACOUSTIC SOUNDERS" nil
      "Case Insensitive"
      "Atm" "aIRBORNE Topographic Mapper"))
  (testing "Science Keyword validation"
    (are [attribs]
      (let [sk (data-umm-cmn/science-keyword attribs)]
        (assert-invalid-keywords
         {:ScienceKeywords [sk]}
         ["ScienceKeywords" 0]
         [(msg/science-keyword-not-matches-kms-keywords attribs)]))
      {:Category "foo"
       :Topic "DATA ANALYSIS AND VISUALIZATION"
       :Term "GEOGRAPHIC INFORMATION SYSTEMS"}
      {:Category "EARTH SCIENCE SERVICES"
       :Topic "foo"
       :Term "GEOGRAPHIC INFORMATION SYSTEMS"}
      {:Category "EARTH SCIENCE SERVICES"
       :Topic "DATA ANALYSIS AND VISUALIZATION"
       :Term "foo"}
      {:Category "EARTH SCIENCE SERVICES"
       :Topic "DATA ANALYSIS AND VISUALIZATION"
       :Term "GEOGRAPHIC INFORMATION SYSTEMS"
       :VariableLevel1 "foo"}
      {:Category "EARTH SCIENCE"
       :Topic "ATMOSPHERE"
       :Term "AEROSOLS"
       :VariableLevel1 "AEROSOL OPTICAL DEPTH/THICKNESS"
       :VariableLevel2 "foo"}
      {:Category "EARTH SCIENCE"
       :Topic "ATMOSPHERE"
       :Term "ATMOSPHERIC TEMPERATURE"
       :VariableLevel1 "SURFACE TEMPERATURE"
       :VariableLevel2 "MAXIMUM/MINIMUM TEMPERATURE"
       :VariableLevel3 "foo"}
      ;; Invalid combination. Topic is valid but not with these other Terms
      {:Category "EARTH SCIENCE SERVICES"
       :Topic "ATMOSPHERE"
       :Term "GEOGRAPHIC INFORMATION SYSTEMS"})
    (are [attribs]
      (assert-valid-keywords {:ScienceKeywords [(data-umm-cmn/science-keyword attribs)]})
      {:Category "EARTH SCIENCE SERVICES"
       :Topic "DATA ANALYSIS AND VISUALIZATION"
       :Term "GEOGRAPHIC INFORMATION SYSTEMS"}
      {:Category "EARTH SCIENCE SERVICES"
       :Topic "DATA ANALYSIS AND VISUALIZATION"
       :Term "GEOGRAPHIC INFORMATION SYSTEMS"
       :VariableLevel1 "MOBILE GEOGRAPHIC INFORMATION SYSTEMS"}
      {:Category "EARTH SCIENCE"
       :Topic "ATMOSPHERE"
       :Term "AEROSOLS"
       :VariableLevel1 "AEROSOL OPTICAL DEPTH/THICKNESS"
       :VariableLevel2 "ANGSTROM EXPONENT"}
      {:Category "EARTH SCIENCE"
       :Topic "ATMOSPHERE"
       :Term "ATMOSPHERIC TEMPERATURE"
       :VariableLevel1 "SURFACE TEMPERATURE"
       :VariableLevel2 "MAXIMUM/MINIMUM TEMPERATURE"
       :VariableLevel3 "24 HOUR MAXIMUM TEMPERATURE"
       :DetailedVariable "This is ignored"}
      {:Category "EARTH SCiENCE"
       :Topic "ATMOsPHERE"
       :Term "ATMOSpHERIC TEMPERATURE"
       :VariableLevel1 "SuRFACE TEMPERATURE"
       :VariableLevel2 "MAXiMUM/MiNiMUM TEMPERATURE"
       :VariableLevel3 "24 HOUR MAXiMUM TEMPERATURE"}))
  (testing "Location Keyword validation"
    (are3 [attribs]
      (let [lk (data-umm-c/location-keyword attribs)]
        (assert-valid-keywords {:LocationKeywords [lk]}))
      "Valid location keyword"
      {:Category "CONTINENT"
       :Type "AFRICA"
       :Subregion1 "CENTRAL AFRICA"}
      "Valid full Location Keyword"
      {:Category "CONTINENT"
       :Type "ASIA"
       :Subregion1 "WESTERN ASIA"
       :Subregion2 "MIDDLE EAST"
       :Subregion3 "GAZA STRIP"
       :DetailedLocation "Testing Detailed Location"})
    (are3 [attribs]
      (let [lk (data-umm-c/location-keyword attribs)]
        (assert-invalid-keywords
         {:LocationKeywords [lk]}
         ["LocationKeywords" 0]
         [(msg/location-keyword-not-matches-kms-keywords attribs)]))
      "Invalid Type"
      {:Category "CONTINENT"
       :Type "GAZA"
       :Subregion1 "WESTERN ASIA"
       :Subregion2 "MIDDLE EAST"
       :Subregion3 "GAZA STRIP"
       :DetailedLocation "Testing Detailed Location"}
      "Invalid Category"
      {:Category "XYZ"}
      "Invalid Subregion"
      {:Category "CONTINENT"
       :Type "AFRICA"
       :Subregion1 "WESTERN ASIA"}))))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/7a217ca3758f9f7b2d894f31d0e23c42819cc3e0/system-int-test/test/cmr/system_int_test/ingest/collection_keyword_validation_test.clj | clojure | For a list of the valid keywords during testing see dev-system/resources/kms_examples
Test that correct but missmatched enums are not allowed
wrong enum in this context
wrong enum in this context
wrong enum in this context | (ns cmr.system-int-test.ingest.collection-keyword-validation-test
"CMR Ingest keyword validation integration tests"
(:require
[clojure.test :refer :all]
[cmr.common.util :refer [are2 are3]]
[cmr.ingest.services.messages :as msg]
[cmr.system-int-test.data2.core :as d]
[cmr.system-int-test.data2.umm-spec-collection :as data-umm-c]
[cmr.system-int-test.data2.umm-spec-common :as data-umm-cmn]
[cmr.system-int-test.utils.ingest-util :as ingest]))
(defn assert-invalid
([coll-attributes field-path errors]
(assert-invalid coll-attributes field-path errors nil))
([coll-attributes field-path errors options]
(let [response (d/ingest-umm-spec-collection
"PROV1"
(data-umm-c/collection coll-attributes)
(merge {:allow-failure? true} options))]
(is (= {:status 422
:errors [{:path field-path
:errors errors}]}
(select-keys response [:status :errors]))))))
(defn assert-valid
([coll-attributes]
(assert-valid coll-attributes nil))
([coll-attributes options]
(let [collection (assoc (data-umm-c/collection coll-attributes)
:native-id (:native-id coll-attributes))
provider-id (get coll-attributes :provider-id "PROV1")
response (d/ingest-umm-spec-collection provider-id collection options)]
(is (#{{:status 200} {:status 201}} (select-keys response [:status :errors]))))))
(defn assert-invalid-keywords
[coll-attributes field-path errors]
(assert-invalid coll-attributes field-path errors {:validate-keywords true
:format :umm-json
:accept-format :json}))
(defn assert-valid-keywords
[coll-attributes]
(assert-valid coll-attributes {:validate-keywords true
:format :umm-json
:accept-format :json}))
(use-fixtures :each (ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2"}))
(deftest collection-keyword-validation-test
(testing "Keyword validation errors using validation endpoint"
(let [concept (data-umm-c/collection-concept
{:Platforms [(data-umm-cmn/platform {:ShortName "foo"
:LongName "Airbus A340-600"
:Type "Jet"})]
:DataCenters [(data-umm-cmn/data-center {:Roles ["ARCHIVER"]
:ShortName "SomeCenter"})]})
response (ingest/validate-concept concept {:validate-keywords true})]
(is (= {:status 422
:errors [{:path ["Platforms" 0]
:errors [(str "Platform short name [foo], long name [Airbus A340-600], "
"and type [Jet] was not a valid keyword combination.")]}
{:path ["DataCenters" 0]
:errors [(str "Data center short name [SomeCenter] was not a valid "
"keyword.")]}]}
response))))
(testing "Keyword validation warnings using validation endpoint"
(let [concept (data-umm-c/collection-concept
{:Platforms [(data-umm-cmn/platform {:ShortName "foo"
:LongName "Airbus A340-600"
:Type "Jet"})]
:DataCenters [(data-umm-cmn/data-center {:Roles ["ARCHIVER"]
:ShortName "SomeCenter"
:ContactInformation {:RelatedUrls [{:URL ""
:Description "description"
:Type "GET DATA"
:URLContentType "DistributionURL"
:Subtype "DIRECT DOWNLOAD"
:GetData {:Format "RelatedUrls: BadFormat1"
:MimeType "RelatedUrls: BadMimeType1"
:Size 0.0
:Unit "KB"}}]}})]
:RelatedUrls [{:URL ""
:Description "Bad data, throws warning"
:Type "GET DATA"
:URLContentType "DistributionURL"
:Subtype "DIRECT DOWNLOAD"
:GetData {:Format "RelatedUrls: BadFormat2"
:MimeType "RelatedUrls: BadMimeType2"
:Size 0.0
:Unit "KB"}}
{:URL ""
:Description "No Warning: KMS only content type (not in valid-url-content-types-map)"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "DATA PRODUCT SPECIFICATION"}]}
:umm-json)
response (ingest/validate-concept concept {:validate-keywords false})]
(is (= {:status 200
:warnings "After translating item to UMM-C the metadata had the following issue(s): [:DataCenters 0 :ContactInformation :RelatedUrls 0] URLContentType must be DataCenterURL for DataCenter RelatedUrls;; [:RelatedUrls 0 :GetData :MimeType] MimeType [RelatedUrls: BadMimeType2] was not a valid keyword.;; [:Platforms 0] Platform short name [foo], long name [Airbus A340-600], and type [Jet] was not a valid keyword combination.;; [:DataCenters 0] Data center short name [SomeCenter] was not a valid keyword.;; [:DataCenters 0 :ContactInformation :RelatedUrls 0 :GetData :MimeType] MimeType [RelatedUrls: BadMimeType1] was not a valid keyword."}
response))))
(testing "ArchiveAndDistributionInformation and RelatedUrls keyword validation"
(let [format (data-umm-c/collection-concept
{:ArchiveAndDistributionInformation
{:FileDistributionInformation
[(data-umm-c/file-distribution-information
{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Fees "None currently"
:Format "8-track tape"})]
:FileArchiveInformation
[(data-umm-c/file-archive-information
{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Format "8-track tape"})]}
:RelatedUrls
[{:Description "Related url description"
:URL ""
:URLContentType "DistributionURL"
:Type "GET DATA"
:GetData {:Format "8-track tape"
:Size 10.0
:Unit "MB"
:Fees "fees"}}
{:Description "Related url description"
:URL ""
:URLContentType "DistributionURL"
:Type "GET DATA"}]}
:umm-json)
response (ingest/validate-concept format {:validate-keywords true})]
(is (= {:status 422
:errors [{:path ["RelatedUrls" 0 "GetData" "Format"]
:errors [(str "Format [8-track tape] was not a valid keyword.")]}
{:path ["ArchiveAndDistributionInformation"
"FileDistributionInformation"
0]
:errors [(str "Format [8-track tape] was not a valid keyword.")]}
{:path ["ArchiveAndDistributionInformation"
"FileArchiveInformation"
0]
:errors [(str "Format [8-track tape] was not a valid keyword.")]}]}
response)))
(are3 [attribs]
(assert-valid-keywords attribs)
"Valid Case Sensitive"
{:ArchiveAndDistributionInformation
{:FileDistributionInformation
[{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Fees "None currently"
:Format "HDF5"}]
:FileArchiveInformation
[{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Format "HDF5"}]}
:RelatedUrls
[{:Description "Related url description"
:URL ""
:URLContentType "DistributionURL"
:Type "GET DATA"
:GetData {:Format "HDF5"
:Size 10.0
:Unit "MB"
:Fees "fees"}}]}
"Valid Case Insensitive"
{:ArchiveAndDistributionInformation
{:FileDistributionInformation
[{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Fees "None currently"
:Format "hdf5"}]
:FileArchiveInformation
[{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Format "hdf5"}]}
:RelatedUrls
[{:Description "Related url description"
:URL ""
:URLContentType "DistributionURL"
:Type "GET DATA"
:GetData {:Format "hdf5"
:Size 10.0
:Unit "MB"
:Fees "fees"}}]}
"Valid Case Sensitive"
{:ArchiveAndDistributionInformation
{:FileDistributionInformation
[{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Fees "None currently"
:Format "JPEG"}]
:FileArchiveInformation
[{:FormatType "Native"
:AverageFileSize 50
:AverageFileSizeUnit "MB"
:Format "JPEG"}]}
:RelatedUrls
[{:Description "Related url description"
:URL ""
:URLContentType "DistributionURL"
:Type "GET DATA"
:GetData {:Format "JPEG"
:Size 10.0
:Unit "MB"
:Fees "fees"}}]})
(are3 [attribs]
(assert-invalid-keywords
attribs
["RelatedUrls" 0]
[(msg/related-url-content-type-type-subtype-not-matching-kms-keywords
(first (:RelatedUrls attribs)))])
"- Missmatched ContentType and Type/Subtype pair"
{:ArchiveAndDistributionInformation
{:FileDistributionInformation [{:Format "hdf5"}]
:FileArchiveInformation [{:Format "hdf5"}]}
:RelatedUrls
[{:Description "Related url description"
:URL ""
:Type "GET RELATED VISUALIZATION"
:Subtype "MAP"}]}
"- Missmatched ContentType/Type pair and Subtype"
{:ArchiveAndDistributionInformation
{:FileDistributionInformation [{:Format "HDF5"}]
:FileArchiveInformation [{:Format "HDF5"}]}
:RelatedUrls
[{:Description "Related url description"
:URL ""
:URLContentType "VisualizationURL"
:Type "GET RELATED VISUALIZATION"
"- Missmatched Type from ContentType/Subtype pair"
{:ArchiveAndDistributionInformation
{:FileDistributionInformation [{:Format "JPEG"}]
:FileArchiveInformation [{:Format "JPEG"}]}
:RelatedUrls
[{:Description "Related url description"
:URL ""
:URLContentType "VisualizationURL"
:Subtype "MAP"}]})
(testing "Project keyword validation"
(are2 [short-name long-name]
(assert-invalid-keywords
{:Projects [(assoc (data-umm-cmn/project short-name "") :LongName long-name)]}
["Projects" 0]
[(format (str "Project short name [%s] and long name [%s]"
" was not a valid keyword combination.")
short-name (str long-name))])
"Invalid short name"
"foo" "European Digital Archive of Soil Maps"
"Invalid with nil long name"
"foo" nil
"Invalid long name"
"EUDASM" "foo"
"Long name was nil in KMS"
"EUCREX-94" "foo"
"Invalid combination"
"SEDAC/GISS CROP-CLIM DBQ" "European Digital Archive of Soil Maps")
(are2 [short-name long-name]
(assert-valid-keywords
{:Projects [(assoc (data-umm-cmn/project short-name "") :LongName long-name)]})
"Exact match"
"EUDASM" "European Digital Archive of Soil Maps"
"Nil long name in project and in KMS"
"EUCREX-94" nil
"Case Insensitive"
"EUDaSM" "European DIgItal ArchIve of SoIl MAps"))
(testing "Platform keyword validation"
(are2 [short-name long-name type]
(assert-invalid-keywords
{:Platforms [(data-umm-cmn/platform {:ShortName short-name
:LongName long-name
:Type type})]}
["Platforms" 0]
[(format (str "Platform short name [%s], long name [%s], and type [%s]"
" was not a valid keyword combination.")
short-name long-name type)])
"Invalid short name"
"foo" "Airbus A340-600" "Jet"
"Long name is nil in KMS"
"AIRCRAFT" "Airbus A340-600" "Aircraft"
"Invalid long name"
"DMSP 5B/F3" "foo" "Earth Observation Satellites"
"Invalid long name"
"DMSP 5B/F3" "Defense Meteorological Satellite Program-F3" "foo"
"Invalid combination"
"DMSP 5B/F3" "Airbus A340-600" "Earth Observation Satellites"
CMR-4400
"Long name is in Platform and nil in KMS"
"ALTUS" "foo" "Aircraft")
(are2 [short-name long-name type]
(assert-valid-keywords
{:Platforms [(data-umm-cmn/platform {:ShortName short-name
:LongName long-name
:Type type})]})
"Exact match"
"A340-600" "Airbus A340-600" "Jet"
"Case Insensitive"
"a340-600" "aiRBus A340-600" "jET"
Next three scenarios are for CMR-4400
"Long name is in Platform and KMS"
"B-200" "Beechcraft King Air B-200" "Propeller"
"Long name is nil in Platform and nil in KMS"
"CESSNA 188" nil "Propeller"
"Long name is nil in Platform and not nil in KMS"
"DHC-3" nil "Propeller"))
(testing "DataCenter keyword validation"
(testing "Invalid short name"
(let [dc (data-umm-cmn/data-center {:Roles ["ARCHIVER"]
:ShortName "AARHUS-HYDRO-Invalid"
:LongName "Hydrogeophysics Group, Aarhus University "})]
(assert-invalid-keywords
{:DataCenters [dc]}
["DataCenters" 0]
[(msg/data-center-not-matches-kms-keywords dc)])))
(are3 [attribs]
(let [dc (data-umm-cmn/data-center attribs)]
(assert-valid-keywords {:DataCenters [dc]}))
"Valid Case Sensitive"
{:Roles ["ARCHIVER"]
:ShortName "AARHUS-HYDRO"
:LongName "Hydrogeophysics Group, Aarhus University "}
"Valid Case Insensitive"
{:Roles ["ARCHIVER"]
:ShortName "aArHUS-HYDRO"
:LongName "hYdrogeophysics Group, Aarhus University "}
"Invalid long name is ok"
{:Roles ["ARCHIVER"]
:ShortName "AARHUS-HYDRO"
:LongName "Hydrogeophysics Group, Aarhus University Invalid"}
"Nil long name is ok"
{:Roles ["ARCHIVER"]
:ShortName "AARHUS-HYDRO"}))
(testing "DirectoryName keyword validation"
(are3 [attribs]
(let [dn (data-umm-c/directory-name attribs)]
(assert-invalid-keywords
{:DirectoryNames [dn]}
["DirectoryNames" 0]
[(msg/directory-name-not-matches-kms-keywords dn)]))
"Invalid short name"
{:ShortName "SN Invalid"
:LongName "LN NOT VALIDATED"})
(are3 [attribs]
(let [dn (data-umm-c/directory-name attribs)]
(assert-valid-keywords {:DirectoryNames [dn]}))
"Valid Case Sensitive"
{:ShortName "GOSIC/GTOS"
:LongName "LN NOT VALIDATED "}
"Valid Case Insensitive"
{:ShortName "gOSIC/GtOS"
:LongName "LN NOT VALIDATED"}))
(testing "ISOTopicCategories keyword validation"
(are3 [itc]
(assert-invalid-keywords
{:ISOTopicCategories [itc]}
["IsoTopicCategories" 0]
[(msg/iso-topic-category-not-matches-kms-keywords itc)])
"Invalid ISOTopicCategory"
"Invalid ISOTopicCategory")
(are3 [itc]
(assert-valid-keywords {:ISOTopicCategories [itc]})
"Valid Case Sensitive"
"BIOTA"
"Valid Case Insensitive"
"bIoTa"))
(testing "Instrument keyword validation"
(are2 [short-name long-name]
(assert-invalid-keywords
{:Platforms
[(data-umm-cmn/platform
{:ShortName "A340-600"
:LongName "Airbus A340-600"
:Type "Jet"
:Instruments [(data-umm-cmn/instrument {:ShortName short-name
:LongName long-name})]})]}
["Platforms" 0 "Instruments" 0]
[(format (str "Instrument short name [%s] and long name [%s]"
" was not a valid keyword combination.")
short-name long-name)])
"Invalid short name"
"foo" "Airborne Topographic Mapper"
"Long name is nil in KMS"
"ACOUSTIC SOUNDERS" "foo"
"Invalid long name"
"ATM" "foo"
"Invalid combination"
"ATM" "Land, Vegetation, and Ice Sensor")
(are2 [short-name long-name]
(assert-valid-keywords
{:Platforms
[(data-umm-cmn/platform
{:ShortName "A340-600"
:LongName "Airbus A340-600"
:Type "Jet"
:Instruments [(data-umm-cmn/instrument {:ShortName short-name
:LongName long-name})]})]})
"Exact match"
"ATM" "Airborne Topographic Mapper"
"Nil long name in project and in KMS"
"ACOUSTIC SOUNDERS" nil
"Case Insensitive"
"Atm" "aIRBORNE Topographic Mapper"))
(testing "Science Keyword validation"
(are [attribs]
(let [sk (data-umm-cmn/science-keyword attribs)]
(assert-invalid-keywords
{:ScienceKeywords [sk]}
["ScienceKeywords" 0]
[(msg/science-keyword-not-matches-kms-keywords attribs)]))
{:Category "foo"
:Topic "DATA ANALYSIS AND VISUALIZATION"
:Term "GEOGRAPHIC INFORMATION SYSTEMS"}
{:Category "EARTH SCIENCE SERVICES"
:Topic "foo"
:Term "GEOGRAPHIC INFORMATION SYSTEMS"}
{:Category "EARTH SCIENCE SERVICES"
:Topic "DATA ANALYSIS AND VISUALIZATION"
:Term "foo"}
{:Category "EARTH SCIENCE SERVICES"
:Topic "DATA ANALYSIS AND VISUALIZATION"
:Term "GEOGRAPHIC INFORMATION SYSTEMS"
:VariableLevel1 "foo"}
{:Category "EARTH SCIENCE"
:Topic "ATMOSPHERE"
:Term "AEROSOLS"
:VariableLevel1 "AEROSOL OPTICAL DEPTH/THICKNESS"
:VariableLevel2 "foo"}
{:Category "EARTH SCIENCE"
:Topic "ATMOSPHERE"
:Term "ATMOSPHERIC TEMPERATURE"
:VariableLevel1 "SURFACE TEMPERATURE"
:VariableLevel2 "MAXIMUM/MINIMUM TEMPERATURE"
:VariableLevel3 "foo"}
Invalid combination . Topic is valid but not with these other Terms
{:Category "EARTH SCIENCE SERVICES"
:Topic "ATMOSPHERE"
:Term "GEOGRAPHIC INFORMATION SYSTEMS"})
(are [attribs]
(assert-valid-keywords {:ScienceKeywords [(data-umm-cmn/science-keyword attribs)]})
{:Category "EARTH SCIENCE SERVICES"
:Topic "DATA ANALYSIS AND VISUALIZATION"
:Term "GEOGRAPHIC INFORMATION SYSTEMS"}
{:Category "EARTH SCIENCE SERVICES"
:Topic "DATA ANALYSIS AND VISUALIZATION"
:Term "GEOGRAPHIC INFORMATION SYSTEMS"
:VariableLevel1 "MOBILE GEOGRAPHIC INFORMATION SYSTEMS"}
{:Category "EARTH SCIENCE"
:Topic "ATMOSPHERE"
:Term "AEROSOLS"
:VariableLevel1 "AEROSOL OPTICAL DEPTH/THICKNESS"
:VariableLevel2 "ANGSTROM EXPONENT"}
{:Category "EARTH SCIENCE"
:Topic "ATMOSPHERE"
:Term "ATMOSPHERIC TEMPERATURE"
:VariableLevel1 "SURFACE TEMPERATURE"
:VariableLevel2 "MAXIMUM/MINIMUM TEMPERATURE"
:VariableLevel3 "24 HOUR MAXIMUM TEMPERATURE"
:DetailedVariable "This is ignored"}
{:Category "EARTH SCiENCE"
:Topic "ATMOsPHERE"
:Term "ATMOSpHERIC TEMPERATURE"
:VariableLevel1 "SuRFACE TEMPERATURE"
:VariableLevel2 "MAXiMUM/MiNiMUM TEMPERATURE"
:VariableLevel3 "24 HOUR MAXiMUM TEMPERATURE"}))
(testing "Location Keyword validation"
(are3 [attribs]
(let [lk (data-umm-c/location-keyword attribs)]
(assert-valid-keywords {:LocationKeywords [lk]}))
"Valid location keyword"
{:Category "CONTINENT"
:Type "AFRICA"
:Subregion1 "CENTRAL AFRICA"}
"Valid full Location Keyword"
{:Category "CONTINENT"
:Type "ASIA"
:Subregion1 "WESTERN ASIA"
:Subregion2 "MIDDLE EAST"
:Subregion3 "GAZA STRIP"
:DetailedLocation "Testing Detailed Location"})
(are3 [attribs]
(let [lk (data-umm-c/location-keyword attribs)]
(assert-invalid-keywords
{:LocationKeywords [lk]}
["LocationKeywords" 0]
[(msg/location-keyword-not-matches-kms-keywords attribs)]))
"Invalid Type"
{:Category "CONTINENT"
:Type "GAZA"
:Subregion1 "WESTERN ASIA"
:Subregion2 "MIDDLE EAST"
:Subregion3 "GAZA STRIP"
:DetailedLocation "Testing Detailed Location"}
"Invalid Category"
{:Category "XYZ"}
"Invalid Subregion"
{:Category "CONTINENT"
:Type "AFRICA"
:Subregion1 "WESTERN ASIA"}))))
|
353399b38a3de3594ae81bfa3b479aaa1f543c2ee5afc7444a03f9d3fd0a2183 | mbenelli/klio | simple-sort.scm | ;; simple-sort.scm - A simple implementation of merge sort.
;;
Copyright ( c ) 2011 by < >
All Rights Reserved .
;;
Based on Sort - r1 from Gambit 's Dumping Ground . Original copyright :
;;
;;;============================================================================
File : " Sort.scm " , Time - stamp : < 2008 - 03 - 18 15:21:35 feeley >
Copyright ( c ) 2006 - 2008 by , All Rights Reserved .
;;;============================================================================
;;; (sort sequence less?) sorts a sequence (a list or vector) in a
;;; non-destructive way ordered using the comparison predicate less?.
;;;
;;; Sample use:
;;;
( sort ( vector 3 1 4 1 5 ) > ) = > # ( 5 4 3 1 1 )
;;;============================================================================
(##namespace ("simple-sort#"))
(##include "~~lib/gambit#.scm")
(define (sort sequence less?)
(declare (standard-bindings) (not safe))
(define (sort-list lst less?)
(define (mergesort lst)
(define (merge lst1 lst2)
(cond ((not (pair? lst1))
lst2)
((not (pair? lst2))
lst1)
(else
(let ((e1 (car lst1)) (e2 (car lst2)))
(if (less? e1 e2)
(cons e1 (merge (cdr lst1) lst2))
(cons e2 (merge lst1 (cdr lst2))))))))
(define (split lst)
(if (or (not (pair? lst)) (not (pair? (cdr lst))))
lst
(cons (car lst) (split (cddr lst)))))
(if (or (not (pair? lst)) (not (pair? (cdr lst))))
lst
(let* ((lst1 (mergesort (split lst)))
(lst2 (mergesort (split (cdr lst)))))
(merge lst1 lst2))))
(mergesort lst))
(cond ((not (procedure? less?))
(error "procedure expected"))
((or (null? sequence)
(pair? sequence))
(sort-list sequence less?))
((vector? sequence)
(list->vector (sort-list (vector->list sequence) less?)))
(else
(error "vector or list expected"))))
;;;============================================================================
| null | https://raw.githubusercontent.com/mbenelli/klio/33c11700d6080de44a22a27a5147f97899583f6e/klio/simple-sort.scm | scheme | simple-sort.scm - A simple implementation of merge sort.
============================================================================
============================================================================
(sort sequence less?) sorts a sequence (a list or vector) in a
non-destructive way ordered using the comparison predicate less?.
Sample use:
============================================================================
============================================================================ | Copyright ( c ) 2011 by < >
All Rights Reserved .
Based on Sort - r1 from Gambit 's Dumping Ground . Original copyright :
File : " Sort.scm " , Time - stamp : < 2008 - 03 - 18 15:21:35 feeley >
Copyright ( c ) 2006 - 2008 by , All Rights Reserved .
( sort ( vector 3 1 4 1 5 ) > ) = > # ( 5 4 3 1 1 )
(##namespace ("simple-sort#"))
(##include "~~lib/gambit#.scm")
(define (sort sequence less?)
(declare (standard-bindings) (not safe))
(define (sort-list lst less?)
(define (mergesort lst)
(define (merge lst1 lst2)
(cond ((not (pair? lst1))
lst2)
((not (pair? lst2))
lst1)
(else
(let ((e1 (car lst1)) (e2 (car lst2)))
(if (less? e1 e2)
(cons e1 (merge (cdr lst1) lst2))
(cons e2 (merge lst1 (cdr lst2))))))))
(define (split lst)
(if (or (not (pair? lst)) (not (pair? (cdr lst))))
lst
(cons (car lst) (split (cddr lst)))))
(if (or (not (pair? lst)) (not (pair? (cdr lst))))
lst
(let* ((lst1 (mergesort (split lst)))
(lst2 (mergesort (split (cdr lst)))))
(merge lst1 lst2))))
(mergesort lst))
(cond ((not (procedure? less?))
(error "procedure expected"))
((or (null? sequence)
(pair? sequence))
(sort-list sequence less?))
((vector? sequence)
(list->vector (sort-list (vector->list sequence) less?)))
(else
(error "vector or list expected"))))
|
b763698cb8aedb8524f017431188700f9c7a1b00ff9897d6cdc8989b7b76d59e | footprintanalytics/footprint-web | impl.clj | (ns metabase.util.i18n.impl
"Lower-level implementation functions for `metabase.util.i18n`. Most of this is not meant to be used directly; use the
functions and macros in `metabase.util.i18n` instead."
(:require [clojure.java.io :as io]
[clojure.string :as str]
[clojure.tools.logging :as log]
[clojure.tools.reader.edn :as edn]
[metabase.plugins.classloader :as classloader]
[metabase.util.i18n.plural :as i18n.plural]
[potemkin.types :as p.types])
(:import java.text.MessageFormat
java.util.Locale
org.apache.commons.lang3.LocaleUtils))
(p.types/defprotocol+ CoerceToLocale
"Protocol for anything that can be coerced to a `java.util.Locale`."
(locale ^java.util.Locale [this]
"Coerce `this` to a `java.util.Locale`."))
(defn normalized-locale-string
"Normalize a locale string to the canonical format.
(normalized-locale-string \"EN-US\") ;-> \"en_US\"
Returns `nil` for invalid strings -- you can use this to check whether a String is valid."
^String [s]
{:pre [((some-fn nil? string?) s)]}
(when (string? s)
(when-let [[_ language country] (re-matches #"^(\w{2})(?:[-_](\w{2}))?$" s)]
(let [language (str/lower-case language)]
(if country
(str language \_ (some-> country str/upper-case))
language)))))
(extend-protocol CoerceToLocale
nil
(locale [_] nil)
Locale
(locale [this] this)
String
(locale [^String s]
(some-> (normalized-locale-string s) LocaleUtils/toLocale))
Support namespaced keywords like ` : en / US ` and ` : en / UK ` because we can
clojure.lang.Keyword
(locale [this]
(locale (if-let [namespce (namespace this)]
(str namespce \_ (name this))
(name this)))))
(defn available-locale?
"True if `locale` (a string, keyword, or `Locale`) is a valid locale available on this system. Normalizes args
automatically."
[locale-or-name]
(boolean
(when-let [locale (locale locale-or-name)]
(LocaleUtils/isAvailableLocale locale))))
(defn- available-locale-names*
[]
(log/info "Reading available locales from locales.clj...")
(some-> (io/resource "locales.clj") slurp edn/read-string :locales (->> (apply sorted-set))))
(def ^{:arglists '([])} available-locale-names
"Return sorted set of available locales, as Strings.
- > # { \"en\ " \"nl\ " \"pt - BR\ " \"zh\ " } "
(let [locales (delay (available-locale-names*))]
(fn [] @locales)))
(defn- find-fallback-locale*
^Locale [^Locale a-locale]
(some (fn [locale-name]
(let [try-locale (locale locale-name)]
The language - only Locale is tried first by virtue of the
;; list being sorted.
(when (and (= (.getLanguage try-locale) (.getLanguage a-locale))
(not (= try-locale a-locale)))
try-locale)))
(available-locale-names)))
(def ^:private ^{:arglists '([a-locale])} find-fallback-locale
(memoize find-fallback-locale*))
(defn fallback-locale
"Find a translated fallback Locale in the following order:
1) If it is a language + country Locale, try the language-only Locale
2) If the language-only Locale isn't translated or the input is a language-only Locale,
find the first language + country Locale we have a translation for.
Return `nil` if no fallback Locale can be found or the input is invalid.
(fallback-locale \"en_US\") ; -> #locale\"en\"
(fallback-locale \"pt\") ; -> #locale\"pt_BR\"
(fallback-locale \"pt_PT\") ; -> #locale\"pt_BR\""
^Locale [locale-or-name]
(when-let [a-locale (locale locale-or-name)]
(find-fallback-locale a-locale)))
(defn- locale-edn-resource
"The resource URL for the edn file containing translations for `locale-or-name`. These files are built by the
scripts in `bin/i18n` from `.po` files from POEditor.
- > # object[java.net . URL \"file:/home / cam / metabase / resources / metabase / es.edn\ " ] "
^java.net.URL [locale-or-name]
(when-let [a-locale (locale locale-or-name)]
(let [locale-name (-> (normalized-locale-string (str a-locale))
(str/replace #"_" "-"))
filename (format "i18n/%s.edn" locale-name)]
(io/resource filename (classloader/the-classloader)))))
(defn- translations* [a-locale]
(when-let [resource (locale-edn-resource a-locale)]
(edn/read-string (slurp resource))))
(def ^:private ^{:arglists '([locale-or-name])} translations
"Fetch a map of original untranslated message format string -> translated message format string for `locale-or-name`
by reading the corresponding EDN resource file. Does not include translations for parent locale(s). Memoized.
(translations \"es\") ;-> {:headers { ... }
:messages {\"Username\" \"Nombre Usuario\", ...}}"
(comp (memoize translations*) locale))
(defn- translated-format-string*
"Find the translated version of `format-string` for `locale-or-name`, or `nil` if none can be found.
Does not search 'parent' (language-only) translations.
`n` is a number used for translations with plural forms, used to compute the index of the translation to
return."
^String [locale-or-name format-string n]
(when (seq format-string)
(when-let [locale (locale locale-or-name)]
(when-let [translations (translations locale)]
(when-let [string-or-strings (get-in translations [:messages format-string])]
(if (string? string-or-strings)
;; Only a singular form defined; ignore `n`
string-or-strings
(if-let [plural-forms-header (get-in translations [:headers "Plural-Forms"])]
(get string-or-strings (i18n.plural/index plural-forms-header n))
;; Fall-back to singular if no header is present
(first string-or-strings))))))))
(defn- translated-format-string
"Find the translated version of `format-string` for `locale-or-name`, or `nil` if none can be found. Searches parent
(language-only) translations if none exist for a language + country locale."
^String [locale-or-name format-string {:keys [n format-string-pl]}]
(when-let [a-locale (locale locale-or-name)]
(or (when (= (.getLanguage a-locale) "en")
(if (or (nil? n) (= n 1))
format-string
format-string-pl))
(translated-format-string* a-locale format-string n)
(when-let [fallback-locale (fallback-locale a-locale)]
(log/tracef "No translated string found, trying fallback locale %s" (pr-str fallback-locale))
(translated-format-string* fallback-locale format-string n))
format-string)))
(defn- message-format ^MessageFormat [locale-or-name ^String format-string pluralization-opts]
(or (when-let [a-locale (locale locale-or-name)]
(when-let [^String translated (translated-format-string a-locale format-string pluralization-opts)]
(MessageFormat. translated a-locale)))
(MessageFormat. format-string)))
(defn translate
"Find the translated version of `format-string` for a `locale-or-name`, then format it. Translates using the resource
bundles generated by the `./bin/i18n/build-translation-resources` script; these live in
`./resources/metabase/Metabase/Messages_<locale>.class`. Attempts to translate with `language-country` Locale if
specified, falling back to `language` (without country), finally falling back to English (i.e., not formatting the
original untranslated `format-string`) if no matching bundles/translations exist, or if translation fails for some
other reason.
`n` is used for strings with plural forms and essentially represents the quantity of items being described by the
translated string. Defaults to 1 (the singular form).
Will attempt to translate `format-string`, but if for some reason we're not able to (such as a typo in the
translated version of the string), log the failure but return the original (untranslated) string. This is a
workaround for translations that, due to a typo, will fail to parse using Java's message formatter.
- > \"deben tener 140 caracteres o menos\ " "
([locale-or-name ^String format-string]
(translate locale-or-name format-string []))
([locale-or-name ^String format-string args]
(translate locale-or-name format-string args {}))
([locale-or-name ^String format-string args pluralization-opts]
(when (seq format-string)
(try
(.format (message-format locale-or-name format-string pluralization-opts) (to-array args))
(catch Throwable e
Not translating this string to prevent an unfortunate stack overflow . If this string happened to be the one
;; that had the typo, we'd just recur endlessly without logging an error.
(log/errorf e "Unable to translate string %s to %s" (pr-str format-string) (str locale-or-name))
(try
(.format (MessageFormat. format-string) (to-array args))
(catch Throwable _
(log/errorf e "Invalid format string %s" (pr-str format-string))
format-string)))))))
;; We can't fetch the system locale until the application DB has been initiailized. Once that's done, we don't need to
do the check anymore -- swapping out the getter fn with the simpler one speeds things up substantially
(def ^:private site-locale-from-setting-fn
(atom
(fn []
(when-let [db-is-set-up? (resolve 'metabase.db/db-is-set-up?)]
(when (and (bound? db-is-set-up?)
(db-is-set-up?))
(when-let [get-value-of-type (resolve 'metabase.models.setting/get-value-of-type)]
(when (bound? get-value-of-type)
(let [f (fn [] (get-value-of-type :string :site-locale))]
(reset! site-locale-from-setting-fn f)
(f)))))))))
(defn site-locale-from-setting
"Fetch the value of the `site-locale` Setting.
When metabase is shutting down, we need to log some messages after the db connection is closed, so we keep around a
cached-site-locale for that purpose."
[]
(let [cached-site-locale (atom "en")]
(try
(let [site-locale (@site-locale-from-setting-fn)]
(reset! cached-site-locale site-locale)
site-locale)
(catch Exception _ @cached-site-locale))))
(defmethod print-method Locale
[locale ^java.io.Writer writer]
((get-method print-dup Locale) locale writer))
(defmethod print-dup Locale
[locale ^java.io.Writer writer]
(.write writer (format "#locale %s" (pr-str (str locale)))))
| null | https://raw.githubusercontent.com/footprintanalytics/footprint-web/d3090d943dd9fcea493c236f79e7ef8a36ae17fc/src/metabase/util/i18n/impl.clj | clojure | use the
-> \"en_US\"
list being sorted.
-> #locale\"en\"
-> #locale\"pt_BR\"
-> #locale\"pt_BR\""
-> {:headers { ... }
Only a singular form defined; ignore `n`
Fall-back to singular if no header is present
these live in
that had the typo, we'd just recur endlessly without logging an error.
We can't fetch the system locale until the application DB has been initiailized. Once that's done, we don't need to | (ns metabase.util.i18n.impl
functions and macros in `metabase.util.i18n` instead."
(:require [clojure.java.io :as io]
[clojure.string :as str]
[clojure.tools.logging :as log]
[clojure.tools.reader.edn :as edn]
[metabase.plugins.classloader :as classloader]
[metabase.util.i18n.plural :as i18n.plural]
[potemkin.types :as p.types])
(:import java.text.MessageFormat
java.util.Locale
org.apache.commons.lang3.LocaleUtils))
(p.types/defprotocol+ CoerceToLocale
"Protocol for anything that can be coerced to a `java.util.Locale`."
(locale ^java.util.Locale [this]
"Coerce `this` to a `java.util.Locale`."))
(defn normalized-locale-string
"Normalize a locale string to the canonical format.
Returns `nil` for invalid strings -- you can use this to check whether a String is valid."
^String [s]
{:pre [((some-fn nil? string?) s)]}
(when (string? s)
(when-let [[_ language country] (re-matches #"^(\w{2})(?:[-_](\w{2}))?$" s)]
(let [language (str/lower-case language)]
(if country
(str language \_ (some-> country str/upper-case))
language)))))
(extend-protocol CoerceToLocale
nil
(locale [_] nil)
Locale
(locale [this] this)
String
(locale [^String s]
(some-> (normalized-locale-string s) LocaleUtils/toLocale))
Support namespaced keywords like ` : en / US ` and ` : en / UK ` because we can
clojure.lang.Keyword
(locale [this]
(locale (if-let [namespce (namespace this)]
(str namespce \_ (name this))
(name this)))))
(defn available-locale?
"True if `locale` (a string, keyword, or `Locale`) is a valid locale available on this system. Normalizes args
automatically."
[locale-or-name]
(boolean
(when-let [locale (locale locale-or-name)]
(LocaleUtils/isAvailableLocale locale))))
(defn- available-locale-names*
[]
(log/info "Reading available locales from locales.clj...")
(some-> (io/resource "locales.clj") slurp edn/read-string :locales (->> (apply sorted-set))))
(def ^{:arglists '([])} available-locale-names
"Return sorted set of available locales, as Strings.
- > # { \"en\ " \"nl\ " \"pt - BR\ " \"zh\ " } "
(let [locales (delay (available-locale-names*))]
(fn [] @locales)))
(defn- find-fallback-locale*
^Locale [^Locale a-locale]
(some (fn [locale-name]
(let [try-locale (locale locale-name)]
The language - only Locale is tried first by virtue of the
(when (and (= (.getLanguage try-locale) (.getLanguage a-locale))
(not (= try-locale a-locale)))
try-locale)))
(available-locale-names)))
(def ^:private ^{:arglists '([a-locale])} find-fallback-locale
(memoize find-fallback-locale*))
(defn fallback-locale
"Find a translated fallback Locale in the following order:
1) If it is a language + country Locale, try the language-only Locale
2) If the language-only Locale isn't translated or the input is a language-only Locale,
find the first language + country Locale we have a translation for.
Return `nil` if no fallback Locale can be found or the input is invalid.
^Locale [locale-or-name]
(when-let [a-locale (locale locale-or-name)]
(find-fallback-locale a-locale)))
(defn- locale-edn-resource
"The resource URL for the edn file containing translations for `locale-or-name`. These files are built by the
scripts in `bin/i18n` from `.po` files from POEditor.
- > # object[java.net . URL \"file:/home / cam / metabase / resources / metabase / es.edn\ " ] "
^java.net.URL [locale-or-name]
(when-let [a-locale (locale locale-or-name)]
(let [locale-name (-> (normalized-locale-string (str a-locale))
(str/replace #"_" "-"))
filename (format "i18n/%s.edn" locale-name)]
(io/resource filename (classloader/the-classloader)))))
(defn- translations* [a-locale]
(when-let [resource (locale-edn-resource a-locale)]
(edn/read-string (slurp resource))))
(def ^:private ^{:arglists '([locale-or-name])} translations
"Fetch a map of original untranslated message format string -> translated message format string for `locale-or-name`
by reading the corresponding EDN resource file. Does not include translations for parent locale(s). Memoized.
:messages {\"Username\" \"Nombre Usuario\", ...}}"
(comp (memoize translations*) locale))
(defn- translated-format-string*
"Find the translated version of `format-string` for `locale-or-name`, or `nil` if none can be found.
Does not search 'parent' (language-only) translations.
`n` is a number used for translations with plural forms, used to compute the index of the translation to
return."
^String [locale-or-name format-string n]
(when (seq format-string)
(when-let [locale (locale locale-or-name)]
(when-let [translations (translations locale)]
(when-let [string-or-strings (get-in translations [:messages format-string])]
(if (string? string-or-strings)
string-or-strings
(if-let [plural-forms-header (get-in translations [:headers "Plural-Forms"])]
(get string-or-strings (i18n.plural/index plural-forms-header n))
(first string-or-strings))))))))
(defn- translated-format-string
"Find the translated version of `format-string` for `locale-or-name`, or `nil` if none can be found. Searches parent
(language-only) translations if none exist for a language + country locale."
^String [locale-or-name format-string {:keys [n format-string-pl]}]
(when-let [a-locale (locale locale-or-name)]
(or (when (= (.getLanguage a-locale) "en")
(if (or (nil? n) (= n 1))
format-string
format-string-pl))
(translated-format-string* a-locale format-string n)
(when-let [fallback-locale (fallback-locale a-locale)]
(log/tracef "No translated string found, trying fallback locale %s" (pr-str fallback-locale))
(translated-format-string* fallback-locale format-string n))
format-string)))
(defn- message-format ^MessageFormat [locale-or-name ^String format-string pluralization-opts]
(or (when-let [a-locale (locale locale-or-name)]
(when-let [^String translated (translated-format-string a-locale format-string pluralization-opts)]
(MessageFormat. translated a-locale)))
(MessageFormat. format-string)))
(defn translate
"Find the translated version of `format-string` for a `locale-or-name`, then format it. Translates using the resource
`./resources/metabase/Metabase/Messages_<locale>.class`. Attempts to translate with `language-country` Locale if
specified, falling back to `language` (without country), finally falling back to English (i.e., not formatting the
original untranslated `format-string`) if no matching bundles/translations exist, or if translation fails for some
other reason.
`n` is used for strings with plural forms and essentially represents the quantity of items being described by the
translated string. Defaults to 1 (the singular form).
Will attempt to translate `format-string`, but if for some reason we're not able to (such as a typo in the
translated version of the string), log the failure but return the original (untranslated) string. This is a
workaround for translations that, due to a typo, will fail to parse using Java's message formatter.
- > \"deben tener 140 caracteres o menos\ " "
([locale-or-name ^String format-string]
(translate locale-or-name format-string []))
([locale-or-name ^String format-string args]
(translate locale-or-name format-string args {}))
([locale-or-name ^String format-string args pluralization-opts]
(when (seq format-string)
(try
(.format (message-format locale-or-name format-string pluralization-opts) (to-array args))
(catch Throwable e
Not translating this string to prevent an unfortunate stack overflow . If this string happened to be the one
(log/errorf e "Unable to translate string %s to %s" (pr-str format-string) (str locale-or-name))
(try
(.format (MessageFormat. format-string) (to-array args))
(catch Throwable _
(log/errorf e "Invalid format string %s" (pr-str format-string))
format-string)))))))
do the check anymore -- swapping out the getter fn with the simpler one speeds things up substantially
(def ^:private site-locale-from-setting-fn
(atom
(fn []
(when-let [db-is-set-up? (resolve 'metabase.db/db-is-set-up?)]
(when (and (bound? db-is-set-up?)
(db-is-set-up?))
(when-let [get-value-of-type (resolve 'metabase.models.setting/get-value-of-type)]
(when (bound? get-value-of-type)
(let [f (fn [] (get-value-of-type :string :site-locale))]
(reset! site-locale-from-setting-fn f)
(f)))))))))
(defn site-locale-from-setting
"Fetch the value of the `site-locale` Setting.
When metabase is shutting down, we need to log some messages after the db connection is closed, so we keep around a
cached-site-locale for that purpose."
[]
(let [cached-site-locale (atom "en")]
(try
(let [site-locale (@site-locale-from-setting-fn)]
(reset! cached-site-locale site-locale)
site-locale)
(catch Exception _ @cached-site-locale))))
(defmethod print-method Locale
[locale ^java.io.Writer writer]
((get-method print-dup Locale) locale writer))
(defmethod print-dup Locale
[locale ^java.io.Writer writer]
(.write writer (format "#locale %s" (pr-str (str locale)))))
|
054a6dd877d80858d28c7fbc8c65b00fbddd8f4c45396f0518d6750862a35997 | michiakig/sicp | ex-1.32.scm | ;;;; Structure and Interpretation of Computer Programs
Chapter 1 Section 3 Formulating Abstractions with Higher - Order Procedures
Exercise 1.32
;; Recursive accumulate function
(define accumulate
(lambda (combiner null-value term a next b)
(if (> a b)
null-value
(combiner (term a)
(accumulate combiner null-value term (next a) next b)))))
;; ... and iterative one:
(define accumulate
(lambda (combiner null-value term a next b)
(define iter
(lambda (a result)
(if (> a b)
result
(iter (next a) (combiner result (term a))))))
(iter a null-value))) ; null-value goes here at the start
| null | https://raw.githubusercontent.com/michiakig/sicp/1aa445f00b7895dbfaa29cf6984b825b4e5af492/ch1/ex-1.32.scm | scheme | Structure and Interpretation of Computer Programs
Recursive accumulate function
... and iterative one:
null-value goes here at the start | Chapter 1 Section 3 Formulating Abstractions with Higher - Order Procedures
Exercise 1.32
(define accumulate
(lambda (combiner null-value term a next b)
(if (> a b)
null-value
(combiner (term a)
(accumulate combiner null-value term (next a) next b)))))
(define accumulate
(lambda (combiner null-value term a next b)
(define iter
(lambda (a result)
(if (> a b)
result
(iter (next a) (combiner result (term a))))))
|
699f468b8175822eedd76a3cc9ba8a1b9c00710008a9b6473d45d7d417113d2c | DanielG/ghc-mod | Subdir.hs | module Check.Test.Subdir (subdir) where
subdir :: String
subdir = "subdir"
| null | https://raw.githubusercontent.com/DanielG/ghc-mod/391e187a5dfef4421aab2508fa6ff7875cc8259d/test/data/check-test-subdir/src/Check/Test/Subdir.hs | haskell | module Check.Test.Subdir (subdir) where
subdir :: String
subdir = "subdir"
| |
f58825da236f8a0f075c5b44abd2b211fa1be8c89df0294921021db7a6eff1e3 | tolitius/envoy | core.clj | (ns envoy.core
(:require [cheshire.core :as json]
[clojure.data :refer [diff]]
[clojure.core.async :refer [go-loop go <! >! >!! alt! chan]]
[org.httpkit.client :as http]
[envoy.tools :as tools]
[clojure.string :as string])
(:import [java.util Base64]))
(defn- recurse
  "Appends consul's `?recurse` query flag so the whole subtree under `url` is addressed."
  [url]
  (str url \? "recurse"))
(defn- index-of
  "Pulls the consul modify index (the `x-consul-index` header) out of a response map.
   Returns nil when the header is absent."
  [resp]
  (get-in resp [:headers :x-consul-index]))
(defn- with-ops
  "Wraps consul request options `ops` into an http-kit options map,
   dropping nil-valued entries from the query params."
  [ops]
  (assoc {} :query-params (tools/remove-nils ops)))
(defn- read-index
  "Issues a GET to `path` and returns the current consul modify index
   (the `x-consul-index` response header). Used to seed blocking queries
   in the watcher loop. Returns nil when the header is absent."
  ([path]
   (read-index path {}))
  ([path ops]
   ;; http-kit's get returns a promise: it has to be dereferenced before the
   ;; response headers can be read (a keyword lookup on the promise itself is
   ;; always nil). Every other call site in this namespace derefs with @.
   (-> @(http/get path (with-ops ops))
       index-of)))
(defn- fromBase64
  "Decodes a Base64-encoded string `s` into its plain-text form."
  [^String s]
  (let [decoder (Base64/getDecoder)]
    (String. (.decode decoder s))))
(defn- read-values
  "Parses a consul kv JSON response into a map of key -> Base64-decoded value.
   Keys become keywords unless `to-keys?` is false. Throws a RuntimeException
   on errors / non-200 responses, with a dedicated message for 404."
  ([resp]
   (read-values resp true))
  ([{:keys [body error status opts]} to-keys?]
   (cond
     (and (nil? error) (= 200 status))
     (reduce (fn [acc {:keys [Key Value]}]
               (assoc acc
                      (if to-keys? (keyword Key) Key)
                      (when Value (fromBase64 Value))))
             {}
             (json/parse-string body true))

     (= 404 status)
     (throw (RuntimeException. (str "could not find path in consul" {:path (:url opts)})))

     :else
     (throw (RuntimeException. (str "failed to read from consul" {:path (:url opts)
                                                                  :error error
                                                                  :http-status status}))))))
(defn- find-consul-node
  "Returns a zero-arg fn that hands out consul `hosts` round-robin:
   each call yields the next host, wrapping back to the first."
  [hosts]
  (let [counter (atom -1)]
    (fn []
      (let [i (swap! counter inc)]
        (nth hosts (mod i (count hosts)))))))
(defn url-builder
  "Create an envoy kv-path builder"
  [{:keys [hosts port secure?]
    :or {hosts ["localhost"] port 8500 secure? false}}]
  (let [scheme (if secure? "https://" "http://")
        next-node (find-consul-node hosts)]
    (fn [& [path]]
      (str scheme (next-node) ":" port "/v1/kv"
           (when (seq path)
             (str "/" (tools/clean-slash path)))))))
(defn put
  "Writes value `v` to the consul kv `path`, optionally with consul query
   options `ops`. Throws a RuntimeException unless consul answers HTTP 200."
  ([path v]
   (put path v {}))
  ([path v ops]
   (let [{:keys [status] :as resp} @(http/put path (merge {:body v}
                                                          (with-ops ops)))]
     (when-not (= 200 status)
       (throw (RuntimeException. (str "could not PUT to consul due to: " resp)))))))
(defn delete
  "Recursively deletes the kv subtree under `path`,
   optionally with consul query options `ops`."
  ([path]
   (delete path {}))
  ([path ops]
   (-> path
       recurse
       (http/delete (with-ops ops))
       deref)))
(defn get-all
([path]
(get-all path {}))
([path {:keys [keywordize?] :as ops
:or {keywordize? true}}]
(-> @(http/get (recurse (tools/with-slash path))
(with-ops (dissoc ops :keywordize?)))
(read-values keywordize?))))
(defn- start-watcher
([path fun stop?]
(start-watcher path fun stop? {}))
([path fun stop? ops]
(let [ch (chan)]
(go-loop [index nil current (get-all path)]
(http/get path
(with-ops (merge ops
{:index (or index (read-index path ops))}))
#(>!! ch %))
(alt!
stop? ([_]
(prn "stopping" path "watcher"))
ch ([resp]
(let [new-idx (index-of resp)
new-vs (read-values resp)]
first time there is no index
(when-let [changes (first (diff new-vs current))]
(fun changes)))
(recur new-idx new-vs))))))))
(defprotocol Stoppable
(stop [this]))
(deftype Watcher [ch]
Stoppable
(stop [_]
(>!! ch :done)))
(defn watch-path
([path fun]
(watch-path path fun {}))
([path fun ops]
(let [stop-ch (chan)]
(start-watcher (recurse path) fun stop-ch ops)
(Watcher. stop-ch))))
(defn strip-offset [xs offset]
(let [stripped (get-in xs (tools/cpath->kpath offset))]
(or stripped
(throw (RuntimeException. (str "could not remove offset" {:data xs :offset offset :reason (str "this usually happens if both prefix and offset are used. for example (envoy/consul->map ':8500/v1/kv/hubble' {:offset 'mission'}) while it should have been (envoy/consul->map ':8500/v1/kv' {:offset '/hubble/mission'})")}))))))
(defn consul->map
[path & [{:keys [serializer offset preserve-offset] :or {serializer :edn} :as ops}]]
(let [full-path (if offset
(tools/concat-with-slash path offset)
path)
consul-map (-> (partial get-all full-path (merge
(dissoc ops :serializer :offset :preserve-offset)
{:keywordize? false}))
(tools/props->map serializer))]
(if preserve-offset
consul-map
(strip-offset consul-map offset))))
(defn- overwrite-with
[kv-path m & [{:keys [serializer] :or {serializer :edn} :as ops}]]
(let [[consul-url sub-path] (string/split kv-path #"kv" 2)
update-kv-path (str consul-url "kv")
kpath (tools/cpath->kpath sub-path)
stored-map (reduce (fn [acc [k v]]
(merge acc (consul->map
(str kv-path "/" (name k))
{:serializer serializer})))
{} m)
;;to update correctly seq we need to pre-serialize map
[to-add to-remove _] (diff (tools/serialize-map m serializer)
(tools/serialize-map (get-in stored-map kpath) serializer))]
;;add
(doseq [[k v] (tools/map->props to-add serializer)]
(put (str kv-path "/" k) (str v) (dissoc ops :serializer :update)))
;;remove
(doseq [[k v] (tools/map->props to-remove serializer)]
(when (nil? (get-in to-add (tools/cpath->kpath k) nil))
@(http/delete (str kv-path "/" k))))))
(defn map->consul
[kv-path m & [{:keys [serializer overwrite?] :or {serializer :edn overwrite? false} :as ops}]]
(let [kv-path (tools/without-slash kv-path)]
(if-not overwrite?
(doseq [[k v] (tools/map->props m serializer)]
(put (str kv-path "/" k) (str v) (dissoc ops :serializer :update)))
(overwrite-with kv-path m ops))))
(defn copy
([path from to]
(copy path from to {}))
([path from to opts]
(let [data (consul->map path
(merge opts {:offset from}))
new-map (->> (tools/cpath->kpath to)
(tools/nest-map data))]
(map->consul path
new-map
opts))))
(defn move
([path from to]
(move path from to {}))
([path from to opts]
(let [dpath (str (tools/with-slash path)
(-> (tools/without-slash from {:slash :first})
(tools/with-slash)))]
(copy path from to opts)
(delete dpath opts))))
(defn merge-with-consul
([m path]
(merge-with-consul m path {}))
([m path ops]
(if-let [consul (consul->map path ops)]
(tools/merge-maps m consul)
m)))
| null | https://raw.githubusercontent.com/tolitius/envoy/cd319e47f6e87ab56103069ed300a35d535ae3e7/src/envoy/core.clj | clojure | (println "@(http/put" path (merge {:body v} (with-ops ops)))
to update correctly seq we need to pre-serialize map
add
remove | (ns envoy.core
(:require [cheshire.core :as json]
[clojure.data :refer [diff]]
[clojure.core.async :refer [go-loop go <! >! >!! alt! chan]]
[org.httpkit.client :as http]
[envoy.tools :as tools]
[clojure.string :as string])
(:import [java.util Base64]))
(defn- recurse [path]
(str path "?recurse"))
(defn- index-of [resp]
(-> resp
:headers
:x-consul-index))
(defn- with-ops [ops]
{:query-params (tools/remove-nils ops)})
(defn- read-index
([path]
(read-index path {}))
([path ops]
(-> (http/get path (with-ops ops))
index-of)))
(defn- fromBase64 [^String s]
(String. (.decode (Base64/getDecoder) s)))
(defn- read-values
([resp]
(read-values resp true))
([{:keys [body error status opts] :as resp} to-keys?]
(if (or error (not= status 200))
(if (= 404 status)
(throw (RuntimeException. (str "could not find path in consul" {:path (:url opts)})))
(throw (RuntimeException. (str "failed to read from consul" {:path (:url opts)
:error error
:http-status status}))))
(into {}
(for [{:keys [Key Value]} (json/parse-string body true)]
[(if to-keys? (keyword Key) Key)
(when Value (fromBase64 Value))])))))
(defn- find-consul-node [hosts]
(let [at (atom -1)]
#(nth hosts (mod (swap! at inc)
(count hosts)))))
(defn url-builder
"Create an envoy kv-path builder"
[{:keys [hosts port secure?]
:or {hosts ["localhost"] port 8500 secure? false}
:as conf}]
(let [proto (if secure? "https://" "http://")
consul-node (find-consul-node hosts)]
(fn [& [path]]
(let [node (consul-node)]
(str proto node ":" port "/v1/kv" (when (seq path)
(str "/" (tools/clean-slash path))))))))
(defn put
([path v]
(put path v {}))
([path v ops]
(let [{:keys [status] :as resp} @(http/put path (merge {:body v}
(with-ops ops)))]
(when-not (= 200 status)
(throw (RuntimeException. (str "could not PUT to consul due to: " resp)))))))
(defn delete
([path]
(delete path {}))
([path ops]
@(http/delete (recurse path)
(with-ops ops))))
(defn get-all
([path]
(get-all path {}))
([path {:keys [keywordize?] :as ops
:or {keywordize? true}}]
(-> @(http/get (recurse (tools/with-slash path))
(with-ops (dissoc ops :keywordize?)))
(read-values keywordize?))))
(defn- start-watcher
([path fun stop?]
(start-watcher path fun stop? {}))
([path fun stop? ops]
(let [ch (chan)]
(go-loop [index nil current (get-all path)]
(http/get path
(with-ops (merge ops
{:index (or index (read-index path ops))}))
#(>!! ch %))
(alt!
stop? ([_]
(prn "stopping" path "watcher"))
ch ([resp]
(let [new-idx (index-of resp)
new-vs (read-values resp)]
first time there is no index
(when-let [changes (first (diff new-vs current))]
(fun changes)))
(recur new-idx new-vs))))))))
(defprotocol Stoppable
(stop [this]))
(deftype Watcher [ch]
Stoppable
(stop [_]
(>!! ch :done)))
(defn watch-path
([path fun]
(watch-path path fun {}))
([path fun ops]
(let [stop-ch (chan)]
(start-watcher (recurse path) fun stop-ch ops)
(Watcher. stop-ch))))
(defn strip-offset [xs offset]
(let [stripped (get-in xs (tools/cpath->kpath offset))]
(or stripped
(throw (RuntimeException. (str "could not remove offset" {:data xs :offset offset :reason (str "this usually happens if both prefix and offset are used. for example (envoy/consul->map ':8500/v1/kv/hubble' {:offset 'mission'}) while it should have been (envoy/consul->map ':8500/v1/kv' {:offset '/hubble/mission'})")}))))))
(defn consul->map
[path & [{:keys [serializer offset preserve-offset] :or {serializer :edn} :as ops}]]
(let [full-path (if offset
(tools/concat-with-slash path offset)
path)
consul-map (-> (partial get-all full-path (merge
(dissoc ops :serializer :offset :preserve-offset)
{:keywordize? false}))
(tools/props->map serializer))]
(if preserve-offset
consul-map
(strip-offset consul-map offset))))
(defn- overwrite-with
[kv-path m & [{:keys [serializer] :or {serializer :edn} :as ops}]]
(let [[consul-url sub-path] (string/split kv-path #"kv" 2)
update-kv-path (str consul-url "kv")
kpath (tools/cpath->kpath sub-path)
stored-map (reduce (fn [acc [k v]]
(merge acc (consul->map
(str kv-path "/" (name k))
{:serializer serializer})))
{} m)
[to-add to-remove _] (diff (tools/serialize-map m serializer)
(tools/serialize-map (get-in stored-map kpath) serializer))]
(doseq [[k v] (tools/map->props to-add serializer)]
(put (str kv-path "/" k) (str v) (dissoc ops :serializer :update)))
(doseq [[k v] (tools/map->props to-remove serializer)]
(when (nil? (get-in to-add (tools/cpath->kpath k) nil))
@(http/delete (str kv-path "/" k))))))
(defn map->consul
[kv-path m & [{:keys [serializer overwrite?] :or {serializer :edn overwrite? false} :as ops}]]
(let [kv-path (tools/without-slash kv-path)]
(if-not overwrite?
(doseq [[k v] (tools/map->props m serializer)]
(put (str kv-path "/" k) (str v) (dissoc ops :serializer :update)))
(overwrite-with kv-path m ops))))
(defn copy
([path from to]
(copy path from to {}))
([path from to opts]
(let [data (consul->map path
(merge opts {:offset from}))
new-map (->> (tools/cpath->kpath to)
(tools/nest-map data))]
(map->consul path
new-map
opts))))
(defn move
([path from to]
(move path from to {}))
([path from to opts]
(let [dpath (str (tools/with-slash path)
(-> (tools/without-slash from {:slash :first})
(tools/with-slash)))]
(copy path from to opts)
(delete dpath opts))))
(defn merge-with-consul
([m path]
(merge-with-consul m path {}))
([m path ops]
(if-let [consul (consul->map path ops)]
(tools/merge-maps m consul)
m)))
|
d3bd6a30cbf172f38e6ca77516f174c07f86d7cc7b7063eb9dd1c8f53ff33e8f | Helium4Haskell/helium | PatternMatchBug4.hs | module PatternMatchBug4 where
main = f ["a", "a"]
f ["a", x] = x
| null | https://raw.githubusercontent.com/Helium4Haskell/helium/5928bff479e6f151b4ceb6c69bbc15d71e29eb47/test/correct/PatternMatchBug4.hs | haskell | module PatternMatchBug4 where
main = f ["a", "a"]
f ["a", x] = x
| |
0ec077eb8649bc8522d4ecae75a8657228195ebf105a60800831da13f6d6150c | spawngrid/htoad | htoad.erl | -module(htoad).
-export([start/0, add_rules/1, assert/1, assert/2, retract/1, retract/2]).
-include_lib("htoad/include/htoad.hrl").
start() ->
start(htoad).
start(App) ->
case application:start(App) of
{error, {not_started, Dep}} ->
start(Dep),
start(App);
Other ->
Other
end.
add_rules(Rules) ->
seresye:add_rules(?ENGINE, Rules),
case Rules of
Module when is_atom(Module) ->
ok;
{Module, _} ->
ok
end,
FReqs = proplists:get_value(htoad_file_requests, Module:module_info(attributes), []),
assert([{file_request, F} || F <- FReqs]).
assert(Fact) when is_list(Fact); is_tuple(Fact) ->
seresye:assert(?ENGINE, Fact).
assert(Engine, Fact) when is_list(Fact); is_tuple(Fact) ->
{Fun, Args} = seresye_engine:get_fired_rule(Engine),
htoad_trace:assert(Engine, Fun, Args, Fact),
seresye_engine:assert(Engine, Fact).
retract(Fact) when is_list(Fact); is_tuple(Fact) ->
seresye:assert(?ENGINE, Fact).
retract(Engine, Fact) when is_list(Fact); is_tuple(Fact) ->
seresye_engine:retract(Engine, Fact).
| null | https://raw.githubusercontent.com/spawngrid/htoad/f0c7dfbd911b29fb0c406b7c26606f553af11194/apps/htoad/src/htoad.erl | erlang | -module(htoad).
-export([start/0, add_rules/1, assert/1, assert/2, retract/1, retract/2]).
-include_lib("htoad/include/htoad.hrl").
start() ->
start(htoad).
start(App) ->
case application:start(App) of
{error, {not_started, Dep}} ->
start(Dep),
start(App);
Other ->
Other
end.
add_rules(Rules) ->
seresye:add_rules(?ENGINE, Rules),
case Rules of
Module when is_atom(Module) ->
ok;
{Module, _} ->
ok
end,
FReqs = proplists:get_value(htoad_file_requests, Module:module_info(attributes), []),
assert([{file_request, F} || F <- FReqs]).
assert(Fact) when is_list(Fact); is_tuple(Fact) ->
seresye:assert(?ENGINE, Fact).
assert(Engine, Fact) when is_list(Fact); is_tuple(Fact) ->
{Fun, Args} = seresye_engine:get_fired_rule(Engine),
htoad_trace:assert(Engine, Fun, Args, Fact),
seresye_engine:assert(Engine, Fact).
retract(Fact) when is_list(Fact); is_tuple(Fact) ->
seresye:assert(?ENGINE, Fact).
retract(Engine, Fact) when is_list(Fact); is_tuple(Fact) ->
seresye_engine:retract(Engine, Fact).
| |
c0c79924da8146a1dec2171c40a2dcb544f15e07d6a47c953d1c7b60cbb3132c | open-company/open-company-web | section.cljs | (ns oc.web.stores.section
(:require [taoensso.timbre :as timbre]
[oc.web.dispatcher :as dispatcher]
[oc.web.lib.utils :as utils]
[oc.web.utils.org :as ou]
[oc.web.utils.board :as bu]
[oc.web.utils.activity :as au]))
;; Store a freshly loaded section (board) payload: cache the parsed board
;; data under its board key and merge its entries into the org-wide posts map.
(defmethod dispatcher/action :section
  [db [_ org-slug _section-slug sort-type section-data]]
  ;; Once the section payload is fully loaded the global :loading flag can go.
  (let [db-loading (if (:is-loaded section-data)
                     (dissoc db :loading)
                     db)
        with-entries (:entries section-data)
        ;; Parse the raw payload; change data, active users and followed
        ;; boards all feed into the parsing.
        fixed-section-data (au/parse-board section-data (dispatcher/change-data db) (dispatcher/active-users) (dispatcher/follow-boards-list) sort-type)
        old-section-data (get-in db (dispatcher/board-data-key org-slug (:slug section-data) sort-type))
        ;; While an entry is being edited keep the board data the user is
        ;; looking at instead of swapping it out from under the editor.
        with-current-edit (if (and (:is-loaded section-data)
                                   (:entry-editing db))
                            old-section-data
                            fixed-section-data)
        posts-key (dispatcher/posts-data-key org-slug)
        ;; Newly parsed items win over whatever was cached for the same uuid.
        merged-items (merge (get-in db posts-key)
                            (:fixed-items fixed-section-data))
        with-merged-items (if with-entries
                            (assoc-in db-loading posts-key merged-items)
                            db-loading)
        is-drafts-board? (= (:slug section-data) utils/default-drafts-board-slug)
        org-drafts-count-key (vec (conj (dispatcher/org-data-key org-slug) :drafts-count))]
    (-> with-merged-items
      ;; Posts live in the org-wide posts map (stored above), so drop
      ;; :fixed-items from the board entry to avoid storing them twice.
      (assoc-in (dispatcher/board-data-key org-slug (:slug section-data) sort-type)
       (dissoc with-current-edit :fixed-items))
      ;; Keep the org drafts count in sync when this is the drafts board.
      (update-in org-drafts-count-key #(if is-drafts-board?
                                         (ou/disappearing-count-value % (:total-count section-data))
                                         %)))))
;; (defn fix-posts-new-label
;;   [db changes]
;;   (let [posts-data (dispatcher/posts-data db)
;;         org-slug (:org (:router-path db))]
;;     (reduce
;;       #(let [posts-key (dispatcher/activity-key org-slug (:uuid %2))]
;;          (update-in %1 posts-key merge {:unread (au/entry-unread? %2 changes)}))
;;       db
;;       (vals posts-data))))
;; A section changed elsewhere; nothing needs updating in the app state for
;; this event, so the db is returned unchanged.
(defmethod dispatcher/action :section-change
  [db [_ _section-uuid]]
  db)
(defmethod dispatcher/action :section-edit-save
  [db [_ _org-slug _section-data]]
  ;; Flag the section-editing state as loading while the save request is in
  ;; flight; :section-edit-save/finish removes the flag again.
  (update db dispatcher/section-editing-key assoc :loading true))
;; A section edit was saved successfully: store the parsed section data under
;; its board key, refresh the org's board list and clear the editing flags.
;; NOTE: the line "Parse the new section data" had lost its comment marker,
;; which left bare symbols inside the let bindings; restored as a comment.
(defmethod dispatcher/action :section-edit-save/finish
  [db [_ org-slug section-data]]
  (let [section-slug (:slug section-data)
        board-key (dispatcher/board-data-key org-slug section-slug)
        org-data-key (dispatcher/org-data-key org-slug)
        org-boards-data-key (conj org-data-key :boards)
        ;; Parse the new section data
        fixed-section-data (au/parse-board section-data (dispatcher/change-data db) (dispatcher/active-users) (dispatcher/follow-boards-list))
        old-board-data (get-in db board-key)
        ;; Replace the old section data
        ;; w/o overriding the posts and links to avoid breaking pagination
        next-board-data (merge fixed-section-data
                               (select-keys old-board-data [:posts-list :items-to-render :fixed-items :links]))]
    (-> db
      (assoc-in board-key next-board-data)
      ;; Swap the updated section into the org's board list, matched by uuid.
      (update-in org-boards-data-key #(map (fn [board]
                                             (if (= (:uuid board) (:uuid fixed-section-data))
                                               fixed-section-data
                                               board))
                                           %))
      ;; Clear the in-flight/dirty flags on the editing state.
      (update dispatcher/section-editing-key #(dissoc % :loading :has-changes)))))
;; Close the section editor: drop the whole section-editing scratch state.
(defmethod dispatcher/action :section-edit/dismiss
  [db [_]]
  (dissoc db dispatcher/section-editing-key))
(defmethod dispatcher/action :private-section-user-add
  [db [_ user user-type]]
  ;; Add `user` to the private section being edited: into :authors for
  ;; :admin/:author, into :viewers for :viewer. The user is filtered out of
  ;; every list first so a role change doesn't leave a duplicate entry.
  (let [editing-data (get db dispatcher/section-editing-key)
        user-id (:user-id user)
        authors-sans-user (filterv #(not= % user-id) (:authors editing-data))
        viewers-sans-user (filterv #(not= % user-id) (:viewers editing-data))
        notifications-sans-user (filterv #(not= (:user-id %) user-id)
                                         (:private-notifications editing-data))
        next-authors (if (#{:admin :author} user-type)
                       (conj authors-sans-user user-id)
                       authors-sans-user)
        next-viewers (if (= user-type :viewer)
                       (conj viewers-sans-user user-id)
                       viewers-sans-user)]
    (assoc db dispatcher/section-editing-key
           (merge editing-data {:has-changes true
                                :authors next-authors
                                :viewers next-viewers
                                :private-notifications (conj notifications-sans-user user)}))))
(defmethod dispatcher/action :private-section-user-remove
  [db [_ user]]
  ;; Drop `user` from the authors, viewers and pending private notifications
  ;; of the section currently being edited, and mark the edit as dirty.
  (let [editing-data (get db dispatcher/section-editing-key)
        user-id (:user-id user)
        without-user (fn [coll] (filterv #(not= % user-id) coll))
        next-notifications (filterv #(not= (:user-id %) user-id)
                                    (:private-notifications editing-data))]
    (assoc db dispatcher/section-editing-key
           (merge editing-data {:has-changes true
                                :authors (without-user (:authors editing-data))
                                :viewers (without-user (:viewers editing-data))
                                :private-notifications next-notifications}))))
;; Result of the current user removing themselves from a private section.
;; NOTE: the line "Force board editing dismiss" had lost its comment marker,
;; leaving a bare symbol inside the `if` form; restored as a comment.
(defmethod dispatcher/action :private-section-kick-out-self/finish
  [db [_ success]]
  (if success
    ;; Force board editing dismiss
    (dissoc db dispatcher/section-editing-key)
    ;; An error occurred while kicking the user out, no-op to let the user retry
    db))
;; Delete a section: drop its board entry, purge its posts from the org-wide
;; posts map, remove it from the org's board list and reset any section
;; editing state. If the collapsed composer (cmail) exists, retarget it to
;; the first remaining editable section so it doesn't point at the deleted one.
(defmethod dispatcher/action :section-delete
  [db [_ org-slug section-slug]]
  (let [section-key (dispatcher/board-key org-slug section-slug)
        org-sections-key (vec (conj (dispatcher/org-data-key org-slug) :boards))
        remaining-sections (remove #(= (:slug %) section-slug) (get-in db org-sections-key))
        posts-key (dispatcher/posts-data-key org-slug)
        old-posts (get-in db posts-key)
        ;; Posts that belonged to other sections and must survive the delete.
        removed-posts (filterv (fn [p] (not= (:board-slug p) section-slug))
                               (vals old-posts))
        cmail-state (get-in db dispatcher/cmail-state-key)
        ;; First remaining non-draft section that exposes a "create" link.
        first-editable-section (first
                                (filter #(and (not (:draft %)) (utils/link-for (:links %) "create"))
                                        (sort-by :name remaining-sections)))
        next-db (if (and (:collapsed cmail-state)
                         first-editable-section)
                  (-> db
                      (assoc-in (conj dispatcher/cmail-state-key :key) (utils/activity-uuid))
                      (assoc-in dispatcher/cmail-data-key {:board-name (:name first-editable-section)
                                                           :board-slug (:slug first-editable-section)
                                                           :publisher-board (:publisher-board first-editable-section)}))
                  db)]
    (-> next-db
        (update-in (butlast section-key) dissoc (last section-key))
        ;; posts-key and org-sections-key are key *paths* (used with get-in
        ;; above), so they must be written with assoc-in — plain assoc would
        ;; store the path vector itself as a literal map key.
        (assoc-in posts-key (zipmap (map :uuid removed-posts) removed-posts))
        (assoc-in org-sections-key remaining-sections)
        (dissoc dispatcher/section-editing-key))))
;; New seen/unseen status arrived for one or more containers: merge it into
;; the cached change data and refresh every container with it applied.
;; NOTE(review): the _replace-change-data? flag is currently ignored.
(defmethod dispatcher/action :container/status
  [db [_ change-data _replace-change-data?]]
  (timbre/debug "Change status received:" change-data)
  (if change-data
    (let [org-data (dispatcher/org-data db)
          old-change-data (dispatcher/change-data db)
          ;; Incoming entries win over the cached ones.
          new-change-data (merge old-change-data change-data)
          active-users (dispatcher/active-users (:slug org-data) db)
          follow-publishers-list (dispatcher/follow-publishers-list (:slug org-data) db)]
      (timbre/debug "Change status data:" new-change-data)
      (-> db
        ;; Re-derive all containers with the fresh change data applied.
        (au/update-all-containers org-data change-data active-users follow-publishers-list)
        (assoc-in (dispatcher/change-data-key (:slug org-data)) new-change-data)))
    ;; Nothing received: leave the state untouched.
    db))
(defn update-unseen-unread-remove
  "Return `old-change-data` with `item-id` removed from both the :unseen and
  :unread vectors of the `container-id` entry, creating the entry when it
  doesn't exist yet. The last argument is kept for call-site compatibility
  but is not used."
  [old-change-data item-id container-id _new-changes]
  (let [old-container-change-data (get old-change-data container-id)
        ;; filterv (not lazy filter) keeps the stored values vectors,
        ;; consistent with the other change-data updaters in this namespace.
        next-unseen (filterv #(not= % item-id) (or (:unseen old-container-change-data) []))
        next-unread (filterv #(not= % item-id) (or (:unread old-container-change-data) []))
        next-container-change-data (if old-container-change-data
                                     (assoc old-container-change-data
                                            :unseen next-unseen
                                            :unread next-unread)
                                     {:container-id container-id
                                      :unseen next-unseen
                                      :unread next-unread})]
    (assoc old-change-data container-id next-container-change-data)))
(defmethod dispatcher/action :item-delete/unseen
  [db [_ org-slug change-data]]
  ;; A post was deleted: drop its id from the unseen/unread lists of the
  ;; container it lived in.
  (let [{:keys [item-id container-id]} change-data
        change-key (dispatcher/change-data-key org-slug)]
    (update-in db change-key update-unseen-unread-remove item-id container-id change-data)))
(defn update-unseen-unread-add
  "Return `old-change-data` with `item-id` appended to both the :unseen and
  :unread vectors of the `container-id` entry, creating the entry when it
  doesn't exist yet. The last argument is kept for call-site compatibility
  but is not used."
  [old-change-data item-id container-id _new-changes]
  (let [old-container-change-data (get old-change-data container-id)
        ;; Coerce to a vector before conj so the id is always appended
        ;; (conj on a seq would prepend) — the original (vec (seq (conj ...)))
        ;; round-trip is unnecessary.
        next-unseen (conj (vec (or (:unseen old-container-change-data) [])) item-id)
        next-unread (conj (vec (or (:unread old-container-change-data) [])) item-id)
        next-container-change-data (if old-container-change-data
                                     (assoc old-container-change-data
                                            :unseen next-unseen
                                            :unread next-unread)
                                     {:container-id container-id
                                      :unseen next-unseen
                                      :unread next-unread})]
    (assoc old-change-data container-id next-container-change-data)))
(defmethod dispatcher/action :item-add/unseen
  [db [_ org-slug change-data]]
  ;; A new post appeared: record its id as unseen/unread for its container.
  (let [{:keys [item-id container-id]} change-data
        change-key (dispatcher/change-data-key org-slug)]
    (update-in db change-key update-unseen-unread-add item-id container-id change-data)))
(defmethod dispatcher/action :section-more
  [db [_ org-slug board-slug sort-type]]
  ;; Mark the board container as loading more items while the pagination
  ;; request is in flight; :section-more/finish clears the flag.
  (update-in db (dispatcher/board-data-key org-slug board-slug sort-type)
             assoc :loading-more true))
;; A pagination request for a board completed: parse the new page, merge its
;; items into the org-wide posts map and replace the container data.
(defmethod dispatcher/action :section-more/finish
  [db [_ org board sort-type direction next-board-data]]
  (if next-board-data
    (let [container-key (dispatcher/board-data-key org board sort-type)
          container-data (get-in db container-key)
          posts-data-key (dispatcher/posts-data-key org)
          old-posts (get-in db posts-data-key)
          ;; Hand the parser the already rendered posts list and the previous
          ;; links so the new page is appended instead of replacing the old.
          prepare-board-data (merge next-board-data {:posts-list (:posts-list container-data)
                                                     :old-links (:links container-data)})
          fixed-posts-data (au/parse-board prepare-board-data (dispatcher/change-data db) (dispatcher/active-users) (dispatcher/follow-boards-list) sort-type direction)
          ;; New items win over whatever was cached for the same uuid.
          new-items-map (merge old-posts (:fixed-items fixed-posts-data))
          new-container-data (-> fixed-posts-data
                               (assoc :direction direction)
                               ;; Pagination is done: clear the in-flight flag
                               ;; set by :section-more.
                               (dissoc :loading-more))]
      (-> db
        (assoc-in container-key new-container-data)
        (assoc-in posts-data-key new-items-map)))
    ;; No payload: leave the state untouched.
    db))
;; Stash the board data the section editor should be initialized from.
(defmethod dispatcher/action :setup-section-editing
  [db [_ board-data]]
  (assoc db :initial-section-editing board-data))
;; A post moved between sections: carry its unseen/unread status from the old
;; container's change data over to the new container's.
(defmethod dispatcher/action :item-move
  [db [_ org-slug change-data]]
  (let [old-container-id (:old-container-id change-data)
        container-id (:container-id change-data)
        item-id (:item-id change-data)
        change-key (dispatcher/change-data-key org-slug)
        old-change-data (get-in db change-key)
        old-container-change-data (get old-change-data old-container-id)
        ;; Remember the item's status before removing it from the old container.
        is-unseen? (utils/in? (:unseen old-container-change-data) item-id)
        is-unread? (utils/in? (:unread old-container-change-data) item-id)
        next-old-unseen (filterv #(not= % item-id) (:unseen old-container-change-data))
        next-old-unread (filterv #(not= % item-id) (:unread old-container-change-data))
        next-old-container-change-data (-> old-container-change-data
                                         (assoc :unseen next-old-unseen)
                                         (assoc :unread next-old-unread))
        new-container-change-data (get old-change-data container-id)
        ;; Append the item to the new container only if it carried the status.
        next-new-unseen (concat (:unseen new-container-change-data) (if is-unseen? [item-id] []))
        next-new-unread (concat (:unread new-container-change-data) (if is-unread? [item-id] []))
        next-new-container-change-data (-> new-container-change-data
                                         (assoc :unseen next-new-unseen)
                                         (assoc :unread next-new-unread))
        next-change-data (-> old-change-data
                           (assoc old-container-id next-old-container-change-data)
                           (assoc container-id next-new-container-change-data))]
    (assoc-in db change-key next-change-data)))
;; Toggle a Slack channel in the :slack-mirror list of the section being
;; edited: remove it when it's already selected, append it otherwise.
(defmethod dispatcher/action :section-editor/toggle-mirror-channel
  [db [_ slack-org-id channel]]
  (update-in db [dispatcher/section-editing-key :slack-mirror]
   (fn [old-slack-mirror]
     ;; The mirror list may be nil the first time a channel is picked.
     (let [slack-mirror (or old-slack-mirror [])
           selected? (bu/contains-channel? slack-mirror slack-org-id channel)]
       (if selected?
         ;; Drop every entry matching this slack-org/channel pair.
         (filterv (comp not (bu/compare-channel slack-org-id channel)) slack-mirror)
         (-> slack-mirror
             (conj (bu/format-mirror-channel slack-org-id channel))
             vec))))))
(let [posts-data (dispatcher/posts-data db)
org-slug (:org (:router-path db))]
(reduce
db
(vals posts-data))))
Replace the old section data
w/o overriding the posts and links to avoid breaking pagination
An error occurred while kicking the user out, no-op to let the user retry | (ns oc.web.stores.section
(:require [taoensso.timbre :as timbre]
[oc.web.dispatcher :as dispatcher]
[oc.web.lib.utils :as utils]
[oc.web.utils.org :as ou]
[oc.web.utils.board :as bu]
[oc.web.utils.activity :as au]))
(defmethod dispatcher/action :section
[db [_ org-slug _section-slug sort-type section-data]]
(let [db-loading (if (:is-loaded section-data)
(dissoc db :loading)
db)
with-entries (:entries section-data)
fixed-section-data (au/parse-board section-data (dispatcher/change-data db) (dispatcher/active-users) (dispatcher/follow-boards-list) sort-type)
old-section-data (get-in db (dispatcher/board-data-key org-slug (:slug section-data) sort-type))
with-current-edit (if (and (:is-loaded section-data)
(:entry-editing db))
old-section-data
fixed-section-data)
posts-key (dispatcher/posts-data-key org-slug)
merged-items (merge (get-in db posts-key)
(:fixed-items fixed-section-data))
with-merged-items (if with-entries
(assoc-in db-loading posts-key merged-items)
db-loading)
is-drafts-board? (= (:slug section-data) utils/default-drafts-board-slug)
org-drafts-count-key (vec (conj (dispatcher/org-data-key org-slug) :drafts-count))]
(-> with-merged-items
(assoc-in (dispatcher/board-data-key org-slug (:slug section-data) sort-type)
(dissoc with-current-edit :fixed-items))
(update-in org-drafts-count-key #(if is-drafts-board?
(ou/disappearing-count-value % (:total-count section-data))
%)))))
( defn fix - posts - new - label
# ( let [ posts - key ( dispatcher / activity - key org - slug (: uuid % 2 ) ) ]
( update - in % 1 posts - key merge { : unread ( au / entry - unread ? % 2 changes ) } ) )
(defmethod dispatcher/action :section-change
[db [_ _section-uuid]]
db)
(defmethod dispatcher/action :section-edit-save
[db [_ _org-slug _section-data]]
(assoc-in db [dispatcher/section-editing-key :loading] true))
(defmethod dispatcher/action :section-edit-save/finish
[db [_ org-slug section-data]]
(let [section-slug (:slug section-data)
board-key (dispatcher/board-data-key org-slug section-slug)
org-data-key (dispatcher/org-data-key org-slug)
org-boards-data-key (conj org-data-key :boards)
Parse the new section data
fixed-section-data (au/parse-board section-data (dispatcher/change-data db) (dispatcher/active-users) (dispatcher/follow-boards-list))
old-board-data (get-in db board-key)
next-board-data (merge fixed-section-data
(select-keys old-board-data [:posts-list :items-to-render :fixed-items :links]))]
(-> db
(assoc-in board-key next-board-data)
(update-in org-boards-data-key #(map (fn [board]
(if (= (:uuid board) (:uuid fixed-section-data))
fixed-section-data
board))
%))
(update dispatcher/section-editing-key #(dissoc % :loading :has-changes)))))
(defmethod dispatcher/action :section-edit/dismiss
[db [_]]
(dissoc db dispatcher/section-editing-key))
(defmethod dispatcher/action :private-section-user-add
[db [_ user user-type]]
(let [section-data (get db dispatcher/section-editing-key)
current-notifications (filterv #(not= (:user-id %) (:user-id user))
(:private-notifications section-data))
current-authors (filterv #(not= % (:user-id user)) (:authors section-data))
current-viewers (filterv #(not= % (:user-id user)) (:viewers section-data))
next-authors (if (#{:admin :author} user-type)
(vec (conj current-authors (:user-id user)))
current-authors)
next-viewers (if (= user-type :viewer)
(vec (conj current-viewers (:user-id user)))
current-viewers)
next-notifications (vec (conj current-notifications user))]
(assoc db dispatcher/section-editing-key
(merge section-data {:has-changes true
:authors next-authors
:viewers next-viewers
:private-notifications next-notifications}))))
(defmethod dispatcher/action :private-section-user-remove
[db [_ user]]
(let [section-data (get db dispatcher/section-editing-key)
private-notifications (filterv #(not= (:user-id %) (:user-id user))
(:private-notifications section-data))
next-authors (filterv #(not= % (:user-id user)) (:authors section-data))
next-viewers (filterv #(not= % (:user-id user)) (:viewers section-data))]
(assoc db dispatcher/section-editing-key
(merge section-data {:has-changes true
:authors next-authors
:viewers next-viewers
:private-notifications private-notifications}))))
(defmethod dispatcher/action :private-section-kick-out-self/finish
[db [_ success]]
(if success
Force board editing dismiss
(dissoc db dispatcher/section-editing-key)
db))
(defmethod dispatcher/action :section-delete
[db [_ org-slug section-slug]]
(let [section-key (dispatcher/board-key org-slug section-slug)
org-sections-key (vec (conj (dispatcher/org-data-key org-slug) :boards))
remaining-sections (remove #(= (:slug %) section-slug) (get-in db org-sections-key))
posts-key (dispatcher/posts-data-key org-slug)
old-posts (get-in db posts-key)
removed-posts (filterv (fn [p] (not= (:board-slug p) section-slug))
(vals old-posts))
cmail-state (get-in db dispatcher/cmail-state-key)
first-editable-section (first
(filter #(and (not (:draft %)) (utils/link-for (:links %) "create"))
(sort-by :name remaining-sections)))
next-db (if (and (:collapsed cmail-state)
first-editable-section)
(-> db
(assoc-in (conj dispatcher/cmail-state-key :key) (utils/activity-uuid))
(assoc-in dispatcher/cmail-data-key {:board-name (:name first-editable-section)
:board-slug (:slug first-editable-section)
:publisher-board (:publisher-board first-editable-section)}))
db)]
(-> next-db
(update-in (butlast section-key) dissoc (last section-key))
(assoc posts-key (zipmap (map :uuid removed-posts) removed-posts))
(assoc org-sections-key remaining-sections)
(dissoc dispatcher/section-editing-key))))
(defmethod dispatcher/action :container/status
[db [_ change-data _replace-change-data?]]
(timbre/debug "Change status received:" change-data)
(if change-data
(let [org-data (dispatcher/org-data db)
old-change-data (dispatcher/change-data db)
new-change-data (merge old-change-data change-data)
active-users (dispatcher/active-users (:slug org-data) db)
follow-publishers-list (dispatcher/follow-publishers-list (:slug org-data) db)]
(timbre/debug "Change status data:" new-change-data)
(-> db
(au/update-all-containers org-data change-data active-users follow-publishers-list)
(assoc-in (dispatcher/change-data-key (:slug org-data)) new-change-data)))
db))
(defn update-unseen-unread-remove [old-change-data item-id container-id new-changes]
(let [old-container-change-data (get old-change-data container-id)
old-unseen (or (:unseen old-container-change-data) [])
next-unseen (filter #(not= % item-id) old-unseen)
old-unread (or (:unread old-container-change-data) [])
next-unread (filter #(not= % item-id) old-unread)
next-container-change-data (if old-container-change-data
(assoc old-container-change-data
:unseen next-unseen
:unread next-unread)
{:container-id container-id
:unseen next-unseen
:unread next-unread})]
(assoc old-change-data container-id next-container-change-data)))
(defmethod dispatcher/action :item-delete/unseen
[db [_ org-slug change-data]]
(let [item-id (:item-id change-data)
container-id (:container-id change-data)
change-key (dispatcher/change-data-key org-slug)
old-change-data (get-in db change-key)]
(assoc-in db change-key (update-unseen-unread-remove old-change-data item-id container-id change-data))))
(defn update-unseen-unread-add [old-change-data item-id container-id new-changes]
(let [old-container-change-data (get old-change-data container-id)
old-unseen (or (:unseen old-container-change-data) [])
next-unseen (vec (seq (conj old-unseen item-id)))
old-unread (or (:unread old-container-change-data) [])
next-unread (vec (seq (conj old-unread item-id)))
next-container-change-data (if old-container-change-data
(assoc old-container-change-data
:unseen next-unseen
:unread next-unread)
{:container-id container-id
:unseen next-unseen
:unread next-unread})]
(assoc old-change-data container-id next-container-change-data)))
(defmethod dispatcher/action :item-add/unseen
[db [_ org-slug change-data]]
(let [item-id (:item-id change-data)
container-id (:container-id change-data)
change-key (dispatcher/change-data-key org-slug)
old-change-data (get-in db change-key)]
(assoc-in db change-key (update-unseen-unread-add old-change-data item-id container-id change-data))))
(defmethod dispatcher/action :section-more
[db [_ org-slug board-slug sort-type]]
(let [container-key (dispatcher/board-data-key org-slug board-slug sort-type)
container-data (get-in db container-key)
next-container-data (assoc container-data :loading-more true)]
(assoc-in db container-key next-container-data)))
(defmethod dispatcher/action :section-more/finish
[db [_ org board sort-type direction next-board-data]]
(if next-board-data
(let [container-key (dispatcher/board-data-key org board sort-type)
container-data (get-in db container-key)
posts-data-key (dispatcher/posts-data-key org)
old-posts (get-in db posts-data-key)
prepare-board-data (merge next-board-data {:posts-list (:posts-list container-data)
:old-links (:links container-data)})
fixed-posts-data (au/parse-board prepare-board-data (dispatcher/change-data db) (dispatcher/active-users) (dispatcher/follow-boards-list) sort-type direction)
new-items-map (merge old-posts (:fixed-items fixed-posts-data))
new-container-data (-> fixed-posts-data
(assoc :direction direction)
(dissoc :loading-more))]
(-> db
(assoc-in container-key new-container-data)
(assoc-in posts-data-key new-items-map)))
db))
(defmethod dispatcher/action :setup-section-editing
[db [_ board-data]]
(assoc db :initial-section-editing board-data))
(defmethod dispatcher/action :item-move
[db [_ org-slug change-data]]
(let [old-container-id (:old-container-id change-data)
container-id (:container-id change-data)
item-id (:item-id change-data)
change-key (dispatcher/change-data-key org-slug)
old-change-data (get-in db change-key)
old-container-change-data (get old-change-data old-container-id)
is-unseen? (utils/in? (:unseen old-container-change-data) item-id)
is-unread? (utils/in? (:unread old-container-change-data) item-id)
next-old-unseen (filterv #(not= % item-id) (:unseen old-container-change-data))
next-old-unread (filterv #(not= % item-id) (:unread old-container-change-data))
next-old-container-change-data (-> old-container-change-data
(assoc :unseen next-old-unseen)
(assoc :unread next-old-unread))
new-container-change-data (get old-change-data container-id)
next-new-unseen (concat (:unseen new-container-change-data) (if is-unseen? [item-id] []))
next-new-unread (concat (:unread new-container-change-data) (if is-unread? [item-id] []))
next-new-container-change-data (-> new-container-change-data
(assoc :unseen next-new-unseen)
(assoc :unread next-new-unread))
next-change-data (-> old-change-data
(assoc old-container-id next-old-container-change-data)
(assoc container-id next-new-container-change-data))]
(assoc-in db change-key next-change-data)))
(defmethod dispatcher/action :section-editor/toggle-mirror-channel
[db [_ slack-org-id channel]]
(update-in db [dispatcher/section-editing-key :slack-mirror]
(fn [old-slack-mirror]
(let [slack-mirror (or old-slack-mirror [])
selected? (bu/contains-channel? slack-mirror slack-org-id channel)]
(if selected?
(filterv (comp not (bu/compare-channel slack-org-id channel)) slack-mirror)
(-> slack-mirror
(conj (bu/format-mirror-channel slack-org-id channel))
vec)))))) |
e0eaa3c170bbdca4cf059806d77dbf1a581418cf0d6d1075c872cd9dcb166d06 | oakes/play-cljc | project.clj | (defproject play-cljc/lein-template "0.9.6.3"
:description "A template for making play-cljc projects"
:url "-cljc"
:license {:name "Public Domain"
:url ""}
:repositories [["clojars" {:url ""
:sign-releases false}]])
| null | https://raw.githubusercontent.com/oakes/play-cljc/7278dd4997298b55c5e6771cad22b99d76706f5f/template/project.clj | clojure | (defproject play-cljc/lein-template "0.9.6.3"
:description "A template for making play-cljc projects"
:url "-cljc"
:license {:name "Public Domain"
:url ""}
:repositories [["clojars" {:url ""
:sign-releases false}]])
| |
8502eae2076676f2dda05a6c396576df765a00b7fc833baa9e473580df4bbf42 | damballa/inet.data | ip.clj | (ns inet.data.ip
"Functions for interacting with IP addresses and networks."
(:require [clojure.string :as str])
(:use [inet.data.util :only [ignore-errors case-expr ubyte sbyte longest-run
bytes-hash-code doto-let]]
[hier-set.core :only [hier-set-by]])
(:import [clojure.lang IFn IObj ILookup BigInt Indexed Seqable]
[inet.data.ip IPParser IPNetworkComparison]
[java.io Serializable]
[java.util Arrays]
[java.net InetAddress]))
(defprotocol ^:no-doc IPAddressConstruction
"Construct a full address object."
(^:private -address [addr]
"Produce an IPAddress from `addr`."))
(defprotocol ^:no-doc IPAddressOperations
"Operations on objects which may be treated as addresses."
(^:private -address? [addr]
"Returns whether or not the value represents a valid address.")
(^bytes address-bytes [addr]
"Retrieve the bytes representation of this address.")
(^long address-length [addr]
"The length in bits of this address."))
(defprotocol ^:no-doc IPNetworkConstruction
"Construct a full network object."
(^:private -network [net] [prefix length]
"Produce an IPNetwork from `net` or `prefix` & `length`."))
(defprotocol ^:no-doc IPNetworkOperations
"Operations on objects which may be treated as networks."
(^:private network?* [net] [addr length]
"Returns whether or not the value represents a valid network.")
(network-length [net]
"The length in bits of the network prefix."))
(defn ^:private string-address-ipv4
[^bytes bytes]
(->> bytes (map ubyte) (str/join ".")))
(letfn [(->short [[m x]] (-> m (bit-shift-left 8) (bit-or x)))
(->str [xs] (->> xs (map #(format "%x" %)) (str/join ":")))]
(defn ^:private string-address-ipv6
[^bytes bytes]
(let [shorts (->> bytes (map ubyte) (partition 2) (map ->short))]
(if-let [[nt nd] (longest-run 0 shorts)]
(str (->str (take nt shorts)) "::" (->str (drop (+ nt nd) shorts)))
(->str shorts)))))
(defn ^:private string-address
[^bytes bytes]
(case-expr (alength bytes)
IPParser/IPV4_BYTE_LEN (string-address-ipv4 bytes)
IPParser/IPV6_BYTE_LEN (string-address-ipv6 bytes)))
(deftype IPAddress [meta, ^bytes bytes]
Serializable
Object
(toString [this] (string-address bytes))
(hashCode [this] (bytes-hash-code bytes))
(equals [this other]
(or (identical? this other)
(and (instance? IPAddress other)
(Arrays/equals bytes ^bytes (address-bytes other)))))
IObj
(meta [this] meta)
(withMeta [this new-meta] (IPAddress. new-meta bytes))
Comparable
(compareTo [this other]
(let [plen1 (long (address-length bytes))
^bytes prefix2 (address-bytes other),
plen2 (long (network-length other))]
(IPNetworkComparison/networkCompare bytes plen1 prefix2 plen2)))
IPAddressOperations
(-address? [this] true)
(address-bytes [this] bytes)
(address-length [this] (address-length bytes))
IPNetworkOperations
(network?* [this] false)
(network-length [this] (address-length bytes)))
(ns-unmap *ns* '->IPAddress)
(defn address
"The IP address for representation `addr`."
{:tag `IPAddress}
[addr] (-address addr))
BigInteger mapping is internal - only . BigInteger does n't preserve the input
;; byte-array size, so we need to prepend a pseudo-magic prefix to retain the
;; address length.
(defn ^:private address->BigInteger
"Convert `addr` to an internal-format BigInteger."
{:tag `BigInteger}
[addr] (->> addr address-bytes (cons (byte 63)) byte-array BigInteger.))
(defn address-add
"The `n`th address following `addr` numerically."
{:tag `IPAddress}
[addr n]
(->> (condp instance? n
BigInteger n
BigInt (.toBigInteger ^BigInt n)
,,,,,, (BigInteger/valueOf (long n)))
(.add (address->BigInteger addr))
address))
(defn address-range
"Sequence of addresses from `start` to `stop` *inclusive*."
[start stop]
(let [stop (address->BigInteger stop)]
((fn step [^BigInteger addr]
(lazy-seq
(when-not (pos? (.compareTo addr stop))
(cons (address addr) (step (.add addr BigInteger/ONE))))))
(address->BigInteger start))))
(defn network-compare
"Compare the prefixes of networks `left` and `right`, with the same result
semantics as `compare`. When `stable` is true (the default), 0 will only be
returned when the networks are value-identical; when `stable` is false, 0 will
be returned as long as the networks are identical up to their minimum common
prefix length."
(^long [left right] (network-compare true left right))
(^long [stable left right]
(let [^bytes prefix1 (address-bytes left), plen1 (network-length left)
^bytes prefix2 (address-bytes right), plen2 (network-length right)]
(IPNetworkComparison/networkCompare stable prefix1 plen1 prefix2 plen2))))
(defn network-contains?
"Determine if network `net` contains the address/network `addr`."
[net addr]
(let [length (network-length net)]
(and (<= length (network-length addr))
(zero? (network-compare false net addr)))))
(defn network-count
"Count of addresses in network `net`."
[net]
(let [nbits (- (address-length net) (network-length net))]
(if (> 63 nbits)
(bit-shift-left 1 nbits)
(BigInt/fromBigInteger (.shiftLeft BigInteger/ONE nbits)))))
(defn network-nth
"The `n`th address in the network `net`. Negative `n`s count backwards
from the final address at -1."
[net n] (address-add net (if (neg? n) (+ n (network-count net)) n)))
(deftype IPNetwork [meta, ^bytes prefix, ^long length]
Serializable
Object
(toString [this] (str (string-address prefix) "/" length))
(hashCode [this] (bytes-hash-code prefix length))
(equals [this other]
(or (identical? this other)
(and (instance? IPNetwork other)
(= length (network-length other))
(Arrays/equals prefix ^bytes (address-bytes other)))))
IObj
(meta [this] meta)
(withMeta [this new-meta] (IPNetwork. new-meta prefix length))
Comparable
(compareTo [this other]
(let [^bytes prefix2 (address-bytes other),
plen2 (long (network-length other))]
(IPNetworkComparison/networkCompare prefix length prefix2 plen2)))
ILookup
(valAt [this key]
(when (network-contains? this key) key))
(valAt [this key default]
(if (network-contains? this key) key default))
IFn
(invoke [this key]
(when (network-contains? this key) key))
(invoke [this key default]
(if (network-contains? this key) key default))
Indexed
(count [this] (network-count this))
(nth [this n] (network-nth this n))
Seqable
(seq [this]
(address-range (nth this 0) (nth this -1)))
IPAddressOperations
(-address? [this] false)
(address-bytes [this] prefix)
(address-length [this] (address-length prefix))
IPNetworkOperations
(network?* [this] true)
(network-length [this] length))
(ns-unmap *ns* '->IPNetwork)
(defn ^:private address*
[orig ^bytes bytes]
(when (-address? bytes)
(IPAddress. nil bytes)))
(defn network
"The IP network for representation `net` or `prefix` & `length`."
{:tag `IPNetwork}
([net] (-network net))
([prefix length] (-network prefix length)))
(defn ^:private network*
[orig ^bytes bytes ^long length]
(when (network?* bytes length)
(IPNetwork. nil bytes length)))
(defn address?
"Determine if `addr` is a value which represents an IP address."
[addr] (and (satisfies? IPAddressOperations addr)
(boolean (-address? addr))))
(defn network?
"Determine if `net` is a value which represents an IP network."
([net]
(and (satisfies? IPNetworkOperations net)
(boolean (network?* net))))
([addr length]
(and (satisfies? IPNetworkOperations addr)
(boolean (network?* addr length)))))
(defn inet-address
"Generate a java.net.InetAddress from the provided value."
{:tag `InetAddress}
[addr] (InetAddress/getByAddress (address-bytes addr)))
(defn network-trunc
"Create a network with a prefix consisting of the first `length` bits of
`prefix` and a length of `length`."
{:tag `IPNetwork}
([prefix]
(network-trunc prefix (network-length prefix)))
([prefix length]
(network (doto-let [prefix (byte-array (address-bytes prefix))]
(loop [zbits (long (- (address-length prefix) length)),
i (->> prefix alength dec long)]
(cond (>= zbits 8) (do (aset prefix i (byte 0))
(recur (- zbits 8) (dec i)))
(pos? zbits) (->> (bit-shift-left -1 zbits)
(bit-and (long (aget prefix i)))
byte (aset prefix i)))))
length)))
(defn ->network-set
"Create a hierarchical set from networks in `coll`."
[coll]
(-> (apply hier-set-by network-contains? network-compare
(map network coll))
(vary-meta assoc :type ::network-set)))
(defn network-set
"Create a hierarchical set from networks `nets`."
[& nets] (->network-set nets))
(defmethod clojure.core/print-method ::network-set
[nets ^java.io.Writer w]
(.write w "#ip/network-set #{")
(loop [first? true, nets (seq nets)]
(when nets
(when-not first? (.write w " "))
(print-method (first nets) w)
(recur false (next nets))))
(.write w "}"))
(defn network-supernet
"Network containing the network `net` with a prefix shorter by `n` bits,
default 1."
([net] (network-supernet net 1))
([net n]
(let [pbits (- (network-length net) n)]
(when-not (neg? pbits)
(network-trunc net pbits)))))
(defn network-subnets
"Set of networks within the network `net` which have `n` additional bits of
network prefix, default 1."
([net] (network-subnets net 1))
([net n]
(let [pbits (+ (network-length net) n)
nbits (- (address-length net) pbits)
one (.shiftLeft BigInteger/ONE nbits)
lower (address->BigInteger net)
over (.add lower (.shiftLeft one n))
step (fn step [^BigInteger addr]
(lazy-seq
(when (neg? (.compareTo addr over))
(cons (network addr pbits) (step (.add addr one))))))]
(apply network-set (step lower)))))
(defn address-zero?
"True iff address `addr` is the zero address."
[addr] (every? zero? (address-bytes addr)))
(defn address-networks
"Minimal set of networks containing only the addresses in the range from
`start` to `stop` *inclusive*."
[start stop]
(let [stop (address stop)
nnet (fn [net]
(let [net' (network-supernet net)]
(if (or (nil? net')
(pos? (network-compare start (network-nth net' 0)))
(neg? (network-compare stop (network-nth net' -1))))
net
(recur net'))))
step (fn step [start]
(lazy-seq
(when-not (pos? (network-compare start stop))
(let [net (nnet (network start))
start' (address-add net (network-count net))]
(cons net (when-not (address-zero? start')
(step start')))))))]
(apply network-set (step (address start)))))
(extend-type IPAddress
IPAddressConstruction
(-address [this] this)
IPNetworkConstruction
(-network
([this] (IPNetwork. nil (address-bytes this) (address-length this)))
([this length] (network* this (address-bytes this) length))))
(extend-type IPNetwork
IPAddressConstruction
(-address [this] (IPAddress. nil (address-bytes this)))
IPNetworkConstruction
(-network
([this] this)
([this length] (network* this (address-bytes this) length))))
(extend-type (java.lang.Class/forName "[B")
IPAddressConstruction
(-address [this] (address* this this))
IPAddressOperations
(-address? [this]
(let [len (alength ^bytes this)]
(or (= len IPParser/IPV4_BYTE_LEN)
(= len IPParser/IPV6_BYTE_LEN))))
(address-bytes [this] this)
(address-length [this] (* 8 (alength ^bytes this)))
IPNetworkConstruction
(-network
([this] (network* this this (address-length this)))
([this length] (network* this this length)))
IPNetworkOperations
(network?*
([this] false)
([this length]
(and (-address? this)
(>= length 0)
(<= length (address-length this))
(->> (iterate #(if (pos? %) (- % 8) 0) length)
(map (fn [b rem]
(let [mask (if (<= 8 rem) 0 (bit-shift-right 0xff rem))]
(bit-and b mask)))
this)
(every? zero?)))))
(network-length [this] (address-length this)))
(defn ^:private string-network-split
[net] (str/split net #"/" 2))
(defn ^:private string-network-parts
[net] (let [[prefix length] (string-network-split net)
length (when length
(or (ignore-errors (Long/parseLong length)) -1))]
[(IPParser/parse prefix) length]))
(extend-type String
IPAddressConstruction
(-address [addr] (address* addr (address-bytes addr)))
IPAddressOperations
(-address? [this]
(IPParser/isValid (first (string-network-split this))))
(address-bytes [this]
(IPParser/parse (first (string-network-split this))))
(address-length [this]
(IPParser/length (first (string-network-split this))))
IPNetworkConstruction
(-network
([this]
(let [[prefix length] (string-network-parts this)]
(if length
(network* this prefix length)
(network* this prefix (address-length prefix)))))
([this length]
(network* this (address-bytes this) length)))
IPNetworkOperations
(network?*
([this]
(let [[prefix length] (string-network-parts this)]
(when length
(network?* prefix length))))
([this length]
(let [[prefix _] (string-network-parts this)]
(network?* prefix length))))
(network-length [this]
(let [[_ length] (string-network-parts this)]
(or length (address-length this)))))
(extend-type InetAddress
IPAddressConstruction
(-address [addr]
(address* (.getHostAddress addr) (.getAddress addr)))
IPAddressOperations
(-address? [addr] true)
(address-bytes [addr] (.getAddress addr))
(address-length [addr]
(case-expr (class addr)
java.net.Inet4Address IPParser/IPV4_BIT_LEN
java.net.Inet6Address IPParser/IPV6_BIT_LEN
-1))
IPNetworkConstruction
(-network
([this] (IPNetwork. nil (address-bytes this) (address-length this)))
([this length] (network* this (address-bytes this) length)))
IPNetworkOperations
(network?*
([this] false)
([this length] (network?* (address-bytes this) length)))
(network-length [this] (address-length this)))
(extend-type BigInteger
IPAddressConstruction
(-address [addr] (address* addr (address-bytes addr)))
IPAddressOperations
(-address? [addr] true)
(address-bytes [addr]
(let [b (.toByteArray addr),
n (if (> (alength b) IPParser/IPV6_BYTE_LEN)
IPParser/IPV6_BYTE_LEN
IPParser/IPV4_BYTE_LEN)]
(byte-array (take-last n b))))
(address-length [addr]
(if (> (.bitLength addr) IPParser/IPV6_BIT_LEN)
IPParser/IPV6_BIT_LEN
IPParser/IPV4_BIT_LEN))
IPNetworkConstruction
(-network
([this] (IPNetwork. nil (address-bytes this) (address-length this)))
([this length] (network* this (address-bytes this) length)))
IPNetworkOperations
(network?*
([this] false)
([this length] (network?* (address-bytes this) length)))
(network-length [this] (address-length this)))
(defmethod clojure.core/print-method IPAddress
([^IPAddress addr ^java.io.Writer w]
(.write w "#ip/address \"")
(.write w (str addr))
(.write w "\"")))
(defmethod clojure.core/print-method IPNetwork
([^IPNetwork net ^java.io.Writer w]
(.write w "#ip/network \"")
(.write w (str net))
(.write w "\"")))
| null | https://raw.githubusercontent.com/damballa/inet.data/259eb578546bc0b8deb941df9642c0bf81b81dd0/src/clojure/inet/data/ip.clj | clojure | byte-array size, so we need to prepend a pseudo-magic prefix to retain the
address length.
when `stable` is false, 0 will | (ns inet.data.ip
"Functions for interacting with IP addresses and networks."
(:require [clojure.string :as str])
(:use [inet.data.util :only [ignore-errors case-expr ubyte sbyte longest-run
bytes-hash-code doto-let]]
[hier-set.core :only [hier-set-by]])
(:import [clojure.lang IFn IObj ILookup BigInt Indexed Seqable]
[inet.data.ip IPParser IPNetworkComparison]
[java.io Serializable]
[java.util Arrays]
[java.net InetAddress]))
(defprotocol ^:no-doc IPAddressConstruction
"Construct a full address object."
(^:private -address [addr]
"Produce an IPAddress from `addr`."))
(defprotocol ^:no-doc IPAddressOperations
"Operations on objects which may be treated as addresses."
(^:private -address? [addr]
"Returns whether or not the value represents a valid address.")
(^bytes address-bytes [addr]
"Retrieve the bytes representation of this address.")
(^long address-length [addr]
"The length in bits of this address."))
(defprotocol ^:no-doc IPNetworkConstruction
"Construct a full network object."
(^:private -network [net] [prefix length]
"Produce an IPNetwork from `net` or `prefix` & `length`."))
(defprotocol ^:no-doc IPNetworkOperations
"Operations on objects which may be treated as networks."
(^:private network?* [net] [addr length]
"Returns whether or not the value represents a valid network.")
(network-length [net]
"The length in bits of the network prefix."))
(defn ^:private string-address-ipv4
[^bytes bytes]
(->> bytes (map ubyte) (str/join ".")))
(letfn [(->short [[m x]] (-> m (bit-shift-left 8) (bit-or x)))
(->str [xs] (->> xs (map #(format "%x" %)) (str/join ":")))]
(defn ^:private string-address-ipv6
[^bytes bytes]
(let [shorts (->> bytes (map ubyte) (partition 2) (map ->short))]
(if-let [[nt nd] (longest-run 0 shorts)]
(str (->str (take nt shorts)) "::" (->str (drop (+ nt nd) shorts)))
(->str shorts)))))
(defn ^:private string-address
[^bytes bytes]
(case-expr (alength bytes)
IPParser/IPV4_BYTE_LEN (string-address-ipv4 bytes)
IPParser/IPV6_BYTE_LEN (string-address-ipv6 bytes)))
(deftype IPAddress [meta, ^bytes bytes]
Serializable
Object
(toString [this] (string-address bytes))
(hashCode [this] (bytes-hash-code bytes))
(equals [this other]
(or (identical? this other)
(and (instance? IPAddress other)
(Arrays/equals bytes ^bytes (address-bytes other)))))
IObj
(meta [this] meta)
(withMeta [this new-meta] (IPAddress. new-meta bytes))
Comparable
(compareTo [this other]
(let [plen1 (long (address-length bytes))
^bytes prefix2 (address-bytes other),
plen2 (long (network-length other))]
(IPNetworkComparison/networkCompare bytes plen1 prefix2 plen2)))
IPAddressOperations
(-address? [this] true)
(address-bytes [this] bytes)
(address-length [this] (address-length bytes))
IPNetworkOperations
(network?* [this] false)
(network-length [this] (address-length bytes)))
(ns-unmap *ns* '->IPAddress)
(defn address
"The IP address for representation `addr`."
{:tag `IPAddress}
[addr] (-address addr))
BigInteger mapping is internal - only . BigInteger does n't preserve the input
(defn ^:private address->BigInteger
"Convert `addr` to an internal-format BigInteger."
{:tag `BigInteger}
[addr] (->> addr address-bytes (cons (byte 63)) byte-array BigInteger.))
(defn address-add
"The `n`th address following `addr` numerically."
{:tag `IPAddress}
[addr n]
(->> (condp instance? n
BigInteger n
BigInt (.toBigInteger ^BigInt n)
,,,,,, (BigInteger/valueOf (long n)))
(.add (address->BigInteger addr))
address))
(defn address-range
"Sequence of addresses from `start` to `stop` *inclusive*."
[start stop]
(let [stop (address->BigInteger stop)]
((fn step [^BigInteger addr]
(lazy-seq
(when-not (pos? (.compareTo addr stop))
(cons (address addr) (step (.add addr BigInteger/ONE))))))
(address->BigInteger start))))
(defn network-compare
"Compare the prefixes of networks `left` and `right`, with the same result
semantics as `compare`. When `stable` is true (the default), 0 will only be
be returned as long as the networks are identical up to their minimum common
prefix length."
(^long [left right] (network-compare true left right))
(^long [stable left right]
(let [^bytes prefix1 (address-bytes left), plen1 (network-length left)
^bytes prefix2 (address-bytes right), plen2 (network-length right)]
(IPNetworkComparison/networkCompare stable prefix1 plen1 prefix2 plen2))))
(defn network-contains?
"Determine if network `net` contains the address/network `addr`."
[net addr]
(let [length (network-length net)]
(and (<= length (network-length addr))
(zero? (network-compare false net addr)))))
(defn network-count
"Count of addresses in network `net`."
[net]
(let [nbits (- (address-length net) (network-length net))]
(if (> 63 nbits)
(bit-shift-left 1 nbits)
(BigInt/fromBigInteger (.shiftLeft BigInteger/ONE nbits)))))
(defn network-nth
"The `n`th address in the network `net`. Negative `n`s count backwards
from the final address at -1."
[net n] (address-add net (if (neg? n) (+ n (network-count net)) n)))
(deftype IPNetwork [meta, ^bytes prefix, ^long length]
Serializable
Object
(toString [this] (str (string-address prefix) "/" length))
(hashCode [this] (bytes-hash-code prefix length))
(equals [this other]
(or (identical? this other)
(and (instance? IPNetwork other)
(= length (network-length other))
(Arrays/equals prefix ^bytes (address-bytes other)))))
IObj
(meta [this] meta)
(withMeta [this new-meta] (IPNetwork. new-meta prefix length))
Comparable
(compareTo [this other]
(let [^bytes prefix2 (address-bytes other),
plen2 (long (network-length other))]
(IPNetworkComparison/networkCompare prefix length prefix2 plen2)))
ILookup
(valAt [this key]
(when (network-contains? this key) key))
(valAt [this key default]
(if (network-contains? this key) key default))
IFn
(invoke [this key]
(when (network-contains? this key) key))
(invoke [this key default]
(if (network-contains? this key) key default))
Indexed
(count [this] (network-count this))
(nth [this n] (network-nth this n))
Seqable
(seq [this]
(address-range (nth this 0) (nth this -1)))
IPAddressOperations
(-address? [this] false)
(address-bytes [this] prefix)
(address-length [this] (address-length prefix))
IPNetworkOperations
(network?* [this] true)
(network-length [this] length))
(ns-unmap *ns* '->IPNetwork)
(defn ^:private address*
[orig ^bytes bytes]
(when (-address? bytes)
(IPAddress. nil bytes)))
(defn network
"The IP network for representation `net` or `prefix` & `length`."
{:tag `IPNetwork}
([net] (-network net))
([prefix length] (-network prefix length)))
(defn ^:private network*
[orig ^bytes bytes ^long length]
(when (network?* bytes length)
(IPNetwork. nil bytes length)))
(defn address?
"Determine if `addr` is a value which represents an IP address."
[addr] (and (satisfies? IPAddressOperations addr)
(boolean (-address? addr))))
(defn network?
"Determine if `net` is a value which represents an IP network."
([net]
(and (satisfies? IPNetworkOperations net)
(boolean (network?* net))))
([addr length]
(and (satisfies? IPNetworkOperations addr)
(boolean (network?* addr length)))))
(defn inet-address
"Generate a java.net.InetAddress from the provided value."
{:tag `InetAddress}
[addr] (InetAddress/getByAddress (address-bytes addr)))
(defn network-trunc
"Create a network with a prefix consisting of the first `length` bits of
`prefix` and a length of `length`."
{:tag `IPNetwork}
([prefix]
(network-trunc prefix (network-length prefix)))
([prefix length]
(network (doto-let [prefix (byte-array (address-bytes prefix))]
(loop [zbits (long (- (address-length prefix) length)),
i (->> prefix alength dec long)]
(cond (>= zbits 8) (do (aset prefix i (byte 0))
(recur (- zbits 8) (dec i)))
(pos? zbits) (->> (bit-shift-left -1 zbits)
(bit-and (long (aget prefix i)))
byte (aset prefix i)))))
length)))
(defn ->network-set
"Create a hierarchical set from networks in `coll`."
[coll]
(-> (apply hier-set-by network-contains? network-compare
(map network coll))
(vary-meta assoc :type ::network-set)))
(defn network-set
"Create a hierarchical set from networks `nets`."
[& nets] (->network-set nets))
(defmethod clojure.core/print-method ::network-set
[nets ^java.io.Writer w]
(.write w "#ip/network-set #{")
(loop [first? true, nets (seq nets)]
(when nets
(when-not first? (.write w " "))
(print-method (first nets) w)
(recur false (next nets))))
(.write w "}"))
(defn network-supernet
"Network containing the network `net` with a prefix shorter by `n` bits,
default 1."
([net] (network-supernet net 1))
([net n]
(let [pbits (- (network-length net) n)]
(when-not (neg? pbits)
(network-trunc net pbits)))))
(defn network-subnets
"Set of networks within the network `net` which have `n` additional bits of
network prefix, default 1."
([net] (network-subnets net 1))
([net n]
(let [pbits (+ (network-length net) n)
nbits (- (address-length net) pbits)
one (.shiftLeft BigInteger/ONE nbits)
lower (address->BigInteger net)
over (.add lower (.shiftLeft one n))
step (fn step [^BigInteger addr]
(lazy-seq
(when (neg? (.compareTo addr over))
(cons (network addr pbits) (step (.add addr one))))))]
(apply network-set (step lower)))))
(defn address-zero?
"True iff address `addr` is the zero address."
[addr] (every? zero? (address-bytes addr)))
(defn address-networks
"Minimal set of networks containing only the addresses in the range from
`start` to `stop` *inclusive*."
[start stop]
(let [stop (address stop)
nnet (fn [net]
(let [net' (network-supernet net)]
(if (or (nil? net')
(pos? (network-compare start (network-nth net' 0)))
(neg? (network-compare stop (network-nth net' -1))))
net
(recur net'))))
step (fn step [start]
(lazy-seq
(when-not (pos? (network-compare start stop))
(let [net (nnet (network start))
start' (address-add net (network-count net))]
(cons net (when-not (address-zero? start')
(step start')))))))]
(apply network-set (step (address start)))))
(extend-type IPAddress
IPAddressConstruction
(-address [this] this)
IPNetworkConstruction
(-network
([this] (IPNetwork. nil (address-bytes this) (address-length this)))
([this length] (network* this (address-bytes this) length))))
(extend-type IPNetwork
IPAddressConstruction
(-address [this] (IPAddress. nil (address-bytes this)))
IPNetworkConstruction
(-network
([this] this)
([this length] (network* this (address-bytes this) length))))
(extend-type (java.lang.Class/forName "[B")
IPAddressConstruction
(-address [this] (address* this this))
IPAddressOperations
(-address? [this]
(let [len (alength ^bytes this)]
(or (= len IPParser/IPV4_BYTE_LEN)
(= len IPParser/IPV6_BYTE_LEN))))
(address-bytes [this] this)
(address-length [this] (* 8 (alength ^bytes this)))
IPNetworkConstruction
(-network
([this] (network* this this (address-length this)))
([this length] (network* this this length)))
IPNetworkOperations
(network?*
([this] false)
([this length]
(and (-address? this)
(>= length 0)
(<= length (address-length this))
(->> (iterate #(if (pos? %) (- % 8) 0) length)
(map (fn [b rem]
(let [mask (if (<= 8 rem) 0 (bit-shift-right 0xff rem))]
(bit-and b mask)))
this)
(every? zero?)))))
(network-length [this] (address-length this)))
(defn ^:private string-network-split
[net] (str/split net #"/" 2))
(defn ^:private string-network-parts
[net] (let [[prefix length] (string-network-split net)
length (when length
(or (ignore-errors (Long/parseLong length)) -1))]
[(IPParser/parse prefix) length]))
(extend-type String
IPAddressConstruction
(-address [addr] (address* addr (address-bytes addr)))
IPAddressOperations
(-address? [this]
(IPParser/isValid (first (string-network-split this))))
(address-bytes [this]
(IPParser/parse (first (string-network-split this))))
(address-length [this]
(IPParser/length (first (string-network-split this))))
IPNetworkConstruction
(-network
([this]
(let [[prefix length] (string-network-parts this)]
(if length
(network* this prefix length)
(network* this prefix (address-length prefix)))))
([this length]
(network* this (address-bytes this) length)))
IPNetworkOperations
(network?*
([this]
(let [[prefix length] (string-network-parts this)]
(when length
(network?* prefix length))))
([this length]
(let [[prefix _] (string-network-parts this)]
(network?* prefix length))))
(network-length [this]
(let [[_ length] (string-network-parts this)]
(or length (address-length this)))))
(extend-type InetAddress
IPAddressConstruction
(-address [addr]
(address* (.getHostAddress addr) (.getAddress addr)))
IPAddressOperations
(-address? [addr] true)
(address-bytes [addr] (.getAddress addr))
(address-length [addr]
(case-expr (class addr)
java.net.Inet4Address IPParser/IPV4_BIT_LEN
java.net.Inet6Address IPParser/IPV6_BIT_LEN
-1))
IPNetworkConstruction
(-network
([this] (IPNetwork. nil (address-bytes this) (address-length this)))
([this length] (network* this (address-bytes this) length)))
IPNetworkOperations
(network?*
([this] false)
([this length] (network?* (address-bytes this) length)))
(network-length [this] (address-length this)))
(extend-type BigInteger
IPAddressConstruction
(-address [addr] (address* addr (address-bytes addr)))
IPAddressOperations
(-address? [addr] true)
(address-bytes [addr]
(let [b (.toByteArray addr),
n (if (> (alength b) IPParser/IPV6_BYTE_LEN)
IPParser/IPV6_BYTE_LEN
IPParser/IPV4_BYTE_LEN)]
(byte-array (take-last n b))))
(address-length [addr]
(if (> (.bitLength addr) IPParser/IPV6_BIT_LEN)
IPParser/IPV6_BIT_LEN
IPParser/IPV4_BIT_LEN))
IPNetworkConstruction
(-network
([this] (IPNetwork. nil (address-bytes this) (address-length this)))
([this length] (network* this (address-bytes this) length)))
IPNetworkOperations
(network?*
([this] false)
([this length] (network?* (address-bytes this) length)))
(network-length [this] (address-length this)))
(defmethod clojure.core/print-method IPAddress
([^IPAddress addr ^java.io.Writer w]
(.write w "#ip/address \"")
(.write w (str addr))
(.write w "\"")))
(defmethod clojure.core/print-method IPNetwork
([^IPNetwork net ^java.io.Writer w]
(.write w "#ip/network \"")
(.write w (str net))
(.write w "\"")))
|
faf0ed7542ba760eacb0de9b75f39193242a23ed70e1a8c2043e446d5904a246 | profmaad/bitcaml | main.ml | open! Core.Std
open Bitcoin.Protocol
let local_version () =
let services_set = Service.Set.singleton Bitcoin.Protocol.Service.NetworkNodeService in
let localhost_address_string = (String.make 10 '\x00') ^ "\xff\xff" ^ "\127\000\000\001" in
let receiver_address = {
Bitcoin.Protocol.services = services_set;
address = localhost_address_string;
port = Bitcaml_config.peer_port;
} in
let sender_address = receiver_address in
let random_nonce = Random.int64 Int64.max_value in
{
Bitcoin.Protocol.protocol_version = Bitcaml_config.bitcoin_protocol_version;
node_services = services_set;
timestamp = Unix.localtime (Unix.time ());
receiver_address = receiver_address;
sender_address = Some sender_address;
random_nonce = Some random_nonce;
user_agent = Some Bitcaml_config.user_agent;
start_height = Some 0;
relay = Some false;
}
;;
let connect_to_peer ip_address port =
let socket = Unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
let peer_addr = Unix.ADDR_INET(Unix.Inet_addr.of_string ip_address, port) in
Unix.connect socket peer_addr;
socket
;;
let close_peer_connection socket =
Unix.sleep 1;
Unix.close socket
;;
let no_debug peer = { peer with Bitcoin.Peer.peer_debug = false };;
let debug peer = { peer with Bitcoin.Peer.peer_debug = true };;
(* main *)
let () =
Random.self_init ();
print_string "Sanity testing genesis block against its own hash...\t";
let calculated_genesis_hash = Bitcoin.Protocol.Generator.block_hash Bitcaml_config.testnet3_genesis_block_header in
if calculated_genesis_hash = Bitcaml_config.testnet3_genesis_block_hash then
print_endline "PASSED"
else (
Printf.printf"FAILED: %s != %s\n" calculated_genesis_hash Bitcaml_config.testnet3_genesis_block_hash;
exit 1;
);
print_string "Testing difficulty calculation...\t";
let difficulty_test_results = [
Bitcoin.Blockchain.DB.log_difficulty_of_difficulty_bits { Bitcoin.Protocol.bits_base = 0x00ffff; bits_exponent = 0x1d; };
Bitcoin.Blockchain.DB.log_difficulty_of_difficulty_bits { Bitcoin.Protocol.bits_base = 0x0404cb; bits_exponent = 0x1b; };
] in
print_endline (String.concat ~sep:", " (List.map ~f:(Printf.sprintf "%f") difficulty_test_results));
print_endline "Testing script parser and pretty printer...";
let test_script = "\x76\xa9\x14\x2f\xef\x8e\xdc\xc4\x50\x19\xac\xba\x3b\xb1\x46\xb7\x6c\xbd\x2f\x84\x8b\xe5\xd6\x88\xac" in
let parsed_script = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string test_script) in
Bitcoin.Script.PP.print_script parsed_script;
print_endline "DONE";
print_string "Sanity testing script generator against parser...\t";
let test_script = "\x76\xa9\x14\x2f\xef\x8e\xdc\xc4\x50\x19\xac\xba\x3b\xb1\x46\xb7\x6c\xbd\x2f\x84\x8b\xe5\xd6\x88\xac" in
let parsed_script = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string test_script) in
let generated_script = Bitstring.string_of_bitstring (Bitcoin.Script.Generator.bitstring_of_script parsed_script) in
if test_script = generated_script then print_endline "PASSED"
else
Printf.printf "FAILED:\nExpected: %s\nActual : %s\n" (Utils.hex_string_of_string test_script) (Utils.hex_string_of_string generated_script)
;
print_endline "Testing script engine...";
let block_new, _ = Bitcoin.Protocol.Parser.parse_block (Bitstring.bitstring_of_file "/tmp/block_new.dat") in
let block_new = Option.value_exn block_new in
let tx_new = List.nth_exn block_new.Bitcoin.Protocol.block_transactions 1 in
Bitcoin.Protocol.PP.print_transaction tx_new; Out_channel.newline stdout;
let block_old, _ = Bitcoin.Protocol.Parser.parse_block (Bitstring.bitstring_of_file "/tmp/block_old.dat") in
let block_old = Option.value_exn block_old in
let tx_old = List.nth_exn block_old.Bitcoin.Protocol.block_transactions 4 in
Bitcoin.Protocol.PP.print_transaction tx_old; Out_channel.newline stdout;
(* let tx_new = { *)
transaction_data_format_version = 1 ;
(* transaction_inputs = [ *)
(* { *)
(* previous_transaction_output = { *)
(* referenced_transaction_hash = Utils.hex_decode_rev "0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9"; *)
transaction_output_index = 0l ;
(* }; *)
signature_script = " \071 " ^ Utils.hex_decode " 304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4acdd12909d831cc56cbbac4622082221a8768d1d0901 " ;
transaction_sequence_number = 0xffffffffl ;
(* } *)
(* ]; *)
(* transaction_outputs = [ *)
(* { *)
transaction_output_value = 1000000000L ;
output_script = Utils.hex_decode " 4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1baded5c72a704f7e6cd84cac " ;
(* }; *)
(* { *)
transaction_output_value = 4000000000L ;
(* output_script = Utils.hex_decode "410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"; *)
(* } *)
(* ]; *)
transaction_lock_time = AlwaysLockedTransaction ;
(* } in *)
let pubkey_script = (List.hd_exn tx_new.Bitcoin.Protocol.transaction_inputs).Bitcoin.Protocol.signature_script in
let pubkey_script_asm = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string pubkey_script) in
let output_script = (List.nth_exn tx_old.Bitcoin.Protocol.transaction_outputs 1).Bitcoin.Protocol.output_script in
(* let output_script = Utils.hex_decode "410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac" in *)
let output_script_asm = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string output_script) in
let asm = pubkey_script_asm @ [Bitcoin.Script.CodeSeparator] @ output_script_asm in
Bitcoin.Script.PP.print_script asm;
print_endline "--------------------------";
let script_result = Bitcoin.Script.Interpreter.execute_script asm (tx_new, 0) in
print_endline "--------------------------";
( match script_result with
| Bitcoin.Script.Interpreter.Result item ->
Printf.printf "Script result: %s\n" (Bitcoin.Script.PP.pp_string_of_data_item item);
if Bitcoin_script.bool_of_data_item item then print_endline "PASSED" else print_endline "FAILED"
| Bitcoin.Script.Interpreter.Invalid -> print_endline "Script result: INVALID"; print_endline "FAILED"
);
print_endline "Testing merkle tree hashing...";
let merkle_root_new = Bitcoin.Rules.merkle_root_of_block block_new in
( if merkle_root_new = block_new.block_header.merkle_root then
Printf.printf "PASSED: %s =\n %s\n" (Utils.hex_string_of_string merkle_root_new) (Utils.hex_string_of_string block_new.block_header.merkle_root)
else
Printf.printf "FAILED: %s !=\n %s\n" (Utils.hex_string_of_string merkle_root_new) (Utils.hex_string_of_string block_new.block_header.merkle_root)
);
let merkle_root_old = Bitcoin.Rules.merkle_root_of_block block_old in
( if merkle_root_old = block_old.block_header.merkle_root then
Printf.printf "PASSED: %s =\n %s\n" (Utils.hex_string_of_string merkle_root_old) (Utils.hex_string_of_string block_old.block_header.merkle_root)
else
Printf.printf "FAILED: %s !=\n %s\n" (Utils.hex_string_of_string merkle_root_old) (Utils.hex_string_of_string block_old.block_header.merkle_root)
);
print_string "Sanity testing weird script int encoding...\t";
let test_value = "\xff\xff\xff\x82" in
let decoded_test_value = Option.value_exn (Bitcoin.Script.int64_of_data_item test_value) in
let encoded_test_value = Bitcoin.Script.data_item_of_int64 decoded_test_value in
( if (compare test_value encoded_test_value) <> 0 then
Printf.printf "FAILED: %s -> %Ld -> %s\n" (Utils.hex_encode test_value) decoded_test_value (Utils.hex_encode encoded_test_value)
else
print_endline "PASSED"
);
Printf.printf "Opening and initializing blockchain at %s...\t" Bitcaml_config.testnet3_folder;
let blockchain = Bitcoin.Blockchain.init_default Bitcaml_config.testnet3_folder in
print_endline "DONE";
print_string "Establishing TCP connection to peer...\t\t";
let peer_socket = connect_to_peer Bitcaml_config.peer_ip_address Bitcaml_config.peer_port in
print_endline "DONE";
let peer = {
Bitcoin.Peer.peer_network = Bitcoin.Protocol.TestNet3;
local_version = local_version ();
peer_version = Bitcoin.Peer.default_version;
peer_socket = peer_socket;
peer_debug = true;
blockchain = blockchain;
} in
Bitcoin.Peer.handle_peer peer;
(* print_string "Retrieving TestNet3 genesis block...\t\t"; *)
( match Bitcoin . Peer.get_block peer Bitcaml_config.testnet3_genesis_block_hash with
(* | None -> print_endline "FAILED" *)
(* | Some block -> print_endline "PASSED"; Bitcoin.Protocol.PP.print_block block *)
(* ); *)
print_string " Retrieving a TestNet3 initial blocks ... \t\t " ;
(* List.iter (fun hash -> *)
match Bitcoin . Peer.get_block peer hash with
(* | None -> print_endline "FAILED" *)
(* | Some block -> *)
(* print_endline "PASSED"; *)
(* Bitcoin.Protocol.PP.print_block block; *)
ignore ( Bitcoin . Blockchain.insert_block block . Bitcoin . ) )
(* (List.rev Bitcaml_config.testnet3_initial_block_hashes); *)
print_string "Disconnecting from peer...\t\t\t";
close_peer_connection peer_socket;
print_endline "DONE"
;;
| null | https://raw.githubusercontent.com/profmaad/bitcaml/18cfbca46c989f43dfb1bcfd50ee2ff500f9ab8d/src/main.ml | ocaml | main
let tx_new = {
transaction_inputs = [
{
previous_transaction_output = {
referenced_transaction_hash = Utils.hex_decode_rev "0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9";
};
}
];
transaction_outputs = [
{
};
{
output_script = Utils.hex_decode "410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac";
}
];
} in
let output_script = Utils.hex_decode "410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac" in
print_string "Retrieving TestNet3 genesis block...\t\t";
| None -> print_endline "FAILED"
| Some block -> print_endline "PASSED"; Bitcoin.Protocol.PP.print_block block
);
List.iter (fun hash ->
| None -> print_endline "FAILED"
| Some block ->
print_endline "PASSED";
Bitcoin.Protocol.PP.print_block block;
(List.rev Bitcaml_config.testnet3_initial_block_hashes); | open! Core.Std
open Bitcoin.Protocol
let local_version () =
let services_set = Service.Set.singleton Bitcoin.Protocol.Service.NetworkNodeService in
let localhost_address_string = (String.make 10 '\x00') ^ "\xff\xff" ^ "\127\000\000\001" in
let receiver_address = {
Bitcoin.Protocol.services = services_set;
address = localhost_address_string;
port = Bitcaml_config.peer_port;
} in
let sender_address = receiver_address in
let random_nonce = Random.int64 Int64.max_value in
{
Bitcoin.Protocol.protocol_version = Bitcaml_config.bitcoin_protocol_version;
node_services = services_set;
timestamp = Unix.localtime (Unix.time ());
receiver_address = receiver_address;
sender_address = Some sender_address;
random_nonce = Some random_nonce;
user_agent = Some Bitcaml_config.user_agent;
start_height = Some 0;
relay = Some false;
}
;;
let connect_to_peer ip_address port =
let socket = Unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
let peer_addr = Unix.ADDR_INET(Unix.Inet_addr.of_string ip_address, port) in
Unix.connect socket peer_addr;
socket
;;
let close_peer_connection socket =
Unix.sleep 1;
Unix.close socket
;;
let no_debug peer = { peer with Bitcoin.Peer.peer_debug = false };;
let debug peer = { peer with Bitcoin.Peer.peer_debug = true };;
let () =
Random.self_init ();
print_string "Sanity testing genesis block against its own hash...\t";
let calculated_genesis_hash = Bitcoin.Protocol.Generator.block_hash Bitcaml_config.testnet3_genesis_block_header in
if calculated_genesis_hash = Bitcaml_config.testnet3_genesis_block_hash then
print_endline "PASSED"
else (
Printf.printf"FAILED: %s != %s\n" calculated_genesis_hash Bitcaml_config.testnet3_genesis_block_hash;
exit 1;
);
print_string "Testing difficulty calculation...\t";
let difficulty_test_results = [
Bitcoin.Blockchain.DB.log_difficulty_of_difficulty_bits { Bitcoin.Protocol.bits_base = 0x00ffff; bits_exponent = 0x1d; };
Bitcoin.Blockchain.DB.log_difficulty_of_difficulty_bits { Bitcoin.Protocol.bits_base = 0x0404cb; bits_exponent = 0x1b; };
] in
print_endline (String.concat ~sep:", " (List.map ~f:(Printf.sprintf "%f") difficulty_test_results));
print_endline "Testing script parser and pretty printer...";
let test_script = "\x76\xa9\x14\x2f\xef\x8e\xdc\xc4\x50\x19\xac\xba\x3b\xb1\x46\xb7\x6c\xbd\x2f\x84\x8b\xe5\xd6\x88\xac" in
let parsed_script = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string test_script) in
Bitcoin.Script.PP.print_script parsed_script;
print_endline "DONE";
print_string "Sanity testing script generator against parser...\t";
let test_script = "\x76\xa9\x14\x2f\xef\x8e\xdc\xc4\x50\x19\xac\xba\x3b\xb1\x46\xb7\x6c\xbd\x2f\x84\x8b\xe5\xd6\x88\xac" in
let parsed_script = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string test_script) in
let generated_script = Bitstring.string_of_bitstring (Bitcoin.Script.Generator.bitstring_of_script parsed_script) in
if test_script = generated_script then print_endline "PASSED"
else
Printf.printf "FAILED:\nExpected: %s\nActual : %s\n" (Utils.hex_string_of_string test_script) (Utils.hex_string_of_string generated_script)
;
print_endline "Testing script engine...";
let block_new, _ = Bitcoin.Protocol.Parser.parse_block (Bitstring.bitstring_of_file "/tmp/block_new.dat") in
let block_new = Option.value_exn block_new in
let tx_new = List.nth_exn block_new.Bitcoin.Protocol.block_transactions 1 in
Bitcoin.Protocol.PP.print_transaction tx_new; Out_channel.newline stdout;
let block_old, _ = Bitcoin.Protocol.Parser.parse_block (Bitstring.bitstring_of_file "/tmp/block_old.dat") in
let block_old = Option.value_exn block_old in
let tx_old = List.nth_exn block_old.Bitcoin.Protocol.block_transactions 4 in
Bitcoin.Protocol.PP.print_transaction tx_old; Out_channel.newline stdout;
transaction_data_format_version = 1 ;
transaction_output_index = 0l ;
signature_script = " \071 " ^ Utils.hex_decode " 304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4acdd12909d831cc56cbbac4622082221a8768d1d0901 " ;
transaction_sequence_number = 0xffffffffl ;
transaction_output_value = 1000000000L ;
output_script = Utils.hex_decode " 4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1baded5c72a704f7e6cd84cac " ;
transaction_output_value = 4000000000L ;
transaction_lock_time = AlwaysLockedTransaction ;
let pubkey_script = (List.hd_exn tx_new.Bitcoin.Protocol.transaction_inputs).Bitcoin.Protocol.signature_script in
let pubkey_script_asm = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string pubkey_script) in
let output_script = (List.nth_exn tx_old.Bitcoin.Protocol.transaction_outputs 1).Bitcoin.Protocol.output_script in
let output_script_asm = Bitcoin.Script.Parser.parse_script (Bitstring.bitstring_of_string output_script) in
let asm = pubkey_script_asm @ [Bitcoin.Script.CodeSeparator] @ output_script_asm in
Bitcoin.Script.PP.print_script asm;
print_endline "--------------------------";
let script_result = Bitcoin.Script.Interpreter.execute_script asm (tx_new, 0) in
print_endline "--------------------------";
( match script_result with
| Bitcoin.Script.Interpreter.Result item ->
Printf.printf "Script result: %s\n" (Bitcoin.Script.PP.pp_string_of_data_item item);
if Bitcoin_script.bool_of_data_item item then print_endline "PASSED" else print_endline "FAILED"
| Bitcoin.Script.Interpreter.Invalid -> print_endline "Script result: INVALID"; print_endline "FAILED"
);
print_endline "Testing merkle tree hashing...";
let merkle_root_new = Bitcoin.Rules.merkle_root_of_block block_new in
( if merkle_root_new = block_new.block_header.merkle_root then
Printf.printf "PASSED: %s =\n %s\n" (Utils.hex_string_of_string merkle_root_new) (Utils.hex_string_of_string block_new.block_header.merkle_root)
else
Printf.printf "FAILED: %s !=\n %s\n" (Utils.hex_string_of_string merkle_root_new) (Utils.hex_string_of_string block_new.block_header.merkle_root)
);
let merkle_root_old = Bitcoin.Rules.merkle_root_of_block block_old in
( if merkle_root_old = block_old.block_header.merkle_root then
Printf.printf "PASSED: %s =\n %s\n" (Utils.hex_string_of_string merkle_root_old) (Utils.hex_string_of_string block_old.block_header.merkle_root)
else
Printf.printf "FAILED: %s !=\n %s\n" (Utils.hex_string_of_string merkle_root_old) (Utils.hex_string_of_string block_old.block_header.merkle_root)
);
print_string "Sanity testing weird script int encoding...\t";
let test_value = "\xff\xff\xff\x82" in
let decoded_test_value = Option.value_exn (Bitcoin.Script.int64_of_data_item test_value) in
let encoded_test_value = Bitcoin.Script.data_item_of_int64 decoded_test_value in
( if (compare test_value encoded_test_value) <> 0 then
Printf.printf "FAILED: %s -> %Ld -> %s\n" (Utils.hex_encode test_value) decoded_test_value (Utils.hex_encode encoded_test_value)
else
print_endline "PASSED"
);
Printf.printf "Opening and initializing blockchain at %s...\t" Bitcaml_config.testnet3_folder;
let blockchain = Bitcoin.Blockchain.init_default Bitcaml_config.testnet3_folder in
print_endline "DONE";
print_string "Establishing TCP connection to peer...\t\t";
let peer_socket = connect_to_peer Bitcaml_config.peer_ip_address Bitcaml_config.peer_port in
print_endline "DONE";
let peer = {
Bitcoin.Peer.peer_network = Bitcoin.Protocol.TestNet3;
local_version = local_version ();
peer_version = Bitcoin.Peer.default_version;
peer_socket = peer_socket;
peer_debug = true;
blockchain = blockchain;
} in
Bitcoin.Peer.handle_peer peer;
( match Bitcoin . Peer.get_block peer Bitcaml_config.testnet3_genesis_block_hash with
print_string " Retrieving a TestNet3 initial blocks ... \t\t " ;
match Bitcoin . Peer.get_block peer hash with
ignore ( Bitcoin . Blockchain.insert_block block . Bitcoin . ) )
print_string "Disconnecting from peer...\t\t\t";
close_peer_connection peer_socket;
print_endline "DONE"
;;
|
1dcb38e7cfd42708cfcd51b50f719dfe0807372824b93b0467fe78cb77ebccbe | winny-/aoc | day12.rkt | #lang racket
Advent of code day 12 :
(require json)
(define/contract (summate-jsexpr je)
(jsexpr? . -> . number?)
(match je
[(? number? n) n]
[(? list? ls) (foldl + 0 (map summate-jsexpr ls))]
[(? hash? h) (foldl + 0 (map summate-jsexpr (hash-values h)))]
[_ 0]))
(define/contract (summate-jsexpr2 je)
(jsexpr? . -> . number?)
(match je
[(? number? n) n]
[(? list? ls) (foldl + 0 (map summate-jsexpr2 ls))]
[(? hash? h)
(if (ormap (λ (v) (and (string? v) (string-contains? v "red"))) (hash-values h))
0
(foldl + 0 (hash-map h (λ (k v) (summate-jsexpr2 v)))))]
[_ 0]))
(module+ test
(require rackunit)
(define f (compose summate-jsexpr string->jsexpr))
(define g (compose summate-jsexpr2 string->jsexpr))
(check-equal? (f "[1,2,3]") 6)
(check-equal? (f "{\"a\":2,\"b\":4}") 6)
(check-equal? (f "[[[3]]]") 3)
(check-equal? (f "{\"a\":{\"b\":4},\"c\":-1}") 3)
(check-equal? (f "{\"a\":[-1,1]}") 0)
(check-equal? (f "[-1,{\"a\":1}]") 0)
(check-equal? (f "[]") 0)
(check-equal? (f "{}") 0)
(check-equal? (g "{\"a\":2,\"b\":4}") 6)
(check-equal? (g "[1,2,3]") 6)
(check-equal? (g "[1,{\"c\":\"red\",\"b\":2},3]") 4)
(check-equal? (g "{\"d\":\"red\",\"e\":[1,2,3,4],\"f\":5}") 0)
(check-equal? (g "[1,\"red\",5]") 6))
(module+ main
(define jo (read-json))
(displayln (format "With part #1 rules: ~a" (summate-jsexpr jo)))
(displayln (format "With part #2 rules: ~a" (summate-jsexpr2 jo))))
| null | https://raw.githubusercontent.com/winny-/aoc/d508caae19899dcab57ac665ef5d1a05b0a90c0c/2015/day12/day12.rkt | racket | #lang racket
Advent of code day 12 :
(require json)
(define/contract (summate-jsexpr je)
(jsexpr? . -> . number?)
(match je
[(? number? n) n]
[(? list? ls) (foldl + 0 (map summate-jsexpr ls))]
[(? hash? h) (foldl + 0 (map summate-jsexpr (hash-values h)))]
[_ 0]))
(define/contract (summate-jsexpr2 je)
(jsexpr? . -> . number?)
(match je
[(? number? n) n]
[(? list? ls) (foldl + 0 (map summate-jsexpr2 ls))]
[(? hash? h)
(if (ormap (λ (v) (and (string? v) (string-contains? v "red"))) (hash-values h))
0
(foldl + 0 (hash-map h (λ (k v) (summate-jsexpr2 v)))))]
[_ 0]))
(module+ test
(require rackunit)
(define f (compose summate-jsexpr string->jsexpr))
(define g (compose summate-jsexpr2 string->jsexpr))
(check-equal? (f "[1,2,3]") 6)
(check-equal? (f "{\"a\":2,\"b\":4}") 6)
(check-equal? (f "[[[3]]]") 3)
(check-equal? (f "{\"a\":{\"b\":4},\"c\":-1}") 3)
(check-equal? (f "{\"a\":[-1,1]}") 0)
(check-equal? (f "[-1,{\"a\":1}]") 0)
(check-equal? (f "[]") 0)
(check-equal? (f "{}") 0)
(check-equal? (g "{\"a\":2,\"b\":4}") 6)
(check-equal? (g "[1,2,3]") 6)
(check-equal? (g "[1,{\"c\":\"red\",\"b\":2},3]") 4)
(check-equal? (g "{\"d\":\"red\",\"e\":[1,2,3,4],\"f\":5}") 0)
(check-equal? (g "[1,\"red\",5]") 6))
(module+ main
(define jo (read-json))
(displayln (format "With part #1 rules: ~a" (summate-jsexpr jo)))
(displayln (format "With part #2 rules: ~a" (summate-jsexpr2 jo))))
| |
a0b6420ab853c9a9ccb6d2ad480b554208acff576f0d0a7b7054f375019555f5 | typedclojure/typedclojure | try.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns typed.cljc.checker.check.try
(:require [typed.cljc.checker.utils :as u]
[clojure.core.typed.util-vars :as vs]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.check-below :as below]
[typed.cljc.checker.filter-ops :as fo]
[typed.cljc.checker.object-rep :as o]
[typed.cljc.checker.type-ctors :as c]))
(defn combine-rets [rs]
{:pre [(seq rs)
(every? r/TCResult? rs)]
:post [(r/TCResult? %)]}
(r/ret (apply c/Un (map r/ret-t rs))
(fo/-FS (apply fo/-or (map (comp :then r/ret-f) rs))
(apply fo/-or (map (comp :else r/ret-f) rs)))
(if (apply = (map r/ret-o rs))
(r/ret-o (first rs))
o/-empty)))
; filters don't propagate between components of a `try`, nor outside of it.
(defn check-try [check {:keys [body catches finally] :as expr} expected]
(let [chk #(check % expected)
cbody (chk body)
;_ (prn "cbody ret" (u/expr-type cbody))
;_ (prn cbody)
ccatches (mapv chk catches)
;_ (prn "ccatches ret" (mapv u/expr-type ccatches))
; finally result is thrown away
cfinally (when finally
(check finally))
ret (binding [vs/*current-expr* expr]
(below/maybe-check-below
(combine-rets
(map u/expr-type (concat [cbody] ccatches)))
expected))]
( prn " try ret " ret )
(assoc expr
:body cbody
:catches ccatches
:finally cfinally
u/expr-type ret)))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/a5766b1ecb4ab956d739c54ae3fb474c4d1fc24b/typed/clj.checker/src/typed/cljc/checker/check/try.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
filters don't propagate between components of a `try`, nor outside of it.
_ (prn "cbody ret" (u/expr-type cbody))
_ (prn cbody)
_ (prn "ccatches ret" (mapv u/expr-type ccatches))
finally result is thrown away | Copyright ( c ) , contributors .
(ns typed.cljc.checker.check.try
(:require [typed.cljc.checker.utils :as u]
[clojure.core.typed.util-vars :as vs]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.check-below :as below]
[typed.cljc.checker.filter-ops :as fo]
[typed.cljc.checker.object-rep :as o]
[typed.cljc.checker.type-ctors :as c]))
(defn combine-rets [rs]
{:pre [(seq rs)
(every? r/TCResult? rs)]
:post [(r/TCResult? %)]}
(r/ret (apply c/Un (map r/ret-t rs))
(fo/-FS (apply fo/-or (map (comp :then r/ret-f) rs))
(apply fo/-or (map (comp :else r/ret-f) rs)))
(if (apply = (map r/ret-o rs))
(r/ret-o (first rs))
o/-empty)))
(defn check-try [check {:keys [body catches finally] :as expr} expected]
(let [chk #(check % expected)
cbody (chk body)
ccatches (mapv chk catches)
cfinally (when finally
(check finally))
ret (binding [vs/*current-expr* expr]
(below/maybe-check-below
(combine-rets
(map u/expr-type (concat [cbody] ccatches)))
expected))]
( prn " try ret " ret )
(assoc expr
:body cbody
:catches ccatches
:finally cfinally
u/expr-type ret)))
|
1b7ca3e36cb84b7fd2e3af1d46fe1d08fea2f059e5b1c9090fa46142bde9c797 | clojars/clojars-web | login.clj | (ns clojars.web.login
(:require
[clojars.web.common :refer [html-doc flash]]
[clojars.web.helpers :as helpers]
[clojars.web.safe-hiccup :refer [form-to]]
[clojure.string :as str]
[hiccup.element :refer [link-to]]
[hiccup.form :refer [label text-field
password-field submit-button]]))
(defn login-form [login_failed username message]
(html-doc "Login" {}
[:div.small-section
[:h1 "Login"]
[:p.hint "Don't have an account? "
(link-to "/register" "Sign up!")]
(flash message)
(when login_failed
[:div
[:p.error "Incorrect username, password, or two-factor code."]
(when (some? (str/index-of username \@))
[:p.error "Make sure that you are using your username, and not your email to log in."])
[:p.hint "If you have not logged in since April 2012 when "
[:a {:href ""}
"the insecure password hashes were wiped"]
", please use the " [:a {:href "/forgot-password"} "forgot password"]
" functionality to reset your password."]])
(form-to [:post "/login" :class "row"]
(label :username "Username")
(text-field {:placeholder "bob"
:required true}
:username)
(label :password "Password")
(password-field {:placeholder "keep it secret, keep it safe"
:required true}
:password)
(label :otp "Two-Factor Code")
(text-field {:placeholder "leave blank if two-factor auth not enabled"}
:otp)
(link-to {:class :hint-link} "/forgot-password" "Forgot your username or password?")
(submit-button "Login")
[:div#login-or "or"]
[:div
(link-to {:class "login-button github-login-button"}
"/oauth/github/authorize"
(helpers/retinized-image "/images/github-mark.png" "GitHub")
"Login with GitHub")]
[:div
(link-to {:class :login-button} "/oauth/gitlab/authorize"
(helpers/retinized-image "/images/gitlab-mark.png" "GitLab")
"Login with GitLab.com")])]))
| null | https://raw.githubusercontent.com/clojars/clojars-web/22f831c9e8749ac7933af48655764ad5f5bdc3e8/src/clojars/web/login.clj | clojure | (ns clojars.web.login
(:require
[clojars.web.common :refer [html-doc flash]]
[clojars.web.helpers :as helpers]
[clojars.web.safe-hiccup :refer [form-to]]
[clojure.string :as str]
[hiccup.element :refer [link-to]]
[hiccup.form :refer [label text-field
password-field submit-button]]))
(defn login-form [login_failed username message]
(html-doc "Login" {}
[:div.small-section
[:h1 "Login"]
[:p.hint "Don't have an account? "
(link-to "/register" "Sign up!")]
(flash message)
(when login_failed
[:div
[:p.error "Incorrect username, password, or two-factor code."]
(when (some? (str/index-of username \@))
[:p.error "Make sure that you are using your username, and not your email to log in."])
[:p.hint "If you have not logged in since April 2012 when "
[:a {:href ""}
"the insecure password hashes were wiped"]
", please use the " [:a {:href "/forgot-password"} "forgot password"]
" functionality to reset your password."]])
(form-to [:post "/login" :class "row"]
(label :username "Username")
(text-field {:placeholder "bob"
:required true}
:username)
(label :password "Password")
(password-field {:placeholder "keep it secret, keep it safe"
:required true}
:password)
(label :otp "Two-Factor Code")
(text-field {:placeholder "leave blank if two-factor auth not enabled"}
:otp)
(link-to {:class :hint-link} "/forgot-password" "Forgot your username or password?")
(submit-button "Login")
[:div#login-or "or"]
[:div
(link-to {:class "login-button github-login-button"}
"/oauth/github/authorize"
(helpers/retinized-image "/images/github-mark.png" "GitHub")
"Login with GitHub")]
[:div
(link-to {:class :login-button} "/oauth/gitlab/authorize"
(helpers/retinized-image "/images/gitlab-mark.png" "GitLab")
"Login with GitLab.com")])]))
| |
e58fbebe024a90ea0f3b6a885f823f9b2769cf553670b6a1927c73a90ee93e00 | Copilot-Language/copilot | Constant.hs | Copyright © 2011 National Institute of Aerospace / Galois , Inc.
{-# LANGUAGE Safe #-}
-- | Primitives to build constant streams.
module Copilot.Language.Operators.Constant
( constant
, constB
, constW8
, constW16
, constW32
, constW64
, constI8
, constI16
, constI32
, constI64
, constF
, constD
) where
import Copilot.Core (Typed)
import Copilot.Language.Stream
import Data.Word
import Data.Int
-- | Create a constant stream that is equal to the given argument, at any
-- point in time.
constant :: Typed a => a -> Stream a
constant = Const
| Create a constant stream carrying values of type ' ' that is equal to
-- the given argument, at any point in time.
constB :: Bool -> Stream Bool
constB = constant
-- | Create a constant stream carrying values of type 'Word8' that is equal to
-- the given argument, at any point in time.
constW8 :: Word8 -> Stream Word8
constW8 = constant
| Create a constant stream carrying values of type ' Word16 ' that is equal to
-- the given argument, at any point in time.
constW16 :: Word16 -> Stream Word16
constW16 = constant
| Create a constant stream carrying values of type ' ' that is equal to
-- the given argument, at any point in time.
constW32 :: Word32 -> Stream Word32
constW32 = constant
-- | Create a constant stream carrying values of type 'Word64' that is equal to
-- the given argument, at any point in time.
constW64 :: Word64 -> Stream Word64
constW64 = constant
-- | Create a constant stream carrying values of type 'Int8' that is equal to
-- the given argument, at any point in time.
constI8 :: Int8 -> Stream Int8
constI8 = constant
-- | Create a constant stream carrying values of type 'Int16' that is equal to
-- the given argument, at any point in time.
constI16 :: Int16 -> Stream Int16
constI16 = constant
| Create a constant stream carrying values of type ' Int32 ' that is equal to
-- the given argument, at any point in time.
constI32 :: Int32 -> Stream Int32
constI32 = constant
| Create a constant stream carrying values of type ' Int64 ' that is equal to
-- the given argument, at any point in time.
constI64 :: Int64 -> Stream Int64
constI64 = constant
-- | Create a constant stream carrying values of type 'Float' that is equal to
-- the given argument, at any point in time.
constF :: Float -> Stream Float
constF = constant
-- | Create a constant stream carrying values of type 'Double' that is equal to
-- the given argument, at any point in time.
constD :: Double -> Stream Double
constD = constant
| null | https://raw.githubusercontent.com/Copilot-Language/copilot/17a9b45eea4e95a465d6e773c6fbcf9bf810b6cc/copilot-language/src/Copilot/Language/Operators/Constant.hs | haskell | # LANGUAGE Safe #
| Primitives to build constant streams.
| Create a constant stream that is equal to the given argument, at any
point in time.
the given argument, at any point in time.
| Create a constant stream carrying values of type 'Word8' that is equal to
the given argument, at any point in time.
the given argument, at any point in time.
the given argument, at any point in time.
| Create a constant stream carrying values of type 'Word64' that is equal to
the given argument, at any point in time.
| Create a constant stream carrying values of type 'Int8' that is equal to
the given argument, at any point in time.
| Create a constant stream carrying values of type 'Int16' that is equal to
the given argument, at any point in time.
the given argument, at any point in time.
the given argument, at any point in time.
| Create a constant stream carrying values of type 'Float' that is equal to
the given argument, at any point in time.
| Create a constant stream carrying values of type 'Double' that is equal to
the given argument, at any point in time. | Copyright © 2011 National Institute of Aerospace / Galois , Inc.
module Copilot.Language.Operators.Constant
( constant
, constB
, constW8
, constW16
, constW32
, constW64
, constI8
, constI16
, constI32
, constI64
, constF
, constD
) where
import Copilot.Core (Typed)
import Copilot.Language.Stream
import Data.Word
import Data.Int
constant :: Typed a => a -> Stream a
constant = Const
| Create a constant stream carrying values of type ' ' that is equal to
constB :: Bool -> Stream Bool
constB = constant
constW8 :: Word8 -> Stream Word8
constW8 = constant
| Create a constant stream carrying values of type ' Word16 ' that is equal to
constW16 :: Word16 -> Stream Word16
constW16 = constant
| Create a constant stream carrying values of type ' ' that is equal to
constW32 :: Word32 -> Stream Word32
constW32 = constant
constW64 :: Word64 -> Stream Word64
constW64 = constant
constI8 :: Int8 -> Stream Int8
constI8 = constant
constI16 :: Int16 -> Stream Int16
constI16 = constant
| Create a constant stream carrying values of type ' Int32 ' that is equal to
constI32 :: Int32 -> Stream Int32
constI32 = constant
| Create a constant stream carrying values of type ' Int64 ' that is equal to
constI64 :: Int64 -> Stream Int64
constI64 = constant
constF :: Float -> Stream Float
constF = constant
constD :: Double -> Stream Double
constD = constant
|
6850584cd8578191d5d782f0ef5e0fceb29a2e3aa69a1cf67429ebcd0d461426 | tarides/dune-release | test_github.ml | let test_ssh_uri_from_http =
let check inp expected =
let test_name = "Parse.ssh_uri_from_http " ^ inp in
let result = Dune_release.Github.Parse.ssh_uri_from_http inp in
let test_fun () = Alcotest.(check (option string)) inp expected result in
(test_name, `Quick, test_fun)
in
[
(* Use cases *)
check "-release"
(Some ":tarides/dune-release");
check ":tarides/dune-release"
(Some ":tarides/dune-release");
(* This function only works for github https urls, returns its input
otherwise *)
check "-github.com/dune-release" None;
check ":dune-release" None;
check "git" (Some ":user/repo.git");
check "git+" None;
]
let suite = ("Github", test_ssh_uri_from_http)
| null | https://raw.githubusercontent.com/tarides/dune-release/f170ad510978ff0b8adf03965fc7a5df172d39ff/tests/lib/test_github.ml | ocaml | Use cases
This function only works for github https urls, returns its input
otherwise | let test_ssh_uri_from_http =
let check inp expected =
let test_name = "Parse.ssh_uri_from_http " ^ inp in
let result = Dune_release.Github.Parse.ssh_uri_from_http inp in
let test_fun () = Alcotest.(check (option string)) inp expected result in
(test_name, `Quick, test_fun)
in
[
check "-release"
(Some ":tarides/dune-release");
check ":tarides/dune-release"
(Some ":tarides/dune-release");
check "-github.com/dune-release" None;
check ":dune-release" None;
check "git" (Some ":user/repo.git");
check "git+" None;
]
let suite = ("Github", test_ssh_uri_from_http)
|
83fb03e2f6f04fdc5f8acd410914759ab6996ae10a3932964b1626bd8508f5a0 | brownplt/LambdaS5 | topological.mli | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
(** Topological order.
This functor provides functions which allow iterating over a graph in
topological order. *)
(** Minimal graph signature to provide.
Sub-signature of {!Sig.G}. *)
module type G = sig
type t
module V : Sig.HASHABLE
val iter_vertex : (V.t -> unit) -> t -> unit
val iter_succ : (V.t -> unit) -> t -> V.t -> unit
val in_degree : t -> V.t -> int
end
(** Functor providing topological iterators over a graph. *)
module Make(G: G) : sig
val fold : (G.V.t -> 'a -> 'a) -> G.t -> 'a -> 'a
(** [fold action g seed] allows iterating over the graph [g]
in topological order. [action node accu] is called repeatedly,
where [node] is the node being visited, and [accu] is the result of
the [action]'s previous invocation, if any, and [seed] otherwise.
If [g] contains cycles, the order is unspecified inside the cycles and
every node in the cycles will be presented exactly once. *)
val iter : (G.V.t -> unit) -> G.t -> unit
(** [iter action] calls [action node] repeatedly. Nodes are (again)
presented to [action] in topological order.
The order is the same as for [fold]. *)
end
| null | https://raw.githubusercontent.com/brownplt/LambdaS5/f0bf5c7baf1daa4ead4e398ba7d430bedb7de9cf/src/ocamlgraph-1.8.1/src/topological.mli | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
* Topological order.
This functor provides functions which allow iterating over a graph in
topological order.
* Minimal graph signature to provide.
Sub-signature of {!Sig.G}.
* Functor providing topological iterators over a graph.
* [fold action g seed] allows iterating over the graph [g]
in topological order. [action node accu] is called repeatedly,
where [node] is the node being visited, and [accu] is the result of
the [action]'s previous invocation, if any, and [seed] otherwise.
If [g] contains cycles, the order is unspecified inside the cycles and
every node in the cycles will be presented exactly once.
* [iter action] calls [action node] repeatedly. Nodes are (again)
presented to [action] in topological order.
The order is the same as for [fold]. | : a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
module type G = sig
type t
module V : Sig.HASHABLE
val iter_vertex : (V.t -> unit) -> t -> unit
val iter_succ : (V.t -> unit) -> t -> V.t -> unit
val in_degree : t -> V.t -> int
end
module Make(G: G) : sig
val fold : (G.V.t -> 'a -> 'a) -> G.t -> 'a -> 'a
val iter : (G.V.t -> unit) -> G.t -> unit
end
|
60415a14c11b3ac4150f8621c0f79f8e9df55eea1477a7521c2d663cb4496ec5 | uwplse/oddity | macros.cljc | (ns oddity.macros)
(defmacro write-and-read-result [to-chan val from-chan]
`(do
(cljs.core.async/>! ~to-chan ~val)
(cljs.core.async/<! ~from-chan)))
| null | https://raw.githubusercontent.com/uwplse/oddity/81c1a6af203a0d8e71138a27655e3c4003357127/oddity/src/cljc/oddity/macros.cljc | clojure | (ns oddity.macros)
(defmacro write-and-read-result [to-chan val from-chan]
`(do
(cljs.core.async/>! ~to-chan ~val)
(cljs.core.async/<! ~from-chan)))
| |
701a6ff26df614aeefcd99bcbe2d7d812e696677676c21aefc3f6d22101bfc5c | tadeuzagallo/verve-lang | Constraint.hs | module Typing.Constraint
( inferTyArgs
, inferTyAbs
) where
import Typing.Env
import Typing.Substitution
import Typing.Subtyping
import Typing.TypeError
import Typing.Types
import Typing.Variance
import Util.Error
import Control.Monad (zipWithM)
import Data.List (groupBy, intersect, sortBy, union)
import Data.Maybe (fromJust)
-- Inference of type arguments for generic functions
inferTyArgs :: [Type] -> Type -> Tc (Type, [Type])
inferTyArgs args (Fun gen params retType) = do
let vars = map fst gen
let initialCs = map (flip (Constraint Bot) Top) vars
cs <- zipWithM (constraintGen [] vars) args params
let cs' = initialCs `meet` foldl meet [] cs
substs <- mapM (getSubst False retType) cs'
let typeArgs = map (fromJust . flip lookup substs) vars
return (applySubst (mkSubsts substs) retType, typeArgs)
inferTyArgs _ _ = undefined
inferTyAbs :: Type -> Type -> Tc [Type]
inferTyAbs (Fun gen params ret) expected = do
let vars = map fst gen
let ty = (Fun [] params ret)
let initialCs = map (flip (Constraint Bot) Top) vars
cs <- constraintGen [] vars ty expected
let cs' = initialCs `meet` cs
substs <- mapM (getSubst True ty) cs'
return $ map (fromJust . flip lookup substs) vars
inferTyAbs _ _ = undefined
-- Constraint Solving
data Constraint
= Constraint Type Var Type
deriving (Eq, Show)
constraintGen :: [Var] -> [Var] -> Type -> Type -> Tc [Constraint]
CG - Top
constraintGen _ _ _ Top = return []
CG - Bot
constraintGen _ _ Bot _ = return []
CG - Upper
constraintGen v x (Var y _) s | y `elem` x && fv s `intersect` x == [] =
let t = v \\ s
in return [Constraint Bot y t]
CG - Lower
constraintGen v x s (Var y _) | y `elem` x && fv s `intersect` x == [] =
let t = v // s
in return [Constraint t y Top]
CG - Refl
constraintGen _v _x t1 t2 | t1 <: t2 = return []
CG - Fun
constraintGen v x (Fun y r s) (Fun y' t u)
| y == y' && map fst y `intersect` (v `union` x) == [] = do
c <- zipWithM (constraintGen (v `union` map fst y) x) t r
d <- constraintGen (v `union` map fst y) x s u
return $ foldl meet [] c `meet` d
constraintGen v x (TyApp t11 t12) (TyApp t21 t22) = do
cTy <- constraintGen v x t11 t21
cArgs <- zipWithM (constraintGen v x) t12 t22
return $ foldl meet [] cArgs `meet` cTy
constraintGen v x (Rec f1) (Rec f2) | map fst f2 `intersect` map fst f1 == map fst f2 = do
cs <- mapM aux f2
return $ foldl meet [] cs
where
aux (key, v2) =
constraintGen v x (fromJust $ lookup key f1) v2
constraintGen v x (Fun gen@(_:_) params ret) f2@(Fun [] _ _) = do
constraintGen v x (map fst gen \\ Fun [] params ret) f2
constraintGen _v _x actual expected =
throwError $ TypeError expected actual
The meet of two X / V - constraints C and D , written C /\ D , is defined as follows :
meet :: [Constraint] -> [Constraint] -> [Constraint]
meet c [] = c
meet [] d = d
meet c d =
map merge cs
where
cs = groupBy prj sorted
sorted = sortBy (\(Constraint _ t _) (Constraint _ u _) -> compare t u) (c `union` d)
prj (Constraint _ t _) (Constraint _ u _) = t == u
merge [] = undefined
merge (c:cs) = foldl mergeC c cs
mergeC (Constraint s x t) (Constraint u _ v) =
Constraint (s \/ u) x (t /\ v)
getSubst :: Bool -> Type -> Constraint -> Tc (Var, Type)
getSubst flip r (Constraint s x t) = do
let v = variance x r
let v' = if flip
then invertVariance v
else v
case v' of
Bivariant -> return (x, s)
Covariant -> return (x, s)
Contravariant -> return (x, t)
Invariant | s == t -> return (x, s)
_ -> throwError InferenceFailure
| null | https://raw.githubusercontent.com/tadeuzagallo/verve-lang/c7db1f5d4bb399b6c2623dd2444a981b5aba1aa4/src/Typing/Constraint.hs | haskell | Inference of type arguments for generic functions
Constraint Solving | module Typing.Constraint
( inferTyArgs
, inferTyAbs
) where
import Typing.Env
import Typing.Substitution
import Typing.Subtyping
import Typing.TypeError
import Typing.Types
import Typing.Variance
import Util.Error
import Control.Monad (zipWithM)
import Data.List (groupBy, intersect, sortBy, union)
import Data.Maybe (fromJust)
inferTyArgs :: [Type] -> Type -> Tc (Type, [Type])
inferTyArgs args (Fun gen params retType) = do
let vars = map fst gen
let initialCs = map (flip (Constraint Bot) Top) vars
cs <- zipWithM (constraintGen [] vars) args params
let cs' = initialCs `meet` foldl meet [] cs
substs <- mapM (getSubst False retType) cs'
let typeArgs = map (fromJust . flip lookup substs) vars
return (applySubst (mkSubsts substs) retType, typeArgs)
inferTyArgs _ _ = undefined
inferTyAbs :: Type -> Type -> Tc [Type]
inferTyAbs (Fun gen params ret) expected = do
let vars = map fst gen
let ty = (Fun [] params ret)
let initialCs = map (flip (Constraint Bot) Top) vars
cs <- constraintGen [] vars ty expected
let cs' = initialCs `meet` cs
substs <- mapM (getSubst True ty) cs'
return $ map (fromJust . flip lookup substs) vars
inferTyAbs _ _ = undefined
data Constraint
= Constraint Type Var Type
deriving (Eq, Show)
constraintGen :: [Var] -> [Var] -> Type -> Type -> Tc [Constraint]
CG - Top
constraintGen _ _ _ Top = return []
CG - Bot
constraintGen _ _ Bot _ = return []
CG - Upper
constraintGen v x (Var y _) s | y `elem` x && fv s `intersect` x == [] =
let t = v \\ s
in return [Constraint Bot y t]
CG - Lower
constraintGen v x s (Var y _) | y `elem` x && fv s `intersect` x == [] =
let t = v // s
in return [Constraint t y Top]
CG - Refl
constraintGen _v _x t1 t2 | t1 <: t2 = return []
CG - Fun
constraintGen v x (Fun y r s) (Fun y' t u)
| y == y' && map fst y `intersect` (v `union` x) == [] = do
c <- zipWithM (constraintGen (v `union` map fst y) x) t r
d <- constraintGen (v `union` map fst y) x s u
return $ foldl meet [] c `meet` d
constraintGen v x (TyApp t11 t12) (TyApp t21 t22) = do
cTy <- constraintGen v x t11 t21
cArgs <- zipWithM (constraintGen v x) t12 t22
return $ foldl meet [] cArgs `meet` cTy
constraintGen v x (Rec f1) (Rec f2) | map fst f2 `intersect` map fst f1 == map fst f2 = do
cs <- mapM aux f2
return $ foldl meet [] cs
where
aux (key, v2) =
constraintGen v x (fromJust $ lookup key f1) v2
constraintGen v x (Fun gen@(_:_) params ret) f2@(Fun [] _ _) = do
constraintGen v x (map fst gen \\ Fun [] params ret) f2
constraintGen _v _x actual expected =
throwError $ TypeError expected actual
The meet of two X / V - constraints C and D , written C /\ D , is defined as follows :
meet :: [Constraint] -> [Constraint] -> [Constraint]
meet c [] = c
meet [] d = d
meet c d =
map merge cs
where
cs = groupBy prj sorted
sorted = sortBy (\(Constraint _ t _) (Constraint _ u _) -> compare t u) (c `union` d)
prj (Constraint _ t _) (Constraint _ u _) = t == u
merge [] = undefined
merge (c:cs) = foldl mergeC c cs
mergeC (Constraint s x t) (Constraint u _ v) =
Constraint (s \/ u) x (t /\ v)
getSubst :: Bool -> Type -> Constraint -> Tc (Var, Type)
getSubst flip r (Constraint s x t) = do
let v = variance x r
let v' = if flip
then invertVariance v
else v
case v' of
Bivariant -> return (x, s)
Covariant -> return (x, s)
Contravariant -> return (x, t)
Invariant | s == t -> return (x, s)
_ -> throwError InferenceFailure
|
fb899ec79602b11cda4e7c4d75e6c903b97f46ea550574cafeb91e7ed796de09 | donut/OCamURL-server | api_server.ml |
open Lwt.Infix
module Conf = Lib.Config
module DB = Lib.DB
module Gql = Graphql_lwt
module Schema = Lib.Schema
let make_schema db_handle (conf:Conf.API.t) =
let reserved = conf.reserved in
Gql.Schema.(Schema.(schema
[
Aliases_qry.field db_handle;
Url_qry.field db_handle;
]
~mutations:[
Add_alias_mut.field ~db_handle ~reserved;
Change_alias_url_mut.field db_handle;
Delete_alias_mut.field db_handle;
Disable_alias_mut.field db_handle;
Enable_alias_mut.field db_handle;
Generate_alias_mut.field
~db_handle ~alphabet:conf.alias_alphabet ~reserved;
Rename_alias_mut.field ~db_handle ~reserved;
]
))
let main (config:Conf.API.t) =
let db = config.database in
let db_connect = DB.make_connect_func
~host:db.host ~user:db.user ~pass:db.pass ~db:db.database () in
let schema = make_schema db_connect config in
Gql.Server.start ~port:config.port ~ctx:(fun () -> ()) schema >>= fun () ->
DB.final_close ();
Lwt.return_unit
let start (config:Conf.API.t) =
Lwt_main.run @@ main config
| null | https://raw.githubusercontent.com/donut/OCamURL-server/87738c1a0bbfc2c848f9f4e2052cacc81a176489/bin/api_server.ml | ocaml |
open Lwt.Infix
module Conf = Lib.Config
module DB = Lib.DB
module Gql = Graphql_lwt
module Schema = Lib.Schema
let make_schema db_handle (conf:Conf.API.t) =
let reserved = conf.reserved in
Gql.Schema.(Schema.(schema
[
Aliases_qry.field db_handle;
Url_qry.field db_handle;
]
~mutations:[
Add_alias_mut.field ~db_handle ~reserved;
Change_alias_url_mut.field db_handle;
Delete_alias_mut.field db_handle;
Disable_alias_mut.field db_handle;
Enable_alias_mut.field db_handle;
Generate_alias_mut.field
~db_handle ~alphabet:conf.alias_alphabet ~reserved;
Rename_alias_mut.field ~db_handle ~reserved;
]
))
let main (config:Conf.API.t) =
let db = config.database in
let db_connect = DB.make_connect_func
~host:db.host ~user:db.user ~pass:db.pass ~db:db.database () in
let schema = make_schema db_connect config in
Gql.Server.start ~port:config.port ~ctx:(fun () -> ()) schema >>= fun () ->
DB.final_close ();
Lwt.return_unit
let start (config:Conf.API.t) =
Lwt_main.run @@ main config
| |
e37c09ee53e7c431501c56a2fd30001cc1bd4e529aaabc7ea888c4fb58d4ef84 | chuan6/tiger-compiler | lisp2forth.clj | (ns forth)
Usage example : ( to - forth ' ( * ( + 1 2 ) 3 4 ) )
e.g. , from ( * ( + 1 2 ) 3 4 ) to ( 4 ( 3 ( 2 1 + ) * ) * )
(defn norm [s]
(if (or (number? s) (= (class s) clojure.lang.Symbol))
s
(if (seq? s)
(if (<= (count s) 3)
(reverse (map norm s))
(let [[op a b & more] s]
(norm (conj more `(~op ~a ~b) op))))
(println "Error: expect input to be a lisp expression, i.e., a list." (type s)))))
(defn to-forth [s]
(flatten (norm s)))
;;Take the current stack, and reduce it with the next symbol.
(defn forth-reducer [stack next-symbol]
(if (contains? #{'+ '- '* '/} next-symbol)
(let [stack' (pop stack)
arg1 (peek stack)
arg2 (peek stack')
unchanged (pop stack')]
(conj unchanged (eval `(~next-symbol ~arg1 ~arg2))))
(conj stack next-symbol)))
;;Return result of a forth calculation expression.
(defn forth-calculator [s]
(let [init-stack ()]
(peek (reduce forth-reducer init-stack s))))
(defn myrand [upper]
plus 1 to avoid divide - by - zero
(defn gen-test-op []
(let [opvec ['+ '- '* '/]
n (count opvec)]
(nth opvec (myrand n))))
(defn gen-test-cell [max]
(assert (>= max 2))
(if (= max 2)
`(~(gen-test-op) ~(myrand 100) ~(myrand 100))
(loop [c max
built `(~(gen-test-op))]
(if (= c 0)
(reverse built)
(recur (- c 1) (conj built (gen-test-cell (- max 1))))))))
(defn gen-test []
Note : 4 here is large enough
clojure (eval acase)
forth (forth-calculator (to-forth acase))]
(println clojure "; clojure result")
(println forth "; forth result")
(= clojure forth)))
(defn test [n]
(assert (>= n 0))
(loop [success? true
nmore n
m 0]
(if (= nmore 0)
(do (println n "test cases run;" m "failed.")
success?)
(let [result (gen-test)]
(recur (and result success?)
(dec nmore)
(if result m (inc m)))))))
| null | https://raw.githubusercontent.com/chuan6/tiger-compiler/3fc54f0f04aba2d87bc263d6edc6a8ea446739d5/src/other_exercises/lisp2forth.clj | clojure | Take the current stack, and reduce it with the next symbol.
Return result of a forth calculation expression. | (ns forth)
Usage example : ( to - forth ' ( * ( + 1 2 ) 3 4 ) )
e.g. , from ( * ( + 1 2 ) 3 4 ) to ( 4 ( 3 ( 2 1 + ) * ) * )
(defn norm [s]
(if (or (number? s) (= (class s) clojure.lang.Symbol))
s
(if (seq? s)
(if (<= (count s) 3)
(reverse (map norm s))
(let [[op a b & more] s]
(norm (conj more `(~op ~a ~b) op))))
(println "Error: expect input to be a lisp expression, i.e., a list." (type s)))))
(defn to-forth [s]
(flatten (norm s)))
(defn forth-reducer [stack next-symbol]
(if (contains? #{'+ '- '* '/} next-symbol)
(let [stack' (pop stack)
arg1 (peek stack)
arg2 (peek stack')
unchanged (pop stack')]
(conj unchanged (eval `(~next-symbol ~arg1 ~arg2))))
(conj stack next-symbol)))
(defn forth-calculator [s]
(let [init-stack ()]
(peek (reduce forth-reducer init-stack s))))
(defn myrand [upper]
plus 1 to avoid divide - by - zero
(defn gen-test-op []
(let [opvec ['+ '- '* '/]
n (count opvec)]
(nth opvec (myrand n))))
(defn gen-test-cell [max]
(assert (>= max 2))
(if (= max 2)
`(~(gen-test-op) ~(myrand 100) ~(myrand 100))
(loop [c max
built `(~(gen-test-op))]
(if (= c 0)
(reverse built)
(recur (- c 1) (conj built (gen-test-cell (- max 1))))))))
(defn gen-test []
Note : 4 here is large enough
clojure (eval acase)
forth (forth-calculator (to-forth acase))]
(println clojure "; clojure result")
(println forth "; forth result")
(= clojure forth)))
(defn test [n]
(assert (>= n 0))
(loop [success? true
nmore n
m 0]
(if (= nmore 0)
(do (println n "test cases run;" m "failed.")
success?)
(let [result (gen-test)]
(recur (and result success?)
(dec nmore)
(if result m (inc m)))))))
|
59774a48c6200550d40e8b7cc5e9dc5bb09987a90c7b270c90465c6b3ae6c616 | ocamllabs/ocaml-modular-implicits | t060-raise.ml | open Lib;;
raise End_of_file;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 End_of_file
11 MAKEBLOCK1 0
13 RAISE
14 SETGLOBAL T060 - raise
16 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 GETGLOBAL End_of_file
11 MAKEBLOCK1 0
13 RAISE
14 SETGLOBAL T060-raise
16 STOP
**)
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-modular-implicits/92e45da5c8a4c2db8b2cd5be28a5bec2ac2181f1/testsuite/tests/tool-ocaml/t060-raise.ml | ocaml | open Lib;;
raise End_of_file;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 End_of_file
11 MAKEBLOCK1 0
13 RAISE
14 SETGLOBAL T060 - raise
16 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 GETGLOBAL End_of_file
11 MAKEBLOCK1 0
13 RAISE
14 SETGLOBAL T060-raise
16 STOP
**)
| |
50cc8a720b4b1ba85987f840e4771a4123063cc11355948911b81a136703ff34 | rtoy/ansi-cl-tests | dpb.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Thu Sep 11 20:43:54 2003
Contains : Tests of
(in-package :cl-test)
;;; Error tests
(deftest dpb.error.1
(signals-error (dpb) program-error)
t)
(deftest dpb.error.2
(signals-error (dpb 1) program-error)
t)
(deftest dpb.error.3
(signals-error (dpb 1 (byte 1 0)) program-error)
t)
(deftest dpb.error.4
(signals-error (dpb 1 (byte 1 0) 0 nil) program-error)
t)
;;; Non-error tests
(deftest dpb.1
(loop for pos = (random 32)
for size = (random 32)
for newbyte = (random (ash 1 (+ pos size)))
for val = (random (1+ (random (ash 1 (+ pos size)))))
for result = (dpb newbyte (byte size pos) val)
repeat 100
unless
(loop for i from 0 to (+ pos size)
always (if (or (< i pos)
(>= i (+ pos size)))
(if (logbitp i val) (logbitp i result)
(not (logbitp i result)))
(if (logbitp (- i pos) newbyte) (logbitp i result)
(not (logbitp i result)))))
collect (list pos size newbyte val result))
nil)
(deftest dpb.2
(loop for pos = (random 1000)
for size = (random 1000)
for newbyte = (random (ash 1 (+ pos size)))
for val = (random (1+ (random (ash 1 (+ pos size)))))
for result = (dpb newbyte (byte size pos) val)
repeat 100
unless
(loop for i from 0 to (+ pos size)
always (if (or (< i pos)
(>= i (+ pos size)))
(if (logbitp i val) (logbitp i result)
(not (logbitp i result)))
(if (logbitp (- i pos) newbyte) (logbitp i result)
(not (logbitp i result)))))
collect (list pos size newbyte val result))
nil)
(deftest dpb.3
(loop for x = (random-fixnum)
for y = (random-fixnum)
for pos = (random 32)
repeat 100
always (= (dpb x (byte 0 pos) y) y))
t)
(deftest dpb.4
(let ((bound (ash 1 200)))
(loop for x = (random-from-interval bound)
for y = (random-from-interval bound)
for pos = (random 200)
repeat 100
always (= (dpb x (byte 0 pos) y) y)))
t)
(deftest dpb.5
(loop for i of-type fixnum from -1000 to 1000
always (eql (dpb -1 (byte 0 0) i) i))
t)
| null | https://raw.githubusercontent.com/rtoy/ansi-cl-tests/9708f3977220c46def29f43bb237e97d62033c1d/dpb.lsp | lisp | -*- Mode: Lisp -*-
Error tests
Non-error tests | Author :
Created : Thu Sep 11 20:43:54 2003
Contains : Tests of
(in-package :cl-test)
(deftest dpb.error.1
(signals-error (dpb) program-error)
t)
(deftest dpb.error.2
(signals-error (dpb 1) program-error)
t)
(deftest dpb.error.3
(signals-error (dpb 1 (byte 1 0)) program-error)
t)
(deftest dpb.error.4
(signals-error (dpb 1 (byte 1 0) 0 nil) program-error)
t)
(deftest dpb.1
(loop for pos = (random 32)
for size = (random 32)
for newbyte = (random (ash 1 (+ pos size)))
for val = (random (1+ (random (ash 1 (+ pos size)))))
for result = (dpb newbyte (byte size pos) val)
repeat 100
unless
(loop for i from 0 to (+ pos size)
always (if (or (< i pos)
(>= i (+ pos size)))
(if (logbitp i val) (logbitp i result)
(not (logbitp i result)))
(if (logbitp (- i pos) newbyte) (logbitp i result)
(not (logbitp i result)))))
collect (list pos size newbyte val result))
nil)
(deftest dpb.2
(loop for pos = (random 1000)
for size = (random 1000)
for newbyte = (random (ash 1 (+ pos size)))
for val = (random (1+ (random (ash 1 (+ pos size)))))
for result = (dpb newbyte (byte size pos) val)
repeat 100
unless
(loop for i from 0 to (+ pos size)
always (if (or (< i pos)
(>= i (+ pos size)))
(if (logbitp i val) (logbitp i result)
(not (logbitp i result)))
(if (logbitp (- i pos) newbyte) (logbitp i result)
(not (logbitp i result)))))
collect (list pos size newbyte val result))
nil)
(deftest dpb.3
(loop for x = (random-fixnum)
for y = (random-fixnum)
for pos = (random 32)
repeat 100
always (= (dpb x (byte 0 pos) y) y))
t)
(deftest dpb.4
(let ((bound (ash 1 200)))
(loop for x = (random-from-interval bound)
for y = (random-from-interval bound)
for pos = (random 200)
repeat 100
always (= (dpb x (byte 0 pos) y) y)))
t)
(deftest dpb.5
(loop for i of-type fixnum from -1000 to 1000
always (eql (dpb -1 (byte 0 0) i) i))
t)
|
5bc98a040c11851bb071f6af3c47df9c6e6c2335767c66cd508add48f06259ae | vydd/sketch | controllers.lisp | ;;;; controllers.lisp
(in-package #:sketch)
;;; ____ ___ _ _ _____ ____ ___ _ _ _____ ____ ____
/ _ _ _ / _ \| \ | | _ _ | _ \ / _ \| | | | | _ _ _ _ | _ \/ _ _ _ |
;;; | | | | | | \| | | | | |_) | | | | | | | | _| | |_) \___ \
| |__| |_| | |\ | | | | _ < | |_| | |___| |___| |___| _ < _ _ _ ) |
\____\___/|_| \_| |_| |_| \_\\___/|_____|_____|_____|_| \_\____/
;;; Mouse
(defmethod kit.sdl2:mousemotion-event :after ((instance sketch)
timestamp button-mask x y xrel yrel)
(out :mouse (cons x y)
:mouse-x x
:mouse-y y
:mouse-rel (cons xrel yrel)
:mouse-xrel xrel
:mouse-yrel yrel))
(defmethod kit.sdl2:mousewheel-event :after ((instance sketch)
timestamp x y)
(out :mouse-wheel (cons x y)
:mouse-wheel-x x
:mouse-wheel-y y))
(defmethod kit.sdl2:mousebutton-event :after ((instance sketch)
state timestamp button x y)
(with-slots (%env) instance
(when (env-red-screen %env)
(setf (env-debug-key-pressed %env) t))))
;;; Keyboard
(defmethod keyboard-event :after ((instance sketch)
state timestamp repeatp keysym))
| null | https://raw.githubusercontent.com/vydd/sketch/1a2e4bba865d5083889c68f8755ef6c0e2a7d5a2/src/controllers.lisp | lisp | controllers.lisp
____ ___ _ _ _____ ____ ___ _ _ _____ ____ ____
| | | | | | \| | | | | |_) | | | | | | | | _| | |_) \___ \
Mouse
Keyboard |
(in-package #:sketch)
/ _ _ _ / _ \| \ | | _ _ | _ \ / _ \| | | | | _ _ _ _ | _ \/ _ _ _ |
| |__| |_| | |\ | | | | _ < | |_| | |___| |___| |___| _ < _ _ _ ) |
\____\___/|_| \_| |_| |_| \_\\___/|_____|_____|_____|_| \_\____/
(defmethod kit.sdl2:mousemotion-event :after ((instance sketch)
timestamp button-mask x y xrel yrel)
(out :mouse (cons x y)
:mouse-x x
:mouse-y y
:mouse-rel (cons xrel yrel)
:mouse-xrel xrel
:mouse-yrel yrel))
(defmethod kit.sdl2:mousewheel-event :after ((instance sketch)
timestamp x y)
(out :mouse-wheel (cons x y)
:mouse-wheel-x x
:mouse-wheel-y y))
(defmethod kit.sdl2:mousebutton-event :after ((instance sketch)
state timestamp button x y)
(with-slots (%env) instance
(when (env-red-screen %env)
(setf (env-debug-key-pressed %env) t))))
(defmethod keyboard-event :after ((instance sketch)
state timestamp repeatp keysym))
|
3939e019905a5107f3caecf2abe779e20e80a0a695c7846e291fbbae211fe4f2 | haroldcarr/learn-haskell-coq-ml-etc | E5RepeatLineIntoFile.hs | #!/usr/bin/env stack
-- stack --resolver lts-8.12 script
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - missing - signatures #
# OPTIONS_GHC -fno - warn - type - defaults #
module E5RepeatLineIntoFile where
import Control.Monad (replicateM_)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BSC8
import Data.Monoid ((<>))
import System.IO
e5 = main
main :: IO ()
main = do
let nameOut = "/tmp/JUNK/E5RepeatLineIntoFile"
line = BSC8.pack (['A' .. 'Z'] <> ['\n'])
withBinaryFile nameOut WriteMode $ \hOut ->
replicateM_ 1000 $ B.hPutStr hOut line
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/course/2017-05-snoyman-applied-haskell-at-lambdaconf/E5RepeatLineIntoFile.hs | haskell | stack --resolver lts-8.12 script
# LANGUAGE OverloadedStrings # | #!/usr/bin/env stack
# OPTIONS_GHC -fno - warn - missing - signatures #
# OPTIONS_GHC -fno - warn - type - defaults #
module E5RepeatLineIntoFile where
import Control.Monad (replicateM_)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BSC8
import Data.Monoid ((<>))
import System.IO
e5 = main
main :: IO ()
main = do
let nameOut = "/tmp/JUNK/E5RepeatLineIntoFile"
line = BSC8.pack (['A' .. 'Z'] <> ['\n'])
withBinaryFile nameOut WriteMode $ \hOut ->
replicateM_ 1000 $ B.hPutStr hOut line
|
8bb1e91f8725e04507b55da2fa8c6287b2850534545a07a630231b7860039c1f | coq/coq | context.ml | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
Created by out of names.ml as part of the
rebuilding of Coq around a purely functional abstract type - checker ,
Aug 1999
rebuilding of Coq around a purely functional abstract type-checker,
Aug 1999 *)
Miscellaneous extensions , restructurations and bug - fixes by Hugo
Herbelin and
Herbelin and Bruno Barras *)
(* This file defines types and combinators regarding indexes-based and
names-based contexts *)
* The modules defined below represent a { e local context }
as defined by Chapter 4 in the Reference Manual :
A { e local context } is an ordered list of of { e local declarations }
of names that we call { e variables } .
A { e local declaration } of some variable can be either :
- a { e local assumption } , or
- a { e local definition } .
as defined by Chapter 4 in the Reference Manual:
A {e local context} is an ordered list of of {e local declarations}
of names that we call {e variables}.
A {e local declaration} of some variable can be either:
- a {e local assumption}, or
- a {e local definition}.
*)
open Util
open Names
type 'a binder_annot = { binder_name : 'a; binder_relevance : Sorts.relevance }
let eq_annot eq {binder_name=na1;binder_relevance=r1} {binder_name=na2;binder_relevance=r2} =
eq na1 na2 && Sorts.relevance_equal r1 r2
let hash_annot h {binder_name=n;binder_relevance=r} =
Hashset.Combine.combinesmall (Sorts.relevance_hash r) (h n)
let map_annot f {binder_name=na;binder_relevance} =
{binder_name=f na;binder_relevance}
let make_annot x r = {binder_name=x;binder_relevance=r}
let binder_name x = x.binder_name
let binder_relevance x = x.binder_relevance
let annotR x = make_annot x Sorts.Relevant
let nameR x = annotR (Name x)
let anonR = annotR Anonymous
(** Representation of contexts that can capture anonymous as well as
    non-anonymous variables. Individual declarations are then designated by
    de Bruijn indexes. *)
module Rel =
struct
  (** Representation of {e local declarations}. *)
  module Declaration =
  struct
    (* local declaration *)
    type ('constr, 'types) pt =
      | LocalAssum of Name.t binder_annot * 'types            (** name, type *)
      | LocalDef of Name.t binder_annot * 'constr * 'types    (** name, value, type *)

    (** Return the annotated name bound by a given declaration. *)
    let get_annot = function
      | LocalAssum (na,_) | LocalDef (na,_,_) -> na

    (** Return the name bound by a given declaration. *)
    let get_name x = (get_annot x).binder_name

    (** Return [Some value] for local-declarations and [None] for local-assumptions. *)
    let get_value = function
      | LocalAssum _ -> None
      | LocalDef (_,v,_) -> Some v

    (** Return the type of the name bound by a given declaration. *)
    let get_type = function
      | LocalAssum (_,ty)
      | LocalDef (_,_,ty) -> ty

    (** Return the relevance mark of the bound name. *)
    let get_relevance x = (get_annot x).binder_relevance

    (** Set the name that is bound by a given declaration. *)
    let set_name na = function
      | LocalAssum (x,ty) -> LocalAssum ({x with binder_name=na}, ty)
      | LocalDef (x,v,ty) -> LocalDef ({x with binder_name=na}, v, ty)

    (** Set the type of the bound variable in a given declaration. *)
    let set_type ty = function
      | LocalAssum (na,_) -> LocalAssum (na, ty)
      | LocalDef (na,v,_) -> LocalDef (na, v, ty)

    (** Return [true] iff a given declaration is a local assumption. *)
    let is_local_assum = function
      | LocalAssum _ -> true
      | LocalDef _ -> false

    (** Return [true] iff a given declaration is a local definition. *)
    let is_local_def = function
      | LocalAssum _ -> false
      | LocalDef _ -> true

    (** Check whether any term in a given declaration satisfies a given predicate. *)
    let exists f = function
      | LocalAssum (_, ty) -> f ty
      | LocalDef (_, v, ty) -> f v || f ty

    (** Check whether all terms in a given declaration satisfy a given predicate. *)
    let for_all f = function
      | LocalAssum (_, ty) -> f ty
      | LocalDef (_, v, ty) -> f v && f ty

    (** Check whether the two given declarations are equal,
        comparing terms with [eq]. *)
    let equal eq decl1 decl2 =
      match decl1, decl2 with
      | LocalAssum (n1,ty1), LocalAssum (n2, ty2) ->
        eq_annot Name.equal n1 n2 && eq ty1 ty2
      | LocalDef (n1,v1,ty1), LocalDef (n2,v2,ty2) ->
        eq_annot Name.equal n1 n2 && eq v1 v2 && eq ty1 ty2
      | _ ->
        false

    (** Map the name bound by a given declaration.
        Sharing-aware: returns the original declaration when [f] preserves
        physical identity. *)
    let map_name f x =
      let na = get_name x in
      let na' = f na in
      if na == na' then x else set_name na' x

    (** For local assumptions, this function returns the original local assumptions.
        For local definitions, this function maps the value in the local definition. *)
    let map_value f = function
      | LocalAssum _ as decl -> decl
      | LocalDef (na, v, t) as decl ->
        let v' = f v in
        if v == v' then decl else LocalDef (na, v', t)

    (** Map the type of the name bound by a given declaration. *)
    let map_type f = function
      | LocalAssum (na, ty) as decl ->
        let ty' = f ty in
        if ty == ty' then decl else LocalAssum (na, ty')
      | LocalDef (na, v, ty) as decl ->
        let ty' = f ty in
        if ty == ty' then decl else LocalDef (na, v, ty')

    (** Map all terms in a given declaration.
        Sharing-aware: the original declaration is returned when no term
        changed physically. *)
    let map_constr f = function
      | LocalAssum (na, ty) as decl ->
        let ty' = f ty in
        if ty == ty' then decl else LocalAssum (na, ty')
      | LocalDef (na, v, ty) as decl ->
        let v' = f v in
        let ty' = f ty in
        if v == v' && ty == ty' then decl else LocalDef (na, v', ty')

    (** Heterogeneous variant of [map_constr]: no sharing is attempted, so
        the input and output term types may differ. *)
    let map_constr_het f = function
      | LocalAssum (na, ty) ->
        let ty' = f ty in
        LocalAssum (na, ty')
      | LocalDef (na, v, ty) ->
        let v' = f v in
        let ty' = f ty in
        LocalDef (na, v', ty')

    (** Perform a given action on all terms in a given declaration. *)
    let iter_constr f = function
      | LocalAssum (_,ty) -> f ty
      | LocalDef (_,v,ty) -> f v; f ty

    (** Reduce all terms in a given declaration to a single value
        (the value, if any, is folded before the type). *)
    let fold_constr f decl acc =
      match decl with
      | LocalAssum (_n,ty) -> f ty acc
      | LocalDef (_n,v,ty) -> f ty (f v acc)

    (** Expose a declaration as a (name, optional value, type) triple. *)
    let to_tuple = function
      | LocalAssum (na, ty) -> na, None, ty
      | LocalDef (na, v, ty) -> na, Some v, ty

    (** Turn a local definition into the local assumption with the same
        name and type; local assumptions are returned unchanged. *)
    let drop_body = function
      | LocalAssum _ as d -> d
      | LocalDef (na, _v, ty) -> LocalAssum (na, ty)
  end

  (** Rel-context is represented as a list of declarations.
      Inner-most declarations are at the beginning of the list.
      Outer-most declarations are at the end of the list. *)
  type ('constr, 'types) pt = ('constr, 'types) Declaration.pt list

  (** empty rel-context *)
  let empty = []

  (** Return a new rel-context enriched by with a given inner-most declaration. *)
  let add d ctx = d :: ctx

  (** Return the number of {e local declarations} in a given rel-context. *)
  let length = List.length

  (** Return the number of {e local assumptions} in a given rel-context. *)
  let nhyps ctx =
    let open Declaration in
    let rec nhyps acc = function
      | [] -> acc
      | LocalAssum _ :: hyps -> nhyps (succ acc) hyps
      | LocalDef _ :: hyps -> nhyps acc hyps
    in
    nhyps 0 ctx

  (** Return a declaration designated by a given de Bruijn index.
      @raise Not_found if the designated de Bruijn index is not present in the designated rel-context. *)
  let rec lookup n ctx =
    match n, ctx with
    | 1, decl :: _ -> decl
    | n, _ :: sign -> lookup (n-1) sign
    | _, [] -> raise Not_found

  (** Check whether given two rel-contexts are equal, comparing terms with [eq]. *)
  let equal eq l = List.equal (fun c -> Declaration.equal eq c) l

  (** Map all terms in a given rel-context (sharing-aware). *)
  let map f = List.Smart.map (Declaration.map_constr f)

  (** Map all terms in a given rel-context, passing to [f] an index that
      starts at [length ctx] for the innermost (head) declaration and
      decreases towards the outside. Sharing-aware. *)
  let map_with_binders f ctx =
    let rec aux k = function
      | decl :: ctx as l ->
        let decl' = Declaration.map_constr (f k) decl in
        let ctx' = aux (k-1) ctx in
        if decl == decl' && ctx == ctx' then l else decl' :: ctx'
      | [] -> []
    in
    aux (length ctx) ctx

  (** Perform a given action on every declaration in a given rel-context. *)
  let iter f = List.iter (Declaration.iter_constr f)

  (** Reduce all terms in a given rel-context to a single value.
      Innermost declarations are processed first. *)
  let fold_inside f ~init = List.fold_left f init

  (** Reduce all terms in a given rel-context to a single value.
      Outermost declarations are processed first. *)
  let fold_outside f l ~init = List.fold_right f l init

  (** Return the set of all named variables bound in a given rel-context
      (anonymous binders are skipped). *)
  let to_vars l =
    List.fold_left (fun accu decl ->
        match Declaration.get_name decl with
        | Name id -> Id.Set.add id accu
        | Anonymous -> accu)
      Id.Set.empty l

  (** Map a given rel-context to a list where each {e local definition} is
      mapped to [true] and each {e local assumption} is mapped to [false].
      The resulting list is in reverse order with respect to the context. *)
  let to_tags l =
    let rec aux l = function
      | [] -> l
      | Declaration.LocalDef _ :: ctx -> aux (true::l) ctx
      | Declaration.LocalAssum _ :: ctx -> aux (false::l) ctx
    in aux [] l

  (** Drop the bodies of all local definitions, turning them into
      assumptions (sharing-aware). *)
  let drop_bodies l = List.Smart.map Declaration.drop_body l

  (** Split a context so that the second part contains [n]
      [LocalAssum], keeping all [LocalDef] in the middle in the first part. *)
  let chop_nhyps n l =
    let rec aux l' = function
      | (0, l) -> (List.rev l', l)
      | (n, (Declaration.LocalDef _ as h) :: l) -> aux (h::l') (n, l)
      | (n, (Declaration.LocalAssum _ as h) :: l) -> aux (h::l') (n-1, l)
      | (_, []) -> CErrors.anomaly (Pp.str "chop_nhyps: not enough hypotheses.")
    in aux [] (n,l)

  (** [to_extended_list mk n Γ] builds an instance [args] such that [Γ,Δ ⊢ args:Γ]
      with n = |Δ| and with the {e local definitions} of [Γ] skipped in
      [args]. Example: for [x:T, y:=c, z:U] and [n]=2, it gives [Rel 5, Rel 3]. *)
  let to_extended_list mk n l =
    let rec reln l p = function
      | Declaration.LocalAssum _ :: hyps -> reln (mk (n+p) :: l) (p+1) hyps
      | Declaration.LocalDef _ :: hyps -> reln l (p+1) hyps
      | [] -> l
    in
    reln [] 1 l

  (** [to_extended_vect mk n Γ] does the same, returning instead an array. *)
  let to_extended_vect mk n hyps = Array.of_list (to_extended_list mk n hyps)

  (** Consistency with terminology in Named *)
  let instance = to_extended_vect
  let instance_list = to_extended_list
end
(** This module represents contexts that can capture non-anonymous variables.
    Individual declarations are then designated by the identifiers they bind. *)
module Named =
struct
  (** Representation of {e local declarations}. *)
  module Declaration =
  struct
    (** local declaration *)
    type ('constr, 'types) pt =
      | LocalAssum of Id.t binder_annot * 'types            (** identifier, type *)
      | LocalDef of Id.t binder_annot * 'constr * 'types    (** identifier, value, type *)

    (** Return the annotated identifier bound by a given declaration. *)
    let get_annot = function
      | LocalAssum (na,_) | LocalDef (na,_,_) -> na

    (** Return the identifier bound by a given declaration. *)
    let get_id x = (get_annot x).binder_name

    (** Return [Some value] for local-declarations and [None] for local-assumptions. *)
    let get_value = function
      | LocalAssum _ -> None
      | LocalDef (_,v,_) -> Some v

    (** Return the type of the name bound by a given declaration. *)
    let get_type = function
      | LocalAssum (_,ty)
      | LocalDef (_,_,ty) -> ty

    (** Return the relevance mark of the bound identifier. *)
    let get_relevance x = (get_annot x).binder_relevance

    (** Set the identifier that is bound by a given declaration. *)
    let set_id id =
      let set x = {x with binder_name = id} in
      function
      | LocalAssum (x,ty) -> LocalAssum (set x, ty)
      | LocalDef (x, v, ty) -> LocalDef (set x, v, ty)

    (** Set the type of the bound variable in a given declaration. *)
    let set_type ty = function
      | LocalAssum (id,_) -> LocalAssum (id, ty)
      | LocalDef (id,v,_) -> LocalDef (id, v, ty)

    (** Return [true] iff a given declaration is a local assumption. *)
    let is_local_assum = function
      | LocalAssum _ -> true
      | LocalDef _ -> false

    (** Return [true] iff a given declaration is a local definition. *)
    let is_local_def = function
      | LocalDef _ -> true
      | LocalAssum _ -> false

    (** Check whether any term in a given declaration satisfies a given predicate. *)
    let exists f = function
      | LocalAssum (_, ty) -> f ty
      | LocalDef (_, v, ty) -> f v || f ty

    (** Check whether all terms in a given declaration satisfy a given predicate. *)
    let for_all f = function
      | LocalAssum (_, ty) -> f ty
      | LocalDef (_, v, ty) -> f v && f ty

    (** Check whether the two given declarations are equal,
        comparing terms with [eq]. *)
    let equal eq decl1 decl2 =
      match decl1, decl2 with
      | LocalAssum (id1, ty1), LocalAssum (id2, ty2) ->
        eq_annot Id.equal id1 id2 && eq ty1 ty2
      | LocalDef (id1, v1, ty1), LocalDef (id2, v2, ty2) ->
        eq_annot Id.equal id1 id2 && eq v1 v2 && eq ty1 ty2
      | _ ->
        false

    (** Map the identifier bound by a given declaration.
        Sharing-aware: returns the original declaration when [f] preserves
        physical identity. *)
    let map_id f x =
      let id = get_id x in
      let id' = f id in
      if id == id' then x else set_id id' x

    (** For local assumptions, this function returns the original local assumptions.
        For local definitions, this function maps the value in the local definition. *)
    let map_value f = function
      | LocalAssum _ as decl -> decl
      | LocalDef (na, v, t) as decl ->
        let v' = f v in
        if v == v' then decl else LocalDef (na, v', t)

    (** Map the type of the name bound by a given declaration. *)
    let map_type f = function
      | LocalAssum (id, ty) as decl ->
        let ty' = f ty in
        if ty == ty' then decl else LocalAssum (id, ty')
      | LocalDef (id, v, ty) as decl ->
        let ty' = f ty in
        if ty == ty' then decl else LocalDef (id, v, ty')

    (** Map all terms in a given declaration.
        Sharing-aware: the original declaration is returned when no term
        changed physically. *)
    let map_constr f = function
      | LocalAssum (id, ty) as decl ->
        let ty' = f ty in
        if ty == ty' then decl else LocalAssum (id, ty')
      | LocalDef (id, v, ty) as decl ->
        let v' = f v in
        let ty' = f ty in
        if v == v' && ty == ty' then decl else LocalDef (id, v', ty')

    (** Heterogeneous variant of [map_constr]: no sharing is attempted, so
        the input and output term types may differ. *)
    let map_constr_het f = function
      | LocalAssum (id, ty) ->
        let ty' = f ty in
        LocalAssum (id, ty')
      | LocalDef (id, v, ty) ->
        let v' = f v in
        let ty' = f ty in
        LocalDef (id, v', ty')

    (** Perform a given action on all terms in a given declaration. *)
    let iter_constr f = function
      | LocalAssum (_, ty) -> f ty
      | LocalDef (_, v, ty) -> f v; f ty

    (** Reduce all terms in a given declaration to a single value
        (the value, if any, is folded before the type). *)
    let fold_constr f decl a =
      match decl with
      | LocalAssum (_, ty) -> f ty a
      | LocalDef (_, v, ty) -> a |> f v |> f ty

    (** Expose a declaration as an (identifier, optional value, type) triple. *)
    let to_tuple = function
      | LocalAssum (id, ty) -> id, None, ty
      | LocalDef (id, v, ty) -> id, Some v, ty

    (** Inverse of [to_tuple]. *)
    let of_tuple = function
      | id, None, ty -> LocalAssum (id, ty)
      | id, Some v, ty -> LocalDef (id, v, ty)

    (** Turn a local definition into the local assumption with the same
        identifier and type; local assumptions are returned unchanged. *)
    let drop_body = function
      | LocalAssum _ as d -> d
      | LocalDef (id, _v, ty) -> LocalAssum (id, ty)

    (** Convert a rel-declaration into a named declaration, using [f] to
        turn the bound name into an identifier. *)
    let of_rel_decl f = function
      | Rel.Declaration.LocalAssum (na,t) ->
        LocalAssum (map_annot f na, t)
      | Rel.Declaration.LocalDef (na,v,t) ->
        LocalDef (map_annot f na, v, t)

    (** Convert a named declaration into a rel-declaration whose bound name
        is the (non-anonymous) identifier. *)
    let to_rel_decl =
      let name x = {binder_name=Name x.binder_name;binder_relevance=x.binder_relevance} in
      function
      | LocalAssum (id,t) ->
        Rel.Declaration.LocalAssum (name id, t)
      | LocalDef (id,v,t) ->
        Rel.Declaration.LocalDef (name id,v,t)
  end

  (** Named-context is represented as a list of declarations.
      Inner-most declarations are at the beginning of the list.
      Outer-most declarations are at the end of the list. *)
  type ('constr, 'types) pt = ('constr, 'types) Declaration.pt list

  (** empty named-context *)
  let empty = []

  (** Return a new named-context enriched by with a given inner-most declaration. *)
  let add d ctx = d :: ctx

  (** Return the number of {e local declarations} in a given named-context. *)
  let length = List.length

  (** Return a declaration designated by a given identifier
      @raise Not_found if the designated identifier is not present in the designated named-context. *)
  let rec lookup id = function
    | decl :: _ when Id.equal id (Declaration.get_id decl) -> decl
    | _ :: sign -> lookup id sign
    | [] -> raise Not_found

  (** Check whether given two named-contexts are equal, comparing terms with [eq]. *)
  let equal eq l = List.equal (fun c -> Declaration.equal eq c) l

  (** Map all terms in a given named-context (sharing-aware). *)
  let map f = List.Smart.map (Declaration.map_constr f)

  (** Perform a given action on every declaration in a given named-context. *)
  let iter f = List.iter (Declaration.iter_constr f)

  (** Reduce all terms in a given named-context to a single value.
      Innermost declarations are processed first. *)
  let fold_inside f ~init = List.fold_left f init

  (** Reduce all terms in a given named-context to a single value.
      Outermost declarations are processed first. *)
  let fold_outside f l ~init = List.fold_right f l init

  (** Return the set of all identifiers bound in a given named-context. *)
  let to_vars l =
    List.fold_left (fun accu decl -> Id.Set.add (Declaration.get_id decl) accu) Id.Set.empty l

  (** Drop the bodies of all local definitions, turning them into
      assumptions (sharing-aware). *)
  let drop_bodies l = List.Smart.map Declaration.drop_body l

  (** [to_instance Ω] builds an instance [args] in reverse order such
      that [Ω ⊢ args:Ω] where [Ω] is a named context and with the local
      definitions of [Ω] skipped. Example: for [id1:T,id2:=c,id3:U], it
      gives [Var id1, Var id3]. All [idj] are supposed distinct. *)
  let to_instance mk l =
    let filter = function
      | Declaration.LocalAssum (id, _) -> Some (mk id.binder_name)
      | _ -> None
    in
    List.map_filter filter l

  (** [instance_list Ω] builds an instance [args] such
      that [Ω ⊢ args:Ω] where [Ω] is a named context and with the local
      definitions of [Ω] skipped. Example: for [id1:T,id2:=c,id3:U], it
      gives [Var id1, Var id3]. All [idj] are supposed distinct. *)
  let instance_list mk l =
    let filter = function
      | Declaration.LocalAssum (id, _) -> Some (mk id.binder_name)
      | _ -> None
    in
    List.rev (List.map_filter filter l)

  (** Array variant of [instance_list]. *)
  let instance mk l =
    Array.of_list (instance_list mk l)
end
(** Compacted contexts: several identifiers sharing one declaration. *)
module Compacted =
struct
  module Declaration =
  struct
    (** A compacted declaration carries a list of identifiers that all share
        the same optional body and the same type. *)
    type ('constr, 'types) pt =
      | LocalAssum of Id.t binder_annot list * 'types
      | LocalDef of Id.t binder_annot list * 'constr * 'types

    (** Apply [f] to every term of a declaration. Sharing-aware: the input
        declaration is returned unchanged when [f] preserves physical
        identity. The type is transformed before the body, as callers may
        rely on the order of effects of [f]. *)
    let map_constr f decl =
      match decl with
      | LocalAssum (ids, ty) ->
        let ty' = f ty in
        if ty' == ty then decl else LocalAssum (ids, ty')
      | LocalDef (ids, c, ty) ->
        let ty' = f ty in
        let c' = f c in
        if c' == c && ty' == ty then decl else LocalDef (ids, c', ty')

    (** Lift a named declaration into a singleton compacted declaration. *)
    let of_named_decl named =
      match named with
      | Named.Declaration.LocalAssum (id, t) -> LocalAssum ([id], t)
      | Named.Declaration.LocalDef (id, v, t) -> LocalDef ([id], v, t)

    (** Expand a compacted declaration into one named declaration per
        identifier, all sharing the same body and type. *)
    let to_named_context decl =
      match decl with
      | LocalAssum (ids, t) ->
        List.map (fun id -> Named.Declaration.LocalAssum (id, t)) ids
      | LocalDef (ids, v, t) ->
        List.map (fun id -> Named.Declaration.LocalDef (id, v, t)) ids
  end

  (** A compacted context is a list of compacted declarations,
      innermost first. *)
  type ('constr, 'types) pt = ('constr, 'types) Declaration.pt list

  (** Fold over the declarations, outermost first. *)
  let fold f l ~init = List.fold_right f l init
end
| null | https://raw.githubusercontent.com/coq/coq/ee0095d7be7b32f349e295e28964660efb78f491/kernel/context.ml | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
This file defines types and combinators regarding indexes-based and
names-based contexts
* Representation of {e local declarations}.
local declaration
* name, type
* name, value, type
* Return the name bound by a given declaration.
* Return [Some value] for local-declarations and [None] for local-assumptions.
* Return the type of the name bound by a given declaration.
* Set the name that is bound by a given declaration.
* Set the type of the bound variable in a given declaration.
* Return [true] iff a given declaration is a local assumption.
* Return [true] iff a given declaration is a local definition.
* Check whether any term in a given declaration satisfies a given predicate.
* Check whether all terms in a given declaration satisfy a given predicate.
* Map the name bound by a given declaration.
* For local assumptions, this function returns the original local assumptions.
For local definitions, this function maps the value in the local definition.
* Map the type of the name bound by a given declaration.
* Map all terms in a given declaration.
* Perform a given action on all terms in a given declaration.
* Reduce all terms in a given declaration to a single value.
* Rel-context is represented as a list of declarations.
Inner-most declarations are at the beginning of the list.
Outer-most declarations are at the end of the list.
* empty rel-context
* Return a new rel-context enriched by with a given inner-most declaration.
* Return the number of {e local declarations} in a given rel-context.
* Return the number of {e local assumptions} in a given rel-context.
* Return a declaration designated by a given de Bruijn index.
@raise Not_found if the designated de Bruijn index is not present in the designated rel-context.
* Map all terms in a given rel-context.
* Map all terms in a given rel-context.
* Perform a given action on every declaration in a given rel-context.
* Return the set of all named variables bound in a given rel-context.
* Map a given rel-context to a list where each {e local assumption} is mapped to [true]
and each {e local definition} is mapped to [false].
* [extended_vect n Γ] does the same, returning instead an array.
* Consistency with terminology in Named
* This module represents contexts that can capture non-anonymous variables.
Individual declarations are then designated by the identifiers they bind.
* Representation of {e local declarations}.
* local declaration
* identifier, type
* identifier, value, type
* Return the identifier bound by a given declaration.
* Return [Some value] for local-declarations and [None] for local-assumptions.
* Return the type of the name bound by a given declaration.
* Set the identifier that is bound by a given declaration.
* Set the type of the bound variable in a given declaration.
* Return [true] iff a given declaration is a local assumption.
* Return [true] iff a given declaration is a local definition.
* Check whether any term in a given declaration satisfies a given predicate.
* Check whether all terms in a given declaration satisfy a given predicate.
* Map the identifier bound by a given declaration.
* For local assumptions, this function returns the original local assumptions.
For local definitions, this function maps the value in the local definition.
* Map the type of the name bound by a given declaration.
* Map all terms in a given declaration.
* Perform a given action on all terms in a given declaration.
* Reduce all terms in a given declaration to a single value.
* Named-context is represented as a list of declarations.
Inner-most declarations are at the beginning of the list.
Outer-most declarations are at the end of the list.
* empty named-context
* Return a new named-context enriched by with a given inner-most declaration.
* Return the number of {e local declarations} in a given named-context.
* Return a declaration designated by a given identifier
@raise Not_found if the designated identifier is not present in the designated named-context.
* Map all terms in a given named-context.
* Perform a given action on every declaration in a given named-context.
* Return the set of all identifiers bound in a given named-context. | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
Created by out of names.ml as part of the
rebuilding of Coq around a purely functional abstract type - checker ,
Aug 1999
rebuilding of Coq around a purely functional abstract type-checker,
Aug 1999 *)
Miscellaneous extensions , restructurations and bug - fixes by Hugo
Herbelin and
Herbelin and Bruno Barras *)
* The modules defined below represent a { e local context }
as defined by Chapter 4 in the Reference Manual :
A { e local context } is an ordered list of of { e local declarations }
of names that we call { e variables } .
A { e local declaration } of some variable can be either :
- a { e local assumption } , or
- a { e local definition } .
as defined by Chapter 4 in the Reference Manual:
A {e local context} is an ordered list of of {e local declarations}
of names that we call {e variables}.
A {e local declaration} of some variable can be either:
- a {e local assumption}, or
- a {e local definition}.
*)
open Util
open Names
type 'a binder_annot = { binder_name : 'a; binder_relevance : Sorts.relevance }
let eq_annot eq {binder_name=na1;binder_relevance=r1} {binder_name=na2;binder_relevance=r2} =
eq na1 na2 && Sorts.relevance_equal r1 r2
let hash_annot h {binder_name=n;binder_relevance=r} =
Hashset.Combine.combinesmall (Sorts.relevance_hash r) (h n)
let map_annot f {binder_name=na;binder_relevance} =
{binder_name=f na;binder_relevance}
let make_annot x r = {binder_name=x;binder_relevance=r}
let binder_name x = x.binder_name
let binder_relevance x = x.binder_relevance
let annotR x = make_annot x Sorts.Relevant
let nameR x = annotR (Name x)
let anonR = annotR Anonymous
* Representation of contexts that can capture anonymous as well as non - anonymous variables .
Individual declarations are then designated by indexes .
Individual declarations are then designated by de Bruijn indexes. *)
module Rel =
struct
module Declaration =
struct
type ('constr, 'types) pt =
let get_annot = function
| LocalAssum (na,_) | LocalDef (na,_,_) -> na
let get_name x = (get_annot x).binder_name
let get_value = function
| LocalAssum _ -> None
| LocalDef (_,v,_) -> Some v
let get_type = function
| LocalAssum (_,ty)
| LocalDef (_,_,ty) -> ty
let get_relevance x = (get_annot x).binder_relevance
let set_name na = function
| LocalAssum (x,ty) -> LocalAssum ({x with binder_name=na}, ty)
| LocalDef (x,v,ty) -> LocalDef ({x with binder_name=na}, v, ty)
let set_type ty = function
| LocalAssum (na,_) -> LocalAssum (na, ty)
| LocalDef (na,v,_) -> LocalDef (na, v, ty)
let is_local_assum = function
| LocalAssum _ -> true
| LocalDef _ -> false
let is_local_def = function
| LocalAssum _ -> false
| LocalDef _ -> true
let exists f = function
| LocalAssum (_, ty) -> f ty
| LocalDef (_, v, ty) -> f v || f ty
let for_all f = function
| LocalAssum (_, ty) -> f ty
| LocalDef (_, v, ty) -> f v && f ty
* Check whether the two given declarations are equal .
let equal eq decl1 decl2 =
match decl1, decl2 with
| LocalAssum (n1,ty1), LocalAssum (n2, ty2) ->
eq_annot Name.equal n1 n2 && eq ty1 ty2
| LocalDef (n1,v1,ty1), LocalDef (n2,v2,ty2) ->
eq_annot Name.equal n1 n2 && eq v1 v2 && eq ty1 ty2
| _ ->
false
let map_name f x =
let na = get_name x in
let na' = f na in
if na == na' then x else set_name na' x
let map_value f = function
| LocalAssum _ as decl -> decl
| LocalDef (na, v, t) as decl ->
let v' = f v in
if v == v' then decl else LocalDef (na, v', t)
let map_type f = function
| LocalAssum (na, ty) as decl ->
let ty' = f ty in
if ty == ty' then decl else LocalAssum (na, ty')
| LocalDef (na, v, ty) as decl ->
let ty' = f ty in
if ty == ty' then decl else LocalDef (na, v, ty')
let map_constr f = function
| LocalAssum (na, ty) as decl ->
let ty' = f ty in
if ty == ty' then decl else LocalAssum (na, ty')
| LocalDef (na, v, ty) as decl ->
let v' = f v in
let ty' = f ty in
if v == v' && ty == ty' then decl else LocalDef (na, v', ty')
let map_constr_het f = function
| LocalAssum (na, ty) ->
let ty' = f ty in
LocalAssum (na, ty')
| LocalDef (na, v, ty) ->
let v' = f v in
let ty' = f ty in
LocalDef (na, v', ty')
let iter_constr f = function
| LocalAssum (_,ty) -> f ty
| LocalDef (_,v,ty) -> f v; f ty
let fold_constr f decl acc =
match decl with
| LocalAssum (_n,ty) -> f ty acc
| LocalDef (_n,v,ty) -> f ty (f v acc)
let to_tuple = function
| LocalAssum (na, ty) -> na, None, ty
| LocalDef (na, v, ty) -> na, Some v, ty
let drop_body = function
| LocalAssum _ as d -> d
| LocalDef (na, _v, ty) -> LocalAssum (na, ty)
end
type ('constr, 'types) pt = ('constr, 'types) Declaration.pt list
let empty = []
let add d ctx = d :: ctx
let length = List.length
let nhyps ctx =
let open Declaration in
let rec nhyps acc = function
| [] -> acc
| LocalAssum _ :: hyps -> nhyps (succ acc) hyps
| LocalDef _ :: hyps -> nhyps acc hyps
in
nhyps 0 ctx
let rec lookup n ctx =
match n, ctx with
| 1, decl :: _ -> decl
| n, _ :: sign -> lookup (n-1) sign
| _, [] -> raise Not_found
* Check whether given two rel - contexts are equal .
let equal eq l = List.equal (fun c -> Declaration.equal eq c) l
let map f = List.Smart.map (Declaration.map_constr f)
let map_with_binders f ctx =
let rec aux k = function
| decl :: ctx as l ->
let decl' = Declaration.map_constr (f k) decl in
let ctx' = aux (k-1) ctx in
if decl == decl' && ctx == ctx' then l else decl' :: ctx'
| [] -> []
in
aux (length ctx) ctx
let iter f = List.iter (Declaration.iter_constr f)
* Reduce all terms in a given rel - context to a single value .
Innermost declarations are processed first .
Innermost declarations are processed first. *)
let fold_inside f ~init = List.fold_left f init
* Reduce all terms in a given rel - context to a single value .
Outermost declarations are processed first .
Outermost declarations are processed first. *)
let fold_outside f l ~init = List.fold_right f l init
let to_vars l =
List.fold_left (fun accu decl ->
match Declaration.get_name decl with
| Name id -> Id.Set.add id accu
| Anonymous -> accu)
Id.Set.empty l
let to_tags l =
let rec aux l = function
| [] -> l
| Declaration.LocalDef _ :: ctx -> aux (true::l) ctx
| Declaration.LocalAssum _ :: ctx -> aux (false::l) ctx
in aux [] l
let drop_bodies l = List.Smart.map Declaration.drop_body l
* Split a context so that the second part contains [ n ]
[ LocalAssum ] , keeping all [ LocalDef ] in the middle in the first part
[LocalAssum], keeping all [LocalDef] in the middle in the first part *)
let chop_nhyps n l =
let rec aux l' = function
| (0, l) -> (List.rev l', l)
| (n, (Declaration.LocalDef _ as h) :: l) -> aux (h::l') (n, l)
| (n, (Declaration.LocalAssum _ as h) :: l) -> aux (h::l') (n-1, l)
| (_, []) -> CErrors.anomaly (Pp.str "chop_nhyps: not enough hypotheses.")
in aux [] (n,l)
* [ extended_list n Γ ] builds an instance [ args ] such that [ Γ , Δ ⊢ args : Γ ]
with n = |Δ| and with the { e local definitions } of [ Γ ] skipped in
[ args ] . Example : for [ x : T , y:=c , z : U ] and [ n]=2 , it gives [ Rel 5 , Rel 3 ] .
with n = |Δ| and with the {e local definitions} of [Γ] skipped in
[args]. Example: for [x:T, y:=c, z:U] and [n]=2, it gives [Rel 5, Rel 3]. *)
let to_extended_list mk n l =
let rec reln l p = function
| Declaration.LocalAssum _ :: hyps -> reln (mk (n+p) :: l) (p+1) hyps
| Declaration.LocalDef _ :: hyps -> reln l (p+1) hyps
| [] -> l
in
reln [] 1 l
let to_extended_vect mk n hyps = Array.of_list (to_extended_list mk n hyps)
let instance = to_extended_vect
let instance_list = to_extended_list
end
module Named =
struct
module Declaration =
struct
type ('constr, 'types) pt =
let get_annot = function
| LocalAssum (na,_) | LocalDef (na,_,_) -> na
let get_id x = (get_annot x).binder_name
let get_value = function
| LocalAssum _ -> None
| LocalDef (_,v,_) -> Some v
let get_type = function
| LocalAssum (_,ty)
| LocalDef (_,_,ty) -> ty
let get_relevance x = (get_annot x).binder_relevance
let set_id id =
let set x = {x with binder_name = id} in
function
| LocalAssum (x,ty) -> LocalAssum (set x, ty)
| LocalDef (x, v, ty) -> LocalDef (set x, v, ty)
let set_type ty = function
| LocalAssum (id,_) -> LocalAssum (id, ty)
| LocalDef (id,v,_) -> LocalDef (id, v, ty)
let is_local_assum = function
| LocalAssum _ -> true
| LocalDef _ -> false
let is_local_def = function
| LocalDef _ -> true
| LocalAssum _ -> false
let exists f = function
| LocalAssum (_, ty) -> f ty
| LocalDef (_, v, ty) -> f v || f ty
let for_all f = function
| LocalAssum (_, ty) -> f ty
| LocalDef (_, v, ty) -> f v && f ty
* Check whether the two given declarations are equal .
let equal eq decl1 decl2 =
match decl1, decl2 with
| LocalAssum (id1, ty1), LocalAssum (id2, ty2) ->
eq_annot Id.equal id1 id2 && eq ty1 ty2
| LocalDef (id1, v1, ty1), LocalDef (id2, v2, ty2) ->
eq_annot Id.equal id1 id2 && eq v1 v2 && eq ty1 ty2
| _ ->
false
let map_id f x =
let id = get_id x in
let id' = f id in
if id == id' then x else set_id id' x
let map_value f = function
| LocalAssum _ as decl -> decl
| LocalDef (na, v, t) as decl ->
let v' = f v in
if v == v' then decl else LocalDef (na, v', t)
let map_type f = function
| LocalAssum (id, ty) as decl ->
let ty' = f ty in
if ty == ty' then decl else LocalAssum (id, ty')
| LocalDef (id, v, ty) as decl ->
let ty' = f ty in
if ty == ty' then decl else LocalDef (id, v, ty')
let map_constr f = function
| LocalAssum (id, ty) as decl ->
let ty' = f ty in
if ty == ty' then decl else LocalAssum (id, ty')
| LocalDef (id, v, ty) as decl ->
let v' = f v in
let ty' = f ty in
if v == v' && ty == ty' then decl else LocalDef (id, v', ty')
let map_constr_het f = function
| LocalAssum (id, ty) ->
let ty' = f ty in
LocalAssum (id, ty')
| LocalDef (id, v, ty) ->
let v' = f v in
let ty' = f ty in
LocalDef (id, v', ty')
let iter_constr f = function
| LocalAssum (_, ty) -> f ty
| LocalDef (_, v, ty) -> f v; f ty
let fold_constr f decl a =
match decl with
| LocalAssum (_, ty) -> f ty a
| LocalDef (_, v, ty) -> a |> f v |> f ty
let to_tuple = function
| LocalAssum (id, ty) -> id, None, ty
| LocalDef (id, v, ty) -> id, Some v, ty
let of_tuple = function
| id, None, ty -> LocalAssum (id, ty)
| id, Some v, ty -> LocalDef (id, v, ty)
let drop_body = function
| LocalAssum _ as d -> d
| LocalDef (id, _v, ty) -> LocalAssum (id, ty)
let of_rel_decl f = function
| Rel.Declaration.LocalAssum (na,t) ->
LocalAssum (map_annot f na, t)
| Rel.Declaration.LocalDef (na,v,t) ->
LocalDef (map_annot f na, v, t)
let to_rel_decl =
let name x = {binder_name=Name x.binder_name;binder_relevance=x.binder_relevance} in
function
| LocalAssum (id,t) ->
Rel.Declaration.LocalAssum (name id, t)
| LocalDef (id,v,t) ->
Rel.Declaration.LocalDef (name id,v,t)
end
type ('constr, 'types) pt = ('constr, 'types) Declaration.pt list
let empty = []
let add d ctx = d :: ctx
let length = List.length
let rec lookup id = function
| decl :: _ when Id.equal id (Declaration.get_id decl) -> decl
| _ :: sign -> lookup id sign
| [] -> raise Not_found
* Check whether given two named - contexts are equal .
let equal eq l = List.equal (fun c -> Declaration.equal eq c) l
let map f = List.Smart.map (Declaration.map_constr f)
let iter f = List.iter (Declaration.iter_constr f)
* Reduce all terms in a given named - context to a single value .
Innermost declarations are processed first .
Innermost declarations are processed first. *)
let fold_inside f ~init = List.fold_left f init
* Reduce all terms in a given named - context to a single value .
Outermost declarations are processed first .
Outermost declarations are processed first. *)
let fold_outside f l ~init = List.fold_right f l init
let to_vars l =
List.fold_left (fun accu decl -> Id.Set.add (Declaration.get_id decl) accu) Id.Set.empty l
let drop_bodies l = List.Smart.map Declaration.drop_body l
* [ to_instance Ω ] builds an instance [ args ] in reverse order such
that [ Ω ⊢ args : Ω ] where [ Ω ] is a named context and with the local
definitions of [ Ω ] skipped . Example : for [ id1 : T , id2:=c , id3 : U ] , it
gives [ Var id1 , Var id3 ] . All [ idj ] are supposed distinct .
that [Ω ⊢ args:Ω] where [Ω] is a named context and with the local
definitions of [Ω] skipped. Example: for [id1:T,id2:=c,id3:U], it
gives [Var id1, Var id3]. All [idj] are supposed distinct. *)
let to_instance mk l =
let filter = function
| Declaration.LocalAssum (id, _) -> Some (mk id.binder_name)
| _ -> None
in
List.map_filter filter l
* [ instance Ω ] builds an instance [ args ] such
that [ Ω ⊢ args : Ω ] where [ Ω ] is a named context and with the local
definitions of [ Ω ] skipped . Example : for [ id1 : T , id2:=c , id3 : U ] , it
gives [ Var id1 , Var id3 ] . All [ idj ] are supposed distinct .
that [Ω ⊢ args:Ω] where [Ω] is a named context and with the local
definitions of [Ω] skipped. Example: for [id1:T,id2:=c,id3:U], it
gives [Var id1, Var id3]. All [idj] are supposed distinct. *)
let instance_list mk l =
let filter = function
| Declaration.LocalAssum (id, _) -> Some (mk id.binder_name)
| _ -> None
in
List.rev (List.map_filter filter l)
let instance mk l =
Array.of_list (instance_list mk l)
end
module Compacted =
struct
module Declaration =
struct
type ('constr, 'types) pt =
| LocalAssum of Id.t binder_annot list * 'types
| LocalDef of Id.t binder_annot list * 'constr * 'types
let map_constr f = function
| LocalAssum (ids, ty) as decl ->
let ty' = f ty in
if ty == ty' then decl else LocalAssum (ids, ty')
| LocalDef (ids, c, ty) as decl ->
let ty' = f ty in
let c' = f c in
if c == c' && ty == ty' then decl else LocalDef (ids,c',ty')
let of_named_decl = function
| Named.Declaration.LocalAssum (id,t) ->
LocalAssum ([id],t)
| Named.Declaration.LocalDef (id,v,t) ->
LocalDef ([id],v,t)
let to_named_context = function
| LocalAssum (ids, t) ->
List.map (fun id -> Named.Declaration.LocalAssum (id,t)) ids
| LocalDef (ids, v, t) ->
List.map (fun id -> Named.Declaration.LocalDef (id,v,t)) ids
end
type ('constr, 'types) pt = ('constr, 'types) Declaration.pt list
let fold f l ~init = List.fold_right f l init
end
|
f81eb96f647a9bfbb8061b5f21f0d8b7a7c75c4fa266b85e7419aa611bbf2615 | solbloch/stumpwm-configs | keymap.lisp | (in-package :stumpwm)
this is the best thing VIMIFY :-)
(define-key *input-map* (kbd "C-w") 'input-backward-kill-word)
;; top map stuff
(define-key *top-map* (kbd "s-v") "hsplit")
(define-key *top-map* (kbd "s-s") "vsplit")
(define-key *top-map* (kbd "s-r") "remove")
(define-key *top-map* (kbd "s-q") "delete")
(define-key *top-map* (kbd "s-Q") "killandremove")
(define-key *top-map* (kbd "s-TAB") "pull-hidden-next")
( define - key * top - map * ( " s - p " ) " pull - hidden - previous " )
( define - key * top - map * ( " s - n " ) " pull - hidden - next " )
(define-key *top-map* (kbd "s-RET") "run-shell-command st")
(define-key *top-map* (kbd "s-e") "emoji-picker")
(define-key *top-map* (kbd "s-G") "greek-menu")
(define-key *top-map* (kbd "s-F") "fullscreen")
(define-key *top-map* (kbd "s-D") "deal")
(define-key *top-map* (kbd "s-C") "random-card-and-number")
(define-key *top-map* (kbd "s-b") "open-book")
(define-key *top-map* (kbd "s-c") "clipboard")
(define-key *top-map* (kbd "s-`") "gselect 6")
(define-key *top-map* (kbd "s-1") "gselect 1")
(define-key *top-map* (kbd "s-2") "gselect 2")
(define-key *top-map* (kbd "s-3") "gselect 3")
(define-key *top-map* (kbd "s-4") "gselect 4")
(define-key *top-map* (kbd "s-5") "gselect 5")
(define-key *top-map* (kbd "s-.") "gnext")
(define-key *top-map* (kbd "s-,") "gprev")
For Planck style keyboard .
(define-key *top-map* (kbd "M-q") "gselect 1")
(define-key *top-map* (kbd "M-w") "gselect 2")
(define-key *top-map* (kbd "M-e") "gselect 3")
(define-key *top-map* (kbd "M-r") "gselect 4")
(define-key *top-map* (kbd "M-t") "gselect 5")
(define-key *top-map* (kbd "M-y") "gselect 6")
(define-key *top-map* (kbd "M-y") "gselect 6")
(define-key *top-map* (kbd "M-TAB") "pull-hidden-next")
(define-key *top-map* (kbd "s-w") '*helpful-things*)
(define-key *top-map* (kbd "s-S") '*solb-commands*)
(define-key *top-map* (kbd "s-x") '*vpn-commands*)
(define-key *top-map* (kbd "s-A") '*looks-feels*)
(define-key *top-map* (kbd "s-g") '*group-bindings*)
(define-key *top-map* (kbd "s-f") '*frame-bindings*)
(define-key *top-map* (kbd "s-a") '*application-bindings*)
(define-key *top-map* (kbd "s-t") '*terminal-bindings*)
;; Applications ;;
(defvar *application-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "e") "emacs-fixed")
(define-key m (kbd "f") "melee")
(define-key m (kbd "u") "run-shell-command st")
(define-key m (kbd "s") "spotify")
(define-key m (kbd "c") "run-shell-command google-chrome-stable --disable-features=SendMouseLeaveEvents")
(define-key m (kbd "r") "ripcord")
(define-key m (kbd "d") "run-shell-command discord")
(define-key m (kbd "D") "darktable")
(define-key m (kbd "t") "telegram")
(define-key m (kbd "v") "run-shell-command pavucontrol")
(define-key m (kbd "w") "mpv-video0")
(define-key m (kbd "T") "teams")
m))
Group Configuration ; ;
(defvar *group-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "h") "gprev")
(define-key m (kbd "l") "gnext")
(define-key m (kbd "L") "gnext-with-window")
(define-key m (kbd "H") "gprev-with-window")
(define-key m (kbd "m") "gmove")
(define-key m (kbd "w") "grouplist")
(define-key m (kbd "n") "gnew")
(define-key m (kbd "q") "gkill")
(define-key m (kbd "r") "grename")
m))
;; Frame Configuration
(defvar *frame-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "r") "iresize")
(define-key m (kbd "w") "all-windowlist")
(define-key m (kbd "p") "pull-all-windowlist")
(define-key m (kbd "R") "title")
(define-key m (kbd "b") "balance-frames")
(define-key m (kbd "m") "mode-line")
(define-key m (kbd "f") "fullscreen")
(define-key m (kbd "g") "push-to-group-menu")
(define-key m (kbd "s") "scratch-windowlist")
(define-key m (kbd "F") "only")
(define-key m (kbd "t") "telegram-split")
(define-key m (kbd "T") "telegram-split t")
m))
Looks Feels
(defvar *looks-feels*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "y") "yoshis")
(define-key m (kbd "b") "bf")
(define-key m (kbd "F") "fod")
(define-key m (kbd "f") "fd")
(define-key m (kbd "d") "dl")
(define-key m (kbd "j") "jupiter")
m))
;; Helpful Things
(defvar *helpful-things*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "g") "toggle-gaps")
(define-key m (kbd "s") "second-screen")
(define-key m (kbd "r") '*redshift-levels*)
(define-key m (kbd "f") "fix-discord")
(define-key m (kbd "a") "audio-switch")
(define-key m (kbd "k") "keyboard-toggle")
m))
;; Redshift Configs
(defvar *redshift-levels*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "1") "redshift-temp 5500")
(define-key m (kbd "2") "redshift-temp 4500")
(define-key m (kbd "3") "redshift-temp 3500")
(define-key m (kbd "4") "redshift-temp 2500")
(define-key m (kbd "0") "exec redshift -x")
m))
;; VPN
(defvar *vpn-commands*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "c") "connect-vpn-menu")
(define-key m (kbd "k") "kill-vpn-menu")
m))
;; solb
(defvar *solb-commands*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "s") "screenshot-selection-post")
(define-key m (kbd "S") "screenshot-selection-copy")
(define-key m (kbd "f") "screenshot-full-post")
(define-key m (kbd "c") "post-clipboard-text")
(define-key m (kbd "r") "post-clipboard-redirect")
(define-key m (kbd "C-r") "ssh-pull-and-reload")
m))
;; Terminal bindings
(defvar *terminal-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "s") "run-shell-command st -e ssh ")
m))
;; Volume // Brightness Config
(define-key *top-map* (kbd "XF86Sleep") "sleep-pc")
(define-key *top-map* (kbd "XF86AudioLowerVolume") "volume-down")
(define-key *top-map* (kbd "XF86AudioRaiseVolume") "volume-up")
(define-key *top-map* (kbd "XF86AudioMute") "volume-mute")
(define-key *top-map* (kbd "XF86AudioPlay") "run-shell-command dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.PlayPause")
(define-key *top-map* (kbd "XF86AudioPrev") "run-shell-command dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Previous")
(define-key *top-map* (kbd "XF86AudioNext") "run-shell-command dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Next")
(define-key *top-map* (kbd "XF86MonBrightnessUp") "run-shell-command xbacklight -inc 5")
(define-key *top-map* (kbd "XF86MonBrightnessDown") "run-shell-command xbacklight -dec 5")
| null | https://raw.githubusercontent.com/solbloch/stumpwm-configs/f47d212fd8e4af3bb5997c796b228ab43853dd6d/keymap.lisp | lisp | top map stuff
Applications ;;
;
Frame Configuration
Helpful Things
Redshift Configs
VPN
solb
Terminal bindings
Volume // Brightness Config
| (in-package :stumpwm)
this is the best thing VIMIFY :-)
(define-key *input-map* (kbd "C-w") 'input-backward-kill-word)
(define-key *top-map* (kbd "s-v") "hsplit")
(define-key *top-map* (kbd "s-s") "vsplit")
(define-key *top-map* (kbd "s-r") "remove")
(define-key *top-map* (kbd "s-q") "delete")
(define-key *top-map* (kbd "s-Q") "killandremove")
(define-key *top-map* (kbd "s-TAB") "pull-hidden-next")
( define - key * top - map * ( " s - p " ) " pull - hidden - previous " )
( define - key * top - map * ( " s - n " ) " pull - hidden - next " )
(define-key *top-map* (kbd "s-RET") "run-shell-command st")
(define-key *top-map* (kbd "s-e") "emoji-picker")
(define-key *top-map* (kbd "s-G") "greek-menu")
(define-key *top-map* (kbd "s-F") "fullscreen")
(define-key *top-map* (kbd "s-D") "deal")
(define-key *top-map* (kbd "s-C") "random-card-and-number")
(define-key *top-map* (kbd "s-b") "open-book")
(define-key *top-map* (kbd "s-c") "clipboard")
(define-key *top-map* (kbd "s-`") "gselect 6")
(define-key *top-map* (kbd "s-1") "gselect 1")
(define-key *top-map* (kbd "s-2") "gselect 2")
(define-key *top-map* (kbd "s-3") "gselect 3")
(define-key *top-map* (kbd "s-4") "gselect 4")
(define-key *top-map* (kbd "s-5") "gselect 5")
(define-key *top-map* (kbd "s-.") "gnext")
(define-key *top-map* (kbd "s-,") "gprev")
For Planck style keyboard .
(define-key *top-map* (kbd "M-q") "gselect 1")
(define-key *top-map* (kbd "M-w") "gselect 2")
(define-key *top-map* (kbd "M-e") "gselect 3")
(define-key *top-map* (kbd "M-r") "gselect 4")
(define-key *top-map* (kbd "M-t") "gselect 5")
(define-key *top-map* (kbd "M-y") "gselect 6")
(define-key *top-map* (kbd "M-y") "gselect 6")
(define-key *top-map* (kbd "M-TAB") "pull-hidden-next")
(define-key *top-map* (kbd "s-w") '*helpful-things*)
(define-key *top-map* (kbd "s-S") '*solb-commands*)
(define-key *top-map* (kbd "s-x") '*vpn-commands*)
(define-key *top-map* (kbd "s-A") '*looks-feels*)
(define-key *top-map* (kbd "s-g") '*group-bindings*)
(define-key *top-map* (kbd "s-f") '*frame-bindings*)
(define-key *top-map* (kbd "s-a") '*application-bindings*)
(define-key *top-map* (kbd "s-t") '*terminal-bindings*)
(defvar *application-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "e") "emacs-fixed")
(define-key m (kbd "f") "melee")
(define-key m (kbd "u") "run-shell-command st")
(define-key m (kbd "s") "spotify")
(define-key m (kbd "c") "run-shell-command google-chrome-stable --disable-features=SendMouseLeaveEvents")
(define-key m (kbd "r") "ripcord")
(define-key m (kbd "d") "run-shell-command discord")
(define-key m (kbd "D") "darktable")
(define-key m (kbd "t") "telegram")
(define-key m (kbd "v") "run-shell-command pavucontrol")
(define-key m (kbd "w") "mpv-video0")
(define-key m (kbd "T") "teams")
m))
(defvar *group-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "h") "gprev")
(define-key m (kbd "l") "gnext")
(define-key m (kbd "L") "gnext-with-window")
(define-key m (kbd "H") "gprev-with-window")
(define-key m (kbd "m") "gmove")
(define-key m (kbd "w") "grouplist")
(define-key m (kbd "n") "gnew")
(define-key m (kbd "q") "gkill")
(define-key m (kbd "r") "grename")
m))
(defvar *frame-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "r") "iresize")
(define-key m (kbd "w") "all-windowlist")
(define-key m (kbd "p") "pull-all-windowlist")
(define-key m (kbd "R") "title")
(define-key m (kbd "b") "balance-frames")
(define-key m (kbd "m") "mode-line")
(define-key m (kbd "f") "fullscreen")
(define-key m (kbd "g") "push-to-group-menu")
(define-key m (kbd "s") "scratch-windowlist")
(define-key m (kbd "F") "only")
(define-key m (kbd "t") "telegram-split")
(define-key m (kbd "T") "telegram-split t")
m))
Looks Feels
(defvar *looks-feels*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "y") "yoshis")
(define-key m (kbd "b") "bf")
(define-key m (kbd "F") "fod")
(define-key m (kbd "f") "fd")
(define-key m (kbd "d") "dl")
(define-key m (kbd "j") "jupiter")
m))
(defvar *helpful-things*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "g") "toggle-gaps")
(define-key m (kbd "s") "second-screen")
(define-key m (kbd "r") '*redshift-levels*)
(define-key m (kbd "f") "fix-discord")
(define-key m (kbd "a") "audio-switch")
(define-key m (kbd "k") "keyboard-toggle")
m))
(defvar *redshift-levels*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "1") "redshift-temp 5500")
(define-key m (kbd "2") "redshift-temp 4500")
(define-key m (kbd "3") "redshift-temp 3500")
(define-key m (kbd "4") "redshift-temp 2500")
(define-key m (kbd "0") "exec redshift -x")
m))
(defvar *vpn-commands*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "c") "connect-vpn-menu")
(define-key m (kbd "k") "kill-vpn-menu")
m))
(defvar *solb-commands*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "s") "screenshot-selection-post")
(define-key m (kbd "S") "screenshot-selection-copy")
(define-key m (kbd "f") "screenshot-full-post")
(define-key m (kbd "c") "post-clipboard-text")
(define-key m (kbd "r") "post-clipboard-redirect")
(define-key m (kbd "C-r") "ssh-pull-and-reload")
m))
(defvar *terminal-bindings*
(let ((m (make-sparse-keymap)))
(define-key m (kbd "s") "run-shell-command st -e ssh ")
m))
(define-key *top-map* (kbd "XF86Sleep") "sleep-pc")
(define-key *top-map* (kbd "XF86AudioLowerVolume") "volume-down")
(define-key *top-map* (kbd "XF86AudioRaiseVolume") "volume-up")
(define-key *top-map* (kbd "XF86AudioMute") "volume-mute")
(define-key *top-map* (kbd "XF86AudioPlay") "run-shell-command dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.PlayPause")
(define-key *top-map* (kbd "XF86AudioPrev") "run-shell-command dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Previous")
(define-key *top-map* (kbd "XF86AudioNext") "run-shell-command dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Next")
(define-key *top-map* (kbd "XF86MonBrightnessUp") "run-shell-command xbacklight -inc 5")
(define-key *top-map* (kbd "XF86MonBrightnessDown") "run-shell-command xbacklight -dec 5")
|
e6de418610283805fea1b22a342df3b68abbaa73d3e5499fbc0be3c6ad7970ab | 8c6794b6/haskell-sc-scratch | LocalInEx.hs | ------------------------------------------------------------------------------
| Example for LocalIn and LocalOut UGens .
--
module Scratch.LocalInEx where
import Sound.OpenSoundControl
import Sound.SC3
import Reusable
import Instances
test1 :: (UId m) => m UGen
test1 = do
n <- whiteNoise ar
let a0 = decay (impulse ar 0.3 0) 0.1 * n * 0.2
a1 = localIn 2 ar + mce [a0,0]
a2 = delayN a1 0.2 0.2
a3 = mceEdit reverse a2 * 0.8
return $ mrg [localOut a3, out 0 a2]
test2 :: (UId m) => m UGen
test2 = return $ mrg [offsetOut 0 p, localOut d]
where p = localIn 1 ar
i = impulse ar 1 0
d = delayC (i+(p*0.995)) 1 (recip 440 - recip controlRate)
test for offsetOut
test3 = audition $ mrg [a,b]
where a = offsetOut 0 (impulse ar 5 0)
b = out 0 (sinOsc ar 60 0 * 0.1)
test4 = audition $ mrg [a,b]
where a = out 0 $ impulse ar 5 0
b = out 0 $ sinOsc ar 60 0 * 0.1
| null | https://raw.githubusercontent.com/8c6794b6/haskell-sc-scratch/22de2199359fa56f256b544609cd6513b5e40f43/Scratch/UGenTweak/LocalInEx.hs | haskell | ----------------------------------------------------------------------------
| | Example for LocalIn and LocalOut UGens .
module Scratch.LocalInEx where
import Sound.OpenSoundControl
import Sound.SC3
import Reusable
import Instances
test1 :: (UId m) => m UGen
test1 = do
n <- whiteNoise ar
let a0 = decay (impulse ar 0.3 0) 0.1 * n * 0.2
a1 = localIn 2 ar + mce [a0,0]
a2 = delayN a1 0.2 0.2
a3 = mceEdit reverse a2 * 0.8
return $ mrg [localOut a3, out 0 a2]
test2 :: (UId m) => m UGen
test2 = return $ mrg [offsetOut 0 p, localOut d]
where p = localIn 1 ar
i = impulse ar 1 0
d = delayC (i+(p*0.995)) 1 (recip 440 - recip controlRate)
test for offsetOut
test3 = audition $ mrg [a,b]
where a = offsetOut 0 (impulse ar 5 0)
b = out 0 (sinOsc ar 60 0 * 0.1)
test4 = audition $ mrg [a,b]
where a = out 0 $ impulse ar 5 0
b = out 0 $ sinOsc ar 60 0 * 0.1
|
97037f90a0bc29f9c425a0d6dc2c79a6ce286c7f14ea21548cee63cc95afaa4d | esl/erlang-web | e_component.erl | #!/usr/bin/env escript
The contents of this file are subject to the Erlang Web Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
Erlang Web Public License along with this software . If not , it can be
%% retrieved via the world wide web at -consulting.com/.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
The Initial Developer of the Original Code is Erlang Training & Consulting
Ltd. Portions created by Erlang Training & Consulting Ltd are Copyright 2009 ,
Erlang Training & Consulting Ltd. All Rights Reserved .
%%%-----------------------------------------------------------------------------
%%% File : e_component.erl
@author < >
@doc Script for downloading and installing the e_components for Erlang Web framework .
%%% @end
%%%-----------------------------------------------------------------------------
main(Args) ->
inets:start(),
action(Args).
action(["list"]) ->
Result = make_request("list.py"),
lists:foreach(fun(R) ->
write_result(R)
end, lists:sort(string:tokens(Result, "\n")));
action(["search", Keyword]) ->
Result = make_request("search.py?string=" ++ Keyword),
lists:foreach(fun(R) ->
write_result(R)
end, lists:sort(string:tokens(Result, "\n")));
action(["details", Name]) ->
Result = make_request("search.py?string=" ++ Name),
lists:foreach(fun(R) ->
write_detailed_result(R)
end, lists:sort(string:tokens(Result, "\n")));
action(["install", Name]) ->
action(["path", "lib/", "install", Name]);
action(["path", Path, "install", Name]) ->
Result = make_request("download.py?name=" ++ Name),
erl_tar:extract({binary, list_to_binary(Result)},
[{cwd, Path},
verbose,
compressed,
keep_old_files]),
modify_emakefile(Path, Name),
io:format("~s e_component installed successfully~n", [Name]);
action(_) ->
io:format("Usage: ~n"
"e_component.erl list - lists all of the components stored in the repository~n"
"e_component.erl search Keyword - searches in the repository for the component having "
"Keyword in its name or description~n"
"e_component.erl details Keyword - prints out the detailed information about the given "
"component~n"
"e_component.erl install Name - downloads and installs the ecomponent into the lib/ "
"folder in the current working directory~n"
"e_component.erl path Path install Name - downloads and installs the ecomponent into "
"the Path directory~n").
write_result(Desc) ->
[Name, Vsn, Description, _Categories, _Author] = string:tokens(Desc, "\t"),
io:format("~s ~s~n", [string:left(Name ++ "-" ++ Vsn, 40, $ ), Description]).
write_detailed_result(Desc) ->
[Name, Vsn, Description, Categories, Author] = string:tokens(Desc, "\t"),
io:format("Name: ~s Version: ~s\tAuthor: ~s~n", [string:left(Name, 30, $ ), Vsn, Author]),
io:format("Description:~n~s~n", [Description]),
io:format("Categories: ~s~n~n", [Categories]).
make_request(Type) ->
case http:request("-web.org/cgi-bin/" ++ Type) of
{ok, {_, _, Result}} ->
Result;
{error, Reason} ->
io:format("Error during making the ~p request, reason: ~p~n",
[Type, Reason]),
[]
end.
modify_emakefile(Path, Name) ->
case file:consult(filename:join([Path, Name, "Emakefile"])) of
{ok, Entries} ->
case file:open(filename:join([Path, "..", "Emakefile"]), [append]) of
{ok, Fd} ->
lists:map(fun(Entry) ->
io:format(Fd, "~p.~n~n", [prepare_entry(filename:join([Path, Name]), Entry)])
end, Entries),
file:close(Fd);
{error, Reason} ->
error_logger:warning_msg("Error during modifying the root Emakefile, reason: ~p~n",
[Reason])
end;
{error, Reason} ->
error_logger:warning_msg("Error during reading the e_component's Emakefile, reason: ~p~n",
[Reason])
end.
prepare_entry(Path, {Src, Opts}) ->
{filename:join([Path, Src]),
lists:map(fun(Opt) ->
prepare_entry_opt(Path, Opt)
end, Opts)}.
prepare_entry_opt(Path, {Type, In}) ->
{Type, filename:join([Path, In])};
prepare_entry_opt(_, Else) ->
Else.
| null | https://raw.githubusercontent.com/esl/erlang-web/2e5c2c9725465fc5b522250c305a9d553b3b8243/bin/e_component.erl | erlang | compliance with the License. You should have received a copy of the
retrieved via the world wide web at -consulting.com/.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-----------------------------------------------------------------------------
File : e_component.erl
@end
----------------------------------------------------------------------------- | #!/usr/bin/env escript
The contents of this file are subject to the Erlang Web Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
Erlang Web Public License along with this software . If not , it can be
Software distributed under the License is distributed on an " AS IS "
The Initial Developer of the Original Code is Erlang Training & Consulting
Ltd. Portions created by Erlang Training & Consulting Ltd are Copyright 2009 ,
Erlang Training & Consulting Ltd. All Rights Reserved .
@author < >
@doc Script for downloading and installing the e_components for Erlang Web framework .
main(Args) ->
inets:start(),
action(Args).
action(["list"]) ->
Result = make_request("list.py"),
lists:foreach(fun(R) ->
write_result(R)
end, lists:sort(string:tokens(Result, "\n")));
action(["search", Keyword]) ->
Result = make_request("search.py?string=" ++ Keyword),
lists:foreach(fun(R) ->
write_result(R)
end, lists:sort(string:tokens(Result, "\n")));
action(["details", Name]) ->
Result = make_request("search.py?string=" ++ Name),
lists:foreach(fun(R) ->
write_detailed_result(R)
end, lists:sort(string:tokens(Result, "\n")));
action(["install", Name]) ->
action(["path", "lib/", "install", Name]);
action(["path", Path, "install", Name]) ->
Result = make_request("download.py?name=" ++ Name),
erl_tar:extract({binary, list_to_binary(Result)},
[{cwd, Path},
verbose,
compressed,
keep_old_files]),
modify_emakefile(Path, Name),
io:format("~s e_component installed successfully~n", [Name]);
action(_) ->
io:format("Usage: ~n"
"e_component.erl list - lists all of the components stored in the repository~n"
"e_component.erl search Keyword - searches in the repository for the component having "
"Keyword in its name or description~n"
"e_component.erl details Keyword - prints out the detailed information about the given "
"component~n"
"e_component.erl install Name - downloads and installs the ecomponent into the lib/ "
"folder in the current working directory~n"
"e_component.erl path Path install Name - downloads and installs the ecomponent into "
"the Path directory~n").
write_result(Desc) ->
[Name, Vsn, Description, _Categories, _Author] = string:tokens(Desc, "\t"),
io:format("~s ~s~n", [string:left(Name ++ "-" ++ Vsn, 40, $ ), Description]).
write_detailed_result(Desc) ->
[Name, Vsn, Description, Categories, Author] = string:tokens(Desc, "\t"),
io:format("Name: ~s Version: ~s\tAuthor: ~s~n", [string:left(Name, 30, $ ), Vsn, Author]),
io:format("Description:~n~s~n", [Description]),
io:format("Categories: ~s~n~n", [Categories]).
make_request(Type) ->
case http:request("-web.org/cgi-bin/" ++ Type) of
{ok, {_, _, Result}} ->
Result;
{error, Reason} ->
io:format("Error during making the ~p request, reason: ~p~n",
[Type, Reason]),
[]
end.
modify_emakefile(Path, Name) ->
case file:consult(filename:join([Path, Name, "Emakefile"])) of
{ok, Entries} ->
case file:open(filename:join([Path, "..", "Emakefile"]), [append]) of
{ok, Fd} ->
lists:map(fun(Entry) ->
io:format(Fd, "~p.~n~n", [prepare_entry(filename:join([Path, Name]), Entry)])
end, Entries),
file:close(Fd);
{error, Reason} ->
error_logger:warning_msg("Error during modifying the root Emakefile, reason: ~p~n",
[Reason])
end;
{error, Reason} ->
error_logger:warning_msg("Error during reading the e_component's Emakefile, reason: ~p~n",
[Reason])
end.
prepare_entry(Path, {Src, Opts}) ->
{filename:join([Path, Src]),
lists:map(fun(Opt) ->
prepare_entry_opt(Path, Opt)
end, Opts)}.
prepare_entry_opt(Path, {Type, In}) ->
{Type, filename:join([Path, In])};
prepare_entry_opt(_, Else) ->
Else.
|
bc017ca3c244c18bd02335436ae524243cf88965bb48c71f565592a388ccc7f7 | clojure-quant/infra-guix | rock-installer.scm | (define-module (awb99 machine rock-installer)
#:use-module (gnu system)
#:use-module (gnu system install)
#:use-module (gnu packages version-control) ;git
;#:use-module (gnu packages vim)
;#:use-module (gnu packages linux)
;#:use-module (gnu packages mtools)
#:use-module (gnu packages package-management)
#:use-module (nongnu packages linux) ; arm kernel
#:export (installation-os-rock64))
(define installation-os-rock64
(operating-system
(inherit installation-os)
(kernel linux-arm64-generic)
;(firmware (list linux-firmware))
; linux firmware brings this error: kernel module not found "ahci"
(firmware %base-firmware)
;; Add some extra packages useful for the installation process
(packages
(append (list git )
(operating-system-packages installation-os)))))
| null | https://raw.githubusercontent.com/clojure-quant/infra-guix/f17021df33f65ad92c9f08eae896790fd7a849b2/modules/awb99/machine/rock-installer.scm | scheme | git
#:use-module (gnu packages vim)
#:use-module (gnu packages linux)
#:use-module (gnu packages mtools)
arm kernel
(firmware (list linux-firmware))
linux firmware brings this error: kernel module not found "ahci"
Add some extra packages useful for the installation process | (define-module (awb99 machine rock-installer)
#:use-module (gnu system)
#:use-module (gnu system install)
#:use-module (gnu packages package-management)
#:export (installation-os-rock64))
(define installation-os-rock64
(operating-system
(inherit installation-os)
(kernel linux-arm64-generic)
(firmware %base-firmware)
(packages
(append (list git )
(operating-system-packages installation-os)))))
|
0fbfefa33476f8e6b8b39e541c7c292e94fdd6143eeae64c13e2dade6d613f38 | racket/plot | bitmap.rkt | #lang typed/racket/base
(require "../bitmap.rkt")
(provide (all-from-out "../bitmap.rkt"))
| null | https://raw.githubusercontent.com/racket/plot/c4126001f2c609e36c3aa12f300e9c673ab1a806/plot-lib/plot/typed/bitmap.rkt | racket | #lang typed/racket/base
(require "../bitmap.rkt")
(provide (all-from-out "../bitmap.rkt"))
| |
10e811852646efa3f00898a6f7734cd15b7f5cbe0e97cd22f2d5c1f577287e26 | marcelgoh/opythn | bytecode.ml | Bytecode representation and compilation
open Printf
module D = DynArray
module H = Hashtbl
exception Bytecode_error of string
alias for a DynArray of Instr.t
type code = Instr.t DynArray.t
(* representation for how deeply nested a variable can be *)
type depth =
Local of int
| Global
| Referred
(* table of names and depths for a given scope *)
type sym_table = (string, depth) H.t
(* print a string and its depth *)
(* usage: H.iter print_entry (table : sym_table) *)
let print_entry str depth =
let dstr =
match depth with
Local i -> sprintf "Local %d" i
| Global -> "Global" | Referred -> "Referred"
in
printf "%s: %s\n" str dstr
(* print bytecode *)
let print_asm = Instr.print_instr_array
(* recursively compile statements *)
let rec compile_stmts in_repl in_class stmts enclosings table =
let is_global = enclosings = [] in
(* where nonlocals are stored before they're actually used *)
let nonlocals = H.create 10 in
(* for variables directly in class definitions *)
let class_vars = H.create 10 in
(* check both non-local and local tables *)
let check_tables_opt str =
match H.find_opt table str with
Some depth -> Some depth
| None ->
(match H.find_opt nonlocals str with
Some d -> Some d
| None -> None)
in
check enclosing scopes for name , return number of levels up
* calling sequence should set count to 1
* calling sequence should set count to 1
*)
let rec search_upwards count tables str : int option =
match tables with
[] -> None
| t::ts -> (match H.find_opt t str with
Some d -> (match d with
Local _ -> Some count
| _ -> None)
| None -> search_upwards (count + 1) ts str)
in
(* resolve a single expr and return unit *)
let rec resolve_expr (expr : Ast.expr) =
match expr with
Var s -> (match check_tables_opt s with
Some _ -> ()
| None -> H.replace table s Referred)
| Call(f ,es) -> resolve_expr f;
List.iter resolve_expr es
| Op(_, e) -> List.iter resolve_expr e
| Cond(e1, c, e2) -> resolve_expr e1;
resolve_expr c;
resolve_expr e2
| AttrRef(obj, _) -> resolve_expr obj
| Subscr(seq, i, slice) ->
(match slice with
Some j -> resolve_expr j
| None -> ()
);
resolve_expr i;
resolve_expr seq;
| ListLit _ | DictLit _ | TupleLit _
| IntLit _ | FloatLit _ | BoolLit _ | StrLit _ | Lambda(_, _) | None -> ()
in
iterate through statements and resolve each one
let rec resolve_stmts (stmts : Ast.stmt list) =
let add_name name =
if in_class then
H.replace class_vars name (Local 0)
else
let depth_to_add = if is_global then Global else (Local 0) in
match check_tables_opt name with
(* the only time a nonlocal shifts over to local table *)
Some d -> H.replace table name d
| None -> H.replace table name depth_to_add
in
match stmts with
[] -> ()
| s::ss ->
(match s with
Expr e -> resolve_expr e;
| Assign(Var s, e) ->
resolve_expr e;
(match H.find_opt table s with
Some Referred ->
if not in_repl then
raise (Bytecode_error (sprintf "Local name `%s` used before assignment" s))
else
()
| _ -> ());
add_name s
| Assign(AttrRef(obj, _), e) ->
resolve_expr e;
(* we don't resolve the identifier *)
resolve_expr obj
| Assign(Subscr(seq, i, slice), e) ->
resolve_expr e;
resolve_expr (Subscr(seq, i, slice))
| Assign(_, _) ->
raise (Bytecode_error "Tried to assign to non-assignable expression")
| If(c, s1, s2) ->
resolve_expr c;
resolve_stmts s1;
(match s2 with
Some ss -> resolve_stmts ss
| None -> ())
| While(c, s) ->
resolve_expr c;
resolve_stmts s
| For(id, seq, s) ->
resolve_expr id;
resolve_expr seq;
resolve_stmts s
| Global s ->
if in_class then
H.replace class_vars s Global
else
(match check_tables_opt s with
None -> H.replace table s Global
| Some _ ->
if is_global then ()
else
raise
(Bytecode_error (sprintf "Name `%s` used before global declaration" s)))
| Nonlocal s ->
if is_global then
raise (Bytecode_error "Nonlocal declared in global scope")
else
if in_class then
(match H.find_opt table s with
Some _ -> H.replace nonlocals s (Local 0)
| None ->
(match search_upwards 1 enclosings s with
Some num -> H.replace nonlocals s (Local num)
| None ->
raise (Bytecode_error (sprintf "No binding for %s found" s))))
else
(match H.find_opt table s with
Some _ ->
raise
(Bytecode_error (sprintf "Name `%s` used before nonlocal declaration" s))
| None ->
(match search_upwards 1 enclosings s with
Some num -> H.replace nonlocals s (Local num)
| None ->
raise (Bytecode_error (sprintf "No binding for %s found" s))))
| Return opt ->
(match opt with
Some e -> resolve_expr e
| None -> ())
| Del e -> resolve_expr e
(* we don't resolve inside function or class declarations *)
| Funcdef(name, _, _) ->
add_name name
| Classdef(name, _, _) ->
add_name name
| Break | Continue | Pass -> ());
resolve_stmts ss
in
the two functions below modify this instruction array
let instrs : Instr.t D.t = D.create () in
(* helper function: search list of list of lambda arguments
* calling sequence should set count to 0
*)
let rec search_lambdas count lambdas id =
match lambdas with
[] -> None
| l::ls -> if List.mem id l then Some count
else search_lambdas (count + 1) ls id
in
(* helper function to get load instruction from identifier *)
let get_load_instr lambdas id : Instr.t =
let search : Instr.t =
(match search_lambdas 0 lambdas id with
Some n -> (LOAD_LOCAL(n, id))
| None ->
(match check_tables_opt id with
Some (Local i) -> (LOAD_LOCAL(i + List.length lambdas, id))
| Some Global -> (LOAD_GLOBAL id)
| Some Referred ->
(match search_upwards 1 enclosings id with
Some n -> (LOAD_LOCAL(n, id))
| None -> (LOAD_GLOBAL id))
| None ->
raise (Bytecode_error (sprintf "Variable `%s`'s scope not resolved: COMPILE_EXPR" id))))
in
if in_class then
(match H.find_opt class_vars id with
Some d ->
(match d with
Global -> (LOAD_GLOBAL id) (* explicit global *)
| Referred | Local _ -> LOAD_NAME id)
| None ->
let instr = search in
(match instr with
LOAD_LOCAL (i, _) ->
LOAD_LOCAL (i, id)
| LOAD_GLOBAL _ ->
LOAD_NAME id
| _ ->
raise (Bytecode_error
(sprintf "Failed to get load instruction for `%s`: VAR" id))))
else
search
in
(* convert an expression to bytecode and add instructions to array *)
let rec compile_expr offset (lambdas : string list list) (e : Ast.expr) : code =
(* local instruction array for compiling expressions *)
let expr_instrs = D.create () in
let compile_and_add_expr expr = D.append (compile_expr offset lambdas expr) expr_instrs in
(* pattern match expression and compile accordingly *)
(match e with
Var id ->
let instr : Instr.t = get_load_instr lambdas id in
D.add expr_instrs instr
| IntLit i -> D.add expr_instrs (LOAD_CONST (Int i))
| FloatLit f -> D.add expr_instrs (LOAD_CONST (Float f))
| BoolLit b -> D.add expr_instrs (LOAD_CONST (Bool b))
| StrLit s -> D.add expr_instrs (LOAD_CONST (Str s))
| Call (f, args) ->
compile_and_add_expr f;
List.iter compile_and_add_expr args;
D.add expr_instrs (CALL_FUNCTION (List.length args))
| Op (And, args) ->
(match args with
e1::e2::_ ->
compile_and_add_expr e1;
let pop_index = D.length expr_instrs in
D.add expr_instrs (JUMP_IF_FALSE_OR_POP (-1)); (* dummy *)
compile_and_add_expr e2;
D.set expr_instrs pop_index (JUMP_IF_FALSE_OR_POP (D.length expr_instrs + offset)) (* backfill *)
| _ -> raise (Bytecode_error "Not enough arguments: AND"))
| Op (Or, args) ->
(match args with
e1::e2::_ ->
compile_and_add_expr e1;
let pop_index = D.length expr_instrs in
D.add expr_instrs (JUMP_IF_TRUE_OR_POP (-1)); (* dummy *)
compile_and_add_expr e2;
D.set expr_instrs pop_index (JUMP_IF_TRUE_OR_POP (D.length expr_instrs + offset)) (* backfill *)
| _ -> raise (Bytecode_error "Not enough arguments: OR"))
| Op (o, args) ->
List.iter compile_and_add_expr args;
(match o with
| Not -> D.add expr_instrs UNARY_NOT
| Is -> D.add expr_instrs COMPARE_IS
| In -> D.add expr_instrs COMPARE_IN
| NotIn -> D.add expr_instrs COMPARE_IN
| IsNot -> D.add expr_instrs COMPARE_IS_NOT
| Plus -> D.add expr_instrs BINARY_ADD
| Minus -> D.add expr_instrs BINARY_SUB
| Times -> D.add expr_instrs BINARY_MULT
| FpDiv -> D.add expr_instrs BINARY_FP_DIV
| IntDiv -> D.add expr_instrs BINARY_INT_DIV
| Mod -> D.add expr_instrs BINARY_MOD
| Exp -> D.add expr_instrs BINARY_EXP
| Eq -> D.add expr_instrs COMPARE_EQ
| Neq -> D.add expr_instrs COMPARE_NEQ
| Lt -> D.add expr_instrs COMPARE_LT
| Gt -> D.add expr_instrs COMPARE_GT
| Leq -> D.add expr_instrs COMPARE_LEQ
| Geq -> D.add expr_instrs COMPARE_GEQ
| BwAnd -> D.add expr_instrs BINARY_BW_AND
| BwOr -> D.add expr_instrs BINARY_BW_OR
| BwComp -> D.add expr_instrs UNARY_BW_COMP
| BwXor -> D.add expr_instrs BINARY_BW_XOR
| LShift -> D.add expr_instrs BINARY_LSHIFT
| RShift -> D.add expr_instrs BINARY_RSHIFT
| Neg -> D.add expr_instrs UNARY_NEG
| _ -> raise (Bytecode_error "Invalid operator encountered."))
| Cond(c,e1,e2) ->
compile_and_add_expr c;
let pop_index = D.length expr_instrs in
dummy 1
compile_and_add_expr e1;
let jump_index = D.length expr_instrs in
dummy 2
backfill 1
compile_and_add_expr e2;
backfill 2
| Lambda(args,b) ->
let new_table = H.create 10 in
List.iter (fun s -> H.replace new_table s (Local 0)) args;
let code_block = compile_expr 0 (args :: lambdas) b in
D.add code_block RETURN_VALUE;
D.add expr_instrs (MAKE_FUNCTION(args, { name = "<lambda>";
ptr = ref code_block }))
| AttrRef(e, id) ->
compile_and_add_expr e;
D.add expr_instrs (LOAD_ATTR id)
| Subscr(seq, i, slice) ->
compile_and_add_expr seq;
compile_and_add_expr i;
(match slice with
Some j ->
compile_and_add_expr j;
D.add expr_instrs SLICESUB
| None -> D.add expr_instrs SUBSCR)
| ListLit elems ->
List.iter (fun e -> compile_and_add_expr e) elems;
D.add expr_instrs (BUILD_LIST (List.length elems))
| TupleLit elems ->
List.iter (fun e -> compile_and_add_expr e) elems;
D.add expr_instrs (BUILD_TUPLE (List.length elems))
| DictLit elems ->
List.iter (fun (k,v) -> compile_and_add_expr k; compile_and_add_expr v) elems;
D.add expr_instrs (BUILD_DICT (List.length elems))
| None -> D.add expr_instrs (LOAD_CONST None)
);
expr_instrs
in
(* convert a statement to bytecode and append instructions to instruction array *)
let rec compile_stmt (in_loop : bool) (s : Ast.stmt) : unit =
let compile_and_add_expr expr = D.append (compile_expr (D.length instrs) [] expr) instrs in
let get_store_instr id : Instr.t =
if in_class then
STORE_NAME id
else
match check_tables_opt id with
Some (Local n) -> (STORE_LOCAL(n, id))
| Some Global
| Some Referred -> (STORE_GLOBAL id)
| None -> raise (Bytecode_error (sprintf "Variable `%s`'s scope not resolved: GET_STORE_INSTR" id))
in
let compile_loop start_idx pop_idx stmts for_var =
if for_var <> "" then D.add instrs (get_store_instr for_var) else ();
List.iter (compile_stmt true) stmts;
D.add instrs (JUMP start_idx); (* goto beginning of loop *)
let end_idx = D.length instrs in
(* backfill dummy address *)
if for_var <> "" then (
D.set instrs pop_idx (FOR_ITER end_idx);
)
else
D.set instrs pop_idx (POP_JUMP_IF_FALSE end_idx);
(* scan over tokens that were added to backfill breaks and continues *)
for i = start_idx to end_idx - 1 do
match D.get instrs i with
JUMP t -> if t = -10 then D.set instrs i (JUMP end_idx) else
if t = -20 then D.set instrs i (JUMP start_idx) else ()
| _ -> ()
done
in
(match s with
Expr e ->
(match e with
Call (_, _) ->
compile_and_add_expr e;
if in_repl && not in_loop then
()
else
D.add instrs POP_TOP
| _ -> compile_and_add_expr e)
| Assign (Var id, e) ->
compile_and_add_expr e;
D.add instrs (get_store_instr id)
| Assign (AttrRef(obj, id), e) ->
compile_and_add_expr e;
compile_and_add_expr obj;
D.add instrs (STORE_ATTR id)
| Assign (Subscr(seq, i, slice), e) ->
compile_and_add_expr e; (* expression to be assigned *)
compile_and_add_expr seq;
compile_and_add_expr i;
(match slice with
Some j ->
compile_and_add_expr j;
pops 4 off stack
pops 3 off stack
| Assign(_, _) ->
raise (Bytecode_error "Tried to assign to non-assignable expression")
| If (c, s1, s2) ->
compile_and_add_expr c;
let pop_index = D.length instrs in
dummy 1
List.iter (compile_stmt in_loop) s1;
(match s2 with
Some ss ->
let jump_index = D.length instrs in
dummy 2
backfill 1
List.iter (compile_stmt in_loop) ss;
backfill 2
| None ->
just backfill 1
| While (c, ss) ->
let start_idx = D.length instrs in
compile_and_add_expr c;
let pop_idx = D.length instrs in
D.add instrs (POP_JUMP_IF_FALSE (-1)); (* dummy *)
compile_loop start_idx pop_idx ss ""
| For (Var id, seq, ss) ->
compile_and_add_expr seq;
D.add instrs BUILD_SEQ;
let start_idx = D.length instrs in
D.add instrs (FOR_ITER (-1)); (* dummy *)
compile_loop start_idx start_idx ss id (* start and pop indices are the same *)
| For _ ->
raise (Bytecode_error "Cannot use provided iteration variable in for loop.")
| Funcdef (name, args, body) ->
let new_table = H.create 10 in
List.iter (fun s -> H.replace new_table s (Local 0)) args;
let code_block : code = compile_stmts in_repl false body (table::enclosings) new_table in
D.add instrs (MAKE_FUNCTION (args, { name = name;
ptr = ref code_block }));
D.add instrs (get_store_instr name)
| Return opt ->
(match opt with
Some e -> compile_and_add_expr e
| None -> D.add instrs (LOAD_CONST None)
);
D.add instrs RETURN_VALUE
| Del (Var id) ->
(match get_load_instr [] (* not in lambda *) id with
LOAD_LOCAL (n, id) -> D.add instrs (DELETE_LOCAL (n, id))
| LOAD_GLOBAL id -> D.add instrs (DELETE_GLOBAL id)
| LOAD_NAME id -> D.add instrs (DELETE_NAME id)
| _ -> raise (Bytecode_error "Could not generate delete instruction"))
| Del (Subscr(seq, i, slice)) ->
compile_and_add_expr seq;
compile_and_add_expr i;
(match slice with
Some j ->
compile_and_add_expr j;
D.add instrs DELETE_SLICESUB
| None -> D.add instrs DELETE_SUBSCR)
| Del (AttrRef(obj, id)) ->
compile_and_add_expr obj;
D.add instrs (DELETE_ATTR id)
| Del _ -> raise (Bytecode_error "Cannot delete object.")
| Break ->
if not in_loop then
raise (Bytecode_error "BREAK statement found outside loop.")
else
D.add instrs (JUMP (-10)) (* -10 indicates break *)
| Continue ->
if not in_loop then
raise (Bytecode_error "CONTINUE statement found outside loop.")
else
D.add instrs (JUMP (-20)) (* -20 indicates continue *)
| Classdef (name, super, body) ->
let code_block = compile_stmts in_repl true body enclosings table in
(* side effect: pushes superclass onto stack if needed *)
let num_supers =
match super with
Some s -> D.add instrs (LOAD_NAME s);
1
| None -> 0
in
D.add instrs (MAKE_CLASS (num_supers,
{ name = name; ptr = ref code_block }));
D.add instrs (get_store_instr name)
| Global _ | Nonlocal _ | Pass -> ()
)
in
(* iterate and compile statements *)
let rec compile_iter stmts =
match stmts with
[] -> ()
| s::ss -> compile_stmt false s;
compile_iter ss
in
(* start routine *)
resolve_stmts stmts; (* fill the hashtable of depths *)
(* H.iter print_entry table; *)
compile_iter stmts; (* compile *)
instrs
(* interface to the rest of the system *)
let compile_prog in_repl (p : Ast.program) : code =
let instrs = compile_stmts in_repl false p [] (H.create 10) in
if not in_repl && D.last instrs <> RETURN_VALUE then (
D.add instrs (LOAD_CONST None);
D.add instrs RETURN_VALUE
)
else ();
instrs
| null | https://raw.githubusercontent.com/marcelgoh/opythn/923e71635c6850a115bb4bf14fa6eb75b8ac852a/src/bytecode.ml | ocaml | representation for how deeply nested a variable can be
table of names and depths for a given scope
print a string and its depth
usage: H.iter print_entry (table : sym_table)
print bytecode
recursively compile statements
where nonlocals are stored before they're actually used
for variables directly in class definitions
check both non-local and local tables
resolve a single expr and return unit
the only time a nonlocal shifts over to local table
we don't resolve the identifier
we don't resolve inside function or class declarations
helper function: search list of list of lambda arguments
* calling sequence should set count to 0
helper function to get load instruction from identifier
explicit global
convert an expression to bytecode and add instructions to array
local instruction array for compiling expressions
pattern match expression and compile accordingly
dummy
backfill
dummy
backfill
convert a statement to bytecode and append instructions to instruction array
goto beginning of loop
backfill dummy address
scan over tokens that were added to backfill breaks and continues
expression to be assigned
dummy
dummy
start and pop indices are the same
not in lambda
-10 indicates break
-20 indicates continue
side effect: pushes superclass onto stack if needed
iterate and compile statements
start routine
fill the hashtable of depths
H.iter print_entry table;
compile
interface to the rest of the system | Bytecode representation and compilation
open Printf
module D = DynArray
module H = Hashtbl
exception Bytecode_error of string
alias for a DynArray of Instr.t
type code = Instr.t DynArray.t
type depth =
Local of int
| Global
| Referred
type sym_table = (string, depth) H.t
let print_entry str depth =
let dstr =
match depth with
Local i -> sprintf "Local %d" i
| Global -> "Global" | Referred -> "Referred"
in
printf "%s: %s\n" str dstr
let print_asm = Instr.print_instr_array
let rec compile_stmts in_repl in_class stmts enclosings table =
let is_global = enclosings = [] in
let nonlocals = H.create 10 in
let class_vars = H.create 10 in
let check_tables_opt str =
match H.find_opt table str with
Some depth -> Some depth
| None ->
(match H.find_opt nonlocals str with
Some d -> Some d
| None -> None)
in
check enclosing scopes for name , return number of levels up
* calling sequence should set count to 1
* calling sequence should set count to 1
*)
let rec search_upwards count tables str : int option =
match tables with
[] -> None
| t::ts -> (match H.find_opt t str with
Some d -> (match d with
Local _ -> Some count
| _ -> None)
| None -> search_upwards (count + 1) ts str)
in
let rec resolve_expr (expr : Ast.expr) =
match expr with
Var s -> (match check_tables_opt s with
Some _ -> ()
| None -> H.replace table s Referred)
| Call(f ,es) -> resolve_expr f;
List.iter resolve_expr es
| Op(_, e) -> List.iter resolve_expr e
| Cond(e1, c, e2) -> resolve_expr e1;
resolve_expr c;
resolve_expr e2
| AttrRef(obj, _) -> resolve_expr obj
| Subscr(seq, i, slice) ->
(match slice with
Some j -> resolve_expr j
| None -> ()
);
resolve_expr i;
resolve_expr seq;
| ListLit _ | DictLit _ | TupleLit _
| IntLit _ | FloatLit _ | BoolLit _ | StrLit _ | Lambda(_, _) | None -> ()
in
iterate through statements and resolve each one
let rec resolve_stmts (stmts : Ast.stmt list) =
let add_name name =
if in_class then
H.replace class_vars name (Local 0)
else
let depth_to_add = if is_global then Global else (Local 0) in
match check_tables_opt name with
Some d -> H.replace table name d
| None -> H.replace table name depth_to_add
in
match stmts with
[] -> ()
| s::ss ->
(match s with
Expr e -> resolve_expr e;
| Assign(Var s, e) ->
resolve_expr e;
(match H.find_opt table s with
Some Referred ->
if not in_repl then
raise (Bytecode_error (sprintf "Local name `%s` used before assignment" s))
else
()
| _ -> ());
add_name s
| Assign(AttrRef(obj, _), e) ->
resolve_expr e;
resolve_expr obj
| Assign(Subscr(seq, i, slice), e) ->
resolve_expr e;
resolve_expr (Subscr(seq, i, slice))
| Assign(_, _) ->
raise (Bytecode_error "Tried to assign to non-assignable expression")
| If(c, s1, s2) ->
resolve_expr c;
resolve_stmts s1;
(match s2 with
Some ss -> resolve_stmts ss
| None -> ())
| While(c, s) ->
resolve_expr c;
resolve_stmts s
| For(id, seq, s) ->
resolve_expr id;
resolve_expr seq;
resolve_stmts s
| Global s ->
if in_class then
H.replace class_vars s Global
else
(match check_tables_opt s with
None -> H.replace table s Global
| Some _ ->
if is_global then ()
else
raise
(Bytecode_error (sprintf "Name `%s` used before global declaration" s)))
| Nonlocal s ->
if is_global then
raise (Bytecode_error "Nonlocal declared in global scope")
else
if in_class then
(match H.find_opt table s with
Some _ -> H.replace nonlocals s (Local 0)
| None ->
(match search_upwards 1 enclosings s with
Some num -> H.replace nonlocals s (Local num)
| None ->
raise (Bytecode_error (sprintf "No binding for %s found" s))))
else
(match H.find_opt table s with
Some _ ->
raise
(Bytecode_error (sprintf "Name `%s` used before nonlocal declaration" s))
| None ->
(match search_upwards 1 enclosings s with
Some num -> H.replace nonlocals s (Local num)
| None ->
raise (Bytecode_error (sprintf "No binding for %s found" s))))
| Return opt ->
(match opt with
Some e -> resolve_expr e
| None -> ())
| Del e -> resolve_expr e
| Funcdef(name, _, _) ->
add_name name
| Classdef(name, _, _) ->
add_name name
| Break | Continue | Pass -> ());
resolve_stmts ss
in
the two functions below modify this instruction array
let instrs : Instr.t D.t = D.create () in
let rec search_lambdas count lambdas id =
match lambdas with
[] -> None
| l::ls -> if List.mem id l then Some count
else search_lambdas (count + 1) ls id
in
let get_load_instr lambdas id : Instr.t =
let search : Instr.t =
(match search_lambdas 0 lambdas id with
Some n -> (LOAD_LOCAL(n, id))
| None ->
(match check_tables_opt id with
Some (Local i) -> (LOAD_LOCAL(i + List.length lambdas, id))
| Some Global -> (LOAD_GLOBAL id)
| Some Referred ->
(match search_upwards 1 enclosings id with
Some n -> (LOAD_LOCAL(n, id))
| None -> (LOAD_GLOBAL id))
| None ->
raise (Bytecode_error (sprintf "Variable `%s`'s scope not resolved: COMPILE_EXPR" id))))
in
if in_class then
(match H.find_opt class_vars id with
Some d ->
(match d with
| Referred | Local _ -> LOAD_NAME id)
| None ->
let instr = search in
(match instr with
LOAD_LOCAL (i, _) ->
LOAD_LOCAL (i, id)
| LOAD_GLOBAL _ ->
LOAD_NAME id
| _ ->
raise (Bytecode_error
(sprintf "Failed to get load instruction for `%s`: VAR" id))))
else
search
in
let rec compile_expr offset (lambdas : string list list) (e : Ast.expr) : code =
let expr_instrs = D.create () in
let compile_and_add_expr expr = D.append (compile_expr offset lambdas expr) expr_instrs in
(match e with
Var id ->
let instr : Instr.t = get_load_instr lambdas id in
D.add expr_instrs instr
| IntLit i -> D.add expr_instrs (LOAD_CONST (Int i))
| FloatLit f -> D.add expr_instrs (LOAD_CONST (Float f))
| BoolLit b -> D.add expr_instrs (LOAD_CONST (Bool b))
| StrLit s -> D.add expr_instrs (LOAD_CONST (Str s))
| Call (f, args) ->
compile_and_add_expr f;
List.iter compile_and_add_expr args;
D.add expr_instrs (CALL_FUNCTION (List.length args))
| Op (And, args) ->
(match args with
e1::e2::_ ->
compile_and_add_expr e1;
let pop_index = D.length expr_instrs in
compile_and_add_expr e2;
| _ -> raise (Bytecode_error "Not enough arguments: AND"))
| Op (Or, args) ->
(match args with
e1::e2::_ ->
compile_and_add_expr e1;
let pop_index = D.length expr_instrs in
compile_and_add_expr e2;
| _ -> raise (Bytecode_error "Not enough arguments: OR"))
| Op (o, args) ->
List.iter compile_and_add_expr args;
(match o with
| Not -> D.add expr_instrs UNARY_NOT
| Is -> D.add expr_instrs COMPARE_IS
| In -> D.add expr_instrs COMPARE_IN
| NotIn -> D.add expr_instrs COMPARE_IN
| IsNot -> D.add expr_instrs COMPARE_IS_NOT
| Plus -> D.add expr_instrs BINARY_ADD
| Minus -> D.add expr_instrs BINARY_SUB
| Times -> D.add expr_instrs BINARY_MULT
| FpDiv -> D.add expr_instrs BINARY_FP_DIV
| IntDiv -> D.add expr_instrs BINARY_INT_DIV
| Mod -> D.add expr_instrs BINARY_MOD
| Exp -> D.add expr_instrs BINARY_EXP
| Eq -> D.add expr_instrs COMPARE_EQ
| Neq -> D.add expr_instrs COMPARE_NEQ
| Lt -> D.add expr_instrs COMPARE_LT
| Gt -> D.add expr_instrs COMPARE_GT
| Leq -> D.add expr_instrs COMPARE_LEQ
| Geq -> D.add expr_instrs COMPARE_GEQ
| BwAnd -> D.add expr_instrs BINARY_BW_AND
| BwOr -> D.add expr_instrs BINARY_BW_OR
| BwComp -> D.add expr_instrs UNARY_BW_COMP
| BwXor -> D.add expr_instrs BINARY_BW_XOR
| LShift -> D.add expr_instrs BINARY_LSHIFT
| RShift -> D.add expr_instrs BINARY_RSHIFT
| Neg -> D.add expr_instrs UNARY_NEG
| _ -> raise (Bytecode_error "Invalid operator encountered."))
| Cond(c,e1,e2) ->
compile_and_add_expr c;
let pop_index = D.length expr_instrs in
dummy 1
compile_and_add_expr e1;
let jump_index = D.length expr_instrs in
dummy 2
backfill 1
compile_and_add_expr e2;
backfill 2
| Lambda(args,b) ->
let new_table = H.create 10 in
List.iter (fun s -> H.replace new_table s (Local 0)) args;
let code_block = compile_expr 0 (args :: lambdas) b in
D.add code_block RETURN_VALUE;
D.add expr_instrs (MAKE_FUNCTION(args, { name = "<lambda>";
ptr = ref code_block }))
| AttrRef(e, id) ->
compile_and_add_expr e;
D.add expr_instrs (LOAD_ATTR id)
| Subscr(seq, i, slice) ->
compile_and_add_expr seq;
compile_and_add_expr i;
(match slice with
Some j ->
compile_and_add_expr j;
D.add expr_instrs SLICESUB
| None -> D.add expr_instrs SUBSCR)
| ListLit elems ->
List.iter (fun e -> compile_and_add_expr e) elems;
D.add expr_instrs (BUILD_LIST (List.length elems))
| TupleLit elems ->
List.iter (fun e -> compile_and_add_expr e) elems;
D.add expr_instrs (BUILD_TUPLE (List.length elems))
| DictLit elems ->
List.iter (fun (k,v) -> compile_and_add_expr k; compile_and_add_expr v) elems;
D.add expr_instrs (BUILD_DICT (List.length elems))
| None -> D.add expr_instrs (LOAD_CONST None)
);
expr_instrs
in
let rec compile_stmt (in_loop : bool) (s : Ast.stmt) : unit =
let compile_and_add_expr expr = D.append (compile_expr (D.length instrs) [] expr) instrs in
let get_store_instr id : Instr.t =
if in_class then
STORE_NAME id
else
match check_tables_opt id with
Some (Local n) -> (STORE_LOCAL(n, id))
| Some Global
| Some Referred -> (STORE_GLOBAL id)
| None -> raise (Bytecode_error (sprintf "Variable `%s`'s scope not resolved: GET_STORE_INSTR" id))
in
let compile_loop start_idx pop_idx stmts for_var =
if for_var <> "" then D.add instrs (get_store_instr for_var) else ();
List.iter (compile_stmt true) stmts;
let end_idx = D.length instrs in
if for_var <> "" then (
D.set instrs pop_idx (FOR_ITER end_idx);
)
else
D.set instrs pop_idx (POP_JUMP_IF_FALSE end_idx);
for i = start_idx to end_idx - 1 do
match D.get instrs i with
JUMP t -> if t = -10 then D.set instrs i (JUMP end_idx) else
if t = -20 then D.set instrs i (JUMP start_idx) else ()
| _ -> ()
done
in
(match s with
Expr e ->
(match e with
Call (_, _) ->
compile_and_add_expr e;
if in_repl && not in_loop then
()
else
D.add instrs POP_TOP
| _ -> compile_and_add_expr e)
| Assign (Var id, e) ->
compile_and_add_expr e;
D.add instrs (get_store_instr id)
| Assign (AttrRef(obj, id), e) ->
compile_and_add_expr e;
compile_and_add_expr obj;
D.add instrs (STORE_ATTR id)
| Assign (Subscr(seq, i, slice), e) ->
compile_and_add_expr seq;
compile_and_add_expr i;
(match slice with
Some j ->
compile_and_add_expr j;
pops 4 off stack
pops 3 off stack
| Assign(_, _) ->
raise (Bytecode_error "Tried to assign to non-assignable expression")
| If (c, s1, s2) ->
compile_and_add_expr c;
let pop_index = D.length instrs in
dummy 1
List.iter (compile_stmt in_loop) s1;
(match s2 with
Some ss ->
let jump_index = D.length instrs in
dummy 2
backfill 1
List.iter (compile_stmt in_loop) ss;
backfill 2
| None ->
just backfill 1
| While (c, ss) ->
let start_idx = D.length instrs in
compile_and_add_expr c;
let pop_idx = D.length instrs in
compile_loop start_idx pop_idx ss ""
| For (Var id, seq, ss) ->
compile_and_add_expr seq;
D.add instrs BUILD_SEQ;
let start_idx = D.length instrs in
| For _ ->
raise (Bytecode_error "Cannot use provided iteration variable in for loop.")
| Funcdef (name, args, body) ->
let new_table = H.create 10 in
List.iter (fun s -> H.replace new_table s (Local 0)) args;
let code_block : code = compile_stmts in_repl false body (table::enclosings) new_table in
D.add instrs (MAKE_FUNCTION (args, { name = name;
ptr = ref code_block }));
D.add instrs (get_store_instr name)
| Return opt ->
(match opt with
Some e -> compile_and_add_expr e
| None -> D.add instrs (LOAD_CONST None)
);
D.add instrs RETURN_VALUE
| Del (Var id) ->
LOAD_LOCAL (n, id) -> D.add instrs (DELETE_LOCAL (n, id))
| LOAD_GLOBAL id -> D.add instrs (DELETE_GLOBAL id)
| LOAD_NAME id -> D.add instrs (DELETE_NAME id)
| _ -> raise (Bytecode_error "Could not generate delete instruction"))
| Del (Subscr(seq, i, slice)) ->
compile_and_add_expr seq;
compile_and_add_expr i;
(match slice with
Some j ->
compile_and_add_expr j;
D.add instrs DELETE_SLICESUB
| None -> D.add instrs DELETE_SUBSCR)
| Del (AttrRef(obj, id)) ->
compile_and_add_expr obj;
D.add instrs (DELETE_ATTR id)
| Del _ -> raise (Bytecode_error "Cannot delete object.")
| Break ->
if not in_loop then
raise (Bytecode_error "BREAK statement found outside loop.")
else
| Continue ->
if not in_loop then
raise (Bytecode_error "CONTINUE statement found outside loop.")
else
| Classdef (name, super, body) ->
let code_block = compile_stmts in_repl true body enclosings table in
let num_supers =
match super with
Some s -> D.add instrs (LOAD_NAME s);
1
| None -> 0
in
D.add instrs (MAKE_CLASS (num_supers,
{ name = name; ptr = ref code_block }));
D.add instrs (get_store_instr name)
| Global _ | Nonlocal _ | Pass -> ()
)
in
let rec compile_iter stmts =
match stmts with
[] -> ()
| s::ss -> compile_stmt false s;
compile_iter ss
in
instrs
let compile_prog in_repl (p : Ast.program) : code =
let instrs = compile_stmts in_repl false p [] (H.create 10) in
if not in_repl && D.last instrs <> RETURN_VALUE then (
D.add instrs (LOAD_CONST None);
D.add instrs RETURN_VALUE
)
else ();
instrs
|
39e5e5ba4245404dda172adc47926038a57aee4806ddf792cce0d4b03ac14870 | janestreet/memtrace_viewer_with_deps | tuple_pool.ml | open! Core_kernel
open! Import
open Tuple_pool_intf
module Tuple_type = Tuple_type
let failwiths = Error.failwiths
let phys_equal = Caml.( == )
let arch_sixtyfour = Sys.word_size = 64
module Int = struct
let num_bits = Int.num_bits
let max_value = Caml.max_int
let to_string = string_of_int
end
let sprintf = Printf.sprintf
let concat l = Base.String.concat ~sep:"" l
module type S = S
module Pool = struct
let grow_capacity ~capacity ~old_capacity =
match capacity with
| None -> if old_capacity = 0 then 1 else old_capacity * 2
| Some capacity ->
if capacity <= old_capacity
then
failwiths
~here:[%here]
"Pool.grow got too small capacity"
(`capacity capacity, `old_capacity old_capacity)
[%sexp_of: [ `capacity of int ] * [ `old_capacity of int ]];
capacity
;;
module Slots = Tuple_type.Slots
let max_slot = 14
The pool is represented as a single [ Uniform_array.t ] , where index zero has the
metadata about the pool and the remaining indices are the tuples layed out one after
the other . Each tuple takes [ 1 + slots_per_tuple ] indices in the pool , where the
first index holds a header and the remaining indices hold the tuple 's slots :
{ v
| header | s0 | s1 | ... | s < N-1 > |
v }
A [ Pointer.t ] to a tuple contains the integer index where its header is , as well as
( a mask of ) the tuple 's unique i d.
The free tuples are singly linked via the headers .
When a tuple is in use , its header is marked to indicate so , and also to include the
tuple 's unique i d. This allows us to check in constant time whether a pointer is
valid , by comparing the i d in the pointer with the i d in the header .
When a tuple is not in use , its header is part of the free list , and its tuple slots
have dummy values of the appropriate types , from the [ dummy ] tuple supplied to
[ create ] . We must have dummy values of the correct type to prevent a segfault in
code that ( mistakenly ) uses a pointer to a free tuple .
For [ Pool . Unsafe ] , a slot in a free object is guaranteed to be an int ; it must not be
pointer to prevent a space leak . However , the int in the slot may not represent a
valid value of the type .
metadata about the pool and the remaining indices are the tuples layed out one after
the other. Each tuple takes [1 + slots_per_tuple] indices in the pool, where the
first index holds a header and the remaining indices hold the tuple's slots:
{v
| header | s0 | s1 | ... | s<N-1> |
v}
A [Pointer.t] to a tuple contains the integer index where its header is, as well as
(a mask of) the tuple's unique id.
The free tuples are singly linked via the headers.
When a tuple is in use, its header is marked to indicate so, and also to include the
tuple's unique id. This allows us to check in constant time whether a pointer is
valid, by comparing the id in the pointer with the id in the header.
When a tuple is not in use, its header is part of the free list, and its tuple slots
have dummy values of the appropriate types, from the [dummy] tuple supplied to
[create]. We must have dummy values of the correct type to prevent a segfault in
code that (mistakenly) uses a pointer to a free tuple.
For [Pool.Unsafe], a slot in a free object is guaranteed to be an int; it must not be
pointer to prevent a space leak. However, the int in the slot may not represent a
valid value of the type.
*)
module Slot = struct
type ('slots, 'a) t = int [@@deriving sexp_of]
let equal (t1 : (_, _) t) t2 = t1 = t2
let t0 = 1
let t1 = 2
let t2 = 3
let t3 = 4
let t4 = 5
let t5 = 6
let t6 = 7
let t7 = 8
let t8 = 9
let t9 = 10
let t10 = 11
let t11 = 12
let t12 = 13
let t13 = 14
let%test _ = t13 = max_slot
end
We only have [ Int.num_bits ] bits available for pool pointers . The bits of a pool
pointer encode two things :
- the tuple 's array index in the pool
- the tuple 's identifier ( not necessarily unique )
We choose [ array_index_num_bits ] as large as needed for the maximum pool capacity
that we want to support , and use the remaining [ masked_tuple_id_num_bits ] bits for
the identifier . 64 - bit and 32 - bit architectures typically have very different
address - space sizes , so we choose [ array_index_num_bits ] differently .
pointer encode two things:
- the tuple's array index in the pool
- the tuple's identifier (not necessarily unique)
We choose [array_index_num_bits] as large as needed for the maximum pool capacity
that we want to support, and use the remaining [masked_tuple_id_num_bits] bits for
the identifier. 64-bit and 32-bit architectures typically have very different
address-space sizes, so we choose [array_index_num_bits] differently. *)
let array_index_num_bits =
if arch_sixtyfour
then (
assert (Int.num_bits = 63);
30)
else (
assert (Int.num_bits = 31 || Int.num_bits = 32);
22)
;;
let masked_tuple_id_num_bits = Int.num_bits - array_index_num_bits
let%test _ = array_index_num_bits > 0
let%test _ = masked_tuple_id_num_bits > 0
let%test _ = array_index_num_bits + masked_tuple_id_num_bits <= Int.num_bits
let max_array_length = 1 lsl array_index_num_bits
module Tuple_id : sig
type t = private int [@@deriving sexp_of]
include Invariant.S with type t := t
val to_string : t -> string
val equal : t -> t -> bool
val init : t
val next : t -> t
val of_int : int -> t
val to_int : t -> int
val examples : t list
end = struct
type t = int [@@deriving sexp_of]
(* We guarantee that tuple ids are nonnegative so that they can be encoded in
headers. *)
let invariant t = assert (t >= 0)
let to_string = Int.to_string
let equal (t1 : t) t2 = t1 = t2
let init = 0
let next t = if arch_sixtyfour then t + 1 else if t = Int.max_value then 0 else t + 1
let to_int t = t
let of_int i =
if i < 0
then failwiths ~here:[%here] "Tuple_id.of_int got negative int" i [%sexp_of: int];
i
;;
let examples = [ 0; 1; 0x1FFF_FFFF; Int.max_value ]
end
let tuple_id_mask = (1 lsl masked_tuple_id_num_bits) - 1
module Pointer : sig
(* [Pointer.t] is an encoding as an [int] of the following sum type:
{[
| Null
| Normal of { header_index : int; masked_tuple_id : int }
]}
The encoding is chosen to optimize the most common operation, namely tuple-slot
access, the [slot_index] function. The encoding is designed so that [slot_index]
produces a negative number for [Null], which will cause the subsequent array bounds
check to fail. *)
type 'slots t = private int [@@deriving sexp_of, typerep]
include Invariant.S1 with type 'a t := 'a t
val phys_compare : 'a t -> 'a t -> int
val phys_equal : 'a t -> 'a t -> bool
(* The null pointer. [null] is a function due to issues with the value restriction. *)
val null : unit -> _ t
val is_null : _ t -> bool
(* Normal pointers. *)
val create : header_index:int -> Tuple_id.t -> _ t
val header_index : _ t -> int
val masked_tuple_id : _ t -> int
val slot_index : _ t -> (_, _) Slot.t -> int
val first_slot_index : _ t -> int
module Id : sig
type t [@@deriving bin_io, sexp]
val to_int63 : t -> Int63.t
val of_int63 : Int63.t -> t
end
val to_id : _ t -> Id.t
val of_id_exn : Id.t -> _ t
end = struct
(* A pointer is either [null] or the (positive) index in the pool of the next-free
field preceeding the tuple's slots. *)
type 'slots t = int [@@deriving typerep]
let sexp_of_t _ t = Sexp.Atom (sprintf "<Pool.Pointer.t: 0x%08x>" t)
(* Physical operations work directly on the int representation. *)
let phys_equal (t1 : _ t) t2 = phys_equal t1 t2
let phys_compare = compare
(* [null] is chosen so that [null () + slot] is negative for every slot in
   [0, max_slot], making any slot access on null fail the array bounds check. *)
let null () = -max_slot - 1
let is_null t = phys_equal t (null ())
(* [null] must be such that [null + slot] is an invalid array index for all slots.
Otherwise get/set on the null pointer may lead to a segfault. *)
let%test _ = null () + max_slot < 0
(* Pack the header index into the low [array_index_num_bits] bits and the masked
   tuple id into the remaining high bits. *)
let create ~header_index (tuple_id : Tuple_id.t) =
header_index
lor ((Tuple_id.to_int tuple_id land tuple_id_mask) lsl array_index_num_bits)
;;
let header_index_mask = (1 lsl array_index_num_bits) - 1
let masked_tuple_id t = t lsr array_index_num_bits
let header_index t = t land header_index_mask
(* Non-null pointers always point past index 0, which holds the pool metadata. *)
let invariant _ t = if not (is_null t) then assert (header_index t > 0)
let%test_unit _ = invariant ignore (null ())
let%test_unit _ =
List.iter Tuple_id.examples ~f:(fun tuple_id ->
invariant ignore (create ~header_index:1 tuple_id))
;;
(* Slot [i] of a tuple lives at [header_index + i]; [Slot.t] values are small ints
   in [1, max_slot], so this is a single add in the common access path. *)
let slot_index t slot = header_index t + slot
let first_slot_index t = slot_index t Slot.t0
module Id = struct
include Int63
let to_int63 t = t
let of_int63 i = i
end
let to_id t = Id.of_int t
(* Inverse of [to_id], validating that the id round-trips through the pointer
   encoding; raises on ids that were not produced by [to_id]. *)
let of_id_exn id =
try
let t = Id.to_int_exn id in
if is_null t
then t
else (
let should_equal =
create ~header_index:(header_index t) (Tuple_id.of_int (masked_tuple_id t))
in
if phys_equal t should_equal
then t
else failwiths ~here:[%here] "should equal" should_equal [%sexp_of: _ t])
with
| exn ->
failwiths
~here:[%here]
"Pointer.of_id_exn got strange id"
(id, exn)
[%sexp_of: Id.t * exn]
;;
end
module Header : sig
(* A [Header.t] is an encoding as an [int] of the following type:
{[
| Null
| Free of { next_free_header_index : int }
| Used of { tuple_id : int }
]}
If a tuple is free, its header is set to either [Null] or [Free] with
[next_free_header_index] indicating the header of the next tuple on the free list.
If a tuple is in use, its header is set to [Used]. *)
type t = private int [@@deriving sexp_of]
val null : t
val is_null : t -> bool
val free : next_free_header_index:int -> t
val is_free : t -> bool
val next_free_header_index : t -> int
(* only valid if [is_free t] *)
val used : Tuple_id.t -> t
val is_used : t -> bool
val tuple_id : t -> Tuple_id.t
(* only valid if [is_used t] *)
end = struct
(* Encoding: [0] is [Null]; a positive value is [Free] carrying the next free header
   index directly; a negative value is [Used] carrying [-1 - tuple_id]. *)
type t = int
let null = 0
let is_null t = t = 0
(* We know that header indices are [> 0], because index [0] holds the metadata. *)
let free ~next_free_header_index = next_free_header_index
let is_free t = t > 0
let next_free_header_index t = t
(* [-1 - id] is always negative because tuple ids are nonnegative. *)
let used (tuple_id : Tuple_id.t) = -1 - (tuple_id :> int)
let is_used t = t < 0
(* Inverse of [used]. *)
let tuple_id t = Tuple_id.of_int (-(t + 1))
let%test_unit _ =
List.iter Tuple_id.examples ~f:(fun id ->
let t = used id in
assert (is_used t);
assert (Tuple_id.equal (tuple_id t) id))
;;
let sexp_of_t t =
if is_null t
then Sexp.Atom "null"
else if is_free t
then Sexp.(List [ Atom "Free"; Atom (Int.to_string (next_free_header_index t)) ])
else Sexp.(List [ Atom "Used"; Atom (Tuple_id.to_string (tuple_id t)) ])
;;
end
(* Index 0 of the backing array holds the pool's [Metadata.t]; tuples start at index 1. *)
let metadata_index = 0
let start_of_tuples_index = 1
(* Largest number of tuples a pool can hold for the given tuple width: each tuple
   consumes [1 + slots_per_tuple] array indices (one header plus the slots), and
   indices before [start_of_tuples_index] are reserved for metadata. *)
let max_capacity ~slots_per_tuple =
  let usable_indices = max_array_length - start_of_tuples_index in
  let indices_per_tuple = 1 + slots_per_tuple in
  usable_indices / indices_per_tuple
;;
(* Check that for every legal tuple width, a pool at [max_capacity] still fits within
   [max_array_length]. *)
let%test_unit _ =
for slots_per_tuple = 1 to max_slot do
assert (
start_of_tuples_index + ((1 + slots_per_tuple) * max_capacity ~slots_per_tuple)
<= max_array_length)
done
;;
module Metadata = struct
(* Bookkeeping stored at index 0 of the pool array.  [length] counts tuples currently
   in use; [next_id] is the id stamped on the next allocation; [first_free] heads the
   free list threaded through tuple headers. *)
type 'slots t =
{ (* [slots_per_tuple] is number of slots in a tuple as seen by the user; i.e. not
counting the next-free pointer. *)
slots_per_tuple : int
; capacity : int
; mutable length : int
; mutable next_id : Tuple_id.t
; mutable first_free : Header.t
(* [dummy] is [None] in an unsafe pool.  In a safe pool, [dummy] is [Some a], with
[Uniform_array.length a = slots_per_tuple].  [dummy] is actually a tuple value
with the correct type (corresponding to ['slots]), but we make the type of
[dummy] be [Obj.t Uniform_array.t] because we can't write that type here.  Also,
the purpose of [dummy] is to initialize a pool element, making [dummy] an [Obj.t
Uniform_array.t] lets us initialize a pool element using [Uniform_array.blit]
from [dummy] to the pool, which is an [Obj.t Uniform_array.t]. *)
; dummy : (Obj.t Uniform_array.t[@sexp.opaque]) option
}
[@@deriving fields, sexp_of]
(* Header plus slots. *)
let array_indices_per_tuple t = 1 + t.slots_per_tuple
let array_length t = start_of_tuples_index + (t.capacity * array_indices_per_tuple t)
(* Conversions between a tuple's ordinal number in [0, capacity) and the array index
   of its header. *)
let header_index_to_tuple_num t ~header_index =
(header_index - start_of_tuples_index) / array_indices_per_tuple t
;;
let tuple_num_to_header_index t tuple_num =
start_of_tuples_index + (tuple_num * array_indices_per_tuple t)
;;
(* First slot immediately follows the header. *)
let tuple_num_to_first_slot_index t tuple_num =
tuple_num_to_header_index t tuple_num + 1
;;
let is_full t = t.length = t.capacity
end
open Metadata
(* We use type [Obj.t] because the array holds a mix of integers as well as OCaml values
of arbitrary type. *)
type 'slots t = Obj.t Uniform_array.t
(* Read the pool's metadata out of index 0.  Unsafe get is fine: every pool has at
   least the metadata element. *)
let metadata (type slots) (t : slots t) =
Uniform_array.unsafe_get t metadata_index |> (Obj.obj : _ -> slots Metadata.t)
;;
(* Number of tuples currently allocated (not capacity). *)
let length t = (metadata t).length
let sexp_of_t sexp_of_ty t = Metadata.sexp_of_t sexp_of_ty (metadata t)
(* Because [unsafe_header] and [unsafe_set_header] do not do a bounds check, one must be
sure that one has a valid [header_index] before calling them. *)
let unsafe_header t ~header_index =
Uniform_array.unsafe_get t header_index |> (Obj.obj : _ -> Header.t)
;;
(* Headers are always immediate ints, so we can use the int-only fast path. *)
let unsafe_set_header t ~header_index (header : Header.t) =
Uniform_array.unsafe_set_int_assuming_currently_int t header_index (header :> int)
;;
let header_index_is_in_bounds t ~header_index =
header_index >= start_of_tuples_index && header_index < Uniform_array.length t
;;
(* A pointer is live iff its tuple's header is [Used] and the id stamped in the header
   agrees (modulo masking) with the id carried in the pointer; this catches dangling
   pointers to tuples that were freed and reallocated. *)
let unsafe_pointer_is_live t pointer =
let header_index = Pointer.header_index pointer in
let header = unsafe_header t ~header_index in
Header.is_used header
&& Tuple_id.to_int (Header.tuple_id header) land tuple_id_mask
= Pointer.masked_tuple_id pointer
;;
let pointer_is_valid t pointer =
header_index_is_in_bounds t ~header_index:(Pointer.header_index pointer)
(* At this point, we know the pointer isn't [null] and is in bounds, so we know it is
the index of a header, since we maintain the invariant that all pointers other than
[null] are. *)
&& unsafe_pointer_is_live t pointer
;;
(* Ids are just the pointer encoding wrapped in [Int63]; no pool state needed. *)
let id_of_pointer _t pointer = Pointer.to_id pointer
(* A header index is valid iff it is in bounds and lands exactly on a tuple boundary. *)
let is_valid_header_index t ~header_index =
let metadata = metadata t in
header_index_is_in_bounds t ~header_index
&& 0
= (header_index - start_of_tuples_index)
mod Metadata.array_indices_per_tuple metadata
;;
(* Decode an id back into a pointer, checking (for non-null ids) that it addresses a
   tuple boundary and that the tuple is currently live; raises otherwise. *)
let pointer_of_id_exn t id =
try
let pointer = Pointer.of_id_exn id in
if not (Pointer.is_null pointer)
then (
let header_index = Pointer.header_index pointer in
if not (is_valid_header_index t ~header_index)
then failwiths ~here:[%here] "invalid header index" header_index [%sexp_of: int];
if not (unsafe_pointer_is_live t pointer) then failwith "pointer not live");
pointer
with
| exn ->
failwiths
~here:[%here]
"Pool.pointer_of_id_exn got invalid id"
(id, t, exn)
[%sexp_of: Pointer.Id.t * _ t * exn]
;;
(* Full structural check of a pool: field sanity, an acyclic free list whose entries
   all land on tuple boundaries, and (for unsafe pools with no [dummy]) that every
   slot of every free tuple holds an immediate int, so freed pools cannot leak heap
   blocks.  Note: slot contents of live tuples are not checked ([_invariant_a] is
   unused). *)
let invariant _invariant_a t : unit =
try
let metadata = metadata t in
let check f field = f (Field.get field metadata) in
Metadata.Fields.iter
~slots_per_tuple:(check (fun slots_per_tuple -> assert (slots_per_tuple > 0)))
~capacity:
(check (fun capacity ->
assert (capacity >= 0);
assert (Uniform_array.length t = Metadata.array_length metadata)))
~length:
(check (fun length ->
assert (length >= 0);
assert (length <= metadata.capacity)))
~next_id:(check Tuple_id.invariant)
~first_free:
(check (fun first_free ->
(* Walk the free list, marking visited tuples to detect cycles. *)
let free = Array.create ~len:metadata.capacity false in
let r = ref first_free in
while not (Header.is_null !r) do
let header = !r in
assert (Header.is_free header);
let header_index = Header.next_free_header_index header in
assert (is_valid_header_index t ~header_index);
let tuple_num = header_index_to_tuple_num metadata ~header_index in
if free.(tuple_num)
then
failwiths ~here:[%here] "cycle in free list" tuple_num [%sexp_of: int];
free.(tuple_num) <- true;
r := unsafe_header t ~header_index
done))
~dummy:
(check (function
| Some dummy ->
assert (Uniform_array.length dummy = metadata.slots_per_tuple)
| None ->
(* Unsafe pool: free tuples' slots must all be immediates. *)
for tuple_num = 0 to metadata.capacity - 1 do
let header_index = tuple_num_to_header_index metadata tuple_num in
let header = unsafe_header t ~header_index in
if Header.is_free header
then (
let first_slot = tuple_num_to_first_slot_index metadata tuple_num in
for slot = 0 to metadata.slots_per_tuple - 1 do
assert (Obj.is_int (Uniform_array.get t (first_slot + slot)))
done)
done))
with
| exn ->
failwiths ~here:[%here] "Pool.invariant failed" (exn, t) [%sexp_of: exn * _ t]
;;
let capacity t = (metadata t).capacity
let is_full t = Metadata.is_full (metadata t)
(* Push the tuple at [header_index] onto the front of the free list: its header takes
   the old list head, and the metadata's [first_free] now points at it. *)
let unsafe_add_to_free_list t metadata ~header_index =
unsafe_set_header t ~header_index metadata.first_free;
metadata.first_free <- Header.free ~next_free_header_index:header_index
;;
let set_metadata (type slots) (t : slots t) metadata =
Uniform_array.set t metadata_index (Obj.repr (metadata : slots Metadata.t))
;;
(* Allocate the backing array sized from [metadata] and install the metadata at
   index 0; tuples are left uninitialized (callers run [unsafe_init_range]). *)
let create_array (type slots) (metadata : slots Metadata.t) : slots t =
let t = Uniform_array.create_obj_array ~len:(Metadata.array_length metadata) in
set_metadata t metadata;
t
;;
(* Initialize tuples numbered from [lo] (inclusive) up to [hi] (exclusive).  For each
tuple, this puts dummy values in the tuple's slots and adds the tuple to the free
list. *)
let unsafe_init_range t metadata ~lo ~hi =
(match metadata.dummy with
| None -> ()
| Some dummy ->
for tuple_num = lo to hi - 1 do
Uniform_array.blit
~src:dummy
~src_pos:0
~dst:t
~dst_pos:(tuple_num_to_first_slot_index metadata tuple_num)
~len:metadata.slots_per_tuple
done);
(* Push in descending order so the free list hands out tuples lowest-index first. *)
for tuple_num = hi - 1 downto lo do
unsafe_add_to_free_list
t
metadata
~header_index:(tuple_num_to_header_index metadata tuple_num)
done
;;
(* Shared constructor for safe ([dummy = Some _]) and unsafe ([dummy = None]) pools.
   Validates [capacity] against [0, max_capacity], then builds the array and threads
   every tuple onto the free list. *)
let create_with_dummy slots ~capacity ~dummy =
if capacity < 0
then
failwiths ~here:[%here] "Pool.create got invalid capacity" capacity [%sexp_of: int];
let slots_per_tuple = Slots.slots_per_tuple slots in
let max_capacity = max_capacity ~slots_per_tuple in
if capacity > max_capacity
then
failwiths
~here:[%here]
"Pool.create got too large capacity"
(capacity, `max max_capacity)
[%sexp_of: int * [ `max of int ]];
let metadata =
{ Metadata.slots_per_tuple
; capacity
; length = 0
; next_id = Tuple_id.init
; first_free = Header.null
; dummy
}
in
let t = create_array metadata in
unsafe_init_range t metadata ~lo:0 ~hi:capacity;
t
;;
(* Safe-pool constructor.  The user's [dummy] tuple is reinterpreted as an
   [Obj.t Uniform_array.t]: a 1-slot tuple is boxed into a singleton array, while a
   wider tuple is already a heap block whose fields line up with array elements, so
   [Obj.magic] suffices. *)
let create (type tuple) (slots : (tuple, _) Slots.t) ~capacity ~dummy =
let dummy =
if Slots.slots_per_tuple slots = 1
then Uniform_array.singleton (Obj.repr (dummy : tuple))
else (Obj.magic (dummy : tuple) : Obj.t Uniform_array.t)
in
create_with_dummy slots ~capacity ~dummy:(Some dummy)
;;
(* Purge a pool and make it unusable. *)
let destroy t =
let metadata = metadata t in
(* We clear out all the pool's entries, which causes all pointers to be invalid. This
also prevents the destroyed pool from unnecessarily keeping heap blocks alive.
This is similar to [free]ing all the entries with the difference that we make the
free list empty as well. *)
(match metadata.dummy with
| None ->
(* Unsafe pool: overwrite every tuple index (headers and slots) with an int. *)
for i = start_of_tuples_index to Uniform_array.length t - 1 do
Uniform_array.unsafe_set t i (Obj.repr 0)
done
| Some dummy ->
(* Safe pool: null the headers and reset each tuple's slots to the dummy values. *)
for tuple_num = 0 to metadata.capacity - 1 do
let header_index = tuple_num_to_header_index metadata tuple_num in
unsafe_set_header t ~header_index Header.null;
Uniform_array.blit
~src:dummy
~src_pos:0
~dst:t
~dst_pos:(header_index + 1)
~len:metadata.slots_per_tuple
done);
(* Capacity/length drop to 0 and the free list is emptied, so no further allocation
   can succeed; [next_id] is preserved so old ids stay unique. *)
let metadata =
{ Metadata.slots_per_tuple = metadata.slots_per_tuple
; capacity = 0
; length = 0
; next_id = metadata.next_id
; first_free = Header.null
; dummy = metadata.dummy
}
in
set_metadata t metadata
;;
(* Return a new, larger pool containing the old pool's tuples; the old pool is
   [destroy]ed.  Existing pointers remain valid against the new pool because tuples
   keep their header indices (the old region is copied verbatim).  Capacity defaults
   to double (clamped to [max_capacity]); raises if no growth is possible. *)
let[@cold] grow ?capacity t =
let { Metadata.slots_per_tuple
; capacity = old_capacity
; length
; next_id
; first_free = _
; dummy
}
=
metadata t
in
let capacity =
min (max_capacity ~slots_per_tuple) (grow_capacity ~capacity ~old_capacity)
in
if capacity = old_capacity
then
failwiths
~here:[%here]
"Pool.grow cannot grow pool; capacity already at maximum"
capacity
[%sexp_of: int];
let metadata =
{ Metadata.slots_per_tuple
; capacity
; length
; next_id
; first_free = Header.null
; dummy
}
in
let t' = create_array metadata in
(* Copy the old tuple region (headers and slots) verbatim. *)
Uniform_array.blit
~src:t
~src_pos:start_of_tuples_index
~dst:t'
~dst_pos:start_of_tuples_index
~len:(old_capacity * Metadata.array_indices_per_tuple metadata);
destroy t;
(* Thread the fresh region onto the free list, then re-add the old region's unused
   tuples (descending, so lower indices are allocated first). *)
unsafe_init_range t' metadata ~lo:old_capacity ~hi:capacity;
for tuple_num = old_capacity - 1 downto 0 do
let header_index = tuple_num_to_header_index metadata tuple_num in
let header = unsafe_header t' ~header_index in
if not (Header.is_used header)
then unsafe_add_to_free_list t' metadata ~header_index
done;
t'
;;
(* Out-of-line failure path keeps [malloc] small and inlinable. *)
let[@cold] raise_malloc_full t =
failwiths ~here:[%here] "Pool.malloc of full pool" t [%sexp_of: _ t]
;;
(* Allocate a tuple: pop the head of the free list, stamp its header [Used] with a
   fresh id, and return a pointer carrying that id for later liveness checks.  The
   tuple's slots still hold dummy/previous values; callers ([newN]) overwrite them.
   Raises if the pool is full. *)
let malloc (type slots) (t : slots t) : slots Pointer.t =
let metadata = metadata t in
let first_free = metadata.first_free in
if Header.is_null first_free then raise_malloc_full t;
let header_index = Header.next_free_header_index first_free in
metadata.first_free <- unsafe_header t ~header_index;
metadata.length <- metadata.length + 1;
let tuple_id = metadata.next_id in
unsafe_set_header t ~header_index (Header.used tuple_id);
metadata.next_id <- Tuple_id.next tuple_id;
Pointer.create ~header_index tuple_id
;;
(* Return a tuple to the free list without validating [pointer] (see [free] for the
   checked version).  Slots are scrubbed — cleared to ints in an unsafe pool, reset
   to dummy values in a safe pool — so a freed tuple never keeps heap blocks alive. *)
let unsafe_free (type slots) (t : slots t) (pointer : slots Pointer.t) =
let metadata = metadata t in
metadata.length <- metadata.length - 1;
unsafe_add_to_free_list t metadata ~header_index:(Pointer.header_index pointer);
match metadata.dummy with
| None ->
let pos = Pointer.first_slot_index pointer in
for i = 0 to metadata.slots_per_tuple - 1 do
Uniform_array.unsafe_clear_if_pointer t (pos + i)
done
| Some dummy ->
Uniform_array.unsafe_blit
~src:dummy
~src_pos:0
~len:metadata.slots_per_tuple
~dst:t
~dst_pos:(Pointer.first_slot_index pointer)
;;
let free (type slots) (t : slots t) (pointer : slots Pointer.t) =
(* Check [pointer_is_valid] to:
- avoid freeing a null pointer
- avoid freeing a free pointer (this would lead to a pool inconsistency)
- be able to use unsafe functions after. *)
if not (pointer_is_valid t pointer)
then
failwiths
~here:[%here]
"Pool.free of invalid pointer"
(pointer, t)
[%sexp_of: _ Pointer.t * _ t];
unsafe_free t pointer
;;
(* [newN t a0 ... a<N-1>] allocates a tuple and initializes its [N] slots.  Slot [i]
   lives at [header_index + 1 + i]; the [unsafe_set]s are in bounds because [malloc]
   returned a valid, in-bounds header index for this pool.  One arity per supported
   tuple width (up to [max_slot]). *)
let new1 t a0 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
pointer
;;
let new2 t a0 a1 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
pointer
;;
let new3 t a0 a1 a2 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
pointer
;;
let new4 t a0 a1 a2 a3 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
pointer
;;
let new5 t a0 a1 a2 a3 a4 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
pointer
;;
let new6 t a0 a1 a2 a3 a4 a5 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
pointer
;;
let new7 t a0 a1 a2 a3 a4 a5 a6 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
pointer
;;
let new8 t a0 a1 a2 a3 a4 a5 a6 a7 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
pointer
;;
let new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
pointer
;;
let new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
pointer
;;
let new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
pointer
;;
let new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
Uniform_array.unsafe_set t (offset + 12) (Obj.repr a11);
pointer
;;
let new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
Uniform_array.unsafe_set t (offset + 12) (Obj.repr a11);
Uniform_array.unsafe_set t (offset + 13) (Obj.repr a12);
pointer
;;
let new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
Uniform_array.unsafe_set t (offset + 12) (Obj.repr a11);
Uniform_array.unsafe_set t (offset + 13) (Obj.repr a12);
Uniform_array.unsafe_set t (offset + 14) (Obj.repr a13);
pointer
;;
(* Slot accessors: [Pointer.slot_index] computes the array index; the bounds check in
   the safe variants is what catches a null pointer (its slot index is negative). *)
let get t p slot = Obj.obj (Uniform_array.get t (Pointer.slot_index p slot))
let unsafe_get t p slot =
Obj.obj (Uniform_array.unsafe_get t (Pointer.slot_index p slot))
;;
let set t p slot x = Uniform_array.set t (Pointer.slot_index p slot) (Obj.repr x)
let unsafe_set t p slot x =
Uniform_array.unsafe_set t (Pointer.slot_index p slot) (Obj.repr x)
;;
(* Materialize a whole tuple.  A 1-slot tuple is the slot value itself; a wider tuple
   is a fresh sub-array of the pool reinterpreted (via [Obj.magic]) as the tuple type,
   mirroring the representation trick used by [create]. *)
let get_tuple (type tuple) (t : (tuple, _) Slots.t t) pointer =
let metadata = metadata t in
let len = metadata.slots_per_tuple in
if len = 1
then get t pointer Slot.t0
else
(Obj.magic
(Uniform_array.sub t ~pos:(Pointer.first_slot_index pointer) ~len
: Obj.t Uniform_array.t)
: tuple)
;;
end
include Pool
(* Variant of [Pool] whose [create] takes no dummy tuple ([dummy = None]); freed
   tuples' slots are cleared to ints instead of dummy values, so reading a slot of a
   freed tuple is unsafe. *)
module Unsafe = struct
include Pool
let create slots ~capacity = create_with_dummy slots ~capacity ~dummy:None
end
(* Functor wrapping a pool implementation with debugging instrumentation: each
   operation optionally runs [invariant] on the involved pools and logs its arguments
   and result (or exception) to stderr, controlled by the [check_invariant] and
   [show_messages] refs.  All operations delegate to the wrapped [Pool]. *)
module Debug (Pool : S) = struct
open Pool
let check_invariant = ref true
let show_messages = ref true
(* Shared instrumentation: check invariants on [ts], log [arg], run [f], log and
   re-raise/return its outcome. *)
let debug name ts arg sexp_of_arg sexp_of_result f =
let prefix = "Pool." in
if !check_invariant then List.iter ts ~f:(invariant ignore);
if !show_messages then Debug.eprints (concat [ prefix; name ]) arg sexp_of_arg;
let result_or_exn = Result.try_with f in
if !show_messages
then
Debug.eprints
(concat [ prefix; name; " result" ])
result_or_exn
[%sexp_of: (result, exn) Result.t];
Result.ok_exn result_or_exn
;;
module Slots = Slots
module Slot = Slot
module Pointer = struct
open Pointer
type nonrec 'slots t = 'slots t [@@deriving sexp_of, typerep]
let phys_compare t1 t2 =
debug
"Pointer.phys_compare"
[]
(t1, t2)
[%sexp_of: _ t * _ t]
[%sexp_of: int]
(fun () -> phys_compare t1 t2)
;;
let phys_equal t1 t2 =
debug
"Pointer.phys_equal"
[]
(t1, t2)
[%sexp_of: _ t * _ t]
[%sexp_of: bool]
(fun () -> phys_equal t1 t2)
;;
let is_null t =
debug "Pointer.is_null" [] t [%sexp_of: _ t] [%sexp_of: bool] (fun () -> is_null t)
;;
let null = null
module Id = struct
open Id
type nonrec t = t [@@deriving bin_io, sexp]
let of_int63 i =
debug "Pointer.Id.of_int63" [] i [%sexp_of: Int63.t] [%sexp_of: t] (fun () ->
of_int63 i)
;;
let to_int63 t =
debug "Pointer.Id.to_int63" [] t [%sexp_of: t] [%sexp_of: Int63.t] (fun () ->
to_int63 t)
;;
end
end
type nonrec 'slots t = 'slots t [@@deriving sexp_of]
let invariant = invariant
let length = length
let id_of_pointer t pointer =
debug
"id_of_pointer"
[ t ]
pointer
[%sexp_of: _ Pointer.t]
[%sexp_of: Pointer.Id.t]
(fun () -> id_of_pointer t pointer)
;;
let pointer_of_id_exn t id =
debug
"pointer_of_id_exn"
[ t ]
id
[%sexp_of: Pointer.Id.t]
[%sexp_of: _ Pointer.t]
(fun () -> pointer_of_id_exn t id)
;;
let pointer_is_valid t pointer =
debug
"pointer_is_valid"
[ t ]
pointer
[%sexp_of: _ Pointer.t]
[%sexp_of: bool]
(fun () -> pointer_is_valid t pointer)
;;
let create slots ~capacity ~dummy =
debug "create" [] capacity [%sexp_of: int] [%sexp_of: _ t] (fun () ->
create slots ~capacity ~dummy)
;;
let max_capacity ~slots_per_tuple =
debug "max_capacity" [] slots_per_tuple [%sexp_of: int] [%sexp_of: int] (fun () ->
max_capacity ~slots_per_tuple)
;;
let capacity t =
debug "capacity" [ t ] t [%sexp_of: _ t] [%sexp_of: int] (fun () -> capacity t)
;;
let grow ?capacity t =
debug
"grow"
[ t ]
(`capacity capacity)
[%sexp_of: [ `capacity of int option ]]
[%sexp_of: _ t]
(fun () -> grow ?capacity t)
;;
let is_full t =
debug "is_full" [ t ] t [%sexp_of: _ t] [%sexp_of: bool] (fun () -> is_full t)
;;
let unsafe_free t p =
debug "unsafe_free" [ t ] p [%sexp_of: _ Pointer.t] [%sexp_of: unit] (fun () ->
unsafe_free t p)
;;
let free t p =
debug "free" [ t ] p [%sexp_of: _ Pointer.t] [%sexp_of: unit] (fun () -> free t p)
;;
(* All [newN] arities share one logging wrapper. *)
let debug_new t f = debug "new" [ t ] () [%sexp_of: unit] [%sexp_of: _ Pointer.t] f
let new1 t a0 = debug_new t (fun () -> new1 t a0)
let new2 t a0 a1 = debug_new t (fun () -> new2 t a0 a1)
let new3 t a0 a1 a2 = debug_new t (fun () -> new3 t a0 a1 a2)
let new4 t a0 a1 a2 a3 = debug_new t (fun () -> new4 t a0 a1 a2 a3)
let new5 t a0 a1 a2 a3 a4 = debug_new t (fun () -> new5 t a0 a1 a2 a3 a4)
let new6 t a0 a1 a2 a3 a4 a5 = debug_new t (fun () -> new6 t a0 a1 a2 a3 a4 a5)
let new7 t a0 a1 a2 a3 a4 a5 a6 = debug_new t (fun () -> new7 t a0 a1 a2 a3 a4 a5 a6)
let new8 t a0 a1 a2 a3 a4 a5 a6 a7 =
debug_new t (fun () -> new8 t a0 a1 a2 a3 a4 a5 a6 a7)
;;
let new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8 =
debug_new t (fun () -> new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8)
;;
let new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 =
debug_new t (fun () -> new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9)
;;
let new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 =
debug_new t (fun () -> new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10)
;;
let new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 =
debug_new t (fun () -> new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11)
;;
let new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 =
debug_new t (fun () -> new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12)
;;
let new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 =
debug_new t (fun () -> new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13)
;;
let get_tuple t pointer =
debug "get_tuple" [ t ] pointer [%sexp_of: _ Pointer.t] [%sexp_of: _] (fun () ->
get_tuple t pointer)
;;
(* [debug_get]/[debug_set] log the pointer only; the slot (and, for set, the value)
   is applied to the partially-applied result. *)
let debug_get name f t pointer =
debug name [ t ] pointer [%sexp_of: _ Pointer.t] [%sexp_of: _] (fun () -> f t pointer)
;;
let get t pointer slot = debug_get "get" get t pointer slot
let unsafe_get t pointer slot = debug_get "unsafe_get" unsafe_get t pointer slot
let debug_set name f t pointer slot a =
debug name [ t ] pointer [%sexp_of: _ Pointer.t] [%sexp_of: unit] (fun () ->
f t pointer slot a)
;;
let set t pointer slot a = debug_set "set" set t pointer slot a
let unsafe_set t pointer slot a = debug_set "unsafe_set" unsafe_set t pointer slot a
end
(* Functor wrapping a pool so that each pointer carries a mutable [is_valid] bit,
   cleared when the pointer is freed.  Every dereference goes through
   [Pointer.follow], which raises on an invalidated pointer — catching use-after-free
   at the cost of a wrapper record per pointer. *)
module Error_check (Pool : S) = struct
open Pool
module Slots = Slots
module Slot = Slot
module Pointer = struct
type 'slots t =
{ mutable is_valid : bool
; pointer : 'slots Pointer.t
}
[@@deriving sexp_of, typerep]
let create pointer = { is_valid = true; pointer }
(* The null pointer is permanently invalid: [follow] on it always raises. *)
let null () = { is_valid = false; pointer = Pointer.null () }
let phys_compare t1 t2 = Pointer.phys_compare t1.pointer t2.pointer
let phys_equal t1 t2 = Pointer.phys_equal t1.pointer t2.pointer
let is_null t = Pointer.is_null t.pointer
(* Unwrap, raising if this pointer has been invalidated (e.g. by [free]). *)
let follow t =
if not t.is_valid
then failwiths ~here:[%here] "attempt to use invalid pointer" t [%sexp_of: _ t];
t.pointer
;;
let invalidate t = t.is_valid <- false
module Id = Pointer.Id
end
type 'slots t = 'slots Pool.t [@@deriving sexp_of]
let invariant = invariant
let length = length
let pointer_is_valid t { Pointer.is_valid; pointer } =
is_valid && pointer_is_valid t pointer
;;
(* We don't do [Pointer.follow pointer], because that would disallow [id_of_pointer t
(Pointer.null ())]. *)
let id_of_pointer t pointer = id_of_pointer t pointer.Pointer.pointer
(* Reconstructed pointers get their validity bit from the underlying pool's view. *)
let pointer_of_id_exn t id =
let pointer = pointer_of_id_exn t id in
let is_valid = Pool.pointer_is_valid t pointer in
{ Pointer.is_valid; pointer }
;;
let create = create
let capacity = capacity
let max_capacity = max_capacity
let grow = grow
let is_full = is_full
let get_tuple t p = get_tuple t (Pointer.follow p)
let get t p = get t (Pointer.follow p)
let unsafe_get t p = unsafe_get t (Pointer.follow p)
let set t p slot v = set t (Pointer.follow p) slot v
let unsafe_set t p slot v = unsafe_set t (Pointer.follow p) slot v
(* Freeing invalidates the wrapper so later uses raise in [follow]. *)
let unsafe_free t p =
unsafe_free t (Pointer.follow p);
Pointer.invalidate p
;;
let free t p =
free t (Pointer.follow p);
Pointer.invalidate p
;;
let new1 t a0 = Pointer.create (Pool.new1 t a0)
let new2 t a0 a1 = Pointer.create (Pool.new2 t a0 a1)
let new3 t a0 a1 a2 = Pointer.create (Pool.new3 t a0 a1 a2)
let new4 t a0 a1 a2 a3 = Pointer.create (Pool.new4 t a0 a1 a2 a3)
let new5 t a0 a1 a2 a3 a4 = Pointer.create (Pool.new5 t a0 a1 a2 a3 a4)
let new6 t a0 a1 a2 a3 a4 a5 = Pointer.create (Pool.new6 t a0 a1 a2 a3 a4 a5)
let new7 t a0 a1 a2 a3 a4 a5 a6 = Pointer.create (Pool.new7 t a0 a1 a2 a3 a4 a5 a6)
let new8 t a0 a1 a2 a3 a4 a5 a6 a7 =
Pointer.create (Pool.new8 t a0 a1 a2 a3 a4 a5 a6 a7)
;;
let new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8 =
Pointer.create (Pool.new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8)
;;
let new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 =
Pointer.create (Pool.new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9)
;;
let new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 =
Pointer.create (Pool.new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10)
;;
let new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 =
Pointer.create (Pool.new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11)
;;
let new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 =
Pointer.create (Pool.new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12)
;;
let new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 =
Pointer.create (Pool.new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13)
;;
end
| null | https://raw.githubusercontent.com/janestreet/memtrace_viewer_with_deps/5a9e1f927f5f8333e2d71c8d3ca03a45587422c4/vendor/core_kernel/tuple_pool/src/tuple_pool.ml | ocaml | We guarantee that tuple ids are nonnegative so that they can be encoded in
headers.
[Pointer.t] is an encoding as an [int] of the following sum type:
{[
| Null
| Normal of { header_index : int; masked_tuple_id : int }
]}
The encoding is chosen to optimize the most common operation, namely tuple-slot
access, the [slot_index] function. The encoding is designed so that [slot_index]
produces a negative number for [Null], which will cause the subsequent array bounds
check to fail.
The null pointer. [null] is a function due to issues with the value restriction.
Normal pointers.
A pointer is either [null] or the (positive) index in the pool of the next-free
field preceeding the tuple's slots.
[null] must be such that [null + slot] is an invalid array index for all slots.
Otherwise get/set on the null pointer may lead to a segfault.
A [Header.t] is an encoding as an [int] of the following type:
{[
| Null
| Free of { next_free_header_index : int }
| Used of { tuple_id : int }
]}
If a tuple is free, its header is set to either [Null] or [Free] with
[next_free_header_index] indicating the header of the next tuple on the free list.
If a tuple is in use, it header is set to [Used].
only valid if [is_free t]
only valid if [is_used t]
We know that header indices are [> 0], because index [0] holds the metadata.
[slots_per_tuple] is number of slots in a tuple as seen by the user; i.e. not
counting the next-free pointer.
We use type [Obj.t] because the array holds a mix of integers as well as OCaml values
of arbitrary type.
Because [unsafe_header] and [unsafe_set_header] do not do a bounds check, one must be
sure that one has a valid [header_index] before calling them.
At this point, we know the pointer isn't [null] and is in bounds, so we know it is
the index of a header, since we maintain the invariant that all pointers other than
[null] are.
Purge a pool and make it unusable.
We clear out all the pool's entries, which causes all pointers to be invalid. This
also prevents the destroyed pool from unnecessarily keeping heap blocks alive.
This is similar to [free]ing all the entries with the difference that we make the
free list empty as well.
Check [pointer_is_valid] to:
- avoid freeing a null pointer
- avoid freeing a free pointer (this would lead to a pool inconsistency)
- be able to use unsafe functions after.
We don't do [Pointer.follow pointer], because that would disallow [id_of_pointer t
(Pointer.null ())]. | open! Core_kernel
open! Import
open Tuple_pool_intf
module Tuple_type = Tuple_type
let failwiths = Error.failwiths
let phys_equal = Caml.( == )
let arch_sixtyfour = Sys.word_size = 64
module Int = struct
let num_bits = Int.num_bits
let max_value = Caml.max_int
let to_string = string_of_int
end
let sprintf = Printf.sprintf
let concat l = Base.String.concat ~sep:"" l
module type S = S
module Pool = struct
let grow_capacity ~capacity ~old_capacity =
match capacity with
| None -> if old_capacity = 0 then 1 else old_capacity * 2
| Some capacity ->
if capacity <= old_capacity
then
failwiths
~here:[%here]
"Pool.grow got too small capacity"
(`capacity capacity, `old_capacity old_capacity)
[%sexp_of: [ `capacity of int ] * [ `old_capacity of int ]];
capacity
;;
module Slots = Tuple_type.Slots
let max_slot = 14
The pool is represented as a single [ Uniform_array.t ] , where index zero has the
metadata about the pool and the remaining indices are the tuples layed out one after
the other . Each tuple takes [ 1 + slots_per_tuple ] indices in the pool , where the
first index holds a header and the remaining indices hold the tuple 's slots :
{ v
| header | s0 | s1 | ... | s < N-1 > |
v }
A [ Pointer.t ] to a tuple contains the integer index where its header is , as well as
( a mask of ) the tuple 's unique i d.
The free tuples are singly linked via the headers .
When a tuple is in use , its header is marked to indicate so , and also to include the
tuple 's unique i d. This allows us to check in constant time whether a pointer is
valid , by comparing the i d in the pointer with the i d in the header .
When a tuple is not in use , its header is part of the free list , and its tuple slots
have dummy values of the appropriate types , from the [ dummy ] tuple supplied to
[ create ] . We must have dummy values of the correct type to prevent a segfault in
code that ( mistakenly ) uses a pointer to a free tuple .
For [ Pool . Unsafe ] , a slot in a free object is guaranteed to be an int ; it must not be
pointer to prevent a space leak . However , the int in the slot may not represent a
valid value of the type .
metadata about the pool and the remaining indices are the tuples layed out one after
the other. Each tuple takes [1 + slots_per_tuple] indices in the pool, where the
first index holds a header and the remaining indices hold the tuple's slots:
{v
| header | s0 | s1 | ... | s<N-1> |
v}
A [Pointer.t] to a tuple contains the integer index where its header is, as well as
(a mask of) the tuple's unique id.
The free tuples are singly linked via the headers.
When a tuple is in use, its header is marked to indicate so, and also to include the
tuple's unique id. This allows us to check in constant time whether a pointer is
valid, by comparing the id in the pointer with the id in the header.
When a tuple is not in use, its header is part of the free list, and its tuple slots
have dummy values of the appropriate types, from the [dummy] tuple supplied to
[create]. We must have dummy values of the correct type to prevent a segfault in
code that (mistakenly) uses a pointer to a free tuple.
For [Pool.Unsafe], a slot in a free object is guaranteed to be an int; it must not be
pointer to prevent a space leak. However, the int in the slot may not represent a
valid value of the type.
*)
module Slot = struct
type ('slots, 'a) t = int [@@deriving sexp_of]
let equal (t1 : (_, _) t) t2 = t1 = t2
let t0 = 1
let t1 = 2
let t2 = 3
let t3 = 4
let t4 = 5
let t5 = 6
let t6 = 7
let t7 = 8
let t8 = 9
let t9 = 10
let t10 = 11
let t11 = 12
let t12 = 13
let t13 = 14
let%test _ = t13 = max_slot
end
We only have [ Int.num_bits ] bits available for pool pointers . The bits of a pool
pointer encode two things :
- the tuple 's array index in the pool
- the tuple 's identifier ( not necessarily unique )
We choose [ array_index_num_bits ] as large as needed for the maximum pool capacity
that we want to support , and use the remaining [ masked_tuple_id_num_bits ] bits for
the identifier . 64 - bit and 32 - bit architectures typically have very different
address - space sizes , so we choose [ array_index_num_bits ] differently .
pointer encode two things:
- the tuple's array index in the pool
- the tuple's identifier (not necessarily unique)
We choose [array_index_num_bits] as large as needed for the maximum pool capacity
that we want to support, and use the remaining [masked_tuple_id_num_bits] bits for
the identifier. 64-bit and 32-bit architectures typically have very different
address-space sizes, so we choose [array_index_num_bits] differently. *)
let array_index_num_bits =
if arch_sixtyfour
then (
assert (Int.num_bits = 63);
30)
else (
assert (Int.num_bits = 31 || Int.num_bits = 32);
22)
;;
let masked_tuple_id_num_bits = Int.num_bits - array_index_num_bits
let%test _ = array_index_num_bits > 0
let%test _ = masked_tuple_id_num_bits > 0
let%test _ = array_index_num_bits + masked_tuple_id_num_bits <= Int.num_bits
let max_array_length = 1 lsl array_index_num_bits
module Tuple_id : sig
type t = private int [@@deriving sexp_of]
include Invariant.S with type t := t
val to_string : t -> string
val equal : t -> t -> bool
val init : t
val next : t -> t
val of_int : int -> t
val to_int : t -> int
val examples : t list
end = struct
type t = int [@@deriving sexp_of]
let invariant t = assert (t >= 0)
let to_string = Int.to_string
let equal (t1 : t) t2 = t1 = t2
let init = 0
let next t = if arch_sixtyfour then t + 1 else if t = Int.max_value then 0 else t + 1
let to_int t = t
let of_int i =
if i < 0
then failwiths ~here:[%here] "Tuple_id.of_int got negative int" i [%sexp_of: int];
i
;;
let examples = [ 0; 1; 0x1FFF_FFFF; Int.max_value ]
end
let tuple_id_mask = (1 lsl masked_tuple_id_num_bits) - 1
module Pointer : sig
type 'slots t = private int [@@deriving sexp_of, typerep]
include Invariant.S1 with type 'a t := 'a t
val phys_compare : 'a t -> 'a t -> int
val phys_equal : 'a t -> 'a t -> bool
val null : unit -> _ t
val is_null : _ t -> bool
val create : header_index:int -> Tuple_id.t -> _ t
val header_index : _ t -> int
val masked_tuple_id : _ t -> int
val slot_index : _ t -> (_, _) Slot.t -> int
val first_slot_index : _ t -> int
module Id : sig
type t [@@deriving bin_io, sexp]
val to_int63 : t -> Int63.t
val of_int63 : Int63.t -> t
end
val to_id : _ t -> Id.t
val of_id_exn : Id.t -> _ t
end = struct
type 'slots t = int [@@deriving typerep]
let sexp_of_t _ t = Sexp.Atom (sprintf "<Pool.Pointer.t: 0x%08x>" t)
let phys_equal (t1 : _ t) t2 = phys_equal t1 t2
let phys_compare = compare
let null () = -max_slot - 1
let is_null t = phys_equal t (null ())
let%test _ = null () + max_slot < 0
let create ~header_index (tuple_id : Tuple_id.t) =
header_index
lor ((Tuple_id.to_int tuple_id land tuple_id_mask) lsl array_index_num_bits)
;;
let header_index_mask = (1 lsl array_index_num_bits) - 1
let masked_tuple_id t = t lsr array_index_num_bits
let header_index t = t land header_index_mask
let invariant _ t = if not (is_null t) then assert (header_index t > 0)
let%test_unit _ = invariant ignore (null ())
let%test_unit _ =
List.iter Tuple_id.examples ~f:(fun tuple_id ->
invariant ignore (create ~header_index:1 tuple_id))
;;
let slot_index t slot = header_index t + slot
let first_slot_index t = slot_index t Slot.t0
module Id = struct
include Int63
let to_int63 t = t
let of_int63 i = i
end
let to_id t = Id.of_int t
let of_id_exn id =
try
let t = Id.to_int_exn id in
if is_null t
then t
else (
let should_equal =
create ~header_index:(header_index t) (Tuple_id.of_int (masked_tuple_id t))
in
if phys_equal t should_equal
then t
else failwiths ~here:[%here] "should equal" should_equal [%sexp_of: _ t])
with
| exn ->
failwiths
~here:[%here]
"Pointer.of_id_exn got strange id"
(id, exn)
[%sexp_of: Id.t * exn]
;;
end
module Header : sig
type t = private int [@@deriving sexp_of]
val null : t
val is_null : t -> bool
val free : next_free_header_index:int -> t
val is_free : t -> bool
val next_free_header_index : t -> int
val used : Tuple_id.t -> t
val is_used : t -> bool
val tuple_id : t -> Tuple_id.t
end = struct
type t = int
let null = 0
let is_null t = t = 0
let free ~next_free_header_index = next_free_header_index
let is_free t = t > 0
let next_free_header_index t = t
let used (tuple_id : Tuple_id.t) = -1 - (tuple_id :> int)
let is_used t = t < 0
let tuple_id t = Tuple_id.of_int (-(t + 1))
let%test_unit _ =
List.iter Tuple_id.examples ~f:(fun id ->
let t = used id in
assert (is_used t);
assert (Tuple_id.equal (tuple_id t) id))
;;
let sexp_of_t t =
if is_null t
then Sexp.Atom "null"
else if is_free t
then Sexp.(List [ Atom "Free"; Atom (Int.to_string (next_free_header_index t)) ])
else Sexp.(List [ Atom "Used"; Atom (Tuple_id.to_string (tuple_id t)) ])
;;
end
let metadata_index = 0
let start_of_tuples_index = 1
let max_capacity ~slots_per_tuple =
(max_array_length - start_of_tuples_index) / (1 + slots_per_tuple)
;;
let%test_unit _ =
for slots_per_tuple = 1 to max_slot do
assert (
start_of_tuples_index + ((1 + slots_per_tuple) * max_capacity ~slots_per_tuple)
<= max_array_length)
done
;;
module Metadata = struct
type 'slots t =
slots_per_tuple : int
; capacity : int
; mutable length : int
; mutable next_id : Tuple_id.t
; mutable first_free : Header.t
[ dummy ] is [ None ] in an unsafe pool . In a safe pool , [ dummy ] is [ Some a ] , with
[ Uniform_array.length a = slots_per_tuple ] . [ dummy ] is actually a tuple value
with the correct type ( corresponding to [ ' slots ] ) , but we make the type of
[ dummy ] be [ Obj.t Uniform_array.t ] because we ca n't write that type here . Also ,
the purpose of [ dummy ] is to initialize a pool element , making [ dummy ] an [ Obj.t
Uniform_array.t ] lets us initialize a pool element using [ Uniform_array.blit ]
from [ dummy ] to the pool , which is an [ Obj.t Uniform_array.t ] .
[Uniform_array.length a = slots_per_tuple]. [dummy] is actually a tuple value
with the correct type (corresponding to ['slots]), but we make the type of
[dummy] be [Obj.t Uniform_array.t] because we can't write that type here. Also,
the purpose of [dummy] is to initialize a pool element, making [dummy] an [Obj.t
Uniform_array.t] lets us initialize a pool element using [Uniform_array.blit]
from [dummy] to the pool, which is an [Obj.t Uniform_array.t]. *)
; dummy : (Obj.t Uniform_array.t[@sexp.opaque]) option
}
[@@deriving fields, sexp_of]
let array_indices_per_tuple t = 1 + t.slots_per_tuple
let array_length t = start_of_tuples_index + (t.capacity * array_indices_per_tuple t)
let header_index_to_tuple_num t ~header_index =
(header_index - start_of_tuples_index) / array_indices_per_tuple t
;;
let tuple_num_to_header_index t tuple_num =
start_of_tuples_index + (tuple_num * array_indices_per_tuple t)
;;
let tuple_num_to_first_slot_index t tuple_num =
tuple_num_to_header_index t tuple_num + 1
;;
let is_full t = t.length = t.capacity
end
open Metadata
type 'slots t = Obj.t Uniform_array.t
let metadata (type slots) (t : slots t) =
Uniform_array.unsafe_get t metadata_index |> (Obj.obj : _ -> slots Metadata.t)
;;
let length t = (metadata t).length
let sexp_of_t sexp_of_ty t = Metadata.sexp_of_t sexp_of_ty (metadata t)
let unsafe_header t ~header_index =
Uniform_array.unsafe_get t header_index |> (Obj.obj : _ -> Header.t)
;;
let unsafe_set_header t ~header_index (header : Header.t) =
Uniform_array.unsafe_set_int_assuming_currently_int t header_index (header :> int)
;;
let header_index_is_in_bounds t ~header_index =
header_index >= start_of_tuples_index && header_index < Uniform_array.length t
;;
let unsafe_pointer_is_live t pointer =
let header_index = Pointer.header_index pointer in
let header = unsafe_header t ~header_index in
Header.is_used header
&& Tuple_id.to_int (Header.tuple_id header) land tuple_id_mask
= Pointer.masked_tuple_id pointer
;;
let pointer_is_valid t pointer =
header_index_is_in_bounds t ~header_index:(Pointer.header_index pointer)
&& unsafe_pointer_is_live t pointer
;;
let id_of_pointer _t pointer = Pointer.to_id pointer
let is_valid_header_index t ~header_index =
let metadata = metadata t in
header_index_is_in_bounds t ~header_index
&& 0
= (header_index - start_of_tuples_index)
mod Metadata.array_indices_per_tuple metadata
;;
let pointer_of_id_exn t id =
try
let pointer = Pointer.of_id_exn id in
if not (Pointer.is_null pointer)
then (
let header_index = Pointer.header_index pointer in
if not (is_valid_header_index t ~header_index)
then failwiths ~here:[%here] "invalid header index" header_index [%sexp_of: int];
if not (unsafe_pointer_is_live t pointer) then failwith "pointer not live");
pointer
with
| exn ->
failwiths
~here:[%here]
"Pool.pointer_of_id_exn got invalid id"
(id, t, exn)
[%sexp_of: Pointer.Id.t * _ t * exn]
;;
let invariant _invariant_a t : unit =
try
let metadata = metadata t in
let check f field = f (Field.get field metadata) in
Metadata.Fields.iter
~slots_per_tuple:(check (fun slots_per_tuple -> assert (slots_per_tuple > 0)))
~capacity:
(check (fun capacity ->
assert (capacity >= 0);
assert (Uniform_array.length t = Metadata.array_length metadata)))
~length:
(check (fun length ->
assert (length >= 0);
assert (length <= metadata.capacity)))
~next_id:(check Tuple_id.invariant)
~first_free:
(check (fun first_free ->
let free = Array.create ~len:metadata.capacity false in
let r = ref first_free in
while not (Header.is_null !r) do
let header = !r in
assert (Header.is_free header);
let header_index = Header.next_free_header_index header in
assert (is_valid_header_index t ~header_index);
let tuple_num = header_index_to_tuple_num metadata ~header_index in
if free.(tuple_num)
then
failwiths ~here:[%here] "cycle in free list" tuple_num [%sexp_of: int];
free.(tuple_num) <- true;
r := unsafe_header t ~header_index
done))
~dummy:
(check (function
| Some dummy ->
assert (Uniform_array.length dummy = metadata.slots_per_tuple)
| None ->
for tuple_num = 0 to metadata.capacity - 1 do
let header_index = tuple_num_to_header_index metadata tuple_num in
let header = unsafe_header t ~header_index in
if Header.is_free header
then (
let first_slot = tuple_num_to_first_slot_index metadata tuple_num in
for slot = 0 to metadata.slots_per_tuple - 1 do
assert (Obj.is_int (Uniform_array.get t (first_slot + slot)))
done)
done))
with
| exn ->
failwiths ~here:[%here] "Pool.invariant failed" (exn, t) [%sexp_of: exn * _ t]
;;
let capacity t = (metadata t).capacity
let is_full t = Metadata.is_full (metadata t)
let unsafe_add_to_free_list t metadata ~header_index =
unsafe_set_header t ~header_index metadata.first_free;
metadata.first_free <- Header.free ~next_free_header_index:header_index
;;
let set_metadata (type slots) (t : slots t) metadata =
Uniform_array.set t metadata_index (Obj.repr (metadata : slots Metadata.t))
;;
let create_array (type slots) (metadata : slots Metadata.t) : slots t =
let t = Uniform_array.create_obj_array ~len:(Metadata.array_length metadata) in
set_metadata t metadata;
t
;;
Initialize tuples numbered from [ lo ] ( inclusive ) up to [ hi ] ( exclusive ) . For each
tuple , this puts dummy values in the tuple 's slots and adds the tuple to the free
list .
tuple, this puts dummy values in the tuple's slots and adds the tuple to the free
list. *)
let unsafe_init_range t metadata ~lo ~hi =
(match metadata.dummy with
| None -> ()
| Some dummy ->
for tuple_num = lo to hi - 1 do
Uniform_array.blit
~src:dummy
~src_pos:0
~dst:t
~dst_pos:(tuple_num_to_first_slot_index metadata tuple_num)
~len:metadata.slots_per_tuple
done);
for tuple_num = hi - 1 downto lo do
unsafe_add_to_free_list
t
metadata
~header_index:(tuple_num_to_header_index metadata tuple_num)
done
;;
let create_with_dummy slots ~capacity ~dummy =
if capacity < 0
then
failwiths ~here:[%here] "Pool.create got invalid capacity" capacity [%sexp_of: int];
let slots_per_tuple = Slots.slots_per_tuple slots in
let max_capacity = max_capacity ~slots_per_tuple in
if capacity > max_capacity
then
failwiths
~here:[%here]
"Pool.create got too large capacity"
(capacity, `max max_capacity)
[%sexp_of: int * [ `max of int ]];
let metadata =
{ Metadata.slots_per_tuple
; capacity
; length = 0
; next_id = Tuple_id.init
; first_free = Header.null
; dummy
}
in
let t = create_array metadata in
unsafe_init_range t metadata ~lo:0 ~hi:capacity;
t
;;
let create (type tuple) (slots : (tuple, _) Slots.t) ~capacity ~dummy =
let dummy =
if Slots.slots_per_tuple slots = 1
then Uniform_array.singleton (Obj.repr (dummy : tuple))
else (Obj.magic (dummy : tuple) : Obj.t Uniform_array.t)
in
create_with_dummy slots ~capacity ~dummy:(Some dummy)
;;
let destroy t =
let metadata = metadata t in
(match metadata.dummy with
| None ->
for i = start_of_tuples_index to Uniform_array.length t - 1 do
Uniform_array.unsafe_set t i (Obj.repr 0)
done
| Some dummy ->
for tuple_num = 0 to metadata.capacity - 1 do
let header_index = tuple_num_to_header_index metadata tuple_num in
unsafe_set_header t ~header_index Header.null;
Uniform_array.blit
~src:dummy
~src_pos:0
~dst:t
~dst_pos:(header_index + 1)
~len:metadata.slots_per_tuple
done);
let metadata =
{ Metadata.slots_per_tuple = metadata.slots_per_tuple
; capacity = 0
; length = 0
; next_id = metadata.next_id
; first_free = Header.null
; dummy = metadata.dummy
}
in
set_metadata t metadata
;;
let[@cold] grow ?capacity t =
let { Metadata.slots_per_tuple
; capacity = old_capacity
; length
; next_id
; first_free = _
; dummy
}
=
metadata t
in
let capacity =
min (max_capacity ~slots_per_tuple) (grow_capacity ~capacity ~old_capacity)
in
if capacity = old_capacity
then
failwiths
~here:[%here]
"Pool.grow cannot grow pool; capacity already at maximum"
capacity
[%sexp_of: int];
let metadata =
{ Metadata.slots_per_tuple
; capacity
; length
; next_id
; first_free = Header.null
; dummy
}
in
let t' = create_array metadata in
Uniform_array.blit
~src:t
~src_pos:start_of_tuples_index
~dst:t'
~dst_pos:start_of_tuples_index
~len:(old_capacity * Metadata.array_indices_per_tuple metadata);
destroy t;
unsafe_init_range t' metadata ~lo:old_capacity ~hi:capacity;
for tuple_num = old_capacity - 1 downto 0 do
let header_index = tuple_num_to_header_index metadata tuple_num in
let header = unsafe_header t' ~header_index in
if not (Header.is_used header)
then unsafe_add_to_free_list t' metadata ~header_index
done;
t'
;;
let[@cold] raise_malloc_full t =
failwiths ~here:[%here] "Pool.malloc of full pool" t [%sexp_of: _ t]
;;
let malloc (type slots) (t : slots t) : slots Pointer.t =
let metadata = metadata t in
let first_free = metadata.first_free in
if Header.is_null first_free then raise_malloc_full t;
let header_index = Header.next_free_header_index first_free in
metadata.first_free <- unsafe_header t ~header_index;
metadata.length <- metadata.length + 1;
let tuple_id = metadata.next_id in
unsafe_set_header t ~header_index (Header.used tuple_id);
metadata.next_id <- Tuple_id.next tuple_id;
Pointer.create ~header_index tuple_id
;;
let unsafe_free (type slots) (t : slots t) (pointer : slots Pointer.t) =
let metadata = metadata t in
metadata.length <- metadata.length - 1;
unsafe_add_to_free_list t metadata ~header_index:(Pointer.header_index pointer);
match metadata.dummy with
| None ->
let pos = Pointer.first_slot_index pointer in
for i = 0 to metadata.slots_per_tuple - 1 do
Uniform_array.unsafe_clear_if_pointer t (pos + i)
done
| Some dummy ->
Uniform_array.unsafe_blit
~src:dummy
~src_pos:0
~len:metadata.slots_per_tuple
~dst:t
~dst_pos:(Pointer.first_slot_index pointer)
;;
let free (type slots) (t : slots t) (pointer : slots Pointer.t) =
if not (pointer_is_valid t pointer)
then
failwiths
~here:[%here]
"Pool.free of invalid pointer"
(pointer, t)
[%sexp_of: _ Pointer.t * _ t];
unsafe_free t pointer
;;
let new1 t a0 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
pointer
;;
let new2 t a0 a1 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
pointer
;;
let new3 t a0 a1 a2 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
pointer
;;
let new4 t a0 a1 a2 a3 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
pointer
;;
let new5 t a0 a1 a2 a3 a4 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
pointer
;;
let new6 t a0 a1 a2 a3 a4 a5 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
pointer
;;
let new7 t a0 a1 a2 a3 a4 a5 a6 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
pointer
;;
let new8 t a0 a1 a2 a3 a4 a5 a6 a7 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
pointer
;;
let new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
pointer
;;
let new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
pointer
;;
let new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
pointer
;;
let new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
Uniform_array.unsafe_set t (offset + 12) (Obj.repr a11);
pointer
;;
let new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
Uniform_array.unsafe_set t (offset + 12) (Obj.repr a11);
Uniform_array.unsafe_set t (offset + 13) (Obj.repr a12);
pointer
;;
let new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 =
let pointer = malloc t in
let offset = Pointer.header_index pointer in
Uniform_array.unsafe_set t (offset + 1) (Obj.repr a0);
Uniform_array.unsafe_set t (offset + 2) (Obj.repr a1);
Uniform_array.unsafe_set t (offset + 3) (Obj.repr a2);
Uniform_array.unsafe_set t (offset + 4) (Obj.repr a3);
Uniform_array.unsafe_set t (offset + 5) (Obj.repr a4);
Uniform_array.unsafe_set t (offset + 6) (Obj.repr a5);
Uniform_array.unsafe_set t (offset + 7) (Obj.repr a6);
Uniform_array.unsafe_set t (offset + 8) (Obj.repr a7);
Uniform_array.unsafe_set t (offset + 9) (Obj.repr a8);
Uniform_array.unsafe_set t (offset + 10) (Obj.repr a9);
Uniform_array.unsafe_set t (offset + 11) (Obj.repr a10);
Uniform_array.unsafe_set t (offset + 12) (Obj.repr a11);
Uniform_array.unsafe_set t (offset + 13) (Obj.repr a12);
Uniform_array.unsafe_set t (offset + 14) (Obj.repr a13);
pointer
;;
let get t p slot = Obj.obj (Uniform_array.get t (Pointer.slot_index p slot))
let unsafe_get t p slot =
Obj.obj (Uniform_array.unsafe_get t (Pointer.slot_index p slot))
;;
let set t p slot x = Uniform_array.set t (Pointer.slot_index p slot) (Obj.repr x)
let unsafe_set t p slot x =
Uniform_array.unsafe_set t (Pointer.slot_index p slot) (Obj.repr x)
;;
let get_tuple (type tuple) (t : (tuple, _) Slots.t t) pointer =
let metadata = metadata t in
let len = metadata.slots_per_tuple in
if len = 1
then get t pointer Slot.t0
else
(Obj.magic
(Uniform_array.sub t ~pos:(Pointer.first_slot_index pointer) ~len
: Obj.t Uniform_array.t)
: tuple)
;;
end
include Pool
module Unsafe = struct
include Pool
let create slots ~capacity = create_with_dummy slots ~capacity ~dummy:None
end
module Debug (Pool : S) = struct
open Pool
let check_invariant = ref true
let show_messages = ref true
let debug name ts arg sexp_of_arg sexp_of_result f =
let prefix = "Pool." in
if !check_invariant then List.iter ts ~f:(invariant ignore);
if !show_messages then Debug.eprints (concat [ prefix; name ]) arg sexp_of_arg;
let result_or_exn = Result.try_with f in
if !show_messages
then
Debug.eprints
(concat [ prefix; name; " result" ])
result_or_exn
[%sexp_of: (result, exn) Result.t];
Result.ok_exn result_or_exn
;;
module Slots = Slots
module Slot = Slot
module Pointer = struct
open Pointer
type nonrec 'slots t = 'slots t [@@deriving sexp_of, typerep]
let phys_compare t1 t2 =
debug
"Pointer.phys_compare"
[]
(t1, t2)
[%sexp_of: _ t * _ t]
[%sexp_of: int]
(fun () -> phys_compare t1 t2)
;;
let phys_equal t1 t2 =
debug
"Pointer.phys_equal"
[]
(t1, t2)
[%sexp_of: _ t * _ t]
[%sexp_of: bool]
(fun () -> phys_equal t1 t2)
;;
let is_null t =
debug "Pointer.is_null" [] t [%sexp_of: _ t] [%sexp_of: bool] (fun () -> is_null t)
;;
let null = null
module Id = struct
open Id
type nonrec t = t [@@deriving bin_io, sexp]
let of_int63 i =
debug "Pointer.Id.of_int63" [] i [%sexp_of: Int63.t] [%sexp_of: t] (fun () ->
of_int63 i)
;;
let to_int63 t =
debug "Pointer.Id.to_int63" [] t [%sexp_of: t] [%sexp_of: Int63.t] (fun () ->
to_int63 t)
;;
end
end
type nonrec 'slots t = 'slots t [@@deriving sexp_of]
let invariant = invariant
let length = length
let id_of_pointer t pointer =
debug
"id_of_pointer"
[ t ]
pointer
[%sexp_of: _ Pointer.t]
[%sexp_of: Pointer.Id.t]
(fun () -> id_of_pointer t pointer)
;;
let pointer_of_id_exn t id =
debug
"pointer_of_id_exn"
[ t ]
id
[%sexp_of: Pointer.Id.t]
[%sexp_of: _ Pointer.t]
(fun () -> pointer_of_id_exn t id)
;;
let pointer_is_valid t pointer =
debug
"pointer_is_valid"
[ t ]
pointer
[%sexp_of: _ Pointer.t]
[%sexp_of: bool]
(fun () -> pointer_is_valid t pointer)
;;
let create slots ~capacity ~dummy =
debug "create" [] capacity [%sexp_of: int] [%sexp_of: _ t] (fun () ->
create slots ~capacity ~dummy)
;;
let max_capacity ~slots_per_tuple =
debug "max_capacity" [] slots_per_tuple [%sexp_of: int] [%sexp_of: int] (fun () ->
max_capacity ~slots_per_tuple)
;;
let capacity t =
debug "capacity" [ t ] t [%sexp_of: _ t] [%sexp_of: int] (fun () -> capacity t)
;;
let grow ?capacity t =
debug
"grow"
[ t ]
(`capacity capacity)
[%sexp_of: [ `capacity of int option ]]
[%sexp_of: _ t]
(fun () -> grow ?capacity t)
;;
let is_full t =
debug "is_full" [ t ] t [%sexp_of: _ t] [%sexp_of: bool] (fun () -> is_full t)
;;
let unsafe_free t p =
debug "unsafe_free" [ t ] p [%sexp_of: _ Pointer.t] [%sexp_of: unit] (fun () ->
unsafe_free t p)
;;
let free t p =
debug "free" [ t ] p [%sexp_of: _ Pointer.t] [%sexp_of: unit] (fun () -> free t p)
;;
let debug_new t f = debug "new" [ t ] () [%sexp_of: unit] [%sexp_of: _ Pointer.t] f
let new1 t a0 = debug_new t (fun () -> new1 t a0)
let new2 t a0 a1 = debug_new t (fun () -> new2 t a0 a1)
let new3 t a0 a1 a2 = debug_new t (fun () -> new3 t a0 a1 a2)
let new4 t a0 a1 a2 a3 = debug_new t (fun () -> new4 t a0 a1 a2 a3)
let new5 t a0 a1 a2 a3 a4 = debug_new t (fun () -> new5 t a0 a1 a2 a3 a4)
let new6 t a0 a1 a2 a3 a4 a5 = debug_new t (fun () -> new6 t a0 a1 a2 a3 a4 a5)
let new7 t a0 a1 a2 a3 a4 a5 a6 = debug_new t (fun () -> new7 t a0 a1 a2 a3 a4 a5 a6)
let new8 t a0 a1 a2 a3 a4 a5 a6 a7 =
debug_new t (fun () -> new8 t a0 a1 a2 a3 a4 a5 a6 a7)
;;
let new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8 =
debug_new t (fun () -> new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8)
;;
let new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 =
debug_new t (fun () -> new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9)
;;
let new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 =
debug_new t (fun () -> new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10)
;;
let new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 =
debug_new t (fun () -> new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11)
;;
let new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 =
debug_new t (fun () -> new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12)
;;
let new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 =
debug_new t (fun () -> new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13)
;;
let get_tuple t pointer =
debug "get_tuple" [ t ] pointer [%sexp_of: _ Pointer.t] [%sexp_of: _] (fun () ->
get_tuple t pointer)
;;
let debug_get name f t pointer =
debug name [ t ] pointer [%sexp_of: _ Pointer.t] [%sexp_of: _] (fun () -> f t pointer)
;;
let get t pointer slot = debug_get "get" get t pointer slot
let unsafe_get t pointer slot = debug_get "unsafe_get" unsafe_get t pointer slot
let debug_set name f t pointer slot a =
debug name [ t ] pointer [%sexp_of: _ Pointer.t] [%sexp_of: unit] (fun () ->
f t pointer slot a)
;;
let set t pointer slot a = debug_set "set" set t pointer slot a
let unsafe_set t pointer slot a = debug_set "unsafe_set" unsafe_set t pointer slot a
end
module Error_check (Pool : S) = struct
open Pool
module Slots = Slots
module Slot = Slot
module Pointer = struct
type 'slots t =
{ mutable is_valid : bool
; pointer : 'slots Pointer.t
}
[@@deriving sexp_of, typerep]
let create pointer = { is_valid = true; pointer }
let null () = { is_valid = false; pointer = Pointer.null () }
let phys_compare t1 t2 = Pointer.phys_compare t1.pointer t2.pointer
let phys_equal t1 t2 = Pointer.phys_equal t1.pointer t2.pointer
let is_null t = Pointer.is_null t.pointer
let follow t =
if not t.is_valid
then failwiths ~here:[%here] "attempt to use invalid pointer" t [%sexp_of: _ t];
t.pointer
;;
let invalidate t = t.is_valid <- false
module Id = Pointer.Id
end
type 'slots t = 'slots Pool.t [@@deriving sexp_of]
let invariant = invariant
let length = length
let pointer_is_valid t { Pointer.is_valid; pointer } =
is_valid && pointer_is_valid t pointer
;;
let id_of_pointer t pointer = id_of_pointer t pointer.Pointer.pointer
let pointer_of_id_exn t id =
let pointer = pointer_of_id_exn t id in
let is_valid = Pool.pointer_is_valid t pointer in
{ Pointer.is_valid; pointer }
;;
let create = create
let capacity = capacity
let max_capacity = max_capacity
let grow = grow
let is_full = is_full
let get_tuple t p = get_tuple t (Pointer.follow p)
let get t p = get t (Pointer.follow p)
let unsafe_get t p = unsafe_get t (Pointer.follow p)
let set t p slot v = set t (Pointer.follow p) slot v
let unsafe_set t p slot v = unsafe_set t (Pointer.follow p) slot v
let unsafe_free t p =
unsafe_free t (Pointer.follow p);
Pointer.invalidate p
;;
let free t p =
free t (Pointer.follow p);
Pointer.invalidate p
;;
let new1 t a0 = Pointer.create (Pool.new1 t a0)
let new2 t a0 a1 = Pointer.create (Pool.new2 t a0 a1)
let new3 t a0 a1 a2 = Pointer.create (Pool.new3 t a0 a1 a2)
let new4 t a0 a1 a2 a3 = Pointer.create (Pool.new4 t a0 a1 a2 a3)
let new5 t a0 a1 a2 a3 a4 = Pointer.create (Pool.new5 t a0 a1 a2 a3 a4)
let new6 t a0 a1 a2 a3 a4 a5 = Pointer.create (Pool.new6 t a0 a1 a2 a3 a4 a5)
let new7 t a0 a1 a2 a3 a4 a5 a6 = Pointer.create (Pool.new7 t a0 a1 a2 a3 a4 a5 a6)
let new8 t a0 a1 a2 a3 a4 a5 a6 a7 =
Pointer.create (Pool.new8 t a0 a1 a2 a3 a4 a5 a6 a7)
;;
let new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8 =
Pointer.create (Pool.new9 t a0 a1 a2 a3 a4 a5 a6 a7 a8)
;;
let new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 =
Pointer.create (Pool.new10 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9)
;;
let new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 =
Pointer.create (Pool.new11 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10)
;;
let new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 =
Pointer.create (Pool.new12 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11)
;;
let new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 =
Pointer.create (Pool.new13 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12)
;;
let new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 =
Pointer.create (Pool.new14 t a0 a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13)
;;
end
|
cbc98b67a4c3d64a669bfad0f905a6519c28beebe1b9c3a6ec3d5b9261247894 | na4zagin3/satyrographos | repository.ml | open Core
type library_name = string
exception RegisteredAlready of library_name
module StringSet = Set.Make(String)
type t = {
cache: Store.store;
metadata: Metadata.store;
}
(* Basic operations *)
let list reg = Metadata.list reg.metadata
TODO get data from metadata
let directory reg name = Store.directory reg.cache name
let mem reg name = Metadata.mem reg.metadata name
TODO lock
let remove_multiple reg names =
Metadata.remove_multiple reg.metadata names;
Store.remove_multiple reg.cache names
let remove reg name =
remove_multiple reg [name]
let add_dir reg name dir =
let abs_dir = Filename_unix.realpath dir in
let uri = Uri.make ~scheme:"file" ~path:abs_dir () in
Store.add_dir reg.cache name ;
Metadata.add reg.metadata name {
url = uri;
}
let update ~outf reg name =
match Metadata.find reg.metadata name with
| None -> failwith (Printf.sprintf "Library %s is not found" name)
| Some metadata -> begin match Uri.scheme metadata.url with
| Some "file" ->
let dir = Uri.path metadata.url in
let library = Library.read_dir ~outf dir in
Library.to_string library |> print_endline;
Store.remove reg.cache name;
Store.add_library ~outf reg.cache name library
| None ->
failwith (Printf.sprintf "BUG: URL scheme of library %s is unknown." name)
| Some s ->
failwith (Printf.sprintf "Unknown scheme %s." s)
end
TODO build only obsoleted libraries
let update_all ~outf reg =
let updated_libraries = list reg in
List.iter ~f:(update ~outf reg) updated_libraries;
Some updated_libraries
Advanced operations
(* TODO Implement lock *)
let add ~outf reg name uri =
if Metadata.mem reg.metadata name
then failwith (Printf.sprintf "%s is already registered." name)
else begin match Uri.scheme uri with
| None | Some "file" ->
let path = Uri.path uri in
add_dir reg name path
| Some s ->
failwith (Printf.sprintf "Unknown scheme %s." s)
end;
update_all ~outf reg
let gc reg =
let current_libraries = list reg |> StringSet.of_list in
let valid_libraries = Metadata.list reg.metadata |> StringSet.of_list in
let broken_libraries = StringSet.diff current_libraries valid_libraries in
StringSet.to_list broken_libraries
|> remove_multiple reg
let initialize libraries_dir metadata_file =
Store.initialize libraries_dir;
Metadata.initialize metadata_file
let read library_dir metadata_file = {
cache = Store.read library_dir;
metadata = metadata_file;
}
(* Tests *)
| null | https://raw.githubusercontent.com/na4zagin3/satyrographos/d2943bac9659d20746720ab36ebe11ae59203d32/src/repository.ml | ocaml | Basic operations
TODO Implement lock
Tests | open Core
type library_name = string
exception RegisteredAlready of library_name
module StringSet = Set.Make(String)
type t = {
cache: Store.store;
metadata: Metadata.store;
}
let list reg = Metadata.list reg.metadata
TODO get data from metadata
let directory reg name = Store.directory reg.cache name
let mem reg name = Metadata.mem reg.metadata name
TODO lock
let remove_multiple reg names =
Metadata.remove_multiple reg.metadata names;
Store.remove_multiple reg.cache names
let remove reg name =
remove_multiple reg [name]
let add_dir reg name dir =
let abs_dir = Filename_unix.realpath dir in
let uri = Uri.make ~scheme:"file" ~path:abs_dir () in
Store.add_dir reg.cache name ;
Metadata.add reg.metadata name {
url = uri;
}
let update ~outf reg name =
match Metadata.find reg.metadata name with
| None -> failwith (Printf.sprintf "Library %s is not found" name)
| Some metadata -> begin match Uri.scheme metadata.url with
| Some "file" ->
let dir = Uri.path metadata.url in
let library = Library.read_dir ~outf dir in
Library.to_string library |> print_endline;
Store.remove reg.cache name;
Store.add_library ~outf reg.cache name library
| None ->
failwith (Printf.sprintf "BUG: URL scheme of library %s is unknown." name)
| Some s ->
failwith (Printf.sprintf "Unknown scheme %s." s)
end
TODO build only obsoleted libraries
let update_all ~outf reg =
let updated_libraries = list reg in
List.iter ~f:(update ~outf reg) updated_libraries;
Some updated_libraries
Advanced operations
let add ~outf reg name uri =
if Metadata.mem reg.metadata name
then failwith (Printf.sprintf "%s is already registered." name)
else begin match Uri.scheme uri with
| None | Some "file" ->
let path = Uri.path uri in
add_dir reg name path
| Some s ->
failwith (Printf.sprintf "Unknown scheme %s." s)
end;
update_all ~outf reg
let gc reg =
let current_libraries = list reg |> StringSet.of_list in
let valid_libraries = Metadata.list reg.metadata |> StringSet.of_list in
let broken_libraries = StringSet.diff current_libraries valid_libraries in
StringSet.to_list broken_libraries
|> remove_multiple reg
let initialize libraries_dir metadata_file =
Store.initialize libraries_dir;
Metadata.initialize metadata_file
let read library_dir metadata_file = {
cache = Store.read library_dir;
metadata = metadata_file;
}
|
252c4e0fc55cd3b95a464889c64268d879279cb9331c32fe7d55e1fcb82ba87a | clojure-interop/java-jdk | MenuItemUI.clj | (ns javax.swing.plaf.MenuItemUI
"Pluggable look and feel interface for JMenuItem."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf MenuItemUI]))
(defn ->menu-item-ui
"Constructor."
(^MenuItemUI []
(new MenuItemUI )))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/plaf/MenuItemUI.clj | clojure | (ns javax.swing.plaf.MenuItemUI
"Pluggable look and feel interface for JMenuItem."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf MenuItemUI]))
(defn ->menu-item-ui
"Constructor."
(^MenuItemUI []
(new MenuItemUI )))
| |
9610b8fd4ea165dd58166246485eef1eb188ddfd21626779ee85770e1a34689b | hjcapple/reading-sicp | exercise_2_86.scm | #lang racket
P137 - [ 练习 2.86 ]
(require "ch2support.scm")
(require (submod "complex_number_data_directed.scm" complex-op))
;;;;;;;;;;;;;;;;;;;;;;;;
(define (add x y) (apply-generic 'add x y))
(define (sub x y) (apply-generic 'sub x y))
(define (mul x y) (apply-generic 'mul x y))
(define (div x y) (apply-generic 'div x y))
(define (equ? x y) (apply-generic 'equ? x y))
(define (sine x) (apply-generic 'sine x))
(define (cosine x) (apply-generic 'cosine x))
;;;;;;;;;;;;;;;;;;;;;;;;
[ 见练习 2.78 ]
(define (attach-tag type-tag contents)
(if (number? contents)
contents
(cons type-tag contents)))
(define (type-tag datum)
(if (number? datum)
'scheme-number
(if (pair? datum)
(car datum)
(error "Bad tagged datum -- TYPE-TAG" datum))))
(define (contents datum)
(if (number? datum)
datum
(if (pair? datum)
(cdr datum)
(error "Bad tagged datum -- CONTENTS" datum))))
[ 见练习 2.84 ]
(define (raise-into x type)
(let ((x-type (type-tag x)))
(if (equal? x-type type)
x
(let ((x-raise (raise x)))
(if x-raise
(raise-into x-raise type)
#f)))))
(define (apply-generic op . args)
(let ((type-tags (map type-tag args)))
(let ((proc (get op type-tags)))
(if proc
(drop (apply proc (map contents args)))
(if (and (= (length args) 2)
防止 a1、a2 类型相同时死循环,见[练习 2.81 ]
(let ((a1 (car args))
(a2 (cadr args)))
(let ((a1-raise (raise-into a1 (type-tag a2))))
(if a1-raise
(apply-generic op a1-raise a2)
(let ((a2-raise (raise-into a2 (type-tag a1))))
(if a2-raise
(apply-generic op a1 a2-raise)
(error "No method for these types -- APPLY-GENERIC"
(list op type-tags)))))))
(error "No method for these types -- APPLY-GENERIC"
(list op type-tags)))))))
;;;;;;;;;;;;;;;;;;;;;;;;
(define (raise x)
(let ((raise-proc (get 'raise (list (type-tag x)))))
(if raise-proc
(raise-proc (contents x))
#f)))
(define (project x)
(let ((proc (get 'project (list (type-tag x)))))
(if proc
(proc (contents x))
#f)))
(define (rational->integer? rat)
(if (equal? 'rational (type-tag rat))
(let ((v (/ (number (contents rat)) (denom (contents rat)))))
(if (integer? v)
v
#f))
#f))
(define (drop x)
(if (pair? x) ; 过滤 #t、#f 等没有 type-tag 的参数
(let ((v (rational->integer? x)))
(if v
v
(let ((x-project (project x)))
(if (and x-project
(equ? (raise x-project) x))
(drop x-project)
x))))
x))
(define (install-raise-package)
(put 'raise '(rational)
(lambda (x) (make-scheme-number (/ (number x) (denom x)))))
(put 'raise '(scheme-number)
(lambda (x) (make-complex-from-real-imag x 0)))
'done)
(define (install-project-package)
(define (real->rational x)
(let ((rat (rationalize (inexact->exact x) 1/100)))
(make-rational (numerator rat) (denominator rat))))
(put 'project '(scheme-number) real->rational)
(put 'project '(complex)
(lambda (x) (make-scheme-number (real-part x))))
'done)
;;;;;;;;;;;;;;;;;;;;;;;;;
(define (number x) (car x))
(define (denom x) (cdr x))
(define (install-rational-package)
(define (make-rat n d)
(let ((g (gcd n d)))
(cons (/ n g) (/ d g))))
(define (add-rat x y)
(make-rat (+ (* (number x) (denom y))
(* (number y) (denom x)))
(* (denom x) (denom y))))
(define (sub-rat x y)
(make-rat (- (* (number x) (denom y))
(* (number y) (denom x)))
(* (denom x) (denom y))))
(define (mul-rat x y)
(make-rat (* (number x) (number y))
(* (denom x) (denom y))))
(define (div-rat x y)
(make-rat (* (number x) (denom y))
(* (denom x) (number y))))
(define (equal-rat? x y)
(= (* (number x) (denom y))
(* (number y) (denom x))))
(define (tag x) (attach-tag 'rational x))
(put 'add '(rational rational)
(lambda (x y) (tag (add-rat x y))))
(put 'sub '(rational rational)
(lambda (x y) (tag (sub-rat x y))))
(put 'mul '(rational rational)
(lambda (x y) (tag (mul-rat x y))))
(put 'div '(rational rational)
(lambda (x y) (tag (div-rat x y))))
(put 'equ? '(rational rational)
(lambda (x y) (equal-rat? x y)))
(put 'sine '(rational)
(lambda (x) (make-scheme-number (sin (/ (number x) (denom x))))))
(put 'cosine '(rational)
(lambda (x) (make-scheme-number (cos (/ (number x) (denom x))))))
(put 'make 'rational
(lambda (n d) (tag (make-rat n d))))
'done)
(define (make-rational n d)
((get 'make 'rational) n d))
;;;;;;;;;;;;;;;;;;;;;;;;;
(define (install-scheme-number-package)
(define (tag x)
(attach-tag 'scheme-number x))
(put 'add '(scheme-number scheme-number)
(lambda (x y) (tag (+ x y))))
(put 'sub '(scheme-number scheme-number)
(lambda (x y) (tag (- x y))))
(put 'mul '(scheme-number scheme-number)
(lambda (x y) (tag (* x y))))
(put 'div '(scheme-number scheme-number)
(lambda (x y) (tag (/ x y))))
(put 'equ? '(scheme-number scheme-number)
(lambda (x y) (= x y)))
(put 'sine '(scheme-number)
(lambda (x) (tag (sin x))))
(put 'cosine '(scheme-number)
(lambda (x) (tag (cos x))))
(put 'make 'scheme-number
(lambda (x) (tag x)))
'done)
(define (make-scheme-number n)
((get 'make 'scheme-number) n))
;;;;;;;;;;;;;;;;;;;;;;;;;
(define (install-complex-package)
(define (make-from-real-imag x y)
((get 'make-from-real-imag '(rectangular)) x y))
(define (make-from-mag-ang r a)
((get 'make-from-mag-ang '(polar)) r a))
(define (add-complex z1 z2)
(make-from-real-imag (add (real-part z1) (real-part z2))
(add (imag-part z1) (imag-part z2))))
(define (sub-complex z1 z2)
(make-from-real-imag (sub (real-part z1) (real-part z2))
(sub (imag-part z1) (imag-part z2))))
(define (mul-complex z1 z2)
(make-from-mag-ang (mul (magnitude z1) (magnitude z2))
(add (angle z1) (angle z2))))
(define (div-complex z1 z2)
(make-from-mag-ang (div (magnitude z1) (magnitude z2))
(sub (angle z1) (angle z2))))
(define (equ-complex? z1 z2)
(and (equ? (real-part z1) (real-part z2))
(equ? (imag-part z1) (imag-part z2))))
(define (tag z) (attach-tag 'complex z))
(put 'add '(complex complex)
(lambda (x y) (tag (add-complex x y))))
(put 'sub '(complex complex)
(lambda (x y) (tag (sub-complex x y))))
(put 'mul '(complex complex)
(lambda (x y) (tag (mul-complex x y))))
(put 'div '(complex complex)
(lambda (x y) (tag (div-complex x y))))
(put 'equ? '(complex complex)
(lambda (x y) (equ-complex? x y)))
(put 'make-from-real-imag 'complex
(lambda (x y) (tag (make-from-real-imag x y))))
(put 'make-from-mag-ang 'complex
(lambda (r a) (tag (make-from-mag-ang r a))))
'done)
(define (make-complex-from-real-imag x y)
((get 'make-from-real-imag 'complex) x y))
(define (make-complex-from-mag-ang r a)
((get 'make-from-mag-ang 'complex) r a))
;;;;;;;;;;;;;;;;;;;;;;;;;
(module* main #f
(install-rectangular-package)
(install-polar-package)
(install-rational-package)
(install-scheme-number-package)
(install-complex-package)
(install-raise-package)
(install-project-package)
(define complex-val (make-complex-from-real-imag (make-rational 1 2) (make-rational 1 2)))
(define complex-val-2 (make-complex-from-real-imag (make-rational 1 2) -0.5))
1
' ( complex rectangular 0 . 1.0 )
1.0
0.479425538604203
0.8775825618903728
0.479425538604203
0.8775825618903728
)
| null | https://raw.githubusercontent.com/hjcapple/reading-sicp/7051d55dde841c06cf9326dc865d33d656702ecc/chapter_2/exercise_2_86.scm | scheme |
过滤 #t、#f 等没有 type-tag 的参数
| #lang racket
P137 - [ 练习 2.86 ]
(require "ch2support.scm")
(require (submod "complex_number_data_directed.scm" complex-op))
(define (add x y) (apply-generic 'add x y))
(define (sub x y) (apply-generic 'sub x y))
(define (mul x y) (apply-generic 'mul x y))
(define (div x y) (apply-generic 'div x y))
(define (equ? x y) (apply-generic 'equ? x y))
(define (sine x) (apply-generic 'sine x))
(define (cosine x) (apply-generic 'cosine x))
[ 见练习 2.78 ]
(define (attach-tag type-tag contents)
(if (number? contents)
contents
(cons type-tag contents)))
(define (type-tag datum)
(if (number? datum)
'scheme-number
(if (pair? datum)
(car datum)
(error "Bad tagged datum -- TYPE-TAG" datum))))
(define (contents datum)
(if (number? datum)
datum
(if (pair? datum)
(cdr datum)
(error "Bad tagged datum -- CONTENTS" datum))))
[ 见练习 2.84 ]
(define (raise-into x type)
(let ((x-type (type-tag x)))
(if (equal? x-type type)
x
(let ((x-raise (raise x)))
(if x-raise
(raise-into x-raise type)
#f)))))
(define (apply-generic op . args)
(let ((type-tags (map type-tag args)))
(let ((proc (get op type-tags)))
(if proc
(drop (apply proc (map contents args)))
(if (and (= (length args) 2)
防止 a1、a2 类型相同时死循环,见[练习 2.81 ]
(let ((a1 (car args))
(a2 (cadr args)))
(let ((a1-raise (raise-into a1 (type-tag a2))))
(if a1-raise
(apply-generic op a1-raise a2)
(let ((a2-raise (raise-into a2 (type-tag a1))))
(if a2-raise
(apply-generic op a1 a2-raise)
(error "No method for these types -- APPLY-GENERIC"
(list op type-tags)))))))
(error "No method for these types -- APPLY-GENERIC"
(list op type-tags)))))))
(define (raise x)
(let ((raise-proc (get 'raise (list (type-tag x)))))
(if raise-proc
(raise-proc (contents x))
#f)))
(define (project x)
(let ((proc (get 'project (list (type-tag x)))))
(if proc
(proc (contents x))
#f)))
(define (rational->integer? rat)
(if (equal? 'rational (type-tag rat))
(let ((v (/ (number (contents rat)) (denom (contents rat)))))
(if (integer? v)
v
#f))
#f))
(define (drop x)
(let ((v (rational->integer? x)))
(if v
v
(let ((x-project (project x)))
(if (and x-project
(equ? (raise x-project) x))
(drop x-project)
x))))
x))
(define (install-raise-package)
(put 'raise '(rational)
(lambda (x) (make-scheme-number (/ (number x) (denom x)))))
(put 'raise '(scheme-number)
(lambda (x) (make-complex-from-real-imag x 0)))
'done)
(define (install-project-package)
(define (real->rational x)
(let ((rat (rationalize (inexact->exact x) 1/100)))
(make-rational (numerator rat) (denominator rat))))
(put 'project '(scheme-number) real->rational)
(put 'project '(complex)
(lambda (x) (make-scheme-number (real-part x))))
'done)
(define (number x) (car x))
(define (denom x) (cdr x))
(define (install-rational-package)
(define (make-rat n d)
(let ((g (gcd n d)))
(cons (/ n g) (/ d g))))
(define (add-rat x y)
(make-rat (+ (* (number x) (denom y))
(* (number y) (denom x)))
(* (denom x) (denom y))))
(define (sub-rat x y)
(make-rat (- (* (number x) (denom y))
(* (number y) (denom x)))
(* (denom x) (denom y))))
(define (mul-rat x y)
(make-rat (* (number x) (number y))
(* (denom x) (denom y))))
(define (div-rat x y)
(make-rat (* (number x) (denom y))
(* (denom x) (number y))))
(define (equal-rat? x y)
(= (* (number x) (denom y))
(* (number y) (denom x))))
(define (tag x) (attach-tag 'rational x))
(put 'add '(rational rational)
(lambda (x y) (tag (add-rat x y))))
(put 'sub '(rational rational)
(lambda (x y) (tag (sub-rat x y))))
(put 'mul '(rational rational)
(lambda (x y) (tag (mul-rat x y))))
(put 'div '(rational rational)
(lambda (x y) (tag (div-rat x y))))
(put 'equ? '(rational rational)
(lambda (x y) (equal-rat? x y)))
(put 'sine '(rational)
(lambda (x) (make-scheme-number (sin (/ (number x) (denom x))))))
(put 'cosine '(rational)
(lambda (x) (make-scheme-number (cos (/ (number x) (denom x))))))
(put 'make 'rational
(lambda (n d) (tag (make-rat n d))))
'done)
(define (make-rational n d)
((get 'make 'rational) n d))
(define (install-scheme-number-package)
(define (tag x)
(attach-tag 'scheme-number x))
(put 'add '(scheme-number scheme-number)
(lambda (x y) (tag (+ x y))))
(put 'sub '(scheme-number scheme-number)
(lambda (x y) (tag (- x y))))
(put 'mul '(scheme-number scheme-number)
(lambda (x y) (tag (* x y))))
(put 'div '(scheme-number scheme-number)
(lambda (x y) (tag (/ x y))))
(put 'equ? '(scheme-number scheme-number)
(lambda (x y) (= x y)))
(put 'sine '(scheme-number)
(lambda (x) (tag (sin x))))
(put 'cosine '(scheme-number)
(lambda (x) (tag (cos x))))
(put 'make 'scheme-number
(lambda (x) (tag x)))
'done)
(define (make-scheme-number n)
((get 'make 'scheme-number) n))
(define (install-complex-package)
(define (make-from-real-imag x y)
((get 'make-from-real-imag '(rectangular)) x y))
(define (make-from-mag-ang r a)
((get 'make-from-mag-ang '(polar)) r a))
(define (add-complex z1 z2)
(make-from-real-imag (add (real-part z1) (real-part z2))
(add (imag-part z1) (imag-part z2))))
(define (sub-complex z1 z2)
(make-from-real-imag (sub (real-part z1) (real-part z2))
(sub (imag-part z1) (imag-part z2))))
(define (mul-complex z1 z2)
(make-from-mag-ang (mul (magnitude z1) (magnitude z2))
(add (angle z1) (angle z2))))
(define (div-complex z1 z2)
(make-from-mag-ang (div (magnitude z1) (magnitude z2))
(sub (angle z1) (angle z2))))
(define (equ-complex? z1 z2)
(and (equ? (real-part z1) (real-part z2))
(equ? (imag-part z1) (imag-part z2))))
(define (tag z) (attach-tag 'complex z))
(put 'add '(complex complex)
(lambda (x y) (tag (add-complex x y))))
(put 'sub '(complex complex)
(lambda (x y) (tag (sub-complex x y))))
(put 'mul '(complex complex)
(lambda (x y) (tag (mul-complex x y))))
(put 'div '(complex complex)
(lambda (x y) (tag (div-complex x y))))
(put 'equ? '(complex complex)
(lambda (x y) (equ-complex? x y)))
(put 'make-from-real-imag 'complex
(lambda (x y) (tag (make-from-real-imag x y))))
(put 'make-from-mag-ang 'complex
(lambda (r a) (tag (make-from-mag-ang r a))))
'done)
(define (make-complex-from-real-imag x y)
((get 'make-from-real-imag 'complex) x y))
(define (make-complex-from-mag-ang r a)
((get 'make-from-mag-ang 'complex) r a))
(module* main #f
(install-rectangular-package)
(install-polar-package)
(install-rational-package)
(install-scheme-number-package)
(install-complex-package)
(install-raise-package)
(install-project-package)
(define complex-val (make-complex-from-real-imag (make-rational 1 2) (make-rational 1 2)))
(define complex-val-2 (make-complex-from-real-imag (make-rational 1 2) -0.5))
1
' ( complex rectangular 0 . 1.0 )
1.0
0.479425538604203
0.8775825618903728
0.479425538604203
0.8775825618903728
)
|
ac0b8e81b996dff0d8ffbbea342a620adebd1e67555386463bad843d60cc21bf | shirok/Gauche-lisp15 | trace.scm | (use srfi-42)
(use gauche.parameter)
(define-class <traced> ()
((orig-proc :init-keyword :orig-proc)
(name :init-keyword :name)))
(define nesting (make-parameter 0))
(define (indent) (make-string (* (nesting) 2)))
(define-method object-apply ((t <traced>) . args)
(print #"~(indent)Calling ~(~ t'name) with args:")
(do-ec [: arg (index i) args]
(begin
(format (current-output-port) "~a~2d: " (indent) i)
(pprint ($lisp->scheme arg) :length 6 :level 4)))
(rlet1 r (parameterize ((nesting (+ (nesting) 1)))
(apply (~ t'orig-proc) args))
(display #"~(indent)Result of ~(~ t'name): ")
(pprint ($lisp->scheme r) :length 6 :level 4)))
(define-syntax trace
(syntax-rules ()
[(_ X) (set! X (%make-trace X 'X))]))
(define (%make-trace proc name)
(when (is-a? proc <traced>)
(error "Already traced:" name))
(make <traced> :orig-proc proc :name name))
| null | https://raw.githubusercontent.com/shirok/Gauche-lisp15/d40163e6aa473608dd38a23f9cb67f13af676722/tools/trace.scm | scheme | (use srfi-42)
(use gauche.parameter)
(define-class <traced> ()
((orig-proc :init-keyword :orig-proc)
(name :init-keyword :name)))
(define nesting (make-parameter 0))
(define (indent) (make-string (* (nesting) 2)))
(define-method object-apply ((t <traced>) . args)
(print #"~(indent)Calling ~(~ t'name) with args:")
(do-ec [: arg (index i) args]
(begin
(format (current-output-port) "~a~2d: " (indent) i)
(pprint ($lisp->scheme arg) :length 6 :level 4)))
(rlet1 r (parameterize ((nesting (+ (nesting) 1)))
(apply (~ t'orig-proc) args))
(display #"~(indent)Result of ~(~ t'name): ")
(pprint ($lisp->scheme r) :length 6 :level 4)))
(define-syntax trace
(syntax-rules ()
[(_ X) (set! X (%make-trace X 'X))]))
(define (%make-trace proc name)
(when (is-a? proc <traced>)
(error "Already traced:" name))
(make <traced> :orig-proc proc :name name))
| |
d5ed918d743dcfe6787e33d18ac50660d27b982122e6d63bd8b14a2067252c3a | kxcteam/kxclib-ocaml | json.ml | open Kxclib.Json
let rec pp_jv ppf : jv -> unit = function
| `null -> fprintf ppf "`null"
| `bool b -> fprintf ppf "`bool %B" b
| `num f -> fprintf ppf "`num %F" f
| `str s -> fprintf ppf "`str %S" s
| `arr xs -> fprintf ppf "`arr %a" (List.pp pp_jv) xs
| `obj xs -> fprintf ppf "`obj %a" (List.pp (fun ppf (k, v) -> fprintf ppf "(%S, %a)" k pp_jv v)) xs
let string_of_jv = sprintf "%a" pp_jv
let gen_jv' ~has_non_printable_string =
let module G = QCheck2.Gen in
let f self size =
let (>>=) = G.bind in
let (>|=) = G.(>|=) in
let gen_null = G.pure `null in
let gen_bool = G.bool >|= (fun x -> `bool x) in
let gen_int = G.int >|= (fun x -> `num (float_of_int x)) in
let gen_float = G.float >|= (fun x -> `num x) in
let gen_num =
G.frequency [
5, gen_int;
2, gen_float;
] in
let gen_str =
G.frequency (
if has_non_printable_string
then [
5, G.small_string ~gen:G.printable;
1, G.small_string ~gen:G.char;
1, G.string_printable;
1, G.string;
]
else [
5, G.small_string ~gen:G.printable;
1, G.string_printable;
])
>|= (fun x -> `str x) in
let gen_atom = G.oneof [ gen_null; gen_bool; gen_num; gen_str; ] in
let gen_len =
sqrt (float_of_int size) |> int_of_float
|> function
| 0 | 1 -> G.pure 1
| n -> G.int_range 1 n in
let gen_arr =
(if size > 1 then
gen_len >>= fun len ->
G.list_size (G.int_bound len) (self (size/len))
else G.pure [])
>|= (fun xs -> `arr xs) in
let gen_fname =
G.frequency (
if has_non_printable_string
then [
10, G.small_string ~gen:G.printable;
2, G.small_string ~gen:G.char;
2, G.string_printable;
1, G.string;
]
else [
10, G.small_string ~gen:G.printable;
2, G.string_printable;
]) in
let gen_field size : (string*jv) G.t =
gen_fname >>= fun fname ->
self size >>= fun fval ->
G.pure (fname, fval) in
let gen_obj =
(if size > 1 then
gen_len >>= fun len ->
G.list_size (G.int_bound len) (gen_field (size/len))
>|= normalize_fields
else G.pure [])
>|= (fun fs -> `obj fs) in
match size with
| 0 -> G.pure `null
| 1 -> gen_atom
| _ -> (
G.frequency [
1, gen_atom;
2, gen_arr;
3, gen_obj;
]
)
in G.(sized @@ fix f)
let gen_jv = gen_jv' ~has_non_printable_string:true
| null | https://raw.githubusercontent.com/kxcteam/kxclib-ocaml/9c58f97bd56c7944c0a95ce17c130e2f258ef07f/unit_test/lib/json.ml | ocaml | open Kxclib.Json
let rec pp_jv ppf : jv -> unit = function
| `null -> fprintf ppf "`null"
| `bool b -> fprintf ppf "`bool %B" b
| `num f -> fprintf ppf "`num %F" f
| `str s -> fprintf ppf "`str %S" s
| `arr xs -> fprintf ppf "`arr %a" (List.pp pp_jv) xs
| `obj xs -> fprintf ppf "`obj %a" (List.pp (fun ppf (k, v) -> fprintf ppf "(%S, %a)" k pp_jv v)) xs
let string_of_jv = sprintf "%a" pp_jv
let gen_jv' ~has_non_printable_string =
let module G = QCheck2.Gen in
let f self size =
let (>>=) = G.bind in
let (>|=) = G.(>|=) in
let gen_null = G.pure `null in
let gen_bool = G.bool >|= (fun x -> `bool x) in
let gen_int = G.int >|= (fun x -> `num (float_of_int x)) in
let gen_float = G.float >|= (fun x -> `num x) in
let gen_num =
G.frequency [
5, gen_int;
2, gen_float;
] in
let gen_str =
G.frequency (
if has_non_printable_string
then [
5, G.small_string ~gen:G.printable;
1, G.small_string ~gen:G.char;
1, G.string_printable;
1, G.string;
]
else [
5, G.small_string ~gen:G.printable;
1, G.string_printable;
])
>|= (fun x -> `str x) in
let gen_atom = G.oneof [ gen_null; gen_bool; gen_num; gen_str; ] in
let gen_len =
sqrt (float_of_int size) |> int_of_float
|> function
| 0 | 1 -> G.pure 1
| n -> G.int_range 1 n in
let gen_arr =
(if size > 1 then
gen_len >>= fun len ->
G.list_size (G.int_bound len) (self (size/len))
else G.pure [])
>|= (fun xs -> `arr xs) in
let gen_fname =
G.frequency (
if has_non_printable_string
then [
10, G.small_string ~gen:G.printable;
2, G.small_string ~gen:G.char;
2, G.string_printable;
1, G.string;
]
else [
10, G.small_string ~gen:G.printable;
2, G.string_printable;
]) in
let gen_field size : (string*jv) G.t =
gen_fname >>= fun fname ->
self size >>= fun fval ->
G.pure (fname, fval) in
let gen_obj =
(if size > 1 then
gen_len >>= fun len ->
G.list_size (G.int_bound len) (gen_field (size/len))
>|= normalize_fields
else G.pure [])
>|= (fun fs -> `obj fs) in
match size with
| 0 -> G.pure `null
| 1 -> gen_atom
| _ -> (
G.frequency [
1, gen_atom;
2, gen_arr;
3, gen_obj;
]
)
in G.(sized @@ fix f)
let gen_jv = gen_jv' ~has_non_printable_string:true
| |
59e74281f98c8910c139d175c6e4e8c4e99dde7c6ca515312176ef42f7dd1444 | easyuc/EasyUC | ucTypedSpec.ml | (* UcTypedSpec module *)
(* Typed Specifications *)
open EcLocation
open EcSymbols
open EcTypes
open UcSpecTypedSpecCommon
(* maps and sets *)
module IdMap = Map.Make(String) (* domain: string = symbol *)
module IdSet = Set.Make(String)
(* we sometimes use "id" to stand for a symbol, and sometimes for a
located symbol (in which case we may use "uid" to stand for the
unlocated version) *)
let exists_id (id_map : 'a IdMap.t) (id : symbol) : bool =
IdMap.exists (fun key _ -> key = id) id_map
let id_map_domain (map : 'a IdMap.t) : IdSet.t =
IdSet.of_list (List.map fst (IdMap.bindings map))
module SL = (* domain: string list = symbol list *)
struct
type t = string list
let compare = Stdlib.compare
end
we often refer to elements of type symbol list as " qualified ids " ;
note that qsymbol stands for symbol list * symbol
note that qsymbol stands for symbol list * symbol *)
module QidMap = Map.Make(SL)
module QidSet = Set.Make(SL)
let exists_qid (qid_map : 'a QidMap.t) (qid : symbol list) : bool =
QidMap.exists (fun key _ -> key = qid) qid_map
let qid_map_domain (map : 'a QidMap.t) : QidSet.t =
QidSet.of_list (List.map fst (QidMap.bindings map))
type symb_pair = symbol * symbol
module SP = (* domain: string * string = symb_pair *)
struct
type t = string * string
let compare = Stdlib.compare
end
module IdPairMap = Map.Make(SP)
module IdPairSet = Set.Make(SP)
(* we often refer to elements of symb_pair as id pairs
for an id pair (x, y), x is the capitalized root name of a .uc
file, and y is the capitalized name of an interface, functionality
or simulator from that file *)
let exists_id_pair
(id_pair_map : 'a IdPairMap.t) (id_pair : symb_pair) : bool =
IdPairMap.exists (fun key _ -> key = id_pair) id_pair_map
let id_pair_map_domain (map : 'a IdPairMap.t) : IdPairSet.t =
IdPairSet.of_list (List.map fst (IdPairMap.bindings map))
let pp_qsymbol_abbrev
(root : symbol) (ppf : Format.formatter) ((xs, y) : qsymbol) : unit =
if xs = [root]
then pp_symbol ppf y
else pp_qsymbol ppf (xs, y)
let id_pair_to_qsymbol ((x, y) : symb_pair) : qsymbol = ([x], y)
let pp_id_pair (ppf : Format.formatter) (id_pair : symb_pair) =
pp_qsymbol ppf (id_pair_to_qsymbol id_pair)
let pp_id_pair_abbrev
(root : symbol) (ppf : Format.formatter) (id_pair : symb_pair) : unit =
pp_qsymbol_abbrev root ppf (id_pair_to_qsymbol id_pair)
let nonempty_qid_to_qsymbol (xs : SL.t) : qsymbol =
let len = List.length xs in
(Batteries.List.take (len - 1) xs, Batteries.List.last xs)
let nonempty_qid_to_string (xs : SL.t) : string =
List.fold_left (fun s x -> if s <> "" then s ^ "." ^ x else x) "" xs
let get_keys_as_sing_qids (m : 'a IdMap.t) : QidSet.t =
let ids = fst (List.split (IdMap.bindings m)) in
QidSet.of_list (List.map (fun id -> [id]) ids)
let indexed_map_to_list (mapind : ('o * int) IdMap.t) : 'o list =
let l = IdMap.fold (fun _ v l -> v :: l ) mapind [] in
let lord = List.sort (fun a1 a2 -> snd a1 - snd a2) l in
List.map (fun a -> fst a) lord
let filter_map (fm : 'a -> 'b option) (m : 'a IdMap.t) : 'b IdMap.t =
let flt =
IdMap.filter
(fun _ def ->
match fm def with
| Some _ -> true
| None -> false)
m in
IdMap.map
(fun def ->
match fm def with
| Some x -> x
| None -> raise (Failure "!impossible!"))
flt
let unlocm (lm : 'a located IdMap.t) : 'a IdMap.t =
IdMap.map (fun al -> unloc al) lm
located type plus an index , starting from 0
type ty_index = (ty * int) located
(* typed messages and functionality interfaces *)
type message_body_tyd =
{dir : msg_dir; (* direction of message *)
params_map : ty_index IdMap.t; (* message parameters: index is
parameter number *)
port : symbol option} (* optional port name - used in generating
EasyCrypt code *)
type basic_inter_body_tyd = message_body_tyd IdMap.t
(* inversion of direction *)
let invert_msg_dir (mdbt : message_body_tyd) : message_body_tyd =
{mdbt with
dir = invert_dir mdbt.dir}
let invert_basic_inter_body_tyd
(bibt : basic_inter_body_tyd) : basic_inter_body_tyd =
IdMap.map invert_msg_dir bibt
type inter_body_tyd =
| BasicTyd of basic_inter_body_tyd (* basic interface *)
| CompositeTyd of symbol IdMap.t (* composite interface; symbol is
name of basic interface -
with same root *)
let is_basic_tyd ibt =
match ibt with
| BasicTyd _ -> true
| CompositeTyd _ -> false
let is_composite_tyd ibt =
match ibt with
| BasicTyd _ -> false
| CompositeTyd _ -> true
type inter_tyd = inter_body_tyd located (* typed interface *)
(* state machines, typed functionalities and simulators *)
(* message and state expressions *)
type msg_expr_tyd =
{path : msg_path; (* message path *)
args : expr list located; (* message arguments *)
port_expr : expr option} (* message destination - port expr *)
type state_expr_tyd =
{id : psymbol; (* state to transition to *)
args : expr list located} (* arguments of new state *)
(* instructions *)
type send_and_transition_tyd =
{msg_expr : msg_expr_tyd; (* message to send *)
state_expr : state_expr_tyd} (* state to transition to *)
type bindings = ((EcIdent.t * EcTypes.ty) list)
type instruction_tyd_u =
| Assign of lhs * expr (* ordinary assignment *)
| Sample of lhs * expr (* sampling assignment *)
| ITE of expr * instruction_tyd list located * (* if-then-else *)
instruction_tyd list located option
| Match of expr * match_clause_tyd list located (* match instruction *)
| SendAndTransition of send_and_transition_tyd (* send and transition *)
| Fail (* failure *)
and instruction_tyd = instruction_tyd_u located
and match_clause_tyd = symbol * (bindings * instruction_tyd list located)
type msg_match_clause_tyd = (* message match clause *)
{msg_pat : msg_pat; (* message pattern *)
code : instruction_tyd list located} (* code of clause *)
type state_body_tyd =
{is_initial : bool; (* the initial state? *)
params : ty_index IdMap.t; (* typed parameters, index is
parameter number *)
vars : ty located IdMap.t; (* local variables *)
mmclauses : msg_match_clause_tyd list} (* message match clauses *)
type state_tyd = state_body_tyd located (* typed state *)
type party_body_tyd =
{serves : symbol list located list; (* what interfaces served by party *)
states : state_tyd IdMap.t} (* state machine *)
type party_tyd = party_body_tyd located (* typed party *)
type real_fun_body_tyd =
{params : (symb_pair * int) IdMap.t; (* names of composite direct
interfaces; index is
parameter number *)
id_dir_inter : symbol; (* name of composite direct
interface - with same
root *)
id_adv_inter : symbol option; (* optional name of composite
adversarial interface -
with same root *)
sub_funs : symb_pair IdMap.t; (* names of ideal
functionalities - pair
is (root, id) *)
parties : party_tyd IdMap.t} (* parties *)
type ideal_fun_body_tyd =
{id_dir_inter : symbol; (* name of composite direct interface -
with same root *)
id_adv_inter : symbol; (* name of basic adversarial interface -
with same root *)
states : state_tyd IdMap.t} (* state machine *)
type fun_body_tyd =
| FunBodyRealTyd of real_fun_body_tyd
| FunBodyIdealTyd of ideal_fun_body_tyd
let real_fun_body_tyd_of (fbt : fun_body_tyd) : real_fun_body_tyd =
match fbt with
| FunBodyRealTyd rfbt -> rfbt
| FunBodyIdealTyd _ -> UcMessage.failure "cannot happen"
let ideal_fun_body_tyd_of (fbt : fun_body_tyd) : ideal_fun_body_tyd =
match fbt with
| FunBodyRealTyd _ -> UcMessage.failure "cannot happen"
| FunBodyIdealTyd ifbt -> ifbt
let is_real_fun_body_tyd (fbt : fun_body_tyd) : bool =
match fbt with
| FunBodyRealTyd _ -> true
| FunBodyIdealTyd _ -> false
let id_dir_inter_of_fun_body_tyd (fbt : fun_body_tyd) : symbol =
match fbt with
| FunBodyRealTyd fbr -> fbr.id_dir_inter
| FunBodyIdealTyd fbi -> fbi.id_dir_inter
let id_adv_inter_of_fun_body_tyd (fbt : fun_body_tyd) : symbol option =
match fbt with
| FunBodyRealTyd fbr -> fbr.id_adv_inter
| FunBodyIdealTyd fbi -> Some fbi.id_adv_inter
type fun_tyd = fun_body_tyd located (* functionality *)
type sim_body_tyd =
{uses : symbol; (* basic adversarial interface
from ideal functionality - with
same root *)
sims : symbol; (* real functionality being
simulated - with same root *)
sims_arg_pair_ids : symb_pair list; (* arguments to real
functionality - pairs
(root, id), naming ideal
functionalities *)
states : state_tyd IdMap.t} (* state machine *)
type sim_tyd = sim_body_tyd located (* simulator *)
four identifer pair ( more precisely , pairs of symbols ) maps for
direct and adversarial interfaces , functionalities and simulators ;
their domains are disjoint
type arguments to IdPairMap.t are all located types
direct and adversarial interfaces, functionalities and simulators;
their domains are disjoint
type arguments to IdPairMap.t are all located types *)
type maps_tyd =
{dir_inter_map : inter_tyd IdPairMap.t; (* direct interfaces *)
adv_inter_map : inter_tyd IdPairMap.t; (* adversarial interfaces *)
fun_map : fun_tyd IdPairMap.t; (* functionalities *)
sim_map : sim_tyd IdPairMap.t} (* simulators *)
let exists_id_pair_maps_tyd
(maps : maps_tyd) (id_pair : symb_pair) : bool =
exists_id_pair maps.dir_inter_map id_pair ||
exists_id_pair maps.adv_inter_map id_pair ||
exists_id_pair maps.fun_map id_pair ||
exists_id_pair maps.sim_map id_pair
let exists_id_pair_inter_maps
(dir_inter_map : inter_tyd IdPairMap.t)
(adv_inter_map : inter_tyd IdPairMap.t)
(id_pair : symb_pair) : bool =
exists_id_pair dir_inter_map id_pair ||
exists_id_pair adv_inter_map id_pair
let inter_names (root : symbol) (maps : maps_tyd) : IdSet.t =
let i_n (map : inter_tyd IdPairMap.t) =
IdSet.of_list
(List.filter_map
(fun (id_pr, _) ->
if fst id_pr = root
then Some (snd id_pr)
else None)
(IdPairMap.bindings map)) in
IdSet.union (i_n maps.dir_inter_map) (i_n maps.adv_inter_map)
let real_fun_names (root : symbol) (maps : maps_tyd) : IdSet.t =
IdSet.of_list
(List.filter_map
(fun (id_pr, ft) ->
if fst id_pr = root && is_real_fun_body_tyd (unloc ft)
then Some (snd id_pr)
else None)
(IdPairMap.bindings maps.fun_map))
let ideal_fun_names (root : symbol) (maps : maps_tyd) : IdSet.t =
IdSet.of_list
(List.filter_map
(fun (id_pr, ft) ->
if fst id_pr = root && not (is_real_fun_body_tyd (unloc ft))
then Some (snd id_pr)
else None)
(IdPairMap.bindings maps.fun_map))
let sim_names (root : symbol) (maps : maps_tyd) : IdSet.t =
IdSet.of_list
(List.filter_map
(fun (id_pr, _) ->
if fst id_pr = root
then Some (snd id_pr)
else None)
(IdPairMap.bindings maps.sim_map))
(* interface names that are reachable from an interface *)
let inter_names_reach_inter
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
let reach (map : inter_tyd IdPairMap.t) : IdSet.t =
match IdPairMap.find_opt (root, id) map with
| None -> IdSet.empty
| Some it ->
match unloc it with
| BasicTyd _ -> IdSet.empty
| CompositeTyd mp ->
IdSet.of_list (List.map snd (IdMap.bindings mp)) in
IdSet.union
(IdSet.singleton id) (* include original id *)
(IdSet.union
only one will be non - empty
(reach maps.adv_inter_map))
(* interface names that are reachable from a functionality *)
let inter_names_reach_fun
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
match unloc (IdPairMap.find (root, id) maps.fun_map) with
| FunBodyRealTyd rfbt ->
IdSet.union
(inter_names_reach_inter root maps rfbt.id_dir_inter)
(match rfbt.id_adv_inter with
| None -> IdSet.empty
| Some adv_id -> inter_names_reach_inter root maps adv_id)
| FunBodyIdealTyd ifbt ->
IdSet.union
(inter_names_reach_inter root maps ifbt.id_dir_inter)
(IdSet.singleton ifbt.id_adv_inter) (* will be basic *)
(* interface names that are reachable from a simulator *)
let inter_names_reach_sim
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
let sbt = unloc (IdPairMap.find (root, id) maps.sim_map) in
IdSet.union
(IdSet.singleton sbt.uses) (* will be basic *)
(inter_names_reach_fun root maps sbt.sims)
let basic_direct_inter_names_of_real_fun
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
match unloc (IdPairMap.find (root, id) maps.fun_map) with
| FunBodyRealTyd rfbt ->
(match rfbt.id_adv_inter with
| None -> IdSet.empty
| Some adv_id ->
match unloc (IdPairMap.find (root, adv_id) maps.adv_inter_map) with
| BasicTyd _ -> UcMessage.failure "cannot happen"
| CompositeTyd mp ->
(IdSet.of_list (List.map snd (IdMap.bindings mp))))
| FunBodyIdealTyd _ -> UcMessage.failure "cannot happen"
(* typed top-level specifications *)
type typed_spec = maps_tyd
| null | https://raw.githubusercontent.com/easyuc/EasyUC/0ee14ef8b024a8e7acde1035d06afecbdcaec990/uc-dsl/ucdsl-proj/src/ucTypedSpec.ml | ocaml | UcTypedSpec module
Typed Specifications
maps and sets
domain: string = symbol
we sometimes use "id" to stand for a symbol, and sometimes for a
located symbol (in which case we may use "uid" to stand for the
unlocated version)
domain: string list = symbol list
domain: string * string = symb_pair
we often refer to elements of symb_pair as id pairs
for an id pair (x, y), x is the capitalized root name of a .uc
file, and y is the capitalized name of an interface, functionality
or simulator from that file
typed messages and functionality interfaces
direction of message
message parameters: index is
parameter number
optional port name - used in generating
EasyCrypt code
inversion of direction
basic interface
composite interface; symbol is
name of basic interface -
with same root
typed interface
state machines, typed functionalities and simulators
message and state expressions
message path
message arguments
message destination - port expr
state to transition to
arguments of new state
instructions
message to send
state to transition to
ordinary assignment
sampling assignment
if-then-else
match instruction
send and transition
failure
message match clause
message pattern
code of clause
the initial state?
typed parameters, index is
parameter number
local variables
message match clauses
typed state
what interfaces served by party
state machine
typed party
names of composite direct
interfaces; index is
parameter number
name of composite direct
interface - with same
root
optional name of composite
adversarial interface -
with same root
names of ideal
functionalities - pair
is (root, id)
parties
name of composite direct interface -
with same root
name of basic adversarial interface -
with same root
state machine
functionality
basic adversarial interface
from ideal functionality - with
same root
real functionality being
simulated - with same root
arguments to real
functionality - pairs
(root, id), naming ideal
functionalities
state machine
simulator
direct interfaces
adversarial interfaces
functionalities
simulators
interface names that are reachable from an interface
include original id
interface names that are reachable from a functionality
will be basic
interface names that are reachable from a simulator
will be basic
typed top-level specifications |
open EcLocation
open EcSymbols
open EcTypes
open UcSpecTypedSpecCommon
module IdSet = Set.Make(String)
let exists_id (id_map : 'a IdMap.t) (id : symbol) : bool =
IdMap.exists (fun key _ -> key = id) id_map
let id_map_domain (map : 'a IdMap.t) : IdSet.t =
IdSet.of_list (List.map fst (IdMap.bindings map))
struct
type t = string list
let compare = Stdlib.compare
end
we often refer to elements of type symbol list as " qualified ids " ;
note that qsymbol stands for symbol list * symbol
note that qsymbol stands for symbol list * symbol *)
module QidMap = Map.Make(SL)
module QidSet = Set.Make(SL)
let exists_qid (qid_map : 'a QidMap.t) (qid : symbol list) : bool =
QidMap.exists (fun key _ -> key = qid) qid_map
let qid_map_domain (map : 'a QidMap.t) : QidSet.t =
QidSet.of_list (List.map fst (QidMap.bindings map))
type symb_pair = symbol * symbol
struct
type t = string * string
let compare = Stdlib.compare
end
module IdPairMap = Map.Make(SP)
module IdPairSet = Set.Make(SP)
let exists_id_pair
(id_pair_map : 'a IdPairMap.t) (id_pair : symb_pair) : bool =
IdPairMap.exists (fun key _ -> key = id_pair) id_pair_map
let id_pair_map_domain (map : 'a IdPairMap.t) : IdPairSet.t =
IdPairSet.of_list (List.map fst (IdPairMap.bindings map))
let pp_qsymbol_abbrev
(root : symbol) (ppf : Format.formatter) ((xs, y) : qsymbol) : unit =
if xs = [root]
then pp_symbol ppf y
else pp_qsymbol ppf (xs, y)
let id_pair_to_qsymbol ((x, y) : symb_pair) : qsymbol = ([x], y)
let pp_id_pair (ppf : Format.formatter) (id_pair : symb_pair) =
pp_qsymbol ppf (id_pair_to_qsymbol id_pair)
let pp_id_pair_abbrev
(root : symbol) (ppf : Format.formatter) (id_pair : symb_pair) : unit =
pp_qsymbol_abbrev root ppf (id_pair_to_qsymbol id_pair)
let nonempty_qid_to_qsymbol (xs : SL.t) : qsymbol =
let len = List.length xs in
(Batteries.List.take (len - 1) xs, Batteries.List.last xs)
let nonempty_qid_to_string (xs : SL.t) : string =
List.fold_left (fun s x -> if s <> "" then s ^ "." ^ x else x) "" xs
let get_keys_as_sing_qids (m : 'a IdMap.t) : QidSet.t =
let ids = fst (List.split (IdMap.bindings m)) in
QidSet.of_list (List.map (fun id -> [id]) ids)
let indexed_map_to_list (mapind : ('o * int) IdMap.t) : 'o list =
let l = IdMap.fold (fun _ v l -> v :: l ) mapind [] in
let lord = List.sort (fun a1 a2 -> snd a1 - snd a2) l in
List.map (fun a -> fst a) lord
let filter_map (fm : 'a -> 'b option) (m : 'a IdMap.t) : 'b IdMap.t =
let flt =
IdMap.filter
(fun _ def ->
match fm def with
| Some _ -> true
| None -> false)
m in
IdMap.map
(fun def ->
match fm def with
| Some x -> x
| None -> raise (Failure "!impossible!"))
flt
let unlocm (lm : 'a located IdMap.t) : 'a IdMap.t =
IdMap.map (fun al -> unloc al) lm
located type plus an index , starting from 0
type ty_index = (ty * int) located
type message_body_tyd =
type basic_inter_body_tyd = message_body_tyd IdMap.t
let invert_msg_dir (mdbt : message_body_tyd) : message_body_tyd =
{mdbt with
dir = invert_dir mdbt.dir}
let invert_basic_inter_body_tyd
(bibt : basic_inter_body_tyd) : basic_inter_body_tyd =
IdMap.map invert_msg_dir bibt
type inter_body_tyd =
let is_basic_tyd ibt =
match ibt with
| BasicTyd _ -> true
| CompositeTyd _ -> false
let is_composite_tyd ibt =
match ibt with
| BasicTyd _ -> false
| CompositeTyd _ -> true
type msg_expr_tyd =
type state_expr_tyd =
type send_and_transition_tyd =
type bindings = ((EcIdent.t * EcTypes.ty) list)
type instruction_tyd_u =
instruction_tyd list located option
and instruction_tyd = instruction_tyd_u located
and match_clause_tyd = symbol * (bindings * instruction_tyd list located)
type state_body_tyd =
type party_body_tyd =
type real_fun_body_tyd =
type ideal_fun_body_tyd =
type fun_body_tyd =
| FunBodyRealTyd of real_fun_body_tyd
| FunBodyIdealTyd of ideal_fun_body_tyd
let real_fun_body_tyd_of (fbt : fun_body_tyd) : real_fun_body_tyd =
match fbt with
| FunBodyRealTyd rfbt -> rfbt
| FunBodyIdealTyd _ -> UcMessage.failure "cannot happen"
let ideal_fun_body_tyd_of (fbt : fun_body_tyd) : ideal_fun_body_tyd =
match fbt with
| FunBodyRealTyd _ -> UcMessage.failure "cannot happen"
| FunBodyIdealTyd ifbt -> ifbt
let is_real_fun_body_tyd (fbt : fun_body_tyd) : bool =
match fbt with
| FunBodyRealTyd _ -> true
| FunBodyIdealTyd _ -> false
let id_dir_inter_of_fun_body_tyd (fbt : fun_body_tyd) : symbol =
match fbt with
| FunBodyRealTyd fbr -> fbr.id_dir_inter
| FunBodyIdealTyd fbi -> fbi.id_dir_inter
let id_adv_inter_of_fun_body_tyd (fbt : fun_body_tyd) : symbol option =
match fbt with
| FunBodyRealTyd fbr -> fbr.id_adv_inter
| FunBodyIdealTyd fbi -> Some fbi.id_adv_inter
type sim_body_tyd =
four identifer pair ( more precisely , pairs of symbols ) maps for
direct and adversarial interfaces , functionalities and simulators ;
their domains are disjoint
type arguments to IdPairMap.t are all located types
direct and adversarial interfaces, functionalities and simulators;
their domains are disjoint
type arguments to IdPairMap.t are all located types *)
type maps_tyd =
let exists_id_pair_maps_tyd
(maps : maps_tyd) (id_pair : symb_pair) : bool =
exists_id_pair maps.dir_inter_map id_pair ||
exists_id_pair maps.adv_inter_map id_pair ||
exists_id_pair maps.fun_map id_pair ||
exists_id_pair maps.sim_map id_pair
let exists_id_pair_inter_maps
(dir_inter_map : inter_tyd IdPairMap.t)
(adv_inter_map : inter_tyd IdPairMap.t)
(id_pair : symb_pair) : bool =
exists_id_pair dir_inter_map id_pair ||
exists_id_pair adv_inter_map id_pair
let inter_names (root : symbol) (maps : maps_tyd) : IdSet.t =
let i_n (map : inter_tyd IdPairMap.t) =
IdSet.of_list
(List.filter_map
(fun (id_pr, _) ->
if fst id_pr = root
then Some (snd id_pr)
else None)
(IdPairMap.bindings map)) in
IdSet.union (i_n maps.dir_inter_map) (i_n maps.adv_inter_map)
let real_fun_names (root : symbol) (maps : maps_tyd) : IdSet.t =
IdSet.of_list
(List.filter_map
(fun (id_pr, ft) ->
if fst id_pr = root && is_real_fun_body_tyd (unloc ft)
then Some (snd id_pr)
else None)
(IdPairMap.bindings maps.fun_map))
let ideal_fun_names (root : symbol) (maps : maps_tyd) : IdSet.t =
IdSet.of_list
(List.filter_map
(fun (id_pr, ft) ->
if fst id_pr = root && not (is_real_fun_body_tyd (unloc ft))
then Some (snd id_pr)
else None)
(IdPairMap.bindings maps.fun_map))
let sim_names (root : symbol) (maps : maps_tyd) : IdSet.t =
IdSet.of_list
(List.filter_map
(fun (id_pr, _) ->
if fst id_pr = root
then Some (snd id_pr)
else None)
(IdPairMap.bindings maps.sim_map))
let inter_names_reach_inter
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
let reach (map : inter_tyd IdPairMap.t) : IdSet.t =
match IdPairMap.find_opt (root, id) map with
| None -> IdSet.empty
| Some it ->
match unloc it with
| BasicTyd _ -> IdSet.empty
| CompositeTyd mp ->
IdSet.of_list (List.map snd (IdMap.bindings mp)) in
IdSet.union
(IdSet.union
only one will be non - empty
(reach maps.adv_inter_map))
let inter_names_reach_fun
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
match unloc (IdPairMap.find (root, id) maps.fun_map) with
| FunBodyRealTyd rfbt ->
IdSet.union
(inter_names_reach_inter root maps rfbt.id_dir_inter)
(match rfbt.id_adv_inter with
| None -> IdSet.empty
| Some adv_id -> inter_names_reach_inter root maps adv_id)
| FunBodyIdealTyd ifbt ->
IdSet.union
(inter_names_reach_inter root maps ifbt.id_dir_inter)
let inter_names_reach_sim
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
let sbt = unloc (IdPairMap.find (root, id) maps.sim_map) in
IdSet.union
(inter_names_reach_fun root maps sbt.sims)
let basic_direct_inter_names_of_real_fun
(root : symbol) (maps : maps_tyd) (id : symbol) : IdSet.t =
match unloc (IdPairMap.find (root, id) maps.fun_map) with
| FunBodyRealTyd rfbt ->
(match rfbt.id_adv_inter with
| None -> IdSet.empty
| Some adv_id ->
match unloc (IdPairMap.find (root, adv_id) maps.adv_inter_map) with
| BasicTyd _ -> UcMessage.failure "cannot happen"
| CompositeTyd mp ->
(IdSet.of_list (List.map snd (IdMap.bindings mp))))
| FunBodyIdealTyd _ -> UcMessage.failure "cannot happen"
type typed_spec = maps_tyd
|
d128bbe3567f8e1a2ce452fd6173731f54a1a11637a77bc426a2f8fd96bd8242 | originrose/cortex | project.clj | (defproject thinktopic/cortex "0.9.23-SNAPSHOT"
:description "A neural network toolkit for Clojure."
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0-alpha17"]
[thinktopic/think.datatype "0.3.17"]
[com.github.fommil.netlib/all "1.1.2" :extension "pom"]
[com.taoensso/nippy "2.13.0"]
;; Change the following dep to depend on different versions of CUDA
;[org.bytedeco.javacpp-presets/cuda "7.5-1.2"]
[org.bytedeco.javacpp-presets/cuda "8.0-1.2"]
[thinktopic/think.parallel "0.3.7"]
[thinktopic/think.resource "1.2.1"]
[org.clojure/math.combinatorics "0.1.4"]]
:java-source-paths ["java"]
:profiles {:dev {:source-paths ["src" "test/cljc" "test/clj"]}
:test {:source-paths ["src" "test/cljc" "test/clj"]}
:cpu-only {:test-selectors {:default (complement :gpu)}
:source-paths ["src" "test/cljc" "test/clj"]}}
:plugins [[lein-codox "0.10.2"]])
| null | https://raw.githubusercontent.com/originrose/cortex/94b1430538e6187f3dfd1697c36ff2c62b475901/project.clj | clojure | Change the following dep to depend on different versions of CUDA
[org.bytedeco.javacpp-presets/cuda "7.5-1.2"] | (defproject thinktopic/cortex "0.9.23-SNAPSHOT"
:description "A neural network toolkit for Clojure."
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0-alpha17"]
[thinktopic/think.datatype "0.3.17"]
[com.github.fommil.netlib/all "1.1.2" :extension "pom"]
[com.taoensso/nippy "2.13.0"]
[org.bytedeco.javacpp-presets/cuda "8.0-1.2"]
[thinktopic/think.parallel "0.3.7"]
[thinktopic/think.resource "1.2.1"]
[org.clojure/math.combinatorics "0.1.4"]]
:java-source-paths ["java"]
:profiles {:dev {:source-paths ["src" "test/cljc" "test/clj"]}
:test {:source-paths ["src" "test/cljc" "test/clj"]}
:cpu-only {:test-selectors {:default (complement :gpu)}
:source-paths ["src" "test/cljc" "test/clj"]}}
:plugins [[lein-codox "0.10.2"]])
|
c78a47a86cf3ba20ab7b98e75d956e3aea80ffe1c6049fa3bd34dd1913c41ec3 | Elzair/nazghul | kern-init.scm | (kern-cfg-set
;; This is the image file for the UI border. The pieces need to be arranged in
;; a specific order in this image.
"frame-image-filename" "frame.png"
;; These are the letters used by the console, etc, in the UI. The character
;; sprites need to be arranged in a specific order in this image.
"ascii-image-filename" "charset.png"
;; This is the cursor prompt used by the command window in the UI. It should
have four animation frames .
"cursor-image-filename" "cursor.png"
This is the script file run when the user selects the " Start New Game "
;; option from the main menu.
"new-game-filename" "start-new-game.scm"
This is the script file run when the user selects the " Journey Onward "
;; option from the main menu. It lists the current save files.
"save-game-filename" "saved-games.scm"
This is the script file run when the user selects the " Tutorial "
;; option from the main menu.
"tutorial-filename" "tutorial.scm"
;; This is the script file which runs the demo scene on startup.
"demo-filename" "demo.scm"
;; These are the filenames of the splash image shown on startup for the
;; various supported screen sizes. The format of the key must be
;; <width>x<height>-splash-image-filename.
"1280x960-splash-image-filename" "splash.png"
"640x480-splash-image-filename" "640x480_splash.png"
"800x480-splash-image-filename" "640x480_splash.png"
;; This is the image for the sprite pieces of the progress bar.
"progress-bar-image-filename" "progress_bar_image.png"
)
| null | https://raw.githubusercontent.com/Elzair/nazghul/8f3a45ed6289cd9f469c4ff618d39366f2fbc1d8/worlds/haxima-1.002/kern-init.scm | scheme | This is the image file for the UI border. The pieces need to be arranged in
a specific order in this image.
These are the letters used by the console, etc, in the UI. The character
sprites need to be arranged in a specific order in this image.
This is the cursor prompt used by the command window in the UI. It should
option from the main menu.
option from the main menu. It lists the current save files.
option from the main menu.
This is the script file which runs the demo scene on startup.
These are the filenames of the splash image shown on startup for the
various supported screen sizes. The format of the key must be
<width>x<height>-splash-image-filename.
This is the image for the sprite pieces of the progress bar. | (kern-cfg-set
"frame-image-filename" "frame.png"
"ascii-image-filename" "charset.png"
have four animation frames .
"cursor-image-filename" "cursor.png"
This is the script file run when the user selects the " Start New Game "
"new-game-filename" "start-new-game.scm"
This is the script file run when the user selects the " Journey Onward "
"save-game-filename" "saved-games.scm"
This is the script file run when the user selects the " Tutorial "
"tutorial-filename" "tutorial.scm"
"demo-filename" "demo.scm"
"1280x960-splash-image-filename" "splash.png"
"640x480-splash-image-filename" "640x480_splash.png"
"800x480-splash-image-filename" "640x480_splash.png"
"progress-bar-image-filename" "progress_bar_image.png"
)
|
5ac96005bfcd31ebfa4d99f4dc5f884470c69509678fa6d578b4641f70599b79 | valis/hoq | Patterns.hs | # LANGUAGE ExistentialQuantification #
module TypeChecking.Expressions.Patterns
( typeCheckPatterns, typeCheckPattern
, TermsInCtx(..), TermInCtx(..)
) where
import Data.Void
import Control.Monad
import Semantics
import Semantics.Value as V
import Semantics.Pattern as P
import Syntax as S
import Syntax.ErrorDoc
import TypeChecking.Context as C
import TypeChecking.Monad
import TypeChecking.Expressions.Utils
import Normalization
data TermInCtx b = forall a. Eq a => TermInCtx (Ctx String (Type Semantics) b a) (Pattern b a) (Term Semantics a)
data TermsInCtx b = forall a. Eq a => TermsInCtx (Ctx String (Type Semantics) b a) (Patterns b a) [Term Semantics a] (Type Semantics a)
unexpectedPatternErrorMsg :: Posn -> Ctx String (Type Semantics) Void a -> Term Semantics a -> Error
unexpectedPatternErrorMsg pos ctx ty = Error TypeMismatch $
emsgLC pos "" $ pretty "Unexpected pattern"
$$ pretty "Expected type:" <+> prettyOpen ctx ty
typeCheckPattern :: (Monad m, Eq a) => Ctx String (Type Semantics) Void a
-> Type Semantics a -> Term PName b -> TCM m (Bool, TermInCtx a)
typeCheckPattern ctx (Type (Apply (Semantics _ Interval) _) _) (Apply (pos, Ident "left") pats) = do
unless (null pats) $ warn [tooManyArgs pos]
return (False, TermInCtx C.Nil (PatICon ILeft) $ iCon ILeft)
typeCheckPattern ctx (Type (Apply (Semantics _ Interval) _) _) (Apply (pos, Ident "right") pats) = do
unless (null pats) $ warn [tooManyArgs pos]
return (False, TermInCtx C.Nil (PatICon IRight) $ iCon IRight)
typeCheckPattern ctx ty@(Type (Apply (Semantics _ (DataType _ 0)) _) _) (Apply (_, Operator "") _) =
return (True, TermInCtx (Snoc C.Nil "_" ty) (PatVar "_") bvar)
typeCheckPattern ctx (Type ty _) (Apply (pos, Operator "") _) =
throwError [Error Other $ emsgLC pos "" $ pretty "Expected non-empty type:" <+> prettyOpen ctx ty]
typeCheckPattern ctx ty (Apply (_, Ident "_") []) = return (False, TermInCtx (Snoc C.Nil "_" ty) (PatVar "_") bvar)
typeCheckPattern ctx ty@(Type (Apply (Semantics _ (DataType dt n)) params) _) (Apply (pos, var) []) = do
cons <- lift $ getConstructor var $ Just (dt, params)
case (cons, var) of
((_, con@(Apply (Semantics syn (DCon dt i _ _)) _), conds, _, Type conType _):_, _) -> if isDataType conType
then return (False, TermInCtx C.Nil (PatDCon syn dt i n conds params P.Nil) con)
else throwError [notEnoughArgs pos $ nameToPrefix var]
(_, Ident var') -> return (False, TermInCtx (Snoc C.Nil var' ty) (PatVar var') bvar)
_ -> throwError [unexpectedPatternErrorMsg pos ctx $ getType ty]
where
isDataType :: Term Semantics a -> Bool
isDataType (Lambda t) = isDataType t
isDataType (Apply (Semantics _ DataType{}) _) = True
isDataType _ = False
typeCheckPattern ctx ty (Apply (pos, Ident var) []) = return (False, TermInCtx (Snoc C.Nil var ty) (PatVar var) bvar)
typeCheckPattern ctx (Type (Apply (Semantics _ (DataType dt n)) params) _) (Apply (pos, conName) pats) = do
cons <- lift $ getConstructor conName $ Just (dt, params)
case cons of
(_, con@(Apply (Semantics syn (DCon dt i _ _)) _), conds, _, conType):_ -> do
(bf, TermsInCtx ctx' rtpats terms (Type ty' _)) <- typeCheckPatterns ctx (nfType WHNF conType) pats
case nf WHNF ty' of
Apply (Semantics _ DataType{}) _ ->
return (bf, TermInCtx ctx' (PatDCon syn dt i n conds params rtpats) $ apps (fmap (liftBase ctx') con) terms)
_ -> throwError [notEnoughArgs pos $ nameToPrefix conName]
_ -> throwError [notInScope pos "data constructor" $ nameToPrefix conName]
typeCheckPattern ctx (Type ty _) (Apply (pos, _) _) = throwError [unexpectedPatternErrorMsg pos ctx ty]
typeCheckPattern _ _ _ = error "typeCheckPattern"
typeCheckPatterns :: (Monad m, Eq a) => Ctx String (Type Semantics) Void a
-> Type Semantics a -> [Term PName b] -> TCM m (Bool, TermsInCtx a)
typeCheckPatterns _ ty [] = return (False, TermsInCtx C.Nil P.Nil [] ty)
typeCheckPatterns ctx (Type (Apply p@(Semantics _ (V.Pi k1 k2)) [a, b]) _) (pat:pats) = do
let a' = Type (nf WHNF a) k1
(bf1, TermInCtx ctx' rtpat te) <- typeCheckPattern ctx a' pat
let b' = case b of
Lambda{} -> instantiate1 te $ fmap (fmap $ liftBase ctx') $ snd (dropOnePi p a b)
_ -> fmap (liftBase ctx') b
(bf2, TermsInCtx ctx'' rtpats tes ty) <- typeCheckPatterns (ctx C.+++ ctx') (Type (nf WHNF b') k2) pats
return (bf1 || bf2, TermsInCtx (ctx' C.+++ ctx'') (Cons rtpat rtpats) (fmap (liftBase ctx'') te : tes) ty)
typeCheckPatterns _ _ (Apply (pos, _) _ : _) = throwError [tooManyArgs pos]
typeCheckPatterns _ _ _ = error "typeCheckPatterns"
| null | https://raw.githubusercontent.com/valis/hoq/9d2d2f5dee367ca5a609199856ca5964499bf33a/src/TypeChecking/Expressions/Patterns.hs | haskell | # LANGUAGE ExistentialQuantification #
module TypeChecking.Expressions.Patterns
( typeCheckPatterns, typeCheckPattern
, TermsInCtx(..), TermInCtx(..)
) where
import Data.Void
import Control.Monad
import Semantics
import Semantics.Value as V
import Semantics.Pattern as P
import Syntax as S
import Syntax.ErrorDoc
import TypeChecking.Context as C
import TypeChecking.Monad
import TypeChecking.Expressions.Utils
import Normalization
data TermInCtx b = forall a. Eq a => TermInCtx (Ctx String (Type Semantics) b a) (Pattern b a) (Term Semantics a)
data TermsInCtx b = forall a. Eq a => TermsInCtx (Ctx String (Type Semantics) b a) (Patterns b a) [Term Semantics a] (Type Semantics a)
unexpectedPatternErrorMsg :: Posn -> Ctx String (Type Semantics) Void a -> Term Semantics a -> Error
unexpectedPatternErrorMsg pos ctx ty = Error TypeMismatch $
emsgLC pos "" $ pretty "Unexpected pattern"
$$ pretty "Expected type:" <+> prettyOpen ctx ty
typeCheckPattern :: (Monad m, Eq a) => Ctx String (Type Semantics) Void a
-> Type Semantics a -> Term PName b -> TCM m (Bool, TermInCtx a)
typeCheckPattern ctx (Type (Apply (Semantics _ Interval) _) _) (Apply (pos, Ident "left") pats) = do
unless (null pats) $ warn [tooManyArgs pos]
return (False, TermInCtx C.Nil (PatICon ILeft) $ iCon ILeft)
typeCheckPattern ctx (Type (Apply (Semantics _ Interval) _) _) (Apply (pos, Ident "right") pats) = do
unless (null pats) $ warn [tooManyArgs pos]
return (False, TermInCtx C.Nil (PatICon IRight) $ iCon IRight)
typeCheckPattern ctx ty@(Type (Apply (Semantics _ (DataType _ 0)) _) _) (Apply (_, Operator "") _) =
return (True, TermInCtx (Snoc C.Nil "_" ty) (PatVar "_") bvar)
typeCheckPattern ctx (Type ty _) (Apply (pos, Operator "") _) =
throwError [Error Other $ emsgLC pos "" $ pretty "Expected non-empty type:" <+> prettyOpen ctx ty]
typeCheckPattern ctx ty (Apply (_, Ident "_") []) = return (False, TermInCtx (Snoc C.Nil "_" ty) (PatVar "_") bvar)
typeCheckPattern ctx ty@(Type (Apply (Semantics _ (DataType dt n)) params) _) (Apply (pos, var) []) = do
cons <- lift $ getConstructor var $ Just (dt, params)
case (cons, var) of
((_, con@(Apply (Semantics syn (DCon dt i _ _)) _), conds, _, Type conType _):_, _) -> if isDataType conType
then return (False, TermInCtx C.Nil (PatDCon syn dt i n conds params P.Nil) con)
else throwError [notEnoughArgs pos $ nameToPrefix var]
(_, Ident var') -> return (False, TermInCtx (Snoc C.Nil var' ty) (PatVar var') bvar)
_ -> throwError [unexpectedPatternErrorMsg pos ctx $ getType ty]
where
isDataType :: Term Semantics a -> Bool
isDataType (Lambda t) = isDataType t
isDataType (Apply (Semantics _ DataType{}) _) = True
isDataType _ = False
typeCheckPattern ctx ty (Apply (pos, Ident var) []) = return (False, TermInCtx (Snoc C.Nil var ty) (PatVar var) bvar)
typeCheckPattern ctx (Type (Apply (Semantics _ (DataType dt n)) params) _) (Apply (pos, conName) pats) = do
cons <- lift $ getConstructor conName $ Just (dt, params)
case cons of
(_, con@(Apply (Semantics syn (DCon dt i _ _)) _), conds, _, conType):_ -> do
(bf, TermsInCtx ctx' rtpats terms (Type ty' _)) <- typeCheckPatterns ctx (nfType WHNF conType) pats
case nf WHNF ty' of
Apply (Semantics _ DataType{}) _ ->
return (bf, TermInCtx ctx' (PatDCon syn dt i n conds params rtpats) $ apps (fmap (liftBase ctx') con) terms)
_ -> throwError [notEnoughArgs pos $ nameToPrefix conName]
_ -> throwError [notInScope pos "data constructor" $ nameToPrefix conName]
typeCheckPattern ctx (Type ty _) (Apply (pos, _) _) = throwError [unexpectedPatternErrorMsg pos ctx ty]
typeCheckPattern _ _ _ = error "typeCheckPattern"
typeCheckPatterns :: (Monad m, Eq a) => Ctx String (Type Semantics) Void a
-> Type Semantics a -> [Term PName b] -> TCM m (Bool, TermsInCtx a)
typeCheckPatterns _ ty [] = return (False, TermsInCtx C.Nil P.Nil [] ty)
typeCheckPatterns ctx (Type (Apply p@(Semantics _ (V.Pi k1 k2)) [a, b]) _) (pat:pats) = do
let a' = Type (nf WHNF a) k1
(bf1, TermInCtx ctx' rtpat te) <- typeCheckPattern ctx a' pat
let b' = case b of
Lambda{} -> instantiate1 te $ fmap (fmap $ liftBase ctx') $ snd (dropOnePi p a b)
_ -> fmap (liftBase ctx') b
(bf2, TermsInCtx ctx'' rtpats tes ty) <- typeCheckPatterns (ctx C.+++ ctx') (Type (nf WHNF b') k2) pats
return (bf1 || bf2, TermsInCtx (ctx' C.+++ ctx'') (Cons rtpat rtpats) (fmap (liftBase ctx'') te : tes) ty)
typeCheckPatterns _ _ (Apply (pos, _) _ : _) = throwError [tooManyArgs pos]
typeCheckPatterns _ _ _ = error "typeCheckPatterns"
| |
cc9fee88f3588fd118106ababc7086786e1efaf8ddb8e85ee2d3a3ad29881256 | Heasummn/Crab-ML | convTypes.ml | open CrabEnv
open CrabParseTree
open Error
(* Ew globals. TODO: Remove this. *)
let glob_context = ref base_ctx
let conv_type ctx tp =
match Table.lookup (Symbol.symbol tp) ctx.types with
| Some tp -> tp
| None -> raise(TypeError("Unknown type " ^ tp))
let conv_lit lit =
let data = match lit.data with
| Integer(x) -> CrabAst.Integer(x)
| Float(x) -> CrabAst.Float(x)
| Bool(x) -> CrabAst.Bool(x)
in
{ CrabAst.data = data; CrabAst.position = lit.position; CrabAst.tp = Types.TEmpty }
let rec conv_expr expr =
let data = match expr.data with
| Paren expr -> CrabAst.Paren(conv_expr expr)
| Neg expr -> CrabAst.Neg(conv_expr expr)
| BinOp(e1, op, e2) -> CrabAst.BinOp(conv_expr e1, op, conv_expr e2)
| Lit l1 -> CrabAst.Lit(conv_lit l1)
| Call(name, args) -> CrabAst.Call(name, List.map conv_expr args)
| Var x -> CrabAst.Var(x)
| Assign(tp, value, body) -> let (name, tp) = tp in let tp = conv_type !glob_context tp in
CrabAst.Assign((name, tp), conv_expr value, conv_expr body)
in
{ CrabAst.data = data; CrabAst.position = expr.position; CrabAst.tp = Types.TEmpty; }
let conv_toplevel expr =
let conv_type = conv_type !glob_context in
let data = match expr.data with
| Func((name, ret), (names, types), body) ->
let types = List.map conv_type types in
let ret = conv_type ret in
let body = conv_expr body in
let types = Types.list_arrow types in
Some (CrabAst.Func((name, ret), (names, types), body))
| Operator((name, ret), (names, types), body) ->
let types = List.map conv_type types in
let ret = conv_type ret in
let body = conv_expr body in
let types = Types.list_arrow types in
Some (CrabAst.Operator((name, ret), (names, types), body))
| Extern((name, ret), (names, types)) ->
let types = List.map conv_type types in
let ret = conv_type ret in
let types = Types.list_arrow types in
Some (CrabAst.Extern((name, ret), (names, types)))
| Typedef (name, value) ->
let value = conv_type value in
let types = Table.add (Symbol.symbol name) value !glob_context.types in
let ctx = { !glob_context with types } in
glob_context := ctx;
None
in
(data, expr.position, Types.TEmpty)
let convTree ctx tree = glob_context := ctx;
Rmove anything that is None
let data = List.filter
(fun (data, _, _) -> match data with
| Some _ -> true
| None -> false)
(List.map conv_toplevel tree)
in
(* Convert all the options to a value *)
let data = List.map (fun (data, loc, tp) -> match data with
| Some x -> (x, loc, tp)
| None -> assert false) data
in
List.map (fun (data, position, tp) ->
{CrabAst.data = data; CrabAst.position = position; CrabAst.tp = tp}
) data | null | https://raw.githubusercontent.com/Heasummn/Crab-ML/45a79373a620878b9c24464a7e009ecf3ea7f908/src/CrabPasses/convTypes.ml | ocaml | Ew globals. TODO: Remove this.
Convert all the options to a value | open CrabEnv
open CrabParseTree
open Error
let glob_context = ref base_ctx
let conv_type ctx tp =
match Table.lookup (Symbol.symbol tp) ctx.types with
| Some tp -> tp
| None -> raise(TypeError("Unknown type " ^ tp))
let conv_lit lit =
let data = match lit.data with
| Integer(x) -> CrabAst.Integer(x)
| Float(x) -> CrabAst.Float(x)
| Bool(x) -> CrabAst.Bool(x)
in
{ CrabAst.data = data; CrabAst.position = lit.position; CrabAst.tp = Types.TEmpty }
let rec conv_expr expr =
let data = match expr.data with
| Paren expr -> CrabAst.Paren(conv_expr expr)
| Neg expr -> CrabAst.Neg(conv_expr expr)
| BinOp(e1, op, e2) -> CrabAst.BinOp(conv_expr e1, op, conv_expr e2)
| Lit l1 -> CrabAst.Lit(conv_lit l1)
| Call(name, args) -> CrabAst.Call(name, List.map conv_expr args)
| Var x -> CrabAst.Var(x)
| Assign(tp, value, body) -> let (name, tp) = tp in let tp = conv_type !glob_context tp in
CrabAst.Assign((name, tp), conv_expr value, conv_expr body)
in
{ CrabAst.data = data; CrabAst.position = expr.position; CrabAst.tp = Types.TEmpty; }
let conv_toplevel expr =
let conv_type = conv_type !glob_context in
let data = match expr.data with
| Func((name, ret), (names, types), body) ->
let types = List.map conv_type types in
let ret = conv_type ret in
let body = conv_expr body in
let types = Types.list_arrow types in
Some (CrabAst.Func((name, ret), (names, types), body))
| Operator((name, ret), (names, types), body) ->
let types = List.map conv_type types in
let ret = conv_type ret in
let body = conv_expr body in
let types = Types.list_arrow types in
Some (CrabAst.Operator((name, ret), (names, types), body))
| Extern((name, ret), (names, types)) ->
let types = List.map conv_type types in
let ret = conv_type ret in
let types = Types.list_arrow types in
Some (CrabAst.Extern((name, ret), (names, types)))
| Typedef (name, value) ->
let value = conv_type value in
let types = Table.add (Symbol.symbol name) value !glob_context.types in
let ctx = { !glob_context with types } in
glob_context := ctx;
None
in
(data, expr.position, Types.TEmpty)
let convTree ctx tree = glob_context := ctx;
Rmove anything that is None
let data = List.filter
(fun (data, _, _) -> match data with
| Some _ -> true
| None -> false)
(List.map conv_toplevel tree)
in
let data = List.map (fun (data, loc, tp) -> match data with
| Some x -> (x, loc, tp)
| None -> assert false) data
in
List.map (fun (data, position, tp) ->
{CrabAst.data = data; CrabAst.position = position; CrabAst.tp = tp}
) data |
4b46a7218e64a0181abe292a2dac146bee48b1d41e125451d5419b0ad5ca93f1 | joyofclojure/book-source | chess_test.clj | (ns joy.chess-test
(:use joy.chess
clojure.test))
(deftest test-lookup
(is (= (lookup (initial-board) "a8") \r)))
| null | https://raw.githubusercontent.com/joyofclojure/book-source/b76ef15248dac88c7b1c77c2d461f3aa522a1461/test/joy/chess_test.clj | clojure | (ns joy.chess-test
(:use joy.chess
clojure.test))
(deftest test-lookup
(is (= (lookup (initial-board) "a8") \r)))
| |
c74ce5d5c2dfb886126be58c91143e49ae25214d35130a94402ea298460a4fe5 | haskell-jp/makeMistakesToLearnHaskell | Error.hs | {-# OPTIONS_GHC -Wno-unused-imports #-}
module Education.MakeMistakesToLearnHaskell.Error where
#include <imports/external.hs>
import Education.MakeMistakesToLearnHaskell.Env (appName)
dieWhenNothing :: String -> Maybe a -> IO a
dieWhenNothing _ (Just x) = return x
dieWhenNothing msg _ = die msg
die :: String -> IO a
die msg = Exit.die $ appName ++ ": ERROR: " ++ msg
throwWhenLeft :: Exception e => Either e a -> IO a
throwWhenLeft = either throwIO return
| null | https://raw.githubusercontent.com/haskell-jp/makeMistakesToLearnHaskell/1174d5c6bb82e57622be8033607a4d049e30ae7e/src/Education/MakeMistakesToLearnHaskell/Error.hs | haskell | # OPTIONS_GHC -Wno-unused-imports # |
module Education.MakeMistakesToLearnHaskell.Error where
#include <imports/external.hs>
import Education.MakeMistakesToLearnHaskell.Env (appName)
dieWhenNothing :: String -> Maybe a -> IO a
dieWhenNothing _ (Just x) = return x
dieWhenNothing msg _ = die msg
die :: String -> IO a
die msg = Exit.die $ appName ++ ": ERROR: " ++ msg
throwWhenLeft :: Exception e => Either e a -> IO a
throwWhenLeft = either throwIO return
|
abda68858653b68af60dbc0166f9672ab3322755943fb217d241aec617c8d237 | rbkmoney/genlib | genlib_opts_tests.erl | %%
-module(genlib_opts_tests).
-include_lib("eunit/include/eunit.hrl").
-spec test() -> _.
-type testcase() :: {_, fun()}.
-spec take_test_() -> [testcase()].
take_test_() ->
Opts = [{hey, "oh"}, {listen, "what"}, {i, "say oh"}, {come, "back and..."}],
[
?_assertEqual(
{"oh", [{listen, "what"}, {i, "say oh"}, {come, "back and..."}]},
genlib_opts:take(hey, Opts)
),
?_assertEqual(
{undefined, [{hey, "oh"}, {listen, "what"}, {i, "say oh"}, {come, "back and..."}]},
genlib_opts:take(hell, Opts)
),
?_assertEqual(
{42, [{hey, "oh"}, {listen, "what"}, {i, "say oh"}, {come, "back and..."}]},
genlib_opts:take(hell, Opts, 42)
),
?_assertEqual(
{["oh", undefined, "what", "say oh", "back and..."], []},
lists:foldl(
fun(K, {A, O1}) ->
{V, O2} = genlib_opts:take(K, O1),
{A ++ [V], O2}
end,
{[], Opts},
[hey, honey, listen, i, come]
)
)
].
| null | https://raw.githubusercontent.com/rbkmoney/genlib/4565a8d73f34a0b78cca32c9cd2b97d298bdadf8/test/genlib_opts_tests.erl | erlang |
-module(genlib_opts_tests).
-include_lib("eunit/include/eunit.hrl").
-spec test() -> _.
-type testcase() :: {_, fun()}.
-spec take_test_() -> [testcase()].
take_test_() ->
Opts = [{hey, "oh"}, {listen, "what"}, {i, "say oh"}, {come, "back and..."}],
[
?_assertEqual(
{"oh", [{listen, "what"}, {i, "say oh"}, {come, "back and..."}]},
genlib_opts:take(hey, Opts)
),
?_assertEqual(
{undefined, [{hey, "oh"}, {listen, "what"}, {i, "say oh"}, {come, "back and..."}]},
genlib_opts:take(hell, Opts)
),
?_assertEqual(
{42, [{hey, "oh"}, {listen, "what"}, {i, "say oh"}, {come, "back and..."}]},
genlib_opts:take(hell, Opts, 42)
),
?_assertEqual(
{["oh", undefined, "what", "say oh", "back and..."], []},
lists:foldl(
fun(K, {A, O1}) ->
{V, O2} = genlib_opts:take(K, O1),
{A ++ [V], O2}
end,
{[], Opts},
[hey, honey, listen, i, come]
)
)
].
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.