_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
cb6254097d624159149059aee03745dff0843bc18b0ea17a243bf0197929c31e | hidaris/thinking-dumps | drscheme-init.rkt | drscheme-init.scm - compatibility file for DrScheme
;; usage: (require "drscheme-init.scm")
;;; makes structs printable, and provides basic functionality for
;;; testing. This includes pretty-printing and tracing.
(module drscheme-init mzscheme
;; show the contents of define-datatype values
(print-struct #t)
(require (lib "pretty.ss"))
(provide (all-from (lib "pretty.ss")))
(require (lib "trace.ss"))
(provide (all-from (lib "trace.ss")))
(provide make-parameter)
(provide
run-experiment
run-tests!
stop-after-first-error
run-quietly
)
;; safely apply procedure fn to a list of args.
;; if successful, return (cons #t val)
;; if eopl:error is invoked, returns (cons #f string), where string is the
;; format string generated by eopl:error. If somebody manages to raise a
;; value other than an exception, then the raised value is reported.
(define apply-safely
(lambda (proc args)
(with-handlers ([(lambda (exn) #t) ; catch any error
(lambda (exn) ; evaluate to a failed test result
(cons #f
(if (exn? exn)
(exn-message exn)
exn)))])
(let ([actual (apply proc args)])
(cons #t actual)))))
;; run-experiment :
;; ((a ...) -> b) * (a ...) * b * (b * b -> bool)
;; -> (cons bool b)
;; usage: (run-experiment fn args correct-answer equal-answer?)
;; Applies fn to args. Compares the result to correct-answer.
;; Returns (cons bool b) where bool indicates whether the
;; answer is correct.
(define run-experiment
(lambda (fn args correct-answer equal-answer?)
(let*
((result (apply-safely fn args))
;; ans is either the answer or the args to eopl:error
(error-thrown? (not (car result)))
(ans (cdr result)))
(cons
(if (eqv? correct-answer 'error)
error-thrown?
(equal-answer? ans correct-answer))
ans))))
(define stop-after-first-error (make-parameter #f))
(define run-quietly (make-parameter #t))
;; run-tests! : (arg -> outcome) * (any * any -> bool) * (list-of test)
;; -> unspecified
;; where:
;; test ::= (name arg outcome)
;; outcome ::= ERROR | any
;; usage: (run-tests! run-fn equal-answer? tests)
;; for each item in tests, apply run-fn to the arg. Check to see if
;; the outcome is right, comparing values using equal-answer?.
;; print a log of the tests.
;; at the end, print either "no bugs found" or the list of tests
;; failed.
;; Normally, run-tests! will recover from any error and continue to
;; the end of the test suite. This behavior can be altered by
setting ( stop - after - first - error # t ) .
(define (run-tests! run-fn equal-answer? tests)
(let ((tests-failed '()))
(for-each
(lambda (test-item)
(let ((name (car test-item))
(pgm (cadr test-item))
(correct-answer (caddr test-item)))
(printf "test: ~a~%" name)
(let* ((result
(run-experiment
run-fn (list pgm) correct-answer equal-answer?))
(correct? (car result))
(actual-answer (cdr result)))
(if (or (not correct?)
(not (run-quietly)))
(begin
(printf "~a~%" pgm)
(printf "correct outcome: ~a~%" correct-answer)
(printf "actual outcome: ")
(pretty-display actual-answer)))
(if correct?
(printf "correct~%~%")
(begin
(printf "incorrect~%~%")
stop on first error if stop - after - first ? is set :
(if (stop-after-first-error)
(error name "incorrect outcome detected"))
(set! tests-failed
(cons name tests-failed)))))))
tests)
(if (null? tests-failed)
(printf "no bugs found~%")
(printf "incorrect answers on tests: ~a~%"
(reverse tests-failed)))))
)
| null | https://raw.githubusercontent.com/hidaris/thinking-dumps/3fceaf9e6195ab99c8315749814a7377ef8baf86/eopl-solutions/chap4/4-12/store-passing/drscheme-init.rkt | racket | usage: (require "drscheme-init.scm")
makes structs printable, and provides basic functionality for
testing. This includes pretty-printing and tracing.
show the contents of define-datatype values
safely apply procedure fn to a list of args.
if successful, return (cons #t val)
if eopl:error is invoked, returns (cons #f string), where string is the
format string generated by eopl:error. If somebody manages to raise a
value other than an exception, then the raised value is reported.
catch any error
evaluate to a failed test result
run-experiment :
((a ...) -> b) * (a ...) * b * (b * b -> bool)
-> (cons bool b)
usage: (run-experiment fn args correct-answer equal-answer?)
Applies fn to args. Compares the result to correct-answer.
Returns (cons bool b) where bool indicates whether the
answer is correct.
ans is either the answer or the args to eopl:error
run-tests! : (arg -> outcome) * (any * any -> bool) * (list-of test)
-> unspecified
where:
test ::= (name arg outcome)
outcome ::= ERROR | any
usage: (run-tests! run-fn equal-answer? tests)
for each item in tests, apply run-fn to the arg. Check to see if
the outcome is right, comparing values using equal-answer?.
print a log of the tests.
at the end, print either "no bugs found" or the list of tests
failed.
Normally, run-tests! will recover from any error and continue to
the end of the test suite. This behavior can be altered by | drscheme-init.scm - compatibility file for DrScheme
(module drscheme-init mzscheme
(print-struct #t)
(require (lib "pretty.ss"))
(provide (all-from (lib "pretty.ss")))
(require (lib "trace.ss"))
(provide (all-from (lib "trace.ss")))
(provide make-parameter)
(provide
run-experiment
run-tests!
stop-after-first-error
run-quietly
)
(define apply-safely
(lambda (proc args)
(cons #f
(if (exn? exn)
(exn-message exn)
exn)))])
(let ([actual (apply proc args)])
(cons #t actual)))))
(define run-experiment
(lambda (fn args correct-answer equal-answer?)
(let*
((result (apply-safely fn args))
(error-thrown? (not (car result)))
(ans (cdr result)))
(cons
(if (eqv? correct-answer 'error)
error-thrown?
(equal-answer? ans correct-answer))
ans))))
(define stop-after-first-error (make-parameter #f))
(define run-quietly (make-parameter #t))
setting ( stop - after - first - error # t ) .
(define (run-tests! run-fn equal-answer? tests)
(let ((tests-failed '()))
(for-each
(lambda (test-item)
(let ((name (car test-item))
(pgm (cadr test-item))
(correct-answer (caddr test-item)))
(printf "test: ~a~%" name)
(let* ((result
(run-experiment
run-fn (list pgm) correct-answer equal-answer?))
(correct? (car result))
(actual-answer (cdr result)))
(if (or (not correct?)
(not (run-quietly)))
(begin
(printf "~a~%" pgm)
(printf "correct outcome: ~a~%" correct-answer)
(printf "actual outcome: ")
(pretty-display actual-answer)))
(if correct?
(printf "correct~%~%")
(begin
(printf "incorrect~%~%")
stop on first error if stop - after - first ? is set :
(if (stop-after-first-error)
(error name "incorrect outcome detected"))
(set! tests-failed
(cons name tests-failed)))))))
tests)
(if (null? tests-failed)
(printf "no bugs found~%")
(printf "incorrect answers on tests: ~a~%"
(reverse tests-failed)))))
)
|
ab5bd2b8cc3a8c6c1830de8be19317007c97678fb7151b934cfee75b981c6709 | LaurentMazare/tensorflow-ocaml | checkpointing.ml | (* TODO: add the possibility to only keep a fixed number of checkpoints. *)
open Base
let latest_index_and_filename ~checkpoint_base =
let dirname = Caml.Filename.dirname checkpoint_base in
let basename = Caml.Filename.basename checkpoint_base in
Caml.Sys.readdir dirname
|> Array.to_list
|> List.filter_map ~f:(fun filename ->
match String.chop_prefix filename ~prefix:(basename ^ ".") with
| None -> None
| Some suffix ->
(try Some (Int.of_string suffix, Caml.Filename.concat dirname filename) with
| _ -> None))
|> List.sort ~compare:Caml.compare
|> List.last
let loop
~start_index
~end_index
~save_vars_from
~checkpoint_base
?(checkpoint_every = `seconds 600.)
f
=
if start_index < 0
then raise (Invalid_argument (Printf.sprintf "negative start_index %d" start_index));
let named_vars =
Var.get_all_vars save_vars_from
|> List.map ~f:(fun var -> "V" ^ (Node.packed_id var |> Node.Id.to_string), var)
in
let temp_checkpoint = checkpoint_base ^ ".tmp" in
let save_op = Ops.save ~filename:temp_checkpoint named_vars in
let latest_index_and_filename = latest_index_and_filename ~checkpoint_base in
let load_ops =
Option.map latest_index_and_filename ~f:(fun (latest_index, filename) ->
Stdio.eprintf
"Restoring checkpoint for index %d from '%s'.\n%!"
latest_index
filename;
let filename = Ops.const_string0 filename in
List.map named_vars ~f:(fun (var_name, Node.P var) ->
Ops.assign
var
(Ops.restore
~type_:(Node.output_type var)
filename
(Ops.const_string0 var_name))
|> fun node -> Node.P node))
in
(* From this point, no op should be added to the graph anymore as we may call
[Session.run]. *)
Option.iter load_ops ~f:(fun load_ops ->
Session.run ~targets:load_ops Session.Output.empty);
let start_index =
Option.value_map latest_index_and_filename ~default:start_index ~f:(fun (index, _) ->
index + 1)
in
let save ~suffix =
Session.run ~targets:[ Node.P save_op ] Session.Output.empty;
Unix.rename temp_checkpoint (Printf.sprintf "%s.%s" checkpoint_base suffix)
in
let last_checkpoint_time = ref (Unix.time ()) in
for index = start_index to end_index do
f ~index;
let should_checkpoint =
match checkpoint_every with
| `seconds seconds -> Float.( > ) (Unix.time () -. !last_checkpoint_time) seconds
| `iters iters -> index % iters = 0
in
if should_checkpoint
then (
save ~suffix:(Int.to_string index);
last_checkpoint_time := Unix.time ())
done;
save ~suffix:"final"
| null | https://raw.githubusercontent.com/LaurentMazare/tensorflow-ocaml/52c5f1dec1a8b7dc9bc6ef06abbc07da6cd90d39/src/graph/checkpointing.ml | ocaml | TODO: add the possibility to only keep a fixed number of checkpoints.
From this point, no op should be added to the graph anymore as we may call
[Session.run]. | open Base
let latest_index_and_filename ~checkpoint_base =
let dirname = Caml.Filename.dirname checkpoint_base in
let basename = Caml.Filename.basename checkpoint_base in
Caml.Sys.readdir dirname
|> Array.to_list
|> List.filter_map ~f:(fun filename ->
match String.chop_prefix filename ~prefix:(basename ^ ".") with
| None -> None
| Some suffix ->
(try Some (Int.of_string suffix, Caml.Filename.concat dirname filename) with
| _ -> None))
|> List.sort ~compare:Caml.compare
|> List.last
let loop
~start_index
~end_index
~save_vars_from
~checkpoint_base
?(checkpoint_every = `seconds 600.)
f
=
if start_index < 0
then raise (Invalid_argument (Printf.sprintf "negative start_index %d" start_index));
let named_vars =
Var.get_all_vars save_vars_from
|> List.map ~f:(fun var -> "V" ^ (Node.packed_id var |> Node.Id.to_string), var)
in
let temp_checkpoint = checkpoint_base ^ ".tmp" in
let save_op = Ops.save ~filename:temp_checkpoint named_vars in
let latest_index_and_filename = latest_index_and_filename ~checkpoint_base in
let load_ops =
Option.map latest_index_and_filename ~f:(fun (latest_index, filename) ->
Stdio.eprintf
"Restoring checkpoint for index %d from '%s'.\n%!"
latest_index
filename;
let filename = Ops.const_string0 filename in
List.map named_vars ~f:(fun (var_name, Node.P var) ->
Ops.assign
var
(Ops.restore
~type_:(Node.output_type var)
filename
(Ops.const_string0 var_name))
|> fun node -> Node.P node))
in
Option.iter load_ops ~f:(fun load_ops ->
Session.run ~targets:load_ops Session.Output.empty);
let start_index =
Option.value_map latest_index_and_filename ~default:start_index ~f:(fun (index, _) ->
index + 1)
in
let save ~suffix =
Session.run ~targets:[ Node.P save_op ] Session.Output.empty;
Unix.rename temp_checkpoint (Printf.sprintf "%s.%s" checkpoint_base suffix)
in
let last_checkpoint_time = ref (Unix.time ()) in
for index = start_index to end_index do
f ~index;
let should_checkpoint =
match checkpoint_every with
| `seconds seconds -> Float.( > ) (Unix.time () -. !last_checkpoint_time) seconds
| `iters iters -> index % iters = 0
in
if should_checkpoint
then (
save ~suffix:(Int.to_string index);
last_checkpoint_time := Unix.time ())
done;
save ~suffix:"final"
|
c68c6deead9485c56fd28c538d4f4266b0fb6614dbc64e081d26ad5d20572198 | cryptosense/pkcs11 | pkcs11_CK_HW_FEATURE_TYPE.ml | type t = P11_ulong.t
let typ = Ctypes.ulong
| null | https://raw.githubusercontent.com/cryptosense/pkcs11/93c39c7a31c87f68f0beabf75ef90d85a782a983/driver/pkcs11_CK_HW_FEATURE_TYPE.ml | ocaml | type t = P11_ulong.t
let typ = Ctypes.ulong
| |
fa9ddedac9ff870a8df6f2e084c7942046954408692c3e96626984c3c080c3bb | ocaml-flambda/flambda-backend | linearize.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Transformation of Mach code into a list of pseudo-instructions. *)
open Linear
(* Cons a simple instruction (arg, res, live empty) *)
let cons_instr d n =
{ desc = d; next = n; arg = [||]; res = [||];
dbg = Debuginfo.none; fdo = Fdo_info.none; live = Reg.Set.empty }
(* Build an instruction with arg, res, dbg, live taken from
the given Mach.instruction *)
let copy_instr d i n =
{ desc = d; next = n;
arg = i.Mach.arg; res = i.Mach.res;
dbg = i.Mach.dbg; fdo = Fdo_info.none; live = i.Mach.live }
(*
Label the beginning of the given instruction sequence.
- If the sequence starts with a branch, jump over it.
- If the sequence is the end, (tail call position), just do nothing
*)
let get_label n = match n.desc with
Lbranch lbl -> (lbl, n)
| Llabel { label = lbl; _ } -> (lbl, n)
| Lend -> (-1, n)
| _ ->
let lbl = Cmm.new_label() in
(* CR gyorsh: basic block sections are not supported in [linearize]. *)
(lbl, cons_instr (Llabel { label = lbl; section_name = None; }) n)
(* Check the fallthrough label *)
let check_label n = match n.desc with
| Lbranch lbl -> lbl
| Llabel { label = lbl; _ } -> lbl
| _ -> -1
(* Add pseudo-instruction Ladjust_stack_offset in front of a continuation
to notify assembler generation about updates to the stack as a result
of differences in exception trap depths
and stack allocated outgoing arguments. *)
let rec adjust_stack_offset delta_bytes next =
(* Simplify by merging and eliminating Ladjust_stack_offset instructions
whenever possible. *)
match next.desc with
| Ladjust_stack_offset { delta_bytes = k } ->
adjust_stack_offset (delta_bytes + k) next.next
| _ ->
if delta_bytes = 0 then next
else cons_instr (Ladjust_stack_offset { delta_bytes }) next
let rec adjust_trap_depth delta_traps next =
adjust_stack_offset (Linear.traps_to_bytes delta_traps) next
let delta_traps stack_before stack_after =
let rec stack_depth acc stack =
match (stack : Mach.trap_stack) with
| Uncaught -> acc
| Generic_trap t | Specific_trap (_, t) -> stack_depth (succ acc) t
in
(stack_depth 0 stack_after) - (stack_depth 0 stack_before)
(* Discard all instructions up to the next label.
This function is to be called before adding a non-terminating
instruction. *)
let rec discard_dead_code n =
let adjust ~delta_bytes =
adjust_stack_offset delta_bytes (discard_dead_code n.next)
in
let adjust_traps ~delta_traps =
adjust ~delta_bytes:(Linear.traps_to_bytes delta_traps)
in
match n.desc with
Lend -> n
| Llabel _ -> n
Do not discard Lpoptrap / Lpushtrap / Ladjust_stack_offset
or Istackoffset instructions , as this may cause a stack imbalance
later during assembler generation . Replace them
with pseudo - instruction Ladjust_stack_offset with the corresponding
stack offset and eliminate dead instructions after them .
or Istackoffset instructions, as this may cause a stack imbalance
later during assembler generation. Replace them
with pseudo-instruction Ladjust_stack_offset with the corresponding
stack offset and eliminate dead instructions after them. *)
| Lpoptrap -> adjust_traps ~delta_traps:(-1)
| Lpushtrap _ -> adjust_traps ~delta_traps:(+1)
| Ladjust_stack_offset { delta_bytes } -> adjust ~delta_bytes
| Lop(Istackoffset delta_bytes) -> adjust ~delta_bytes
| _ -> discard_dead_code n.next
(*
Add a branch in front of a continuation.
Discard dead code in the continuation.
Does not insert anything if we're just falling through
or if we jump to dead code after the end of function (lbl=-1)
*)
let add_branch lbl n =
if lbl >= 0 then
let n1 = discard_dead_code n in
match n1.desc with
| Llabel { label = lbl1; _ } when lbl1 = lbl -> n1
| _ -> cons_instr (Lbranch lbl) n1
else
discard_dead_code n
type linear_env =
{ trap_stack : Mach.trap_stack;
(** The current trap stack *)
exit_label : (int * label) list;
(** Association list: exit handler -> handler label *)
}
let initial_env =
{ trap_stack = Uncaught;
exit_label = [];
}
let find_exit_label env k =
try
List.assoc k env.exit_label
with
| Not_found -> Misc.fatal_error "Linearize.find_exit_label"
let is_next_catch env n = match env.exit_label with
| (n0,_)::_ when n0=n -> true
| _ -> false
let rec add_traps env i traps =
match traps with
| [] -> i
| Cmm.Pop :: traps ->
add_traps env (cons_instr Lpoptrap i) traps
| Cmm.Push handler :: traps ->
let lbl_handler = find_exit_label env handler in
add_traps env (cons_instr (Lpushtrap { lbl_handler; }) i) traps
let delta_traps_diff traps =
let delta =
List.fold_left
(fun delta trap ->
match trap with
| Cmm.Pop -> delta - 1
| Cmm.Push _ -> delta + 1)
0 traps in
-delta
Linearize an instruction [ i ] : add it in front of the continuation [ n ]
let linear i n contains_calls =
let rec linear env i n =
match i.Mach.desc with
Iend -> n
| Iop(Itailcall_ind | Itailcall_imm _ as op)
| Iop((Iextcall { returns = false; _ }) as op) ->
copy_instr (Lop op) i (discard_dead_code n)
| Iop(Imove | Ireload | Ispill)
when i.Mach.arg.(0).loc = i.Mach.res.(0).loc ->
linear env i.Mach.next n
| Iop((Icsel _) as op) ->
(* CR gyorsh: this optimization can leave behind dead code
from computing the condition and the arguments, because there
is not dead code elimination after linearize. *)
let len = Array.length i.Mach.arg in
let ifso = i.Mach.arg.(len-2) in
let ifnot = i.Mach.arg.(len-1) in
if Reg.same_loc i.Mach.res.(0) ifso &&
Reg.same_loc i.Mach.res.(0) ifnot
then linear env i.Mach.next n
else copy_instr (Lop op) i (linear env i.Mach.next n)
| Iop((Ipoll { return_label = None; _ }) as op) ->
(* If the poll call does not already specify where to jump to after
the poll (the expected situation in the current implementation),
absorb any branch after the poll call into the poll call itself.
This, in particular, optimises polls at the back edges of loops. *)
let n = linear env i.Mach.next n in
let op, n =
match n.desc with
| Lbranch lbl ->
Mach.Ipoll { return_label = Some lbl }, n.next
| _ -> op, n
in
copy_instr (Lop op) i n
| Iop op ->
copy_instr (Lop op) i (linear env i.Mach.next n)
| Ireturn traps ->
let n = adjust_trap_depth (delta_traps_diff traps) n in
let n1 = copy_instr Lreturn i (discard_dead_code n) in
let n2 =
if contains_calls
then cons_instr Lreloadretaddr n1
else n1
in
add_traps env n2 traps
| Iifthenelse(test, ifso, ifnot) ->
let n1 = linear env i.Mach.next n in
begin match (ifso.Mach.desc, ifnot.Mach.desc, n1.desc) with
Iend, _, Lbranch lbl ->
copy_instr (Lcondbranch(test, lbl)) i (linear env ifnot n1)
| _, Iend, Lbranch lbl ->
copy_instr (Lcondbranch(invert_test test, lbl)) i
(linear env ifso n1)
| Iexit (nfail1, []), Iexit (nfail2, []), _
when is_next_catch env nfail1 ->
let lbl2 = find_exit_label env nfail2 in
copy_instr
(Lcondbranch (invert_test test, lbl2)) i (linear env ifso n1)
| Iexit (nfail, []), _, _ ->
let n2 = linear env ifnot n1
and lbl = find_exit_label env nfail in
copy_instr (Lcondbranch(test, lbl)) i n2
| _, Iexit (nfail, []), _ ->
let n2 = linear env ifso n1 in
let lbl = find_exit_label env nfail in
copy_instr (Lcondbranch(invert_test test, lbl)) i n2
| Iend, _, _ ->
let (lbl_end, n2) = get_label n1 in
copy_instr (Lcondbranch(test, lbl_end)) i (linear env ifnot n2)
| _, Iend, _ ->
let (lbl_end, n2) = get_label n1 in
copy_instr (Lcondbranch(invert_test test, lbl_end)) i
(linear env ifso n2)
| _, _, _ ->
(* Should attempt branch prediction here *)
let (lbl_end, n2) = get_label n1 in
let (lbl_else, nelse) = get_label (linear env ifnot n2) in
copy_instr (Lcondbranch(invert_test test, lbl_else)) i
(linear env ifso (add_branch lbl_end nelse))
end
| Iswitch(index, cases) ->
let lbl_cases = Array.make (Array.length cases) 0 in
let (lbl_end, n1) = get_label(linear env i.Mach.next n) in
let n2 = ref (discard_dead_code n1) in
for i = Array.length cases - 1 downto 0 do
let (lbl_case, ncase) =
get_label(linear env cases.(i) (add_branch lbl_end !n2)) in
lbl_cases.(i) <- lbl_case;
n2 := discard_dead_code ncase
done;
Switches with 1 and 2 branches have been eliminated earlier .
Here , we do something for switches with 3 branches .
Here, we do something for switches with 3 branches. *)
if Array.length index = 3 then begin
let fallthrough_lbl = check_label !n2 in
let find_label n =
let lbl = lbl_cases.(index.(n)) in
if lbl = fallthrough_lbl then None else Some lbl in
copy_instr (Lcondbranch3(find_label 0, find_label 1, find_label 2))
i !n2
end else
copy_instr (Lswitch(Array.map (fun n -> lbl_cases.(n)) index)) i !n2
| Icatch(_rec_flag, ts_next, handlers, body) ->
let n0 = adjust_trap_depth (delta_traps ts_next env.trap_stack) n in
let env_next = { env with trap_stack = ts_next; } in
let (lbl_end, n1) = get_label(linear env_next i.Mach.next n0) in
CR mshinwell for pchambart :
1 . rename " io "
2 . Make sure the test cases cover the " Iend " cases too
1. rename "io"
2. Make sure the test cases cover the "Iend" cases too *)
let labels_at_entry_to_handlers = List.map (fun (_n, _ts, handler) ->
match handler.Mach.desc with
| Iend -> lbl_end
| _ -> Cmm.new_label ())
handlers in
let exit_label_add = List.map2
(fun (nfail, _ts, _) lbl -> (nfail, lbl))
handlers labels_at_entry_to_handlers in
let env = { env with exit_label = exit_label_add @ env.exit_label; } in
let (n2, ts_n2) =
List.fold_left2 (fun (n, ts_next) (_nfail, ts, handler) lbl_handler ->
match handler.Mach.desc with
| Iend -> n, ts_next
| _ ->
let delta = delta_traps ts ts_next in
let n = adjust_trap_depth delta n in
let env = { env with trap_stack = ts; } in
let n =
cons_instr (Llabel { label = lbl_handler; section_name = None; } )
(linear env handler (add_branch lbl_end n))
in
n, ts)
(n1, ts_next) handlers labels_at_entry_to_handlers
in
let n2 = adjust_trap_depth (delta_traps env.trap_stack ts_n2) n2 in
let n3 = linear env body (add_branch lbl_end n2) in
n3
| Iexit (nfail, traps) ->
let lbl = find_exit_label env nfail in
assert (i.Mach.next.desc = Mach.Iend);
let n1 = adjust_trap_depth (delta_traps_diff traps) n in
add_traps env (add_branch lbl n1) traps
| Itrywith(body, Regular, (ts, handler)) ->
let (lbl_join, n1) = get_label (linear env i.Mach.next n) in
assert (Mach.equal_trap_stack ts env.trap_stack);
let (lbl_handler, n2) =
get_label (cons_instr Lentertrap (linear env handler n1))
in
let env_body =
{ env with trap_stack = Mach.Generic_trap env.trap_stack; }
in
assert (i.Mach.arg = [| |]);
let n3 = cons_instr (Lpushtrap { lbl_handler; })
(linear env_body body
(cons_instr
Lpoptrap
(add_branch lbl_join n2))) in
n3
| Itrywith(body, Delayed nfail, (ts, handler)) ->
let (lbl_join, n1) = get_label (linear env i.Mach.next n) in
let delta = delta_traps ts env.trap_stack in
let n1' = adjust_trap_depth delta n1 in
let env_handler = { env with trap_stack = ts; } in
let (lbl_handler, n2) =
get_label (cons_instr Lentertrap (linear env_handler handler n1'))
in
let n2' = adjust_trap_depth (-delta) n2 in
let env_body =
{env with exit_label = (nfail, lbl_handler) :: env.exit_label; }
in
let n3 = linear env_body body (add_branch lbl_join n2') in
n3
| Iraise k ->
copy_instr (Lraise k) i (discard_dead_code n)
in linear initial_env i n
let add_prologue first_insn prologue_required =
(* The prologue needs to come after any [Iname_for_debugger] operations that
refer to parameters. (Such operations always come in a contiguous
block, cf. [Selectgen].) *)
let rec skip_naming_ops (insn : instruction) : label * instruction =
match insn.desc with
| Lop (Iname_for_debugger _) ->
let tailrec_entry_point_label, next = skip_naming_ops insn.next in
tailrec_entry_point_label, { insn with next; }
| _ ->
let tailrec_entry_point_label = Cmm.new_label () in
let tailrec_entry_point =
{ desc = Llabel { label = tailrec_entry_point_label; section_name = None; };
next = insn;
arg = [| |];
res = [| |];
dbg = insn.dbg;
fdo = insn.fdo;
live = insn.live;
}
in
We expect [ ] to expand to at least one instruction --- as such ,
if no prologue is required , we avoid adding the instruction here .
The reason is subtle : an empty expansion of [ ] can cause
two labels , one either side of the [ ] , to point at the same
location . This means that we lose the property ( cf . [ Coalesce_labels ] )
that we can check if two labels point at the same location by
comparing them for equality . This causes trouble when the function
whose prologue is in question lands at the top of the object file
and we are emitting DWARF debugging information :
foo_code_begin :
foo :
.L1 :
; empty prologue
.L2 :
...
If we were to emit a location list entry from L1 ... L2 , not realising
that they point at the same location , then the beginning and ending
points of the range would be both equal to each other and ( relative to
" foo_code_begin " ) equal to zero . This appears to confuse objdump ,
which seemingly misinterprets the entry as an end - of - list entry
( which is encoded with two zero words ) , then complaining about a
" hole in location list " ( as it ignores any remaining list entries
after the misinterpreted entry ) .
if no prologue is required, we avoid adding the instruction here.
The reason is subtle: an empty expansion of [Lprologue] can cause
two labels, one either side of the [Lprologue], to point at the same
location. This means that we lose the property (cf. [Coalesce_labels])
that we can check if two labels point at the same location by
comparing them for equality. This causes trouble when the function
whose prologue is in question lands at the top of the object file
and we are emitting DWARF debugging information:
foo_code_begin:
foo:
.L1:
; empty prologue
.L2:
...
If we were to emit a location list entry from L1...L2, not realising
that they point at the same location, then the beginning and ending
points of the range would be both equal to each other and (relative to
"foo_code_begin") equal to zero. This appears to confuse objdump,
which seemingly misinterprets the entry as an end-of-list entry
(which is encoded with two zero words), then complaining about a
"hole in location list" (as it ignores any remaining list entries
after the misinterpreted entry). *)
if prologue_required then
let prologue =
{ desc = Lprologue;
next = tailrec_entry_point;
arg = [| |];
res = [| |];
dbg = tailrec_entry_point.dbg;
fdo = tailrec_entry_point.fdo;
live = Reg.Set.empty; (* will not be used *)
}
in
tailrec_entry_point_label, prologue
else
tailrec_entry_point_label, tailrec_entry_point
in
skip_naming_ops first_insn
let fundecl f =
let fun_contains_calls = f.Mach.fun_contains_calls in
let fun_num_stack_slots = f.Mach.fun_num_stack_slots in
let fun_prologue_required =
Proc.prologue_required ~fun_contains_calls ~fun_num_stack_slots in
let fun_frame_required =
Proc.frame_required ~fun_contains_calls ~fun_num_stack_slots in
let fun_tailrec_entry_point_label, fun_body =
add_prologue (linear f.Mach.fun_body end_instr fun_contains_calls)
fun_prologue_required
in
{ fun_name = f.Mach.fun_name;
fun_body;
fun_fast = not (List.mem Cmm.Reduce_code_size f.Mach.fun_codegen_options);
fun_dbg = f.Mach.fun_dbg;
fun_tailrec_entry_point_label = Some fun_tailrec_entry_point_label;
fun_contains_calls;
fun_num_stack_slots;
fun_frame_required;
fun_prologue_required;
fun_section_name = None;
}
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/bed9613a24b4bc5ed377ca20c3b685195c993e21/backend/linearize.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Transformation of Mach code into a list of pseudo-instructions.
Cons a simple instruction (arg, res, live empty)
Build an instruction with arg, res, dbg, live taken from
the given Mach.instruction
Label the beginning of the given instruction sequence.
- If the sequence starts with a branch, jump over it.
- If the sequence is the end, (tail call position), just do nothing
CR gyorsh: basic block sections are not supported in [linearize].
Check the fallthrough label
Add pseudo-instruction Ladjust_stack_offset in front of a continuation
to notify assembler generation about updates to the stack as a result
of differences in exception trap depths
and stack allocated outgoing arguments.
Simplify by merging and eliminating Ladjust_stack_offset instructions
whenever possible.
Discard all instructions up to the next label.
This function is to be called before adding a non-terminating
instruction.
Add a branch in front of a continuation.
Discard dead code in the continuation.
Does not insert anything if we're just falling through
or if we jump to dead code after the end of function (lbl=-1)
* The current trap stack
* Association list: exit handler -> handler label
CR gyorsh: this optimization can leave behind dead code
from computing the condition and the arguments, because there
is not dead code elimination after linearize.
If the poll call does not already specify where to jump to after
the poll (the expected situation in the current implementation),
absorb any branch after the poll call into the poll call itself.
This, in particular, optimises polls at the back edges of loops.
Should attempt branch prediction here
The prologue needs to come after any [Iname_for_debugger] operations that
refer to parameters. (Such operations always come in a contiguous
block, cf. [Selectgen].)
will not be used | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Linear
let cons_instr d n =
{ desc = d; next = n; arg = [||]; res = [||];
dbg = Debuginfo.none; fdo = Fdo_info.none; live = Reg.Set.empty }
let copy_instr d i n =
{ desc = d; next = n;
arg = i.Mach.arg; res = i.Mach.res;
dbg = i.Mach.dbg; fdo = Fdo_info.none; live = i.Mach.live }
let get_label n = match n.desc with
Lbranch lbl -> (lbl, n)
| Llabel { label = lbl; _ } -> (lbl, n)
| Lend -> (-1, n)
| _ ->
let lbl = Cmm.new_label() in
(lbl, cons_instr (Llabel { label = lbl; section_name = None; }) n)
let check_label n = match n.desc with
| Lbranch lbl -> lbl
| Llabel { label = lbl; _ } -> lbl
| _ -> -1
let rec adjust_stack_offset delta_bytes next =
match next.desc with
| Ladjust_stack_offset { delta_bytes = k } ->
adjust_stack_offset (delta_bytes + k) next.next
| _ ->
if delta_bytes = 0 then next
else cons_instr (Ladjust_stack_offset { delta_bytes }) next
let rec adjust_trap_depth delta_traps next =
adjust_stack_offset (Linear.traps_to_bytes delta_traps) next
let delta_traps stack_before stack_after =
let rec stack_depth acc stack =
match (stack : Mach.trap_stack) with
| Uncaught -> acc
| Generic_trap t | Specific_trap (_, t) -> stack_depth (succ acc) t
in
(stack_depth 0 stack_after) - (stack_depth 0 stack_before)
(* Discard instructions at the head of [n] until a label (or [Lend]) is
   reached; to be used when the preceding instruction never falls
   through.  (Recovered note: this is not dead code elimination after
   linearize.)  The garbled comment below is restored with its original
   delimiters, which had been lost. *)
let rec discard_dead_code n =
  let adjust ~delta_bytes =
    adjust_stack_offset delta_bytes (discard_dead_code n.next)
  in
  let adjust_traps ~delta_traps =
    adjust ~delta_bytes:(Linear.traps_to_bytes delta_traps)
  in
  match n.desc with
  | Lend -> n
  | Llabel _ -> n
  (* Do not discard Lpoptrap / Lpushtrap / Ladjust_stack_offset
     or Istackoffset instructions, as this may cause a stack imbalance
     later during assembler generation.  Replace them
     with pseudo-instruction Ladjust_stack_offset with the corresponding
     stack offset and eliminate dead instructions after them. *)
  | Lpoptrap -> adjust_traps ~delta_traps:(-1)
  | Lpushtrap _ -> adjust_traps ~delta_traps:(+1)
  | Ladjust_stack_offset { delta_bytes } -> adjust ~delta_bytes
  | Lop (Istackoffset delta_bytes) -> adjust ~delta_bytes
  | _ -> discard_dead_code n.next
(* Add an unconditional branch to label [lbl] in front of continuation
   [n], after discarding unreachable instructions at its head.  A
   negative [lbl] is the dummy label produced by [get_label]/[check_label]
   on [Lend]: no branch is emitted.  No branch is emitted either when [n]
   already starts with the target label (fall-through). *)
let add_branch lbl n =
  if lbl >= 0 then
    let n1 = discard_dead_code n in
    match n1.desc with
    | Llabel { label = lbl1; _ } when lbl1 = lbl -> n1
    | _ -> cons_instr (Lbranch lbl) n1
  else
    discard_dead_code n
(* Static environment threaded through linearization. *)
type linear_env =
  { trap_stack : Mach.trap_stack;
    (* Trap stack at the current program point. *)
    exit_label : (int * label) list;
    (* Maps a static-exit identifier to the label of the corresponding
       catch handler; innermost binding first. *)
  }

(* Environment at the start of a function body: no traps, no handlers. *)
let initial_env =
  { trap_stack = Uncaught;
    exit_label = [];
  }
(* Label of the catch handler bound to static-exit identifier [k].
   An unbound [k] is a fatal internal error (every [Iexit] must refer to
   an enclosing handler).  Uses [List.assoc_opt] instead of catching
   [Not_found], avoiding the exception-based lookup idiom. *)
let find_exit_label env k =
  match List.assoc_opt k env.exit_label with
  | Some lbl -> lbl
  | None -> Misc.fatal_error "Linearize.find_exit_label"
(* Is [n] the static-exit identifier of the innermost (most recently
   bound) catch handler? *)
let is_next_catch env n =
  match env.exit_label with
  | (n0, _) :: _ -> n0 = n
  | [] -> false
(* Materialise the trap actions [traps] in front of instruction [i]:
   [Cmm.Pop] becomes [Lpoptrap], and [Cmm.Push h] becomes [Lpushtrap]
   targeting the handler label bound to [h].  Actions are processed in
   list order, each instruction being consed in front of the previous
   result, exactly as the original recursive formulation did. *)
let add_traps env i traps =
  List.fold_left
    (fun acc trap ->
       match trap with
       | Cmm.Pop -> cons_instr Lpoptrap acc
       | Cmm.Push handler ->
         let lbl_handler = find_exit_label env handler in
         cons_instr (Lpushtrap { lbl_handler; }) acc)
    i traps
(* Negation of the trap-depth delta of [traps]: the number of [Pop]
   actions minus the number of [Push] actions.  Computed directly with
   inverted increments instead of negating at the end. *)
let delta_traps_diff traps =
  List.fold_left
    (fun acc trap ->
       match trap with
       | Cmm.Pop -> acc + 1
       | Cmm.Push _ -> acc - 1)
    0 traps
(* Linearize an instruction [i]: add it in front of the continuation [n] *)
(* Linearize the Mach instruction sequence [i], prepending the result to
   the already-linearized continuation [n].  [contains_calls] controls
   whether [Lreloadretaddr] must be emitted before returns.  The two
   garbled comments inside (switch note, CR note) are restored with
   their original delimiters. *)
let linear i n contains_calls =
  let rec linear env i n =
    match i.Mach.desc with
      Iend -> n
    | Iop(Itailcall_ind | Itailcall_imm _ as op)
    | Iop((Iextcall { returns = false; _ }) as op) ->
        (* Terminating operations: code after them is unreachable. *)
        copy_instr (Lop op) i (discard_dead_code n)
    | Iop(Imove | Ireload | Ispill)
      when i.Mach.arg.(0).loc = i.Mach.res.(0).loc ->
        (* Move between identical locations: emit nothing. *)
        linear env i.Mach.next n
    | Iop((Icsel _) as op) ->
        (* Drop a conditional select whose result location coincides with
           both data arguments (the last two arguments). *)
        let len = Array.length i.Mach.arg in
        let ifso = i.Mach.arg.(len-2) in
        let ifnot = i.Mach.arg.(len-1) in
        if Reg.same_loc i.Mach.res.(0) ifso &&
           Reg.same_loc i.Mach.res.(0) ifnot
        then linear env i.Mach.next n
        else copy_instr (Lop op) i (linear env i.Mach.next n)
    | Iop((Ipoll { return_label = None; _ }) as op) ->
        (* If the poll call does not already specify where to jump to
           after the poll (the expected situation in the current
           implementation), absorb any branch after the poll call into
           the poll call itself.  This, in particular, optimises polls
           at the back edges of loops. *)
        let n = linear env i.Mach.next n in
        let op, n =
          match n.desc with
          | Lbranch lbl ->
              Mach.Ipoll { return_label = Some lbl }, n.next
          | _ -> op, n
        in
        copy_instr (Lop op) i n
    | Iop op ->
        copy_instr (Lop op) i (linear env i.Mach.next n)
    | Ireturn traps ->
        let n = adjust_trap_depth (delta_traps_diff traps) n in
        let n1 = copy_instr Lreturn i (discard_dead_code n) in
        (* Reload the return address before returning, when the function
           made calls that could have clobbered it. *)
        let n2 =
          if contains_calls
          then cons_instr Lreloadretaddr n1
          else n1
        in
        add_traps env n2 traps
    | Iifthenelse(test, ifso, ifnot) ->
        (* Recovered note: should attempt branch prediction here. *)
        let n1 = linear env i.Mach.next n in
        begin match (ifso.Mach.desc, ifnot.Mach.desc, n1.desc) with
          Iend, _, Lbranch lbl ->
            copy_instr (Lcondbranch(test, lbl)) i (linear env ifnot n1)
        | _, Iend, Lbranch lbl ->
            copy_instr (Lcondbranch(invert_test test, lbl)) i
              (linear env ifso n1)
        | Iexit (nfail1, []), Iexit (nfail2, []), _
          when is_next_catch env nfail1 ->
            (* The "ifso" exit falls through to the next handler; only a
               conditional branch to the "ifnot" handler is needed. *)
            let lbl2 = find_exit_label env nfail2 in
            copy_instr
              (Lcondbranch (invert_test test, lbl2)) i (linear env ifso n1)
        | Iexit (nfail, []), _, _ ->
            let n2 = linear env ifnot n1
            and lbl = find_exit_label env nfail in
            copy_instr (Lcondbranch(test, lbl)) i n2
        | _, Iexit (nfail, []), _ ->
            let n2 = linear env ifso n1 in
            let lbl = find_exit_label env nfail in
            copy_instr (Lcondbranch(invert_test test, lbl)) i n2
        | Iend, _, _ ->
            let (lbl_end, n2) = get_label n1 in
            copy_instr (Lcondbranch(test, lbl_end)) i (linear env ifnot n2)
        | _, Iend, _ ->
            let (lbl_end, n2) = get_label n1 in
            copy_instr (Lcondbranch(invert_test test, lbl_end)) i
              (linear env ifso n2)
        | _, _, _ ->
            (* General case: branch over the "else" part, join at the
               end label. *)
            let (lbl_end, n2) = get_label n1 in
            let (lbl_else, nelse) = get_label (linear env ifnot n2) in
            copy_instr (Lcondbranch(invert_test test, lbl_else)) i
              (linear env ifso (add_branch lbl_end nelse))
        end
    | Iswitch(index, cases) ->
        let lbl_cases = Array.make (Array.length cases) 0 in
        let (lbl_end, n1) = get_label(linear env i.Mach.next n) in
        let n2 = ref (discard_dead_code n1) in
        (* Linearize the cases back to front, each branching to the end
           label. *)
        for i = Array.length cases - 1 downto 0 do
          let (lbl_case, ncase) =
            get_label(linear env cases.(i) (add_branch lbl_end !n2)) in
          lbl_cases.(i) <- lbl_case;
          n2 := discard_dead_code ncase
        done;
        (* Switches with 1 and 2 branches have been eliminated earlier.
           Here, we do something for switches with 3 branches. *)
        if Array.length index = 3 then begin
          let fallthrough_lbl = check_label !n2 in
          let find_label n =
            let lbl = lbl_cases.(index.(n)) in
            if lbl = fallthrough_lbl then None else Some lbl in
          copy_instr (Lcondbranch3(find_label 0, find_label 1, find_label 2))
            i !n2
        end else
          copy_instr (Lswitch(Array.map (fun n -> lbl_cases.(n)) index)) i !n2
    | Icatch(_rec_flag, ts_next, handlers, body) ->
        let n0 = adjust_trap_depth (delta_traps ts_next env.trap_stack) n in
        let env_next = { env with trap_stack = ts_next; } in
        let (lbl_end, n1) = get_label(linear env_next i.Mach.next n0) in
        (* CR mshinwell for pchambart:
           1. rename "io"
           2. Make sure the test cases cover the "Iend" cases too *)
        let labels_at_entry_to_handlers = List.map (fun (_n, _ts, handler) ->
            match handler.Mach.desc with
            | Iend -> lbl_end
            | _ -> Cmm.new_label ())
          handlers in
        let exit_label_add = List.map2
            (fun (nfail, _ts, _) lbl -> (nfail, lbl))
            handlers labels_at_entry_to_handlers in
        let env = { env with exit_label = exit_label_add @ env.exit_label; } in
        let (n2, ts_n2) =
          List.fold_left2 (fun (n, ts_next) (_nfail, ts, handler) lbl_handler ->
              match handler.Mach.desc with
              | Iend -> n, ts_next
              | _ ->
                let delta = delta_traps ts ts_next in
                let n = adjust_trap_depth delta n in
                let env = { env with trap_stack = ts; } in
                let n =
                  cons_instr (Llabel { label = lbl_handler; section_name = None; } )
                    (linear env handler (add_branch lbl_end n))
                in
                n, ts)
            (n1, ts_next) handlers labels_at_entry_to_handlers
        in
        let n2 = adjust_trap_depth (delta_traps env.trap_stack ts_n2) n2 in
        let n3 = linear env body (add_branch lbl_end n2) in
        n3
    | Iexit (nfail, traps) ->
        let lbl = find_exit_label env nfail in
        assert (i.Mach.next.desc = Mach.Iend);
        let n1 = adjust_trap_depth (delta_traps_diff traps) n in
        add_traps env (add_branch lbl n1) traps
    | Itrywith(body, Regular, (ts, handler)) ->
        let (lbl_join, n1) = get_label (linear env i.Mach.next n) in
        assert (Mach.equal_trap_stack ts env.trap_stack);
        let (lbl_handler, n2) =
          get_label (cons_instr Lentertrap (linear env handler n1))
        in
        (* The body runs with one more generic trap frame on the stack. *)
        let env_body =
          { env with trap_stack = Mach.Generic_trap env.trap_stack; }
        in
        assert (i.Mach.arg = [| |]);
        let n3 = cons_instr (Lpushtrap { lbl_handler; })
            (linear env_body body
               (cons_instr
                  Lpoptrap
                  (add_branch lbl_join n2))) in
        n3
    | Itrywith(body, Delayed nfail, (ts, handler)) ->
        let (lbl_join, n1) = get_label (linear env i.Mach.next n) in
        let delta = delta_traps ts env.trap_stack in
        let n1' = adjust_trap_depth delta n1 in
        let env_handler = { env with trap_stack = ts; } in
        let (lbl_handler, n2) =
          get_label (cons_instr Lentertrap (linear env_handler handler n1'))
        in
        let n2' = adjust_trap_depth (-delta) n2 in
        (* A delayed handler is entered through its static-exit label. *)
        let env_body =
          {env with exit_label = (nfail, lbl_handler) :: env.exit_label; }
        in
        let n3 = linear env_body body (add_branch lbl_join n2') in
        n3
    | Iraise k ->
        copy_instr (Lraise k) i (discard_dead_code n)
  in linear initial_env i n
(* Insert the function prologue (preceding the tailrec entry-point label)
   at the start of [first_insn].  The prologue needs to come after any
   [Iname_for_debugger] operations that refer to parameters.  (Such
   operations always come in a contiguous block, cf. [Selectgen].)
   Fixes applied: the long comment below is restored with its original
   delimiters, and the [prologue] record regains its [live] field, which
   had been dropped (cf. the instruction records built in [cons_instr]
   and for [tailrec_entry_point], which both set [live]). *)
let add_prologue first_insn prologue_required =
  let rec skip_naming_ops (insn : instruction) : label * instruction =
    match insn.desc with
    | Lop (Iname_for_debugger _) ->
      let tailrec_entry_point_label, next = skip_naming_ops insn.next in
      tailrec_entry_point_label, { insn with next; }
    | _ ->
      let tailrec_entry_point_label = Cmm.new_label () in
      let tailrec_entry_point =
        { desc = Llabel { label = tailrec_entry_point_label; section_name = None; };
          next = insn;
          arg = [| |];
          res = [| |];
          dbg = insn.dbg;
          fdo = insn.fdo;
          live = insn.live;
        }
      in
      (* We expect [Lprologue] to expand to at least one instruction --- as such,
         if no prologue is required, we avoid adding the instruction here.
         The reason is subtle: an empty expansion of [Lprologue] can cause
         two labels, one either side of the [Lprologue], to point at the same
         location.  This means that we lose the property (cf. [Coalesce_labels])
         that we can check if two labels point at the same location by
         comparing them for equality.  This causes trouble when the function
         whose prologue is in question lands at the top of the object file
         and we are emitting DWARF debugging information:
           foo_code_begin:
           foo:
           .L1:
           ; empty prologue
           .L2:
           ...
         If we were to emit a location list entry from L1...L2, not realising
         that they point at the same location, then the beginning and ending
         points of the range would be both equal to each other and (relative to
         "foo_code_begin") equal to zero.  This appears to confuse objdump,
         which seemingly misinterprets the entry as an end-of-list entry
         (which is encoded with two zero words), then complaining about a
         "hole in location list" (as it ignores any remaining list entries
         after the misinterpreted entry). *)
      if prologue_required then
        let prologue =
          { desc = Lprologue;
            next = tailrec_entry_point;
            arg = [| |];
            res = [| |];
            dbg = tailrec_entry_point.dbg;
            fdo = tailrec_entry_point.fdo;
            live = Reg.Set.empty;  (* will not be used *)
          }
        in
        tailrec_entry_point_label, prologue
      else
        tailrec_entry_point_label, tailrec_entry_point
  in
  skip_naming_ops first_insn
(* Entry point: linearize the Mach function declaration [f] into a
   [Linear] function declaration, inserting the prologue and computing
   the frame/prologue requirements via [Proc]. *)
let fundecl f =
  let fun_contains_calls = f.Mach.fun_contains_calls in
  let fun_num_stack_slots = f.Mach.fun_num_stack_slots in
  let fun_prologue_required =
    Proc.prologue_required ~fun_contains_calls ~fun_num_stack_slots in
  let fun_frame_required =
    Proc.frame_required ~fun_contains_calls ~fun_num_stack_slots in
  let fun_tailrec_entry_point_label, fun_body =
    add_prologue (linear f.Mach.fun_body end_instr fun_contains_calls)
      fun_prologue_required
  in
  { fun_name = f.Mach.fun_name;
    fun_body;
    (* Fast unless the Reduce_code_size codegen option was requested. *)
    fun_fast = not (List.mem Cmm.Reduce_code_size f.Mach.fun_codegen_options);
    fun_dbg = f.Mach.fun_dbg;
    fun_tailrec_entry_point_label = Some fun_tailrec_entry_point_label;
    fun_contains_calls;
    fun_num_stack_slots;
    fun_frame_required;
    fun_prologue_required;
    fun_section_name = None;
  }
|
3cdc4d0f6e516a77faf53eede156ce1bd13d4b5814d70268b16b5fb7bc43361d | RichiH/git-annex | Ssh.hs | git - annex assistant ssh utilities
{- git-annex assistant ssh utilities
 -
 - Copyright 2012-2013 Joey Hess <>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
module Assistant.Ssh where
import Annex.Common
import Utility.Tmp
import Utility.Shell
import Utility.Rsync
import Utility.FileMode
import Utility.SshConfig
import Git.Remote
import Utility.SshHost
import Data.Text (Text)
import qualified Data.Text as T
import Data.Char
import Network.URI
-- | Parameters describing an ssh remote and how to reach it.
data SshData = SshData
    { sshHostName :: Text -- ^ host name of the ssh server
    , sshUserName :: Maybe Text -- ^ user to log in as, when specified
    , sshDirectory :: Text -- ^ directory of the repository on the server
    , sshRepoName :: String -- ^ name used for the git remote
    , sshPort :: Int -- ^ ssh port number
    , needsPubKey :: Bool -- ^ presumably: a ssh key pair must be set up to access the server -- TODO confirm with callers
    , sshCapabilities :: [SshServerCapability] -- ^ what the server is known to support
    , sshRepoUrl :: Maybe String -- ^ explicit repo url; overrides url generation in genSshUrl
    }
    deriving (Read, Show, Eq)
-- | What a remote ssh server is known to support.
data SshServerCapability
    = GitAnnexShellCapable -- server has git-annex-shell installed
    | GitCapable -- server has git installed
    | RsyncCapable -- server supports raw rsync access (not only via git-annex-shell)
    | PushCapable -- repo on server is set up already, and ready to accept pushes
    deriving (Read, Show, Eq)
-- | Does the ssh server have the given capability?
hasCapability :: SshData -> SshServerCapability -> Bool
hasCapability sshdata cap = any (== cap) (sshCapabilities sshdata)
-- | Records an additional capability of the ssh server.
addCapability :: SshData -> SshServerCapability -> SshData
addCapability sshdata cap =
    sshdata { sshCapabilities = cap : sshCapabilities sshdata }
-- | Does the server have no capability other than this one?
-- (Vacuously true when the capability list is empty.)
onlyCapability :: SshData -> SshServerCapability -> Bool
onlyCapability sshdata cap = null (filter (/= cap) (sshCapabilities sshdata))
-- | Text of a ssh public key.
type SshPubKey = String
-- | Text of a ssh private key.
type SshPrivKey = String

-- | A ssh public/private key pair.
data SshKeyPair = SshKeyPair
    { sshPubKey :: SshPubKey
    , sshPrivKey :: SshPrivKey
    }

-- Shows only the public key; the private key is not included.
instance Show SshKeyPair where
    show = sshPubKey
{- ssh -ofoo=bar command-line option -}
sshOpt :: String -> String -> String
sshOpt k v = "-o" ++ k ++ "=" ++ v
{- user@host or host -}
genSshHost :: Text -> Maybe Text -> SshHost
-- Builds a SshHost from the hostname and optional username;
-- calls error when mkSshHost rejects the combined string.
genSshHost host user = either error id $ mkSshHost $
    maybe "" (\v -> T.unpack v ++ "@") user ++ T.unpack host
{- Generates a ssh or rsync url from a SshData. -}
genSshUrl :: SshData -> String
genSshUrl sshdata = case sshRepoUrl sshdata of
    -- An explicitly configured repo url takes precedence.
    Just repourl -> repourl
    Nothing -> addtrailingslash $ T.unpack $ T.concat $
        if (onlyCapability sshdata RsyncCapable)
            -- rsync-style url: user@host:dir
            then [u, h, T.pack ":", sshDirectory sshdata]
            -- ssh url: ssh://user@host/dir
            else [T.pack "ssh://", u, h, d]
  where
    u = maybe (T.pack "") (\v -> T.concat [v, T.pack "@"]) $ sshUserName sshdata
    h = sshHostName sshdata
    -- Absolute directories are used as-is; "~/" and relative
    -- directories are encoded into the url path.
    d
        | T.pack "/" `T.isPrefixOf` sshDirectory sshdata = sshDirectory sshdata
        | T.pack "~/" `T.isPrefixOf` sshDirectory sshdata = T.concat [T.pack "/", sshDirectory sshdata]
        | otherwise = T.concat [T.pack "/~/", sshDirectory sshdata]
    addtrailingslash s
        | "/" `isSuffixOf` s = s
        | otherwise = s ++ "/"
{- Reverses genSshUrl -}
parseSshUrl :: String -> Maybe SshData
parseSshUrl u
    | "ssh://" `isPrefixOf` u = fromssh (drop (length "ssh://") u)
    | otherwise = fromrsync u
  where
    mkdata (userhost, dir) = Just $ SshData
        { sshHostName = T.pack host
        , sshUserName = if null user then Nothing else Just $ T.pack user
        , sshDirectory = T.pack dir
        , sshRepoName = genSshRepoName host dir
        -- dummy values, cannot determine from url
        , sshPort = 22
        , needsPubKey = True
        , sshCapabilities = []
        , sshRepoUrl = Nothing
        }
      where
        (user, host) = if '@' `elem` userhost
            then separate (== '@') userhost
            else ("", userhost)
    -- rsync-style url of the form user@host:dir
    fromrsync s
        | not (rsyncUrlIsShell u) = Nothing
        | otherwise = mkdata $ separate (== ':') s
    -- the directory starts at the first "/" of a ssh url
    fromssh = mkdata . break (== '/')
{- Generates a git remote name, like host_dir or host -}
-- | Generates a git remote name, like host_dir or host,
-- sanitised by makeLegalName.
genSshRepoName :: String -> FilePath -> String
genSshRepoName host dir = makeLegalName $
    if null dir
        then host
        else host ++ "_" ++ dir
{- The output of ssh, including both stdout and stderr. -}
sshTranscript :: [String] -> SshHost -> String -> (Maybe String) -> IO (String, Bool)
-- Runs ssh with the given options, host and remote command, optionally
-- feeding it input; the pair is the transcript and success flag from
-- processTranscript.
sshTranscript opts sshhost cmd input = processTranscript "ssh"
    (opts ++ [fromSshHost sshhost, cmd]) input
{- Ensure that the ssh public key doesn't include any ssh options, like
- command=foo, or other weirdness.
-
- The returned version of the key has its comment removed.
-}
-- | Checks that a ssh public key is a single line of the expected
-- "ssh-<keytype> <key> [comment]" shape; the accepted key is returned
-- with its comment removed.
validateSshPubKey :: SshPubKey -> Either String SshPubKey
validateSshPubKey pubkey = case lines pubkey of
    [l] -> check (words l)
    _ -> Left "too many lines in ssh public key"
  where
    check (prefix:key:_) = checkprefix prefix (unwords [prefix, key])
    check _ = err "wrong number of words in ssh public key"
    err msg = Left $ unwords [msg, pubkey]
    checkprefix prefix validpubkey
        | ssh == "ssh" && all isAlphaNum keytype = Right validpubkey
        | otherwise = err "bad ssh public key prefix"
      where
        (ssh, keytype) = separate (== '-') prefix
-- | Appends the public key to ~/.ssh/authorized_keys by running the
-- shell command built by addAuthorizedKeysCommand; returns success.
addAuthorizedKeys :: Bool -> FilePath -> SshPubKey -> IO Bool
addAuthorizedKeys gitannexshellonly dir pubkey = boolSystem "sh"
    [ Param "-c" , Param $ addAuthorizedKeysCommand gitannexshellonly dir pubkey ]
{- Should only be used within the same process that added the line;
 - the layout of the line is not kept stable across versions. -}
removeAuthorizedKeys :: Bool -> FilePath -> SshPubKey -> IO ()
removeAuthorizedKeys gitannexshellonly dir pubkey = do
    let keyline = authorizedKeysLine gitannexshellonly dir pubkey
    sshdir <- sshDir
    let keyfile = sshdir </> "authorized_keys"
    -- Read strictly so the same file can be rewritten below.
    ls <- lines <$> readFileStrict keyfile
    viaTmp writeSshConfig keyfile $ unlines $ filter (/= keyline) ls
{- Implemented as a shell command, so it can be run on remote servers over
 - ssh.
 -
 - The ~/.ssh/git-annex-shell wrapper script is created if not already
 - present.
 -}
addAuthorizedKeysCommand :: Bool -> FilePath -> SshPubKey -> String
addAuthorizedKeysCommand gitannexshellonly dir pubkey = intercalate "&&"
    [ "mkdir -p ~/.ssh"
    , intercalate "; "
        [ "if [ ! -e " ++ wrapper ++ " ]"
        , "then (" ++ intercalate ";" (map echoval script) ++ ") > " ++ wrapper
        , "fi"
        ]
    , "chmod 700 " ++ wrapper
    , "touch ~/.ssh/authorized_keys"
    , "chmod 600 ~/.ssh/authorized_keys"
    , unwords
        [ "echo"
        , shellEscape $ authorizedKeysLine gitannexshellonly dir pubkey
        , ">>~/.ssh/authorized_keys"
        ]
    ]
  where
    echoval v = "echo " ++ shellEscape v
    wrapper = "~/.ssh/git-annex-shell"
    -- Contents of the wrapper script: runs git-annex-shell on either
    -- the original ssh command or the script's own arguments.
    script =
        [ shebang_portable
        , "set -e"
        , "if [ \"x$SSH_ORIGINAL_COMMAND\" != \"x\" ]; then"
        , runshell "$SSH_ORIGINAL_COMMAND"
        , "else"
        , runshell "$@"
        , "fi"
        ]
    runshell var = "exec git-annex-shell -c \"" ++ var ++ "\""
-- | The authorized_keys line for the public key.  When gitannexshellonly
-- is set, the key is limited to running the ~/.ssh/git-annex-shell
-- wrapper in the given directory, with forwarding and pty disabled.
authorizedKeysLine :: Bool -> FilePath -> SshPubKey -> String
authorizedKeysLine gitannexshellonly dir pubkey
    | gitannexshellonly = limitcommand ++ pubkey
    {- TODO: Locking down rsync is difficult, requiring a rather
     - long perl script. -}
    | otherwise = pubkey
  where
    limitcommand = "command=\"env GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "
{- Generates a ssh key pair. -}
genSshKeyPair :: IO SshKeyPair
genSshKeyPair = withTmpDir "git-annex-keygen" $ \dir -> do
    ok <- boolSystem "ssh-keygen"
        [ Param "-P", Param "" -- no password
        , Param "-f", File $ dir </> "key"
        ]
    unless ok $
        error "ssh-keygen failed"
    {- Read the key files strictly: withTmpDir deletes the directory as
     - soon as this action returns, so a lazy readFile could leave the
     - contents unread (and its open handles interfere with the cleanup
     - on some platforms). -}
    SshKeyPair
        <$> readFileStrict (dir </> "key.pub")
        <*> readFileStrict (dir </> "key")
{- Installs a ssh key pair, and sets up ssh config with a mangled hostname
 - that will enable use of the key. This way we avoid changing the user's
 - regular ssh experience at all. Returns a modified SshData containing the
 - mangled hostname.
 -
 - Note that the key files are put in ~/.ssh/git-annex/, rather than directly
 - in ssh because of an **INSANE** behavior of gnome-keyring: It loads
 - ~/.ssh/ANYTHING.pub, and uses them indiscriminately. But using this key
 - for a normal login to the server will force git-annex-shell to run,
 - and locks the user out. Luckily, it does not recurse into subdirectories.
 -
 - Similarly, IdentitiesOnly is set in the ssh config to prevent the
 - ssh-agent from forcing use of a different key.
 -
 - Force strict host key checking to avoid repeated prompts
 - when git-annex and git try to access the remote, if its
 - host key has changed.
 -}
installSshKeyPair :: SshKeyPair -> SshData -> IO SshData
-- Writes the key files (unless already present) and adds a ssh config
-- stanza for the mangled hostname pointing at the private key.
installSshKeyPair sshkeypair sshdata = do
    sshdir <- sshDir
    createDirectoryIfMissing True $ parentDir $ sshdir </> sshPrivKeyFile sshdata
    unlessM (doesFileExist $ sshdir </> sshPrivKeyFile sshdata) $
        -- writeFileProtected presumably restricts the private key's
        -- file mode -- see Utility.FileMode.
        writeFileProtected (sshdir </> sshPrivKeyFile sshdata) (sshPrivKey sshkeypair)
    unlessM (doesFileExist $ sshdir </> sshPubKeyFile sshdata) $
        writeFile (sshdir </> sshPubKeyFile sshdata) (sshPubKey sshkeypair)
    setSshConfig sshdata
        [ ("IdentityFile", "~/.ssh/" ++ sshPrivKeyFile sshdata)
        , ("IdentitiesOnly", "yes")
        , ("StrictHostKeyChecking", "yes")
        ]
-- | Path, relative to the .ssh directory, of the private key file
-- used for this remote.
sshPrivKeyFile :: SshData -> FilePath
sshPrivKeyFile sshdata = subdir </> keyfile
  where
    subdir = "git-annex"
    keyfile = "key." ++ mangleSshHostName sshdata
-- | Path of the public key file: the private key file plus ".pub".
sshPubKeyFile :: SshData -> FilePath
sshPubKeyFile = (++ ".pub") . sshPrivKeyFile
{- Generates and installs a new ssh key pair if one is not already
 - installed. Returns the modified SshData that will use the key pair,
 - and the key pair. -}
setupSshKeyPair :: SshData -> IO (SshData, SshKeyPair)
setupSshKeyPair sshdata = do
    sshdir <- sshDir
    -- Reuse existing key files when both halves are present.
    mprivkey <- catchMaybeIO $ readFile (sshdir </> sshPrivKeyFile sshdata)
    mpubkey <- catchMaybeIO $ readFile (sshdir </> sshPubKeyFile sshdata)
    keypair <- case (mprivkey, mpubkey) of
        (Just privkey, Just pubkey) -> return $ SshKeyPair
            { sshPubKey = pubkey
            , sshPrivKey = privkey
            }
        _ -> genSshKeyPair
    sshdata' <- installSshKeyPair keypair sshdata
    return (sshdata', keypair)
{- Fixes git-annex ssh key pairs configured in .ssh/config
 - by old versions to set IdentitiesOnly.
 -
 - Strategy: Search for IdentityFile lines with key.git-annex
 - in their names. These are for git-annex ssh key pairs.
 - Add the IdentitiesOnly line immediately after them, if not already
 - present.
 -}
fixSshKeyPairIdentitiesOnly :: IO ()
fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
  where
    -- go accumulates processed lines in reverse order in c.
    go c [] = reverse c
    -- Last line of the file: nothing follows, so always insert.
    go c (l:[])
        | all (`isInfixOf` l) indicators = go (fixedline l:l:c) []
        | otherwise = go (l:c) []
    go c (l:next:rest)
        -- Insert only when the next line does not already set it.
        | all (`isInfixOf` l) indicators && not ("IdentitiesOnly" `isInfixOf` next) =
            go (fixedline l:l:c) (next:rest)
        | otherwise = go (l:c) (next:rest)
    indicators = ["IdentityFile", "key.git-annex"]
    -- Copy the indentation of the matched IdentityFile line.
    fixedline tmpl = takeWhile isSpace tmpl ++ "IdentitiesOnly yes"
{- Add StrictHostKeyChecking to any ssh config stanzas that were written
 - by git-annex. -}
fixUpSshRemotes :: IO ()
fixUpSshRemotes = modifyUserSshConfig (map go)
  where
    -- Only touch Host stanzas with the git-annex mangled-name prefix.
    go c@(HostConfig h _)
        | "git-annex-" `isPrefixOf` h = fixupconfig c
        | otherwise = c
    go other = other
    fixupconfig c = case findHostConfigKey c "StrictHostKeyChecking" of
        Nothing -> addToHostConfig c "StrictHostKeyChecking" "yes"
        Just _ -> c
{- Sets up a ssh config with a mangled hostname.
 - Returns a modified SshData containing the mangled hostname. -}
setSshConfig :: SshData -> [(String, String)] -> IO SshData
setSshConfig sshdata config = do
    sshdir <- sshDir
    createDirectoryIfMissing True sshdir
    let configfile = sshdir </> "config"
    -- Write the stanza only once; its presence is detected by looking
    -- for the mangled hostname anywhere in the config file.
    unlessM (catchBoolIO $ isInfixOf mangledhost <$> readFile configfile) $ do
        appendFile configfile $ unlines $
            [ ""
            , "# Added automatically by git-annex"
            , "Host " ++ mangledhost
            ] ++ map (\(k, v) -> "\t" ++ k ++ " " ++ v)
                (settings ++ config)
        setSshConfigMode configfile
    return $ sshdata
        { sshHostName = T.pack mangledhost
        -- Keep any explicit repo url pointing at the mangled host.
        , sshRepoUrl = replace orighost mangledhost
            <$> sshRepoUrl sshdata
        }
  where
    orighost = T.unpack $ sshHostName sshdata
    mangledhost = mangleSshHostName sshdata
    settings =
        [ ("Hostname", orighost)
        , ("Port", show $ sshPort sshdata)
        ]
{- This hostname is specific to a given repository on the ssh host,
 - so it is based on the real hostname, the username, and the directory.
 -
 - The mangled hostname has the form:
 - "git-annex-realhostname-username_port_dir"
 - Note that "-" is only used in the realhostname and as a separator;
 - this is necessary to allow unMangleSshHostName to work.
 -
 - Unusual characters are url encoded, but using "." rather than "%"
 - (the latter has special meaning to ssh).
 -
 - In the username and directory, unusual characters are any
 - non-alphanumerics, other than "_"
 -
 - The real hostname is not normally encoded at all. This is done for
 - backwards compatability and to avoid unnecessary ugliness in the
 - filename. However, when it contains special characters
 - (notably ":" which cannot be used on some filesystems), it is url
 - encoded. To indicate it was encoded, the mangled hostname
 - has the form
 - "git-annex-.encodedhostname-username_port_dir"
 -}
mangleSshHostName :: SshData -> String
mangleSshHostName sshdata = intercalate "-"
    [ "git-annex"
    , escapehostname (T.unpack (sshHostName sshdata))
    , escape extra
    ]
  where
    -- username, port and directory, joined with "_"
    extra = intercalate "_" $ map T.unpack $ catMaybes
        [ sshUserName sshdata
        , Just $ T.pack $ show $ sshPort sshdata
        , Just $ sshDirectory sshdata
        ]
    safe c
        | isAlphaNum c = True
        | c == '_' = True
        | otherwise = False
    -- url encode, with "." standing in for "%" (special to ssh)
    escape s = replace "%" "." $ escapeURIString safe s
    escapehostname s
        | all (\c -> c == '.' || safe c) s = s
        | otherwise = '.' : escape s
{- Extracts the real hostname from a mangled ssh hostname. -}
unMangleSshHostName :: String -> String
-- Inverse of mangleSshHostName; input without the "git-annex-" prefix
-- is returned unchanged.
unMangleSshHostName h = case splitc '-' h of
    ("git":"annex":rest) -> unescape (intercalate "-" (beginning rest))
    _ -> h
  where
    -- A leading "." marks an url encoded hostname ("." encodes "%").
    unescape ('.':s) = unEscapeString (replace "." "%" s)
    unescape s = s
{- Does ssh have known_hosts data for a hostname? -}
knownHost :: Text -> IO Bool
knownHost hostname = do
    sshdir <- sshDir
    ifM (doesFileExist $ sshdir </> "known_hosts")
        ( not . null <$> checkhost
        , return False
        )
  where
    -- ssh-keygen -F can crash on some old known_hosts file
    checkhost = catchDefaultIO "" $
        readProcess "ssh-keygen" ["-F", T.unpack hostname]
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/Assistant/Ssh.hs | haskell | server has git-annex-shell installed
server has git installed
server supports raw rsync access (not only via git-annex-shell)
repo on server is set up already, and ready to accept pushes
ssh -ofoo=bar command-line option
user@host or host
Reverses genSshUrl
dummy values, cannot determine from url
The output of ssh, including both stdout and stderr.
Ensure that the ssh public key doesn't include any ssh options, like
- command=foo, or other weirdness.
-
- The returned version of the key has its comment removed.
Should only be used within the same process that added the line;
- the layout of the line is not kepy stable across versions.
Implemented as a shell command, so it can be run on remote servers over
- ssh.
-
- The ~/.ssh/git-annex-shell wrapper script is created if not already
- present.
TODO: Locking down rsync is difficult, requiring a rather
- long perl script.
Generates a ssh key pair.
no password
Add StrictHostKeyChecking to any ssh config stanzas that were written
- by git-annex.
This hostname is specific to a given repository on the ssh host,
- so it is based on the real hostname, the username, and the directory.
-
- The mangled hostname has the form:
- "git-annex-realhostname-username_port_dir"
- Note that "-" is only used in the realhostname and as a separator;
- this is necessary to allow unMangleSshHostName to work.
-
- Unusual characters are url encoded, but using "." rather than "%"
- (the latter has special meaning to ssh).
-
- In the username and directory, unusual characters are any
- non-alphanumerics, other than "_"
-
- The real hostname is not normally encoded at all. This is done for
- backwards compatability and to avoid unnecessary ugliness in the
- filename. However, when it contains special characters
- (notably ":" which cannot be used on some filesystems), it is url
- encoded. To indicate it was encoded, the mangled hostname
- has the form
- "git-annex-.encodedhostname-username_port_dir"
Extracts the real hostname from a mangled ssh hostname. | git - annex assistant ssh utilities
-
- Copyright 2012 - 2013 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2012-2013 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Ssh where
import Annex.Common
import Utility.Tmp
import Utility.Shell
import Utility.Rsync
import Utility.FileMode
import Utility.SshConfig
import Git.Remote
import Utility.SshHost
import Data.Text (Text)
import qualified Data.Text as T
import Data.Char
import Network.URI
data SshData = SshData
{ sshHostName :: Text
, sshUserName :: Maybe Text
, sshDirectory :: Text
, sshRepoName :: String
, sshPort :: Int
, needsPubKey :: Bool
, sshCapabilities :: [SshServerCapability]
, sshRepoUrl :: Maybe String
}
deriving (Read, Show, Eq)
data SshServerCapability
deriving (Read, Show, Eq)
hasCapability :: SshData -> SshServerCapability -> Bool
hasCapability d c = c `elem` sshCapabilities d
addCapability :: SshData -> SshServerCapability -> SshData
addCapability d c = d { sshCapabilities = c : sshCapabilities d }
onlyCapability :: SshData -> SshServerCapability -> Bool
onlyCapability d c = all (== c) (sshCapabilities d)
type SshPubKey = String
type SshPrivKey = String
data SshKeyPair = SshKeyPair
{ sshPubKey :: SshPubKey
, sshPrivKey :: SshPrivKey
}
instance Show SshKeyPair where
show = sshPubKey
sshOpt :: String -> String -> String
sshOpt k v = concat ["-o", k, "=", v]
genSshHost :: Text -> Maybe Text -> SshHost
genSshHost host user = either error id $ mkSshHost $
maybe "" (\v -> T.unpack v ++ "@") user ++ T.unpack host
Generates a ssh or rsync url from a SshData .
genSshUrl :: SshData -> String
genSshUrl sshdata = case sshRepoUrl sshdata of
Just repourl -> repourl
Nothing -> addtrailingslash $ T.unpack $ T.concat $
if (onlyCapability sshdata RsyncCapable)
then [u, h, T.pack ":", sshDirectory sshdata]
else [T.pack "ssh://", u, h, d]
where
u = maybe (T.pack "") (\v -> T.concat [v, T.pack "@"]) $ sshUserName sshdata
h = sshHostName sshdata
d
| T.pack "/" `T.isPrefixOf` sshDirectory sshdata = sshDirectory sshdata
| T.pack "~/" `T.isPrefixOf` sshDirectory sshdata = T.concat [T.pack "/", sshDirectory sshdata]
| otherwise = T.concat [T.pack "/~/", sshDirectory sshdata]
addtrailingslash s
| "/" `isSuffixOf` s = s
| otherwise = s ++ "/"
parseSshUrl :: String -> Maybe SshData
parseSshUrl u
| "ssh://" `isPrefixOf` u = fromssh (drop (length "ssh://") u)
| otherwise = fromrsync u
where
mkdata (userhost, dir) = Just $ SshData
{ sshHostName = T.pack host
, sshUserName = if null user then Nothing else Just $ T.pack user
, sshDirectory = T.pack dir
, sshRepoName = genSshRepoName host dir
, sshPort = 22
, needsPubKey = True
, sshCapabilities = []
, sshRepoUrl = Nothing
}
where
(user, host) = if '@' `elem` userhost
then separate (== '@') userhost
else ("", userhost)
fromrsync s
| not (rsyncUrlIsShell u) = Nothing
| otherwise = mkdata $ separate (== ':') s
fromssh = mkdata . break (== '/')
Generates a git remote name , like host_dir or host
genSshRepoName :: String -> FilePath -> String
genSshRepoName host dir
| null dir = makeLegalName host
| otherwise = makeLegalName $ host ++ "_" ++ dir
sshTranscript :: [String] -> SshHost -> String -> (Maybe String) -> IO (String, Bool)
sshTranscript opts sshhost cmd input = processTranscript "ssh"
(opts ++ [fromSshHost sshhost, cmd]) input
validateSshPubKey :: SshPubKey -> Either String SshPubKey
validateSshPubKey pubkey
| length (lines pubkey) == 1 = check $ words pubkey
| otherwise = Left "too many lines in ssh public key"
where
check (prefix:key:_) = checkprefix prefix (unwords [prefix, key])
check _ = err "wrong number of words in ssh public key"
err msg = Left $ unwords [msg, pubkey]
checkprefix prefix validpubkey
| ssh == "ssh" && all isAlphaNum keytype = Right validpubkey
| otherwise = err "bad ssh public key prefix"
where
(ssh, keytype) = separate (== '-') prefix
addAuthorizedKeys :: Bool -> FilePath -> SshPubKey -> IO Bool
addAuthorizedKeys gitannexshellonly dir pubkey = boolSystem "sh"
[ Param "-c" , Param $ addAuthorizedKeysCommand gitannexshellonly dir pubkey ]
{- Removes a public key from ~/.ssh/authorized_keys, by filtering out
 - exactly the line that authorizedKeysLine generates for it. -}
removeAuthorizedKeys :: Bool -> FilePath -> SshPubKey -> IO ()
removeAuthorizedKeys gitannexshellonly dir pubkey = do
  let keyline = authorizedKeysLine gitannexshellonly dir pubkey
  sshdir <- sshDir
  let keyfile = sshdir </> "authorized_keys"
  -- read strictly before rewriting the same file via a temp file
  ls <- lines <$> readFileStrict keyfile
  viaTmp writeSshConfig keyfile $ unlines $ filter (/= keyline) ls
{- Shell command string that appends the pubkey line to
 - ~/.ssh/authorized_keys, first making sure the directory, the
 - authorized_keys file, and a ~/.ssh/git-annex-shell wrapper script
 - exist with appropriate permissions. -}
addAuthorizedKeysCommand :: Bool -> FilePath -> SshPubKey -> String
addAuthorizedKeysCommand gitannexshellonly dir pubkey = intercalate "&&"
  [ "mkdir -p ~/.ssh"
  -- install the wrapper script only if not already present
  , intercalate "; "
    [ "if [ ! -e " ++ wrapper ++ " ]"
    , "then (" ++ intercalate ";" (map echoval script) ++ ") > " ++ wrapper
    , "fi"
    ]
  , "chmod 700 " ++ wrapper
  , "touch ~/.ssh/authorized_keys"
  , "chmod 600 ~/.ssh/authorized_keys"
  , unwords
    [ "echo"
    , shellEscape $ authorizedKeysLine gitannexshellonly dir pubkey
    , ">>~/.ssh/authorized_keys"
    ]
  ]
  where
    echoval v = "echo " ++ shellEscape v
    wrapper = "~/.ssh/git-annex-shell"
    -- body of the wrapper script: runs git-annex-shell on whatever
    -- command line ssh passes along
    script =
      [ shebang_portable
      , "set -e"
      , "if [ \"x$SSH_ORIGINAL_COMMAND\" != \"x\" ]; then"
      , runshell "$SSH_ORIGINAL_COMMAND"
      , "else"
      , runshell "$@"
      , "fi"
      ]
    runshell var = "exec git-annex-shell -c \"" ++ var ++ "\""
{- The line that gets added to authorized_keys for a pubkey; when
 - gitannexshellonly, it is prefixed with options restricting the key
 - to only running git-annex-shell. -}
authorizedKeysLine :: Bool -> FilePath -> SshPubKey -> String
authorizedKeysLine gitannexshellonly dir pubkey =
  if gitannexshellonly
    then limitcommand ++ pubkey
    else pubkey
  where
    limitcommand = "command=\"env GIT_ANNEX_SHELL_DIRECTORY="++shellEscape dir++" ~/.ssh/git-annex-shell\",no-agent-forwarding,no-port-forwarding,no-X11-forwarding,no-pty "
{- Generates a passphraseless ssh key pair in a temporary directory,
 - and returns the public and private keys.
 -
 - The opening of the ssh-keygen argument list was lost when this file
 - was mangled; restored here ("-P ''" requests an empty passphrase). -}
genSshKeyPair :: IO SshKeyPair
genSshKeyPair = withTmpDir "git-annex-keygen" $ \dir -> do
  ok <- boolSystem "ssh-keygen"
    [ Param "-P", Param "" -- no password
    , Param "-f", File $ dir </> "key"
    ]
  unless ok $
    error "ssh-keygen failed"
  SshKeyPair
    <$> readFile (dir </> "key.pub")
    <*> readFile (dir </> "key")
{- Installs a ssh key pair, and sets up ssh config with a mangled hostname
 - that will enable use of the key. This way we avoid changing the user's
 - regular ssh experience at all. Returns a modified SshData containing the
 - mangled hostname.
 -
 - Note that the key files are put in ~/.ssh/git-annex/, rather than directly
 - in ssh because of an **INSANE** behavior of gnome-keyring: It loads
 - ~/.ssh/ANYTHING.pub, and uses them indiscriminately. But using this key
 - for a normal login to the server will force git-annex-shell to run,
 - and locks the user out. Luckily, it does not recurse into subdirectories.
 -
 - Similarly, IdentitiesOnly is set in the ssh config to prevent the
 - ssh-agent from forcing use of a different key.
 -
 - Force strict host key checking to avoid repeated prompts
 - when git-annex and git try to access the remote, if its
 - host key has changed.
 -}
installSshKeyPair :: SshKeyPair -> SshData -> IO SshData
installSshKeyPair sshkeypair sshdata = do
  sshdir <- sshDir
  createDirectoryIfMissing True $ parentDir $ sshdir </> sshPrivKeyFile sshdata
  -- never overwrite existing key files; the private key gets
  -- restrictive permissions via writeFileProtected
  unlessM (doesFileExist $ sshdir </> sshPrivKeyFile sshdata) $
    writeFileProtected (sshdir </> sshPrivKeyFile sshdata) (sshPrivKey sshkeypair)
  unlessM (doesFileExist $ sshdir </> sshPubKeyFile sshdata) $
    writeFile (sshdir </> sshPubKeyFile sshdata) (sshPubKey sshkeypair)
  setSshConfig sshdata
    [ ("IdentityFile", "~/.ssh/" ++ sshPrivKeyFile sshdata)
    , ("IdentitiesOnly", "yes")
    , ("StrictHostKeyChecking", "yes")
    ]
-- Filename of the private key for a host, relative to ~/.ssh
-- (kept in a git-annex subdirectory).
sshPrivKeyFile :: SshData -> FilePath
sshPrivKeyFile sshdata = "git-annex" </> "key." ++ mangleSshHostName sshdata
-- Filename of the public key, relative to ~/.ssh.
sshPubKeyFile :: SshData -> FilePath
sshPubKeyFile = (++ ".pub") . sshPrivKeyFile
{- Generates and installs a new ssh key pair if one is not already
 - installed. Returns the modified SshData that will use the key pair,
 - and the key pair. -}
setupSshKeyPair :: SshData -> IO (SshData, SshKeyPair)
setupSshKeyPair sshdata = do
  sshdir <- sshDir
  -- reuse the existing key pair when both halves are on disk;
  -- otherwise generate a fresh one
  mprivkey <- catchMaybeIO $ readFile (sshdir </> sshPrivKeyFile sshdata)
  mpubkey <- catchMaybeIO $ readFile (sshdir </> sshPubKeyFile sshdata)
  keypair <- case (mprivkey, mpubkey) of
    (Just privkey, Just pubkey) -> return $ SshKeyPair
      { sshPubKey = pubkey
      , sshPrivKey = privkey
      }
    _ -> genSshKeyPair
  sshdata' <- installSshKeyPair keypair sshdata
  return (sshdata', keypair)
{- Fixes git-annex ssh key pairs configured in ~/.ssh/config
 - by old versions to set IdentitiesOnly.
 -
 - Strategy: Search for IdentityFile lines with key.git-annex
 - in their names. These are for git-annex ssh key pairs.
 - Add the IdentitiesOnly line immediately after them, if not already
 - present.
 -}
fixSshKeyPairIdentitiesOnly :: IO ()
fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
  where
    -- c accumulates processed lines in reverse order
    go c [] = reverse c
    go c (l:[])
      | all (`isInfixOf` l) indicators = go (fixedline l:l:c) []
      | otherwise = go (l:c) []
    go c (l:next:rest)
      -- insert the fix only when the next line doesn't already have it
      | all (`isInfixOf` l) indicators && not ("IdentitiesOnly" `isInfixOf` next) =
        go (fixedline l:l:c) (next:rest)
      | otherwise = go (l:c) (next:rest)
    indicators = ["IdentityFile", "key.git-annex"]
    -- reuse the IdentityFile line's leading indentation
    fixedline tmpl = takeWhile isSpace tmpl ++ "IdentitiesOnly yes"
{- Fixes up "git-annex-*" Host stanzas in the user's ssh config,
 - adding StrictHostKeyChecking where it is not already set. -}
fixUpSshRemotes :: IO ()
fixUpSshRemotes = modifyUserSshConfig (map go)
  where
    -- only stanzas created by git-annex are touched
    go c@(HostConfig h _)
      | "git-annex-" `isPrefixOf` h = fixupconfig c
      | otherwise = c
    go other = other
    fixupconfig c = case findHostConfigKey c "StrictHostKeyChecking" of
      Nothing -> addToHostConfig c "StrictHostKeyChecking" "yes"
      Just _ -> c
{- Sets up a ssh config with a mangled hostname.
 - Returns a modified SshData containing the mangled hostname. -}
setSshConfig :: SshData -> [(String, String)] -> IO SshData
setSshConfig sshdata config = do
  sshdir <- sshDir
  createDirectoryIfMissing True sshdir
  let configfile = sshdir </> "config"
  -- add the Host stanza only once; a simple substring check suffices
  -- since the mangled name is unique to this remote
  unlessM (catchBoolIO $ isInfixOf mangledhost <$> readFile configfile) $ do
    appendFile configfile $ unlines $
      [ ""
      , "# Added automatically by git-annex"
      , "Host " ++ mangledhost
      ] ++ map (\(k, v) -> "\t" ++ k ++ " " ++ v)
        (settings ++ config)
    setSshConfigMode configfile
  return $ sshdata
    { sshHostName = T.pack mangledhost
    -- keep any stored repo url pointing at the mangled host too
    , sshRepoUrl = replace orighost mangledhost
      <$> sshRepoUrl sshdata
    }
  where
    orighost = T.unpack $ sshHostName sshdata
    mangledhost = mangleSshHostName sshdata
    -- always map the mangled name back to the real host and port
    settings =
      [ ("Hostname", orighost)
      , ("Port", show $ sshPort sshdata)
      ]
{- Produces the "git-annex-<host>-<extra>" name used for the ssh config
 - Host stanza, encoding the username, port, and directory so distinct
 - remotes on the same host get distinct stanzas. -}
mangleSshHostName :: SshData -> String
mangleSshHostName sshdata = intercalate "-"
  [ "git-annex"
  , escapehostname (T.unpack (sshHostName sshdata))
  , escape extra
  ]
  where
    extra = intercalate "_" $ map T.unpack $ catMaybes
      [ sshUserName sshdata
      , Just $ T.pack $ show $ sshPort sshdata
      , Just $ sshDirectory sshdata
      ]
    -- characters allowed through unescaped
    safe c
      | isAlphaNum c = True
      | c == '_' = True
      | otherwise = False
    -- '%' from uri-escaping is replaced with '.', since '%' is not
    -- wanted in the ssh host name
    escape s = replace "%" "." $ escapeURIString safe s
    -- a plain hostname passes through; an escaped one is marked with
    -- a leading '.' so unMangleSshHostName can recognize it
    escapehostname s
      | all (\c -> c == '.' || safe c) s = s
      | otherwise = '.' : escape s
{- Extracts the original hostname from a name produced by
 - mangleSshHostName; anything that doesn't look mangled passes
 - through unchanged. -}
unMangleSshHostName :: String -> String
unMangleSshHostName h = case splitc '-' h of
  -- drop the trailing "extra" component; 'beginning' keeps the rest
  ("git":"annex":rest) -> unescape (intercalate "-" (beginning rest))
  _ -> h
  where
    -- a leading '.' marks an escaped hostname (see mangleSshHostName)
    unescape ('.':s) = unEscapeString (replace "." "%" s)
    unescape s = s
{- Does ssh have known_hosts data for a hostname? -}
{- Checks ~/.ssh/known_hosts for an entry for the hostname.
 -
 - The line below had lost its comment marker when this file was
 - mangled, which made the where clause a syntax error; restored. -}
knownHost :: Text -> IO Bool
knownHost hostname = do
  sshdir <- sshDir
  ifM (doesFileExist $ sshdir </> "known_hosts")
    ( not . null <$> checkhost
    , return False
    )
  where
    -- ssh-keygen -F can crash on some old known_hosts file
    checkhost = catchDefaultIO "" $
      readProcess "ssh-keygen" ["-F", T.unpack hostname]
|
8001b8260348d04751b9fe80de6d1600d7abc253c44493c3135ce84b45eb940d | jyh/metaprl | mfir_tr_atom_base.ml | doc <:doc<
@module[Mfir_tr_atom_base]
The @tt[Mfir_tr_atom_base] module defines the argument types
and result types of the FIR operators.
@docoff
------------------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages. Additional
information about the system is available at
http://metaprl.org/
Copyright (C) 2002 Brian Emre Aydemir, Caltech
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Brian Emre Aydemir
@email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Mfir_ty
extends Mfir_exp
extends Mfir_option
extends Mfir_sequent
(**************************************************************************
* Declarations.
**************************************************************************)
doc <:doc<
@terms
The term @tt[res_type] returns the result type of an operator @tt[op].
The terms @tt[arg1_type] and @tt[arg2_type] return the types of
first and second arguments of an operator @tt[op] (@tt[arg2_type] is
undefined if @tt[op] is a unary operator).
>>
(* Result type of an operator. *)
declare res_type{ 'op }
(* Type of an operator's first argument. *)
declare arg1_type{ 'op }
(* Type of an operator's second argument (binary operators only). *)
declare arg2_type{ 'op }
doc docoff
open Top_conversionals
(**************************************************************************
* Display forms.
**************************************************************************)
(* Display the three type-query terms as "name(op)". *)
dform res_type_df : except_mode[src] ::
   res_type{ 'op } =
   bf["res_type"] `"(" slot{'op} `")"

dform arg1_type_df : except_mode[src] ::
   arg1_type{ 'op } =
   bf["arg1_type"] `"(" slot{'op} `")"

dform arg2_type_df : except_mode[src] ::
   arg2_type{ 'op } =
   bf["arg2_type"] `"(" slot{'op} `")"
(**************************************************************************
* Rewrites.
**************************************************************************)
doc <:doc<
@rewrites
Rewrites are used to define the argument and result types of the
FIR unary and binary operators. The types may not be well-formed
if the original operator is not well-formed. We omit an explicit
listing of these rewrites.
>>
doc docoff
(*
 * Unary enumeration operator: notEnumOp[i] : tyEnum[i] -> tyEnum[i].
 *)
prim_rw reduce_res_type_notEnumOp {| reduce |} :
   res_type{ notEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_arg1_type_notEnumOp {| reduce |} :
   arg1_type{ notEnumOp[i:n] } <-->
   tyEnum[i:n]
(*
 * Unary native-integer operators: tyInt -> tyInt.
 *)
prim_rw reduce_res_type_uminusIntOp {| reduce |} :
   res_type{ uminusIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_uminusIntOp {| reduce |} :
   arg1_type{ uminusIntOp } <-->
   tyInt

prim_rw reduce_res_type_notIntOp {| reduce |} :
   res_type{ notIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_notIntOp {| reduce |} :
   arg1_type{ notIntOp } <-->
   tyInt

prim_rw reduce_res_type_absIntOp {| reduce |} :
   res_type{ absIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_absIntOp {| reduce |} :
   arg1_type{ absIntOp } <-->
   tyInt
(*
 * Unary raw-integer operators, indexed by precision p and signedness s:
 * tyRawInt[p, s] -> tyRawInt[p, s].
 *)
prim_rw reduce_res_type_uminusRawIntOp {| reduce |} :
   res_type{ uminusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_uminusRawIntOp {| reduce |} :
   arg1_type{ uminusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_notRawIntOp {| reduce |} :
   res_type{ notRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_notRawIntOp {| reduce |} :
   arg1_type{ notRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]
(*
 * Unary floating-point operators, indexed by precision p:
 * tyFloat[p] -> tyFloat[p].
 *)
prim_rw reduce_res_type_uminusFloatOp {| reduce |} :
   res_type{ uminusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_uminusFloatOp {| reduce |} :
   arg1_type{ uminusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_absFloatOp {| reduce |} :
   res_type{ absFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_absFloatOp {| reduce |} :
   arg1_type{ absFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_sinFloatOp {| reduce |} :
   res_type{ sinFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_sinFloatOp {| reduce |} :
   arg1_type{ sinFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_cosFloatOp {| reduce |} :
   res_type{ cosFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_cosFloatOp {| reduce |} :
   arg1_type{ cosFloatOp[p:n] } <-->
   tyFloat[p:n]

(* NOTE(review): these two rewrite names use "tanFloatop" (lowercase
 * "op"), inconsistent with every other rule here. Renaming would
 * change the module's exported rewrite names, so they are left as is. *)
prim_rw reduce_res_type_tanFloatop {| reduce |} :
   res_type{ tanFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_tanFloatop {| reduce |} :
   arg1_type{ tanFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_asinFloatOp {| reduce |} :
   res_type{ asinFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_asinFloatOp {| reduce |} :
   arg1_type{ asinFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_atanFloatOp {| reduce |} :
   res_type{ atanFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_atanFloatOp {| reduce |} :
   arg1_type{ atanFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_sinhFloatOp {| reduce |} :
   res_type{ sinhFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_sinhFloatOp {| reduce |} :
   arg1_type{ sinhFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_coshFloatOp {| reduce |} :
   res_type{ coshFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_coshFloatOp {| reduce |} :
   arg1_type{ coshFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_tanhFloatOp {| reduce |} :
   res_type{ tanhFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_tanhFloatOp {| reduce |} :
   arg1_type{ tanhFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_expFloatOp {| reduce |} :
   res_type{ expFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_expFloatOp {| reduce |} :
   arg1_type{ expFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_logFloatOp {| reduce |} :
   res_type{ logFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_logFloatOp {| reduce |} :
   arg1_type{ logFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_sqrtFloatOp {| reduce |} :
   res_type{ sqrtFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_sqrtFloatOp {| reduce |} :
   arg1_type{ sqrtFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_ceilFloatOp {| reduce |} :
   res_type{ ceilFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_ceilFloatOp {| reduce |} :
   arg1_type{ ceilFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_floorFloatOp {| reduce |} :
   res_type{ floorFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_floorFloatOp {| reduce |} :
   arg1_type{ floorFloatOp[p:n] } <-->
   tyFloat[p:n]
(*
 * Numeric conversion operators.  Each rule pair gives the destination
 * type (res_type) and the source type (arg1_type) of the conversion.
 *)
prim_rw reduce_res_type_intOfFloatOp {| reduce |} :
   res_type{ intOfFloatOp[p:n] } <-->
   tyInt

prim_rw reduce_arg1_type_intOfFloatOp {| reduce |} :
   arg1_type{ intOfFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_intOfRawIntOp {| reduce |} :
   res_type{ intOfRawIntOp[p:n, s:s] } <-->
   tyInt

prim_rw reduce_arg1_type_intOfRawIntOp {| reduce |} :
   arg1_type{ intOfRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_floatOfIntOp {| reduce |} :
   res_type{ floatOfIntOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_floatOfIntOp {| reduce |} :
   arg1_type{ floatOfIntOp[p:n] } <-->
   tyInt

(* floatOfFloatOp[p1, p2] converts from precision p2 to p1. *)
prim_rw reduce_res_type_floatOfFloatOp {| reduce |} :
   res_type{ floatOfFloatOp[p1:n, p2:n] } <-->
   tyFloat[p1:n]

prim_rw reduce_arg1_type_floatOfFloatOp {| reduce |} :
   arg1_type{ floatOfFloatOp[p1:n, p2:n] } <-->
   tyFloat[p2:n]

prim_rw reduce_res_type_floatOfRawIntOp {| reduce |} :
   res_type{ floatOfRawIntOp[fp:n, rp:n, s:s] } <-->
   tyFloat[fp:n]

prim_rw reduce_arg1_type_floatOfRawIntOp {| reduce |} :
   arg1_type{ floatOfRawIntOp[fp:n, rp:n, s:s] } <-->
   tyRawInt[rp:n, s:s]

prim_rw reduce_res_type_rawIntOfIntOp {| reduce |} :
   res_type{ rawIntOfIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_rawIntOfIntOp {| reduce |} :
   arg1_type{ rawIntOfIntOp[p:n, s:s] } <-->
   tyInt

prim_rw reduce_res_type_rawIntOfEnumOp {| reduce |} :
   res_type{ rawIntOfEnumOp[p:n, s:s, i:n] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_rawIntOfEnumOp {| reduce |} :
   arg1_type{ rawIntOfEnumOp[p:n, s:s, i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_res_type_rawIntOfFloatOp {| reduce |} :
   res_type{ rawIntOfFloatOp[rp:n, s:s, fp:n] } <-->
   tyRawInt[rp:n, s:s]

prim_rw reduce_arg1_type_rawIntOfFloatOp {| reduce |} :
   arg1_type{ rawIntOfFloatOp[rp:n, s:s, fp:n] } <-->
   tyFloat[fp:n]

(* rawIntOfRawIntOp[dp, ds, sp, ss] converts (sp, ss) to (dp, ds). *)
prim_rw reduce_res_type_rawIntOfRawIntOp {| reduce |} :
   res_type{ rawIntOfRawIntOp[dp:n, ds:s, sp:n, ss:s] } <-->
   tyRawInt[dp:n, ds:s]

prim_rw reduce_arg1_type_rawIntOfRawIntOp {| reduce |} :
   arg1_type{ rawIntOfRawIntOp[dp:n, ds:s, sp:n, ss:s] } <-->
   tyRawInt[sp:n, ss:s]
(*
 * Aggregate coercion operators (dependent tuples, unions, frames).
 *)
prim_rw reduce_res_type_dtupleOfDTupleOp {| reduce |} :
   res_type{ dtupleOfDTupleOp{ 'tv; 'mtyl } } <-->
   tyDTuple{ 'tv; none }

prim_rw reduce_arg1_type_dtupleOfDTupleOp {| reduce |} :
   arg1_type{ dtupleOfDTupleOp{ 'tv; 'mtyl } } <-->
   tyDTuple{ 'tv; some{ 'mtyl } }

prim_rw reduce_res_type_unionOfUnionOp {| reduce |} :
   res_type{ unionOfUnionOp{ 'tv; 'tyl; 's1; 's2 } } <-->
   tyUnion{ 'tv; 'tyl; 's1 }

prim_rw reduce_arg1_type_unionOfUnionOp {| reduce |} :
   arg1_type{ unionOfUnionOp{ 'tv; 'tyl; 's1; 's2 } } <-->
   tyUnion{ 'tv; 'tyl; 's2 }

prim_rw reduce_res_type_rawDataOfFrameOp {| reduce |} :
   res_type{ rawDataOfFrameOp{ 'tv; 'tyl } } <-->
   tyRawData

prim_rw reduce_arg1_type_rawDataOfFrameOp {| reduce |} :
   arg1_type{ rawDataOfFrameOp{ 'tv; 'tyl } } <-->
   tyFrame{ 'tv; 'tyl }
(*
 * Binary enumeration operators:
 * tyEnum[i] * tyEnum[i] -> tyEnum[i].
 *)
prim_rw reduce_res_type_andEnumOp {| reduce |} :
   res_type{ andEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_arg1_type_andEnumOp {| reduce |} :
   arg1_type{ andEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_arg2_type_andEnumOp {| reduce |} :
   arg2_type { andEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_res_type_orEnumOp {| reduce |} :
   res_type{ orEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_arg1_type_orEnumOp {| reduce |} :
   arg1_type{ orEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_arg2_type_orEnumOp {| reduce |} :
   arg2_type { orEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_res_type_xorEnumOp {| reduce |} :
   res_type{ xorEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_arg1_type_xorEnumOp {| reduce |} :
   arg1_type{ xorEnumOp[i:n] } <-->
   tyEnum[i:n]

prim_rw reduce_arg2_type_xorEnumOp {| reduce |} :
   arg2_type { xorEnumOp[i:n] } <-->
   tyEnum[i:n]
(*
 * Binary native-integer operators.  Arithmetic/bitwise ops and cmp
 * produce tyInt; comparisons produce tyEnum[2] (booleans).
 *
 * NOTE(review): maxIntOp has rules but there is no minIntOp here,
 * although the raw-integer section defines both max and min — TODO
 * confirm against the FIR operator set whether minIntOp exists.
 *)
prim_rw reduce_res_type_plusIntOp {| reduce |} :
   res_type{ plusIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_plusIntOp {| reduce |} :
   arg1_type{ plusIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_plusIntOp {| reduce |} :
   arg2_type { plusIntOp } <-->
   tyInt

prim_rw reduce_res_type_minusIntOp {| reduce |} :
   res_type{ minusIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_minusIntOp {| reduce |} :
   arg1_type{ minusIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_minusIntOp {| reduce |} :
   arg2_type { minusIntOp } <-->
   tyInt

prim_rw reduce_res_type_mulIntOp {| reduce |} :
   res_type{ mulIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_mulIntOp {| reduce |} :
   arg1_type{ mulIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_mulIntOp {| reduce |} :
   arg2_type { mulIntOp } <-->
   tyInt

prim_rw reduce_res_type_divIntOp {| reduce |} :
   res_type{ divIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_divIntOp {| reduce |} :
   arg1_type{ divIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_divIntOp {| reduce |} :
   arg2_type { divIntOp } <-->
   tyInt

prim_rw reduce_res_type_remIntOp {| reduce |} :
   res_type{ remIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_remIntOp {| reduce |} :
   arg1_type{ remIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_remIntOp {| reduce |} :
   arg2_type { remIntOp } <-->
   tyInt

prim_rw reduce_res_type_lslIntOp {| reduce |} :
   res_type{ lslIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_lslIntOp {| reduce |} :
   arg1_type{ lslIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_lslIntOp {| reduce |} :
   arg2_type { lslIntOp } <-->
   tyInt

prim_rw reduce_res_type_lsrIntOp {| reduce |} :
   res_type{ lsrIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_lsrIntOp {| reduce |} :
   arg1_type{ lsrIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_lsrIntOp {| reduce |} :
   arg2_type { lsrIntOp } <-->
   tyInt

prim_rw reduce_res_type_asrIntOp {| reduce |} :
   res_type{ asrIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_asrIntOp {| reduce |} :
   arg1_type{ asrIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_asrIntOp {| reduce |} :
   arg2_type { asrIntOp } <-->
   tyInt

prim_rw reduce_res_type_andIntOp {| reduce |} :
   res_type{ andIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_andIntOp {| reduce |} :
   arg1_type{ andIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_andIntOp {| reduce |} :
   arg2_type { andIntOp } <-->
   tyInt

prim_rw reduce_res_type_orIntOp {| reduce |} :
   res_type{ orIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_orIntOp {| reduce |} :
   arg1_type{ orIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_orIntOp {| reduce |} :
   arg2_type { orIntOp } <-->
   tyInt

prim_rw reduce_res_type_xorIntOp {| reduce |} :
   res_type{ xorIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_xorIntOp {| reduce |} :
   arg1_type{ xorIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_xorIntOp {| reduce |} :
   arg2_type { xorIntOp } <-->
   tyInt

prim_rw reduce_res_type_maxIntOp {| reduce |} :
   res_type{ maxIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_maxIntOp {| reduce |} :
   arg1_type{ maxIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_maxIntOp {| reduce |} :
   arg2_type { maxIntOp } <-->
   tyInt

(* Comparisons: tyInt * tyInt -> tyEnum[2]. *)
prim_rw reduce_res_type_eqIntOp {| reduce |} :
   res_type{ eqIntOp } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_eqIntOp {| reduce |} :
   arg1_type{ eqIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_eqIntOp {| reduce |} :
   arg2_type { eqIntOp } <-->
   tyInt

prim_rw reduce_res_type_neqIntOp {| reduce |} :
   res_type{ neqIntOp } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_neqIntOp {| reduce |} :
   arg1_type{ neqIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_neqIntOp {| reduce |} :
   arg2_type { neqIntOp } <-->
   tyInt

prim_rw reduce_res_type_ltIntOp {| reduce |} :
   res_type{ ltIntOp } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_ltIntOp {| reduce |} :
   arg1_type{ ltIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_ltIntOp {| reduce |} :
   arg2_type { ltIntOp } <-->
   tyInt

prim_rw reduce_res_type_leIntOp {| reduce |} :
   res_type{ leIntOp } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_leIntOp {| reduce |} :
   arg1_type{ leIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_leIntOp {| reduce |} :
   arg2_type { leIntOp } <-->
   tyInt

prim_rw reduce_res_type_gtIntOp {| reduce |} :
   res_type{ gtIntOp } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_gtIntOp {| reduce |} :
   arg1_type{ gtIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_gtIntOp {| reduce |} :
   arg2_type { gtIntOp } <-->
   tyInt

prim_rw reduce_res_type_geIntOp {| reduce |} :
   res_type{ geIntOp } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_geIntOp {| reduce |} :
   arg1_type{ geIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_geIntOp {| reduce |} :
   arg2_type { geIntOp } <-->
   tyInt

(* Three-way comparison: result is tyInt, not a boolean. *)
prim_rw reduce_res_type_cmpIntOp {| reduce |} :
   res_type{ cmpIntOp } <-->
   tyInt

prim_rw reduce_arg1_type_cmpIntOp {| reduce |} :
   arg1_type{ cmpIntOp } <-->
   tyInt

prim_rw reduce_arg2_type_cmpIntOp {| reduce |} :
   arg2_type { cmpIntOp } <-->
   tyInt
(*
 * Binary raw-integer operators, indexed by precision p and
 * signedness s.  Arithmetic/bitwise ops produce tyRawInt[p, s];
 * comparisons produce tyEnum[2]; cmp produces tyInt.
 *)
prim_rw reduce_res_type_plusRawIntOp {| reduce |} :
   res_type{ plusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_plusRawIntOp {| reduce |} :
   arg1_type{ plusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_plusRawIntOp {| reduce |} :
   arg2_type { plusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_minusRawIntOp {| reduce |} :
   res_type{ minusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_minusRawIntOp {| reduce |} :
   arg1_type{ minusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_minusRawIntOp {| reduce |} :
   arg2_type { minusRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_mulRawIntOp {| reduce |} :
   res_type{ mulRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_mulRawIntOp {| reduce |} :
   arg1_type{ mulRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_mulRawIntOp {| reduce |} :
   arg2_type { mulRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_divRawIntOp {| reduce |} :
   res_type{ divRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_divRawIntOp {| reduce |} :
   arg1_type{ divRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_divRawIntOp {| reduce |} :
   arg2_type { divRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_remRawIntOp {| reduce |} :
   res_type{ remRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_remRawIntOp {| reduce |} :
   arg1_type{ remRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_remRawIntOp {| reduce |} :
   arg2_type { remRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_slRawIntOp {| reduce |} :
   res_type{ slRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_slRawIntOp {| reduce |} :
   arg1_type{ slRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_slRawIntOp {| reduce |} :
   arg2_type { slRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_srRawIntOp {| reduce |} :
   res_type{ srRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_srRawIntOp {| reduce |} :
   arg1_type{ srRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_srRawIntOp {| reduce |} :
   arg2_type { srRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_andRawIntOp {| reduce |} :
   res_type{ andRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_andRawIntOp {| reduce |} :
   arg1_type{ andRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_andRawIntOp {| reduce |} :
   arg2_type { andRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_orRawIntOp {| reduce |} :
   res_type{ orRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_orRawIntOp {| reduce |} :
   arg1_type{ orRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_orRawIntOp {| reduce |} :
   arg2_type { orRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_xorRawIntOp {| reduce |} :
   res_type{ xorRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_xorRawIntOp {| reduce |} :
   arg1_type{ xorRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_xorRawIntOp {| reduce |} :
   arg2_type { xorRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_maxRawIntOp {| reduce |} :
   res_type{ maxRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_maxRawIntOp {| reduce |} :
   arg1_type{ maxRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_maxRawIntOp {| reduce |} :
   arg2_type { maxRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_minRawIntOp {| reduce |} :
   res_type{ minRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg1_type_minRawIntOp {| reduce |} :
   arg1_type{ minRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_minRawIntOp {| reduce |} :
   arg2_type { minRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

(* Comparisons: tyRawInt * tyRawInt -> tyEnum[2]. *)
prim_rw reduce_res_type_eqRawIntOp {| reduce |} :
   res_type{ eqRawIntOp[p:n, s:s] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_eqRawIntOp {| reduce |} :
   arg1_type{ eqRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_eqRawIntOp {| reduce |} :
   arg2_type { eqRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_neqRawIntOp {| reduce |} :
   res_type{ neqRawIntOp[p:n, s:s] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_neqRawIntOp {| reduce |} :
   arg1_type{ neqRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_neqRawIntOp {| reduce |} :
   arg2_type { neqRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_ltRawIntOp {| reduce |} :
   res_type{ ltRawIntOp[p:n, s:s] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_ltRawIntOp {| reduce |} :
   arg1_type{ ltRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_ltRawIntOp {| reduce |} :
   arg2_type { ltRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_leRawIntOp {| reduce |} :
   res_type{ leRawIntOp[p:n, s:s] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_leRawIntOp {| reduce |} :
   arg1_type{ leRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_leRawIntOp {| reduce |} :
   arg2_type { leRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_res_type_gtRawIntOp {| reduce |} :
   res_type{ gtRawIntOp[p:n, s:s] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_gtRawIntOp {| reduce |} :
   arg1_type{ gtRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_gtRawIntOp {| reduce |} :
   arg2_type { gtRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

(* NOTE(review): these three rewrite names use "geRawIntO" (missing
 * the final "p"), inconsistent with the other rules. Renaming would
 * change the module's exported rewrite names, so they are left as is. *)
prim_rw reduce_res_type_geRawIntO {| reduce |} :
   res_type{ geRawIntOp[p:n, s:s] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_geRawIntO {| reduce |} :
   arg1_type{ geRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_geRawIntO {| reduce |} :
   arg2_type { geRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

(* Three-way comparison: result is tyInt. *)
prim_rw reduce_res_type_cmpRawIntOp {| reduce |} :
   res_type{ cmpRawIntOp[p:n, s:s] } <-->
   tyInt

prim_rw reduce_arg1_type_cmpRawIntOp {| reduce |} :
   arg1_type{ cmpRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]

prim_rw reduce_arg2_type_cmpRawIntOp {| reduce |} :
   arg2_type { cmpRawIntOp[p:n, s:s] } <-->
   tyRawInt[p:n, s:s]
(*
 * Binary floating-point operators, indexed by precision p.
 * Arithmetic ops produce tyFloat[p]; comparisons produce tyEnum[2];
 * cmp produces tyInt; ldexp takes a float and an int.
 *)
prim_rw reduce_res_type_plusFloatOp {| reduce |} :
   res_type{ plusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_plusFloatOp {| reduce |} :
   arg1_type{ plusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_plusFloatOp {| reduce |} :
   arg2_type { plusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_minusFloatOp {| reduce |} :
   res_type{ minusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_minusFloatOp {| reduce |} :
   arg1_type{ minusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_minusFloatOp {| reduce |} :
   arg2_type { minusFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_mulFloatOp {| reduce |} :
   res_type{ mulFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_mulFloatOp {| reduce |} :
   arg1_type{ mulFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_mulFloatOp {| reduce |} :
   arg2_type { mulFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_divFloatOp {| reduce |} :
   res_type{ divFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_divFloatOp {| reduce |} :
   arg1_type{ divFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_divFloatOp {| reduce |} :
   arg2_type { divFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_remFloatOp {| reduce |} :
   res_type{ remFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_remFloatOp {| reduce |} :
   arg1_type{ remFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_remFloatOp {| reduce |} :
   arg2_type { remFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_maxFloatOp {| reduce |} :
   res_type{ maxFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_maxFloatOp {| reduce |} :
   arg1_type{ maxFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_maxFloatOp {| reduce |} :
   arg2_type { maxFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_minFloatOp {| reduce |} :
   res_type{ minFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_minFloatOp {| reduce |} :
   arg1_type{ minFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_minFloatOp {| reduce |} :
   arg2_type { minFloatOp[p:n] } <-->
   tyFloat[p:n]

(* Comparisons: tyFloat * tyFloat -> tyEnum[2]. *)
prim_rw reduce_res_type_eqFloatOp {| reduce |} :
   res_type{ eqFloatOp[p:n] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_eqFloatOp {| reduce |} :
   arg1_type{ eqFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_eqFloatOp {| reduce |} :
   arg2_type { eqFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_neqFloatOp {| reduce |} :
   res_type{ neqFloatOp[p:n] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_neqFloatOp {| reduce |} :
   arg1_type{ neqFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_neqFloatOp {| reduce |} :
   arg2_type { neqFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_ltFloatOp {| reduce |} :
   res_type{ ltFloatOp[p:n] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_ltFloatOp {| reduce |} :
   arg1_type{ ltFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_ltFloatOp {| reduce |} :
   arg2_type { ltFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_leFloatOp {| reduce |} :
   res_type{ leFloatOp[p:n] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_leFloatOp {| reduce |} :
   arg1_type{ leFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_leFloatOp {| reduce |} :
   arg2_type { leFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_gtFloatOp {| reduce |} :
   res_type{ gtFloatOp[p:n] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_gtFloatOp {| reduce |} :
   arg1_type{ gtFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_gtFloatOp {| reduce |} :
   arg2_type { gtFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_geFloatOp {| reduce |} :
   res_type{ geFloatOp[p:n] } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_geFloatOp {| reduce |} :
   arg1_type{ geFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_geFloatOp {| reduce |} :
   arg2_type { geFloatOp[p:n] } <-->
   tyFloat[p:n]

(* Three-way comparison: result is tyInt. *)
prim_rw reduce_res_type_cmpFloatOp {| reduce |} :
   res_type{ cmpFloatOp[p:n] } <-->
   tyInt

prim_rw reduce_arg1_type_cmpFloatOp {| reduce |} :
   arg1_type{ cmpFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_cmpFloatOp {| reduce |} :
   arg2_type { cmpFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_atan2FloatOp {| reduce |} :
   res_type{ atan2FloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_atan2FloatOp {| reduce |} :
   arg1_type{ atan2FloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_atan2FloatOp {| reduce |} :
   arg2_type { atan2FloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_res_type_powerFloatOp {| reduce |} :
   res_type{ powerFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_powerFloatOp {| reduce |} :
   arg1_type{ powerFloatOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_powerFloatOp {| reduce |} :
   arg2_type { powerFloatOp[p:n] } <-->
   tyFloat[p:n]

(* ldexp: second argument is the integer exponent. *)
prim_rw reduce_res_type_ldExpFloatIntOp {| reduce |} :
   res_type{ ldExpFloatIntOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg1_type_ldExpFloatIntOp {| reduce |} :
   arg1_type{ ldExpFloatIntOp[p:n] } <-->
   tyFloat[p:n]

prim_rw reduce_arg2_type_ldExpFloatIntOp {| reduce |} :
   arg2_type { ldExpFloatIntOp[p:n] } <-->
   tyInt
(*
 * Pointer (in)equality operators: both arguments have the type 'ty
 * carried by the operator itself; the result is a boolean tyEnum[2].
 *)
prim_rw reduce_res_type_eqEqOp {| reduce |} :
   res_type{ eqEqOp{ 'ty } } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_eqEqOp {| reduce |} :
   arg1_type{ eqEqOp{ 'ty } } <-->
   'ty

prim_rw reduce_arg2_type_eqEqOp {| reduce |} :
   arg2_type { eqEqOp{ 'ty } } <-->
   'ty

prim_rw reduce_res_type_neqEqOp {| reduce |} :
   res_type{ neqEqOp{ 'ty } } <-->
   tyEnum[2]

prim_rw reduce_arg1_type_neqEqOp {| reduce |} :
   arg1_type{ neqEqOp{ 'ty } } <-->
   'ty

prim_rw reduce_arg2_type_neqEqOp {| reduce |} :
   arg2_type { neqEqOp{ 'ty } } <-->
   'ty
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/fir/mfir_tr_atom_base.ml | ocaml | *************************************************************************
* Declarations.
*************************************************************************
*************************************************************************
* Display forms.
*************************************************************************
*************************************************************************
* Rewrites.
************************************************************************* | doc <:doc<
@module[Mfir_tr_atom_base]
The @tt[Mfir_tr_atom_base] module defines the argument types
and result types of the FIR operators.
@docoff
------------------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages. Additional
information about the system is available at
/
Copyright (C) 2002 Brian Emre Aydemir, Caltech
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Brian Emre Aydemir
@email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Mfir_ty
extends Mfir_exp
extends Mfir_option
extends Mfir_sequent
doc <:doc<
@terms
The term @tt[res_type] returns the result type of an operator @tt[op].
The terms @tt[arg1_type] and @tt[arg2_type] return the types of
first and second arguments of an operator @tt[op] (@tt[arg2_type] is
undefined if @tt[op] is a unary operator).
>>
declare res_type{ 'op }
declare arg1_type{ 'op }
declare arg2_type{ 'op }
doc docoff
open Top_conversionals
dform res_type_df : except_mode[src] ::
res_type{ 'op } =
bf["res_type"] `"(" slot{'op} `")"
dform arg1_type_df : except_mode[src] ::
arg1_type{ 'op } =
bf["arg1_type"] `"(" slot{'op} `")"
dform arg2_type_df : except_mode[src] ::
arg2_type{ 'op } =
bf["arg2_type"] `"(" slot{'op} `")"
doc <:doc<
@rewrites
Rewrites are used to define the argument and result types of the
FIR unary and binary operators. The types may not be well-formed
if the original operator is not well-formed. We omit an explicit
listing of these rewrites.
>>
doc docoff
prim_rw reduce_res_type_notEnumOp {| reduce |} :
res_type{ notEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_arg1_type_notEnumOp {| reduce |} :
arg1_type{ notEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_res_type_uminusIntOp {| reduce |} :
res_type{ uminusIntOp } <-->
tyInt
prim_rw reduce_arg1_type_uminusIntOp {| reduce |} :
arg1_type{ uminusIntOp } <-->
tyInt
prim_rw reduce_res_type_notIntOp {| reduce |} :
res_type{ notIntOp } <-->
tyInt
prim_rw reduce_arg1_type_notIntOp {| reduce |} :
arg1_type{ notIntOp } <-->
tyInt
prim_rw reduce_res_type_absIntOp {| reduce |} :
res_type{ absIntOp } <-->
tyInt
prim_rw reduce_arg1_type_absIntOp {| reduce |} :
arg1_type{ absIntOp } <-->
tyInt
prim_rw reduce_res_type_uminusRawIntOp {| reduce |} :
res_type{ uminusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_uminusRawIntOp {| reduce |} :
arg1_type{ uminusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_notRawIntOp {| reduce |} :
res_type{ notRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_notRawIntOp {| reduce |} :
arg1_type{ notRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_uminusFloatOp {| reduce |} :
res_type{ uminusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_uminusFloatOp {| reduce |} :
arg1_type{ uminusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_absFloatOp {| reduce |} :
res_type{ absFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_absFloatOp {| reduce |} :
arg1_type{ absFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_sinFloatOp {| reduce |} :
res_type{ sinFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_sinFloatOp {| reduce |} :
arg1_type{ sinFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_cosFloatOp {| reduce |} :
res_type{ cosFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_cosFloatOp {| reduce |} :
arg1_type{ cosFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_tanFloatop {| reduce |} :
res_type{ tanFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_tanFloatop {| reduce |} :
arg1_type{ tanFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_asinFloatOp {| reduce |} :
res_type{ asinFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_asinFloatOp {| reduce |} :
arg1_type{ asinFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_atanFloatOp {| reduce |} :
res_type{ atanFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_atanFloatOp {| reduce |} :
arg1_type{ atanFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_sinhFloatOp {| reduce |} :
res_type{ sinhFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_sinhFloatOp {| reduce |} :
arg1_type{ sinhFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_coshFloatOp {| reduce |} :
res_type{ coshFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_coshFloatOp {| reduce |} :
arg1_type{ coshFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_tanhFloatOp {| reduce |} :
res_type{ tanhFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_tanhFloatOp {| reduce |} :
arg1_type{ tanhFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_expFloatOp {| reduce |} :
res_type{ expFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_expFloatOp {| reduce |} :
arg1_type{ expFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_logFloatOp {| reduce |} :
res_type{ logFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_logFloatOp {| reduce |} :
arg1_type{ logFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_sqrtFloatOp {| reduce |} :
res_type{ sqrtFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_sqrtFloatOp {| reduce |} :
arg1_type{ sqrtFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_ceilFloatOp {| reduce |} :
res_type{ ceilFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_ceilFloatOp {| reduce |} :
arg1_type{ ceilFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_floorFloatOp {| reduce |} :
res_type{ floorFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_floorFloatOp {| reduce |} :
arg1_type{ floorFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_intOfFloatOp {| reduce |} :
res_type{ intOfFloatOp[p:n] } <-->
tyInt
prim_rw reduce_arg1_type_intOfFloatOp {| reduce |} :
arg1_type{ intOfFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_intOfRawIntOp {| reduce |} :
res_type{ intOfRawIntOp[p:n, s:s] } <-->
tyInt
prim_rw reduce_arg1_type_intOfRawIntOp {| reduce |} :
arg1_type{ intOfRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_floatOfIntOp {| reduce |} :
res_type{ floatOfIntOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_floatOfIntOp {| reduce |} :
arg1_type{ floatOfIntOp[p:n] } <-->
tyInt
prim_rw reduce_res_type_floatOfFloatOp {| reduce |} :
res_type{ floatOfFloatOp[p1:n, p2:n] } <-->
tyFloat[p1:n]
prim_rw reduce_arg1_type_floatOfFloatOp {| reduce |} :
arg1_type{ floatOfFloatOp[p1:n, p2:n] } <-->
tyFloat[p2:n]
prim_rw reduce_res_type_floatOfRawIntOp {| reduce |} :
res_type{ floatOfRawIntOp[fp:n, rp:n, s:s] } <-->
tyFloat[fp:n]
prim_rw reduce_arg1_type_floatOfRawIntOp {| reduce |} :
arg1_type{ floatOfRawIntOp[fp:n, rp:n, s:s] } <-->
tyRawInt[rp:n, s:s]
prim_rw reduce_res_type_rawIntOfIntOp {| reduce |} :
res_type{ rawIntOfIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_rawIntOfIntOp {| reduce |} :
arg1_type{ rawIntOfIntOp[p:n, s:s] } <-->
tyInt
prim_rw reduce_res_type_rawIntOfEnumOp {| reduce |} :
res_type{ rawIntOfEnumOp[p:n, s:s, i:n] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_rawIntOfEnumOp {| reduce |} :
arg1_type{ rawIntOfEnumOp[p:n, s:s, i:n] } <-->
tyEnum[i:n]
prim_rw reduce_res_type_rawIntOfFloatOp {| reduce |} :
res_type{ rawIntOfFloatOp[rp:n, s:s, fp:n] } <-->
tyRawInt[rp:n, s:s]
prim_rw reduce_arg1_type_rawIntOfFloatOp {| reduce |} :
arg1_type{ rawIntOfFloatOp[rp:n, s:s, fp:n] } <-->
tyFloat[fp:n]
prim_rw reduce_res_type_rawIntOfRawIntOp {| reduce |} :
res_type{ rawIntOfRawIntOp[dp:n, ds:s, sp:n, ss:s] } <-->
tyRawInt[dp:n, ds:s]
prim_rw reduce_arg1_type_rawIntOfRawIntOp {| reduce |} :
arg1_type{ rawIntOfRawIntOp[dp:n, ds:s, sp:n, ss:s] } <-->
tyRawInt[sp:n, ss:s]
prim_rw reduce_res_type_dtupleOfDTupleOp {| reduce |} :
res_type{ dtupleOfDTupleOp{ 'tv; 'mtyl } } <-->
tyDTuple{ 'tv; none }
prim_rw reduce_arg1_type_dtupleOfDTupleOp {| reduce |} :
arg1_type{ dtupleOfDTupleOp{ 'tv; 'mtyl } } <-->
tyDTuple{ 'tv; some{ 'mtyl } }
prim_rw reduce_res_type_unionOfUnionOp {| reduce |} :
res_type{ unionOfUnionOp{ 'tv; 'tyl; 's1; 's2 } } <-->
tyUnion{ 'tv; 'tyl; 's1 }
prim_rw reduce_arg1_type_unionOfUnionOp {| reduce |} :
arg1_type{ unionOfUnionOp{ 'tv; 'tyl; 's1; 's2 } } <-->
tyUnion{ 'tv; 'tyl; 's2 }
prim_rw reduce_res_type_rawDataOfFrameOp {| reduce |} :
res_type{ rawDataOfFrameOp{ 'tv; 'tyl } } <-->
tyRawData
prim_rw reduce_arg1_type_rawDataOfFrameOp {| reduce |} :
arg1_type{ rawDataOfFrameOp{ 'tv; 'tyl } } <-->
tyFrame{ 'tv; 'tyl }
prim_rw reduce_res_type_andEnumOp {| reduce |} :
res_type{ andEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_arg1_type_andEnumOp {| reduce |} :
arg1_type{ andEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_arg2_type_andEnumOp {| reduce |} :
arg2_type { andEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_res_type_orEnumOp {| reduce |} :
res_type{ orEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_arg1_type_orEnumOp {| reduce |} :
arg1_type{ orEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_arg2_type_orEnumOp {| reduce |} :
arg2_type { orEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_res_type_xorEnumOp {| reduce |} :
res_type{ xorEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_arg1_type_xorEnumOp {| reduce |} :
arg1_type{ xorEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_arg2_type_xorEnumOp {| reduce |} :
arg2_type { xorEnumOp[i:n] } <-->
tyEnum[i:n]
prim_rw reduce_res_type_plusIntOp {| reduce |} :
res_type{ plusIntOp } <-->
tyInt
prim_rw reduce_arg1_type_plusIntOp {| reduce |} :
arg1_type{ plusIntOp } <-->
tyInt
prim_rw reduce_arg2_type_plusIntOp {| reduce |} :
arg2_type { plusIntOp } <-->
tyInt
prim_rw reduce_res_type_minusIntOp {| reduce |} :
res_type{ minusIntOp } <-->
tyInt
prim_rw reduce_arg1_type_minusIntOp {| reduce |} :
arg1_type{ minusIntOp } <-->
tyInt
prim_rw reduce_arg2_type_minusIntOp {| reduce |} :
arg2_type { minusIntOp } <-->
tyInt
prim_rw reduce_res_type_mulIntOp {| reduce |} :
res_type{ mulIntOp } <-->
tyInt
prim_rw reduce_arg1_type_mulIntOp {| reduce |} :
arg1_type{ mulIntOp } <-->
tyInt
prim_rw reduce_arg2_type_mulIntOp {| reduce |} :
arg2_type { mulIntOp } <-->
tyInt
prim_rw reduce_res_type_divIntOp {| reduce |} :
res_type{ divIntOp } <-->
tyInt
prim_rw reduce_arg1_type_divIntOp {| reduce |} :
arg1_type{ divIntOp } <-->
tyInt
prim_rw reduce_arg2_type_divIntOp {| reduce |} :
arg2_type { divIntOp } <-->
tyInt
prim_rw reduce_res_type_remIntOp {| reduce |} :
res_type{ remIntOp } <-->
tyInt
prim_rw reduce_arg1_type_remIntOp {| reduce |} :
arg1_type{ remIntOp } <-->
tyInt
prim_rw reduce_arg2_type_remIntOp {| reduce |} :
arg2_type { remIntOp } <-->
tyInt
prim_rw reduce_res_type_lslIntOp {| reduce |} :
res_type{ lslIntOp } <-->
tyInt
prim_rw reduce_arg1_type_lslIntOp {| reduce |} :
arg1_type{ lslIntOp } <-->
tyInt
prim_rw reduce_arg2_type_lslIntOp {| reduce |} :
arg2_type { lslIntOp } <-->
tyInt
prim_rw reduce_res_type_lsrIntOp {| reduce |} :
res_type{ lsrIntOp } <-->
tyInt
prim_rw reduce_arg1_type_lsrIntOp {| reduce |} :
arg1_type{ lsrIntOp } <-->
tyInt
prim_rw reduce_arg2_type_lsrIntOp {| reduce |} :
arg2_type { lsrIntOp } <-->
tyInt
prim_rw reduce_res_type_asrIntOp {| reduce |} :
res_type{ asrIntOp } <-->
tyInt
prim_rw reduce_arg1_type_asrIntOp {| reduce |} :
arg1_type{ asrIntOp } <-->
tyInt
prim_rw reduce_arg2_type_asrIntOp {| reduce |} :
arg2_type { asrIntOp } <-->
tyInt
prim_rw reduce_res_type_andIntOp {| reduce |} :
res_type{ andIntOp } <-->
tyInt
prim_rw reduce_arg1_type_andIntOp {| reduce |} :
arg1_type{ andIntOp } <-->
tyInt
prim_rw reduce_arg2_type_andIntOp {| reduce |} :
arg2_type { andIntOp } <-->
tyInt
prim_rw reduce_res_type_orIntOp {| reduce |} :
res_type{ orIntOp } <-->
tyInt
prim_rw reduce_arg1_type_orIntOp {| reduce |} :
arg1_type{ orIntOp } <-->
tyInt
prim_rw reduce_arg2_type_orIntOp {| reduce |} :
arg2_type { orIntOp } <-->
tyInt
prim_rw reduce_res_type_xorIntOp {| reduce |} :
res_type{ xorIntOp } <-->
tyInt
prim_rw reduce_arg1_type_xorIntOp {| reduce |} :
arg1_type{ xorIntOp } <-->
tyInt
prim_rw reduce_arg2_type_xorIntOp {| reduce |} :
arg2_type { xorIntOp } <-->
tyInt
prim_rw reduce_res_type_maxIntOp {| reduce |} :
res_type{ maxIntOp } <-->
tyInt
prim_rw reduce_arg1_type_maxIntOp {| reduce |} :
arg1_type{ maxIntOp } <-->
tyInt
prim_rw reduce_arg2_type_maxIntOp {| reduce |} :
arg2_type { maxIntOp } <-->
tyInt
prim_rw reduce_res_type_eqIntOp {| reduce |} :
res_type{ eqIntOp } <-->
tyEnum[2]
prim_rw reduce_arg1_type_eqIntOp {| reduce |} :
arg1_type{ eqIntOp } <-->
tyInt
prim_rw reduce_arg2_type_eqIntOp {| reduce |} :
arg2_type { eqIntOp } <-->
tyInt
prim_rw reduce_res_type_neqIntOp {| reduce |} :
res_type{ neqIntOp } <-->
tyEnum[2]
prim_rw reduce_arg1_type_neqIntOp {| reduce |} :
arg1_type{ neqIntOp } <-->
tyInt
prim_rw reduce_arg2_type_neqIntOp {| reduce |} :
arg2_type { neqIntOp } <-->
tyInt
prim_rw reduce_res_type_ltIntOp {| reduce |} :
res_type{ ltIntOp } <-->
tyEnum[2]
prim_rw reduce_arg1_type_ltIntOp {| reduce |} :
arg1_type{ ltIntOp } <-->
tyInt
prim_rw reduce_arg2_type_ltIntOp {| reduce |} :
arg2_type { ltIntOp } <-->
tyInt
prim_rw reduce_res_type_leIntOp {| reduce |} :
res_type{ leIntOp } <-->
tyEnum[2]
prim_rw reduce_arg1_type_leIntOp {| reduce |} :
arg1_type{ leIntOp } <-->
tyInt
prim_rw reduce_arg2_type_leIntOp {| reduce |} :
arg2_type { leIntOp } <-->
tyInt
prim_rw reduce_res_type_gtIntOp {| reduce |} :
res_type{ gtIntOp } <-->
tyEnum[2]
prim_rw reduce_arg1_type_gtIntOp {| reduce |} :
arg1_type{ gtIntOp } <-->
tyInt
prim_rw reduce_arg2_type_gtIntOp {| reduce |} :
arg2_type { gtIntOp } <-->
tyInt
prim_rw reduce_res_type_geIntOp {| reduce |} :
res_type{ geIntOp } <-->
tyEnum[2]
prim_rw reduce_arg1_type_geIntOp {| reduce |} :
arg1_type{ geIntOp } <-->
tyInt
prim_rw reduce_arg2_type_geIntOp {| reduce |} :
arg2_type { geIntOp } <-->
tyInt
prim_rw reduce_res_type_cmpIntOp {| reduce |} :
res_type{ cmpIntOp } <-->
tyInt
prim_rw reduce_arg1_type_cmpIntOp {| reduce |} :
arg1_type{ cmpIntOp } <-->
tyInt
prim_rw reduce_arg2_type_cmpIntOp {| reduce |} :
arg2_type { cmpIntOp } <-->
tyInt
prim_rw reduce_res_type_plusRawIntOp {| reduce |} :
res_type{ plusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_plusRawIntOp {| reduce |} :
arg1_type{ plusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_plusRawIntOp {| reduce |} :
arg2_type { plusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_minusRawIntOp {| reduce |} :
res_type{ minusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_minusRawIntOp {| reduce |} :
arg1_type{ minusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_minusRawIntOp {| reduce |} :
arg2_type { minusRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_mulRawIntOp {| reduce |} :
res_type{ mulRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_mulRawIntOp {| reduce |} :
arg1_type{ mulRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_mulRawIntOp {| reduce |} :
arg2_type { mulRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_divRawIntOp {| reduce |} :
res_type{ divRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_divRawIntOp {| reduce |} :
arg1_type{ divRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_divRawIntOp {| reduce |} :
arg2_type { divRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_remRawIntOp {| reduce |} :
res_type{ remRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_remRawIntOp {| reduce |} :
arg1_type{ remRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_remRawIntOp {| reduce |} :
arg2_type { remRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_slRawIntOp {| reduce |} :
res_type{ slRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_slRawIntOp {| reduce |} :
arg1_type{ slRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_slRawIntOp {| reduce |} :
arg2_type { slRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_srRawIntOp {| reduce |} :
res_type{ srRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_srRawIntOp {| reduce |} :
arg1_type{ srRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_srRawIntOp {| reduce |} :
arg2_type { srRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_andRawIntOp {| reduce |} :
res_type{ andRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_andRawIntOp {| reduce |} :
arg1_type{ andRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_andRawIntOp {| reduce |} :
arg2_type { andRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_orRawIntOp {| reduce |} :
res_type{ orRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_orRawIntOp {| reduce |} :
arg1_type{ orRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_orRawIntOp {| reduce |} :
arg2_type { orRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_xorRawIntOp {| reduce |} :
res_type{ xorRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_xorRawIntOp {| reduce |} :
arg1_type{ xorRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_xorRawIntOp {| reduce |} :
arg2_type { xorRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_maxRawIntOp {| reduce |} :
res_type{ maxRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_maxRawIntOp {| reduce |} :
arg1_type{ maxRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_maxRawIntOp {| reduce |} :
arg2_type { maxRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_minRawIntOp {| reduce |} :
res_type{ minRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg1_type_minRawIntOp {| reduce |} :
arg1_type{ minRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_minRawIntOp {| reduce |} :
arg2_type { minRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_eqRawIntOp {| reduce |} :
res_type{ eqRawIntOp[p:n, s:s] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_eqRawIntOp {| reduce |} :
arg1_type{ eqRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_eqRawIntOp {| reduce |} :
arg2_type { eqRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_neqRawIntOp {| reduce |} :
res_type{ neqRawIntOp[p:n, s:s] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_neqRawIntOp {| reduce |} :
arg1_type{ neqRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_neqRawIntOp {| reduce |} :
arg2_type { neqRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_ltRawIntOp {| reduce |} :
res_type{ ltRawIntOp[p:n, s:s] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_ltRawIntOp {| reduce |} :
arg1_type{ ltRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_ltRawIntOp {| reduce |} :
arg2_type { ltRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_leRawIntOp {| reduce |} :
res_type{ leRawIntOp[p:n, s:s] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_leRawIntOp {| reduce |} :
arg1_type{ leRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_leRawIntOp {| reduce |} :
arg2_type { leRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_gtRawIntOp {| reduce |} :
res_type{ gtRawIntOp[p:n, s:s] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_gtRawIntOp {| reduce |} :
arg1_type{ gtRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_gtRawIntOp {| reduce |} :
arg2_type { gtRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_geRawIntO {| reduce |} :
res_type{ geRawIntOp[p:n, s:s] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_geRawIntO {| reduce |} :
arg1_type{ geRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_geRawIntO {| reduce |} :
arg2_type { geRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_cmpRawIntOp {| reduce |} :
res_type{ cmpRawIntOp[p:n, s:s] } <-->
tyInt
prim_rw reduce_arg1_type_cmpRawIntOp {| reduce |} :
arg1_type{ cmpRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_arg2_type_cmpRawIntOp {| reduce |} :
arg2_type { cmpRawIntOp[p:n, s:s] } <-->
tyRawInt[p:n, s:s]
prim_rw reduce_res_type_plusFloatOp {| reduce |} :
res_type{ plusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_plusFloatOp {| reduce |} :
arg1_type{ plusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_plusFloatOp {| reduce |} :
arg2_type { plusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_minusFloatOp {| reduce |} :
res_type{ minusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_minusFloatOp {| reduce |} :
arg1_type{ minusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_minusFloatOp {| reduce |} :
arg2_type { minusFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_mulFloatOp {| reduce |} :
res_type{ mulFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_mulFloatOp {| reduce |} :
arg1_type{ mulFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_mulFloatOp {| reduce |} :
arg2_type { mulFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_divFloatOp {| reduce |} :
res_type{ divFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_divFloatOp {| reduce |} :
arg1_type{ divFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_divFloatOp {| reduce |} :
arg2_type { divFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_remFloatOp {| reduce |} :
res_type{ remFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_remFloatOp {| reduce |} :
arg1_type{ remFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_remFloatOp {| reduce |} :
arg2_type { remFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_maxFloatOp {| reduce |} :
res_type{ maxFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_maxFloatOp {| reduce |} :
arg1_type{ maxFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_maxFloatOp {| reduce |} :
arg2_type { maxFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_minFloatOp {| reduce |} :
res_type{ minFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_minFloatOp {| reduce |} :
arg1_type{ minFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_minFloatOp {| reduce |} :
arg2_type { minFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_eqFloatOp {| reduce |} :
res_type{ eqFloatOp[p:n] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_eqFloatOp {| reduce |} :
arg1_type{ eqFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_eqFloatOp {| reduce |} :
arg2_type { eqFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_neqFloatOp {| reduce |} :
res_type{ neqFloatOp[p:n] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_neqFloatOp {| reduce |} :
arg1_type{ neqFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_neqFloatOp {| reduce |} :
arg2_type { neqFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_ltFloatOp {| reduce |} :
res_type{ ltFloatOp[p:n] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_ltFloatOp {| reduce |} :
arg1_type{ ltFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_ltFloatOp {| reduce |} :
arg2_type { ltFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_leFloatOp {| reduce |} :
res_type{ leFloatOp[p:n] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_leFloatOp {| reduce |} :
arg1_type{ leFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_leFloatOp {| reduce |} :
arg2_type { leFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_gtFloatOp {| reduce |} :
res_type{ gtFloatOp[p:n] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_gtFloatOp {| reduce |} :
arg1_type{ gtFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_gtFloatOp {| reduce |} :
arg2_type { gtFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_geFloatOp {| reduce |} :
res_type{ geFloatOp[p:n] } <-->
tyEnum[2]
prim_rw reduce_arg1_type_geFloatOp {| reduce |} :
arg1_type{ geFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_geFloatOp {| reduce |} :
arg2_type { geFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_cmpFloatOp {| reduce |} :
res_type{ cmpFloatOp[p:n] } <-->
tyInt
prim_rw reduce_arg1_type_cmpFloatOp {| reduce |} :
arg1_type{ cmpFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_cmpFloatOp {| reduce |} :
arg2_type { cmpFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_atan2FloatOp {| reduce |} :
res_type{ atan2FloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_atan2FloatOp {| reduce |} :
arg1_type{ atan2FloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_atan2FloatOp {| reduce |} :
arg2_type { atan2FloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_powerFloatOp {| reduce |} :
res_type{ powerFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_powerFloatOp {| reduce |} :
arg1_type{ powerFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_powerFloatOp {| reduce |} :
arg2_type { powerFloatOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_res_type_ldExpFloatIntOp {| reduce |} :
res_type{ ldExpFloatIntOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg1_type_ldExpFloatIntOp {| reduce |} :
arg1_type{ ldExpFloatIntOp[p:n] } <-->
tyFloat[p:n]
prim_rw reduce_arg2_type_ldExpFloatIntOp {| reduce |} :
arg2_type { ldExpFloatIntOp[p:n] } <-->
tyInt
prim_rw reduce_res_type_eqEqOp {| reduce |} :
res_type{ eqEqOp{ 'ty } } <-->
tyEnum[2]
prim_rw reduce_arg1_type_eqEqOp {| reduce |} :
arg1_type{ eqEqOp{ 'ty } } <-->
'ty
prim_rw reduce_arg2_type_eqEqOp {| reduce |} :
arg2_type { eqEqOp{ 'ty } } <-->
'ty
prim_rw reduce_res_type_neqEqOp {| reduce |} :
res_type{ neqEqOp{ 'ty } } <-->
tyEnum[2]
prim_rw reduce_arg1_type_neqEqOp {| reduce |} :
arg1_type{ neqEqOp{ 'ty } } <-->
'ty
prim_rw reduce_arg2_type_neqEqOp {| reduce |} :
arg2_type { neqEqOp{ 'ty } } <-->
'ty
|
12210a712d030a6e431feefb2bbae5b3fbdf228ca2cc478c2d7afd03cfa6b674 | brawnski/git-annex | Upgrade.hs | git - annex command
-
- Copyright 2011 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2011 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.Upgrade where
import Command
import Upgrade
import Version
import Messages
command :: [Command]
command = [standaloneCommand "upgrade" paramNothing seek
"upgrade repository layout"]
seek :: [CommandSeek]
seek = [withNothing start]
start :: CommandStartNothing
start = do
showStart "upgrade" "."
r <- upgrade
setVersion
next $ next $ return r
| null | https://raw.githubusercontent.com/brawnski/git-annex/8b847517a810d384a79178124b9766141b89bc17/Command/Upgrade.hs | haskell | git - annex command
-
- Copyright 2011 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2011 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.Upgrade where
import Command
import Upgrade
import Version
import Messages
-- | The single "upgrade" command: no parameters, described as
-- "upgrade repository layout".
command :: [Command]
command = [standaloneCommand "upgrade" paramNothing seek
	"upgrade repository layout"]
-- | Runs 'start' exactly once, with no file arguments.
seek :: [CommandSeek]
seek = [withNothing start]
-- | Announces the upgrade, runs it, invokes 'setVersion' (presumably
-- recording the new repository version -- confirm in Version module),
-- and reports the result of 'upgrade'.
start :: CommandStartNothing
start = do
	showStart "upgrade" "."
	r <- upgrade
	setVersion
	next $ next $ return r
| |
753989c2e166e580a6aa5efae17f54b7aae0fab18c7bb82cfbded6b1760f9541 | KavehYousefi/Esoteric-programming-languages | main.lisp | Date : 2022 - 01 - 14
;;
;; Sources:
;; -> ""
;; -> ""
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Declaration of types. -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; The four DEFTYPEs below constitute the file's informal static
;; contract; they are referenced by DECLARE and THE forms throughout.
(deftype destination ()
  "The ``destination'' type defines a data sink for output operations,
   compatible, for instance, with ``format'' and ``write''."
  '(or null (eql T) stream string))
;;; -------------------------------------------------------
;; Distinguishes the standard Deadfish tokens (i/d/s/o) from the XKCD
;; variant's tokens (x/d/k/c).
(deftype deadfish-instruction-set ()
  "The ``deadfish-instruction-set'' type defines the recognized variants
   of the Deadfish instruction set."
  '(member :standard :XKCD))
;;; -------------------------------------------------------
(deftype output-format ()
  "The ``output-format'' type defines the options for the printing
   commands applicable during the conversion of Deadfish source program
   to the more potent JR language.
   ---
   Deadfish restricts its output to the numeric value of its
   accumulator, while JR grants the programmer the choice betwixt the
   former variant and a character output, the latter of which construes
   the current cell value with its ASCII code."
  '(member :numeric :character))
;;; -------------------------------------------------------
;; Abstract classification of a Deadfish source character; produced by
;; GET-DEADFISH-COMMAND-FOR.
(deftype deadfish-command ()
  "The ``deadfish-command'' type defines a set of command identifiers
   which in an abstract fashion associate with the actual tokens in a
   piece of Deadfish code."
  '(member :increment :decrement :square :output :whitespace :unknown))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Definition of interface "Console". -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Abstract output protocol; Standard-Console further below implements
;; it on top of a conventional Common Lisp output destination.
(defclass Console ()
  ()
  (:documentation
    "The ``Console'' interface describes a data sink intended for the
     output of information from a program to the user side."))
;;; -------------------------------------------------------
(defgeneric console-print-number (console number)
  (:documentation
    "Prints the NUMBER to the CONSOLE and returns the modified
     CONSOLE."))
;;; -------------------------------------------------------
(defgeneric console-print-character (console character)
  (:documentation
    "Prints the CHARACTER to the CONSOLE and returns the modified
     CONSOLE."))
;;; -------------------------------------------------------
(defgeneric console-print-string (console string)
  (:documentation
    "Prints the STRING's characters in an unquoted form to the CONSOLE
     and returns the modified CONSOLE."))
;;; -------------------------------------------------------
(defgeneric console-clear (console)
  (:documentation
    "Clears the CONSOLE's content and returns the modified CONSOLE."))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Implementation of class "Standard-Console". -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Concrete Console whose methods (see below) all write via FORMAT to
;; the DESTINATION slot.
(defclass Standard-Console (Console)
  ((destination
    :initarg :destination
    :initform T
    :type destination
    :documentation "The data sink to write numbers or characters to.")
   (clearing-scroll-size
    :initarg :clearing-scroll-size
    :initform 10
    :accessor standard-console-clearing-scroll-size
    :type (integer 0 *)
    :documentation "The number of newlines to print to the DESTINATION
                    in order to simulate the clearing of this console."))
  (:documentation
    "The ``Standard-Console'' class provides an output commodity which
     manipulates a traditional Common Lisp destination --- either the
     standard output, a stream, or a dynamic string --- in order to
     convey information to the user.
     ---
     Being based upon the notion of the output conduit's perpetual
     extension, as counterdistinguished to the maintenance of a
     random-access data structure, a veridical clearing of the console
     exceeds the underlying subtrate's capacities. Instead, a
     configurable number of newlines avail as separators, in the best
     case transporting the data to conceal, present inside of the
     visible window, to a space which no longer provides a venue on the
     undesired portion.
     ---
     If the user, for instance, operates upon a terminal of 15 lines
     capacity, the ``Standard-Console'' instance should be initialized
     anenst its ``clearing-scroll-size'' property to apply 15 newlines
     as a means for shifting the current output upwards and outside of
     the visible area. Of course, the predicament of adjustable
     terminals, or windows, cannot be meliorated by this static console
     attribute, and the programmer is encumbered with the onus of
     invoking the ``standard-console-clearing-scroll-size'' function if
     optating an adjustment to such external influences."))
;;; -------------------------------------------------------
(defun make-standard-console (&key (destination T)
                                   (clearing-scroll-size 10))
  "Creates and returns a ``Standard-Console'' whose output is directed
   at the DESTINATION, and which emulates its clearing by issuing
   CLEARING-SCROLL-SIZE newlines."
  (declare (type destination   destination))
  (declare (type (integer 0 *) clearing-scroll-size))
  (the Standard-Console
    (make-instance 'Standard-Console
      :clearing-scroll-size clearing-scroll-size
      :destination          destination)))
;;; -------------------------------------------------------
;; Renders the NUMBER in its decimal form ("~d").
(defmethod console-print-number ((console Standard-Console)
                                 (number integer))
  (declare (type Standard-Console console))
  (declare (type integer number))
  (format (slot-value console 'destination) "~d" number)
  (the Standard-Console console))
;;; -------------------------------------------------------
;; Emits the CHARACTER verbatim via the aesthetic directive "~a".
(defmethod console-print-character ((console Standard-Console)
                                    (character character))
  (declare (type Standard-Console console))
  (declare (type character character))
  (format (slot-value console 'destination) "~a" character)
  (the Standard-Console console))
;;; -------------------------------------------------------
;; Emits the STRING without surrounding quotes ("~a").
(defmethod console-print-string ((console Standard-Console)
                                 (string string))
  (declare (type Standard-Console console))
  (declare (type string string))
  (format (slot-value console 'destination) "~a" string)
  (the Standard-Console console))
;;; -------------------------------------------------------
;; "~v%" prints CLEARING-SCROLL-SIZE newlines, scrolling prior output
;; out of a terminal window of commensurate height.
(defmethod console-clear ((console Standard-Console))
  (declare (type Standard-Console console))
  (format (slot-value console 'destination) "~v%"
    (slot-value console 'clearing-scroll-size))
  (the Standard-Console console))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Implementation of class "Interpreter". -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; The Interpreter couples a fixed eight-cell integer tape with a
;; Console and the bookkeeping required by the quine command "!".
(defclass Interpreter ()
  ((memory
    :initarg :memory
    :initform (make-array 8
                :element-type 'integer
                :initial-element 0)
    :type (vector integer 8)
    :documentation "The 8-cell array storing the JR program's data.")
   (pointer
    :initarg :pointer
    :initform 0
    :type (integer 0 7)
    :documentation "A pointer into the MEMORY, referencing its currently
                    active cell.")
   (copy-of-code
    :initarg :copy-of-code
    :initform (make-array 0
                :element-type 'character
                :adjustable T
                :fill-pointer 0)
    :type string
    :documentation "A buffer to store the processed JR code into, as a
                    provision in the case of a quine's request.")
   (quine-requested-p
    :initarg :quine-requested-p
    :initform NIL
    :type boolean
    :documentation "A flag which determines whether, at the end of the
                    program, the complete processed source code shall be
                    printed, producing a quine.")
   (console
    :initarg :console
    :initform (make-standard-console)
    :type Console
    :documentation "The console to airt the output to."))
  (:documentation
    "The ``Interpreter'' class encapsulates all information requisite
     for interpreting a series of JR commands without loss of
     information."))
;;; -------------------------------------------------------
;; Thin constructor: the memory, pointer, and quine state assume their
;; slot defaults; only the console is configurable.
(defun make-interpreter (&key (console (make-standard-console)))
  "Creates and returns a new ``Interpreter'' employing the CONSOLE for
   its output operations."
  (declare (type Console console))
  (the Interpreter (make-instance 'Interpreter :console console)))
;;; -------------------------------------------------------
;; Fixes: (1) two comment lines had lost their ";;" prefixes, leaving
;; bare symbols inside the CASE form and rendering the file unloadable;
;; (2) the docstring claimed "returns no value", whereas the function
;; returns the INTERPRETER.
(defun interpreter-process-commands (interpreter code)
  "Interprets the piece of JR CODE using the INTERPRETER and returns
   the modified INTERPRETER."
  (declare (type Interpreter interpreter))
  (declare (type string code))
  (with-slots (memory pointer copy-of-code quine-requested-p console)
      interpreter
    (declare (type (vector integer 8) memory))
    (declare (type (integer 0 7) pointer))
    (declare (type string copy-of-code))
    (declare (type boolean quine-requested-p))
    (declare (type Console console))
    (flet
        ((current-cell ()
          "Returns the value stored in the MEMORY cell at the POINTER."
          (the integer
            (aref memory pointer)))
         ((setf current-cell) (new-value)
          "Sets the value stored in the MEMORY cell at the POINTER, and
           returns the NEW-VALUE."
          (declare (type integer new-value))
          ;; Deadfish-inherited wraparound: -1 and 256 collapse to 0.
          (setf (aref memory pointer)
            (cond
              ((= new-value -1)  0)
              ((= new-value 256) 0)
              (T                 new-value)))
          (the integer (aref memory pointer)))
         (memorize-character (character)
          "Stores the CHARACTER into the COPY-OF-CODE, necessary for
           providing a quine, and returns no value."
          (declare (type character character))
          (vector-push-extend character copy-of-code)
          (values)))
      (loop for token of-type character across code do
        (memorize-character token)
        (case token
          ;; Unreachable: TOKEN is always a character, never NIL, since
          ;; it is drawn from a string; retained for fidelity.
          ((NIL)
            (loop-finish))
          ;; Decrement the current cell.
          (#\[
            (decf (current-cell)))
          ;; Increment the current cell.
          (#\]
            (incf (current-cell)))
          ;; Square the current cell.
          (#\;
            (setf (current-cell)
              (* (current-cell)
                 (current-cell))))
          ;; Print the current cell as a number.
          (#\.
            (console-print-number console (current-cell)))
          ;; Print the current cell as an ASCII character.
          (#\,
            (console-print-character console
              (code-char (current-cell))))
          ;; Reset the current cell to zero.
          (#\@
            (setf (current-cell) 0))
          ;; Print the program source code.
          (#\!
            (setf quine-requested-p T))
          ;; Clear the console.
          (#\~
            (console-clear console))
          ;; Move the cell pointer to the left.
          (#\<
            (when (plusp pointer)
              (decf pointer)))
          ;; Move the cell pointer to the right.
          (#\>
            (when (< pointer (1- (length memory)))
              (incf pointer)))
          ;; Tolerate whitespaces and layout elements.
          ((#\Newline #\Space #\Tab)
            NIL)
          ;; According to the rules of Deadfish an error incites the
          ;; printing of a newline character.
          (otherwise
            (console-print-character console #\Newline))))
      (when quine-requested-p
        (console-print-string console copy-of-code))))
  (the Interpreter interpreter))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Implementation of main operations. -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Thin wrapper around INTERPRETER-PROCESS-COMMANDS which discards the
;; returned interpreter in favor of no value.
(defun interpret-JR (interpreter code)
  "Interprets the piece of JR CODE using the INTERPRETER and returns
   no value."
  (declare (type Interpreter interpreter))
  (declare (type string code))
  (interpreter-process-commands interpreter code)
  (values))
;;; -------------------------------------------------------
;; Fix: (READ-LINE) with default arguments signals an END-OF-FILE error
;; upon input exhaustion, so the (UNLESS INPUT ...) termination branch
;; could never execute; EOF-ERROR-P = NIL makes it reachable.
(defun execute-JR-shell (interpreter)
  "Executes the JR shell using the INTERPRETER and returns no value.
   The shell terminates gracefully when the standard input is
   exhausted."
  (declare (type Interpreter interpreter))
  (loop do
    (format T "~&>> ")
    (let ((input (read-line *standard-input* NIL NIL)))
      (declare (type (or null string) input))
      (clear-input)
      ;; INPUT is NIL exactly upon end of file.
      (unless input
        (loop-finish))
      (interpreter-process-commands interpreter input)))
  (values))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Implementation of Deadfish-to-JR converter. -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun get-deadfish-command-for (instruction-set character)
  "Returns the ``deadfish-command'' associated in the INSTRUCTION-SET
   with the CHARACTER."
  (declare (type deadfish-instruction-set instruction-set))
  (declare (type character character))
  (the deadfish-command
    (case instruction-set
      ((:standard :XKCD)
        ;; Both variants share the whitespace and fallback handling and
        ;; deviate merely in their command characters.
        (if (member character '(#\Newline #\Space #\Tab) :test #'char=)
            :whitespace
            (let ((token-table
                    (if (eq instruction-set :standard)
                        '((#\i . :increment) (#\d . :decrement)
                          (#\s . :square)    (#\o . :output))
                        '((#\x . :increment) (#\d . :decrement)
                          (#\k . :square)    (#\c . :output)))))
              (or (cdr (assoc character token-table :test #'char=))
                  :unknown))))
      (otherwise
        (error "Invalid instruction set: ~s." instruction-set)))))
;;; -------------------------------------------------------
;; Fix: the previous WRITE-CHAR calls required a stream and thus
;; rejected a string with fill pointer, although the declared
;; DESTINATION type explicitly admits one; FORMAT accepts every member
;; of that type.  (For DESTINATION = T the sink thereby becomes
;; *STANDARD-OUTPUT*, consistent with the file's other output code.)
(defun convert-Deadfish-to-JR (deadfish-code
                               &key (destination T)
                                    (instruction-set :standard)
                                    (output-format :numeric))
  "Converts the piece of DEADFISH-CODE, stated using the
   INSTRUCTION-SET, to the equivalent JR code, and prints the result to
   the DESTINATION; for a NIL DESTINATION, returns the JR code as a
   fresh string instead."
  (declare (type string deadfish-code))
  (declare (type destination destination))
  (declare (type deadfish-instruction-set instruction-set))
  (declare (type output-format output-format))
  (if destination
      (loop
        for token of-type character across deadfish-code
        and position of-type fixnum from 0 by 1
        do
          (case (get-deadfish-command-for instruction-set token)
            (:increment
              (format destination "~c" #\]))
            (:decrement
              (format destination "~c" #\[))
            (:square
              (format destination "~c" #\;))
            (:output
              ;; The OUTPUT-FORMAT selects betwixt JR's numeric print
              ;; "." and its character print ",".
              (case output-format
                (:numeric   (format destination "~c" #\.))
                (:character (format destination "~c" #\,))
                (otherwise  (error "Invalid output format: ~s."
                              output-format))))
            (:whitespace
              (format destination "~c" token))
            (otherwise
              (error "Invalid character in the Deadfish code at ~
                      position ~d: ~s."
                position token))))
      (the string
        (with-output-to-string (output)
          (declare (type string-stream output))
          (convert-Deadfish-to-JR deadfish-code
            :destination output
            :instruction-set instruction-set
            :output-format output-format)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; -- Test cases. -- ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Fix: several comment lines in this section had lost their ";;"
;; prefixes, leaving bare symbols at the top level and rendering the
;; file unloadable; the demonstration forms themselves are unchanged.
;; Print "Hello, world!".
(interpret-JR
  (make-interpreter)
  "
   ]]];[;]]]]]]]],
   @]];]]];[[[[[
   >]]];];],]]]]]]],,]]],
   <,[[[[[[[[[[[[,
   >>]]];]];[[,
   <,]]],[[[[[[,
   @]]];];,
   <],
  ")
;;; -------------------------------------------------------
;; Quine.
(interpret-JR (make-interpreter) "!")
;;; -------------------------------------------------------
;; Demonstrates the computational peculiarity of the underlying
;; "Deadfish" heritage, which for the values -1 and 256 simulates an
;; overflow by setting the accumulator --- or, in the case of JR, the
;; current cell --- to zero. The Deadfish equivalent comprises:
;;   iissso
;; The output should be:
;;   0
(interpret-JR (make-interpreter) "]];;;.")
;;; -------------------------------------------------------
;; Execute the JR shell. Please note that such a program does not offer
;; the contingency for interruption, that is, the shell must be closed
;; by manual termination of some kind.  CAUTION: this form blocks,
;; reading the standard input until its exhaustion.
(execute-JR-shell (make-interpreter))
;;; -------------------------------------------------------
;; Print to the standard output the JR code
;;   ]];;;.
(convert-Deadfish-to-JR "iissso")
;;; -------------------------------------------------------
;; Print to the standard output the JR code
;;   ]];;;,
;; Note that the desinent JR command produced states the character print
;; operation "," in lieu of the Deadfish equivalent "." --- a corollary
;; of specifying the ``:output-format :character'' option.
(convert-Deadfish-to-JR "iissso" :output-format :character)
;;; -------------------------------------------------------
;; Print to the standard output the JR code
;;   ]];;;.
(convert-Deadfish-to-JR "xxkkkc" :instruction-set :XKCD)
;;; -------------------------------------------------------
;; Executes the equivalent JR code
;;   ]];;;.
(interpret-JR (make-interpreter)
  (convert-Deadfish-to-JR "iissso" :destination NIL))
| null | https://raw.githubusercontent.com/KavehYousefi/Esoteric-programming-languages/ca9e1f171af964721e09f2c8959cc92e84d83c5a/JR/JR_001/main.lisp | lisp |
Sources:
-> ""
-- Declaration of types. -- ;;
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-- Definition of interface "Console". -- ;;
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-- Implementation of class "Standard-Console". -- ;;
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-------------------------------------------------------
-- Implementation of class "Interpreter". -- ;;
-------------------------------------------------------
-------------------------------------------------------
End of file. => Terminate.
Decrement the current cell.
Increment the current cell.
Square the current cell.
Print the current cell as a number.
Print the current cell as an ASCII character.
Print the program source code.
Clear the console.
Move the cell pointer to the left.
Move the cell pointer to the right.
Tolerate whitespaces and layout elements.
printing of a newline character.
-- Implementation of main operations. -- ;;
-------------------------------------------------------
;
-------------------------------------------------------
destination))
-- Test cases. -- ;;
Print "Hello, world!".
[;]]]]]]]],
]]];[[[[[
];],]]]]]]],,]]],
]];[[,
];,
-------------------------------------------------------
-------------------------------------------------------
Demonstrates the computational peculiarity of the underlying
The output should be:
0
-------------------------------------------------------
the contingency for interruption, that is, the shell must be closed
by manual termination of some kind.
-------------------------------------------------------
]];;;.
-------------------------------------------------------
]];;;,
of specifying the ``:output-format :character'' option.
-------------------------------------------------------
]];;;.
-------------------------------------------------------
]];;;. | Date : 2022 - 01 - 14
- > " "
;; The four DEFTYPEs below constitute the file's informal static
;; contract; they are referenced by DECLARE and THE forms throughout.
(deftype destination ()
  "The ``destination'' type defines a data sink for output operations,
   compatible, for instance, with ``format'' and ``write''."
  '(or null (eql T) stream string))
;; Distinguishes the standard Deadfish tokens (i/d/s/o) from the XKCD
;; variant's tokens (x/d/k/c).
(deftype deadfish-instruction-set ()
  "The ``deadfish-instruction-set'' type defines the recognized variants
   of the Deadfish instruction set."
  '(member :standard :XKCD))
(deftype output-format ()
  "The ``output-format'' type defines the options for the printing
   commands applicable during the conversion of Deadfish source program
   to the more potent JR language.
   ---
   Deadfish restricts its output to the numeric value of its
   accumulator, while JR grants the programmer the choice betwixt the
   former variant and a character output, the latter of which construes
   the current cell value with its ASCII code."
  '(member :numeric :character))
;; Abstract classification of a Deadfish source character; produced by
;; GET-DEADFISH-COMMAND-FOR.
(deftype deadfish-command ()
  "The ``deadfish-command'' type defines a set of command identifiers
   which in an abstract fashion associate with the actual tokens in a
   piece of Deadfish code."
  '(member :increment :decrement :square :output :whitespace :unknown))
;; Abstract output protocol; Standard-Console below implements it.
(defclass Console ()
  ()
  (:documentation
    "The ``Console'' interface describes a data sink intended for the
     output of information from a program to the user side."))
(defgeneric console-print-number (console number)
  (:documentation
    "Prints the NUMBER to the CONSOLE and returns the modified
     CONSOLE."))
(defgeneric console-print-character (console character)
  (:documentation
    "Prints the CHARACTER to the CONSOLE and returns the modified
     CONSOLE."))
(defgeneric console-print-string (console string)
  (:documentation
    "Prints the STRING's characters in an unquoted form to the CONSOLE
     and returns the modified CONSOLE."))
(defgeneric console-clear (console)
  (:documentation
    "Clears the CONSOLE's content and returns the modified CONSOLE."))
;; Concrete Console whose methods all write via FORMAT to the
;; DESTINATION slot.
(defclass Standard-Console (Console)
  ((destination
    :initarg :destination
    :initform T
    :type destination
    :documentation "The data sink to write numbers or characters to.")
   (clearing-scroll-size
    :initarg :clearing-scroll-size
    :initform 10
    :accessor standard-console-clearing-scroll-size
    :type (integer 0 *)
    :documentation "The number of newlines to print to the DESTINATION
                    in order to simulate the clearing of this console."))
  (:documentation
    "The ``Standard-Console'' class provides an output commodity which
     manipulates a traditional Common Lisp destination --- either the
     standard output, a stream, or a dynamic string --- in order to
     convey information to the user.
     ---
     Being based upon the notion of the output conduit's perpetual
     extension, as counterdistinguished to the maintenance of a
     random-access data structure, a veridical clearing of the console
     exceeds the underlying subtrate's capacities. Instead, a
     configurable number of newlines avail as separators, in the best
     case transporting the data to conceal, present inside of the
     visible window, to a space which no longer provides a venue on the
     undesired portion.
     ---
     If the user, for instance, operates upon a terminal of 15 lines
     capacity, the ``Standard-Console'' instance should be initialized
     anenst its ``clearing-scroll-size'' property to apply 15 newlines
     as a means for shifting the current output upwards and outside of
     the visible area. Of course, the predicament of adjustable
     terminals, or windows, cannot be meliorated by this static console
     attribute, and the programmer is encumbered with the onus of
     invoking the ``standard-console-clearing-scroll-size'' function if
     optating an adjustment to such external influences."))
;; Thin constructor around MAKE-INSTANCE.
(defun make-standard-console (&key (destination T)
                                   (clearing-scroll-size 10))
  "Creates and returns a ``Standard-Console'' operating on the
   DESTINATION as its data sink, and utilizing the CLEARING-SCROLL-SIZE
   as the number of newline to simulate its clearing."
  (declare (type destination destination))
  (declare (type (integer 0 *) clearing-scroll-size))
  (the Standard-Console
    (make-instance 'Standard-Console
      :destination destination
      :clearing-scroll-size clearing-scroll-size)))
;; Renders the NUMBER in its decimal form ("~d").
(defmethod console-print-number ((console Standard-Console)
                                 (number integer))
  (declare (type Standard-Console console))
  (declare (type integer number))
  (format (slot-value console 'destination) "~d" number)
  (the Standard-Console console))
;; Emits the CHARACTER verbatim via the aesthetic directive "~a".
(defmethod console-print-character ((console Standard-Console)
                                    (character character))
  (declare (type Standard-Console console))
  (declare (type character character))
  (format (slot-value console 'destination) "~a" character)
  (the Standard-Console console))
;; Emits the STRING without surrounding quotes ("~a").
(defmethod console-print-string ((console Standard-Console)
                                 (string string))
  (declare (type Standard-Console console))
  (declare (type string string))
  (format (slot-value console 'destination) "~a" string)
  (the Standard-Console console))
;; "~v%" prints CLEARING-SCROLL-SIZE newlines, scrolling prior output
;; out of a terminal window of commensurate height.
(defmethod console-clear ((console Standard-Console))
  (declare (type Standard-Console console))
  (format (slot-value console 'destination) "~v%"
    (slot-value console 'clearing-scroll-size))
  (the Standard-Console console))
;; The Interpreter couples a fixed eight-cell integer tape with a
;; Console and the bookkeeping required by the quine command "!".
(defclass Interpreter ()
  ((memory
    :initarg :memory
    :initform (make-array 8
                :element-type 'integer
                :initial-element 0)
    :type (vector integer 8)
    :documentation "The 8-cell array storing the JR program's data.")
   (pointer
    :initarg :pointer
    :initform 0
    :type (integer 0 7)
    :documentation "A pointer into the MEMORY, referencing its currently
                    active cell.")
   (copy-of-code
    :initarg :copy-of-code
    :initform (make-array 0
                :element-type 'character
                :adjustable T
                :fill-pointer 0)
    :type string
    :documentation "A buffer to store the processed JR code into, as a
                    provision in the case of a quine's request.")
   (quine-requested-p
    :initarg :quine-requested-p
    :initform NIL
    :type boolean
    :documentation "A flag which determines whether, at the end of the
                    program, the complete processed source code shall be
                    printed, producing a quine.")
   (console
    :initarg :console
    :initform (make-standard-console)
    :type Console
    :documentation "The console to airt the output to."))
  (:documentation
    "The ``Interpreter'' class encapsulates all information requisite
     for interpreting a series of JR commands without loss of
     information."))
;; Thin constructor: all slots except the console assume their
;; defaults.
(defun make-interpreter (&key (console (make-standard-console)))
  "Creates and returns a new ``Interpreter'' employing the CONSOLE for
   its output operations."
  (declare (type Console console))
  (the Interpreter (make-instance 'Interpreter :console console)))
;; Fixes: (1) two comment lines had lost their ";;" prefixes, leaving
;; bare symbols inside the CASE form and rendering the file unloadable;
;; (2) the docstring claimed "returns no value", whereas the function
;; returns the INTERPRETER.
(defun interpreter-process-commands (interpreter code)
  "Interprets the piece of JR CODE using the INTERPRETER and returns
   the modified INTERPRETER."
  (declare (type Interpreter interpreter))
  (declare (type string code))
  (with-slots (memory pointer copy-of-code quine-requested-p console)
      interpreter
    (declare (type (vector integer 8) memory))
    (declare (type (integer 0 7) pointer))
    (declare (type string copy-of-code))
    (declare (type boolean quine-requested-p))
    (declare (type Console console))
    (flet
        ((current-cell ()
          "Returns the value stored in the MEMORY cell at the POINTER."
          (the integer
            (aref memory pointer)))
         ((setf current-cell) (new-value)
          "Sets the value stored in the MEMORY cell at the POINTER, and
           returns the NEW-VALUE."
          (declare (type integer new-value))
          ;; Deadfish-inherited wraparound: -1 and 256 collapse to 0.
          (setf (aref memory pointer)
            (cond
              ((= new-value -1)  0)
              ((= new-value 256) 0)
              (T                 new-value)))
          (the integer (aref memory pointer)))
         (memorize-character (character)
          "Stores the CHARACTER into the COPY-OF-CODE, necessary for
           providing a quine, and returns no value."
          (declare (type character character))
          (vector-push-extend character copy-of-code)
          (values)))
      (loop for token of-type character across code do
        (memorize-character token)
        (case token
          ;; Unreachable: TOKEN is always a character, never NIL, since
          ;; it is drawn from a string; retained for fidelity.
          ((NIL)
            (loop-finish))
          ;; Decrement the current cell.
          (#\[
            (decf (current-cell)))
          ;; Increment the current cell.
          (#\]
            (incf (current-cell)))
          ;; Square the current cell.
          (#\;
            (setf (current-cell)
              (* (current-cell)
                 (current-cell))))
          ;; Print the current cell as a number.
          (#\.
            (console-print-number console (current-cell)))
          ;; Print the current cell as an ASCII character.
          (#\,
            (console-print-character console
              (code-char (current-cell))))
          ;; Reset the current cell to zero.
          (#\@
            (setf (current-cell) 0))
          ;; Print the program source code.
          (#\!
            (setf quine-requested-p T))
          ;; Clear the console.
          (#\~
            (console-clear console))
          ;; Move the cell pointer to the left.
          (#\<
            (when (plusp pointer)
              (decf pointer)))
          ;; Move the cell pointer to the right.
          (#\>
            (when (< pointer (1- (length memory)))
              (incf pointer)))
          ;; Tolerate whitespaces and layout elements.
          ((#\Newline #\Space #\Tab)
            NIL)
          ;; According to the rules of Deadfish an error incites the
          ;; printing of a newline character.
          (otherwise
            (console-print-character console #\Newline))))
      (when quine-requested-p
        (console-print-string console copy-of-code))))
  (the Interpreter interpreter))
;; Thin wrapper around INTERPRETER-PROCESS-COMMANDS which discards the
;; returned interpreter in favor of no value.
(defun interpret-JR (interpreter code)
  "Interprets the piece of JR CODE using the INTERPRETER and returns
   no value."
  (declare (type Interpreter interpreter))
  (declare (type string code))
  (interpreter-process-commands interpreter code)
  (values))
;; Fix: (READ-LINE) with default arguments signals an END-OF-FILE error
;; upon input exhaustion, so the (UNLESS INPUT ...) termination branch
;; could never execute; EOF-ERROR-P = NIL makes it reachable.
(defun execute-JR-shell (interpreter)
  "Executes the JR shell using the INTERPRETER and returns no value.
   The shell terminates gracefully when the standard input is
   exhausted."
  (declare (type Interpreter interpreter))
  (loop do
    (format T "~&>> ")
    (let ((input (read-line *standard-input* NIL NIL)))
      (declare (type (or null string) input))
      (clear-input)
      ;; INPUT is NIL exactly upon end of file.
      (unless input
        (loop-finish))
      (interpreter-process-commands interpreter input)))
  (values))
(defun get-deadfish-command-for (instruction-set character)
  "Returns the ``deadfish-command'' associated in the INSTRUCTION-SET
   with the CHARACTER."
  (declare (type deadfish-instruction-set instruction-set))
  (declare (type character character))
  (the deadfish-command
    (case instruction-set
      ((:standard :XKCD)
        ;; Both variants share the whitespace and fallback handling and
        ;; deviate merely in their command characters.
        (if (member character '(#\Newline #\Space #\Tab) :test #'char=)
            :whitespace
            (let ((token-table
                    (if (eq instruction-set :standard)
                        '((#\i . :increment) (#\d . :decrement)
                          (#\s . :square)    (#\o . :output))
                        '((#\x . :increment) (#\d . :decrement)
                          (#\k . :square)    (#\c . :output)))))
              (or (cdr (assoc character token-table :test #'char=))
                  :unknown))))
      (otherwise
        (error "Invalid instruction set: ~s." instruction-set)))))
;; Fix: the previous WRITE-CHAR calls required a stream and thus
;; rejected a string with fill pointer, although the declared
;; DESTINATION type explicitly admits one; FORMAT accepts every member
;; of that type.  (For DESTINATION = T the sink thereby becomes
;; *STANDARD-OUTPUT*, consistent with the file's other output code.)
(defun convert-Deadfish-to-JR (deadfish-code
                               &key (destination T)
                                    (instruction-set :standard)
                                    (output-format :numeric))
  "Converts the piece of DEADFISH-CODE, stated using the
   INSTRUCTION-SET, to the equivalent JR code, and prints the result to
   the DESTINATION; for a NIL DESTINATION, returns the JR code as a
   fresh string instead."
  (declare (type string deadfish-code))
  (declare (type destination destination))
  (declare (type deadfish-instruction-set instruction-set))
  (declare (type output-format output-format))
  (if destination
      (loop
        for token of-type character across deadfish-code
        and position of-type fixnum from 0 by 1
        do
          (case (get-deadfish-command-for instruction-set token)
            (:increment
              (format destination "~c" #\]))
            (:decrement
              (format destination "~c" #\[))
            (:square
              (format destination "~c" #\;))
            (:output
              ;; The OUTPUT-FORMAT selects betwixt JR's numeric print
              ;; "." and its character print ",".
              (case output-format
                (:numeric   (format destination "~c" #\.))
                (:character (format destination "~c" #\,))
                (otherwise  (error "Invalid output format: ~s."
                              output-format))))
            (:whitespace
              (format destination "~c" token))
            (otherwise
              (error "Invalid character in the Deadfish code at ~
                      position ~d: ~s."
                position token))))
      (the string
        (with-output-to-string (output)
          (declare (type string-stream output))
          (convert-Deadfish-to-JR deadfish-code
            :destination output
            :instruction-set instruction-set
            :output-format output-format)))))
;; Fix: mechanical comment stripping destroyed this section --- bare
;; comment text remained without ";;" prefixes, and the "Hello, world!"
;; program string lost every line containing a semicolon (the stripper
;; treated ";" inside the string literal as a comment).  Both are
;; restored here; the demonstration forms are otherwise unchanged.
;; Print "Hello, world!".
(interpret-JR
  (make-interpreter)
  "
   ]]];[;]]]]]]]],
   @]];]]];[[[[[
   >]]];];],]]]]]]],,]]],
   <,[[[[[[[[[[[[,
   >>]]];]];[[,
   <,]]],[[[[[[,
   @]]];];,
   <],
  ")
;; Quine.
(interpret-JR (make-interpreter) "!")
;; Demonstrates the computational peculiarity of the underlying
;; "Deadfish" heritage, which for the values -1 and 256 simulates an
;; overflow by setting the accumulator --- or, in the case of JR, the
;; current cell --- to zero. The Deadfish equivalent comprises:
;;   iissso
(interpret-JR (make-interpreter) "]];;;.")
;; Execute the JR shell. Please note that such a program does not offer
;; the contingency for interruption; CAUTION: this form blocks, reading
;; the standard input until its exhaustion.
(execute-JR-shell (make-interpreter))
;; Print to the standard output the JR code
;;   ]];;;.
(convert-Deadfish-to-JR "iissso")
;; Print to the standard output the JR code
;;   ]];;;,
;; Note that the desinent JR command produced states the character print
;; operation "," in lieu of the Deadfish equivalent "." --- a corollary
;; of the ``:output-format :character'' option.
(convert-Deadfish-to-JR "iissso" :output-format :character)
;; Print to the standard output the JR code
;;   ]];;;.
(convert-Deadfish-to-JR "xxkkkc" :instruction-set :XKCD)
;; Executes the equivalent JR code
;;   ]];;;.
(interpret-JR (make-interpreter)
  (convert-Deadfish-to-JR "iissso" :destination NIL))
|
dca68e55c0d7c87e9891321f18f60372335f141b8eea77fb58dfed52d543b349 | ocaml-flambda/ocaml-jst | pr10661_ok.ml | (* TEST
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
*)
(* Minimal regression case: a module wrapping an empty class.  Per the
   TEST header above, the file merely has to pass the byte compiler. *)
module M = struct
 (* "row" declares no fields or methods. *)
 class row = object
 end
end
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/5bf2820278c58f6715dcfaf6fa61e09a9b0d8db3/testsuite/tests/typing-modules-bugs/pr10661_ok.ml | ocaml | TEST
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
|
(* Minimal regression case: a module wrapping an empty class; the file
   merely has to pass the byte compiler. *)
module M = struct
 (* "row" declares no fields or methods. *)
 class row = object
 end
end
|
c89a9f23512582a53148c169df13ffbcd07254559ba0da1346cfd85be4ca5360 | dmitryvk/sbcl-win32-threads | sysmacs.lisp | ;;;; miscellaneous system hacking macros
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!IMPL")
;;;; these are initialized in cold init
(defvar *in-without-gcing*)
(defvar *gc-inhibit*)
;;; When the dynamic usage increases beyond this amount, the system
;;; notes that a garbage collection needs to occur by setting
;;; *GC-PENDING* to T. It starts out as NIL meaning nobody has figured
;;; out what it should be yet.
(defvar *gc-pending*)
#!+sb-thread
(defvar *stop-for-gc-pending*)
;;; This one is initialized by the runtime, at thread creation. On
;;; non-x86oid gencgc targets, this is a per-thread list of objects
which must not be moved during GC . It is frobbed by the code for
with - pinned - objects in src / compiler / target / macros.lisp .
#!+(and gencgc (not (or x86 x86-64)))
(defvar sb!vm::*pinned-objects*)
(defmacro without-gcing (&body body)
#!+sb-doc
"Executes the forms in the body without doing a garbage collection. It
inhibits both automatically and explicitly triggered collections. Finally,
upon leaving the BODY if gc is not inhibited it runs the pending gc.
Similarly, if gc is triggered in another thread then it waits until gc is
enabled in this thread.
Implies SB-SYS:WITHOUT-INTERRUPTS for BODY, and causes any nested
SB-SYS:WITH-INTERRUPTS to signal a warning during execution of the BODY.
Should be used with great care, and not at all in multithreaded application
code: Any locks that are ever acquired while GC is inhibited need to be always
held with GC inhibited to prevent deadlocks: if T1 holds the lock and is
stopped for GC while T2 is waiting for the lock inside WITHOUT-GCING the
system will be deadlocked. Since SBCL does not currently document its internal
locks, application code can never be certain that this invariant is
maintained."
(with-unique-names (without-gcing-body)
`(dx-flet ((,without-gcing-body ()
,@body))
(if *gc-inhibit*
(,without-gcing-body)
We need to disable interrupts before disabling GC , so
;; that signal handlers using locks don't accidentally try
to grab them with GC inhibited .
(let ((*in-without-gcing* t))
(unwind-protect
(let* ((*allow-with-interrupts* nil)
(*interrupts-enabled* nil)
(*gc-inhibit* t))
(,without-gcing-body))
;; This is not racy becuase maybe_defer_handler
defers signals if * GC - INHIBIT * is NIL but there
;; is a pending gc or stop-for-gc.
(when (or *interrupt-pending*
*gc-pending*
#!+sb-thread *stop-for-gc-pending*)
(sb!unix::receive-pending-interrupt))))))))
EOF - OR - LOSE is a useful macro that handles EOF .
(defmacro eof-or-lose (stream eof-error-p eof-value)
`(if ,eof-error-p
(error 'end-of-file :stream ,stream)
,eof-value))
;;; These macros handle the special cases of T and NIL for input and
;;; output streams.
;;;
;;; FIXME: Shouldn't these be functions instead of macros?
(defmacro in-synonym-of (stream &optional check-type)
(let ((svar (gensym)))
`(let ((,svar ,stream))
(cond ((null ,svar) *standard-input*)
((eq ,svar t) *terminal-io*)
(t ,@(when check-type `((enforce-type ,svar ,check-type))) ;
#!+high-security
(unless (input-stream-p ,svar)
(error 'simple-type-error
:datum ,svar
:expected-type '(satisfies input-stream-p)
:format-control "~S isn't an input stream"
:format-arguments (list ,svar)))
,svar)))))
(defmacro out-synonym-of (stream &optional check-type)
(let ((svar (gensym)))
`(let ((,svar ,stream))
(cond ((null ,svar) *standard-output*)
((eq ,svar t) *terminal-io*)
(t ,@(when check-type `((check-type ,svar ,check-type)))
#!+high-security
(unless (output-stream-p ,svar)
(error 'simple-type-error
:datum ,svar
:expected-type '(satisfies output-stream-p)
:format-control "~S isn't an output stream."
:format-arguments (list ,svar)))
,svar)))))
WITH - mumble - STREAM calls the function in the given SLOT of the
;;; STREAM with the ARGS for ANSI-STREAMs, or the FUNCTION with the
;;; ARGS for FUNDAMENTAL-STREAMs.
(defmacro with-in-stream (stream (slot &rest args) &optional stream-dispatch)
`(let ((stream (in-synonym-of ,stream)))
,(if stream-dispatch
`(if (ansi-stream-p stream)
(funcall (,slot stream) stream ,@args)
,@(when stream-dispatch
`(,(destructuring-bind (function &rest args) stream-dispatch
`(,function stream ,@args)))))
`(funcall (,slot stream) stream ,@args))))
(defmacro with-out-stream/no-synonym (stream (slot &rest args) &optional stream-dispatch)
`(let ((stream ,stream))
,(if stream-dispatch
`(if (ansi-stream-p stream)
(funcall (,slot stream) stream ,@args)
,@(when stream-dispatch
`(,(destructuring-bind (function &rest args) stream-dispatch
`(,function stream ,@args)))))
`(funcall (,slot stream) stream ,@args))))
(defmacro with-out-stream (stream (slot &rest args) &optional stream-dispatch)
`(with-out-stream/no-synonym (out-synonym-of ,stream)
(,slot ,@args) ,stream-dispatch))
;;;; These are hacks to make the reader win.
;;; This macro sets up some local vars for use by the
;;; FAST-READ-CHAR macro within the enclosed lexical scope. The stream
;;; is assumed to be a ANSI-STREAM.
;;;
: Some functions ( e.g. ANSI - STREAM - READ - LINE ) use these variables
;;; directly, instead of indirecting through FAST-READ-CHAR.
(defmacro prepare-for-fast-read-char (stream &body forms)
`(let* ((%frc-stream% ,stream)
(%frc-method% (ansi-stream-in %frc-stream%))
(%frc-buffer% (ansi-stream-cin-buffer %frc-stream%))
(%frc-index% (ansi-stream-in-index %frc-stream%)))
(declare (type index %frc-index%)
(type ansi-stream %frc-stream%))
,@forms))
;;; This macro must be called after one is done with FAST-READ-CHAR
;;; inside its scope to decache the ANSI-STREAM-IN-INDEX.
(defmacro done-with-fast-read-char ()
`(setf (ansi-stream-in-index %frc-stream%) %frc-index%))
;;; a macro with the same calling convention as READ-CHAR, to be used
;;; within the scope of a PREPARE-FOR-FAST-READ-CHAR.
(defmacro fast-read-char (&optional (eof-error-p t) (eof-value ()))
`(cond
((not %frc-buffer%)
(funcall %frc-method% %frc-stream% ,eof-error-p ,eof-value))
((= %frc-index% +ansi-stream-in-buffer-length+)
(multiple-value-bind (eof-p index-or-value)
(fast-read-char-refill %frc-stream% ,eof-error-p ,eof-value)
(if eof-p
index-or-value
(progn
(setq %frc-index% (1+ index-or-value))
(aref %frc-buffer% index-or-value)))))
(t
(prog1 (aref %frc-buffer% %frc-index%)
(incf %frc-index%)))))
;;;; And these for the fasloader...
;;; Just like PREPARE-FOR-FAST-READ-CHAR except that we get the BIN
;;; method. The stream is assumed to be a ANSI-STREAM.
;;;
: It seems weird to have to remember to explicitly call
DONE - WITH - FAST - READ - BYTE at the end of this , given that we 're
;;; already wrapping the stuff inside in a block. Why not rename this
;;; macro to WITH-FAST-READ-BYTE, do the DONE-WITH-FAST-READ-BYTE stuff
;;; automatically at the end of the block, and eliminate
;;; DONE-WITH-FAST-READ-BYTE as a separate entity? (and similarly
for the FAST - READ - CHAR stuff ) -- WHN 19990825
(defmacro prepare-for-fast-read-byte (stream &body forms)
`(let* ((%frc-stream% ,stream)
(%frc-method% (ansi-stream-bin %frc-stream%))
(%frc-buffer% (ansi-stream-in-buffer %frc-stream%))
(%frc-index% (ansi-stream-in-index %frc-stream%)))
(declare (type index %frc-index%)
(type ansi-stream %frc-stream%))
,@forms))
;;; Similar to fast-read-char, but we use a different refill routine & don't
;;; convert to characters. If ANY-TYPE is true, then this can be used on any
;;; integer streams, and we don't assert the result type.
(defmacro fast-read-byte (&optional (eof-error-p t) (eof-value ()) any-type)
: should use ONCE - ONLY on EOF - ERROR - P and EOF - VALUE -- WHN 19990825
`(truly-the
,(if (and (eq eof-error-p t) (not any-type)) '(unsigned-byte 8) t)
(cond
((not %frc-buffer%)
(funcall %frc-method% %frc-stream% ,eof-error-p ,eof-value))
((= %frc-index% +ansi-stream-in-buffer-length+)
(prog1 (fast-read-byte-refill %frc-stream% ,eof-error-p ,eof-value)
(setq %frc-index% (ansi-stream-in-index %frc-stream%))))
(t
(prog1 (aref %frc-buffer% %frc-index%)
(incf %frc-index%))))))
(defmacro done-with-fast-read-byte ()
`(done-with-fast-read-char))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/code/sysmacs.lisp | lisp | miscellaneous system hacking macros
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
these are initialized in cold init
When the dynamic usage increases beyond this amount, the system
notes that a garbage collection needs to occur by setting
*GC-PENDING* to T. It starts out as NIL meaning nobody has figured
out what it should be yet.
This one is initialized by the runtime, at thread creation. On
non-x86oid gencgc targets, this is a per-thread list of objects
that signal handlers using locks don't accidentally try
This is not racy becuase maybe_defer_handler
is a pending gc or stop-for-gc.
These macros handle the special cases of T and NIL for input and
output streams.
FIXME: Shouldn't these be functions instead of macros?
STREAM with the ARGS for ANSI-STREAMs, or the FUNCTION with the
ARGS for FUNDAMENTAL-STREAMs.
These are hacks to make the reader win.
This macro sets up some local vars for use by the
FAST-READ-CHAR macro within the enclosed lexical scope. The stream
is assumed to be a ANSI-STREAM.
directly, instead of indirecting through FAST-READ-CHAR.
This macro must be called after one is done with FAST-READ-CHAR
inside its scope to decache the ANSI-STREAM-IN-INDEX.
a macro with the same calling convention as READ-CHAR, to be used
within the scope of a PREPARE-FOR-FAST-READ-CHAR.
And these for the fasloader...
Just like PREPARE-FOR-FAST-READ-CHAR except that we get the BIN
method. The stream is assumed to be a ANSI-STREAM.
already wrapping the stuff inside in a block. Why not rename this
macro to WITH-FAST-READ-BYTE, do the DONE-WITH-FAST-READ-BYTE stuff
automatically at the end of the block, and eliminate
DONE-WITH-FAST-READ-BYTE as a separate entity? (and similarly
Similar to fast-read-char, but we use a different refill routine & don't
convert to characters. If ANY-TYPE is true, then this can be used on any
integer streams, and we don't assert the result type. |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!IMPL")
(defvar *in-without-gcing*)
(defvar *gc-inhibit*)
(defvar *gc-pending*)
#!+sb-thread
(defvar *stop-for-gc-pending*)
which must not be moved during GC . It is frobbed by the code for
with - pinned - objects in src / compiler / target / macros.lisp .
#!+(and gencgc (not (or x86 x86-64)))
(defvar sb!vm::*pinned-objects*)
(defmacro without-gcing (&body body)
#!+sb-doc
"Executes the forms in the body without doing a garbage collection. It
inhibits both automatically and explicitly triggered collections. Finally,
upon leaving the BODY if gc is not inhibited it runs the pending gc.
Similarly, if gc is triggered in another thread then it waits until gc is
enabled in this thread.
Implies SB-SYS:WITHOUT-INTERRUPTS for BODY, and causes any nested
SB-SYS:WITH-INTERRUPTS to signal a warning during execution of the BODY.
Should be used with great care, and not at all in multithreaded application
code: Any locks that are ever acquired while GC is inhibited need to be always
held with GC inhibited to prevent deadlocks: if T1 holds the lock and is
stopped for GC while T2 is waiting for the lock inside WITHOUT-GCING the
system will be deadlocked. Since SBCL does not currently document its internal
locks, application code can never be certain that this invariant is
maintained."
(with-unique-names (without-gcing-body)
`(dx-flet ((,without-gcing-body ()
,@body))
(if *gc-inhibit*
(,without-gcing-body)
We need to disable interrupts before disabling GC , so
to grab them with GC inhibited .
(let ((*in-without-gcing* t))
(unwind-protect
(let* ((*allow-with-interrupts* nil)
(*interrupts-enabled* nil)
(*gc-inhibit* t))
(,without-gcing-body))
defers signals if * GC - INHIBIT * is NIL but there
(when (or *interrupt-pending*
*gc-pending*
#!+sb-thread *stop-for-gc-pending*)
(sb!unix::receive-pending-interrupt))))))))
EOF - OR - LOSE is a useful macro that handles EOF .
(defmacro eof-or-lose (stream eof-error-p eof-value)
`(if ,eof-error-p
(error 'end-of-file :stream ,stream)
,eof-value))
(defmacro in-synonym-of (stream &optional check-type)
(let ((svar (gensym)))
`(let ((,svar ,stream))
(cond ((null ,svar) *standard-input*)
((eq ,svar t) *terminal-io*)
#!+high-security
(unless (input-stream-p ,svar)
(error 'simple-type-error
:datum ,svar
:expected-type '(satisfies input-stream-p)
:format-control "~S isn't an input stream"
:format-arguments (list ,svar)))
,svar)))))
(defmacro out-synonym-of (stream &optional check-type)
(let ((svar (gensym)))
`(let ((,svar ,stream))
(cond ((null ,svar) *standard-output*)
((eq ,svar t) *terminal-io*)
(t ,@(when check-type `((check-type ,svar ,check-type)))
#!+high-security
(unless (output-stream-p ,svar)
(error 'simple-type-error
:datum ,svar
:expected-type '(satisfies output-stream-p)
:format-control "~S isn't an output stream."
:format-arguments (list ,svar)))
,svar)))))
WITH - mumble - STREAM calls the function in the given SLOT of the
(defmacro with-in-stream (stream (slot &rest args) &optional stream-dispatch)
`(let ((stream (in-synonym-of ,stream)))
,(if stream-dispatch
`(if (ansi-stream-p stream)
(funcall (,slot stream) stream ,@args)
,@(when stream-dispatch
`(,(destructuring-bind (function &rest args) stream-dispatch
`(,function stream ,@args)))))
`(funcall (,slot stream) stream ,@args))))
(defmacro with-out-stream/no-synonym (stream (slot &rest args) &optional stream-dispatch)
`(let ((stream ,stream))
,(if stream-dispatch
`(if (ansi-stream-p stream)
(funcall (,slot stream) stream ,@args)
,@(when stream-dispatch
`(,(destructuring-bind (function &rest args) stream-dispatch
`(,function stream ,@args)))))
`(funcall (,slot stream) stream ,@args))))
(defmacro with-out-stream (stream (slot &rest args) &optional stream-dispatch)
`(with-out-stream/no-synonym (out-synonym-of ,stream)
(,slot ,@args) ,stream-dispatch))
: Some functions ( e.g. ANSI - STREAM - READ - LINE ) use these variables
(defmacro prepare-for-fast-read-char (stream &body forms)
`(let* ((%frc-stream% ,stream)
(%frc-method% (ansi-stream-in %frc-stream%))
(%frc-buffer% (ansi-stream-cin-buffer %frc-stream%))
(%frc-index% (ansi-stream-in-index %frc-stream%)))
(declare (type index %frc-index%)
(type ansi-stream %frc-stream%))
,@forms))
(defmacro done-with-fast-read-char ()
`(setf (ansi-stream-in-index %frc-stream%) %frc-index%))
(defmacro fast-read-char (&optional (eof-error-p t) (eof-value ()))
`(cond
((not %frc-buffer%)
(funcall %frc-method% %frc-stream% ,eof-error-p ,eof-value))
((= %frc-index% +ansi-stream-in-buffer-length+)
(multiple-value-bind (eof-p index-or-value)
(fast-read-char-refill %frc-stream% ,eof-error-p ,eof-value)
(if eof-p
index-or-value
(progn
(setq %frc-index% (1+ index-or-value))
(aref %frc-buffer% index-or-value)))))
(t
(prog1 (aref %frc-buffer% %frc-index%)
(incf %frc-index%)))))
: It seems weird to have to remember to explicitly call
DONE - WITH - FAST - READ - BYTE at the end of this , given that we 're
for the FAST - READ - CHAR stuff ) -- WHN 19990825
(defmacro prepare-for-fast-read-byte (stream &body forms)
`(let* ((%frc-stream% ,stream)
(%frc-method% (ansi-stream-bin %frc-stream%))
(%frc-buffer% (ansi-stream-in-buffer %frc-stream%))
(%frc-index% (ansi-stream-in-index %frc-stream%)))
(declare (type index %frc-index%)
(type ansi-stream %frc-stream%))
,@forms))
(defmacro fast-read-byte (&optional (eof-error-p t) (eof-value ()) any-type)
: should use ONCE - ONLY on EOF - ERROR - P and EOF - VALUE -- WHN 19990825
`(truly-the
,(if (and (eq eof-error-p t) (not any-type)) '(unsigned-byte 8) t)
(cond
((not %frc-buffer%)
(funcall %frc-method% %frc-stream% ,eof-error-p ,eof-value))
((= %frc-index% +ansi-stream-in-buffer-length+)
(prog1 (fast-read-byte-refill %frc-stream% ,eof-error-p ,eof-value)
(setq %frc-index% (ansi-stream-in-index %frc-stream%))))
(t
(prog1 (aref %frc-buffer% %frc-index%)
(incf %frc-index%))))))
(defmacro done-with-fast-read-byte ()
`(done-with-fast-read-char))
|
18460c628327ac2af025ec191289fb4b5f5f2d72b2b5aef6ff4009a94d9d0293 | nasa/Common-Metadata-Repository | service.clj | (ns cmr.umm-spec.test.migration.version.service
(:require
[cheshire.core :refer [decode]]
[clojure.java.io :as io]
[clojure.test :refer :all]
[clojure.test.check.generators :as gen]
[cmr.common.mime-types :as mt]
[cmr.common.test.test-check-ext :as ext :refer [defspec]]
[cmr.common.util :refer [are3]]
[cmr.umm-spec.migration.version.core :as vm]
[cmr.umm-spec.migration.version.service :as service]
[cmr.umm-spec.test.location-keywords-helper :as lkt]
[cmr.umm-spec.test.umm-generators :as umm-gen]
[cmr.umm-spec.umm-spec-core :as core]
[cmr.umm-spec.util :as u]
[cmr.umm-spec.versioning :as v]
[com.gfredericks.test.chuck.clojure-test :refer [for-all]]))
(def service-concept-1-0
{:RelatedURL {:URLContentType "CollectionURL"
:Description "OPeNDAP Service"
:Type "GET SERVICE"
:URL "/"},
:Coverage {:Type "SPATIAL_POINT"
:CoverageSpatialExtent {:Type "SPATIAL_POINT"}}
:AccessConstraints [(apply str (repeat 1024 "x"))]
:UseConstraints [(apply str (repeat 1024 "x"))]
:ServiceQuality {:QualityFlag "Available"
:Lineage (apply str (repeat 100 "x"))}})
(def service-concept-1-1
{:Coverage {:CoverageSpatialExtent {:CoverageSpatialExtentTypeType "SPATIAL_POINT"}}
:AccessConstraints "TEST"
:UseConstraints "TEST"
:ServiceOrganizations [{:Roles ["SERVICE PROVIDER"]
:ShortName "TEST ShortName"}]
:RelatedURLs [{:URLContentType "CollectionURL"
:Description "OPeNDAP Service"
:Type "GET SERVICE"
:URL "/"}]})
(deftest test-version-steps
(with-bindings {#'cmr.umm-spec.versioning/versions {:service ["1.0" "1.1"]}}
(is (= [] (#'vm/version-steps :service "1.1" "1.1")))
(is (= [["1.0" "1.1"]] (#'vm/version-steps :service "1.0" "1.1")))
(is (= [["1.1" "1.0"]] (#'vm/version-steps :service "1.1" "1.0")))))
(defspec all-migrations-produce-valid-umm-spec 100
(for-all [umm-record (gen/no-shrink umm-gen/umm-var-generator)
dest-version (gen/elements (v/versions :service))]
(let [dest-media-type (str mt/umm-json "; version=" dest-version)
metadata (core/generate-metadata (lkt/setup-context-for-test)
umm-record dest-media-type)]
(empty? (core/validate-metadata :service dest-media-type metadata)))))
(deftest migrate-1_0-up-to-1_1
(is (= service-concept-1-1
(vm/migrate-umm {} :service "1.0" "1.1"
{:Coverage {:Type "SPATIAL_POINT"}
:AccessConstraints ["TEST"]
:UseConstraints ["TEST"]
:ServiceOrganizations [{:Roles ["SERVICE PROVIDER"]
:ShortName "TEST ShortName"
:Uuid "TEST Uuid"}]
:RelatedURL {:URL "/" :Description "OPeNDAP Service"
:Type "GET SERVICE"
:URLContentType "CollectionURL"}}))))
(deftest migrate-1_1-down-to-1_0
(is (= service-concept-1-0
(vm/migrate-umm {} :service "1.1" "1.0"
{:RelatedURLs [{:URL "/" :Description "OPeNDAP Service"
:Type "GET SERVICE"
:URLContentType "CollectionURL"}]
:AccessConstraints (apply str (repeat 4000 "x"))
:UseConstraints (apply str (repeat 20000 "x"))
:ServiceQuality {:QualityFlag "Available"
:Lineage (apply str (repeat 4000 "x"))}
:Coverage {:CoverageSpatialExtent {:CoverageSpatialExtentTypeType
"SPATIAL_POINT"}}}))))
(deftest migrate-service-options-1_1-up-to-1_2
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial", "Variable"]
:SupportedInputProjections [{:ProjectionName "Geographic"}]
:SupportedOutputProjections [{:ProjectionName "Geographic"}]
:SupportedInputFormats ["BINARY" "HDF4" "NETCDF-3" "HDF-EOS2"]
:SupportedOutputFormats ["BINARY" "HDF4" "NETCDF-3" "HDF-EOS2"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.1" "1.2"
{:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial" "Variable"]
:SupportedProjections [ "Geographic"]
:SupportedFormats ["Binary" "HDF4" "netCDF-3" "HDF-EOS4"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}))))
(deftest migrate-service-options-1_2-down-to-1_1
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial" "Variable"]
:SupportedProjections [ "Geographic"]
:SupportedFormats ["Binary" "HDF4" "HDF-EOS4" "HDF-EOS5"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.2" "1.1"
{:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial", "Variable"]
:SupportedInputProjections [{:ProjectionName "Geographic"}]
:SupportedOutputProjections [{:ProjectionName "Geographic"}]
:SupportedInputFormats ["BINARY" "HDF4" "HDF-EOS2" "HDF-EOS" "KML"]
:SupportedOutputFormats ["BINARY" "HDF4" "NETCDF-3" "HDF-EOS4"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}))))
(deftest migrate-contact-groups-1_1-up-to-1_2
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:GroupName "D TEAM"}]}]}
(vm/migrate-umm
{} :service "1.1" "1.2"
{:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:Uuid "74a1f32f-ca06-489b-bd61-4ce85872df9c"
:NonServiceOrganizationAffiliation "MSFC"
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:Uuid "86a1f32f-ca06-489b-bd61-4ce85872df08"
:NonServiceOrganizationAffiliation "GSFC"
:GroupName "D TEAM"}]}]}))))
(deftest migrate-contact-groups-1_2-down-to-1_1
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:GroupName "D TEAM"}]}]}
(vm/migrate-umm
{} :service "1.2" "1.1"
{:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:GroupName "D TEAM"}]}]}))))
(deftest migrate-main-fields-1_1-up-to-1_2
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.1" "1.2"
{:Type "OPeNDAP"
:LongName "long name"
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]
:OnlineAccessURLPatternMatch "abc*"
:OnlineAccessURLPatternSubstitution "dummy_pattern"
:Coverage {:Name "dummy"}}))))
(deftest migrate-main-fields-1_2-down-to-1_1
(is (= {:Type "WEB SERVICES"
:LongName (apply str (repeat 120 "x"))
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.2" "1.1"
{:Type "ESI"
:LongName (apply str (repeat 200 "x"))
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]
:OperationMetadata []}))))
(deftest create-main-url-for-v1-3-test
"Test the create-main-url-for-1_3 function"
(are3 [expected-result related-urls]
(is (= expected-result
(service/create-main-url-for-1_3 related-urls)))
"Replace the RelatedURLs with the first DistributionURL."
{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URLValue "/"}
{:RelatedURLs
[{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}
{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"}
{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}]}
"Since DistributionURL doesn't exist nil is returned."
nil
{:RelatedURLs
[{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}
{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}]}))
(deftest create-main-url-for-v1-2-test
"Test the create-main-url-for-1_2 function"
(are3 [expected-result url]
(is (= expected-result
(service/create-main-related-urls-for-1_2 url)))
"Replace the URL sub element with those from RelatedURL."
[{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"}]
{:URL {:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URLValue "/"}}
"Since there are no RelatedURLs none should come back."
nil
nil))
(def remove-get-data-service-1-2->1-3-test-input
{:Roles ["SCIENCE CONTACT"]
:ContactInformation {:RelatedUrls [{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"
:GetData {:Format "ascii"
:MimeType "application/xml"
:Size 10
:Unit "MB"
:Fees "$0.01"}}]
:ContactMechanisms [{:Type "Email"
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone" :Value "301-614-9999"}]
:Addresses [{:StreetAddresses ["Goddard Earth Sciences Data and Information Systems" "Attn: User" "NASA Goddard Space Flight Center" "Code 610.2"]
:City "Greenbelt"
:StateProvince "MD"
:Country "USA"
:PostalCode "20771"}]}
:GroupName "Main Level Group Name 1"})
(def remove-get-data-service-1-2->1-3-test-expected
{:Roles ["SCIENCE CONTACT"]
:ContactInformation {:RelatedUrls [{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"}]
:ContactMechanisms [{:Type "Email"
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone" :Value "301-614-9999"}]
:Addresses [{:StreetAddresses ["Goddard Earth Sciences Data and Information Systems" "Attn: User" "NASA Goddard Space Flight Center" "Code 610.2"]
:City "Greenbelt"
:StateProvince "MD"
:Country "USA"
:PostalCode "20771"}]}
:GroupName "Main Level Group Name 1"})
(deftest remove-get-data-service-1-2->1-3-test
"Test the remove-get-data-service-1-2->1-3 function"
(are3 [expected-result contact]
(is (= expected-result
(service/remove-get-data-service-1-2->1-3 contact)))
"Remove the GetService from the ContactGroups RelatedUrls element."
remove-get-data-service-1-2->1-3-test-expected
remove-get-data-service-1-2->1-3-test-input))
(def service-org-contact-groups-v2
'({:Roles ["SCIENCE CONTACT"],
:ContactInformation
{:RelatedUrls
({:Description "OPeNDAP Service for AIRX3STD.006",
:URLContentType "DistributionURL",
:Type "GET SERVICE",
:Subtype "OPENDAP DATA",
:URL
"/"}),
:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:GroupName "Service Org Group Name"}
{:Roles ["TECHNICAL CONTACT"],
:ContactInformation
{:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:GroupName "Service Org Group Name"}
{:Roles ["SCIENCE CONTACT"],
:ContactInformation
{:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:GroupName "Service Org 2 Group Name 1"}))
(def service-org-contact-persons-v2
'({:Roles ["SERVICE PROVIDER"],
:ContactInformation
{:RelatedUrls
({:Description "OPeNDAP Service for AIRX3STD.006",
:URLContentType "DistributionURL",
:Type "GET SERVICE",
:Subtype "OPENDAP DATA",
:URL
"/"}),
:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:FirstName "FirstName Service Org",
:MiddleName "Service Org MiddleName",
:LastName "LastName Service Org"}))
(deftest update-service-organization-1-2->1-3-test
"Test the update-service-organization-1_2->1_3 function"
(let [s1-2 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.2/Service_v1.2->v1.3.json")))
true)
s1-3 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.3/Service_v1.3-from-v1.2.json")))
true)
serv-orgs [{:Roles ["SERVICE PROVIDER"],
:ShortName "NASA/GESDISC",
:LongName "GES DISC SERVICE HELP DESK SUPPORT GROUP"}
{:Roles ["SERVICE PROVIDER"],
:ShortName "NASA/GESDISC-2",
:LongName "GES DISC SERVICE HELP DESK SUPPORT GROUP 2"}]]
(are3 [expected-result test-record]
(let [actual-result (service/update-service-organization-1_2->1_3 test-record)]
(is (= (:ServiceOrganizations expected-result)
(:ServiceOrganizations actual-result)))
(is (= (:ContactGroups expected-result)
(:ContactGroups actual-result)))
(is (= (:ContactPersons expected-result)
(:ContactPersons actual-result))))
"Move the ServiceOrganizations ContactGroups and ContactPersons to the main level ContactGroups
and ContactPersons.
The input contains 2 ServiceOrganizations. The first ServiceOrganization contains 2 contact
groups and 1 contact persons. The second has 1 contact group and no contact persons. The main
level contact groups contains 2 groups and the main level contact persons contains 1 contact
person.
In the output there are 2 ServiceOrganizations with no contact information in them. The main
level contact groups contains 5 contact groups and the main level contact persons contains 2."
s1-3
s1-2
"Tests when ServiceOrganizations do not have any contacts and there no Contact Groups or Persons."
(-> s1-3
(assoc :ServiceOrganizations serv-orgs)
(dissoc :ContactGroups)
(dissoc :ContactPersons))
(-> s1-2
(assoc :ServiceOrganizations serv-orgs)
(dissoc :ContactGroups)
(dissoc :ContactPersons))
"Tests when no main level contact persons or groups exist"
(-> s1-3
(assoc :ContactGroups service-org-contact-groups-v2)
(assoc :ContactPersons service-org-contact-persons-v2))
(-> s1-2
(dissoc :ContactGroups)
(dissoc :ContactPersons)))))
(deftest create-online-resource-test
"Test the create-online-resource function."
(are3 [expected-result serv-orgs]
(is (= expected-result
(service/create-online-resource serv-orgs)))
"Test getting the first ContactInformation RelatedURLs where the URLContentType is DataCenterURL.
The output is an OnlineResource structure."
{:Linkage ""
:Description "A description"
:Name "HOME PAGE"}
{:ContactInformation {:ServiceHours "1-4"
:RelatedUrls [{:URLContentType "CollectionURL"
:Type "PROJECT HOME PAGE"
:URL ""}
{:URLContentType "DataCenterURL"
:Type "HOME PAGE"
:URL ""
:Description "A description"}]
:ContactInstruction "instructions"}}
"Tests When ContactInformation don't exist."
nil
nil
"Tests when I don't have any RelatedURLs."
nil
{:ContactInformation {:ServiceHours "1-4",
:ContactInstruction "instructions"}}))
(deftest update-service-organization-1-3->1-2-test
"Test the update-service-organization-1_3->1_2 function"
(let [s1-2 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.2/Service_v1.2-from-v1.3.json")))
true)
s1-3 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.3/Service_v1.3->v1.2.json")))
true)]
(are3 [expected-result test-record]
(let [actual-result (service/update-service-organization-1_3->1_2 test-record)]
(is (= (:ServiceOrganizations expected-result)
actual-result)))
"Add the version 1.3 OnlineResource to ContactInformation RelatedUrls. Remove OnlineResource."
s1-2
s1-3)))
(deftest update-service-type-1-3->1-2-test
  "Test the update-service-type-1_3->1_2 function"
  (are3 [expected-result test-record]
    (is (= expected-result
           (service/update-service-type-1_3->1_2 test-record)))

    ;; WMTS is not a valid 1.2 service type, so it maps down to WMS.
    "Test that WMTS gets translated to WMS"
    {:Type "WMS"}
    {:Type "WMTS"}

    "Test that EGI - No Processing is translated to WEB SERVICES"
    {:Type "WEB SERVICES"}
    {:Type "EGI - No Processing"}

    "Testing that other values pass through"
    {:Type "ECHO ORDERS"}
    {:Type "ECHO ORDERS"}))
(defn- load-service-file
  "Load a test data file for services"
  [version-file]
  ;; Resolve the file name under the example-data service directory on the
  ;; classpath, slurp its contents, and JSON-decode them. The trailing `true`
  ;; asks the decoder for keywordized map keys.
  (decode (->> version-file
               (format "example-data/umm-json/service/%s")
               io/resource
               io/file
               slurp)
          true))
(deftest migrations-up-and-down
  ""
  ;; Each quad below is: source version, source example file, destination
  ;; version, expected destination example file. The section separators were
  ;; plain `---- X tests ----` symbols, which broke the are3 argument
  ;; grouping; they are restored here as comments.
  (are3
    [source-version source-file destination-version destination-file]
    (let [expected (load-service-file destination-file)
          source (load-service-file source-file)
          actual (vm/migrate-umm {} :service source-version destination-version source)]
      (is (= expected actual)))

    ;; ---- 1.3 tests ----
    "Test the full migration of UMM-S from version 1.2 to version 1.3 using predefined example files."
    "1.2" "v1.2/Service_v1.2->v1.3.json"
    "1.3" "v1.3/Service_v1.3-from-v1.2.json"

    "Test the full migration of UMM-S from version 1.3 to version 1.2 using predefined example files."
    "1.3" "v1.3/Service_v1.3->v1.2.json"
    "1.2" "v1.2/Service_v1.2-from-v1.3.json"

    ;; ---- 1.3.1 tests ----
    "Test the full migration of UMM-S from version 1.3 to version 1.3.1 using predefined example files."
    "1.3" "v1.3/Service_v1.3-to-v1.3.1.json"
    "1.3.1" "v1.3.1/Service_v1.3.1-from-v1.3.json"

    "Test the full migration of UMM-S from version 1.3.1 to version 1.3 using predefined example files."
    "1.3.1" "v1.3.1/Service_v1.3.1-to-v1.3.json"
    "1.3" "v1.3/Service_v1.3-from-v1.3.1.json"

    ;; ---- 1.3.2 tests ----
    "Test the full migration of UMM-S from version 1.3.1 to version 1.3.2 using predefined example files."
    "1.3.1" "v1.3.1/Service_v1.3.1-to-v1.3.2.json"
    "1.3.2" "v1.3.2/Service_v1.3.2-from-v1.3.1.json"

    "Test the full migration of UMM-S from version 1.3.2 to version 1.3.1 using predefined example files."
    "1.3.2" "v1.3.2/Service_v1.3.2-to-v1.3.1.json"
    "1.3.1" "v1.3.1/Service_v1.3.1-from-v1.3.2.json"

    ;; ---- a 1.3.3 test ----
    "Test the full migration of UMM-S from version 1.3.3 to version 1.3.2 using predefined example files."
    "1.3.3" "v1.3.3/Service_v1.3.3-to-v1.3.2.json"
    "1.3.2" "v1.3.2/Service_v1.3.2-from-v1.3.3.json"

    ;; ---- 1.3.4 tests ----
    "Test the full migration of UMM-S from version 1.3.3 to version 1.3.4 using predefined example files."
    "1.3.3" "v1.3.3/Service_v1.3.3-to-v1.3.4.json"
    "1.3.4" "v1.3.4/Service_v1.3.4-from-v1.3.3.json"

    "Test the full migration of UMM-S from version 1.3.4 to version 1.3.3 using predefined example files."
    "1.3.4" "v1.3.4/Service_v1.3.4-to-v1.3.3.json"
    "1.3.3" "v1.3.3/Service_v1.3.3-from-v1.3.4.json"

    ;; ---- 1.4 tests ----
    "Migrating down from 1.4 to 1.3.4"
    "1.4" "v1.4/Service_v1.4-to-v1.3.4.json"
    "1.3.4" "v1.3.4/Service_v1.3.4-from-v1.3.3.json"

    "Migration up from 1.3.4 to 1.4"
    "1.3.4" "v1.3.4/Service_v1.3.4-from-v1.3.3.json"
    "1.4" "v1.4/Service_v1.4-from-v1.3.4.json"

    ;; ---- 1.4.1 tests ----
    "Migrating down from 1.4.1 to 1.4"
    "1.4.1" "v1.4.1/Service_v1.4.1.json"
    "1.4" "v1.4.1/Service_v1.4.1-to-v1.4.json"

    "Migrating up from 1.4 to 1.4.1"
    "1.4" "v1.4.1/Service_v1.4.json"
    "1.4.1" "v1.4.1/Service_v1.4-to-v1.4.1.json"

    ;; ---- 1.5.0 tests ----
    "Migrating down from 1.5.0 to 1.4.1"
    "1.5.0" "v1.5.0/Service_v1.5.0.json"
    "1.4.1" "v1.5.0/Service_v1.4.1.json"

    "Migrating up from 1.4.1 to 1.5.0"
    "1.4.1" "v1.5.0/Service_v1.4.1.json"
    "1.5.0" "v1.5.0/Service_v1.4.1-to-v1.5.0.json"

    ;; ---- 1.5.1 tests ----
    "Migrating down from 1.5.1 to 1.5.0"
    "1.5.1" "v1.5.1/Service_v1.5.1.json"
    "1.5.0" "v1.5.1/Service_v1.5.0.json"

    "Migrating down from 1.5.1 to 1.5.0 with multiple interpolation values."
    "1.5.1" "v1.5.1/Service_v1.5.1_interp.json"
    "1.5.0" "v1.5.1/Service_v1.5.0_interp.json"

    "Migrating up from 1.5.0 to 1.5.1"
    "1.5.0" "v1.5.1/Service_v1.5.0.json"
    "1.5.1" "v1.5.1/Service_v1.5.0-to-v1.5.1.json"))
(comment
(core/validate-metadata
:service "application/vnd.nasa.cmr.umm+json; version=1.2"
(slurp (io/file (io/resource "example-data/umm-json/service/v1.2/S1200245793-EDF_OPS_v1.2.json")))))
; (slurp (io/file (io/resource "example-data/umm-json/service/v1.2/S10000000-TEST_ORNL_WCS_v1.2.json"))))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/1606a1949aad31833d37718d25dce3190be8a9fe/umm-spec-lib/test/cmr/umm_spec/test/migration/version/service.clj | clojure | ---- 1.3.1 tests ----
---- a 1.3.3 test ----
(slurp (io/file (io/resource "example-data/umm-json/service/v1.2/S10000000-TEST_ORNL_WCS_v1.2.json")))) | (ns cmr.umm-spec.test.migration.version.service
(:require
[cheshire.core :refer [decode]]
[clojure.java.io :as io]
[clojure.test :refer :all]
[clojure.test.check.generators :as gen]
[cmr.common.mime-types :as mt]
[cmr.common.test.test-check-ext :as ext :refer [defspec]]
[cmr.common.util :refer [are3]]
[cmr.umm-spec.migration.version.core :as vm]
[cmr.umm-spec.migration.version.service :as service]
[cmr.umm-spec.test.location-keywords-helper :as lkt]
[cmr.umm-spec.test.umm-generators :as umm-gen]
[cmr.umm-spec.umm-spec-core :as core]
[cmr.umm-spec.util :as u]
[cmr.umm-spec.versioning :as v]
[com.gfredericks.test.chuck.clojure-test :refer [for-all]]))
(def service-concept-1-0
{:RelatedURL {:URLContentType "CollectionURL"
:Description "OPeNDAP Service"
:Type "GET SERVICE"
:URL "/"},
:Coverage {:Type "SPATIAL_POINT"
:CoverageSpatialExtent {:Type "SPATIAL_POINT"}}
:AccessConstraints [(apply str (repeat 1024 "x"))]
:UseConstraints [(apply str (repeat 1024 "x"))]
:ServiceQuality {:QualityFlag "Available"
:Lineage (apply str (repeat 100 "x"))}})
(def service-concept-1-1
{:Coverage {:CoverageSpatialExtent {:CoverageSpatialExtentTypeType "SPATIAL_POINT"}}
:AccessConstraints "TEST"
:UseConstraints "TEST"
:ServiceOrganizations [{:Roles ["SERVICE PROVIDER"]
:ShortName "TEST ShortName"}]
:RelatedURLs [{:URLContentType "CollectionURL"
:Description "OPeNDAP Service"
:Type "GET SERVICE"
:URL "/"}]})
(deftest test-version-steps
  ;; Pin the known service versions so this test is independent of any
  ;; versions added to the real versioning table later.
  (with-bindings {#'cmr.umm-spec.versioning/versions {:service ["1.0" "1.1"]}}
    ;; No steps when source and destination are the same version.
    (is (= [] (#'vm/version-steps :service "1.1" "1.1")))
    ;; A single hop up, and a single hop down.
    (is (= [["1.0" "1.1"]] (#'vm/version-steps :service "1.0" "1.1")))
    (is (= [["1.1" "1.0"]] (#'vm/version-steps :service "1.1" "1.0")))))
(defspec all-migrations-produce-valid-umm-spec 100
(for-all [umm-record (gen/no-shrink umm-gen/umm-var-generator)
dest-version (gen/elements (v/versions :service))]
(let [dest-media-type (str mt/umm-json "; version=" dest-version)
metadata (core/generate-metadata (lkt/setup-context-for-test)
umm-record dest-media-type)]
(empty? (core/validate-metadata :service dest-media-type metadata)))))
(deftest migrate-1_0-up-to-1_1
(is (= service-concept-1-1
(vm/migrate-umm {} :service "1.0" "1.1"
{:Coverage {:Type "SPATIAL_POINT"}
:AccessConstraints ["TEST"]
:UseConstraints ["TEST"]
:ServiceOrganizations [{:Roles ["SERVICE PROVIDER"]
:ShortName "TEST ShortName"
:Uuid "TEST Uuid"}]
:RelatedURL {:URL "/" :Description "OPeNDAP Service"
:Type "GET SERVICE"
:URLContentType "CollectionURL"}}))))
(deftest migrate-1_1-down-to-1_0
(is (= service-concept-1-0
(vm/migrate-umm {} :service "1.1" "1.0"
{:RelatedURLs [{:URL "/" :Description "OPeNDAP Service"
:Type "GET SERVICE"
:URLContentType "CollectionURL"}]
:AccessConstraints (apply str (repeat 4000 "x"))
:UseConstraints (apply str (repeat 20000 "x"))
:ServiceQuality {:QualityFlag "Available"
:Lineage (apply str (repeat 4000 "x"))}
:Coverage {:CoverageSpatialExtent {:CoverageSpatialExtentTypeType
"SPATIAL_POINT"}}}))))
(deftest migrate-service-options-1_1-up-to-1_2
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial", "Variable"]
:SupportedInputProjections [{:ProjectionName "Geographic"}]
:SupportedOutputProjections [{:ProjectionName "Geographic"}]
:SupportedInputFormats ["BINARY" "HDF4" "NETCDF-3" "HDF-EOS2"]
:SupportedOutputFormats ["BINARY" "HDF4" "NETCDF-3" "HDF-EOS2"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.1" "1.2"
{:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial" "Variable"]
:SupportedProjections [ "Geographic"]
:SupportedFormats ["Binary" "HDF4" "netCDF-3" "HDF-EOS4"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}))))
(deftest migrate-service-options-1_2-down-to-1_1
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial" "Variable"]
:SupportedProjections [ "Geographic"]
:SupportedFormats ["Binary" "HDF4" "HDF-EOS4" "HDF-EOS5"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.2" "1.1"
{:Type "OPeNDAP"
:LongName "long name"
:ServiceOptions {:SubsetTypes [ "Spatial", "Variable"]
:SupportedInputProjections [{:ProjectionName "Geographic"}]
:SupportedOutputProjections [{:ProjectionName "Geographic"}]
:SupportedInputFormats ["BINARY" "HDF4" "HDF-EOS2" "HDF-EOS" "KML"]
:SupportedOutputFormats ["BINARY" "HDF4" "NETCDF-3" "HDF-EOS4"]}
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}))))
(deftest migrate-contact-groups-1_1-up-to-1_2
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:GroupName "D TEAM"}]}]}
(vm/migrate-umm
{} :service "1.1" "1.2"
{:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:Uuid "74a1f32f-ca06-489b-bd61-4ce85872df9c"
:NonServiceOrganizationAffiliation "MSFC"
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:Uuid "86a1f32f-ca06-489b-bd61-4ce85872df08"
:NonServiceOrganizationAffiliation "GSFC"
:GroupName "D TEAM"}]}]}))))
(deftest migrate-contact-groups-1_2-down-to-1_1
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:GroupName "D TEAM"}]}]}
(vm/migrate-umm
{} :service "1.2" "1.1"
{:Type "OPeNDAP"
:LongName "long name"
:ContactGroups [{:Roles [ "INVESTIGATOR"]
:GroupName "I TEAM"}]
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"
:ContactGroups [{:Roles [ "DEVELOPER"]
:GroupName "D TEAM"}]}]}))))
(deftest migrate-main-fields-1_1-up-to-1_2
(is (= {:Type "OPeNDAP"
:LongName "long name"
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.1" "1.2"
{:Type "OPeNDAP"
:LongName "long name"
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]
:OnlineAccessURLPatternMatch "abc*"
:OnlineAccessURLPatternSubstitution "dummy_pattern"
:Coverage {:Name "dummy"}}))))
(deftest migrate-main-fields-1_2-down-to-1_1
(is (= {:Type "WEB SERVICES"
:LongName (apply str (repeat 120 "x"))
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]}
(vm/migrate-umm
{} :service "1.2" "1.1"
{:Type "ESI"
:LongName (apply str (repeat 200 "x"))
:ServiceOrganizations [{:Roles ["DEVELOPER"]
:ShortName "EED2"}]
:OperationMetadata []}))))
(deftest create-main-url-for-v1-3-test
"Test the create-main-url-for-1_3 function"
(are3 [expected-result related-urls]
(is (= expected-result
(service/create-main-url-for-1_3 related-urls)))
"Replace the RelatedURLs with the first DistributionURL."
{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URLValue "/"}
{:RelatedURLs
[{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}
{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"}
{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}]}
"Since DistributionURL doesn't exist nil is returned."
nil
{:RelatedURLs
[{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}
{:Description "User Guide"
:URLContentType "PublicationURL"
:Type "VIEW RELATED INFORMATION"
:Subtype "USER'S GUIDE"
:URL ""}]}))
(deftest create-main-url-for-v1-2-test
"Test the create-main-url-for-1_2 function"
(are3 [expected-result url]
(is (= expected-result
(service/create-main-related-urls-for-1_2 url)))
"Replace the URL sub element with those from RelatedURL."
[{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"}]
{:URL {:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URLValue "/"}}
"Since there are no RelatedURLs none should come back."
nil
nil))
(def remove-get-data-service-1-2->1-3-test-input
{:Roles ["SCIENCE CONTACT"]
:ContactInformation {:RelatedUrls [{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"
:GetData {:Format "ascii"
:MimeType "application/xml"
:Size 10
:Unit "MB"
:Fees "$0.01"}}]
:ContactMechanisms [{:Type "Email"
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone" :Value "301-614-9999"}]
:Addresses [{:StreetAddresses ["Goddard Earth Sciences Data and Information Systems" "Attn: User" "NASA Goddard Space Flight Center" "Code 610.2"]
:City "Greenbelt"
:StateProvince "MD"
:Country "USA"
:PostalCode "20771"}]}
:GroupName "Main Level Group Name 1"})
(def remove-get-data-service-1-2->1-3-test-expected
{:Roles ["SCIENCE CONTACT"]
:ContactInformation {:RelatedUrls [{:Description "OPeNDAP Service for AIRX3STD.006"
:URLContentType "DistributionURL"
:Type "GET SERVICE"
:Subtype "OPENDAP DATA"
:URL "/"}]
:ContactMechanisms [{:Type "Email"
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone" :Value "301-614-9999"}]
:Addresses [{:StreetAddresses ["Goddard Earth Sciences Data and Information Systems" "Attn: User" "NASA Goddard Space Flight Center" "Code 610.2"]
:City "Greenbelt"
:StateProvince "MD"
:Country "USA"
:PostalCode "20771"}]}
:GroupName "Main Level Group Name 1"})
(deftest remove-get-data-service-1-2->1-3-test
"Test the remove-get-data-service-1-2->1-3 function"
(are3 [expected-result contact]
(is (= expected-result
(service/remove-get-data-service-1-2->1-3 contact)))
"Remove the GetService from the ContactGroups RelatedUrls element."
remove-get-data-service-1-2->1-3-test-expected
remove-get-data-service-1-2->1-3-test-input))
(def service-org-contact-groups-v2
'({:Roles ["SCIENCE CONTACT"],
:ContactInformation
{:RelatedUrls
({:Description "OPeNDAP Service for AIRX3STD.006",
:URLContentType "DistributionURL",
:Type "GET SERVICE",
:Subtype "OPENDAP DATA",
:URL
"/"}),
:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:GroupName "Service Org Group Name"}
{:Roles ["TECHNICAL CONTACT"],
:ContactInformation
{:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:GroupName "Service Org Group Name"}
{:Roles ["SCIENCE CONTACT"],
:ContactInformation
{:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:GroupName "Service Org 2 Group Name 1"}))
(def service-org-contact-persons-v2
'({:Roles ["SERVICE PROVIDER"],
:ContactInformation
{:RelatedUrls
({:Description "OPeNDAP Service for AIRX3STD.006",
:URLContentType "DistributionURL",
:Type "GET SERVICE",
:Subtype "OPENDAP DATA",
:URL
"/"}),
:ContactMechanisms
[{:Type "Email",
:Value "gsfc-help-disc at lists.nasa.gov"}
{:Type "Telephone", :Value "301-614-9999"}],
:Addresses
[{:StreetAddresses
["Goddard Earth Sciences Data and Information Systems, Attn: User , NASA Goddard Space Flight Center, Code 610.2"],
:City "Greenbelt",
:StateProvince "MD",
:Country "USA",
:PostalCode "20771"}]},
:FirstName "FirstName Service Org",
:MiddleName "Service Org MiddleName",
:LastName "LastName Service Org"}))
(deftest update-service-organization-1-2->1-3-test
"Test the update-service-organization-1_2->1_3 function"
(let [s1-2 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.2/Service_v1.2->v1.3.json")))
true)
s1-3 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.3/Service_v1.3-from-v1.2.json")))
true)
serv-orgs [{:Roles ["SERVICE PROVIDER"],
:ShortName "NASA/GESDISC",
:LongName "GES DISC SERVICE HELP DESK SUPPORT GROUP"}
{:Roles ["SERVICE PROVIDER"],
:ShortName "NASA/GESDISC-2",
:LongName "GES DISC SERVICE HELP DESK SUPPORT GROUP 2"}]]
(are3 [expected-result test-record]
(let [actual-result (service/update-service-organization-1_2->1_3 test-record)]
(is (= (:ServiceOrganizations expected-result)
(:ServiceOrganizations actual-result)))
(is (= (:ContactGroups expected-result)
(:ContactGroups actual-result)))
(is (= (:ContactPersons expected-result)
(:ContactPersons actual-result))))
"Move the ServiceOrganizations ContactGroups and ContactPersons to the main level ContactGroups
and ContactPersons.
The input contains 2 ServiceOrganizations. The first ServiceOrganization contains 2 contact
groups and 1 contact persons. The second has 1 contact group and no contact persons. The main
level contact groups contains 2 groups and the main level contact persons contains 1 contact
person.
In the output there are 2 ServiceOrganizations with no contact information in them. The main
level contact groups contains 5 contact groups and the main level contact persons contains 2."
s1-3
s1-2
"Tests when ServiceOrganizations do not have any contacts and there no Contact Groups or Persons."
(-> s1-3
(assoc :ServiceOrganizations serv-orgs)
(dissoc :ContactGroups)
(dissoc :ContactPersons))
(-> s1-2
(assoc :ServiceOrganizations serv-orgs)
(dissoc :ContactGroups)
(dissoc :ContactPersons))
"Tests when no main level contact persons or groups exist"
(-> s1-3
(assoc :ContactGroups service-org-contact-groups-v2)
(assoc :ContactPersons service-org-contact-persons-v2))
(-> s1-2
(dissoc :ContactGroups)
(dissoc :ContactPersons)))))
(deftest create-online-resource-test
"Test the create-online-resource function."
(are3 [expected-result serv-orgs]
(is (= expected-result
(service/create-online-resource serv-orgs)))
"Test getting the first ContactInformation RelatedURLs where the URLContentType is DataCenterURL.
The output is an OnlineResource structure."
{:Linkage ""
:Description "A description"
:Name "HOME PAGE"}
{:ContactInformation {:ServiceHours "1-4"
:RelatedUrls [{:URLContentType "CollectionURL"
:Type "PROJECT HOME PAGE"
:URL ""}
{:URLContentType "DataCenterURL"
:Type "HOME PAGE"
:URL ""
:Description "A description"}]
:ContactInstruction "instructions"}}
"Tests When ContactInformation don't exist."
nil
nil
"Tests when I don't have any RelatedURLs."
nil
{:ContactInformation {:ServiceHours "1-4",
:ContactInstruction "instructions"}}))
(deftest update-service-organization-1-3->1-2-test
"Test the update-service-organization-1_3->1_2 function"
(let [s1-2 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.2/Service_v1.2-from-v1.3.json")))
true)
s1-3 (decode
(slurp (io/file (io/resource "example-data/umm-json/service/v1.3/Service_v1.3->v1.2.json")))
true)]
(are3 [expected-result test-record]
(let [actual-result (service/update-service-organization-1_3->1_2 test-record)]
(is (= (:ServiceOrganizations expected-result)
actual-result)))
"Add the version 1.3 OnlineResource to ContactInformation RelatedUrls. Remove OnlineResource."
s1-2
s1-3)))
(deftest update-service-type-1-3->1-2-test
  "Test the update-service-type-1_3->1_2 function"
  (are3 [expected-result test-record]
    (is (= expected-result
           (service/update-service-type-1_3->1_2 test-record)))

    ;; WMTS is not a valid 1.2 service type, so it maps down to WMS.
    "Test that WMTS gets translated to WMS"
    {:Type "WMS"}
    {:Type "WMTS"}

    "Test that EGI - No Processing is translated to WEB SERVICES"
    {:Type "WEB SERVICES"}
    {:Type "EGI - No Processing"}

    "Testing that other values pass through"
    {:Type "ECHO ORDERS"}
    {:Type "ECHO ORDERS"}))
(defn- load-service-file
  "Read a service test-data file from the example-data/umm-json/service
  classpath directory and JSON-decode it with keywordized map keys."
  [version-file]
  (let [path (format "example-data/umm-json/service/%s" version-file)
        contents (slurp (io/file (io/resource path)))]
    (decode contents true)))
(deftest migrations-up-and-down
  ""
  ;; Each quad below is: source version, source example file, destination
  ;; version, expected destination example file. The section separators were
  ;; plain `---- X tests ----` symbols, which broke the are3 argument
  ;; grouping; they are restored here as comments.
  (are3
    [source-version source-file destination-version destination-file]
    (let [expected (load-service-file destination-file)
          source (load-service-file source-file)
          actual (vm/migrate-umm {} :service source-version destination-version source)]
      (is (= expected actual)))

    ;; ---- 1.3 tests ----
    "Test the full migration of UMM-S from version 1.2 to version 1.3 using predefined example files."
    "1.2" "v1.2/Service_v1.2->v1.3.json"
    "1.3" "v1.3/Service_v1.3-from-v1.2.json"

    "Test the full migration of UMM-S from version 1.3 to version 1.2 using predefined example files."
    "1.3" "v1.3/Service_v1.3->v1.2.json"
    "1.2" "v1.2/Service_v1.2-from-v1.3.json"

    ;; ---- 1.3.1 tests ----
    "Test the full migration of UMM-S from version 1.3 to version 1.3.1 using predefined example files."
    "1.3" "v1.3/Service_v1.3-to-v1.3.1.json"
    "1.3.1" "v1.3.1/Service_v1.3.1-from-v1.3.json"

    "Test the full migration of UMM-S from version 1.3.1 to version 1.3 using predefined example files."
    "1.3.1" "v1.3.1/Service_v1.3.1-to-v1.3.json"
    "1.3" "v1.3/Service_v1.3-from-v1.3.1.json"

    ;; ---- 1.3.2 tests ----
    "Test the full migration of UMM-S from version 1.3.1 to version 1.3.2 using predefined example files."
    "1.3.1" "v1.3.1/Service_v1.3.1-to-v1.3.2.json"
    "1.3.2" "v1.3.2/Service_v1.3.2-from-v1.3.1.json"

    "Test the full migration of UMM-S from version 1.3.2 to version 1.3.1 using predefined example files."
    "1.3.2" "v1.3.2/Service_v1.3.2-to-v1.3.1.json"
    "1.3.1" "v1.3.1/Service_v1.3.1-from-v1.3.2.json"

    ;; ---- a 1.3.3 test ----
    "Test the full migration of UMM-S from version 1.3.3 to version 1.3.2 using predefined example files."
    "1.3.3" "v1.3.3/Service_v1.3.3-to-v1.3.2.json"
    "1.3.2" "v1.3.2/Service_v1.3.2-from-v1.3.3.json"

    ;; ---- 1.3.4 tests ----
    "Test the full migration of UMM-S from version 1.3.3 to version 1.3.4 using predefined example files."
    "1.3.3" "v1.3.3/Service_v1.3.3-to-v1.3.4.json"
    "1.3.4" "v1.3.4/Service_v1.3.4-from-v1.3.3.json"

    "Test the full migration of UMM-S from version 1.3.4 to version 1.3.3 using predefined example files."
    "1.3.4" "v1.3.4/Service_v1.3.4-to-v1.3.3.json"
    "1.3.3" "v1.3.3/Service_v1.3.3-from-v1.3.4.json"

    ;; ---- 1.4 tests ----
    "Migrating down from 1.4 to 1.3.4"
    "1.4" "v1.4/Service_v1.4-to-v1.3.4.json"
    "1.3.4" "v1.3.4/Service_v1.3.4-from-v1.3.3.json"

    "Migration up from 1.3.4 to 1.4"
    "1.3.4" "v1.3.4/Service_v1.3.4-from-v1.3.3.json"
    "1.4" "v1.4/Service_v1.4-from-v1.3.4.json"

    ;; ---- 1.4.1 tests ----
    "Migrating down from 1.4.1 to 1.4"
    "1.4.1" "v1.4.1/Service_v1.4.1.json"
    "1.4" "v1.4.1/Service_v1.4.1-to-v1.4.json"

    "Migrating up from 1.4 to 1.4.1"
    "1.4" "v1.4.1/Service_v1.4.json"
    "1.4.1" "v1.4.1/Service_v1.4-to-v1.4.1.json"

    ;; ---- 1.5.0 tests ----
    "Migrating down from 1.5.0 to 1.4.1"
    "1.5.0" "v1.5.0/Service_v1.5.0.json"
    "1.4.1" "v1.5.0/Service_v1.4.1.json"

    "Migrating up from 1.4.1 to 1.5.0"
    "1.4.1" "v1.5.0/Service_v1.4.1.json"
    "1.5.0" "v1.5.0/Service_v1.4.1-to-v1.5.0.json"

    ;; ---- 1.5.1 tests ----
    "Migrating down from 1.5.1 to 1.5.0"
    "1.5.1" "v1.5.1/Service_v1.5.1.json"
    "1.5.0" "v1.5.1/Service_v1.5.0.json"

    "Migrating down from 1.5.1 to 1.5.0 with multiple interpolation values."
    "1.5.1" "v1.5.1/Service_v1.5.1_interp.json"
    "1.5.0" "v1.5.1/Service_v1.5.0_interp.json"

    "Migrating up from 1.5.0 to 1.5.1"
    "1.5.0" "v1.5.1/Service_v1.5.0.json"
    "1.5.1" "v1.5.1/Service_v1.5.0-to-v1.5.1.json"))
(comment
(core/validate-metadata
:service "application/vnd.nasa.cmr.umm+json; version=1.2"
(slurp (io/file (io/resource "example-data/umm-json/service/v1.2/S1200245793-EDF_OPS_v1.2.json")))))
|
7e3c742256657c8abbfb2f1fb778dfbc967cd58daa4896d4db149fad2c6eb0c2 | lasp-lang/partisan | partisan_support_otp.erl | -module(partisan_support_otp).
-compile([nowarn_export_all, export_all]).
%% @doc Start a single peer node named `Name' with the default (empty)
%% configuration. Delegates to start_node/2.
start_node(Name) ->
    start_node(Name, []).
%% @doc Start a single peer node derived from `Name' with the given
%% `Config'. On success the node is recorded in the process dictionary
%% under {?MODULE, nodes} and {ok, Node} is returned; otherwise the
%% common_test run is failed.
start_node(Name, Config) ->
    ServerPrefix = atom_to_list(Name) ++ "_server",
    Opts = [{peer_service_manager, partisan_pluggable_peer_service_manager},
            {servers, partisan_support:node_list(1, ServerPrefix, [])},
            {clients, []}],
    case partisan_support:start(ServerPrefix, Config, Opts) of
        [{_Id, Node}] ->
            _ = put({?MODULE, nodes}, [Node]),
            {ok, Node};
        [] ->
            ct:fail("Couldn't start peer")
    end.
%% @doc Stop a single previously started node. Thin wrapper over
%% stop_nodes/1.
stop_node(Name) ->
    stop_nodes([Name]).
%% @doc Stop the nodes in the list `ToStop' and remove them from the
%% tracked set stored in the process dictionary.
%%
%% Fixes two defects in the original implementation:
%%   * `lists:subtract(Nodes, [ToStop])' removed the *list* `ToStop' as a
%%     single element, so the tracked set never actually shrank;
%%   * `partisan_support:stop(Nodes)' stopped every tracked node instead
%%     of only the requested ones.
stop_nodes(ToStop) ->
    case get({?MODULE, nodes}) of
        undefined ->
            ok;
        Nodes ->
            Remaining = lists:subtract(Nodes, ToStop),
            _ = put({?MODULE, nodes}, Remaining),
            partisan_support:stop(ToStop),
            ok
    end.
%% @doc Stop every node started through this module and clear the record
%% kept in the process dictionary.
stop_all_nodes() ->
    Tracked = get({?MODULE, nodes}),
    case Tracked of
        undefined ->
            ok;
        AllNodes ->
            partisan_support:stop(AllNodes),
            _ = erase({?MODULE, nodes}),
            ok
    end.
| null | https://raw.githubusercontent.com/lasp-lang/partisan/73913c954f63b673ffc84dc1e3d02b6c00a415d8/test/partisan_support_otp.erl | erlang | -module(partisan_support_otp).
-compile([nowarn_export_all, export_all]).
start_node(Name) ->
start_node(Name, []).
start_node(Name, Config) ->
Prefix = string:join([atom_to_list(Name), "server"], "_"),
Result = partisan_support:start(Prefix, Config, [
{peer_service_manager, partisan_pluggable_peer_service_manager},
{servers, partisan_support:node_list(1, Prefix, [])},
{clients, []}
]),
case Result of
[] ->
ct:fail("Couldn't start peer");
[{_, PeerNode}] ->
_ = put({?MODULE, nodes}, [PeerNode]),
{ok, PeerNode}
end.
stop_node(Name) ->
stop_nodes([Name]).
stop_nodes(ToStop) ->
case get({?MODULE, nodes}) of
undefined ->
ok;
Nodes ->
Remaining = lists:subtract(Nodes, [ToStop]),
_ = put({?MODULE, nodes}, Remaining),
partisan_support:stop(Nodes),
ok
end.
stop_all_nodes() ->
case get({?MODULE, nodes}) of
undefined ->
ok;
Nodes ->
partisan_support:stop(Nodes),
_ = erase({?MODULE, nodes}),
ok
end.
| |
d7008bfb8b8d5d50c0811fe755c009bafd5c1e565e708b6d88624cec047fa413 | ejgallego/HOARe2 | exp.ml | Copyright ( c ) 2014 , The Trustees of the University of Pennsylvania
(* Copyright (c) 2014, The Trustees of the University of Pennsylvania
   Copyright (c) 2014, The IMDEA Software Institute
   All rights reserved.

   LICENSE: 3-clause BSD style.
   See the LICENSE file for details on licensing.
*)
open Parsetree
open EC.Location
open Constants
(* Miscellanous random bits for dealing with expressions, most of it will
eventually be moved elsewhere *)
let error loc msg = raise (ParseError (loc, msg))
(* This module captures the state of an expression *)
module ExpState = struct

  (* Build the initial first-order environment: the logical connectives
     plus the builtin why3 theories and the primitives they provide.
     Several comment markers below were stripped in transit, leaving
     bare words that do not parse; they are restored as comments. *)
  let fo_init () =
    let open Env in
    let open WhyImport in

    let e = empty in

    (* Logical conectives *)
    let e = add_type e tprop_info [] in

    let e = add_prim e (builtin_th, l_not)  ty_boolop1 in
    let e = add_prim e (builtin_th, l_and)  ty_boolop2 in
    let e = add_prim e (builtin_th, l_or)   ty_boolop2 in
    let e = add_prim e (builtin_th, l_impl) ty_boolop2 in

    (* Core theories *)
    let e = load_why3_theory builtin_th e in
    let e = load_why3_theory bool_th    e in
    let e = load_why3_theory ho_th      e in
    let e = Array.fold_left (fun e n ->
        load_why3_theory (tuple_th n) e) e @@ Array.init 10 (fun x -> x) in
    let e = load_why3_theory distr_th   e in

    (* Forall needs the functional type *)
    let e = add_prim e (builtin_th, l_all) ty_quant in

    (* Required for the monad *)
    let e = load_why3_theory real_th e in
    e

  type exp_st_rec = {
    env          : Env.env;
    (* Name of the file *)
    name         : string;
    (* Whether we will trust VC, in which case they are relegated to a
       why3 file *)
    trust        : string option;
    (* FIXME: This has to go away and be replaced with a proper PCF typer *)
    in_assertion : bool;
  }

  type exp_st = exp_st_rec located

  let getenv st       = (unloc st).env
  let update_env st f = lmap (fun st -> {st with env = f st.env}) st
  let with_env st f   = f (unloc st).env

  (* Builtins *)
  let initial name = mk_loc _dummy
    {
      env          = fo_init ();
      name         = name;
      trust        = None;
      in_assertion = false;
    }

  let empty = mk_loc _dummy
    {
      env          = Env.empty;
      name         = "";
      trust        = None;
      in_assertion = false;
    }

  (* Toggle whether we are typing inside an assertion. *)
  let enable_ass  = lmap (fun st -> { st with in_assertion = true;  })
  let disable_ass = lmap (fun st -> { st with in_assertion = false; })

  let enable_trust vc = lmap (fun st -> { st with trust = Some vc; })

  (* Name of the why3 file for the [n]-th trusted VC, if trust is on. *)
  let mk_vc_file st n = let st = unloc st in
    Option.map (fun tn -> st.name ^ "_vc_" ^ tn ^ "_" ^ (string_of_int n) ^ ".why") st.trust

  let open_binder st bi ty = lmap (fun st ->
    { st with
      env = Env.extend_bi bi ty st.env;
    }) st

  let extend st n rel opaque ty =
    let (bi, n_env) = Env.extend n rel opaque ty (getenv st) in
    (bi, update_env st (fun _ -> n_env) )

  let access st idx =
    Env.access (unloc st).env idx

  let prim_type st p = snd @@ Env.lookup_prim (getenv st) p
  let cons_type st c = snd @@ Env.lookup_cons (getenv st) c

  let type_info st ty = (fst @@ Env.lookup_type (getenv st) ty, ty)
  let cons_info st cs = (fst @@ Env.lookup_cons (getenv st) cs, cs)

end
open ExpState
module Builders = struct

  (* Build a non-relational variable node for de Bruijn index [idx]. *)
  let make_var idx bi env =
    EVar {
      v_binfo = bi;
      v_index = idx;
      v_side  = SiNone;
      v_size  = Env.length env;
    }

  (* Build a relational variable node tagged with a side. *)
  let make_rvar idx bi side env =
    EVar {
      v_binfo = bi;
      v_index = idx;
      v_side  = side;
      v_size  = Env.length env;
    }

  (* Resolve ident either to a primitive or to a HO binding. The
     second always has preference. (Comment markers restored; the dump
     had left these lines as bare, non-parsing text.) *)
  let resolve_ident env loc id side =
    (* Try first the HO bindings *)
    mk_loc loc @@
    match Env.lookup id env with
    | Some (idx, bi, _ty) ->
      begin match side with
        | None      -> make_var idx bi env
        | Some side -> make_rvar idx bi side env
      end
    (* Try the FO binders *)
    | None ->
      begin match side with
        | None ->
          begin
            try let (th, _ty) = Env.lookup_prim env id in
              EPrim (th, id)
            with Not_found -> error loc (Some ("Identifier " ^ id ^ " not bound!"))
          end
        | Some _ -> error loc (Some ("Identifier " ^ id ^ " is not relational or not declared, cannot resolve side!"))
      end

  let mk_from_id st id =
    resolve_ident (getenv st) (getloc st) id None

  (* Shadow [mk_loc] to take the location from a located value. *)
  let mk_loc (st : 'a located) e = mk_loc st.pl_loc e

  (* Expression and type builders *)
  let mk_exp_float st v =
    mk_loc st @@ EConst (ECReal v)

  let mk_exp_prim st p =
    mk_loc st @@ EPrim p

  let mk_exp_cs st p =
    mk_loc st @@ ECs p

  let mk_exp_var st idx bi =
    mk_loc st @@ make_var idx bi (getenv st)

  let mk_exp_lam st bi ty e =
    mk_loc st @@ ELam(bi, ty, e)

  let mk_exp_app st f arg =
    mk_loc st @@ EApp(f, arg)

  (* EG: We could improve this *)
  let mk_app_list e largs =
    EC.Location.mk_loc e.pl_loc @@ EApp(e, largs)

  let mk_exp_tuple l n e = mk_app_list (EC.Location.mk_loc l (ECs (tuple_th n, "Tuple" ^ (string_of_int n)))) e

  (* Curried binary application: [op e1 e2]. *)
  let mk_exp_bin st op e1 e2 =
    let op_exp = mk_from_id st op in
    let ap1    = mk_loc st @@ EApp(op_exp, [e1]) in
    mk_loc st @@ EApp (ap1, [e2])

  (* Exp should be already bound *)
  let mk_exp_forall st bi ty exp =
    let exp_all = mk_exp_prim st (builtin_th, l_all) in
    let exp_lam = mk_exp_lam st bi ty exp in
    mk_exp_app st exp_all [exp_lam]

  let mk_ty_float st =
    let ty = TPrim (type_info st "real", []) in
    mk_loc st (ty, None)

  let mk_ty_unit st e_ann =
    let ty = TPrim (type_info st "tuple0", []) in
    mk_loc st @@ (ty, e_ann)

  let mk_ty_m st bi_a bi_d ty e_ann =
    mk_loc st @@ (TM (bi_a, bi_d, ty), e_ann)

  let mk_ty_c st ty e_ann =
    mk_loc st @@ (TC ty, e_ann)

  (* The issue of nested refinement types is still open *)
  let mk_ty_ref st bi b_ty ass e_ann =
    (* Kind of a hack *)
    let (b_ty, ass) = match ty_u b_ty with
      | TRef (_bi, b_ty', ass') ->
        (* We must remove one binding *)
        let nv     = mk_from_id st bi.b_name in
        let b_ty_n = ty_subst  1 nv b_ty' in
        let ass_n  = exp_subst 1 nv ass'  in
        (b_ty_n, mk_exp_bin st l_and ass_n ass)
      | _ -> (b_ty, ass)
    in
    mk_loc st @@ (TRef (bi, b_ty, ass), e_ann)

end
(* open Builders *)
(* Normalize nested refinement types, even if we avoid the most
common cases in the typer *)
match ty_u b_ty with
| ( _ bi , b_ty ' , ass ' ) - >
(* let n_ass = mk_exp_bin l_and ass ass' in *)
(* | _ *)
(* Useless for now, they need the mythical exp_map_with_env... *)
(* let exp_sanity st e = *)
(* let f_sanity env v = *)
(* let (bi, _) = Env.access env v.v_index in *)
(* assert (v.v_binfo.b_name = bi.b_name) in *)
(* let f_map env _ v = *)
(* f_sanity env v; *)
EVar v in
let _ = exp_map 0 ( f_map ( getenv st ) ) e in
(* () *)
let ty_sanity =
(* let f_sanity env v = *)
(* let (bi, _) = Env.access env v.v_index in *)
(* assert (v.v_binfo.b_name = bi.b_name) in *)
(* let f_map env _ v = *)
(* f_sanity env v; *)
EVar v in
let _ = ty_map 0 ( f_map ( getenv st ) ) ty in
(* () *)
| null | https://raw.githubusercontent.com/ejgallego/HOARe2/48d8760696fdf4b8a3eda5a4d2a53eeba53072d8/src/exp.ml | ocaml | Miscellanous random bits for dealing with expressions, most of it will
eventually be moved elsewhere
This module captures the state of an expression
Logical conectives
Required for the monad
Name of the file
Whether we will trust VC, in which case they are relegated to a
why3 file
Builtins
Expression and type builders
Exp should be already bound
The issue of nested refinement types is still open
Kind of a hack
open Builders
Normalize nested refinement types, even if we avoid the most
common cases in the typer
let n_ass = mk_exp_bin l_and ass ass' in
| _
Useless for now, they need the mythical exp_map_with_env...
let exp_sanity st e =
let f_sanity env v =
let (bi, _) = Env.access env v.v_index in
assert (v.v_binfo.b_name = bi.b_name) in
let f_map env _ v =
f_sanity env v;
()
let f_sanity env v =
let (bi, _) = Env.access env v.v_index in
assert (v.v_binfo.b_name = bi.b_name) in
let f_map env _ v =
f_sanity env v;
() | Copyright ( c ) 2014 , The Trustees of the University of Pennsylvania
Copyright ( c ) 2014 , The IMDEA Software Institute
All rights reserved .
LICENSE : 3 - clause BSD style .
See the LICENSE file for details on licensing .
Copyright (c) 2014, The IMDEA Software Institute
All rights reserved.
LICENSE: 3-clause BSD style.
See the LICENSE file for details on licensing.
*)
open Parsetree
open EC.Location
open Constants
let error loc msg = raise (ParseError (loc, msg))
module ExpState = struct
let fo_init () =
let open Env in
let open WhyImport in
let e = empty in
let e = add_type e tprop_info [] in
let e = add_prim e (builtin_th, l_not) ty_boolop1 in
let e = add_prim e (builtin_th, l_and) ty_boolop2 in
let e = add_prim e (builtin_th, l_or) ty_boolop2 in
let e = add_prim e (builtin_th, l_impl) ty_boolop2 in
Core theories
let e = load_why3_theory builtin_th e in
let e = load_why3_theory bool_th e in
let e = load_why3_theory ho_th e in
let e = Array.fold_left (fun e n ->
load_why3_theory (tuple_th n) e) e @@ Array.init 10 (fun x -> x) in
let e = load_why3_theory distr_th e in
Forall needs the functional type
let e = add_prim e (builtin_th, l_all) ty_quant in
let e = load_why3_theory real_th e in
e
type exp_st_rec = {
env : Env.env;
name : string;
trust : string option;
FIXME : This has to go away and be replace with a proper PCF typer
in_assertion : bool;
}
type exp_st = exp_st_rec located
let getenv st = (unloc st).env
let update_env st f = lmap (fun st -> {st with env = f st.env}) st
let with_env st f = f (unloc st).env
let initial name = mk_loc _dummy
{
env = fo_init ();
name = name;
trust = None;
in_assertion = false;
}
let empty = mk_loc _dummy
{
env = Env.empty;
name = "";
trust = None;
in_assertion = false;
}
let enable_ass = lmap (fun st -> { st with in_assertion = true; })
let disable_ass = lmap (fun st -> { st with in_assertion = false; })
let enable_trust vc = lmap (fun st -> { st with trust = Some vc; })
let mk_vc_file st n = let st = unloc st in
Option.map (fun tn -> st.name ^ "_vc_" ^ tn ^ "_" ^ (string_of_int n) ^ ".why") st.trust
let open_binder st bi ty = lmap (fun st ->
{ st with
env = Env.extend_bi bi ty st.env;
}) st
let extend st n rel opaque ty =
let (bi, n_env) = Env.extend n rel opaque ty (getenv st) in
(bi, update_env st (fun _ -> n_env) )
let access st idx =
Env.access (unloc st).env idx
let prim_type st p = snd @@ Env.lookup_prim (getenv st) p
let cons_type st c = snd @@ Env.lookup_cons (getenv st) c
let type_info st ty = (fst @@ Env.lookup_type (getenv st) ty, ty)
let cons_info st cs = (fst @@ Env.lookup_cons (getenv st) cs, cs)
end
open ExpState
module Builders = struct
let make_var idx bi env =
EVar {
v_binfo = bi;
v_index = idx;
v_side = SiNone;
v_size = Env.length env;
}
let make_rvar idx bi side env =
EVar {
v_binfo = bi;
v_index = idx;
v_side = side;
v_size = Env.length env;
}
Resolve ident either to a primitive or to a HO binding . The
seconds has always preference .
seconds has always preference. *)
let resolve_ident env loc id side =
Try first the HO bindings
mk_loc loc @@
match Env.lookup id env with
| Some (idx, bi, _ty) ->
begin match side with
| None -> make_var idx bi env
| Some side -> make_rvar idx bi side env
end
Try the FO binders
| None ->
begin match side with
| None ->
begin
try let (th, _ty) = Env.lookup_prim env id in
EPrim (th, id)
with Not_found -> error loc (Some ("Identifier " ^ id ^ " not bound!"))
end
| Some _ -> error loc (Some ("Identifier " ^ id ^ " is not relational or not declared, cannot resolve side!"))
end
let mk_from_id st id =
resolve_ident (getenv st) (getloc st) id None
let mk_loc (st : 'a located) e = mk_loc st.pl_loc e
let mk_exp_float st v =
mk_loc st @@ EConst (ECReal v)
let mk_exp_prim st p =
mk_loc st @@ EPrim p
let mk_exp_cs st p =
mk_loc st @@ ECs p
let mk_exp_var st idx bi =
mk_loc st @@ make_var idx bi (getenv st)
let mk_exp_lam st bi ty e =
mk_loc st @@ ELam(bi, ty, e)
let mk_exp_app st f arg =
mk_loc st @@ EApp(f, arg)
EG : We could improve this
let mk_app_list e largs =
EC.Location.mk_loc e.pl_loc @@ EApp(e, largs)
let mk_exp_tuple l n e = mk_app_list (EC.Location.mk_loc l (ECs (tuple_th n, "Tuple" ^ (string_of_int n)))) e
let mk_exp_bin st op e1 e2 =
let op_exp = mk_from_id st op in
let ap1 = mk_loc st @@ EApp(op_exp, [e1]) in
mk_loc st @@ EApp (ap1, [e2])
let mk_exp_forall st bi ty exp =
let exp_all = mk_exp_prim st (builtin_th, l_all) in
let exp_lam = mk_exp_lam st bi ty exp in
mk_exp_app st exp_all [exp_lam]
let mk_ty_float st =
let ty = TPrim (type_info st "real", []) in
mk_loc st (ty, None)
let mk_ty_unit st e_ann =
let ty = TPrim (type_info st "tuple0", []) in
mk_loc st @@ (ty, e_ann)
let mk_ty_m st bi_a bi_d ty e_ann=
mk_loc st @@ (TM (bi_a, bi_d, ty), e_ann)
let mk_ty_c st ty e_ann=
mk_loc st @@ (TC ty, e_ann)
let mk_ty_ref st bi b_ty ass e_ann =
let (b_ty, ass) = match ty_u b_ty with
| TRef (_bi, b_ty', ass') ->
We must remove one binding
let nv = mk_from_id st bi.b_name in
let b_ty_n = ty_subst 1 nv b_ty' in
let ass_n = exp_subst 1 nv ass' in
(b_ty_n, mk_exp_bin st l_and ass_n ass)
| _ -> (b_ty, ass)
in
mk_loc st @@ (TRef (bi, b_ty, ass), e_ann)
end
match ty_u b_ty with
| ( _ bi , b_ty ' , ass ' ) - >
EVar v in
let _ = exp_map 0 ( f_map ( getenv st ) ) e in
let ty_sanity =
EVar v in
let _ = ty_map 0 ( f_map ( getenv st ) ) ty in
|
b90d866d0a7a30f52f04fb1928d74c76419b47f1c83c15630bd7136abadcfc48 | fractalide/fractalide | model.rkt | #lang racket
(require fractalide/modules/rkt/rkt-fbp/agent
fractalide/modules/rkt/rkt-fbp/def)
(require/edge ${cardano-wallet.wcreate})
(define-agent
#:input '("data" "trigger")
#:output '("out")
(define msg (recv (input "data")))
(recv (input "trigger"))
(send (output "out") msg)
)
| null | https://raw.githubusercontent.com/fractalide/fractalide/9c54ec2615fcc2a1f3363292d4eed2a0fcb9c3a5/modules/rkt/rkt-fbp/agents/cardano-wallet/wcreate/wallet-recovery/model.rkt | racket | #lang racket
(require fractalide/modules/rkt/rkt-fbp/agent
fractalide/modules/rkt/rkt-fbp/def)
(require/edge ${cardano-wallet.wcreate})
(define-agent
#:input '("data" "trigger")
#:output '("out")
(define msg (recv (input "data")))
(recv (input "trigger"))
(send (output "out") msg)
)
| |
d6a91ad856d293ea98dcc19efdb92624440bd99aa15a694a9424d85f67d18ce6 | Dasudian/DSDIN | dsdcuckoo.erl |
-module(dsdcuckoo).
%% API
-export([bin_dir/0,
bin/1,
lib_dir/0]).
%% Returns the path of the directory where the miner and verifier
%% executables are.
-spec bin_dir() -> file:filename_all().
bin_dir() ->
filename:join([priv_dir(), "bin"]).
%% Returns the path of the file of the specified executable.
-spec bin(nonempty_string()) -> file:filename_all().
bin(ExecutableBasename) ->
filename:join([bin_dir(), ExecutableBasename]).
%% Returns the directory where the shared-objects files needed by
%% miner and verifier executables are.
-spec lib_dir() -> file:filename_all().
lib_dir() ->
filename:join([priv_dir(), "lib"]).
priv_dir() ->
code:priv_dir(dsdcuckoo).
| null | https://raw.githubusercontent.com/Dasudian/DSDIN/b27a437d8deecae68613604fffcbb9804a6f1729/apps/dsdcuckoo/src/dsdcuckoo.erl | erlang | API
Returns the path of the directory where the miner and verifier
executables are.
Returns the path of the file of the specified executable.
Returns the directory where the shared-objects files needed by
miner and verifier executables are. |
-module(dsdcuckoo).
-export([bin_dir/0,
bin/1,
lib_dir/0]).
-spec bin_dir() -> file:filename_all().
bin_dir() ->
filename:join([priv_dir(), "bin"]).
-spec bin(nonempty_string()) -> file:filename_all().
bin(ExecutableBasename) ->
filename:join([bin_dir(), ExecutableBasename]).
-spec lib_dir() -> file:filename_all().
lib_dir() ->
filename:join([priv_dir(), "lib"]).
priv_dir() ->
code:priv_dir(dsdcuckoo).
|
198dc106b97dc5d75ce4021a50b993269dc3eb804d44f6432872fc5a81773f17 | avsm/eeww | thread.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
and , projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1995 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* Lightweight threads for [ 1003.1c ] and .
type t
(** The type of thread handles. *)
* { 1 Thread creation and termination }
val create : ('a -> 'b) -> 'a -> t
(** [Thread.create funct arg] creates a new thread of control,
in which the function application [funct arg]
is executed concurrently with the other threads of the domain.
The application of [Thread.create]
returns the handle of the newly created thread.
The new thread terminates when the application [funct arg]
returns, either normally or by raising the {!Thread.Exit} exception
or by raising any other uncaught exception.
In the last case, the uncaught exception is printed on standard error,
but not propagated back to the parent thread. Similarly, the
result of the application [funct arg] is discarded and not
directly accessible to the parent thread.
See also {!Domain.spawn} if you want parallel execution instead.
*)
val self : unit -> t
(** Return the handle for the thread currently executing. *)
val id : t -> int
(** Return the identifier of the given thread. A thread identifier
is an integer that identifies uniquely the thread.
It can be used to build data structures indexed by threads. *)
exception Exit
(** Exception raised by user code to initiate termination of the
current thread.
In a thread created by {!Thread.create} [funct] [arg], if the
{!Thread.Exit} exception reaches the top of the application
[funct arg], it has the effect of terminating the current thread
silently. In other contexts, there is no implicit handling of the
{!Thread.Exit} exception. *)
val exit : unit -> unit
[@@ocaml.deprecated "Use 'raise Thread.Exit' instead."]
(** Raise the {!Thread.Exit} exception.
In a thread created by {!Thread.create}, this will cause the thread
to terminate prematurely, unless the thread function handles the
exception itself. {!Fun.protect} finalizers and catch-all
exception handlers will be executed.
To make it clear that an exception is raised and will trigger
finalizers and catch-all exception handlers, it is recommended
to write [raise Thread.Exit] instead of [Thread.exit ()].
@before 5.0 A different implementation was used, not based on raising
an exception, and not running finalizers and catch-all handlers.
The previous implementation had a different behavior when called
outside of a thread created by {!Thread.create}. *)
* { 1 Suspending threads }
val delay: float -> unit
(** [delay d] suspends the execution of the calling thread for
[d] seconds. The other program threads continue to run during
this time. *)
val join : t -> unit
(** [join th] suspends the execution of the calling thread
until the thread [th] has terminated. *)
val yield : unit -> unit
(** Re-schedule the calling thread without suspending it.
This function can be used to give scheduling hints,
telling the scheduler that now is a good time to
switch to other threads. *)
(** {1 Waiting for file descriptors or processes} *)
(** The functions below are leftovers from an earlier, VM-based threading
system. The {!Unix} module provides equivalent functionality, in
a more general and more standard-conformant manner. It is recommended
to use {!Unix} functions directly. *)
val wait_timed_read : Unix.file_descr -> float -> bool
[@@ocaml.deprecated "Use Unix.select instead."]
(** See {!Thread.wait_timed_write}.*)
val wait_timed_write : Unix.file_descr -> float -> bool
[@@ocaml.deprecated "Use Unix.select instead."]
* Suspend the execution of the calling thread until at least
one character or EOF is available for reading ( [ wait_timed_read ] ) or
one character can be written without blocking ( [ wait_timed_write ] )
on the given Unix file descriptor . Wait for at most
the amount of time given as second argument ( in seconds ) .
Return [ true ] if the file descriptor is ready for input / output
and [ false ] if the timeout expired .
The same functionality can be achieved with { ! Unix.select } .
one character or EOF is available for reading ([wait_timed_read]) or
one character can be written without blocking ([wait_timed_write])
on the given Unix file descriptor. Wait for at most
the amount of time given as second argument (in seconds).
Return [true] if the file descriptor is ready for input/output
and [false] if the timeout expired.
The same functionality can be achieved with {!Unix.select}.
*)
val select :
Unix.file_descr list -> Unix.file_descr list ->
Unix.file_descr list -> float ->
Unix.file_descr list * Unix.file_descr list * Unix.file_descr list
[@@ocaml.deprecated "Use Unix.select instead."]
* Same function as { ! Unix.select } .
Suspend the execution of the calling thread until input / output
becomes possible on the given Unix file descriptors .
The arguments and results have the same meaning as for
{ ! Unix.select } .
Suspend the execution of the calling thread until input/output
becomes possible on the given Unix file descriptors.
The arguments and results have the same meaning as for
{!Unix.select}. *)
val wait_pid : int -> int * Unix.process_status
[@@ocaml.deprecated "Use Unix.waitpid instead."]
* Same function as { ! Unix.waitpid } .
[ wait_pid p ] suspends the execution of the calling thread
until the process specified by the process identifier [ p ]
terminates . Returns the pid of the child caught and
its termination status , as per { ! Unix.wait } .
[wait_pid p] suspends the execution of the calling thread
until the process specified by the process identifier [p]
terminates. Returns the pid of the child caught and
its termination status, as per {!Unix.wait}. *)
(** {1 Management of signals} *)
* Signal handling follows the POSIX thread model : signals generated
by a thread are delivered to that thread ; signals generated externally
are delivered to one of the threads that does not block it .
Each thread possesses a set of blocked signals , which can be modified
using { ! Thread.sigmask } . This set is inherited at thread creation time .
Per - thread signal masks are supported only by the system thread library
under Unix , but not under Win32 , nor by the VM thread library .
by a thread are delivered to that thread; signals generated externally
are delivered to one of the threads that does not block it.
Each thread possesses a set of blocked signals, which can be modified
using {!Thread.sigmask}. This set is inherited at thread creation time.
Per-thread signal masks are supported only by the system thread library
under Unix, but not under Win32, nor by the VM thread library. *)
val sigmask : Unix.sigprocmask_command -> int list -> int list
* [ sigmask cmd sigs ] changes the set of blocked signals for the
calling thread .
If [ cmd ] is [ SIG_SETMASK ] , blocked signals are set to those in
the list [ ] .
If [ cmd ] is [ SIG_BLOCK ] , the signals in [ sigs ] are added to
the set of blocked signals .
If [ cmd ] is [ SIG_UNBLOCK ] , the signals in [ sigs ] are removed
from the set of blocked signals .
[ sigmask ] returns the set of previously blocked signals for the thread .
calling thread.
If [cmd] is [SIG_SETMASK], blocked signals are set to those in
the list [sigs].
If [cmd] is [SIG_BLOCK], the signals in [sigs] are added to
the set of blocked signals.
If [cmd] is [SIG_UNBLOCK], the signals in [sigs] are removed
from the set of blocked signals.
[sigmask] returns the set of previously blocked signals for the thread. *)
val wait_signal : int list -> int
* [ wait_signal sigs ] suspends the execution of the calling thread
until the process receives one of the signals specified in the
list [ ] . It then returns the number of the signal received .
Signal handlers attached to the signals in [ sigs ] will not
be invoked . The signals [ sigs ] are expected to be blocked before
calling [ wait_signal ] .
until the process receives one of the signals specified in the
list [sigs]. It then returns the number of the signal received.
Signal handlers attached to the signals in [sigs] will not
be invoked. The signals [sigs] are expected to be blocked before
calling [wait_signal]. *)
* { 1 Uncaught exceptions }
val default_uncaught_exception_handler : exn -> unit
* [ Thread.default_uncaught_exception_handler ] will print the thread 's i d ,
exception and backtrace ( if available ) .
exception and backtrace (if available). *)
val set_uncaught_exception_handler : (exn -> unit) -> unit
(** [Thread.set_uncaught_exception_handler fn] registers [fn] as the handler
for uncaught exceptions.
If the newly set uncaught exception handler raise an exception,
{!default_uncaught_exception_handler} will be called. *)
| null | https://raw.githubusercontent.com/avsm/eeww/23ca8b36127b337512e13c6fb8e86b3a7254d4f9/boot/ocaml/otherlibs/systhreads/thread.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* The type of thread handles.
* [Thread.create funct arg] creates a new thread of control,
in which the function application [funct arg]
is executed concurrently with the other threads of the domain.
The application of [Thread.create]
returns the handle of the newly created thread.
The new thread terminates when the application [funct arg]
returns, either normally or by raising the {!Thread.Exit} exception
or by raising any other uncaught exception.
In the last case, the uncaught exception is printed on standard error,
but not propagated back to the parent thread. Similarly, the
result of the application [funct arg] is discarded and not
directly accessible to the parent thread.
See also {!Domain.spawn} if you want parallel execution instead.
* Return the handle for the thread currently executing.
* Return the identifier of the given thread. A thread identifier
is an integer that identifies uniquely the thread.
It can be used to build data structures indexed by threads.
* Exception raised by user code to initiate termination of the
current thread.
In a thread created by {!Thread.create} [funct] [arg], if the
{!Thread.Exit} exception reaches the top of the application
[funct arg], it has the effect of terminating the current thread
silently. In other contexts, there is no implicit handling of the
{!Thread.Exit} exception.
* Raise the {!Thread.Exit} exception.
In a thread created by {!Thread.create}, this will cause the thread
to terminate prematurely, unless the thread function handles the
exception itself. {!Fun.protect} finalizers and catch-all
exception handlers will be executed.
To make it clear that an exception is raised and will trigger
finalizers and catch-all exception handlers, it is recommended
to write [raise Thread.Exit] instead of [Thread.exit ()].
@before 5.0 A different implementation was used, not based on raising
an exception, and not running finalizers and catch-all handlers.
The previous implementation had a different behavior when called
outside of a thread created by {!Thread.create}.
* [delay d] suspends the execution of the calling thread for
[d] seconds. The other program threads continue to run during
this time.
* [join th] suspends the execution of the calling thread
until the thread [th] has terminated.
* Re-schedule the calling thread without suspending it.
This function can be used to give scheduling hints,
telling the scheduler that now is a good time to
switch to other threads.
* {1 Waiting for file descriptors or processes}
* The functions below are leftovers from an earlier, VM-based threading
system. The {!Unix} module provides equivalent functionality, in
a more general and more standard-conformant manner. It is recommended
to use {!Unix} functions directly.
* See {!Thread.wait_timed_write}.
* {1 Management of signals}
* [Thread.set_uncaught_exception_handler fn] registers [fn] as the handler
for uncaught exceptions.
If the newly set uncaught exception handler raise an exception,
{!default_uncaught_exception_handler} will be called. | and , projet Cristal , INRIA Rocquencourt
Copyright 1995 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Lightweight threads for [ 1003.1c ] and .
type t
* { 1 Thread creation and termination }
val create : ('a -> 'b) -> 'a -> t
val self : unit -> t
val id : t -> int
exception Exit
val exit : unit -> unit
[@@ocaml.deprecated "Use 'raise Thread.Exit' instead."]
* { 1 Suspending threads }
val delay: float -> unit
val join : t -> unit
val yield : unit -> unit
val wait_timed_read : Unix.file_descr -> float -> bool
[@@ocaml.deprecated "Use Unix.select instead."]
val wait_timed_write : Unix.file_descr -> float -> bool
[@@ocaml.deprecated "Use Unix.select instead."]
* Suspend the execution of the calling thread until at least
one character or EOF is available for reading ( [ wait_timed_read ] ) or
one character can be written without blocking ( [ wait_timed_write ] )
on the given Unix file descriptor . Wait for at most
the amount of time given as second argument ( in seconds ) .
Return [ true ] if the file descriptor is ready for input / output
and [ false ] if the timeout expired .
The same functionality can be achieved with { ! Unix.select } .
one character or EOF is available for reading ([wait_timed_read]) or
one character can be written without blocking ([wait_timed_write])
on the given Unix file descriptor. Wait for at most
the amount of time given as second argument (in seconds).
Return [true] if the file descriptor is ready for input/output
and [false] if the timeout expired.
The same functionality can be achieved with {!Unix.select}.
*)
val select :
Unix.file_descr list -> Unix.file_descr list ->
Unix.file_descr list -> float ->
Unix.file_descr list * Unix.file_descr list * Unix.file_descr list
[@@ocaml.deprecated "Use Unix.select instead."]
* Same function as { ! Unix.select } .
Suspend the execution of the calling thread until input / output
becomes possible on the given Unix file descriptors .
The arguments and results have the same meaning as for
{ ! Unix.select } .
Suspend the execution of the calling thread until input/output
becomes possible on the given Unix file descriptors.
The arguments and results have the same meaning as for
{!Unix.select}. *)
val wait_pid : int -> int * Unix.process_status
[@@ocaml.deprecated "Use Unix.waitpid instead."]
* Same function as { ! Unix.waitpid } .
[ wait_pid p ] suspends the execution of the calling thread
until the process specified by the process identifier [ p ]
terminates . Returns the pid of the child caught and
its termination status , as per { ! Unix.wait } .
[wait_pid p] suspends the execution of the calling thread
until the process specified by the process identifier [p]
terminates. Returns the pid of the child caught and
its termination status, as per {!Unix.wait}. *)
* Signal handling follows the POSIX thread model : signals generated
by a thread are delivered to that thread ; signals generated externally
are delivered to one of the threads that does not block it .
Each thread possesses a set of blocked signals , which can be modified
using { ! Thread.sigmask } . This set is inherited at thread creation time .
Per - thread signal masks are supported only by the system thread library
under Unix , but not under Win32 , nor by the VM thread library .
by a thread are delivered to that thread; signals generated externally
are delivered to one of the threads that does not block it.
Each thread possesses a set of blocked signals, which can be modified
using {!Thread.sigmask}. This set is inherited at thread creation time.
Per-thread signal masks are supported only by the system thread library
under Unix, but not under Win32, nor by the VM thread library. *)
val sigmask : Unix.sigprocmask_command -> int list -> int list
* [ sigmask cmd sigs ] changes the set of blocked signals for the
calling thread .
If [ cmd ] is [ SIG_SETMASK ] , blocked signals are set to those in
the list [ ] .
If [ cmd ] is [ SIG_BLOCK ] , the signals in [ sigs ] are added to
the set of blocked signals .
If [ cmd ] is [ SIG_UNBLOCK ] , the signals in [ sigs ] are removed
from the set of blocked signals .
[ sigmask ] returns the set of previously blocked signals for the thread .
calling thread.
If [cmd] is [SIG_SETMASK], blocked signals are set to those in
the list [sigs].
If [cmd] is [SIG_BLOCK], the signals in [sigs] are added to
the set of blocked signals.
If [cmd] is [SIG_UNBLOCK], the signals in [sigs] are removed
from the set of blocked signals.
[sigmask] returns the set of previously blocked signals for the thread. *)
val wait_signal : int list -> int
* [ wait_signal sigs ] suspends the execution of the calling thread
until the process receives one of the signals specified in the
list [ ] . It then returns the number of the signal received .
Signal handlers attached to the signals in [ sigs ] will not
be invoked . The signals [ sigs ] are expected to be blocked before
calling [ wait_signal ] .
until the process receives one of the signals specified in the
list [sigs]. It then returns the number of the signal received.
Signal handlers attached to the signals in [sigs] will not
be invoked. The signals [sigs] are expected to be blocked before
calling [wait_signal]. *)
* { 1 Uncaught exceptions }
val default_uncaught_exception_handler : exn -> unit
* [ Thread.default_uncaught_exception_handler ] will print the thread 's i d ,
exception and backtrace ( if available ) .
exception and backtrace (if available). *)
val set_uncaught_exception_handler : (exn -> unit) -> unit
|
46e6e15e79d52cadd9f8b32fb79d8e7a6efcc98365e5ea77ce78d7d87dba2091 | cdornan/keystore | markdown.hs | import Data.KeyStore.Types.Schema
import Data.API.Markdown
import Data.API.Types
main :: IO ()
main =
writeFile "schema.md" $ markdown markdownMethods keystoreSchema
markdownMethods :: MarkdownMethods
markdownMethods =
MDM
{ mdmSummaryPostfix = _TypeName
, mdmLink = _TypeName
, mdmPp = const id
, mdmFieldDefault = const $ const Nothing
}
| null | https://raw.githubusercontent.com/cdornan/keystore/e16103e75cf067baa3a939a1d9e79bd7af6942e7/scripts/markdown.hs | haskell | import Data.KeyStore.Types.Schema
import Data.API.Markdown
import Data.API.Types
main :: IO ()
main =
writeFile "schema.md" $ markdown markdownMethods keystoreSchema
markdownMethods :: MarkdownMethods
markdownMethods =
MDM
{ mdmSummaryPostfix = _TypeName
, mdmLink = _TypeName
, mdmPp = const id
, mdmFieldDefault = const $ const Nothing
}
| |
6851b01e348957636526b6cedf7cfb6bf8aa84e644f012bc53ba960526011dd5 | mattdw/atompub | project.clj | (defproject atompub "1.0.0"
:description "An implementation of Atom Syndication and the Atom Publishing Protocol."
:url ""
:license "Eclipse Public License 1.0"
:dependencies [[org.clojure/clojure "1.6.0"]
[org.clojure/data.zip "0.1.1"]
[joda-time "1.6"]
[net.cgrand/moustache "1.1.0" :exclusions [org.clojure/clojure]]]
:plugins [[lein-marginalia "0.8.0"]])
| null | https://raw.githubusercontent.com/mattdw/atompub/fd8036e74a9a586d7c2e6e9c14992497e8e9b416/project.clj | clojure | (defproject atompub "1.0.0"
:description "An implementation of Atom Syndication and the Atom Publishing Protocol."
:url ""
:license "Eclipse Public License 1.0"
:dependencies [[org.clojure/clojure "1.6.0"]
[org.clojure/data.zip "0.1.1"]
[joda-time "1.6"]
[net.cgrand/moustache "1.1.0" :exclusions [org.clojure/clojure]]]
:plugins [[lein-marginalia "0.8.0"]])
| |
a851a58c4f52673394ccfdb34d833073564044103defec16793d92afbc401ada | fukamachi/clozure-cl | cocoa-utils.lisp | ; -*- Mode: Lisp; Package: GUI -*-
(in-package "GUI")
;; Return the elements of an NSArray as a fresh Lisp list, preserving
;; the array's order.  Non-NSArray arguments signal an error.
(defmethod list-from-ns-array (thing) (error "~S is not an instance of NS:NS-ARRAY" thing))

(defmethod list-from-ns-array ((nsa ns:ns-array))
  (let ((result (list))
        (c (#/count nsa)))
    ;; PUSH builds the list in reverse; REVERSE restores array order.
    (dotimes (i c) (setf result (push (#/objectAtIndex: nsa i) result)))
    (reverse result)))
()
(:metaclass ns:+ns-object))
(objc:defmethod (#/keyDown: :void) ((self key-select-table-view) event)
(let* ((code (#/keyCode event)))
(if (and (>= (#/selectedRow self) 0)
(= code 36)) ; return key
(#/sendAction:to:from: *NSApp* (#/doubleAction self) (#/target self) self)
(call-next-method event))))
(defclass sequence-window-controller (ns:ns-window-controller)
((table-view :foreign-type :id :reader sequence-window-controller-table-view)
(sequence :initform nil :initarg :sequence :type sequence :reader sequence-window-controller-sequence)
(result-callback :initarg :result-callback)
(display :initform #'(lambda (item stream) (prin1 item stream)) :initarg :display)
(title :initform "Sequence dialog" :initarg :title)
(before-close-function :initarg :before-close-function :initform nil))
(:metaclass ns:+ns-object))
(objc:defmethod #/init ((self sequence-window-controller))
(call-next-method)
(let* ((w (new-cocoa-window :activate nil))
(contentview (#/contentView w))
(contentframe (#/frame contentview))
(scrollview (make-instance 'ns:ns-scroll-view :with-frame contentframe)))
(#/setWindow: self w)
(#/release w)
(#/setDelegate: w self)
(#/setWindowController: w self)
(#/setHasVerticalScroller: scrollview t)
(#/setHasHorizontalScroller: scrollview t)
(#/setAutohidesScrollers: scrollview t)
(#/setRulersVisible: scrollview nil)
(#/setAutoresizingMask: scrollview (logior
#$NSViewWidthSizable
#$NSViewHeightSizable))
(#/setAutoresizesSubviews: (#/contentView scrollview) t)
(let* ((table-view (make-instance 'key-select-table-view)))
(#/setDocumentView: scrollview table-view)
(#/release table-view)
#-cocotron
(#/setColumnAutoresizingStyle: table-view #$NSTableViewUniformColumnAutoresizingStyle)
(setf (slot-value self 'table-view) table-view)
(let* ((column (make-instance 'ns:ns-table-column :with-identifier #@"")))
(#/setEditable: column nil)
#-cocotron
(#/setResizingMask: column #$NSTableColumnAutoresizingMask)
(#/addTableColumn: table-view column)
(#/release column))
(#/setAutoresizingMask: table-view (logior
#$NSViewWidthSizable
#$NSViewHeightSizable))
(#/sizeToFit table-view)
(#/setDataSource: table-view self)
(#/setTarget: table-view self)
(#/setHeaderView: table-view +null-ptr+)
(#/setUsesAlternatingRowBackgroundColors: table-view t)
(#/setDoubleAction: table-view (@selector #/sequenceDoubleClick:))
(#/addSubview: contentview scrollview)
(#/release scrollview)
self)))
;; Detach the Lisp slot data before the Objective-C instance dies.
(objc:defmethod (#/dealloc :void) ((self sequence-window-controller))
  (objc:remove-lisp-slots self)
  (call-next-method))

;; Window-delegate callback: break the table's reference to us, run
;; the user-supplied BEFORE-CLOSE-FUNCTION (if any), and schedule the
;; controller itself for release.
(objc:defmethod (#/windowWillClose: :void) ((self sequence-window-controller)
                                            notification)
  (declare (ignore notification))
  (#/setDataSource: (slot-value self 'table-view) +null-ptr+)
  (with-slots (before-close-function) self
    (when (functionp before-close-function)
      (funcall before-close-function self)))
  (#/autorelease self))

;; Action fired on double-click (or return-key, via
;; KEY-SELECT-TABLE-VIEW): pass the sequence element for the selected
;; row to RESULT-CALLBACK.
(objc:defmethod (#/sequenceDoubleClick: :void)
    ((self sequence-window-controller) sender)
  (let* ((n (#/selectedRow sender)))
    (when (>= n 0)
      (with-slots (sequence result-callback) self
        (funcall result-callback (elt sequence n))))))

;; NSTableView data source: one row per sequence element.
(objc:defmethod (#/numberOfRowsInTableView: :<NSI>nteger)
    ((self sequence-window-controller) view)
  (declare (ignore view))
  (length (slot-value self 'sequence)))

;; NSTableView data source: render the ROWth element by printing it
;; with the DISPLAY function into a fresh autoreleased NSString.
(objc:defmethod #/tableView:objectValueForTableColumn:row:
    ((self sequence-window-controller) view column (row :<NSI>nteger))
  (declare (ignore column view))
  (with-slots (display sequence) self
    (#/autorelease
     (%make-nsstring (with-output-to-string (s)
                       (funcall display (elt sequence row) s))))))

;; After construction: set the title, populate the table, and show the
;; window on the main (event) thread.
(defmethod initialize-instance :after ((self sequence-window-controller) &key &allow-other-keys)
  (let* ((window (#/window self)))
    (with-slots (title) self
      (when title (#/setTitle: window (%make-nsstring title))))
    (#/reloadData (sequence-window-controller-table-view self))
    (#/performSelectorOnMainThread:withObject:waitUntilDone:
     self
     (@selector #/showWindow:)
     +null-ptr+
     nil)))
;;; Looks like a "util" to me ...
;; Convert a Lisp pathname to a (retained) file NSURL.
(defun pathname-to-url (pathname)
  (make-instance 'ns:ns-url
                 :file-url-with-path
                 (%make-nsstring (native-translated-namestring pathname))))

;; Coerce NUMBER to CCL's CGFloat representation (the float type of
;; ccl::+cgfloat-zero+ on this platform).
(defun cgfloat (number)
  (float number ccl::+cgfloat-zero+))

;; Make a retained NSColor in the calibrated RGB color space.
;; RED/GREEN/BLUE/ALPHA are reals in [0,1]; ALPHA defaults to opaque.
(defun color-values-to-nscolor (red green blue &optional alpha)
  (#/retain (#/colorWithCalibratedRed:green:blue:alpha: ns:ns-color
                                                        (cgfloat red)
                                                        (cgfloat green)
                                                        (cgfloat blue)
                                                        (cgfloat (or alpha 1.0)))))
;; Call FN once for each of the application's windows, in the order
;; returned by #/orderedWindows (front-to-back).
(defun map-windows (fn)
  (let ((win-arr (#/orderedWindows *NSApp*)))
    (dotimes (i (#/count win-arr))
      (funcall fn (#/objectAtIndex: win-arr i)))))
(defun windows ()
  "Return the application's windows as a fresh list, in the order
MAP-WINDOWS visits them."
  (let ((ws '()))
    (map-windows (lambda (w) (push w ws)))
    (reverse ws)))
;; Return the frontmost window, or NIL if there are none.  MAP-WINDOWS
;; visits windows front-to-back; the RETURN-FROM exits on the first.
(defun front-window ()
  (map-windows #'(lambda (win) (return-from front-window win))))
(defun target ()
  "Returns the second window in the list returned by (windows)."
  (let ((seen 0))
    ;; Count windows as MAP-WINDOWS visits them and bail out on the
    ;; second one; returns NIL when fewer than two windows exist.
    (map-windows #'(lambda (win)
                     (when (= (incf seen) 2)
                       (return-from target win))))))
;; Return the first (frontmost) window satisfying PRED, or NIL.
(defun first-window-satisfying-predicate (pred)
  (block foo
    (map-windows #'(lambda (w) (when (funcall pred w)
                                 (return-from foo w))))))

;; Return the first window whose window controller is of type
;; CONTROLLER-TYPE, or NIL.
(defun first-window-with-controller-type (controller-type)
  (first-window-satisfying-predicate #'(lambda (w) (typep (#/windowController w) controller-type))))
;; Create a new Listener window via the application delegate's
;; #/newListener: action (run on the event thread) and return its
;; HEMLOCK-VIEW, or NIL if no listener window could be located.
;; INHIBIT-GREETING is bound around the creation so the new listener
;; honors it; the previous value is restored via UNWIND-PROTECT.
(defun new-listener (&key (inhibit-greeting ccl::*inhibit-greeting*))
  (let ((wptr (execute-in-gui (lambda ()
                                (declare (special hemlock-listener-document))
                                ;; TODO: fix this.
                                (let ((old ccl::*inhibit-greeting*))
                                  (unwind-protect
                                      (progn
                                        (setq ccl::*inhibit-greeting* inhibit-greeting)
                                        (#/newListener: (#/delegate *NSApp*) (%null-ptr)))
                                    (setq ccl::*inhibit-greeting* old)))
                                (let ((doc (#/topListener hemlock-listener-document)))
                                  (unless (%null-ptr-p doc)
                                    (#/window (#/lastObject (#/windowControllers doc)))))))))
    (when wptr (hemlock-view wptr))))
;; Close OBJECT (#/close) on the main Cocoa thread.  When already on
;; the initial (event) process, close directly; otherwise schedule the
;; close on the main thread, waiting for completion iff WAIT-P.
(defun cocoa-close (object &optional wait-p)
  (if (eq *current-process* ccl::*initial-process*)
    (#/close object)
    (#/performSelectorOnMainThread:withObject:waitUntilDone:
     object
     (@selector #/close)
     +null-ptr+
     wait-p)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; How errors escaping Cocoa callbacks are reported: NIL is silent,
;; any true value prints the condition, :BACKTRACE (the default) also
;; prints a detailed call history.
(defvar *log-callback-errors* :backtrace)

;; Best-effort report of CONDITION on *DEBUG-IO*, honoring
;; *LOG-CALLBACK-ERRORS*.  Each step is individually wrapped so a
;; broken condition or stream cannot abort the whole report.
(defun maybe-log-callback-error (condition)
  (when *log-callback-errors*
    ;; Put these in separate ignore-errors, so at least some of it can get thru
    (let ((emsg (ignore-errors (princ-to-string condition))))
      (ignore-errors (clear-output *debug-io*))
      (ignore-errors (format *debug-io* "~&Lisp error: ~s" (or emsg condition)))
      (when (eq *log-callback-errors* :backtrace)
        (let* ((err (nth-value 1 (ignore-errors (ccl:print-call-history :detailed-p t)))))
          (when err
            (ignore-errors (format *debug-io* "~&Error printing call history - "))
            (ignore-errors (print err *debug-io*))
            (ignore-errors (princ err *debug-io*))
            (ignore-errors (force-output *debug-io*))))))))

;; Wrap BODY (a Cocoa callback body) so any error is logged via
;; MAYBE-LOG-CALLBACK-ERROR and then aborted, with CCL's standard
;; abort handling labeled by DESCRIPTION.  *DEBUG-IO* is captured at
;; entry so logging uses the stream current when the callback began.
(defmacro with-callback-context (description &body body)
  (let ((saved-debug-io (gensym)))
    `(ccl::with-standard-abort-handling ,(format nil "Abort ~a" description)
       (let ((,saved-debug-io *debug-io*))
         (handler-bind ((error #'(lambda (condition)
                                   (let ((*debug-io* ,saved-debug-io))
                                     (maybe-log-callback-error condition)
                                     (abort)))))
           ,@body)))))
;;; Usually, one does not subclass NSApplication.  We do it mainly
;;; because we use a user-defined event to signal the event loop to
;;; invoke a lisp function, and the only way I know of to respond to a
;;; user-defined event is to override -[NSApplication sendEvent:].
;; NSApplication subclass used so -[NSApplication sendEvent:] can be
;; overridden (see the comment above).  TERMP presumably flags pending
;; termination and CONSOLE holds the console object -- confirm at the
;; points of use.
(defclass lisp-application (ns:ns-application)
  ((termp :foreign-type :<BOOL>)
   (console :foreign-type :id :accessor console))
  (:metaclass ns:+ns-object))
;;; previously used names
;; Run THUNK synchronously in the Cocoa event process.  CONTEXT is
;; accepted but ignored (kept for callers of the previous interface).
(defun execute-in-gui (thunk &key context)
  (declare (ignore context))
  (ccl::call-in-event-process thunk))

;; Queue THUNK for asynchronous execution in the Cocoa event process.
;; RESULT-HANDLER/CONTEXT/AT-START are accepted but ignored (kept for
;; callers of the previous interface).
(defun queue-for-gui (thunk &key result-handler context at-start)
  (declare (ignore result-handler context at-start))
  (ccl::queue-for-event-process thunk))

;; True if the current event's modifier flags intersect MODIFIER-MASK.
(defmethod current-event-modifier-p (modifier-mask)
  (let* ((event (#/currentEvent *nsapp*))
         (modifiers (#/modifierFlags event)))
    (logtest modifier-mask modifiers)))

;; True if the command key is down in the current event.
(defun current-event-command-key-p ()
  (current-event-modifier-p #$NSCommandKeyMask))
;;; Better definition in file-dialogs.lisp
;; Disabled (#+IGNORE): superseded by the definition in
;; file-dialogs.lisp per the note above; kept for reference.  Runs a
;; modal directory-open panel and returns the chosen path (with a
;; trailing slash re-appended) as a retained NSString.
#+IGNORE
(defun choose-directory-dialog ()
  (execute-in-gui #'(lambda ()
                      (let ((op (#/openPanel ns:ns-open-panel)))
                        (#/setAllowsMultipleSelection: op nil)
                        (#/setCanChooseDirectories: op t)
                        (#/setCanChooseFiles: op nil)
                        (when (eql (#/runModalForTypes: op +null-ptr+) #$NSOKButton)
                          ;; #/stringByStandardizingPath seems to strip trailing slashes
                          (let* ((path (#/retain (#/stringByAppendingString:
                                                  (#/stringByStandardizingPath
                                                   (#/objectAtIndex: (#/filenames op) 0))
                                                  #@"/"))))
                            path))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; debugging
(defun double-%-in (string)
  ;; Replace any % characters in STRING with %%, to keep them from
  ;; being treated as printf directives (e.g. by #_NSLog).
  ;; Iterative rewrite of the original, which recursed once per #\%
  ;; and concatenated intermediate strings: that was O(n^2) and could
  ;; exhaust the stack on %-heavy input.
  (with-output-to-string (out)
    (loop for ch across string
          do (write-char ch out)
             (when (char= ch #\%)
               (write-char #\% out)))))
;; Log a FORMAT-style message via NSLog.  %-characters in the
;; formatted result are doubled (DOUBLE-%-IN) so NSLog's own format
;; processing passes them through literally.
(defun log-debug (format-string &rest args)
  (let ((string (apply #'format nil format-string args)))
    (#_NSLog (ccl::%make-nsstring (double-%-in string)))))

;; NOTE(review): registers LOG-DEBUG (format string at arg 0) in CCL's
;; *format-arg-functions*, presumably enabling compile-time format
;; checking -- confirm against CCL internals.
(pushnew '(log-debug . 0) ccl::*format-arg-functions* :test #'equal)

;; Log condition C via NSLog, prefixed with MSG.
(defun nslog-condition (c &optional (msg "Error in event loop: "))
  (let* ((rep (format nil "~a" c)))
    (with-cstrs ((str rep)
                 (msg-str msg))
      (with-nsstr (nsstr str (length rep))
        (with-nsstr (nsmsg msg-str (length msg))
          (#_NSLog #@"%@: %@" :address nsmsg :address nsstr))))))

;; Render condition COND as an NSString, falling back to a fixed
;; message if printing the condition itself signals.
(defun nsstring-for-lisp-condition (cond)
  (%make-nsstring (double-%-in (or (ignore-errors (princ-to-string cond))
                                   "#<error printing error message>"))))
;; Assert that we are running on the initial (Cocoa event) process.
(defun assume-cocoa-thread ()
  (assert (eq *current-process* ccl::*initial-process*)))

;; Default no-op method; presumably specialized elsewhere for views
;; that track editing state -- confirm at the specializing sites.
(defmethod assume-not-editing ((whatever t)))
;;; -----------------------------------------------------------------
;;; utility to display a Cocoa alert window
;;; -----------------------------------------------------------------
;;; Returns NSRunAlertPanel's result code so callers can tell which
;;; button was clicked (resolving the old TODO about that).
(defun alert-window (&key
                     (title "Alert")
                     (message "Something happened.")
                     (default-button "Okay")
                     alternate-button
                     other-button)
  "Display a modal Cocoa alert panel with up to three buttons.
Returns the integer result code of #_NSRunAlertPanel identifying the
button that dismissed the panel."
  (let ((nstitle (%make-nsstring title))
        (nsmessage (%make-nsstring message))
        (ns-default-button (%make-nsstring default-button))
        (ns-alternate-button (or (and alternate-button (%make-nsstring alternate-button))
                                 +null-ptr+))
        (ns-other-button (or (and other-button (%make-nsstring other-button))
                             +null-ptr+)))
    (unwind-protect
        (#_NSRunAlertPanel nstitle nsmessage ns-default-button ns-alternate-button ns-other-button)
      ;; Release the NSStrings even if the panel exits non-locally;
      ;; the original version leaked them in that case.
      (#/release nstitle)
      (#/release nsmessage)
      (#/release ns-default-button)
      (unless (eql ns-alternate-button +null-ptr+)
        (#/release ns-alternate-button))
      (unless (eql ns-other-button +null-ptr+)
        (#/release ns-other-button)))))
;;; -----------------------------------------------------------------
;;; utility to display a Cocoa progress window
;;; -----------------------------------------------------------------
;; Singleton controller for the shared progress window; created
;; lazily by GET-PROGRESS-WINDOW.
(defparameter *progress-window-controller* nil)

;; Controller for ProgressWindow.nib: the window plus its message
;; text field and progress bar outlets.
(defclass progress-window-controller (ns:ns-window-controller)
  ((progress-window :foreign-type :id :reader progress-window)
   (message-field :foreign-type :id :reader progress-window-message-field)
   (progress-bar :foreign-type :id :reader progress-window-progress-bar))
  (:metaclass ns:+ns-object))

;; Return the shared progress window, loading ProgressWindow.nib on
;; first use.  Returns NIL when the window outlet failed to load.
(defun get-progress-window ()
  (unless *progress-window-controller*
    (setf *progress-window-controller*
          (make-instance 'progress-window-controller))
    (#/initWithWindowNibName: *progress-window-controller* #@"ProgressWindow"))
  (unless (#/isWindowLoaded *progress-window-controller*)
    (#/loadWindow *progress-window-controller*))
  (let ((window (progress-window *progress-window-controller*)))
    (if (or (null window)
            (%null-ptr-p window))
      nil
      window)))
;; Evaluate BODY while the shared progress window is shown as a modal
;; session, with an indeterminate animated progress bar, window title
;; TITLE and caption MESSAGE.  Returns BODY's value, or NIL (after an
;; alert) when the progress window cannot be loaded.  The temporary
;; NSStrings are released on exit via UNWIND-PROTECT.
(defmacro with-modal-progress-dialog (title message &body body)
  `(let* ((nstitle (%make-nsstring ,title))
          (nsmessage (%make-nsstring ,message))
          (window (get-progress-window))
          (progress-bar (progress-window-progress-bar *progress-window-controller*))
          (message-field (progress-window-message-field *progress-window-controller*)))
     (unwind-protect
         (if window
           (progn
             (#/setTitle: window nstitle)
             (#/setIndeterminate: progress-bar #$YES)
             (#/setUsesThreadedAnimation: progress-bar #$YES)
             (#/setStringValue: message-field nsmessage)
             (#/makeKeyAndOrderFront: window +null-ptr+)
             (let ((modal-session (#/beginModalSessionForWindow: ccl::*nsapp* window)))
               (#/startAnimation: progress-bar +null-ptr+)
               (let ((result (progn ,@body)))
                 (#/stopAnimation: progress-bar +null-ptr+)
                 (#/orderOut: window +null-ptr+)
                 (#/endModalSession: ccl::*nsapp* modal-session)
                 result)))
           (progn
             (alert-window :title "Failure"
                           :message "Unable to load the modal progress window")
             nil))
       (#/release nstitle)
       (#/release nsmessage))))
;; True on OS X 10.5 (#x1050) or later, per Gestalt; Cocotron
;; (Windows) builds always count as post-Tiger.
(defun post-tiger-p ()
  #+cocotron t
  #-cocotron
  (rlet ((p :int))
    (#_Gestalt #$gestaltSystemVersion p)
    (>= (%get-long p) #x1050)))
;; This works even if an event loop is not running.
#+windows-target
(defun shift-key-now-p ()
  (logbitp 15 (#_GetAsyncKeyState #$VK_SHIFT)))
;; Poll the current shift-key state via a fresh Quartz event,
;; independent of the Cocoa event queue; the event is released
;; after its flags are read.
#+darwin-target
(defun shift-key-now-p ()
  (let* ((event (#_CGEventCreate +null-ptr+))
         (flags (#_CGEventGetFlags event)))
    (prog1
        (logtest flags #$kCGEventFlagMaskShift)
      (#_CFRelease event))))
;;; I would remove this, but I think that people use it...
;; Abstract NSString subclass backed by a Lisp string; subclasses
;; supply the backing string via NS-LISP-STRING-STRING.
(defclass abstract-ns-lisp-string (ns:ns-string)
  ()
  (:metaclass ns:+ns-object))

(defgeneric ns-lisp-string-string (abstract-ns-lisp-string)
  (:method ((self abstract-ns-lisp-string)) nil))

;; NSString primitive methods, implemented on the backing Lisp string.
(objc:defmethod (#/length :<NSUI>nteger) ((self abstract-ns-lisp-string))
  (length (ns-lisp-string-string self)))

(objc:defmethod (#/characterAtIndex: :unichar) ((self abstract-ns-lisp-string) (index :<NSUI>nteger))
  (char-code (char (ns-lisp-string-string self) index)))

;; Concrete variant whose backing string is supplied at MAKE-INSTANCE
;; time via the :STRING initarg.
(defclass ns-lisp-string (abstract-ns-lisp-string)
  ((lisp-string :initarg :string :reader ns-lisp-string-string))
  (:metaclass ns:+ns-object))
| null | https://raw.githubusercontent.com/fukamachi/clozure-cl/4b0c69452386ae57b08984ed815d9b50b4bcc8a2/cocoa-ide/cocoa-utils.lisp | lisp | -*- Mode: Lisp; Package: GUI -*-
return key
Looks like a "util" to me ...
Put these in separate ignore-errors, so at least some of it can get thru
because we use a user-defined event to signal the event loop to
invoke a lisp function, and the only way I know of to respond to a
user-defined event is to override -[NSApplication sendEvent:].
previously used names
#/stringByStandardizingPath seems to strip trailing slashes
debugging
Replace any % characters in string with %%, to keep them from
being treated as printf directives.
-----------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
This works even if an event loop is not running.
I would remove this, but I think that people use it... |
(in-package "GUI")
(defmethod list-from-ns-array (thing) (error "~S is not an instance of NS:NS-ARRAY" thing))
(defmethod list-from-ns-array ((nsa ns:ns-array))
(let ((result (list))
(c (#/count nsa)))
(dotimes (i c) (setf result (push (#/objectAtIndex: nsa i) result)))
(reverse result)))
(defclass key-select-table-view (ns:ns-table-view)
()
(:metaclass ns:+ns-object))
(objc:defmethod (#/keyDown: :void) ((self key-select-table-view) event)
(let* ((code (#/keyCode event)))
(if (and (>= (#/selectedRow self) 0)
(#/sendAction:to:from: *NSApp* (#/doubleAction self) (#/target self) self)
(call-next-method event))))
(defclass sequence-window-controller (ns:ns-window-controller)
((table-view :foreign-type :id :reader sequence-window-controller-table-view)
(sequence :initform nil :initarg :sequence :type sequence :reader sequence-window-controller-sequence)
(result-callback :initarg :result-callback)
(display :initform #'(lambda (item stream) (prin1 item stream)) :initarg :display)
(title :initform "Sequence dialog" :initarg :title)
(before-close-function :initarg :before-close-function :initform nil))
(:metaclass ns:+ns-object))
(objc:defmethod #/init ((self sequence-window-controller))
(call-next-method)
(let* ((w (new-cocoa-window :activate nil))
(contentview (#/contentView w))
(contentframe (#/frame contentview))
(scrollview (make-instance 'ns:ns-scroll-view :with-frame contentframe)))
(#/setWindow: self w)
(#/release w)
(#/setDelegate: w self)
(#/setWindowController: w self)
(#/setHasVerticalScroller: scrollview t)
(#/setHasHorizontalScroller: scrollview t)
(#/setAutohidesScrollers: scrollview t)
(#/setRulersVisible: scrollview nil)
(#/setAutoresizingMask: scrollview (logior
#$NSViewWidthSizable
#$NSViewHeightSizable))
(#/setAutoresizesSubviews: (#/contentView scrollview) t)
(let* ((table-view (make-instance 'key-select-table-view)))
(#/setDocumentView: scrollview table-view)
(#/release table-view)
#-cocotron
(#/setColumnAutoresizingStyle: table-view #$NSTableViewUniformColumnAutoresizingStyle)
(setf (slot-value self 'table-view) table-view)
(let* ((column (make-instance 'ns:ns-table-column :with-identifier #@"")))
(#/setEditable: column nil)
#-cocotron
(#/setResizingMask: column #$NSTableColumnAutoresizingMask)
(#/addTableColumn: table-view column)
(#/release column))
(#/setAutoresizingMask: table-view (logior
#$NSViewWidthSizable
#$NSViewHeightSizable))
(#/sizeToFit table-view)
(#/setDataSource: table-view self)
(#/setTarget: table-view self)
(#/setHeaderView: table-view +null-ptr+)
(#/setUsesAlternatingRowBackgroundColors: table-view t)
(#/setDoubleAction: table-view (@selector #/sequenceDoubleClick:))
(#/addSubview: contentview scrollview)
(#/release scrollview)
self)))
(objc:defmethod (#/dealloc :void) ((self sequence-window-controller))
(objc:remove-lisp-slots self)
(call-next-method))
(objc:defmethod (#/windowWillClose: :void) ((self sequence-window-controller)
notification)
(declare (ignore notification))
(#/setDataSource: (slot-value self 'table-view) +null-ptr+)
(with-slots (before-close-function) self
(when (functionp before-close-function)
(funcall before-close-function self)))
(#/autorelease self))
(objc:defmethod (#/sequenceDoubleClick: :void)
((self sequence-window-controller) sender)
(let* ((n (#/selectedRow sender)))
(when (>= n 0)
(with-slots (sequence result-callback) self
(funcall result-callback (elt sequence n))))))
(objc:defmethod (#/numberOfRowsInTableView: :<NSI>nteger)
((self sequence-window-controller) view)
(declare (ignore view))
(length (slot-value self 'sequence)))
(objc:defmethod #/tableView:objectValueForTableColumn:row:
((self sequence-window-controller) view column (row :<NSI>nteger))
(declare (ignore column view))
(with-slots (display sequence) self
(#/autorelease
(%make-nsstring (with-output-to-string (s)
(funcall display (elt sequence row) s))))))
(defmethod initialize-instance :after ((self sequence-window-controller) &key &allow-other-keys)
(let* ((window (#/window self)))
(with-slots (title) self
(when title (#/setTitle: window (%make-nsstring title))))
(#/reloadData (sequence-window-controller-table-view self))
(#/performSelectorOnMainThread:withObject:waitUntilDone:
self
(@selector #/showWindow:)
+null-ptr+
nil)))
(defun pathname-to-url (pathname)
(make-instance 'ns:ns-url
:file-url-with-path
(%make-nsstring (native-translated-namestring pathname))))
(defun cgfloat (number)
(float number ccl::+cgfloat-zero+))
(defun color-values-to-nscolor (red green blue &optional alpha)
(#/retain (#/colorWithCalibratedRed:green:blue:alpha: ns:ns-color
(cgfloat red)
(cgfloat green)
(cgfloat blue)
(cgfloat (or alpha 1.0)))))
(defun map-windows (fn)
(let ((win-arr (#/orderedWindows *NSApp*)))
(dotimes (i (#/count win-arr))
(funcall fn (#/objectAtIndex: win-arr i)))))
(defun windows ()
(let* ((ret nil))
(map-windows #'(lambda (w) (push w ret)))
(nreverse ret)))
(defun front-window ()
(map-windows #'(lambda (win) (return-from front-window win))))
(defun target ()
"Returns the second window in the list returned by (windows)."
(let ((first? nil))
(map-windows #'(lambda (win)
(if first?
(return-from target win)
(setf first? t))))))
(defun first-window-satisfying-predicate (pred)
(block foo
(map-windows #'(lambda (w) (when (funcall pred w)
(return-from foo w))))))
(defun first-window-with-controller-type (controller-type)
(first-window-satisfying-predicate #'(lambda (w) (typep (#/windowController w) controller-type))))
(defun new-listener (&key (inhibit-greeting ccl::*inhibit-greeting*))
(let ((wptr (execute-in-gui (lambda ()
(declare (special hemlock-listener-document))
TODO : fix this .
(let ((old ccl::*inhibit-greeting*))
(unwind-protect
(progn
(setq ccl::*inhibit-greeting* inhibit-greeting)
(#/newListener: (#/delegate *NSApp*) (%null-ptr)))
(setq ccl::*inhibit-greeting* old)))
(let ((doc (#/topListener hemlock-listener-document)))
(unless (%null-ptr-p doc)
(#/window (#/lastObject (#/windowControllers doc)))))))))
(when wptr (hemlock-view wptr))))
(defun cocoa-close (object &optional wait-p)
(if (eq *current-process* ccl::*initial-process*)
(#/close object)
(#/performSelectorOnMainThread:withObject:waitUntilDone:
object
(@selector #/close)
+null-ptr+
wait-p)))
(defvar *log-callback-errors* :backtrace)
(defun maybe-log-callback-error (condition)
(when *log-callback-errors*
(let ((emsg (ignore-errors (princ-to-string condition))))
(ignore-errors (clear-output *debug-io*))
(ignore-errors (format *debug-io* "~&Lisp error: ~s" (or emsg condition)))
(when (eq *log-callback-errors* :backtrace)
(let* ((err (nth-value 1 (ignore-errors (ccl:print-call-history :detailed-p t)))))
(when err
(ignore-errors (format *debug-io* "~&Error printing call history - "))
(ignore-errors (print err *debug-io*))
(ignore-errors (princ err *debug-io*))
(ignore-errors (force-output *debug-io*))))))))
(defmacro with-callback-context (description &body body)
(let ((saved-debug-io (gensym)))
`(ccl::with-standard-abort-handling ,(format nil "Abort ~a" description)
(let ((,saved-debug-io *debug-io*))
(handler-bind ((error #'(lambda (condition)
(let ((*debug-io* ,saved-debug-io))
(maybe-log-callback-error condition)
(abort)))))
,@body)))))
Usually , one does not sublass NSApplication . We do it mainly
(defclass lisp-application (ns:ns-application)
((termp :foreign-type :<BOOL>)
(console :foreign-type :id :accessor console))
(:metaclass ns:+ns-object))
(defun execute-in-gui (thunk &key context)
(declare (ignore context))
(ccl::call-in-event-process thunk))
(defun queue-for-gui (thunk &key result-handler context at-start)
(declare (ignore result-handler context at-start))
(ccl::queue-for-event-process thunk))
(defmethod current-event-modifier-p (modifier-mask)
(let* ((event (#/currentEvent *nsapp*))
(modifiers (#/modifierFlags event)))
(logtest modifier-mask modifiers)))
(defun current-event-command-key-p ()
(current-event-modifier-p #$NSCommandKeyMask))
Better definition in file-dialogs.lisp
#+IGNORE
(defun choose-directory-dialog ()
(execute-in-gui #'(lambda ()
(let ((op (#/openPanel ns:ns-open-panel)))
(#/setAllowsMultipleSelection: op nil)
(#/setCanChooseDirectories: op t)
(#/setCanChooseFiles: op nil)
(when (eql (#/runModalForTypes: op +null-ptr+) #$NSOKButton)
(let* ((path (#/retain (#/stringByAppendingString:
(#/stringByStandardizingPath
(#/objectAtIndex: (#/filenames op) 0))
#@"/"))))
path))))))
(defun double-%-in (string)
(let* ((%pos (position #\% string)))
(if %pos
(concatenate 'string (subseq string 0 %pos) "%%" (double-%-in (subseq string (1+ %pos))))
string)))
(defun log-debug (format-string &rest args)
(let ((string (apply #'format nil format-string args)))
(#_NSLog (ccl::%make-nsstring (double-%-in string)))))
(pushnew '(log-debug . 0) ccl::*format-arg-functions* :test #'equal)
(defun nslog-condition (c &optional (msg "Error in event loop: "))
(let* ((rep (format nil "~a" c)))
(with-cstrs ((str rep)
(msg-str msg))
(with-nsstr (nsstr str (length rep))
(with-nsstr (nsmsg msg-str (length msg))
(#_NSLog #@"%@: %@" :address nsmsg :address nsstr))))))
(defun nsstring-for-lisp-condition (cond)
(%make-nsstring (double-%-in (or (ignore-errors (princ-to-string cond))
"#<error printing error message>"))))
(defun assume-cocoa-thread ()
(assert (eq *current-process* ccl::*initial-process*)))
(defmethod assume-not-editing ((whatever t)))
utility to display a Cocoa alert window
TODO : Currently this form gives no indication which button was clicked . Probably it should do so .
(defun alert-window (&key
(title "Alert")
(message "Something happened.")
(default-button "Okay")
alternate-button
other-button)
(let ((nstitle (%make-nsstring title))
(nsmessage (%make-nsstring message))
(ns-default-button (%make-nsstring default-button))
(ns-alternate-button (or (and alternate-button (%make-nsstring alternate-button))
+null-ptr+))
(ns-other-button (or (and other-button (%make-nsstring other-button))
+null-ptr+)))
(#_NSRunAlertPanel nstitle nsmessage ns-default-button ns-alternate-button ns-other-button)
(#/release nstitle)
(#/release nsmessage)
(#/release ns-default-button)
(unless (eql ns-alternate-button +null-ptr+)
(#/release ns-alternate-button))
(unless (eql ns-other-button +null-ptr+)
(#/release ns-other-button))))
utility to display a Cocoa progress window
(defparameter *progress-window-controller* nil)
(defclass progress-window-controller (ns:ns-window-controller)
((progress-window :foreign-type :id :reader progress-window)
(message-field :foreign-type :id :reader progress-window-message-field)
(progress-bar :foreign-type :id :reader progress-window-progress-bar))
(:metaclass ns:+ns-object))
(defun get-progress-window ()
(unless *progress-window-controller*
(setf *progress-window-controller*
(make-instance 'progress-window-controller))
(#/initWithWindowNibName: *progress-window-controller* #@"ProgressWindow"))
(unless (#/isWindowLoaded *progress-window-controller*)
(#/loadWindow *progress-window-controller*))
(let ((window (progress-window *progress-window-controller*)))
(if (or (null window)
(%null-ptr-p window))
nil
window)))
(defmacro with-modal-progress-dialog (title message &body body)
`(let* ((nstitle (%make-nsstring ,title))
(nsmessage (%make-nsstring ,message))
(window (get-progress-window))
(progress-bar (progress-window-progress-bar *progress-window-controller*))
(message-field (progress-window-message-field *progress-window-controller*)))
(unwind-protect
(if window
(progn
(#/setTitle: window nstitle)
(#/setIndeterminate: progress-bar #$YES)
(#/setUsesThreadedAnimation: progress-bar #$YES)
(#/setStringValue: message-field nsmessage)
(#/makeKeyAndOrderFront: window +null-ptr+)
(let ((modal-session (#/beginModalSessionForWindow: ccl::*nsapp* window)))
(#/startAnimation: progress-bar +null-ptr+)
(let ((result (progn ,@body)))
(#/stopAnimation: progress-bar +null-ptr+)
(#/orderOut: window +null-ptr+)
(#/endModalSession: ccl::*nsapp* modal-session)
result)))
(progn
(alert-window :title "Failure"
:message "Unable to load the modal progress window")
nil))
(#/release nstitle)
(#/release nsmessage))))
(defun post-tiger-p ()
#+cocotron t
#-cocotron
(rlet ((p :int))
(#_Gestalt #$gestaltSystemVersion p)
(>= (%get-long p) #x1050)))
#+windows-target
(defun shift-key-now-p ()
(logbitp 15 (#_GetAsyncKeyState #$VK_SHIFT)))
#+darwin-target
(defun shift-key-now-p ()
(let* ((event (#_CGEventCreate +null-ptr+))
(flags (#_CGEventGetFlags event)))
(prog1
(logtest flags #$kCGEventFlagMaskShift)
(#_CFRelease event))))
(defclass abstract-ns-lisp-string (ns:ns-string)
()
(:metaclass ns:+ns-object))
(defgeneric ns-lisp-string-string (abstract-ns-lisp-string)
(:method ((self abstract-ns-lisp-string)) nil))
(objc:defmethod (#/length :<NSUI>nteger) ((self abstract-ns-lisp-string))
(length (ns-lisp-string-string self)))
(objc:defmethod (#/characterAtIndex: :unichar) ((self abstract-ns-lisp-string) (index :<NSUI>nteger))
(char-code (char (ns-lisp-string-string self) index)))
(defclass ns-lisp-string (abstract-ns-lisp-string)
((lisp-string :initarg :string :reader ns-lisp-string-string))
(:metaclass ns:+ns-object))
|
978c3041e4ed1f324c7e39cda7065e5be6fcc2d19b370c49121fedcfc4713f50 | ocaml-multicore/ocaml-tsan | odoc_value.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cambium , INRIA Paris
(* *)
Copyright 2022 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(** Representation and manipulation of values, class attributes and class methods. *)
module Name = Odoc_name
(** Types *)
type t_value = {
val_name : Name.t;
mutable val_info : Odoc_types.info option;
val_type : Types.type_expr;
val_recursive : bool;
mutable val_parameters : Odoc_parameter.parameter list;
mutable val_code : string option;
mutable val_loc : Odoc_types.location;
}
(** Representation of a value. *)
type t_attribute = {
att_value : t_value;
att_mutable : bool;
att_virtual : bool;
}
(** Representation of a class attribute. *)
type t_method = {
met_value : t_value;
met_private : bool;
met_virtual : bool;
}
(** Representation of a class method. *)
(** Functions *)
val value_parameter_text_by_name : t_value -> string -> Odoc_types.text option
(** Returns the text associated to the given parameter name
in the given value, or None. *)
val update_value_parameters_text : t_value -> unit
(** Update the parameters text of a t_value, according to the [val_info] field. *)
val dummy_parameter_list : Types.type_expr -> Odoc_parameter.param_info list
(** Create a list of parameters with dummy names "??" from a type list.
    Used when we want to merge the parameters of a value, from the .ml
    and the .mli file. In the .mli file we don't have parameter names
    so there is nothing to merge. With this dummy list we can merge the
    parameter names from the .ml and the type from the .mli file. *)
val is_function : t_value -> bool
(** Return true if the value is a function, i.e. has a functional type.*)
| null | https://raw.githubusercontent.com/ocaml-multicore/ocaml-tsan/f54002470cc6ab780963cc81b11a85a820a40819/ocamldoc/odoc_value.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Representation and manipulation of values, class attributes and class methods.
* Types
* Representation of a value.
* Representation of a class attribute.
* Representation of a class method.
* Functions
* Returns the text associated to the given parameter name
in the given value, or None.
* Return true if the value is a function, i.e. has a functional type. | , projet Cambium , INRIA Paris
Copyright 2022 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
module Name = Odoc_name
type t_value = {
val_name : Name.t;
mutable val_info : Odoc_types.info option;
val_type : Types.type_expr;
val_recursive : bool;
mutable val_parameters : Odoc_parameter.parameter list;
mutable val_code : string option;
mutable val_loc : Odoc_types.location;
}
type t_attribute = {
att_value : t_value;
att_mutable : bool;
att_virtual : bool;
}
type t_method = {
met_value : t_value;
met_private : bool;
met_virtual : bool;
}
val value_parameter_text_by_name : t_value -> string -> Odoc_types.text option
val update_value_parameters_text : t_value -> unit
* Update the parameters text of a t_value , according to the field .
val dummy_parameter_list : Types.type_expr -> Odoc_parameter.param_info list
* Create a list of parameters with dummy names " ? ? " from a type list .
Used when we want to merge the parameters of a value , from the .ml
and the .mli file . In the .mli file we do n't have parameter names
so there is nothing to merge . With this dummy list we can merge the
parameter names from the .ml and the type from the .mli file .
Used when we want to merge the parameters of a value, from the .ml
and the .mli file. In the .mli file we don't have parameter names
so there is nothing to merge. With this dummy list we can merge the
parameter names from the .ml and the type from the .mli file. *)
val is_function : t_value -> bool
|
04c6c3e4f6ed7cf3e9c3150c5bdb6b6bab96bf1b7aea74a70fb98b44ef1b1425 | eugeneia/athens | id.lisp | Erlangen universal agent identifiers .
(in-package :erlangen.distribution.id)
(defun valid-name-p (string)
"Predicate to test if STRING is a valid name (e.g. not empty and does
not contain #\/)."
(not (or (= (length string) 0) (find #\/ string))))
(deftype name ()
"Type for host, node and agent names."
'(and string (satisfies valid-name-p)))
(defun host-name ()
"Returns hostname."
(machine-instance))
(defun gen-node-name ()
"Returns node name generated from Unix pid."
(format nil "node-~a" (ccl::getpid)))
(defvar *node-name*/lock (make-read-write-lock))
(defvar *node-name* (gen-node-name)
"Node name.")
(defun node-name ()
"Returns the name (a string) of this node."
(with-read-lock (*node-name*/lock)
*node-name*))
(defun set-node-name (name)
"Sets the node name to NAME."
(check-type name name)
(with-write-lock (*node-name*/lock)
(setf *node-name* name)))
(defsetf node-name set-node-name)
(defvar *aid-counter*/lock (make-lock "…id::*aid-counter*"))
(defvar *aid-counter* 0
"Counter used for agent id generation.")
(defun gen-aid ()
"Generates and returns unique identifier for node-local agent."
(format nil "~x" (with-lock-grabbed (*aid-counter*/lock)
(prog1 *aid-counter*
(incf *aid-counter*)))))
(defun aid-value (aid)
"Return numerical value for AID."
(parse-integer aid :radix 16 :junk-allowed nil))
(defvar *agent<->aid*/lock (make-read-write-lock))
(defvar *agent->aid* (make-hash-table :test 'eq :weak :key)
"Agent to aid mapping.")
(defvar *aid->agent* (make-hash-table :test 'equal :weak :value)
"Aid to agent mapping.")
(defun bind-aid (aid agent)
"Associate AID with AGENT."
(with-write-lock (*agent<->aid*/lock)
(setf (gethash aid *aid->agent*) agent
(gethash agent *agent->aid*) aid)))
(defun intern-anonymous-aid (agent)
"If possible, returns the existing aid for AGENT. Otherwise a new aid
for AGENT is created and returned."
(or (with-read-lock (*agent<->aid*/lock)
(gethash agent *agent->aid*))
(bind-aid (gen-aid) agent)))
(defun find-anonymous-agent (aid)
"Returns agent by AID."
(with-read-lock (*agent<->aid*/lock)
(gethash aid *aid->agent*)))
(defun registry-aid (name)
"Returns aid for agent registered for NAME. Signals an error if no aid
could be derived from NAME."
(let ((aid (prin1-to-string name)))
(check-type aid name)
aid))
(defun find-registered-agent (aid)
"Returns registered agent by AID or nil if no such agent exists."
(ignore-errors (agent-by-name (find-symbol aid :keyword))))
(defun format-id (aid)
"Return id for AID."
(format nil "~a/~a/~a" (host-name) (node-name) aid))
(defun agent-id (agent)
"Return id for AGENT."
(format-id (etypecase agent
(agent (intern-anonymous-aid agent))
(keyword (registry-aid agent)))))
(defun decode-id (id)
"Decodes unique agent ID. Returns host name, node name, and aid."
(values-list (split-sequence #\/ id)))
(defun decode-aid (aid)
"Decodes AID and returns agent id type (:REGISTERED or :ANONYMOUS) and
name."
(if (string= ":" aid :end2 1)
(values :registered (subseq aid 1))
(values :anonymous aid)))
(defun find-agent (id)
"Returns agent by ID."
(multiple-value-bind (host node aid) (decode-id id)
(when (and (equal host (host-name))
(equal node (node-name)))
(multiple-value-bind (type aid) (decode-aid aid)
(values (ecase type
(:anonymous (find-anonymous-agent aid))
(:registered (find-registered-agent aid))))))))
(defvar *reserved*/lock (make-lock "…id::*reserved-ids*"))
(defvar *reserved* nil
"List of `aid' reservations")
(defun reserve-id ()
"Reserve a free id."
(let* ((aid (gen-aid))
(n (aid-value aid)))
(with-lock-grabbed (*reserved*/lock)
(push n *reserved*))
(format-id aid)))
(defun claim-id (id)
"Claim reserved ID. Removes ID from the list of reserved ids, and return T if
ID was removed and NIL otherwise. At the same, CLAIM-ID acts as a sort of
garbace collector. Heuristically stale reservations are removed from the list
of reserved ids, and if a stale reservation is claimed an error is signaled."
(multiple-value-bind (host node aid) (decode-id id)
(assert (equal host (host-name)))
(assert (equal node (node-name)))
(let ((n (aid-value aid))
(found-p nil))
(labels ((stale-p (n)
(< (+ n 1000000) *aid-counter*))
(delete-p (reserved)
(cond ((= reserved n) (setf found-p t))
((stale-p reserved) t))))
(with-lock-grabbed (*reserved*/lock)
(setf *reserved* (delete-if #'delete-p *reserved*)))
(assert (not (stale-p n))))
found-p)))
(defun bind-id (id agent)
"Bind aid in ID to AGENT."
(multiple-value-bind (host node aid) (decode-id id)
(declare (ignore host node))
(bind-aid aid agent)))
Specialized cl - conspack encoding / decoding for AGENT structures . Values
;; of type AGENT will be encoded as their respective agent id and decoded
;; as plain strings.
(defmethod encode-object ((agent agent) &key &allow-other-keys)
`((,(agent-id agent))))
(defmethod decode-object ((class (eql 'agent)) alist
&key &allow-other-keys)
(caar alist))
| null | https://raw.githubusercontent.com/eugeneia/athens/cc9d456edd3891b764b0fbf0202a3e2f58865cbf/lib/erlangen/distribution/id.lisp | lisp | of type AGENT will be encoded as their respective agent id and decoded
as plain strings. | Erlangen universal agent identifiers .
(in-package :erlangen.distribution.id)
(defun valid-name-p (string)
"Predicate to test if STRING is a valid name (e.g. not empty and does
not contain #\/)."
(not (or (= (length string) 0) (find #\/ string))))
(deftype name ()
"Type for host, node and agent names."
'(and string (satisfies valid-name-p)))
(defun host-name ()
"Returns hostname."
(machine-instance))
(defun gen-node-name ()
"Returns node name generated from Unix pid."
(format nil "node-~a" (ccl::getpid)))
(defvar *node-name*/lock (make-read-write-lock))
(defvar *node-name* (gen-node-name)
"Node name.")
(defun node-name ()
"Returns the name (a string) of this node."
(with-read-lock (*node-name*/lock)
*node-name*))
(defun set-node-name (name)
"Sets the node name to NAME."
(check-type name name)
(with-write-lock (*node-name*/lock)
(setf *node-name* name)))
(defsetf node-name set-node-name)
(defvar *aid-counter*/lock (make-lock "…id::*aid-counter*"))
(defvar *aid-counter* 0
"Counter used for agent id generation.")
(defun gen-aid ()
"Generates and returns unique identifier for node-local agent."
(format nil "~x" (with-lock-grabbed (*aid-counter*/lock)
(prog1 *aid-counter*
(incf *aid-counter*)))))
(defun aid-value (aid)
"Return numerical value for AID."
(parse-integer aid :radix 16 :junk-allowed nil))
(defvar *agent<->aid*/lock (make-read-write-lock))
(defvar *agent->aid* (make-hash-table :test 'eq :weak :key)
"Agent to aid mapping.")
(defvar *aid->agent* (make-hash-table :test 'equal :weak :value)
"Aid to agent mapping.")
(defun bind-aid (aid agent)
"Associate AID with AGENT."
(with-write-lock (*agent<->aid*/lock)
(setf (gethash aid *aid->agent*) agent
(gethash agent *agent->aid*) aid)))
(defun intern-anonymous-aid (agent)
"If possible, returns the existing aid for AGENT. Otherwise a new aid
for AGENT is created and returned."
(or (with-read-lock (*agent<->aid*/lock)
(gethash agent *agent->aid*))
(bind-aid (gen-aid) agent)))
(defun find-anonymous-agent (aid)
"Returns agent by AID."
(with-read-lock (*agent<->aid*/lock)
(gethash aid *aid->agent*)))
(defun registry-aid (name)
"Returns aid for agent registered for NAME. Signals an error if no aid
could be derived from NAME."
(let ((aid (prin1-to-string name)))
(check-type aid name)
aid))
(defun find-registered-agent (aid)
"Returns registered agent by AID or nil if no such agent exists."
(ignore-errors (agent-by-name (find-symbol aid :keyword))))
(defun format-id (aid)
"Return id for AID."
(format nil "~a/~a/~a" (host-name) (node-name) aid))
(defun agent-id (agent)
"Return id for AGENT."
(format-id (etypecase agent
(agent (intern-anonymous-aid agent))
(keyword (registry-aid agent)))))
(defun decode-id (id)
"Decodes unique agent ID. Returns host name, node name, and aid."
(values-list (split-sequence #\/ id)))
(defun decode-aid (aid)
"Decodes AID and returns agent id type (:REGISTERED or :ANONYMOUS) and
name."
(if (string= ":" aid :end2 1)
(values :registered (subseq aid 1))
(values :anonymous aid)))
(defun find-agent (id)
"Returns agent by ID."
(multiple-value-bind (host node aid) (decode-id id)
(when (and (equal host (host-name))
(equal node (node-name)))
(multiple-value-bind (type aid) (decode-aid aid)
(values (ecase type
(:anonymous (find-anonymous-agent aid))
(:registered (find-registered-agent aid))))))))
(defvar *reserved*/lock (make-lock "…id::*reserved-ids*"))
(defvar *reserved* nil
"List of `aid' reservations")
(defun reserve-id ()
"Reserve a free id."
(let* ((aid (gen-aid))
(n (aid-value aid)))
(with-lock-grabbed (*reserved*/lock)
(push n *reserved*))
(format-id aid)))
(defun claim-id (id)
"Claim reserved ID. Removes ID from the list of reserved ids, and return T if
ID was removed and NIL otherwise. At the same, CLAIM-ID acts as a sort of
garbace collector. Heuristically stale reservations are removed from the list
of reserved ids, and if a stale reservation is claimed an error is signaled."
(multiple-value-bind (host node aid) (decode-id id)
(assert (equal host (host-name)))
(assert (equal node (node-name)))
(let ((n (aid-value aid))
(found-p nil))
(labels ((stale-p (n)
(< (+ n 1000000) *aid-counter*))
(delete-p (reserved)
(cond ((= reserved n) (setf found-p t))
((stale-p reserved) t))))
(with-lock-grabbed (*reserved*/lock)
(setf *reserved* (delete-if #'delete-p *reserved*)))
(assert (not (stale-p n))))
found-p)))
(defun bind-id (id agent)
"Bind aid in ID to AGENT."
(multiple-value-bind (host node aid) (decode-id id)
(declare (ignore host node))
(bind-aid aid agent)))
Specialized cl - conspack encoding / decoding for AGENT structures . Values
(defmethod encode-object ((agent agent) &key &allow-other-keys)
`((,(agent-id agent))))
(defmethod decode-object ((class (eql 'agent)) alist
&key &allow-other-keys)
(caar alist))
|
26b64fa15b79b9ccbc5ecf271400b506dfd47b29525c6e0b2762b66fafc4f13e | facebook/flow | existsCheck.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type t = {
null_loc: ALoc.t option;
bool_loc: ALoc.t option;
string_loc: ALoc.t option;
number_loc: ALoc.t option;
bigint_loc: ALoc.t option;
mixed_loc: ALoc.t option;
enum_bool_loc: ALoc.t option;
enum_string_loc: ALoc.t option;
enum_number_loc: ALoc.t option;
enum_bigint_loc: ALoc.t option;
}
let empty =
{
null_loc = None;
bool_loc = None;
string_loc = None;
number_loc = None;
bigint_loc = None;
mixed_loc = None;
enum_bool_loc = None;
enum_string_loc = None;
enum_number_loc = None;
enum_bigint_loc = None;
}
let debug_to_string t =
let string_of_loc_option = function
| None -> "None"
| Some loc -> ALoc.debug_to_string ~include_source:true loc
in
[
("null_loc", t.null_loc);
("bool_loc", t.bool_loc);
("string_loc", t.string_loc);
("number_loc", t.number_loc);
("bigint_loc", t.bigint_loc);
("mixed_loc", t.mixed_loc);
("enum_bool_loc", t.enum_bool_loc);
("enum_string_loc", t.enum_string_loc);
("enum_number_loc", t.enum_number_loc);
("enum_bigint_loc", t.enum_bigint_loc);
]
|> Base.List.map ~f:(fun (name, loc_opt) -> (name, string_of_loc_option loc_opt))
|> Base.List.map ~f:(fun (name, loc) -> Printf.sprintf " %s: %s;\n" name loc)
|> String.concat ""
|> Printf.sprintf "{\n%s}"
| null | https://raw.githubusercontent.com/facebook/flow/b6de2c8bbe21682cad96c788a192075d66bbef25/src/typing/existsCheck.ml | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type t = {
null_loc: ALoc.t option;
bool_loc: ALoc.t option;
string_loc: ALoc.t option;
number_loc: ALoc.t option;
bigint_loc: ALoc.t option;
mixed_loc: ALoc.t option;
enum_bool_loc: ALoc.t option;
enum_string_loc: ALoc.t option;
enum_number_loc: ALoc.t option;
enum_bigint_loc: ALoc.t option;
}
let empty =
{
null_loc = None;
bool_loc = None;
string_loc = None;
number_loc = None;
bigint_loc = None;
mixed_loc = None;
enum_bool_loc = None;
enum_string_loc = None;
enum_number_loc = None;
enum_bigint_loc = None;
}
let debug_to_string t =
let string_of_loc_option = function
| None -> "None"
| Some loc -> ALoc.debug_to_string ~include_source:true loc
in
[
("null_loc", t.null_loc);
("bool_loc", t.bool_loc);
("string_loc", t.string_loc);
("number_loc", t.number_loc);
("bigint_loc", t.bigint_loc);
("mixed_loc", t.mixed_loc);
("enum_bool_loc", t.enum_bool_loc);
("enum_string_loc", t.enum_string_loc);
("enum_number_loc", t.enum_number_loc);
("enum_bigint_loc", t.enum_bigint_loc);
]
|> Base.List.map ~f:(fun (name, loc_opt) -> (name, string_of_loc_option loc_opt))
|> Base.List.map ~f:(fun (name, loc) -> Printf.sprintf " %s: %s;\n" name loc)
|> String.concat ""
|> Printf.sprintf "{\n%s}"
| |
8bf06dc3714a5cb76abff531373810332b8def5c64b5a4d8f9d0f9eaf1ada90b | seckcoder/iu_c311 | infer-parser1.rkt | #lang racket
(require eopl/datatype
"../base/utils.rkt"
"../cps/builtin.rkt")
(provide (all-defined-out))
(define-datatype
expression expression?
(const-exp
(cst const?))
(var-exp
(var symbol?))
(quote-exp
(sexp sexp?))
(op-exp
(op op?)
(rands (list-of expression?)))
(call-exp
(rator expression?)
(rands (list-of expression?)))
(if-exp
(test expression?)
(then expression?)
(else expression?))
(lambda-exp
(vars (list-of symbol?))
(body expression?))
(let-exp
(var symbol?)
(val-exp expression?)
(body expression?))
(letrec-exp
(p-names (list-of symbol?))
(procs (list-of expression?))
(body expression?))
)
(define (single-or-compound exps)
(if (null? (cdr exps))
(car exps)
`(begin
,@exps)))
(define (parse sexp)
(match sexp
[(? const? x) (const-exp x)]
[(? symbol? x) (var-exp x)]
; symbol
[`(quote ,x) (quote-exp x)]
; builtin ops
[(list (? op? op) params ...)
(op-exp op (map parse params))]
; if
[`(if ,test ,then ,else)
(if-exp (parse test)
(parse then)
(parse else))]
; lambda
[`(lambda (,params ...) ,body)
(lambda-exp params
(parse body))]
[`(let ((,var ,val)) ,body)
(let-exp var
(parse val)
(parse body))]
[`(letrec ((,names ,procs) ...) ,body)
(letrec-exp names
(map parse procs)
(parse body))]
; procedure call
[(list rator rands ...)
(call-exp (parse rator)
(map parse rands))]
))
| null | https://raw.githubusercontent.com/seckcoder/iu_c311/a1215983b6ab08df32058ef1e089cb294419e567/racket/types/infer-parser1.rkt | racket | symbol
builtin ops
if
lambda
procedure call | #lang racket
(require eopl/datatype
"../base/utils.rkt"
"../cps/builtin.rkt")
(provide (all-defined-out))
(define-datatype
expression expression?
(const-exp
(cst const?))
(var-exp
(var symbol?))
(quote-exp
(sexp sexp?))
(op-exp
(op op?)
(rands (list-of expression?)))
(call-exp
(rator expression?)
(rands (list-of expression?)))
(if-exp
(test expression?)
(then expression?)
(else expression?))
(lambda-exp
(vars (list-of symbol?))
(body expression?))
(let-exp
(var symbol?)
(val-exp expression?)
(body expression?))
(letrec-exp
(p-names (list-of symbol?))
(procs (list-of expression?))
(body expression?))
)
(define (single-or-compound exps)
(if (null? (cdr exps))
(car exps)
`(begin
,@exps)))
(define (parse sexp)
(match sexp
[(? const? x) (const-exp x)]
[(? symbol? x) (var-exp x)]
[`(quote ,x) (quote-exp x)]
[(list (? op? op) params ...)
(op-exp op (map parse params))]
[`(if ,test ,then ,else)
(if-exp (parse test)
(parse then)
(parse else))]
[`(lambda (,params ...) ,body)
(lambda-exp params
(parse body))]
[`(let ((,var ,val)) ,body)
(let-exp var
(parse val)
(parse body))]
[`(letrec ((,names ,procs) ...) ,body)
(letrec-exp names
(map parse procs)
(parse body))]
[(list rator rands ...)
(call-exp (parse rator)
(map parse rands))]
))
|
017f29c49d79c231caf607ce3407d84766c977f816c8086d741d93fccfb1086a | PrincetonUniversity/lucid | TyperModules.ml | open Syntax
open SyntaxUtils
open Collections
open Batteries
open TyperUtil
Goes through each decl and substitutes each bound TName for whatever it 's
bound to . If it runs into a type declaration in an interface , it adds it to
the environment before continuing . If it runs into an abstract type declaration ,
it first explicitly binds it to a TAbstract before continuing .
bound to. If it runs into a type declaration in an interface, it adds it to
the environment before continuing. If it runs into an abstract type declaration,
it first explicitly binds it to a TAbstract before continuing. *)
let subst_TNames env d =
let v =
object (self)
inherit [_] s_map as super
method! visit_ty env ty =
match ty.raw_ty with
| TName _ ->
let raw_ty =
match lookup_TName ty.tspan (snd !env) ty.raw_ty with
| TAbstract (x, y, z, _) -> TAbstract (x, y, z, ty.raw_ty)
| raw_ty -> raw_ty
in
{ ty with raw_ty }
| _ -> super#visit_ty env ty
method! visit_raw_ty env raw_ty =
match raw_ty with
| TName _ ->
let raw_ty =
match lookup_TName Span.default (snd !env) raw_ty with
| TAbstract (x, y, z, _) -> TAbstract (x, y, z, raw_ty)
| raw_ty -> raw_ty
in
raw_ty
| _ -> super#visit_raw_ty env raw_ty
method! visit_TAbstract _ cid sizes b rty =
(* Don't replace in rty *)
TAbstract (cid, sizes, b, rty)
method! visit_DUserTy env id sizes ty =
let ty = self#visit_ty env ty in
env := fst !env, define_user_ty id sizes ty (snd !env);
DUserTy (id, sizes, ty)
method! visit_InTy env id sizes tyo b =
let tyo' =
match tyo with
| Some ty -> Some (self#visit_ty env ty)
| None ->
let abs_cid = Cid.create_ids_rev @@ (Id.freshen id :: fst !env) in
Some (TAbstract (abs_cid, sizes, b, TVoid) |> ty)
in
env := fst !env, define_user_ty id sizes (Option.get tyo') (snd !env);
InTy (id, sizes, tyo', b)
method! visit_DModule env id interface ds =
let orig_path, orig_env = !env in
let ds = self#visit_decls env ds in
env := id :: orig_path, orig_env;
let interface = self#visit_interface env interface in
env := orig_path, orig_env;
DModule (id, interface, ds)
method! visit_TQVar env tqv =
match tqv with
| TVar { contents = Link x } -> self#visit_raw_ty env x
| _ -> TQVar tqv
method! visit_IVar env tqv =
match tqv with
| TVar { contents = Link x } -> self#visit_size env x
| _ -> IVar tqv
method! visit_FVar env tqv =
match tqv with
| TVar { contents = Link x } -> self#visit_effect env x
| _ -> FVar tqv
method! visit_exp env e = { e with e = self#visit_e env e.e }
end
in
v#visit_decl (ref ([], env)) d
;;
let unsubst_TAbstracts ds =
let v =
object (self)
inherit [_] s_map
method! visit_TAbstract _ _ _ _ orig_ty = orig_ty
method! visit_InTy env id sizes tyo b =
let tyo =
match tyo with
| Some { raw_ty = TAbstract (_, _, _, TVoid) } -> None
| Some ty -> Some (self#visit_ty env ty)
| None -> failwith "Sanity check: shouldn't happen"
in
InTy (id, sizes, tyo, b)
end
in
v#visit_decls () ds
;;
let rec modul_of_interface span env interface =
let aux acc intf =
match intf.ispec with
| InSize id -> { acc with sizes = IdSet.add id acc.sizes }
| InVar (id, ty) -> { acc with vars = IdMap.add id ty acc.vars }
| InTy (id, sizes, tyo, _) ->
let ty =
match tyo with
| Some ty -> ty
| None -> failwith "Internal error: should be replaced by subst_TNames"
in
{ acc with user_tys = IdMap.add id (sizes, ty) acc.user_tys }
| InConstr (id, ty, params) ->
let start_eff = FVar (QVar (Id.fresh "eff")) in
let fty =
{ arg_tys = List.map snd params
; ret_ty = ty
; start_eff
; end_eff = start_eff
; constraints = ref []
}
|> normalize_tfun
in
{ acc with constructors = IdMap.add id fty acc.constructors }
| InFun (id, ret_ty, constrs, params) ->
let start_eff = FVar (QVar (Id.fresh "eff")) in
let constrs, end_eff =
spec_to_constraints env span start_eff params constrs
in
let end_eff = Option.default start_eff end_eff in
let fty =
{ arg_tys = List.map snd params
; ret_ty
; start_eff
; end_eff
; constraints = ref constrs
}
|> normalize_tfun
in
{ acc with vars = IdMap.add id (ty @@ TFun fty) acc.vars }
| InEvent (id, constrs, params) ->
let start_eff = FVar (QVar (Id.fresh "eff")) in
let constrs, _ = spec_to_constraints env span start_eff params constrs in
let fty =
{ arg_tys = List.map snd params
; ret_ty = ty TEvent
; start_eff
; end_eff = start_eff
; constraints = ref constrs
}
|> normalize_tfun
in
{ acc with vars = IdMap.add id (ty @@ TFun fty) acc.vars }
| InModule (id, interface) ->
{ acc with
submodules =
IdMap.add id (modul_of_interface span env interface) acc.submodules
}
in
List.fold_left aux empty_modul interface
;;
(* Go through a module and consistently replace each abstract type which is
defined in the body. For use in checking equivalence/interface validity *)
let replace_abstract_type (target : cid) (replacement : sizes * ty) modul =
let v =
object
inherit [_] s_map as super
method! visit_ty (target, (sizes', ty')) ty =
match ty.raw_ty with
| TAbstract (cid, sizes, _, _) ->
let replaced_ty =
ReplaceUserTys.subst_sizes
ty.tspan
cid
ty'.raw_ty
(ReplaceUserTys.extract_ids ty.tspan sizes')
sizes
in
{ ty with raw_ty = replaced_ty }
| _ -> super#visit_ty (target, (sizes', ty')) ty
end
in
let rec replace_modul env modul =
{ modul with
vars = IdMap.map (v#visit_ty env) modul.vars
; constructors = IdMap.map (v#visit_func_ty env) modul.constructors
; user_tys =
IdMap.map (fun (sz, ty) -> sz, v#visit_ty env ty) modul.user_tys
; submodules = IdMap.map (replace_modul env) modul.submodules
}
in
let env = target, replacement in
replace_modul env modul
;;
let rec ensure_equiv_modul span m1 m2 =
For each abstract type declared in m1 , ensure that also declares an abstract
type with the same name , and replace each occurrence of that abstract type in
m1 with the definition in ( so they can be compared directly later
type with the same name, and replace each occurrence of that abstract type in
m1 with the definition in m2 (so they can be compared directly later *)
let open Printing in
let err str =
Console.error_position span @@ "Modules have inequvalent interfaces: " ^ str
in
print_endline @@ " m1 : " ^ modul_to_string m1 ;
let m1 =
IdMap.fold
(fun id (_, ty) acc ->
match ty.raw_ty with
| TAbstract (cid, _, b, TVoid)
when i d = ( Cid.last_id cid )
(match IdMap.find_opt id m2.user_tys with
| Some (sizes', ({ raw_ty = TAbstract (_, _, b', _) } as ty'))
when b = b' -> replace_abstract_type cid (sizes', ty') acc
| _ -> (* We'll return false later *) acc)
| _ -> (* Not an abstract type, don't need to replace *) acc)
m1.user_tys
m1
in
(* print_endline @@ "replaced_m1: " ^ modul_to_string m1;
print_endline @@ "m2: " ^ modul_to_string m1; *)
let compare_sizes m1 m2 =
let sz_diff = IdSet.sym_diff m1.sizes m2.sizes in
if not (IdSet.is_empty sz_diff)
then
Console.error_position span
@@ "Size "
^ id_to_string (IdSet.choose sz_diff)
^ " is defined in one module but not the other"
in
let compare_maps cmp print map1 map2 =
ignore
@@ IdMap.merge
(fun id o1 o2 ->
match o1, o2 with
| None, None -> failwith "impossible"
| None, _ | _, None ->
err
@@ id_to_string id
^ " is defined in one module but not the other"
| Some x1, Some x2 ->
if not (cmp x1 x2)
then
err
@@ Printf.sprintf
"%s has type %s in one module and %s in the other"
(id_to_string id)
(print x1)
(print x2);
None)
map1
map2
in
let compare_vars m1 m2 = compare_maps equiv_ty ty_to_string m1.vars m2.vars in
let compare_user_tys m1 m2 =
compare_maps
(fun (szs1, ty1) (szs2, ty2) ->
(* I think this is overkill since we replaced in m1 but I don't think it's wrong *)
let szs1, ty1 =
let norm = normalizer () in
List.map (norm#visit_size ()) szs1, norm#visit_ty () ty1
in
let szs2, ty2 =
let norm = normalizer () in
List.map (norm#visit_size ()) szs2, norm#visit_ty () ty2
in
List.length szs1 = List.length szs2
&& equiv_ty ~ignore_effects:true ty1 ty2)
(fun (_, ty) -> ty_to_string ty)
m1.user_tys
m2.user_tys
in
let compare_constructors m1 m2 =
compare_maps
(fun fty1 fty2 -> equiv_raw_ty (TFun fty1) (TFun fty2))
func_to_string
m1.constructors
m2.constructors
in
let compare_submodules m1 m2 =
compare_maps
(fun m1 m2 ->
ensure_equiv_modul span m1 m2;
true)
modul_to_string
m1.submodules
m2.submodules
in
compare_sizes m1 m2;
compare_vars m1 m2;
compare_user_tys m1 m2;
compare_constructors m1 m2;
compare_submodules m1 m2
;;
let rec ensure_compatible_interface span intf_modul modul =
let open Printing in
(* For each abstract type declared in the interface, endsure the body has a
corresponding type declared, and replace the version in the interface with
the body's definition *)
let intf_modul =
IdMap.fold
(fun id (_, ty) acc ->
match ty.raw_ty with
| TAbstract (cid, _, b, TVoid)
when i d = ( Cid.last_id cid )
(match IdMap.find_opt id modul.user_tys with
| Some (sizes', ty') ->
if (b && is_global ty') || ((not b) && is_not_global ty')
then replace_abstract_type cid (sizes', ty') acc
else acc
| _ -> (* We'll return false later *) acc)
| _ -> (* Not an abstract type, don't need to replace *) acc)
intf_modul.user_tys
intf_modul
in
let check_func_tys id fty1 fty2 =
if List.length fty1.arg_tys <> List.length fty2.arg_tys
then
error_sp span
@@ Printf.sprintf
"%s takes %d arguments in interface but %d in body"
(id_to_string id)
(List.length fty1.arg_tys)
(List.length fty2.arg_tys);
List.iter2i
(fun n ty1 ty2 ->
if not (equiv_ty ty1 ty2)
then
error_sp ty1.tspan
@@ Printf.sprintf
"Argument %d to %s has type %s in interface but %s in module \
body"
n
(id_to_string id)
(ty_to_string ty1)
(ty_to_string ty2))
fty1.arg_tys
fty2.arg_tys;
if not (equiv_ty fty1.ret_ty fty2.ret_ty)
then
error_sp span
@@ Printf.sprintf
"%s returns %s in interface but %s in body"
(id_to_string id)
(ty_to_string fty1.ret_ty)
(ty_to_string fty2.ret_ty)
in
let diff = IdSet.diff intf_modul.sizes modul.sizes in
if not (IdSet.is_empty diff)
then
error_sp span
@@ "Size "
^ id_to_string (IdSet.choose diff)
^ " appears in interface but not module body";
IdMap.iter
(fun id ty ->
match IdMap.find_opt id modul.vars with
(* Note: This won't work right if we get more functional later *)
| Some { raw_ty = TFun body_fty } ->
Got ta handle functions differently , since the constraints in the
interface may be less restrictive . Need to ensure :
1 . Constraints in the interface imply constraints in the body
2 . End effect in interface is > = end effect in body
interface may be less restrictive. Need to ensure:
1. Constraints in the interface imply constraints in the body
2. End effect in interface is >= end effect in body *)
let intf_fty =
match ty.raw_ty with
| TFun fty -> normalize_tfun fty
| _ ->
error_sp span
@@ Printf.sprintf
"%s has type non-function type %s in interface but function \
type in module body"
(id_to_string id)
(ty_to_string (normalize_ty ty))
in
let body_fty = normalize_tfun body_fty in
let sufficient_constraints =
let rhs =
match !(body_fty.constraints) with
| [] -> []
| lst -> CLeq (body_fty.end_eff, intf_fty.end_eff) :: lst
in
TyperZ3.check_implies !(intf_fty.constraints) rhs
in
if not sufficient_constraints
then
error_sp span
@@ Printf.sprintf
"Constraints in interface (for function %s) are weaker than the \
constraints in the module body."
(Printing.id_to_string id);
check_func_tys id intf_fty body_fty
| Some ty' when equiv_raw_ty ty.raw_ty ty'.raw_ty -> ()
| Some ty' ->
error_sp span
@@ Printf.sprintf
"%s has type %s in interface but type %s in module body"
(id_to_string id)
(ty_to_string (normalize_ty ty))
(ty_to_string @@ normalize_ty ty')
| None ->
error_sp span
@@ id_to_string id
^ " is declared in module interface but does not appear in the body")
intf_modul.vars;
IdMap.iter
(fun id fty ->
match IdMap.find_opt id modul.constructors with
| Some fty' when equiv_raw_ty (TFun fty) (TFun fty') -> ()
| Some fty' ->
error_sp span
@@ Printf.sprintf
"Constructor %s has type %s in interface but type %s in module \
body"
(id_to_string id)
(func_to_string fty)
(func_to_string fty')
| None ->
error_sp span
@@ "Constructor "
^ id_to_string id
^ " is declared in module interface but does not appear in the body")
intf_modul.constructors;
IdMap.iter
(fun id (sizes, ty) ->
match IdMap.find_opt id modul.user_tys with
| None ->
error_sp span
@@ "Type "
^ id_to_string id
^ " is declared in module interface but does not appear in the body"
| Some (sizes', ty') ->
if List.length sizes <> List.length sizes'
then
error_sp span
@@ Printf.sprintf
"Type %s has %d size parameters in interface but %d in body"
(id_to_string id)
(List.length sizes)
(List.length sizes');
if not (equiv_raw_ty ty.raw_ty ty'.raw_ty)
then
error_sp span
@@ Printf.sprintf
"Type %s has is defined as %s in interface but %s in body"
(id_to_string id)
(ty_to_string ty)
(ty_to_string ty'))
intf_modul.user_tys;
IdMap.iter
(fun id m ->
match IdMap.find_opt id modul.submodules with
| Some m' -> ensure_compatible_interface span m m'
| None ->
error_sp span
@@ "Module "
^ id_to_string id
^ " is declared in module interface but does not appear in the body")
intf_modul.submodules
;;
let add_interface span env id interface modul =
let intf_modul = modul_of_interface span env interface in
ensure_compatible_interface span intf_modul modul;
define_submodule id intf_modul env
;;
(* Replace each abstract type declared in a modul with a fresh abstract type *)
let re_abstract_modul new_id m =
IdMap.fold
(fun _ (_, uty) acc ->
match uty.raw_ty with
| TAbstract (cid, sizes, b, TVoid)
when i d = ( Cid.last_id cid )
let new_cid = Compound (new_id, Id (Id.freshen (Cid.last_id cid))) in
replace_abstract_type
cid
(sizes, ty @@ TAbstract (new_cid, sizes, b, TVoid))
acc
| _ -> (* Not an abstract type, don't need to replace *) acc)
m.user_tys
m
;;
| null | https://raw.githubusercontent.com/PrincetonUniversity/lucid/b6e8130edb2e1f4a082e48e020b37576b1126600/src/lib/frontend/typing/TyperModules.ml | ocaml | Don't replace in rty
Go through a module and consistently replace each abstract type which is
defined in the body. For use in checking equivalence/interface validity
We'll return false later
Not an abstract type, don't need to replace
print_endline @@ "replaced_m1: " ^ modul_to_string m1;
print_endline @@ "m2: " ^ modul_to_string m1;
I think this is overkill since we replaced in m1 but I don't think it's wrong
For each abstract type declared in the interface, endsure the body has a
corresponding type declared, and replace the version in the interface with
the body's definition
We'll return false later
Not an abstract type, don't need to replace
Note: This won't work right if we get more functional later
Replace each abstract type declared in a modul with a fresh abstract type
Not an abstract type, don't need to replace | open Syntax
open SyntaxUtils
open Collections
open Batteries
open TyperUtil
Goes through each decl and substitutes each bound TName for whatever it 's
bound to . If it runs into a type declaration in an interface , it adds it to
the environment before continuing . If it runs into an abstract type declaration ,
it first explicitly binds it to a TAbstract before continuing .
bound to. If it runs into a type declaration in an interface, it adds it to
the environment before continuing. If it runs into an abstract type declaration,
it first explicitly binds it to a TAbstract before continuing. *)
let subst_TNames env d =
let v =
object (self)
inherit [_] s_map as super
method! visit_ty env ty =
match ty.raw_ty with
| TName _ ->
let raw_ty =
match lookup_TName ty.tspan (snd !env) ty.raw_ty with
| TAbstract (x, y, z, _) -> TAbstract (x, y, z, ty.raw_ty)
| raw_ty -> raw_ty
in
{ ty with raw_ty }
| _ -> super#visit_ty env ty
method! visit_raw_ty env raw_ty =
match raw_ty with
| TName _ ->
let raw_ty =
match lookup_TName Span.default (snd !env) raw_ty with
| TAbstract (x, y, z, _) -> TAbstract (x, y, z, raw_ty)
| raw_ty -> raw_ty
in
raw_ty
| _ -> super#visit_raw_ty env raw_ty
method! visit_TAbstract _ cid sizes b rty =
TAbstract (cid, sizes, b, rty)
method! visit_DUserTy env id sizes ty =
let ty = self#visit_ty env ty in
env := fst !env, define_user_ty id sizes ty (snd !env);
DUserTy (id, sizes, ty)
method! visit_InTy env id sizes tyo b =
let tyo' =
match tyo with
| Some ty -> Some (self#visit_ty env ty)
| None ->
let abs_cid = Cid.create_ids_rev @@ (Id.freshen id :: fst !env) in
Some (TAbstract (abs_cid, sizes, b, TVoid) |> ty)
in
env := fst !env, define_user_ty id sizes (Option.get tyo') (snd !env);
InTy (id, sizes, tyo', b)
method! visit_DModule env id interface ds =
let orig_path, orig_env = !env in
let ds = self#visit_decls env ds in
env := id :: orig_path, orig_env;
let interface = self#visit_interface env interface in
env := orig_path, orig_env;
DModule (id, interface, ds)
method! visit_TQVar env tqv =
match tqv with
| TVar { contents = Link x } -> self#visit_raw_ty env x
| _ -> TQVar tqv
method! visit_IVar env tqv =
match tqv with
| TVar { contents = Link x } -> self#visit_size env x
| _ -> IVar tqv
method! visit_FVar env tqv =
match tqv with
| TVar { contents = Link x } -> self#visit_effect env x
| _ -> FVar tqv
method! visit_exp env e = { e with e = self#visit_e env e.e }
end
in
v#visit_decl (ref ([], env)) d
;;
let unsubst_TAbstracts ds =
let v =
object (self)
inherit [_] s_map
method! visit_TAbstract _ _ _ _ orig_ty = orig_ty
method! visit_InTy env id sizes tyo b =
let tyo =
match tyo with
| Some { raw_ty = TAbstract (_, _, _, TVoid) } -> None
| Some ty -> Some (self#visit_ty env ty)
| None -> failwith "Sanity check: shouldn't happen"
in
InTy (id, sizes, tyo, b)
end
in
v#visit_decls () ds
;;
let rec modul_of_interface span env interface =
let aux acc intf =
match intf.ispec with
| InSize id -> { acc with sizes = IdSet.add id acc.sizes }
| InVar (id, ty) -> { acc with vars = IdMap.add id ty acc.vars }
| InTy (id, sizes, tyo, _) ->
let ty =
match tyo with
| Some ty -> ty
| None -> failwith "Internal error: should be replaced by subst_TNames"
in
{ acc with user_tys = IdMap.add id (sizes, ty) acc.user_tys }
| InConstr (id, ty, params) ->
let start_eff = FVar (QVar (Id.fresh "eff")) in
let fty =
{ arg_tys = List.map snd params
; ret_ty = ty
; start_eff
; end_eff = start_eff
; constraints = ref []
}
|> normalize_tfun
in
{ acc with constructors = IdMap.add id fty acc.constructors }
| InFun (id, ret_ty, constrs, params) ->
let start_eff = FVar (QVar (Id.fresh "eff")) in
let constrs, end_eff =
spec_to_constraints env span start_eff params constrs
in
let end_eff = Option.default start_eff end_eff in
let fty =
{ arg_tys = List.map snd params
; ret_ty
; start_eff
; end_eff
; constraints = ref constrs
}
|> normalize_tfun
in
{ acc with vars = IdMap.add id (ty @@ TFun fty) acc.vars }
| InEvent (id, constrs, params) ->
let start_eff = FVar (QVar (Id.fresh "eff")) in
let constrs, _ = spec_to_constraints env span start_eff params constrs in
let fty =
{ arg_tys = List.map snd params
; ret_ty = ty TEvent
; start_eff
; end_eff = start_eff
; constraints = ref constrs
}
|> normalize_tfun
in
{ acc with vars = IdMap.add id (ty @@ TFun fty) acc.vars }
| InModule (id, interface) ->
{ acc with
submodules =
IdMap.add id (modul_of_interface span env interface) acc.submodules
}
in
List.fold_left aux empty_modul interface
;;
let replace_abstract_type (target : cid) (replacement : sizes * ty) modul =
let v =
object
inherit [_] s_map as super
method! visit_ty (target, (sizes', ty')) ty =
match ty.raw_ty with
| TAbstract (cid, sizes, _, _) ->
let replaced_ty =
ReplaceUserTys.subst_sizes
ty.tspan
cid
ty'.raw_ty
(ReplaceUserTys.extract_ids ty.tspan sizes')
sizes
in
{ ty with raw_ty = replaced_ty }
| _ -> super#visit_ty (target, (sizes', ty')) ty
end
in
let rec replace_modul env modul =
{ modul with
vars = IdMap.map (v#visit_ty env) modul.vars
; constructors = IdMap.map (v#visit_func_ty env) modul.constructors
; user_tys =
IdMap.map (fun (sz, ty) -> sz, v#visit_ty env ty) modul.user_tys
; submodules = IdMap.map (replace_modul env) modul.submodules
}
in
let env = target, replacement in
replace_modul env modul
;;
let rec ensure_equiv_modul span m1 m2 =
For each abstract type declared in m1 , ensure that also declares an abstract
type with the same name , and replace each occurrence of that abstract type in
m1 with the definition in ( so they can be compared directly later
type with the same name, and replace each occurrence of that abstract type in
m1 with the definition in m2 (so they can be compared directly later *)
let open Printing in
let err str =
Console.error_position span @@ "Modules have inequvalent interfaces: " ^ str
in
print_endline @@ " m1 : " ^ modul_to_string m1 ;
let m1 =
IdMap.fold
(fun id (_, ty) acc ->
match ty.raw_ty with
| TAbstract (cid, _, b, TVoid)
when i d = ( Cid.last_id cid )
(match IdMap.find_opt id m2.user_tys with
| Some (sizes', ({ raw_ty = TAbstract (_, _, b', _) } as ty'))
when b = b' -> replace_abstract_type cid (sizes', ty') acc
m1.user_tys
m1
in
let compare_sizes m1 m2 =
let sz_diff = IdSet.sym_diff m1.sizes m2.sizes in
if not (IdSet.is_empty sz_diff)
then
Console.error_position span
@@ "Size "
^ id_to_string (IdSet.choose sz_diff)
^ " is defined in one module but not the other"
in
let compare_maps cmp print map1 map2 =
ignore
@@ IdMap.merge
(fun id o1 o2 ->
match o1, o2 with
| None, None -> failwith "impossible"
| None, _ | _, None ->
err
@@ id_to_string id
^ " is defined in one module but not the other"
| Some x1, Some x2 ->
if not (cmp x1 x2)
then
err
@@ Printf.sprintf
"%s has type %s in one module and %s in the other"
(id_to_string id)
(print x1)
(print x2);
None)
map1
map2
in
let compare_vars m1 m2 = compare_maps equiv_ty ty_to_string m1.vars m2.vars in
let compare_user_tys m1 m2 =
compare_maps
(fun (szs1, ty1) (szs2, ty2) ->
let szs1, ty1 =
let norm = normalizer () in
List.map (norm#visit_size ()) szs1, norm#visit_ty () ty1
in
let szs2, ty2 =
let norm = normalizer () in
List.map (norm#visit_size ()) szs2, norm#visit_ty () ty2
in
List.length szs1 = List.length szs2
&& equiv_ty ~ignore_effects:true ty1 ty2)
(fun (_, ty) -> ty_to_string ty)
m1.user_tys
m2.user_tys
in
let compare_constructors m1 m2 =
compare_maps
(fun fty1 fty2 -> equiv_raw_ty (TFun fty1) (TFun fty2))
func_to_string
m1.constructors
m2.constructors
in
let compare_submodules m1 m2 =
compare_maps
(fun m1 m2 ->
ensure_equiv_modul span m1 m2;
true)
modul_to_string
m1.submodules
m2.submodules
in
compare_sizes m1 m2;
compare_vars m1 m2;
compare_user_tys m1 m2;
compare_constructors m1 m2;
compare_submodules m1 m2
;;
let rec ensure_compatible_interface span intf_modul modul =
let open Printing in
let intf_modul =
IdMap.fold
(fun id (_, ty) acc ->
match ty.raw_ty with
| TAbstract (cid, _, b, TVoid)
when i d = ( Cid.last_id cid )
(match IdMap.find_opt id modul.user_tys with
| Some (sizes', ty') ->
if (b && is_global ty') || ((not b) && is_not_global ty')
then replace_abstract_type cid (sizes', ty') acc
else acc
intf_modul.user_tys
intf_modul
in
let check_func_tys id fty1 fty2 =
if List.length fty1.arg_tys <> List.length fty2.arg_tys
then
error_sp span
@@ Printf.sprintf
"%s takes %d arguments in interface but %d in body"
(id_to_string id)
(List.length fty1.arg_tys)
(List.length fty2.arg_tys);
List.iter2i
(fun n ty1 ty2 ->
if not (equiv_ty ty1 ty2)
then
error_sp ty1.tspan
@@ Printf.sprintf
"Argument %d to %s has type %s in interface but %s in module \
body"
n
(id_to_string id)
(ty_to_string ty1)
(ty_to_string ty2))
fty1.arg_tys
fty2.arg_tys;
if not (equiv_ty fty1.ret_ty fty2.ret_ty)
then
error_sp span
@@ Printf.sprintf
"%s returns %s in interface but %s in body"
(id_to_string id)
(ty_to_string fty1.ret_ty)
(ty_to_string fty2.ret_ty)
in
let diff = IdSet.diff intf_modul.sizes modul.sizes in
if not (IdSet.is_empty diff)
then
error_sp span
@@ "Size "
^ id_to_string (IdSet.choose diff)
^ " appears in interface but not module body";
IdMap.iter
(fun id ty ->
match IdMap.find_opt id modul.vars with
| Some { raw_ty = TFun body_fty } ->
Got ta handle functions differently , since the constraints in the
interface may be less restrictive . Need to ensure :
1 . Constraints in the interface imply constraints in the body
2 . End effect in interface is > = end effect in body
interface may be less restrictive. Need to ensure:
1. Constraints in the interface imply constraints in the body
2. End effect in interface is >= end effect in body *)
let intf_fty =
match ty.raw_ty with
| TFun fty -> normalize_tfun fty
| _ ->
error_sp span
@@ Printf.sprintf
"%s has type non-function type %s in interface but function \
type in module body"
(id_to_string id)
(ty_to_string (normalize_ty ty))
in
let body_fty = normalize_tfun body_fty in
let sufficient_constraints =
let rhs =
match !(body_fty.constraints) with
| [] -> []
| lst -> CLeq (body_fty.end_eff, intf_fty.end_eff) :: lst
in
TyperZ3.check_implies !(intf_fty.constraints) rhs
in
if not sufficient_constraints
then
error_sp span
@@ Printf.sprintf
"Constraints in interface (for function %s) are weaker than the \
constraints in the module body."
(Printing.id_to_string id);
check_func_tys id intf_fty body_fty
| Some ty' when equiv_raw_ty ty.raw_ty ty'.raw_ty -> ()
| Some ty' ->
error_sp span
@@ Printf.sprintf
"%s has type %s in interface but type %s in module body"
(id_to_string id)
(ty_to_string (normalize_ty ty))
(ty_to_string @@ normalize_ty ty')
| None ->
error_sp span
@@ id_to_string id
^ " is declared in module interface but does not appear in the body")
intf_modul.vars;
IdMap.iter
(fun id fty ->
match IdMap.find_opt id modul.constructors with
| Some fty' when equiv_raw_ty (TFun fty) (TFun fty') -> ()
| Some fty' ->
error_sp span
@@ Printf.sprintf
"Constructor %s has type %s in interface but type %s in module \
body"
(id_to_string id)
(func_to_string fty)
(func_to_string fty')
| None ->
error_sp span
@@ "Constructor "
^ id_to_string id
^ " is declared in module interface but does not appear in the body")
intf_modul.constructors;
IdMap.iter
(fun id (sizes, ty) ->
match IdMap.find_opt id modul.user_tys with
| None ->
error_sp span
@@ "Type "
^ id_to_string id
^ " is declared in module interface but does not appear in the body"
| Some (sizes', ty') ->
if List.length sizes <> List.length sizes'
then
error_sp span
@@ Printf.sprintf
"Type %s has %d size parameters in interface but %d in body"
(id_to_string id)
(List.length sizes)
(List.length sizes');
if not (equiv_raw_ty ty.raw_ty ty'.raw_ty)
then
error_sp span
@@ Printf.sprintf
"Type %s has is defined as %s in interface but %s in body"
(id_to_string id)
(ty_to_string ty)
(ty_to_string ty'))
intf_modul.user_tys;
IdMap.iter
(fun id m ->
match IdMap.find_opt id modul.submodules with
| Some m' -> ensure_compatible_interface span m m'
| None ->
error_sp span
@@ "Module "
^ id_to_string id
^ " is declared in module interface but does not appear in the body")
intf_modul.submodules
;;
let add_interface span env id interface modul =
let intf_modul = modul_of_interface span env interface in
ensure_compatible_interface span intf_modul modul;
define_submodule id intf_modul env
;;
let re_abstract_modul new_id m =
IdMap.fold
(fun _ (_, uty) acc ->
match uty.raw_ty with
| TAbstract (cid, sizes, b, TVoid)
when i d = ( Cid.last_id cid )
let new_cid = Compound (new_id, Id (Id.freshen (Cid.last_id cid))) in
replace_abstract_type
cid
(sizes, ty @@ TAbstract (new_cid, sizes, b, TVoid))
acc
m.user_tys
m
;;
|
8eb5be8dbb076be6c9fe7a3bdb22d849be9bf934ecd391df30c7688a37b3db20 | GaloisInc/what4 | StringSeq.hs | |
Module : . StringSeq
Description : Datastructure for sequences of appended strings
Copyright : ( c ) Galois Inc , 2019 - 2020
License : :
A simple datatype for collecting sequences of strings
that are to be concatenated together .
We intend to maintain several invariants . First , that
no sequence is empty ; the empty string literal should
instead be the unique representative of empty strings .
Second , that string sequences do not contain adjacent
literals . In other words , adjacent string literals
are coalesced .
Module : What4.Expr.StringSeq
Description : Datastructure for sequences of appended strings
Copyright : (c) Galois Inc, 2019-2020
License : BSD3
Maintainer :
A simple datatype for collecting sequences of strings
that are to be concatenated together.
We intend to maintain several invariants. First, that
no sequence is empty; the empty string literal should
instead be the unique representative of empty strings.
Second, that string sequences do not contain adjacent
literals. In other words, adjacent string literals
are coalesced.
-}
# LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE RankNTypes #-}
module What4.Expr.StringSeq
( StringSeq
, StringSeqEntry(..)
, singleton
, append
, stringSeqAbs
, toList
, traverseStringSeq
) where
import Data.Kind
import qualified Data.Foldable as F
import qualified Data.FingerTree as FT
import Data.Parameterized.Classes
import What4.BaseTypes
import What4.Interface
import What4.Utils.AbstractDomains
import What4.Utils.IncrHash
-- | Annotation value for string sequences.
First value is the XOR hash of the sequence
Second value is the string abstract domain .
data StringSeqNote = StringSeqNote !IncrHash !StringAbstractValue
instance Semigroup StringSeqNote where
StringSeqNote xh xabs <> StringSeqNote yh yabs =
StringSeqNote (xh <> yh) (stringAbsConcat xabs yabs)
instance Monoid StringSeqNote where
mempty = StringSeqNote mempty stringAbsEmpty
mappend = (<>)
data StringSeqEntry e si
= StringSeqLiteral !(StringLiteral si)
| StringSeqTerm !(e (BaseStringType si))
instance (HasAbsValue e, HashableF e) => FT.Measured StringSeqNote (StringSeqEntry e si) where
measure (StringSeqLiteral l) = StringSeqNote (toIncrHashWithSalt 1 l) (stringAbsSingle l)
measure (StringSeqTerm e) = StringSeqNote (mkIncrHash (hashWithSaltF 2 e)) (getAbsValue e)
type StringFT e si = FT.FingerTree StringSeqNote (StringSeqEntry e si)
sft_hash :: (HashableF e, HasAbsValue e) => StringFT e si -> IncrHash
sft_hash ft =
case FT.measure ft of
StringSeqNote h _abs -> h
ft_eqBy :: FT.Measured v a => (a -> a -> Bool) -> FT.FingerTree v a -> FT.FingerTree v a -> Bool
ft_eqBy eq xs0 ys0 = go (FT.viewl xs0) (FT.viewl ys0)
where
go FT.EmptyL FT.EmptyL = True
go (x FT.:< xs) (y FT.:< ys) = eq x y && go (FT.viewl xs) (FT.viewl ys)
go _ _ = False
data StringSeq
(e :: BaseType -> Type)
(si :: StringInfo) =
StringSeq
{ _stringSeqRepr :: StringInfoRepr si
, stringSeq :: FT.FingerTree StringSeqNote (StringSeqEntry e si)
}
instance (TestEquality e, HasAbsValue e, HashableF e) => TestEquality (StringSeq e) where
testEquality (StringSeq xi xs) (StringSeq yi ys)
| Just Refl <- testEquality xi yi
, sft_hash xs == sft_hash ys
= let f (StringSeqLiteral a) (StringSeqLiteral b) = a == b
f (StringSeqTerm a) (StringSeqTerm b) = isJust (testEquality a b)
f _ _ = False
in if ft_eqBy f xs ys then Just Refl else Nothing
testEquality _ _ = Nothing
instance (TestEquality e, HasAbsValue e, HashableF e) => Eq (StringSeq e si) where
x == y = isJust (testEquality x y)
instance (HasAbsValue e, HashableF e) => HashableF (StringSeq e) where
hashWithSaltF s (StringSeq _si xs) = hashWithSalt s (sft_hash xs)
instance (HasAbsValue e, HashableF e, TestEquality e) => Hashable (StringSeq e si) where
hashWithSalt = hashWithSaltF
singleton :: (HasAbsValue e, HashableF e, IsExpr e) => StringInfoRepr si -> e (BaseStringType si) -> StringSeq e si
singleton si x
| Just l <- asString x = StringSeq si (FT.singleton (StringSeqLiteral l))
| otherwise = StringSeq si (FT.singleton (StringSeqTerm x))
append :: (HasAbsValue e, HashableF e) => StringSeq e si -> StringSeq e si -> StringSeq e si
append (StringSeq si xs) (StringSeq _ ys) =
case (FT.viewr xs, FT.viewl ys) of
(xs' FT.:> StringSeqLiteral xlit, StringSeqLiteral ylit FT.:< ys')
-> StringSeq si (xs' <> (StringSeqLiteral (xlit <> ylit) FT.<| ys'))
_ -> StringSeq si (xs <> ys)
stringSeqAbs :: (HasAbsValue e, HashableF e) => StringSeq e si -> StringAbstractValue
stringSeqAbs (StringSeq _ xs) =
case FT.measure xs of
StringSeqNote _ a -> a
toList :: StringSeq e si -> [StringSeqEntry e si]
toList = F.toList . stringSeq
traverseStringSeq :: (HasAbsValue f, HashableF f, Applicative m) =>
(forall x. e x -> m (f x)) ->
StringSeq e si -> m (StringSeq f si)
traverseStringSeq f (StringSeq si xs) =
StringSeq si <$> F.foldl' (\m x -> (FT.|>) <$> m <*> g x) (pure FT.empty) xs
where
g (StringSeqLiteral l) = pure (StringSeqLiteral l)
g (StringSeqTerm x) = StringSeqTerm <$> f x
| null | https://raw.githubusercontent.com/GaloisInc/what4/f4912ea9efa8a2ef54c8010383695372af4492b6/what4/src/What4/Expr/StringSeq.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
| Annotation value for string sequences. | |
Module : . StringSeq
Description : Datastructure for sequences of appended strings
Copyright : ( c ) Galois Inc , 2019 - 2020
License : :
A simple datatype for collecting sequences of strings
that are to be concatenated together .
We intend to maintain several invariants . First , that
no sequence is empty ; the empty string literal should
instead be the unique representative of empty strings .
Second , that string sequences do not contain adjacent
literals . In other words , adjacent string literals
are coalesced .
Module : What4.Expr.StringSeq
Description : Datastructure for sequences of appended strings
Copyright : (c) Galois Inc, 2019-2020
License : BSD3
Maintainer :
A simple datatype for collecting sequences of strings
that are to be concatenated together.
We intend to maintain several invariants. First, that
no sequence is empty; the empty string literal should
instead be the unique representative of empty strings.
Second, that string sequences do not contain adjacent
literals. In other words, adjacent string literals
are coalesced.
-}
# LANGUAGE DataKinds #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
module What4.Expr.StringSeq
( StringSeq
, StringSeqEntry(..)
, singleton
, append
, stringSeqAbs
, toList
, traverseStringSeq
) where
import Data.Kind
import qualified Data.Foldable as F
import qualified Data.FingerTree as FT
import Data.Parameterized.Classes
import What4.BaseTypes
import What4.Interface
import What4.Utils.AbstractDomains
import What4.Utils.IncrHash
First value is the XOR hash of the sequence
Second value is the string abstract domain .
data StringSeqNote = StringSeqNote !IncrHash !StringAbstractValue
instance Semigroup StringSeqNote where
StringSeqNote xh xabs <> StringSeqNote yh yabs =
StringSeqNote (xh <> yh) (stringAbsConcat xabs yabs)
instance Monoid StringSeqNote where
mempty = StringSeqNote mempty stringAbsEmpty
mappend = (<>)
data StringSeqEntry e si
= StringSeqLiteral !(StringLiteral si)
| StringSeqTerm !(e (BaseStringType si))
instance (HasAbsValue e, HashableF e) => FT.Measured StringSeqNote (StringSeqEntry e si) where
measure (StringSeqLiteral l) = StringSeqNote (toIncrHashWithSalt 1 l) (stringAbsSingle l)
measure (StringSeqTerm e) = StringSeqNote (mkIncrHash (hashWithSaltF 2 e)) (getAbsValue e)
type StringFT e si = FT.FingerTree StringSeqNote (StringSeqEntry e si)
sft_hash :: (HashableF e, HasAbsValue e) => StringFT e si -> IncrHash
sft_hash ft =
case FT.measure ft of
StringSeqNote h _abs -> h
ft_eqBy :: FT.Measured v a => (a -> a -> Bool) -> FT.FingerTree v a -> FT.FingerTree v a -> Bool
ft_eqBy eq xs0 ys0 = go (FT.viewl xs0) (FT.viewl ys0)
where
go FT.EmptyL FT.EmptyL = True
go (x FT.:< xs) (y FT.:< ys) = eq x y && go (FT.viewl xs) (FT.viewl ys)
go _ _ = False
data StringSeq
(e :: BaseType -> Type)
(si :: StringInfo) =
StringSeq
{ _stringSeqRepr :: StringInfoRepr si
, stringSeq :: FT.FingerTree StringSeqNote (StringSeqEntry e si)
}
instance (TestEquality e, HasAbsValue e, HashableF e) => TestEquality (StringSeq e) where
testEquality (StringSeq xi xs) (StringSeq yi ys)
| Just Refl <- testEquality xi yi
, sft_hash xs == sft_hash ys
= let f (StringSeqLiteral a) (StringSeqLiteral b) = a == b
f (StringSeqTerm a) (StringSeqTerm b) = isJust (testEquality a b)
f _ _ = False
in if ft_eqBy f xs ys then Just Refl else Nothing
testEquality _ _ = Nothing
instance (TestEquality e, HasAbsValue e, HashableF e) => Eq (StringSeq e si) where
x == y = isJust (testEquality x y)
instance (HasAbsValue e, HashableF e) => HashableF (StringSeq e) where
hashWithSaltF s (StringSeq _si xs) = hashWithSalt s (sft_hash xs)
instance (HasAbsValue e, HashableF e, TestEquality e) => Hashable (StringSeq e si) where
hashWithSalt = hashWithSaltF
singleton :: (HasAbsValue e, HashableF e, IsExpr e) => StringInfoRepr si -> e (BaseStringType si) -> StringSeq e si
singleton si x
| Just l <- asString x = StringSeq si (FT.singleton (StringSeqLiteral l))
| otherwise = StringSeq si (FT.singleton (StringSeqTerm x))
append :: (HasAbsValue e, HashableF e) => StringSeq e si -> StringSeq e si -> StringSeq e si
append (StringSeq si xs) (StringSeq _ ys) =
case (FT.viewr xs, FT.viewl ys) of
(xs' FT.:> StringSeqLiteral xlit, StringSeqLiteral ylit FT.:< ys')
-> StringSeq si (xs' <> (StringSeqLiteral (xlit <> ylit) FT.<| ys'))
_ -> StringSeq si (xs <> ys)
stringSeqAbs :: (HasAbsValue e, HashableF e) => StringSeq e si -> StringAbstractValue
stringSeqAbs (StringSeq _ xs) =
case FT.measure xs of
StringSeqNote _ a -> a
toList :: StringSeq e si -> [StringSeqEntry e si]
toList = F.toList . stringSeq
traverseStringSeq :: (HasAbsValue f, HashableF f, Applicative m) =>
(forall x. e x -> m (f x)) ->
StringSeq e si -> m (StringSeq f si)
traverseStringSeq f (StringSeq si xs) =
StringSeq si <$> F.foldl' (\m x -> (FT.|>) <$> m <*> g x) (pure FT.empty) xs
where
g (StringSeqLiteral l) = pure (StringSeqLiteral l)
g (StringSeqTerm x) = StringSeqTerm <$> f x
|
0b7cbd63f6c30384d0327dbe5b4fc902c0cae4da763e8fd6a1461c149830b9d3 | comptekki/esysman | redirect_handler.erl | Copyright ( c ) 2012 , < >
%% All rights reserved.
%%
%% Redistribution and use in source and binary forms, with or without
%% modification, are permitted provided that the following conditions are met:
%%
%% * Redistributions of source code must retain the above copyright
%% notice, this list of conditions and the following disclaimer.
%% * Redistributions in binary form must reproduce the above copyright
%% notice, this list of conditions and the following disclaimer in the
%% documentation and/or other materials provided with the distribution.
%% * Neither the name of "ESysMan" nor the names of its contributors may be
%% used to endorse or promote products derived from this software without
%% specific prior written permission.
%%
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
%% CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
%% SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
%% CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
%% ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
%% POSSIBILITY OF SUCH DAMAGE.
%%
%%
-module(redirect_handler).
-export([init/2]).
-include("esysman.hrl").
init(Req, Opts) ->
{ok, [_, _, {_, [{Uname, _}]}, _, _]} = file:consult(?CONF),
Req2 = cowboy_req : set_resp_cookie(Uname , CookieVal , Req , # { max_age = > ? MAXAGE , path = > " / " , secure = > true , http_only = > true } ) ,
Req2 = cowboy_req:set_resp_cookie(Uname, <<"">>, Req, #{ path => "/", max_age => 0 }),
% Req3 = cowboy_req:set_resp_header(<<"Location">>, "/esysman", Req2),
Req4 = cowboy_req:reply(
200,
#{ <<"content-type">> => <<"text/html">> },
<<"<html>
<head>
<meta Http-Equiv='Cache-Control' Content='no-cache'>
<meta Http-Equiv='Pragma' Content='no-cache'>
<meta Http-Equiv='Expires' Content='0'>
<META HTTP-EQUIV='EXPIRES' CONTENT='Mon, 30 Apr 2012 00:00:01 GMT'>
<meta http-equiv='refresh' content='0; url=/esysman'>
</head>
<style>
body {background-color:black; color:yellow}
</style>
<body>
</body>
</html>">>, Req2),
{ok, Req4, Opts}.
| null | https://raw.githubusercontent.com/comptekki/esysman/f13b48c8aef261e230b374cc8272e409fc7d4e10/src/redirect_handler.erl | erlang | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of "ESysMan" nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Req3 = cowboy_req:set_resp_header(<<"Location">>, "/esysman", Req2), | Copyright ( c ) 2012 , < >
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
-module(redirect_handler).
-export([init/2]).
-include("esysman.hrl").
init(Req, Opts) ->
{ok, [_, _, {_, [{Uname, _}]}, _, _]} = file:consult(?CONF),
Req2 = cowboy_req : set_resp_cookie(Uname , CookieVal , Req , # { max_age = > ? MAXAGE , path = > " / " , secure = > true , http_only = > true } ) ,
Req2 = cowboy_req:set_resp_cookie(Uname, <<"">>, Req, #{ path => "/", max_age => 0 }),
Req4 = cowboy_req:reply(
200,
#{ <<"content-type">> => <<"text/html">> },
<<"<html>
<head>
<meta Http-Equiv='Cache-Control' Content='no-cache'>
<meta Http-Equiv='Pragma' Content='no-cache'>
<meta Http-Equiv='Expires' Content='0'>
<META HTTP-EQUIV='EXPIRES' CONTENT='Mon, 30 Apr 2012 00:00:01 GMT'>
<meta http-equiv='refresh' content='0; url=/esysman'>
</head>
<style>
body {background-color:black; color:yellow}
</style>
<body>
</body>
</html>">>, Req2),
{ok, Req4, Opts}.
|
c17425c232dc95de9194bee43b047ee39deb2940bc82608ee15c7296aaece553 | thoughtstem/game-engine | on-key.rkt | #lang racket
(require "../game-entities.rkt")
(provide (except-out (struct-out on-key) on-key)
(rename-out (make-on-key on-key)
(on-key struct-on-key)
(on-key-rule struct-on-key-rule)
(on-key-f struct-on-key-f)
))
(component on-key (key rule f))
(define (make-on-key key #:rule [rule (lambda (g e) #t)] f)
(new-on-key key rule f))
(define (update-on-key g e c)
(if (and (button-change-down? (on-key-key c) g)
((on-key-rule c) g e))
((on-key-f c) g e)
e))
(new-component on-key?
update-on-key) | null | https://raw.githubusercontent.com/thoughtstem/game-engine/98c4b9e9b8c071818e564ef7efb55465cff487a8/components/on-key.rkt | racket | #lang racket
(require "../game-entities.rkt")
(provide (except-out (struct-out on-key) on-key)
(rename-out (make-on-key on-key)
(on-key struct-on-key)
(on-key-rule struct-on-key-rule)
(on-key-f struct-on-key-f)
))
(component on-key (key rule f))
(define (make-on-key key #:rule [rule (lambda (g e) #t)] f)
(new-on-key key rule f))
(define (update-on-key g e c)
(if (and (button-change-down? (on-key-key c) g)
((on-key-rule c) g e))
((on-key-f c) g e)
e))
(new-component on-key?
update-on-key) | |
c747851058ab1ea126e23e34fd603f4eca41eaa6d30a1dc6452f42d3c3225f43 | dada-lang/dada-model | traverse.rkt | #lang racket
(require redex
"../grammar.rkt"
"../util.rkt"
"lang.rkt"
"stack.rkt"
"heap.rkt"
"lease.rkt"
"test-store.rkt")
(provide traversal
traversal-e
swap-traversal
logical-write-traversal
read-traversal
owned-permission?
unique-permission?
traversal-address
access-permissions)
;; A **traversal** encodes the path that we walked when evaluating a place.
;;
;; Creating a traversal is a side-effect free operation. It is used to derive
;; the actions that result from reading/writing a place.
;;
;; Example (assume all edges are `my` for simplicity):
;;
┌ ─ ─ ─ ─ ─ ─ ┐
pair ─ ─ ─ ─ ─ ┤ ] │ ┌ ─ ─ ─ ─ ─ ─ ─ ┐
;; │ a ───┼──►│[Point]│
│ │ │ x ─ ─ ─ ─ ┼ ─ ► a4 = 22
;; │ b ───┼─┐ │ │
│ │ │ │ y ─ ─ ─ ─ ┼ ─ ► a5 = 44
└ ─ ─ ─ ─ ─ ─ ┘ ─ ─ ─ ─ ─ ┘
;; a1 │ a2
;; │
;; │
;; │ ┌───────┐
► │ [ Point ] │
│ x ─ ─ ─ ─ ┼ ─ ► a6 = 66
;; │ │
│ y ─ ─ ─ ─ ┼ ─ ► a7 = 88
;; └───────┘
;; a3
;;
;; Source:
;;
;; The place `pair a x` corresponds to a traversal:
;;
;; ( ( . x shared ) = (my box a4) )
;; │
;; ▼
;; ( ( . a var ) = (my box a2) )
;; │
;; ▼
;; ( x = (my box a1) )
(define-metafunction Dada
traversal : program Store place-at-rest -> Traversal-e or expired
[(traversal program Store place-at-rest)
Traversal
(where Traversal (traversal-e program Store place-at-rest))
]
[(traversal program Store place-at-rest)
expired
]
)
(define-metafunction Dada
traversal-e : program Store place-at-rest -> Traversal-e or expired
[(traversal-e program Store (x f ...))
(traverse-fields program Store (x = Value) (f ...))
(where Value (var-in-store Store x))
]
)
(define-metafunction Dada
traverse-fields : program Store Traversal-e (f ...) -> Traversal-e or expired
[(traverse-fields program Store Traversal-e ())
Traversal-e
]
[(traverse-fields program Store (_ = expired) (f_0 f_1 ...))
expired
]
[(traverse-fields program Store Traversal (f_0 f_1 ...))
(traverse-fields program Store ((Traversal f_0 mutability) = Box-value) (f_1 ...))
(where (Box-value mutability) (field-from-traversal program Store Traversal f_0))
]
)
(define-metafunction Dada
field-from-traversal : program Store Traversal f -> (Value mutability)
[(field-from-traversal program Store Traversal f_0)
(select-field program Unboxed-value f_0)
(where/error Address (traversal-address Traversal))
(where/error Unboxed-value (load-heap Store Address))
]
)
(define-metafunction Dada
select-field : program Unboxed-value f -> (Value mutability)
[(select-field program ((class c) (_ ... (f Value) _ ...)) f)
(Value (class-field-mutability program c f))
]
)
(define-metafunction Dada
access-permissions : Traversal-e -> Access-permissions
[(access-permissions Traversal-e)
(access-permissions-for-traversal Traversal-e (my () ()))
]
)
(define-metafunction Dada
access-permissions-for-traversal : Traversal-e Access-permissions -> Access-permissions
[(access-permissions-for-traversal (Traversal-origin = expired) Access-permissions)
(access-permissions-for-traversal-origin Traversal-origin Access-permissions)
]
[(access-permissions-for-traversal (Traversal-origin = (my box _)) Access-permissions)
(access-permissions-for-traversal-origin Traversal-origin Access-permissions)
]
[(access-permissions-for-traversal (Traversal-origin = ((lent Lease) box _)) (Owned-kind atomic? Leases))
(access-permissions-for-traversal-origin Traversal-origin (Owned-kind atomic? (add-lease-to-leases Lease Leases)))
]
[(access-permissions-for-traversal (Traversal-origin = (our box _)) (_ atomic? Leases))
(our atomic? Leases)
]
[(access-permissions-for-traversal (Traversal-origin = ((shared Lease) box _)) (_ atomic? Leases))
(our atomic? (add-lease-to-leases Lease Leases))
]
)
(define-metafunction Dada
access-permissions-for-traversal-origin : Traversal-origin Access-permissions -> Access-permissions
[(access-permissions-for-traversal-origin x Access-permissions)
Access-permissions
]
[(access-permissions-for-traversal-origin (Traversal _ var) Access-permissions)
(access-permissions-for-traversal Traversal Access-permissions)
]
[(access-permissions-for-traversal-origin (Traversal _ shared) (_ atomic? Leases))
(access-permissions-for-traversal Traversal (our atomic? Leases))
]
[(access-permissions-for-traversal-origin (Traversal _ atomic) (Owned-kind _ Leases))
(access-permissions-for-traversal Traversal (Owned-kind (atomic) Leases))
]
)
(define-metafunction Dada
swap-traversal : Store Traversal-e Value -> (Fallible-actions Value_old)
[; modify local variable: easy
(swap-traversal Store (x = Value_old) Value_new)
(((update-local x Value_new)) Value_old)
]
[; modify field: requires field be writable
(swap-traversal Store (Traversal-origin = Value_old) Value_new)
((Fallible-action ... (update-address Address Unboxed-value_new)) Value_old)
(where/error ((_ = (_ box Address)) f _) Traversal-origin)
(where/error (Fallible-action ...) (write-traversal-origin Store Traversal-origin))
(where/error Unboxed-value_old (load-heap Store Address))
(where/error Unboxed-value_new (replace-field Unboxed-value_old f Value_new))
]
)
(define-metafunction Dada
;; logical-write-traversal
;;
;; Creates the actions to write to Traversal without actually changing anything
;; in the heap. Used when lending the location.
logical-write-traversal : Store Traversal -> (Fallible-actions Box-value)
[(logical-write-traversal Store (Traversal-origin = Box-value))
(swap-traversal Store (Traversal-origin = Box-value) Box-value)
]
)
(define-metafunction Dada
replace-field : Unboxed-value f Value -> Unboxed-value
[(replace-field Unboxed-value f Value)
(Aggregate-id (Field-value_0 ... (f Value) Field-value_1 ...))
(where/error (Aggregate-id (Field-value_0 ... (f Value_old) Field-value_1 ...)) Unboxed-value)]
)
(define-metafunction Dada
write-traversal-origin : Store Traversal-origin -> Fallible-actions
[; modify local variable: no perms needed
(write-traversal-origin Store x)
()]
[; attempt to modify shared field: error
(write-traversal-origin Store (Traversal f shared))
(expired)]
[; attempt to modify var field with non-unique permission: error
(write-traversal-origin Store (Traversal f var))
(expired)
(where/error (Traversal-origin = (Permission box Address)) Traversal)
(where #f (unique-permission? Permission))
]
[; attempt to modify var field with unique permission: requires context be mutable, too
(write-traversal-origin Store (Traversal f var))
(Fallible-action ... (write-address Permission Address))
(where/error (Traversal-origin = (Permission box Address)) Traversal)
(where #t (unique-permission? Permission))
(where (Fallible-action ...) (write-traversal-origin Store Traversal-origin))
]
[; attempt to modify atomic field: field needs to be readable
(write-traversal-origin Store (Traversal f atomic))
(read-traversal-origin Store (Traversal f atomic))
]
)
(define-metafunction Dada
unique-permission? : Permission -> boolean
[(unique-permission? my) #t]
[(unique-permission? (lent _)) #t]
[(unique-permission? (shared _)) #f]
[(unique-permission? our) #f]
)
(define-metafunction Dada
owned-permission? : Permission -> boolean
[(owned-permission? my) #t]
[(owned-permission? our) #t]
[(owned-permission? (lent _)) #f]
[(owned-permission? (shared _)) #f]
)
(define-metafunction Dada
traversal-address : Traversal -> Address
[(traversal-address (_ = (_ box Address))) Address]
)
(define-metafunction Dada
read-traversal : Store Traversal -> (Fallible-actions Box-value)
[(read-traversal Store Traversal)
(Fallible-actions Box-value)
(where/error (Traversal-origin = Box-value) Traversal)
(where/error Fallible-actions (read-traversal-origin Store Traversal-origin))
]
)
(define-metafunction Dada
read-traversal-origin : Store Traversal-origin -> Fallible-actions
[; read local variable: no perms needed
(read-traversal-origin Store x)
()]
[; attempt to read field of any kind
(read-traversal-origin Store (Traversal f _))
(Fallible-action ... (read-address Permission Address))
(where/error (Traversal-origin = (Permission box Address)) Traversal)
(where (Fallible-action ...) (read-traversal-origin Store Traversal-origin))
]
)
(module+ test
(redex-let*
Dada
[; corresponds roughly to the diagram at the top of this file, with some additions
(Store (term (test-store
[(pair (my box a1))
(sh-p (my box a9))]
[(a1 (box 1 ((class Pair) ((a (my box a2)) (b (my box a3))))))
(a2 (box 1 ((class Point) ((x (our box a4)) (y (our box a5))))))
(a3 (box 1 ((class Point) ((x (my box a6)) (y (my box a7))))))
(a4 (box 2 22))
(a5 (box 2 44))
(a6 (box 1 66))
(a7 (box 1 88))
(a8 (box 1 99))
(a9 (box 1 ((class ShPoint) ((x (our box a4)) (y (our box a5))))))
]
[]
)))
(Traversal_pair_a_x (term (traversal program_test Store (pair a x))))
(Traversal_pair (term (traversal program_test Store (pair))))
(Traversal_sh-p_x (term (traversal program_test Store (sh-p x))))
]
(test-equal-terms Traversal_pair_a_x
(((((pair = (my box a1)) a var) = (my box a2)) x var)
=
(our box a4)))
(; mutating var fields propagates through the path
test-equal-terms (swap-traversal Store Traversal_pair_a_x (my box a8))
(((write-address my a1)
(write-address my a2)
(update-address
a2
((class Point) ((x (my box a8)) (y (our box a5))))))
(our box a4)))
(; mutate a local variable
test-equal-terms (swap-traversal Store Traversal_pair (my box a8))
(((update-local pair (my box a8))) (my box a1)))
(; can't mutate a shared field
test-equal-terms (swap-traversal Store Traversal_sh-p_x (my box a8))
((expired
(update-address
a9
((class ShPoint) ((x (my box a8)) (y (our box a5))))))
(our box a4)))
(; can read a shared field
test-equal-terms (read-traversal Store Traversal_sh-p_x)
(((read-address my a9)) (our box a4)))
)
) | null | https://raw.githubusercontent.com/dada-lang/dada-model/a9796940861e4802beb5822840475b91ce7af699/racket/opsem/traverse.rkt | racket | A **traversal** encodes the path that we walked when evaluating a place.
Creating a traversal is a side-effect free operation. It is used to derive
the actions that result from reading/writing a place.
Example (assume all edges are `my` for simplicity):
│ a ───┼──►│[Point]│
│ b ───┼─┐ │ │
a1 │ a2
│
│
│ ┌───────┐
│ │
└───────┘
a3
Source:
The place `pair a x` corresponds to a traversal:
( ( . x shared ) = (my box a4) )
│
▼
( ( . a var ) = (my box a2) )
│
▼
( x = (my box a1) )
modify local variable: easy
modify field: requires field be writable
logical-write-traversal
Creates the actions to write to Traversal without actually changing anything
in the heap. Used when lending the location.
modify local variable: no perms needed
attempt to modify shared field: error
attempt to modify var field with non-unique permission: error
attempt to modify var field with unique permission: requires context be mutable, too
attempt to modify atomic field: field needs to be readable
read local variable: no perms needed
attempt to read field of any kind
corresponds roughly to the diagram at the top of this file, with some additions
mutating var fields propagates through the path
mutate a local variable
can't mutate a shared field
can read a shared field | #lang racket
(require redex
"../grammar.rkt"
"../util.rkt"
"lang.rkt"
"stack.rkt"
"heap.rkt"
"lease.rkt"
"test-store.rkt")
(provide traversal
traversal-e
swap-traversal
logical-write-traversal
read-traversal
owned-permission?
unique-permission?
traversal-address
access-permissions)
┌ ─ ─ ─ ─ ─ ─ ┐
pair ─ ─ ─ ─ ─ ┤ ] │ ┌ ─ ─ ─ ─ ─ ─ ─ ┐
│ │ │ x ─ ─ ─ ─ ┼ ─ ► a4 = 22
│ │ │ │ y ─ ─ ─ ─ ┼ ─ ► a5 = 44
└ ─ ─ ─ ─ ─ ─ ┘ ─ ─ ─ ─ ─ ┘
► │ [ Point ] │
│ x ─ ─ ─ ─ ┼ ─ ► a6 = 66
│ y ─ ─ ─ ─ ┼ ─ ► a7 = 88
(define-metafunction Dada
traversal : program Store place-at-rest -> Traversal-e or expired
[(traversal program Store place-at-rest)
Traversal
(where Traversal (traversal-e program Store place-at-rest))
]
[(traversal program Store place-at-rest)
expired
]
)
(define-metafunction Dada
traversal-e : program Store place-at-rest -> Traversal-e or expired
[(traversal-e program Store (x f ...))
(traverse-fields program Store (x = Value) (f ...))
(where Value (var-in-store Store x))
]
)
(define-metafunction Dada
traverse-fields : program Store Traversal-e (f ...) -> Traversal-e or expired
[(traverse-fields program Store Traversal-e ())
Traversal-e
]
[(traverse-fields program Store (_ = expired) (f_0 f_1 ...))
expired
]
[(traverse-fields program Store Traversal (f_0 f_1 ...))
(traverse-fields program Store ((Traversal f_0 mutability) = Box-value) (f_1 ...))
(where (Box-value mutability) (field-from-traversal program Store Traversal f_0))
]
)
(define-metafunction Dada
field-from-traversal : program Store Traversal f -> (Value mutability)
[(field-from-traversal program Store Traversal f_0)
(select-field program Unboxed-value f_0)
(where/error Address (traversal-address Traversal))
(where/error Unboxed-value (load-heap Store Address))
]
)
(define-metafunction Dada
select-field : program Unboxed-value f -> (Value mutability)
[(select-field program ((class c) (_ ... (f Value) _ ...)) f)
(Value (class-field-mutability program c f))
]
)
(define-metafunction Dada
access-permissions : Traversal-e -> Access-permissions
[(access-permissions Traversal-e)
(access-permissions-for-traversal Traversal-e (my () ()))
]
)
(define-metafunction Dada
access-permissions-for-traversal : Traversal-e Access-permissions -> Access-permissions
[(access-permissions-for-traversal (Traversal-origin = expired) Access-permissions)
(access-permissions-for-traversal-origin Traversal-origin Access-permissions)
]
[(access-permissions-for-traversal (Traversal-origin = (my box _)) Access-permissions)
(access-permissions-for-traversal-origin Traversal-origin Access-permissions)
]
[(access-permissions-for-traversal (Traversal-origin = ((lent Lease) box _)) (Owned-kind atomic? Leases))
(access-permissions-for-traversal-origin Traversal-origin (Owned-kind atomic? (add-lease-to-leases Lease Leases)))
]
[(access-permissions-for-traversal (Traversal-origin = (our box _)) (_ atomic? Leases))
(our atomic? Leases)
]
[(access-permissions-for-traversal (Traversal-origin = ((shared Lease) box _)) (_ atomic? Leases))
(our atomic? (add-lease-to-leases Lease Leases))
]
)
(define-metafunction Dada
access-permissions-for-traversal-origin : Traversal-origin Access-permissions -> Access-permissions
[(access-permissions-for-traversal-origin x Access-permissions)
Access-permissions
]
[(access-permissions-for-traversal-origin (Traversal _ var) Access-permissions)
(access-permissions-for-traversal Traversal Access-permissions)
]
[(access-permissions-for-traversal-origin (Traversal _ shared) (_ atomic? Leases))
(access-permissions-for-traversal Traversal (our atomic? Leases))
]
[(access-permissions-for-traversal-origin (Traversal _ atomic) (Owned-kind _ Leases))
(access-permissions-for-traversal Traversal (Owned-kind (atomic) Leases))
]
)
(define-metafunction Dada
swap-traversal : Store Traversal-e Value -> (Fallible-actions Value_old)
(swap-traversal Store (x = Value_old) Value_new)
(((update-local x Value_new)) Value_old)
]
(swap-traversal Store (Traversal-origin = Value_old) Value_new)
((Fallible-action ... (update-address Address Unboxed-value_new)) Value_old)
(where/error ((_ = (_ box Address)) f _) Traversal-origin)
(where/error (Fallible-action ...) (write-traversal-origin Store Traversal-origin))
(where/error Unboxed-value_old (load-heap Store Address))
(where/error Unboxed-value_new (replace-field Unboxed-value_old f Value_new))
]
)
(define-metafunction Dada
logical-write-traversal : Store Traversal -> (Fallible-actions Box-value)
[(logical-write-traversal Store (Traversal-origin = Box-value))
(swap-traversal Store (Traversal-origin = Box-value) Box-value)
]
)
(define-metafunction Dada
replace-field : Unboxed-value f Value -> Unboxed-value
[(replace-field Unboxed-value f Value)
(Aggregate-id (Field-value_0 ... (f Value) Field-value_1 ...))
(where/error (Aggregate-id (Field-value_0 ... (f Value_old) Field-value_1 ...)) Unboxed-value)]
)
(define-metafunction Dada
write-traversal-origin : Store Traversal-origin -> Fallible-actions
(write-traversal-origin Store x)
()]
(write-traversal-origin Store (Traversal f shared))
(expired)]
(write-traversal-origin Store (Traversal f var))
(expired)
(where/error (Traversal-origin = (Permission box Address)) Traversal)
(where #f (unique-permission? Permission))
]
(write-traversal-origin Store (Traversal f var))
(Fallible-action ... (write-address Permission Address))
(where/error (Traversal-origin = (Permission box Address)) Traversal)
(where #t (unique-permission? Permission))
(where (Fallible-action ...) (write-traversal-origin Store Traversal-origin))
]
(write-traversal-origin Store (Traversal f atomic))
(read-traversal-origin Store (Traversal f atomic))
]
)
(define-metafunction Dada
unique-permission? : Permission -> boolean
[(unique-permission? my) #t]
[(unique-permission? (lent _)) #t]
[(unique-permission? (shared _)) #f]
[(unique-permission? our) #f]
)
(define-metafunction Dada
owned-permission? : Permission -> boolean
[(owned-permission? my) #t]
[(owned-permission? our) #t]
[(owned-permission? (lent _)) #f]
[(owned-permission? (shared _)) #f]
)
(define-metafunction Dada
traversal-address : Traversal -> Address
[(traversal-address (_ = (_ box Address))) Address]
)
(define-metafunction Dada
read-traversal : Store Traversal -> (Fallible-actions Box-value)
[(read-traversal Store Traversal)
(Fallible-actions Box-value)
(where/error (Traversal-origin = Box-value) Traversal)
(where/error Fallible-actions (read-traversal-origin Store Traversal-origin))
]
)
(define-metafunction Dada
read-traversal-origin : Store Traversal-origin -> Fallible-actions
(read-traversal-origin Store x)
()]
(read-traversal-origin Store (Traversal f _))
(Fallible-action ... (read-address Permission Address))
(where/error (Traversal-origin = (Permission box Address)) Traversal)
(where (Fallible-action ...) (read-traversal-origin Store Traversal-origin))
]
)
(module+ test
(redex-let*
Dada
(Store (term (test-store
[(pair (my box a1))
(sh-p (my box a9))]
[(a1 (box 1 ((class Pair) ((a (my box a2)) (b (my box a3))))))
(a2 (box 1 ((class Point) ((x (our box a4)) (y (our box a5))))))
(a3 (box 1 ((class Point) ((x (my box a6)) (y (my box a7))))))
(a4 (box 2 22))
(a5 (box 2 44))
(a6 (box 1 66))
(a7 (box 1 88))
(a8 (box 1 99))
(a9 (box 1 ((class ShPoint) ((x (our box a4)) (y (our box a5))))))
]
[]
)))
(Traversal_pair_a_x (term (traversal program_test Store (pair a x))))
(Traversal_pair (term (traversal program_test Store (pair))))
(Traversal_sh-p_x (term (traversal program_test Store (sh-p x))))
]
(test-equal-terms Traversal_pair_a_x
(((((pair = (my box a1)) a var) = (my box a2)) x var)
=
(our box a4)))
test-equal-terms (swap-traversal Store Traversal_pair_a_x (my box a8))
(((write-address my a1)
(write-address my a2)
(update-address
a2
((class Point) ((x (my box a8)) (y (our box a5))))))
(our box a4)))
test-equal-terms (swap-traversal Store Traversal_pair (my box a8))
(((update-local pair (my box a8))) (my box a1)))
test-equal-terms (swap-traversal Store Traversal_sh-p_x (my box a8))
((expired
(update-address
a9
((class ShPoint) ((x (my box a8)) (y (our box a5))))))
(our box a4)))
test-equal-terms (read-traversal Store Traversal_sh-p_x)
(((read-address my a9)) (our box a4)))
)
) |
f6f5d8e57b638076431c4c1e12b444a76e013e5e6788d224c16baa13f337f6da | baskeboler/cljs-karaoke-client | seek_buttons.cljs | (ns cljs-karaoke.views.seek-buttons
(:require [reagent.core :as reagent :refer [atom]]
[re-frame.core :as rf]
[stylefy.core :as stylefy]
[cljs-karaoke.subs :as s]
[cljs-karaoke.events.views :as view-events]))
(declare right-seek-hotspot)
(defn right-seek-hotspot []
[:a.right-seek-hotspot
{:on-click #(rf/dispatch [::view-events/show-seek-buttons])}
" "])
(defn right-seek-component [seek-fn]
(let [visible (rf/subscribe [::s/seek-buttons-visible?])]
(if @visible
[:a.right-seek-btn
{:on-click seek-fn}
[:i.fas.fa-forward]]
[right-seek-hotspot])))
(defn left-seek-hotspot []
[:a.left-seek-hotspot
{:on-click #(rf/dispatch [::view-events/show-seek-buttons])}
" "])
(defn left-seek-component [seek-fn]
(let [visible (rf/subscribe [::s/seek-buttons-visible?])]
(if @visible
[:a.left-seek-btn
{:on-click seek-fn}
[:i.fas.fa-backward]]
[left-seek-hotspot])))
(defn seek-component [fw-fn bw-fn]
[:div
[left-seek-component bw-fn]
[right-seek-component fw-fn]])
| null | https://raw.githubusercontent.com/baskeboler/cljs-karaoke-client/bb6512435eaa436d35034886be99213625847ee0/src/main/cljs_karaoke/views/seek_buttons.cljs | clojure | (ns cljs-karaoke.views.seek-buttons
(:require [reagent.core :as reagent :refer [atom]]
[re-frame.core :as rf]
[stylefy.core :as stylefy]
[cljs-karaoke.subs :as s]
[cljs-karaoke.events.views :as view-events]))
(declare right-seek-hotspot)
(defn right-seek-hotspot []
[:a.right-seek-hotspot
{:on-click #(rf/dispatch [::view-events/show-seek-buttons])}
" "])
(defn right-seek-component [seek-fn]
(let [visible (rf/subscribe [::s/seek-buttons-visible?])]
(if @visible
[:a.right-seek-btn
{:on-click seek-fn}
[:i.fas.fa-forward]]
[right-seek-hotspot])))
(defn left-seek-hotspot []
[:a.left-seek-hotspot
{:on-click #(rf/dispatch [::view-events/show-seek-buttons])}
" "])
(defn left-seek-component [seek-fn]
(let [visible (rf/subscribe [::s/seek-buttons-visible?])]
(if @visible
[:a.left-seek-btn
{:on-click seek-fn}
[:i.fas.fa-backward]]
[left-seek-hotspot])))
(defn seek-component [fw-fn bw-fn]
[:div
[left-seek-component bw-fn]
[right-seek-component fw-fn]])
| |
a6fc41633f5e3270e4f8b0e4ddd92e929010c8e384c0365d5a15b2bc6ebf35b0 | Plutonomicon/Shrinker | Spec.hs | module Main (main) where
import Test.Tasty (defaultMain, localOption, testGroup)
import Test.Tasty.Hedgehog (HedgehogTestLimit (HedgehogTestLimit))
import UnitTests (makeUnitTests)
main :: IO ()
main = do
unitTests <- makeUnitTests
defaultMain $
testGroup
"shrinker tests"
[ localOption (HedgehogTestLimit (Just 1)) unitTests
]
| null | https://raw.githubusercontent.com/Plutonomicon/Shrinker/3347923943ec87707cdc53c268ba7fe20577c79b/unit-testing/spec/Spec.hs | haskell | module Main (main) where
import Test.Tasty (defaultMain, localOption, testGroup)
import Test.Tasty.Hedgehog (HedgehogTestLimit (HedgehogTestLimit))
import UnitTests (makeUnitTests)
main :: IO ()
main = do
unitTests <- makeUnitTests
defaultMain $
testGroup
"shrinker tests"
[ localOption (HedgehogTestLimit (Just 1)) unitTests
]
| |
990bc80e7875b9a02ec71bde2d32d156162403ec1bcbf13383f71a1960d92634 | Kappa-Dev/KappaTools | color.mli | type color = Red | Grey | Lightblue | Black
val triple_of_color: color -> int * int * int
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/5e756eb3529db9976cf0a0884a22676925985978/core/dataStructures/color.mli | ocaml | type color = Red | Grey | Lightblue | Black
val triple_of_color: color -> int * int * int
| |
0a15b05a0a4549157385210c8adfd6ea5de674da3a8993e12a201e2a15796bf5 | camdez/honeybadger | core.clj | (ns honeybadger.core
(:require [aleph.http :as http]
[byte-streams :as bs]
[clj-stacktrace.core :as st]
[clj-stacktrace.repl :as st-repl]
[clojure.data.json :as json]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.walk :refer [keywordize-keys]]
[honeybadger.schemas :refer [Event EventFilter]]
[honeybadger.utils
:refer
[deep-merge some-chain underscore update-contained-in]]
[manifold.deferred :as d]
[schema.core :as s]))
(def notifier-name
"Honeybadger for Clojure")
(def notifier-version
"0.4.2-SNAPSHOT")
(def notifier-homepage
"")
(def api-endpoint
"")
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- ex-chain
"Return all exceptions in the exception chain."
[e]
(->> e
(iterate #(when (some? %) (.getCause %)))
(take-while some?)))
(defn- format-stacktrace-elem [{:keys [line file] :as elem}]
{:number line
:file file
:method (st-repl/method-str elem)})
(defn- format-stacktrace [st]
(->> st st/parse-trace-elems (map format-stacktrace-elem)))
(defn- error-map-no-causes
"Return a map representing a Honeybadger error without causes."
[^Throwable t]
{:message (str t)
:class (.. t getClass getName)
:backtrace (format-stacktrace (.getStackTrace t))})
(defprotocol Notifiable
(error-map [this]))
(extend-protocol Notifiable
String
(error-map [this] {:message this})
Throwable
(error-map [this]
(let [[final & causes] (->> this ex-chain (map error-map-no-causes))]
(assoc final :causes causes))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def hostname
(.getHostName (java.net.InetAddress/getLocalHost)))
(def project-root
(.getCanonicalPath (io/file ".")))
(defn- base-notice [environment]
{:notifier {:name notifier-name
:language "clojure"
:version notifier-version
:url notifier-homepage}
:server {:project-root project-root
:environment-name environment
:hostname hostname}})
(defn- error-patch [notifiable]
{:error (error-map notifiable)})
(defn- metadata-patch [{:keys [tags context component action request]}]
(let [{:keys [method url params session]} request]
{:error {:tags tags}
:request {:url url
:component component
:action action
:params params
:context (or context {}) ; displays differently if nil
:session session
:cgi-data (some->> method
name
str/upper-case
(array-map "REQUEST_METHOD"))}}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- post-notice [n api-key]
(d/chain (http/post api-endpoint
{:accept :json
:content-type :json
:headers {"X-API-Key" api-key}
:body (json/write-str n :key-fn underscore)})
:body
bs/to-string
#(json/read-str % :key-fn keyword)
:id))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/defn ^:private normalize-event :- Event
"Normalize data to a standard form that user-provided filters can
make sense of and transform."
[api-key env msg-or-ex metadata]
(-> {:api-key api-key
:env (keyword env)
:exception msg-or-ex
:metadata metadata}
keywordize-keys
(update-contained-in [:metadata :tags] #(set (map keyword %)))
(update-contained-in [:metadata :request :method] keyword)
(update-contained-in [:metadata :request :params] #(or % {}))
(update-contained-in [:metadata :request :session] #(or % {}))
(->> (deep-merge {:metadata {:tags #{}
:request {}
:context {}
:component nil
:action nil}}))))
(s/defn ^:private apply-filters :- (s/maybe Event)
"Successively apply all transformation functions in `filters` to
exception details, halting the chain if any filter returns nil."
[filters :- [EventFilter]
event :- Event]
(some-chain event filters))
(s/defn ^:private event->notice
"Convert data to the appropriate form for the Honeybadger API.
See #sample-payload for
the error schema."
[{:keys [env exception metadata]} :- Event]
(deep-merge (base-notice env)
(error-patch exception)
(metadata-patch metadata)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Public
(defn notify
([config msg-or-ex]
(notify config msg-or-ex {}))
([{:keys [api-key env filters]} msg-or-ex metadata]
(if-let [e (->> (normalize-event api-key env msg-or-ex metadata)
(apply-filters filters))]
(post-notice (event->notice e) api-key)
(d/success-deferred nil))))
| null | https://raw.githubusercontent.com/camdez/honeybadger/2b126f53d4f5fa4f2c38d9d4fe1ae718fe43c483/src/honeybadger/core.clj | clojure |
displays differently if nil
Public | (ns honeybadger.core
(:require [aleph.http :as http]
[byte-streams :as bs]
[clj-stacktrace.core :as st]
[clj-stacktrace.repl :as st-repl]
[clojure.data.json :as json]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.walk :refer [keywordize-keys]]
[honeybadger.schemas :refer [Event EventFilter]]
[honeybadger.utils
:refer
[deep-merge some-chain underscore update-contained-in]]
[manifold.deferred :as d]
[schema.core :as s]))
(def notifier-name
"Honeybadger for Clojure")
(def notifier-version
"0.4.2-SNAPSHOT")
(def notifier-homepage
"")
(def api-endpoint
"")
(defn- ex-chain
"Return all exceptions in the exception chain."
[e]
(->> e
(iterate #(when (some? %) (.getCause %)))
(take-while some?)))
(defn- format-stacktrace-elem [{:keys [line file] :as elem}]
{:number line
:file file
:method (st-repl/method-str elem)})
(defn- format-stacktrace [st]
(->> st st/parse-trace-elems (map format-stacktrace-elem)))
(defn- error-map-no-causes
"Return a map representing a Honeybadger error without causes."
[^Throwable t]
{:message (str t)
:class (.. t getClass getName)
:backtrace (format-stacktrace (.getStackTrace t))})
(defprotocol Notifiable
(error-map [this]))
(extend-protocol Notifiable
String
(error-map [this] {:message this})
Throwable
(error-map [this]
(let [[final & causes] (->> this ex-chain (map error-map-no-causes))]
(assoc final :causes causes))))
(def hostname
(.getHostName (java.net.InetAddress/getLocalHost)))
(def project-root
(.getCanonicalPath (io/file ".")))
(defn- base-notice [environment]
{:notifier {:name notifier-name
:language "clojure"
:version notifier-version
:url notifier-homepage}
:server {:project-root project-root
:environment-name environment
:hostname hostname}})
(defn- error-patch [notifiable]
{:error (error-map notifiable)})
(defn- metadata-patch [{:keys [tags context component action request]}]
(let [{:keys [method url params session]} request]
{:error {:tags tags}
:request {:url url
:component component
:action action
:params params
:session session
:cgi-data (some->> method
name
str/upper-case
(array-map "REQUEST_METHOD"))}}))
(defn- post-notice [n api-key]
(d/chain (http/post api-endpoint
{:accept :json
:content-type :json
:headers {"X-API-Key" api-key}
:body (json/write-str n :key-fn underscore)})
:body
bs/to-string
#(json/read-str % :key-fn keyword)
:id))
(s/defn ^:private normalize-event :- Event
"Normalize data to a standard form that user-provided filters can
make sense of and transform."
[api-key env msg-or-ex metadata]
(-> {:api-key api-key
:env (keyword env)
:exception msg-or-ex
:metadata metadata}
keywordize-keys
(update-contained-in [:metadata :tags] #(set (map keyword %)))
(update-contained-in [:metadata :request :method] keyword)
(update-contained-in [:metadata :request :params] #(or % {}))
(update-contained-in [:metadata :request :session] #(or % {}))
(->> (deep-merge {:metadata {:tags #{}
:request {}
:context {}
:component nil
:action nil}}))))
(s/defn ^:private apply-filters :- (s/maybe Event)
"Successively apply all transformation functions in `filters` to
exception details, halting the chain if any filter returns nil."
[filters :- [EventFilter]
event :- Event]
(some-chain event filters))
(s/defn ^:private event->notice
"Convert data to the appropriate form for the Honeybadger API.
See #sample-payload for
the error schema."
[{:keys [env exception metadata]} :- Event]
(deep-merge (base-notice env)
(error-patch exception)
(metadata-patch metadata)))
(defn notify
([config msg-or-ex]
(notify config msg-or-ex {}))
([{:keys [api-key env filters]} msg-or-ex metadata]
(if-let [e (->> (normalize-event api-key env msg-or-ex metadata)
(apply-filters filters))]
(post-notice (event->notice e) api-key)
(d/success-deferred nil))))
|
0260f7428147f2a806df84260ff10f45091df59cfea93fbef27e054b87d9f025 | fetburner/compelib | topologicalSort.mli | (* 無向グラフ *)
module type UnweightedDirectedGraph = sig
module Vertex : sig
type t
type set
(* グラフに含まれる頂点の集合 *)
val universe : set
(* 頂点に含まれる集合の畳み込み *)
val fold_universe : (t -> 'a -> 'a) -> 'a -> 'a
(* 隣接する頂点の畳み込み *)
val fold_adjacencies : t -> (t -> 'a -> 'a) -> 'a -> 'a
end
end
リスト
module type List = sig
type t
type elt
val nil : t
val cons : elt -> t -> t
end
module type Array = sig
type t
type elt
type key
type size
val make : size -> t
val get : t -> key -> elt
val set : t -> key -> elt -> unit
end
module F
(* 頂点を添字,真偽値を要素とした配列の実装
A.make は false で初期化された配列を返さなくてはならない *)
(A : Array with type elt = bool)
(* 頂点のリスト *)
(L : List with type elt = A.key)
: sig
type vertex = A.key
type vertices = A.size
(* トポロジカルソート *)
val sort :
(* 有向グラフ *)
(module UnweightedDirectedGraph
with type Vertex.t = vertex
and type Vertex.set = vertices) ->
頂点をトポロジカルソートしたリスト
L.t
end
| null | https://raw.githubusercontent.com/fetburner/compelib/d8fc5d9acd04e676c4d4d2ca9c6a7140f1b85670/lib/graph/topologicalSort.mli | ocaml | 無向グラフ
グラフに含まれる頂点の集合
頂点に含まれる集合の畳み込み
隣接する頂点の畳み込み
頂点を添字,真偽値を要素とした配列の実装
A.make は false で初期化された配列を返さなくてはならない
頂点のリスト
トポロジカルソート
有向グラフ | module type UnweightedDirectedGraph = sig
module Vertex : sig
type t
type set
val universe : set
val fold_universe : (t -> 'a -> 'a) -> 'a -> 'a
val fold_adjacencies : t -> (t -> 'a -> 'a) -> 'a -> 'a
end
end
リスト
module type List = sig
type t
type elt
val nil : t
val cons : elt -> t -> t
end
module type Array = sig
type t
type elt
type key
type size
val make : size -> t
val get : t -> key -> elt
val set : t -> key -> elt -> unit
end
module F
(A : Array with type elt = bool)
(L : List with type elt = A.key)
: sig
type vertex = A.key
type vertices = A.size
val sort :
(module UnweightedDirectedGraph
with type Vertex.t = vertex
and type Vertex.set = vertices) ->
頂点をトポロジカルソートしたリスト
L.t
end
|
becfbdd9241bb1d2580913f872dd5ffd322df64aa087a8d82cbf068e6edfa5c0 | kevsmith/giza | giza_datetime.erl | Copyright ( c ) 2009 Electronic Arts , Inc.
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
-module(giza_datetime).
-author("Kevin A. Smith <>").
-define(EPOCH_BASE, 62167219200).
-export([to_timestamp/1, from_timestamp/1]).
) - > Result
%% Now = {integer(), integer(), integer()}
%% Result = number()
@doc Encode the tuple returned from erlang : now/0 into a Unix epoch timestamp
to_timestamp({_, _, _}=Now) ->
to_timestamp(calendar:now_to_universal_time(Now));
to_timestamp(DateTime ) - > Result
%% DateTime = {{integer(), integer(), integer()}, {integer(), integer(), integer()}}
%% Result = number()
@doc Encode an Erlang datetime tuple into a Unix epoch timestamp
to_timestamp(DateTime) ->
TS = calendar:datetime_to_gregorian_seconds(DateTime),
TS - ?EPOCH_BASE.
from_timestamp(EpochTimestamp ) - > Result
%% EpochTimestamp = number()
%% Result = {{integer(), integer(), integer()}, {integer(), integer(), integer()}}
%% @doc Convert an Unix epoch timestamp into the equivalent Unix datetime tuple
from_timestamp(EpochTimestamp) ->
calendar:gregorian_seconds_to_datetime(EpochTimestamp + ?EPOCH_BASE).
| null | https://raw.githubusercontent.com/kevsmith/giza/576eada45ccff8d7fb688b8abe3a33c25d34028d/src/giza_datetime.erl | erlang | Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Now = {integer(), integer(), integer()}
Result = number()
DateTime = {{integer(), integer(), integer()}, {integer(), integer(), integer()}}
Result = number()
EpochTimestamp = number()
Result = {{integer(), integer(), integer()}, {integer(), integer(), integer()}}
@doc Convert an Unix epoch timestamp into the equivalent Unix datetime tuple | Copyright ( c ) 2009 Electronic Arts , Inc.
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-module(giza_datetime).
-author("Kevin A. Smith <>").
-define(EPOCH_BASE, 62167219200).
-export([to_timestamp/1, from_timestamp/1]).
) - > Result
@doc Encode the tuple returned from erlang : now/0 into a Unix epoch timestamp
to_timestamp({_, _, _}=Now) ->
to_timestamp(calendar:now_to_universal_time(Now));
to_timestamp(DateTime ) - > Result
@doc Encode an Erlang datetime tuple into a Unix epoch timestamp
to_timestamp(DateTime) ->
TS = calendar:datetime_to_gregorian_seconds(DateTime),
TS - ?EPOCH_BASE.
from_timestamp(EpochTimestamp ) - > Result
from_timestamp(EpochTimestamp) ->
calendar:gregorian_seconds_to_datetime(EpochTimestamp + ?EPOCH_BASE).
|
6e8e8f93d8a50e096cc9df4e2aec46ed66f22234e191155cb0b1cce326b8c3df | aeternity/aesophia | aeso_compiler_tests.erl | %%% -*- erlang-indent-level:4; indent-tabs-mode: nil -*-
%%%-------------------------------------------------------------------
( C ) 2018 , Aeternity Anstalt
%%% @doc Test Sophia language compiler.
%%%
%%% @end
%%%-------------------------------------------------------------------
-module(aeso_compiler_tests).
-compile([export_all, nowarn_export_all]).
-include_lib("eunit/include/eunit.hrl").
run_test(Test) ->
TestFun = list_to_atom(lists:concat([Test, "_test_"])),
[ begin
io:format("~s\n", [Label]),
Fun()
end || {Label, Fun} <- ?MODULE:TestFun() ],
ok.
%% Very simply test compile the given contracts. Only basic checks
%% are made on the output, just that it is a binary which indicates
%% that the compilation worked.
simple_compile_test_() ->
[ {"Testing the " ++ ContractName ++ " contract",
fun() ->
case compile(ContractName) of
#{fate_code := Code} ->
Code1 = aeb_fate_code:deserialize(aeb_fate_code:serialize(Code)),
?assertMatch({X, X}, {Code1, Code});
Error -> io:format("\n\n~p\n\n", [Error]), print_and_throw(Error)
end
end} || ContractName <- compilable_contracts()] ++
[ {"Test file not found error",
fun() ->
{error, Errors} = aeso_compiler:file("does_not_exist.aes"),
ExpErr = <<"File error:\ndoes_not_exist.aes: no such file or directory">>,
check_errors([ExpErr], Errors)
end} ] ++
[ {"Testing error messages of " ++ ContractName,
fun() ->
Errors = compile(ContractName, [warn_all, warn_error]),
check_errors(ExpectedErrors, Errors)
end} ||
{ContractName, ExpectedErrors} <- failing_contracts() ] ++
[ {"Testing include with explicit files",
fun() ->
FileSystem = maps:from_list(
[ begin
{ok, Bin} = file:read_file(filename:join([aeso_test_utils:contract_path(), File])),
{File, Bin}
end || File <- ["included.aes", "../contracts/included2.aes"] ]),
#{byte_code := Code1} = compile("include", [{include, {explicit_files, FileSystem}}]),
#{byte_code := Code2} = compile("include"),
?assertMatch(true, Code1 == Code2)
end} ] ++
[ {"Testing deadcode elimination",
fun() ->
#{ byte_code := NoDeadCode } = compile("nodeadcode"),
#{ byte_code := DeadCode } = compile("deadcode"),
SizeNoDeadCode = byte_size(NoDeadCode),
SizeDeadCode = byte_size(DeadCode),
Delta = 20,
?assertMatch({_, _, true}, {SizeDeadCode, SizeNoDeadCode, SizeDeadCode + Delta < SizeNoDeadCode}),
ok
end} ] ++
[ {"Testing warning messages",
fun() ->
#{ warnings := Warnings } = compile("warnings", [warn_all]),
check_warnings(warnings(), Warnings)
end} ] ++
[].
%% Check if all modules in the standard library compile
stdlib_test_() ->
{ok, Files} = file:list_dir(aeso_stdlib:stdlib_include_path()),
[ { "Testing " ++ File ++ " from the stdlib",
fun() ->
String = "include \"" ++ File ++ "\"\nmain contract Test =\n entrypoint f(x) = x",
Options = [{src_file, File}],
case aeso_compiler:from_string(String, Options) of
{ok, #{fate_code := Code}} ->
Code1 = aeb_fate_code:deserialize(aeb_fate_code:serialize(Code)),
?assertMatch({X, X}, {Code1, Code});
{error, Error} -> io:format("\n\n~p\n\n", [Error]), print_and_throw(Error)
end
end} || File <- Files,
lists:suffix(".aes", File)
].
check_errors(no_error, Actual) -> ?assertMatch(#{}, Actual);
check_errors(Expect, #{}) ->
?assertEqual({error, Expect}, ok);
check_errors(Expect0, Actual0) ->
Expect = lists:sort(Expect0),
Actual = [ list_to_binary(string:trim(aeso_errors:pp(Err))) || Err <- Actual0 ],
case {Expect -- Actual, Actual -- Expect} of
{[], Extra} -> ?assertMatch({unexpected, []}, {unexpected, Extra});
{Missing, []} -> ?assertMatch({missing, []}, {missing, Missing});
{Missing, Extra} -> ?assertEqual(Missing, Extra)
end.
check_warnings(Expect0, Actual0) ->
Expect = lists:sort(Expect0),
Actual = [ list_to_binary(string:trim(aeso_warnings:pp(Warn))) || Warn <- Actual0 ],
case {Expect -- Actual, Actual -- Expect} of
{[], Extra} -> ?assertMatch({unexpected, []}, {unexpected, Extra});
{Missing, []} -> ?assertMatch({missing, []}, {missing, Missing});
{Missing, Extra} -> ?assertEqual(Missing, Extra)
end.
compile(Name) ->
compile( Name, [{include, {file_system, [aeso_test_utils:contract_path()]}}]).
compile(Name, Options) ->
String = aeso_test_utils:read_contract(Name),
Options1 =
case lists:member(Name, debug_mode_contracts()) of
true -> [debug_mode];
false -> []
end ++
[ {src_file, Name ++ ".aes"}
, {include, {file_system, [aeso_test_utils:contract_path()]}}
] ++ Options,
case aeso_compiler:from_string(String, Options1) of
{ok, Map} -> Map;
{error, ErrorString} when is_binary(ErrorString) -> ErrorString;
{error, Errors} -> Errors
end.
%% compilable_contracts() -> [ContractName].
%% The currently compilable contracts.
compilable_contracts() ->
["complex_types",
"counter",
"dutch_auction",
"environment",
"factorial",
"functions",
"fundme",
"identity",
"maps",
"oracles",
"remote_call",
"remote_call_ambiguous_record",
"simple",
"simple_storage",
"spend_test",
"stack",
"test",
"builtin_bug",
"builtin_map_get_bug",
"lc_record_bug",
"nodeadcode",
"deadcode",
"variant_types",
"state_handling",
"events",
"include",
"basic_auth",
"basic_auth_tx",
"bitcoin_auth",
"address_literals",
"bytes_equality",
"address_chain",
"namespace_bug",
"bytes_to_x",
"bytes_concat",
"aens",
"aens_update",
"tuple_match",
"cyclic_include",
"stdlib_include",
"double_include",
"manual_stdlib_include",
"list_comp",
"payable",
"unapplied_builtins",
"underscore_number_literals",
"pairing_crypto",
"qualified_constructor",
"let_patterns",
"lhs_matching",
"more_strings",
"protected_call",
"hermetization_turnoff",
"multiple_contracts",
"clone",
"clone_simple",
"create",
"child_contract_init_bug",
"using_namespace",
"assign_patterns",
"patterns_guards",
"pipe_operator",
"polymorphism_contract_implements_interface",
"polymorphism_contract_multi_interface",
"polymorphism_contract_interface_extends_interface",
"polymorphism_contract_interface_extensions",
"polymorphism_contract_interface_same_decl_multi_interface",
"polymorphism_contract_interface_same_name_same_type",
"polymorphism_variance_switching_chain_create",
"polymorphism_variance_switching_void_supertype",
"polymorphism_variance_switching_unify_with_interface_decls",
"polymorphism_preserve_or_add_payable_contract",
"polymorphism_preserve_or_add_payable_entrypoint",
"polymorphism_preserve_or_remove_stateful_entrypoint",
"missing_init_fun_state_unit",
"complex_compare_leq",
"complex_compare",
"higher_order_compare",
"higher_order_map_keys",
"higher_order_state",
"polymorphic_compare",
"polymorphic_entrypoint",
"polymorphic_entrypoint_return",
"polymorphic_map_keys",
"unapplied_contract_call",
"unapplied_named_arg_builtin",
"resolve_field_constraint_by_arity",
"test" % Custom general-purpose test file. Keep it last on the list.
].
debug_mode_contracts() ->
["hermetization_turnoff"].
%% Contracts that should produce type errors
-define(Pos(Kind, File, Line, Col), (list_to_binary(Kind))/binary, " error in '",
(list_to_binary(File))/binary, ".aes' at line " ??Line ", col " ??Col ":\n").
-define(Pos(Line, Col), ?Pos(__Kind, __File, Line, Col)).
-define(ERROR(Kind, Name, Errs),
(fun() ->
__Kind = Kind,
__File = ??Name,
{__File, Errs}
end)()).
-define(TYPE_ERROR(Name, Errs), ?ERROR("Type", Name, Errs)).
-define(PARSE_ERROR(Name, Errs), ?ERROR("Parse", Name, Errs)).
-define(PosW(Kind, File, Line, Col), (list_to_binary(Kind))/binary, " in '",
(list_to_binary(File))/binary, ".aes' at line " ??Line ", col " ??Col ":\n").
-define(PosW(Line, Col), ?PosW(__Kind, __File, Line, Col)).
-define(WARNING(Name, Warns),
(fun() ->
__Kind = "Warning",
__File = ??Name,
Warns
end)()).
warnings() ->
?WARNING(warnings,
[<<?PosW(0, 0)
"The file `Triple.aes` is included but not used.">>,
<<?PosW(13, 3)
"The function `h` is defined but never used.">>,
<<?PosW(19, 3)
"The type `unused_type` is defined but never used.">>,
<<?PosW(23, 54)
"Negative spend.">>,
<<?PosW(27, 9)
"The definition of `x` shadows an older definition at line 26, column 9.">>,
<<?PosW(30, 36)
"Division by zero.">>,
<<?PosW(32, 3)
"The function `unused_stateful` is unnecessarily marked as stateful.">>,
<<?PosW(35, 31)
"The variable `unused_arg` is defined but never used.">>,
<<?PosW(36, 9)
"The variable `unused_var` is defined but never used.">>,
<<?PosW(41, 3)
"The function `unused_function` is defined but never used.">>,
<<?PosW(42, 3)
"The function `recursive_unused_function` is defined but never used.">>,
<<?PosW(43, 3)
"The function `called_unused_function1` is defined but never used.">>,
<<?PosW(44, 3)
"The function `called_unused_function2` is defined but never used.">>,
<<?PosW(48, 5)
"Unused return value.">>,
<<?PosW(60, 5)
"The function `dec` is defined but never used.">>
]).
failing_contracts() ->
{ok, V} = aeso_compiler:numeric_version(),
Version = list_to_binary(string:join([integer_to_list(N) || N <- V], ".")),
Parse errors
[ ?PARSE_ERROR(field_parse_error,
[<<?Pos(5, 26)
"Cannot use nested fields or keys in record construction: p.x">>])
, ?PARSE_ERROR(vsemi, [<<?Pos(3, 3) "Unexpected indentation. Did you forget a '}'?">>])
, ?PARSE_ERROR(vclose, [<<?Pos(4, 3) "Unexpected indentation. Did you forget a ']'?">>])
, ?PARSE_ERROR(indent_fail, [<<?Pos(3, 2) "Unexpected token 'entrypoint'.">>])
, ?PARSE_ERROR(assign_pattern_to_pattern, [<<?Pos(3, 22) "Unexpected token '='.">>])
%% Type errors
, ?TYPE_ERROR(name_clash,
[<<?Pos(4, 3)
"Duplicate definitions of `double_def` at\n"
" - line 3, column 3\n"
" - line 4, column 3">>,
<<?Pos(7, 3)
"Duplicate definitions of `abort` at\n"
" - (builtin location)\n"
" - line 7, column 3">>,
<<?Pos(8, 3)
"Duplicate definitions of `require` at\n"
" - (builtin location)\n"
" - line 8, column 3">>,
<<?Pos(9, 3)
"Duplicate definitions of `put` at\n"
" - (builtin location)\n"
" - line 9, column 3">>,
<<?Pos(10, 3)
"Duplicate definitions of `state` at\n"
" - (builtin location)\n"
" - line 10, column 3">>])
, ?TYPE_ERROR(type_errors,
[<<?Pos(17, 23)
"Unbound variable `zz`">>,
<<?Pos(26, 9)
"Cannot unify `int` and `list(int)`\n"
"when checking the application of\n"
" `(::) : (int, list(int)) => list(int)`\n"
"to arguments\n"
" `x : int`\n"
" `x : int`">>,
<<?Pos(9, 48)
"Cannot unify `string` and `int`\n"
"when checking the assignment of the field `x : map(string, string)` "
"to the old value `__x` and the new value `__x {[\"foo\"] @ x = x + 1} : map(string, int)`">>,
<<?Pos(34, 47)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `1 : int` "
"against the expected type `string`">>,
<<?Pos(34, 52)
"Cannot unify `string` and `int`\n"
"when checking the type of the expression `\"bla\" : string` "
"against the expected type `int`">>,
<<?Pos(32, 18)
"Cannot unify `string` and `int`\n"
"when checking the type of the expression `\"x\" : string` "
"against the expected type `int`">>,
<<?Pos(11, 58)
"Cannot unify `string` and `int`\n"
"when checking the type of the expression `\"foo\" : string` "
"against the expected type `int`">>,
<<?Pos(38, 13)
"Cannot unify `int` and `string`\n"
"when comparing the types of the if-branches\n"
" - w : int (at line 38, column 13)\n"
" - z : string (at line 39, column 10)">>,
<<?Pos(22, 40)
"Not a record type: `string`\n"
"arising from the projection of the field `y`">>,
<<?Pos(21, 44)
"Not a record type: `string`\n"
"arising from an assignment of the field `y`">>,
<<?Pos(20, 40)
"Not a record type: `string`\n"
"arising from an assignment of the field `y`">>,
<<?Pos(19, 37)
"Not a record type: `string`\n"
"arising from an assignment of the field `y`">>,
<<?Pos(13, 27)
"Ambiguous record type with field `y` could be one of\n"
" - `r` (at line 4, column 10)\n"
" - `r'` (at line 5, column 10)">>,
<<?Pos(26, 7)
"Repeated name `x` in the pattern `x :: x`">>,
<<?Pos(44, 14)
"Repeated names `x`, `y` in the pattern `(x : int, y, x : string, y : bool)`">>,
<<?Pos(44, 39)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `x : int` "
"against the expected type `string`">>,
<<?Pos(44, 72)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `x : int` "
"against the expected type `string`">>,
<<?Pos(14, 24)
"No record type with fields `y`, `z`">>,
<<?Pos(15, 26)
"The field `z` is missing when constructing an element of type `r2`">>,
<<?Pos(15, 24)
"Record type `r2` does not have field `y`">>,
<<?Pos(47, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(50, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(54, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(58, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(63, 5)
"Cannot unify `int` and `bool`\n"
"when checking the type of the expression `id(n) : int` "
"against the expected type `bool`">>])
, ?TYPE_ERROR(init_type_error,
[<<?Pos(7, 3)
"Cannot unify `string` and `map(int, int)`\n"
"when checking that `init` returns a value of type `state`">>])
, ?TYPE_ERROR(missing_state_type,
[<<?Pos(5, 3)
"Cannot unify `string` and `unit`\n"
"when checking that `init` returns a value of type `state`">>])
, ?TYPE_ERROR(missing_fields_in_record_expression,
[<<?Pos(7, 42)
"The field `x` is missing when constructing an element of type `r('a)`">>,
<<?Pos(8, 42)
"The field `y` is missing when constructing an element of type `r(int)`">>,
<<?Pos(6, 42)
"The fields `y`, `z` are missing when constructing an element of type `r('a)`">>])
, ?TYPE_ERROR(namespace_clash_builtin,
[<<?Pos(4, 10)
"The contract `Call` has the same name as a namespace at (builtin location)">>])
, ?TYPE_ERROR(namespace_clash_included,
[<<?Pos(5, 11)
"The namespace `BLS12_381` has the same name as a namespace at line 1, column 11 in BLS12_381.aes">>])
, ?TYPE_ERROR(namespace_clash_same_file,
[<<?Pos(4, 11)
"The namespace `Nmsp` has the same name as a namespace at line 1, column 11">>])
, ?TYPE_ERROR(bad_events,
[<<?Pos(9, 25)
"The indexed type `string` is not a word type">>,
<<?Pos(10, 25)
"The indexed type `alias_string` equals `string` which is not a word type">>])
, ?TYPE_ERROR(bad_events2,
[<<?Pos(9, 7)
"The event constructor `BadEvent1` has too many non-indexed values (max 1)">>,
<<?Pos(10, 7)
"The event constructor `BadEvent2` has too many indexed values (max 3)">>])
, ?TYPE_ERROR(type_clash,
[<<?Pos(12, 42)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `r.foo() : map(int, string)` "
"against the expected type `map(string, int)`">>])
, ?TYPE_ERROR(not_toplevel_include,
[<<?Pos(2, 11)
"Include of `included.aes` is not allowed, include only allowed at top level.">>])
, ?TYPE_ERROR(not_toplevel_namespace,
[<<?Pos(2, 13)
"Nested namespaces are not allowed. Namespace `Foo` is not defined at top level.">>])
, ?TYPE_ERROR(not_toplevel_contract,
[<<?Pos(2, 12)
"Nested contracts are not allowed. Contract `Con` is not defined at top level.">>])
, ?TYPE_ERROR(bad_address_literals,
[<<?Pos(11, 5)
"Cannot unify `address` and `oracle(int, bool)`\n"
"when checking the type of the expression `ak_2gx9MEFxKvY9vMG5YnqnXWv1hCsX7rgnfvBLJS4aQurustR1rt : address` "
"against the expected type `oracle(int, bool)`">>,
<<?Pos(9, 5)
"Cannot unify `address` and `Remote`\n"
"when checking the type of the expression `ak_2gx9MEFxKvY9vMG5YnqnXWv1hCsX7rgnfvBLJS4aQurustR1rt : address` "
"against the expected type `Remote`">>,
<<?Pos(7, 5)
"Cannot unify `address` and `bytes(32)`\n"
"when checking the type of the expression `ak_2gx9MEFxKvY9vMG5YnqnXWv1hCsX7rgnfvBLJS4aQurustR1rt : address` "
"against the expected type `bytes(32)`">>,
<<?Pos(14, 5)
"Cannot unify `oracle('a, 'b)` and `oracle_query(int, bool)`\n"
"when checking the type of the expression "
"`ok_2YNyxd6TRJPNrTcEDCe9ra59SVUdp9FR9qWC5msKZWYD9bP9z5 : oracle('a, 'b)` "
"against the expected type `oracle_query(int, bool)`">>,
<<?Pos(16, 5)
"Cannot unify `oracle('c, 'd)` and `bytes(32)`\n"
"when checking the type of the expression "
"`ok_2YNyxd6TRJPNrTcEDCe9ra59SVUdp9FR9qWC5msKZWYD9bP9z5 : oracle('c, 'd)` "
"against the expected type `bytes(32)`">>,
<<?Pos(18, 5)
"Cannot unify `oracle('e, 'f)` and `Remote`\n"
"when checking the type of the expression "
"`ok_2YNyxd6TRJPNrTcEDCe9ra59SVUdp9FR9qWC5msKZWYD9bP9z5 : oracle('e, 'f)` "
"against the expected type `Remote`">>,
<<?Pos(21, 5)
"Cannot unify `oracle_query('g, 'h)` and `oracle(int, bool)`\n"
"when checking the type of the expression "
"`oq_2oRvyowJuJnEkxy58Ckkw77XfWJrmRgmGaLzhdqb67SKEL1gPY : oracle_query('g, 'h)` "
"against the expected type `oracle(int, bool)`">>,
<<?Pos(23, 5)
"Cannot unify `oracle_query('i, 'j)` and `bytes(32)`\n"
"when checking the type of the expression "
"`oq_2oRvyowJuJnEkxy58Ckkw77XfWJrmRgmGaLzhdqb67SKEL1gPY : oracle_query('i, 'j)` "
"against the expected type `bytes(32)`">>,
<<?Pos(25, 5)
"Cannot unify `oracle_query('k, 'l)` and `Remote`\n"
"when checking the type of the expression "
"`oq_2oRvyowJuJnEkxy58Ckkw77XfWJrmRgmGaLzhdqb67SKEL1gPY : oracle_query('k, 'l)` "
"against the expected type `Remote`">>,
<<?Pos(28, 5)
"The type `address` is not a contract type\n"
"when checking that the contract literal "
"`ct_Ez6MyeTMm17YnTnDdHTSrzMEBKmy7Uz2sXu347bTDPgVH2ifJ` "
"has the type `address`">>,
<<?Pos(30, 5)
"The type `oracle(int, bool)` is not a contract type\n"
"when checking that the contract literal "
"`ct_Ez6MyeTMm17YnTnDdHTSrzMEBKmy7Uz2sXu347bTDPgVH2ifJ` "
"has the type `oracle(int, bool)`">>,
<<?Pos(32, 5)
"The type `bytes(32)` is not a contract type\n"
"when checking that the contract literal "
"`ct_Ez6MyeTMm17YnTnDdHTSrzMEBKmy7Uz2sXu347bTDPgVH2ifJ` "
"has the type `bytes(32)`">>,
<<?Pos(34, 5),
"The type `address` is not a contract type\n"
"when checking that the call to `Address.to_contract` "
"has the type `address`">>])
, ?TYPE_ERROR(stateful,
[<<?Pos(13, 35)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail1`.">>,
<<?Pos(14, 35)
"Cannot reference stateful function `local_spend` in the definition of non-stateful function `fail2`.">>,
<<?Pos(16, 15)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail3`.">>,
<<?Pos(20, 31)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail4`.">>,
<<?Pos(35, 47)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail5`.">>,
<<?Pos(48, 57)
"Cannot pass non-zero value argument `1000` in the definition of non-stateful function `fail6`.">>,
<<?Pos(49, 56)
"Cannot pass non-zero value argument `1000` in the definition of non-stateful function `fail7`.">>,
<<?Pos(52, 17)
"Cannot pass non-zero value argument `1000` in the definition of non-stateful function `fail8`.">>])
, ?TYPE_ERROR(bad_init_state_access,
[<<?Pos(11, 5)
"The `init` function should return the initial state as its result and cannot write the state, "
"but it calls\n"
" - `set_state` (at line 11, column 5), which calls\n"
" - `roundabout` (at line 8, column 38), which calls\n"
" - `put` (at line 7, column 39)">>,
<<?Pos(12, 5)
"The `init` function should return the initial state as its result and cannot read the state, "
"but it calls\n"
" - `new_state` (at line 12, column 5), which calls\n"
" - `state` (at line 5, column 29)">>,
<<?Pos(13, 13)
"The `init` function should return the initial state as its result and cannot read the state, "
"but it calls\n"
" - `state` (at line 13, column 13)">>])
, ?TYPE_ERROR(modifier_checks,
[<<?Pos(11, 3)
"The function `all_the_things` cannot be both public and private.">>,
<<?Pos(3, 3)
"Namespaces cannot contain entrypoints. Use `function` instead.">>,
<<?Pos(5, 10)
"The contract `Remote` has no entrypoints. Since Sophia version 3.2, "
"public contract functions must be declared with the `entrypoint` "
"keyword instead of `function`.">>,
<<?Pos(12, 3)
"The entrypoint `wha` cannot be private. Use `function` instead.">>,
<<?Pos(6, 3)
"Use `entrypoint` for declaration of `foo`: `entrypoint foo : () => unit`">>,
<<?Pos(10, 3)
"Use `entrypoint` instead of `function` for public function `foo`: `entrypoint foo() = ()`">>,
<<?Pos(6, 3)
"Use `entrypoint` instead of `function` for public function `foo`: `entrypoint foo : () => unit`">>])
, ?TYPE_ERROR(list_comp_not_a_list,
[<<?Pos(2, 36)
"Cannot unify `int` and `list('a)`\n"
"when checking rvalue of list comprehension binding `1 : int` against type `list('a)`">>
])
, ?TYPE_ERROR(list_comp_if_not_bool,
[<<?Pos(2, 44)
"Cannot unify `int` and `bool`\n"
"when checking the type of the expression `3 : int` against the expected type `bool`">>
])
, ?TYPE_ERROR(list_comp_bad_shadow,
[<<?Pos(2, 53)
"Cannot unify `string` and `int`\n"
"when checking the type of the pattern `x : int` against the expected type `string`">>
])
, ?TYPE_ERROR(map_as_map_key,
[<<?Pos(5, 47)
"Invalid key type `map(int, int)`\n"
"Map keys cannot contain other maps.">>,
<<?Pos(6, 31)
"Invalid key type `list(map(int, int))`\n"
"Map keys cannot contain other maps.">>,
<<?Pos(6, 31)
"Invalid key type `lm`\n"
"Map keys cannot contain other maps.">>])
, ?TYPE_ERROR(calling_init_function,
[<<?Pos(7, 28)
"The 'init' function is called exclusively by the create contract transaction "
"and cannot be called from the contract code.">>])
, ?TYPE_ERROR(bad_top_level_decl,
[<<?Pos(1, 1) "The definition of 'square' must appear inside a contract or namespace.">>])
, ?TYPE_ERROR(missing_event_type,
[<<?Pos(3, 5)
"Unbound variable `Chain.event`\n"
"Did you forget to define the event type?">>])
, ?TYPE_ERROR(bad_bytes_concat,
[<<?Pos(12, 40)
"Failed to resolve byte array lengths in call to Bytes.concat with arguments of type\n"
" - 'g (at line 12, column 20)\n"
" - 'h (at line 12, column 23)\n"
"and result type\n"
" - bytes(10) (at line 12, column 28)">>,
<<?Pos(13, 28)
"Failed to resolve byte array lengths in call to Bytes.concat with arguments of type\n"
" - 'd (at line 13, column 20)\n"
" - 'e (at line 13, column 23)\n"
"and result type\n"
" - 'f (at line 13, column 14)">>,
<<?Pos(15, 5)
"Cannot unify `bytes(26)` and `bytes(25)`\n"
"when checking the type of the expression `Bytes.concat(x, y) : bytes(26)` "
"against the expected type `bytes(25)`">>,
<<?Pos(17, 5)
"Failed to resolve byte array lengths in call to Bytes.concat with arguments of type\n"
" - bytes(6) (at line 16, column 24)\n"
" - 'b (at line 16, column 34)\n"
"and result type\n"
" - 'c (at line 16, column 39)">>,
<<?Pos(19, 25)
"Cannot resolve length of byte array.">>])
, ?TYPE_ERROR(bad_bytes_split,
[<<?Pos(13, 5)
"Failed to resolve byte array lengths in call to Bytes.split with argument of type\n"
" - 'f (at line 12, column 20)\n"
"and result types\n"
" - 'e (at line 12, column 25)\n"
" - bytes(20) (at line 12, column 29)">>,
<<?Pos(16, 5)
"Failed to resolve byte array lengths in call to Bytes.split with argument of type\n"
" - bytes(15) (at line 15, column 24)\n"
"and result types\n"
" - 'c (at line 16, column 5)\n"
" - 'd (at line 16, column 5)">>,
<<?Pos(19, 5)
"Failed to resolve byte array lengths in call to Bytes.split with argument of type\n"
" - 'b (at line 18, column 20)\n"
"and result types\n"
" - bytes(20) (at line 18, column 25)\n"
" - 'a (at line 18, column 37)">>])
, ?TYPE_ERROR(wrong_compiler_version,
[<<?Pos(1, 1)
"Cannot compile with this version of the compiler, "
"because it does not satisfy the constraint ", Version/binary, " < 1.0">>,
<<?Pos(2, 1)
"Cannot compile with this version of the compiler, "
"because it does not satisfy the constraint ", Version/binary, " == 9.9.9">>])
, ?TYPE_ERROR(interface_with_defs,
[<<?Pos(2, 3)
"Contract interfaces cannot contain defined functions or entrypoints.\n"
"Fix: replace the definition of `foo` by a type signature.">>])
, ?TYPE_ERROR(contract_as_namespace,
[<<?Pos(5, 28)
"Invalid call to contract entrypoint `Foo.foo`.\n"
"It must be called as `c.foo` for some `c : Foo`.">>])
, ?TYPE_ERROR(toplevel_let,
[<<?Pos(2, 7)
"Toplevel \"let\" definitions are not supported. "
"Value `this_is_illegal` could be replaced by 0-argument function.">>])
, ?TYPE_ERROR(empty_typedecl,
[<<?Pos(2, 8)
"Empty type declarations are not supported. "
"Type `t` lacks a definition">>])
, ?TYPE_ERROR(higher_kinded_type,
[<<?Pos(2, 35)
"Type `'m` is a higher kinded type variable "
"(takes another type as an argument)">>])
, ?TYPE_ERROR(bad_arity,
[<<?Pos(3, 20)
"Arity for id doesn't match. Expected 1, got 0">>,
<<?Pos(3, 25)
"Cannot unify `int` and `id`\n"
"when checking the type of the expression `123 : int` "
"against the expected type `id`">>,
<<?Pos(4, 20)
"Arity for id doesn't match. Expected 1, got 2">>,
<<?Pos(4, 35)
"Cannot unify `int` and `id(int, int)`\n"
"when checking the type of the expression `123 : int` "
"against the expected type `id(int, int)`">>])
, ?TYPE_ERROR(bad_unnamed_map_update_default,
[<<?Pos(4, 17)
"Invalid map update with default">>])
, ?TYPE_ERROR(non_functional_entrypoint,
[<<?Pos(2, 14)
"`f` was declared with an invalid type `int`. "
"Entrypoints and functions must have functional types">>])
, ?TYPE_ERROR(bad_records,
[<<?Pos(3, 16)
"Mixed record fields and map keys in `{x = 0, [0] = 1}`">>,
<<?Pos(4, 6)
"Mixed record fields and map keys in `r {x = 0, [0] = 1}`">>,
<<?Pos(5, 6)
"Empty record/map update `r {}`">>
])
, ?TYPE_ERROR(bad_protected_call,
[<<?Pos(6, 22)
"Invalid `protected` argument `(0 : int) == (1 : int) : bool`. "
"It must be either `true` or `false`.">>
])
, ?TYPE_ERROR(bad_function_block,
[<<?Pos(4, 5)
"Mismatch in the function block. Expected implementation/type declaration of g function">>,
<<?Pos(5, 5)
"Mismatch in the function block. Expected implementation/type declaration of g function">>
])
, ?TYPE_ERROR(just_an_empty_file,
[<<?Pos(0, 0)
"Empty contract">>
])
, ?TYPE_ERROR(bad_number_of_args,
[<<?Pos(3, 39)
"Cannot unify `() => unit` and `(int) => 'a`\n",
"when checking the application of\n"
" `f : () => unit`\n"
"to arguments\n"
" `1 : int`">>,
<<?Pos(4, 20)
"Cannot unify `(int, string) => 'e` and `(int) => 'd`\n"
"when checking the application of\n"
" `g : (int, string) => 'e`\n"
"to arguments\n"
" `1 : int`">>,
<<?Pos(5, 20)
"Cannot unify `(int, string) => 'c` and `(string) => 'b`\n"
"when checking the application of\n"
" `g : (int, string) => 'c`\n"
"to arguments\n"
" `\"Litwo, ojczyzno moja\" : string`">>
])
, ?TYPE_ERROR(bad_state,
[<<?Pos(4, 16)
"Conflicting updates for field 'foo'">>])
, ?TYPE_ERROR(factories_type_errors,
[<<?Pos(10,18)
"Chain.clone requires `ref` named argument of contract type.">>,
<<?Pos(11,18)
"Cannot unify `(gas : int, value : int, protected : bool) => if(protected, option(void), void)` and `(gas : int, value : int, protected : bool, int, bool) => if(protected, option(void), void)`\n"
"when checking contract construction of type\n"
" (gas : int, value : int, protected : bool) =>\n"
" if(protected, option(void), void) (at line 11, column 18)\n"
"against the expected type\n"
" (gas : int, value : int, protected : bool, int, bool) =>\n"
" if(protected, option(void), void)">>,
<<?Pos(11,18)
"Cannot unify `Bakoom` and `Kaboom`\n"
"when checking that contract construction of type\n"
" Bakoom\n"
"arising from resolution of variadic function `Chain.clone`\n"
"matches the expected type\n"
" Kaboom">>,
<<?Pos(12,37)
"Cannot unify `int` and `bool`\n"
"when checking named argument `gas : int` against inferred type `bool`">>,
<<?Pos(13,18),
"Kaboom is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>,
<<?Pos(18,18)
"Cannot unify `(gas : int, value : int, protected : bool, int, bool) => if(protected, option(void), void)` and `(gas : int, value : int, protected : bool) => 'a`\n"
"when checking contract construction of type\n (gas : int, value : int, protected : bool, int, bool) =>\n if(protected, option(void), void) (at line 18, column 18)\nagainst the expected type\n (gas : int, value : int, protected : bool) => 'a">>,
<<?Pos(19,42),
"Named argument `protected` is not one of the expected named arguments\n - `value : int`">>,
<<?Pos(20,42),
"Cannot unify `int` and `bool`\n"
"when checking named argument `value : int` against inferred type `bool`">>
])
, ?TYPE_ERROR(ambiguous_main,
[<<?Pos(1,1)
"Could not deduce the main contract. You can point it out manually with the `main` keyword.">>
])
, ?TYPE_ERROR(no_main_contract,
[<<?Pos(0,0)
"No contract defined.">>
])
, ?TYPE_ERROR(multiple_main_contracts,
[<<?Pos(1,6)
"Only one main contract can be defined.">>
])
, ?TYPE_ERROR(using_namespace_ambiguous_name,
[ <<?Pos(13,23)
"Ambiguous name `A.f` could be one of\n"
" - `Xa.f` (at line 2, column 3)\n"
" - `Xb.f` (at line 5, column 3)">>
, <<?Pos(13,23)
"Unbound variable `A.f`">>
])
, ?TYPE_ERROR(using_namespace_wrong_scope,
[ <<?Pos(19,5)
"Unbound variable `f`">>
, <<?Pos(21,23)
"Unbound variable `f`">>
])
, ?TYPE_ERROR(using_namespace_undefined,
[<<?Pos(2,3)
"Cannot use undefined namespace MyUndefinedNamespace">>
])
, ?TYPE_ERROR(using_namespace_undefined_parts,
[<<?Pos(5,3)
"The namespace Nsp does not define the following names: a">>
])
, ?TYPE_ERROR(using_namespace_hidden_parts,
[<<?Pos(8,23)
"Unbound variable `g`">>
])
, ?TYPE_ERROR(stateful_pattern_guard,
[<<?Pos(8,12)
"Cannot reference stateful function `g` in a pattern guard.">>
])
, ?TYPE_ERROR(non_boolean_pattern_guard,
[<<?Pos(4,24)
"Cannot unify `string` and `bool`\n"
"when checking the type of the expression `\"y\" : string` "
"against the expected type `bool`">>
])
, ?TYPE_ERROR(empty_record_definition,
[<<?Pos(2,5)
"Empty record definitions are not allowed. Cannot define the record `r`">>
])
, ?TYPE_ERROR(operator_lambdas,
[<<?Pos(9,32)
"Cannot unify `(int, int) => int` and `(int) => 'a`\n"
"when checking the application of\n"
" `(l : _, r : _) => l + r : (int, int) => int`\n"
"to arguments\n"
" `1 : int`">>
])
, ?TYPE_ERROR(warnings,
[<<?Pos(0, 0)
"The file `Triple.aes` is included but not used.">>,
<<?Pos(13, 3)
"The function `h` is defined but never used.">>,
<<?Pos(19, 3)
"The type `unused_type` is defined but never used.">>,
<<?Pos(23, 54)
"Negative spend.">>,
<<?Pos(27, 9)
"The definition of `x` shadows an older definition at line 26, column 9.">>,
<<?Pos(30, 36)
"Division by zero.">>,
<<?Pos(32, 3)
"The function `unused_stateful` is unnecessarily marked as stateful.">>,
<<?Pos(35, 31)
"The variable `unused_arg` is defined but never used.">>,
<<?Pos(36, 9)
"The variable `unused_var` is defined but never used.">>,
<<?Pos(41, 3)
"The function `unused_function` is defined but never used.">>,
<<?Pos(42, 3)
"The function `recursive_unused_function` is defined but never used.">>,
<<?Pos(43, 3)
"The function `called_unused_function1` is defined but never used.">>,
<<?Pos(44, 3)
"The function `called_unused_function2` is defined but never used.">>,
<<?Pos(48, 5)
"Unused return value.">>,
<<?Pos(60, 5)
"The function `dec` is defined but never used.">>
])
, ?TYPE_ERROR(polymorphism_contract_interface_recursive,
[<<?Pos(1,24)
"Trying to implement or extend an undefined interface `Z`">>
])
, ?TYPE_ERROR(polymorphism_contract_interface_same_name_different_type,
[<<?Pos(9,5)
"Duplicate definitions of `f` at\n"
" - line 8, column 5\n"
" - line 9, column 5">>])
, ?TYPE_ERROR(polymorphism_contract_missing_implementation,
[<<?Pos(4,20)
"Unimplemented entrypoint `f` from the interface `I1` in the contract `I2`">>
])
, ?TYPE_ERROR(polymorphism_contract_same_decl_multi_interface,
[<<?Pos(7,10)
"Both interfaces `I` and `J` implemented by the contract `C` have a function called `f`">>
])
, ?TYPE_ERROR(polymorphism_contract_undefined_interface,
[<<?Pos(1,14)
"Trying to implement or extend an undefined interface `I`">>
])
, ?TYPE_ERROR(polymorphism_contract_same_name_different_type_multi_interface,
[<<?Pos(7,10)
"Both interfaces `I` and `J` implemented by the contract `C` have a function called `f`">>
])
, ?TYPE_ERROR(polymorphism_contract_interface_undefined_interface,
[<<?Pos(1,24)
"Trying to implement or extend an undefined interface `H`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching,
[<<?Pos(36,49)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the application of\n"
" `g2 : (Cat) => Cat`\n"
"to arguments\n"
" `x : Animal`">>,
<<?Pos(39,43)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `g3(x) : Animal` against the expected type `Cat`">>,
<<?Pos(48,55)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the application of\n"
" `g5 : ((Animal) => Animal) => Cat`\n"
"to arguments\n"
" `x : (Cat) => Cat`">>,
<<?Pos(52,44)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `f6() : option(Animal)` against the expected type `option(Cat)`">>,
<<?Pos(73,43)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `some_animal : Animal` against the expected type `Cat`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_custom_types,
[<<?Pos(56,39)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_CONTRA(f_c_to_u) : dt_contra(Cat)` against the expected type `dt_contra(Animal)`">>,
<<?Pos(62,35)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `DT_CO(f_u_to_a) : dt_co(Animal)` against the expected type `dt_co(Cat)`">>,
<<?Pos(67,36)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the application of\n `DT_INV : ((Cat) => Cat) => dt_inv(Cat)`\nto arguments\n `f_c_to_a : (Cat) => Animal`">>,
<<?Pos(68,36)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the expression `DT_INV(f_c_to_c) : dt_inv(Cat)` against the expected type `dt_inv(Animal)`">>,
<<?Pos(69,36)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV(f_a_to_a) : dt_inv(Animal)` against the expected type `dt_inv(Cat)`">>,
<<?Pos(70,36)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV(f_a_to_c) : dt_inv(Animal)` against the expected type `dt_inv(Cat)`">>,
<<?Pos(71,36)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the application of\n `DT_INV : ((Cat) => Cat) => dt_inv(Cat)`\nto arguments\n `f_c_to_a : (Cat) => Animal`">>,
<<?Pos(80,40)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_A(f_c_to_u) : dt_inv_sep(Cat)` against the expected type `dt_inv_sep(Animal)`">>,
<<?Pos(82,40)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_B(f_u_to_c) : dt_inv_sep(Cat)` against the expected type `dt_inv_sep(Animal)`">>,
<<?Pos(83,40)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_A(f_a_to_u) : dt_inv_sep(Animal)` against the expected type `dt_inv_sep(Cat)`">>,
<<?Pos(85,40)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_B(f_u_to_a) : dt_inv_sep(Animal)` against the expected type `dt_inv_sep(Cat)`">>,
<<?Pos(90,42)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `DT_CO_NEST_A(f_dt_contra_a_to_u) : dt_co_nest_a(Animal)` against the expected type `dt_co_nest_a(Cat)`">>,
<<?Pos(94,46)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_CONTRA_NEST_A(f_dt_co_c_to_u) : dt_contra_nest_a(Cat)` against the expected type `dt_contra_nest_a(Animal)`">>,
<<?Pos(99,46)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_CONTRA_NEST_B(f_u_to_dt_contra_c) : dt_contra_nest_b(Cat)` against the expected type `dt_contra_nest_b(Animal)`">>,
<<?Pos(105,42)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `DT_CO_NEST_B(f_u_to_dt_co_a) : dt_co_nest_b(Animal)` against the expected type `dt_co_nest_b(Cat)`">>,
<<?Pos(110,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `vj3 : dt_co_twice(Cat)` against the expected type `dt_co_twice(Animal)`">>,
<<?Pos(114,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_a_to_c_to_u) : dt_a_contra_b_contra(Animal, Cat)` against the expected type `dt_a_contra_b_contra(Animal, Animal)`">>,
<<?Pos(115,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_a_to_u) : dt_a_contra_b_contra(Cat, Animal)` against the expected type `dt_a_contra_b_contra(Animal, Animal)`">>,
<<?Pos(116,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_c_to_u) : dt_a_contra_b_contra(Cat, Cat)` against the expected type `dt_a_contra_b_contra(Animal, Animal)`">>,
<<?Pos(119,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_a_to_u) : dt_a_contra_b_contra(Cat, Animal)` against the expected type `dt_a_contra_b_contra(Animal, Cat)`">>,
<<?Pos(120,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_c_to_u) : dt_a_contra_b_contra(Cat, Cat)` against the expected type `dt_a_contra_b_contra(Animal, Cat)`">>,
<<?Pos(122,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_a_to_c_to_u) : dt_a_contra_b_contra(Animal, Cat)` against the expected type `dt_a_contra_b_contra(Cat, Animal)`">>,
<<?Pos(124,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_c_to_u) : dt_a_contra_b_contra(Cat, Cat)` against the expected type `dt_a_contra_b_contra(Cat, Animal)`">>,
<<?Pos(131,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `vl2 : dt_contra_twice(Animal)` against the expected type `dt_contra_twice(Cat)`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_records,
[<<?Pos(27,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `r03 : rec_co(Cat)` against the expected type `Main.rec_co(Animal)`">>,
<<?Pos(33,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `r06 : rec_contra(Animal)` against the expected type `Main.rec_contra(Cat)`">>,
<<?Pos(40,13)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the pattern `r10 : rec_inv(Animal)` against the expected type `Main.rec_inv(Cat)`">>,
<<?Pos(41,13)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the pattern `r11 : rec_inv(Cat)` against the expected type `Main.rec_inv(Animal)`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_oracles,
[<<?Pos(15,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o03 : oracle(Animal, Animal)` against the expected type `oracle(Cat, Animal)`">>,
<<?Pos(16,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o04 : oracle(Animal, Animal)` against the expected type `oracle(Cat, Cat)`">>,
<<?Pos(17,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `o05 : oracle(Animal, Cat)` against the expected type `oracle(Animal, Animal)`">>,
<<?Pos(19,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o07 : oracle(Animal, Cat)` against the expected type `oracle(Cat, Animal)`">>,
<<?Pos(20,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o08 : oracle(Animal, Cat)` against the expected type `oracle(Cat, Cat)`">>,
<<?Pos(25,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `o13 : oracle(Cat, Cat)` against the expected type `oracle(Animal, Animal)`">>,
<<?Pos(27,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `o15 : oracle(Cat, Cat)` against the expected type `oracle(Cat, Animal)`">>,
<<?Pos(34,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q05 : oracle_query(Animal, Cat)` against the expected type `oracle_query(Animal, Animal)`">>,
<<?Pos(36,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q07 : oracle_query(Animal, Cat)` against the expected type `oracle_query(Cat, Animal)`">>,
<<?Pos(38,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q09 : oracle_query(Cat, Animal)` against the expected type `oracle_query(Animal, Animal)`">>,
<<?Pos(39,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q10 : oracle_query(Cat, Animal)` against the expected type `oracle_query(Animal, Cat)`">>,
<<?Pos(42,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q13 : oracle_query(Cat, Cat)` against the expected type `oracle_query(Animal, Animal)`">>,
<<?Pos(43,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q14 : oracle_query(Cat, Cat)` against the expected type `oracle_query(Animal, Cat)`">>,
<<?Pos(44,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q15 : oracle_query(Cat, Cat)` against the expected type `oracle_query(Cat, Animal)`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_chain_create_fail,
[<<?Pos(9,22)
"I is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>,
<<?Pos(10,13)
"Cannot unify `I` and `C` in a covariant context\n"
"when checking the type of the pattern `c2 : C` against the expected type `I`">>,
<<?Pos(10,22)
"I is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>,
<<?Pos(11,22)
"I is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>
])
, ?TYPE_ERROR(missing_definition,
[<<?Pos(2,14)
"Missing definition of function `foo`">>
])
, ?TYPE_ERROR(child_with_decls,
[<<?Pos(2,14)
"Missing definition of function `f`">>
])
, ?TYPE_ERROR(parameterised_state,
[<<?Pos(3,8)
"The state type cannot be parameterized">>
])
, ?TYPE_ERROR(parameterised_event,
[<<?Pos(3,12)
"The event type cannot be parameterized">>
])
, ?TYPE_ERROR(missing_init_fun_alias_to_type,
[<<?Pos(1,10)
"Missing `init` function for the contract `AliasToType`.\n"
"The `init` function can only be omitted if the state type is `unit`">>
])
, ?TYPE_ERROR(missing_init_fun_alias_to_alias_to_type,
[<<?Pos(1,10)
"Missing `init` function for the contract `AliasToAliasToType`.\n"
"The `init` function can only be omitted if the state type is `unit`">>
])
, ?TYPE_ERROR(higher_order_entrypoint,
[<<?Pos(2,20)
"The argument\n"
" `f : (int) => int`\n"
"of entrypoint `apply` has a higher-order (contains function types) type">>
])
, ?TYPE_ERROR(higher_order_entrypoint_return,
[<<?Pos(2,3)
"The return type\n"
" `(int) => int`\n"
"of entrypoint `add` is higher-order (contains function types)">>
])
, ?TYPE_ERROR(polymorphic_aens_resolve,
[<<?Pos(4,5)
"Invalid return type of `AENS.resolve`:\n"
" `'a`\n"
"It must be a `string` or a pubkey type (`address`, `oracle`, etc)">>
])
, ?TYPE_ERROR(bad_aens_resolve,
[<<?Pos(6,5)
"Invalid return type of `AENS.resolve`:\n"
" `list(int)`\n"
"It must be a `string` or a pubkey type (`address`, `oracle`, etc)">>
])
, ?TYPE_ERROR(bad_aens_resolve_using,
[<<?Pos(7,5)
"Invalid return type of `AENS.resolve`:\n"
" `list(int)`\n"
"It must be a `string` or a pubkey type (`address`, `oracle`, etc)">>
])
, ?TYPE_ERROR(polymorphic_query_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle('a, 'b)`\n"
"The query type must not be polymorphic (contain type variables)">>,
<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle('a, 'b)`\n"
"The response type must not be polymorphic (contain type variables)">>
])
, ?TYPE_ERROR(polymorphic_response_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle(string, 'r)`\n"
"The response type must not be polymorphic (contain type variables)">>
])
, ?TYPE_ERROR(higher_order_query_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle((int) => int, string)`\n"
"The query type must not be higher-order (contain function types)">>
])
, ?TYPE_ERROR(higher_order_response_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle(string, (int) => int)`\n"
"The response type must not be higher-order (contain function types)">>
])
, ?TYPE_ERROR(var_args_unify_let,
[<<?Pos(3,9)
"Cannot infer types for variable argument list.\n"
"when checking the type of the pattern `x : 'a` against the expected type `(gas : int, value : int, protected : bool, ref : 'b, var_args) => 'b`">>
])
, ?TYPE_ERROR(var_args_unify_fun_call,
[<<?Pos(6,5)
"Cannot infer types for variable argument list.\n"
"when checking the application of\n"
" `g : (() => 'b) => 'b`\n"
"to arguments\n"
" `Chain.create : (value : int, var_args) => 'c`">>
])
, ?TYPE_ERROR(polymorphism_add_stateful_entrypoint,
[<<?Pos(5,25)
"`f` cannot be stateful because the entrypoint `f` in the interface `I` is not stateful">>
])
, ?TYPE_ERROR(polymorphism_change_entrypoint_to_function,
[<<?Pos(6,14)
"`f` must be declared as an entrypoint instead of a function in order to implement the entrypoint `f` from the interface `I`">>
])
, ?TYPE_ERROR(polymorphism_non_payable_contract_implement_payable,
[<<?Pos(4,10)
"Non-payable contract `C` cannot implement payable interface `I`">>
])
, ?TYPE_ERROR(polymorphism_non_payable_interface_implement_payable,
[<<?Pos(4,20)
"Non-payable interface `H` cannot implement payable interface `I`">>
])
, ?TYPE_ERROR(polymorphism_remove_payable_entrypoint,
[<<?Pos(5,16)
"`f` must be payable because the entrypoint `f` in the interface `I` is payable">>
])
, ?TYPE_ERROR(calling_child_contract_entrypoint,
[<<?Pos(5,20)
"Invalid call to contract entrypoint `F.g`.\n"
"It must be called as `c.g` for some `c : F`.">>])
, ?TYPE_ERROR(using_contract_as_namespace,
[<<?Pos(5,3)
"Cannot use undefined namespace F">>])
, ?TYPE_ERROR(hole_expression,
[<<?Pos(5,13)
"Found a hole of type `bool`">>,
<<?Pos(6,17)
"Found a hole of type `string`">>,
<<?Pos(9,37)
"Found a hole of type `(int) => int`">>,
<<?Pos(13,20)
"Found a hole of type `'a`">>
])
].
%% EUnit generator for byte-code validation.
%% Produces two groups of labelled tests:
%%   1. mismatched pairs from validation_fails/0 must yield the expected
%%      validation errors (an `ok` result is treated as "no errors", #{}),
%%   2. every compilable contract must validate against itself.
validation_test_() ->
    MkFailCase =
        fun({Src1, Src2, Expected}) ->
            {"Validation fail: " ++ Src1 ++ " /= " ++ Src2,
             fun() ->
                 Found = case validate(Src1, Src2) of
                             ok            -> #{};  %% unexpected success -> empty error set
                             {error, Errs} -> Errs
                         end,
                 check_errors(Expected, Found)
             end}
        end,
    MkOkCase =
        fun(Src) ->
            {"Validation of " ++ Src,
             fun() -> ?assertEqual(ok, validate(Src, Src)) end}
        end,
    lists:map(MkFailCase, validation_fails()) ++
    lists:map(MkOkCase, compilable_contracts()).
%% Fixture data for validation_test_/0: each tuple is
%% {ContractWithByteCode, ContractWithSourceCode, ExpectedErrorBinaries}.
%% The binaries must match the validator's output byte-for-byte, so do not
%% reformat them.
validation_fails() ->
    %% Dead-code elimination removes .MyList.map2 from "deadcode"'s byte
    %% code, so it cannot match the fuller "nodeadcode" source.
    [{"deadcode", "nodeadcode",
      [<<"Data error:\n"
         "Byte code does not match source code.\n"
         "- Functions in the source code but not in the byte code:\n"
         "    .MyList.map2">>]},
     %% Three kinds of per-function mismatch in one report: body, attributes
     %% (payable), and type signature (monomorphic vs polymorphic).
     {"validation_test1", "validation_test2",
      [<<"Data error:\n"
         "Byte code does not match source code.\n"
         "- The implementation of the function code_fail is different.\n"
         "- The attributes of the function attr_fail differ:\n"
         "    Byte code:   payable\n"
         "    Source code: \n"
         "- The type of the function type_fail differs:\n"
         "    Byte code:   integer => integer\n"
         "    Source code: {tvar,0} => {tvar,0}">>]},
     %% Contract-level payability mismatch.
     {"validation_test1", "validation_test3",
      [<<"Data error:\n"
         "Byte code contract is not payable, but source code contract is.">>]}].
%% Compile Contract1 to FATE byte code and check that it matches the source
%% text of Contract2 via aeso_compiler:validate_byte_code/3.
%% The init function is stripped before serialization because deployed byte
%% code does not carry it. Returns `ok` or {error, Errors}; a compilation
%% failure of Contract1 is printed and thrown instead.
validate(SrcName1, SrcName2) ->
    Compiled = compile(SrcName1),
    case Compiled of
        #{ fate_code := Fate } ->
            Stripped   = aeb_fate_code:strip_init_function(Fate),
            Serialized = aeb_fate_code:serialize(Stripped),
            SourceText = aeso_test_utils:read_contract(SrcName2),
            %% Contracts compiled in debug mode must also be validated
            %% with the debug_mode option, or the byte code won't match.
            DebugOpts =
                case lists:member(SrcName2, debug_mode_contracts()) of
                    true  -> [debug_mode];
                    false -> []
                end,
            Opts = DebugOpts ++
                [{include, {file_system, [aeso_test_utils:contract_path()]}}],
            aeso_compiler:validate_byte_code(
                Compiled#{ byte_code := Serialized }, SourceText, Opts);
        Error ->
            print_and_throw(Error)
    end.
%% Report a compilation failure and raise.
%% A binary is a single pre-rendered message; anything else is taken to be
%% a list of aeso_errors error terms, pretty-printed and joined.
%% Never returns: always calls error/1.
print_and_throw(ErrBin) when is_binary(ErrBin) ->
    io:format("\n~s", [ErrBin]),
    error(ErrBin);
print_and_throw(Errors) ->
    Rendered = [aeso_errors:pp(E) || E <- Errors],
    io:format("Compilation error:\n~s", [string:join(Rendered, "\n\n")]),
    error(compilation_error).
| null | https://raw.githubusercontent.com/aeternity/aesophia/c078119bc46045088076d4397506aa92e720b674/test/aeso_compiler_tests.erl | erlang | -*- erlang-indent-level:4; indent-tabs-mode: nil -*-
-------------------------------------------------------------------
@doc Test Sophia language compiler.
@end
-------------------------------------------------------------------
Very simply test compile the given contracts. Only basic checks
are made on the output, just that it is a binary which indicates
that the compilation worked.
Check if all modules in the standard library compile
compilable_contracts() -> [ContractName].
The currently compilable contracts.
Custom general-purpose test file. Keep it last on the list.
Contracts that should produce type errors
Type errors | ( C ) 2018 , Aeternity Anstalt
-module(aeso_compiler_tests).
-compile([export_all, nowarn_export_all]).
-include_lib("eunit/include/eunit.hrl").
run_test(Test) ->
TestFun = list_to_atom(lists:concat([Test, "_test_"])),
[ begin
io:format("~s\n", [Label]),
Fun()
end || {Label, Fun} <- ?MODULE:TestFun() ],
ok.
simple_compile_test_() ->
[ {"Testing the " ++ ContractName ++ " contract",
fun() ->
case compile(ContractName) of
#{fate_code := Code} ->
Code1 = aeb_fate_code:deserialize(aeb_fate_code:serialize(Code)),
?assertMatch({X, X}, {Code1, Code});
Error -> io:format("\n\n~p\n\n", [Error]), print_and_throw(Error)
end
end} || ContractName <- compilable_contracts()] ++
[ {"Test file not found error",
fun() ->
{error, Errors} = aeso_compiler:file("does_not_exist.aes"),
ExpErr = <<"File error:\ndoes_not_exist.aes: no such file or directory">>,
check_errors([ExpErr], Errors)
end} ] ++
[ {"Testing error messages of " ++ ContractName,
fun() ->
Errors = compile(ContractName, [warn_all, warn_error]),
check_errors(ExpectedErrors, Errors)
end} ||
{ContractName, ExpectedErrors} <- failing_contracts() ] ++
[ {"Testing include with explicit files",
fun() ->
FileSystem = maps:from_list(
[ begin
{ok, Bin} = file:read_file(filename:join([aeso_test_utils:contract_path(), File])),
{File, Bin}
end || File <- ["included.aes", "../contracts/included2.aes"] ]),
#{byte_code := Code1} = compile("include", [{include, {explicit_files, FileSystem}}]),
#{byte_code := Code2} = compile("include"),
?assertMatch(true, Code1 == Code2)
end} ] ++
[ {"Testing deadcode elimination",
fun() ->
#{ byte_code := NoDeadCode } = compile("nodeadcode"),
#{ byte_code := DeadCode } = compile("deadcode"),
SizeNoDeadCode = byte_size(NoDeadCode),
SizeDeadCode = byte_size(DeadCode),
Delta = 20,
?assertMatch({_, _, true}, {SizeDeadCode, SizeNoDeadCode, SizeDeadCode + Delta < SizeNoDeadCode}),
ok
end} ] ++
[ {"Testing warning messages",
fun() ->
#{ warnings := Warnings } = compile("warnings", [warn_all]),
check_warnings(warnings(), Warnings)
end} ] ++
[].
stdlib_test_() ->
{ok, Files} = file:list_dir(aeso_stdlib:stdlib_include_path()),
[ { "Testing " ++ File ++ " from the stdlib",
fun() ->
String = "include \"" ++ File ++ "\"\nmain contract Test =\n entrypoint f(x) = x",
Options = [{src_file, File}],
case aeso_compiler:from_string(String, Options) of
{ok, #{fate_code := Code}} ->
Code1 = aeb_fate_code:deserialize(aeb_fate_code:serialize(Code)),
?assertMatch({X, X}, {Code1, Code});
{error, Error} -> io:format("\n\n~p\n\n", [Error]), print_and_throw(Error)
end
end} || File <- Files,
lists:suffix(".aes", File)
].
check_errors(no_error, Actual) -> ?assertMatch(#{}, Actual);
check_errors(Expect, #{}) ->
?assertEqual({error, Expect}, ok);
check_errors(Expect0, Actual0) ->
Expect = lists:sort(Expect0),
Actual = [ list_to_binary(string:trim(aeso_errors:pp(Err))) || Err <- Actual0 ],
case {Expect -- Actual, Actual -- Expect} of
{[], Extra} -> ?assertMatch({unexpected, []}, {unexpected, Extra});
{Missing, []} -> ?assertMatch({missing, []}, {missing, Missing});
{Missing, Extra} -> ?assertEqual(Missing, Extra)
end.
check_warnings(Expect0, Actual0) ->
Expect = lists:sort(Expect0),
Actual = [ list_to_binary(string:trim(aeso_warnings:pp(Warn))) || Warn <- Actual0 ],
case {Expect -- Actual, Actual -- Expect} of
{[], Extra} -> ?assertMatch({unexpected, []}, {unexpected, Extra});
{Missing, []} -> ?assertMatch({missing, []}, {missing, Missing});
{Missing, Extra} -> ?assertEqual(Missing, Extra)
end.
compile(Name) ->
compile( Name, [{include, {file_system, [aeso_test_utils:contract_path()]}}]).
compile(Name, Options) ->
String = aeso_test_utils:read_contract(Name),
Options1 =
case lists:member(Name, debug_mode_contracts()) of
true -> [debug_mode];
false -> []
end ++
[ {src_file, Name ++ ".aes"}
, {include, {file_system, [aeso_test_utils:contract_path()]}}
] ++ Options,
case aeso_compiler:from_string(String, Options1) of
{ok, Map} -> Map;
{error, ErrorString} when is_binary(ErrorString) -> ErrorString;
{error, Errors} -> Errors
end.
compilable_contracts() ->
["complex_types",
"counter",
"dutch_auction",
"environment",
"factorial",
"functions",
"fundme",
"identity",
"maps",
"oracles",
"remote_call",
"remote_call_ambiguous_record",
"simple",
"simple_storage",
"spend_test",
"stack",
"test",
"builtin_bug",
"builtin_map_get_bug",
"lc_record_bug",
"nodeadcode",
"deadcode",
"variant_types",
"state_handling",
"events",
"include",
"basic_auth",
"basic_auth_tx",
"bitcoin_auth",
"address_literals",
"bytes_equality",
"address_chain",
"namespace_bug",
"bytes_to_x",
"bytes_concat",
"aens",
"aens_update",
"tuple_match",
"cyclic_include",
"stdlib_include",
"double_include",
"manual_stdlib_include",
"list_comp",
"payable",
"unapplied_builtins",
"underscore_number_literals",
"pairing_crypto",
"qualified_constructor",
"let_patterns",
"lhs_matching",
"more_strings",
"protected_call",
"hermetization_turnoff",
"multiple_contracts",
"clone",
"clone_simple",
"create",
"child_contract_init_bug",
"using_namespace",
"assign_patterns",
"patterns_guards",
"pipe_operator",
"polymorphism_contract_implements_interface",
"polymorphism_contract_multi_interface",
"polymorphism_contract_interface_extends_interface",
"polymorphism_contract_interface_extensions",
"polymorphism_contract_interface_same_decl_multi_interface",
"polymorphism_contract_interface_same_name_same_type",
"polymorphism_variance_switching_chain_create",
"polymorphism_variance_switching_void_supertype",
"polymorphism_variance_switching_unify_with_interface_decls",
"polymorphism_preserve_or_add_payable_contract",
"polymorphism_preserve_or_add_payable_entrypoint",
"polymorphism_preserve_or_remove_stateful_entrypoint",
"missing_init_fun_state_unit",
"complex_compare_leq",
"complex_compare",
"higher_order_compare",
"higher_order_map_keys",
"higher_order_state",
"polymorphic_compare",
"polymorphic_entrypoint",
"polymorphic_entrypoint_return",
"polymorphic_map_keys",
"unapplied_contract_call",
"unapplied_named_arg_builtin",
"resolve_field_constraint_by_arity",
].
debug_mode_contracts() ->
["hermetization_turnoff"].
-define(Pos(Kind, File, Line, Col), (list_to_binary(Kind))/binary, " error in '",
(list_to_binary(File))/binary, ".aes' at line " ??Line ", col " ??Col ":\n").
-define(Pos(Line, Col), ?Pos(__Kind, __File, Line, Col)).
-define(ERROR(Kind, Name, Errs),
(fun() ->
__Kind = Kind,
__File = ??Name,
{__File, Errs}
end)()).
-define(TYPE_ERROR(Name, Errs), ?ERROR("Type", Name, Errs)).
-define(PARSE_ERROR(Name, Errs), ?ERROR("Parse", Name, Errs)).
-define(PosW(Kind, File, Line, Col), (list_to_binary(Kind))/binary, " in '",
(list_to_binary(File))/binary, ".aes' at line " ??Line ", col " ??Col ":\n").
-define(PosW(Line, Col), ?PosW(__Kind, __File, Line, Col)).
-define(WARNING(Name, Warns),
(fun() ->
__Kind = "Warning",
__File = ??Name,
Warns
end)()).
warnings() ->
?WARNING(warnings,
[<<?PosW(0, 0)
"The file `Triple.aes` is included but not used.">>,
<<?PosW(13, 3)
"The function `h` is defined but never used.">>,
<<?PosW(19, 3)
"The type `unused_type` is defined but never used.">>,
<<?PosW(23, 54)
"Negative spend.">>,
<<?PosW(27, 9)
"The definition of `x` shadows an older definition at line 26, column 9.">>,
<<?PosW(30, 36)
"Division by zero.">>,
<<?PosW(32, 3)
"The function `unused_stateful` is unnecessarily marked as stateful.">>,
<<?PosW(35, 31)
"The variable `unused_arg` is defined but never used.">>,
<<?PosW(36, 9)
"The variable `unused_var` is defined but never used.">>,
<<?PosW(41, 3)
"The function `unused_function` is defined but never used.">>,
<<?PosW(42, 3)
"The function `recursive_unused_function` is defined but never used.">>,
<<?PosW(43, 3)
"The function `called_unused_function1` is defined but never used.">>,
<<?PosW(44, 3)
"The function `called_unused_function2` is defined but never used.">>,
<<?PosW(48, 5)
"Unused return value.">>,
<<?PosW(60, 5)
"The function `dec` is defined but never used.">>
]).
failing_contracts() ->
{ok, V} = aeso_compiler:numeric_version(),
Version = list_to_binary(string:join([integer_to_list(N) || N <- V], ".")),
Parse errors
[ ?PARSE_ERROR(field_parse_error,
[<<?Pos(5, 26)
"Cannot use nested fields or keys in record construction: p.x">>])
, ?PARSE_ERROR(vsemi, [<<?Pos(3, 3) "Unexpected indentation. Did you forget a '}'?">>])
, ?PARSE_ERROR(vclose, [<<?Pos(4, 3) "Unexpected indentation. Did you forget a ']'?">>])
, ?PARSE_ERROR(indent_fail, [<<?Pos(3, 2) "Unexpected token 'entrypoint'.">>])
, ?PARSE_ERROR(assign_pattern_to_pattern, [<<?Pos(3, 22) "Unexpected token '='.">>])
, ?TYPE_ERROR(name_clash,
[<<?Pos(4, 3)
"Duplicate definitions of `double_def` at\n"
" - line 3, column 3\n"
" - line 4, column 3">>,
<<?Pos(7, 3)
"Duplicate definitions of `abort` at\n"
" - (builtin location)\n"
" - line 7, column 3">>,
<<?Pos(8, 3)
"Duplicate definitions of `require` at\n"
" - (builtin location)\n"
" - line 8, column 3">>,
<<?Pos(9, 3)
"Duplicate definitions of `put` at\n"
" - (builtin location)\n"
" - line 9, column 3">>,
<<?Pos(10, 3)
"Duplicate definitions of `state` at\n"
" - (builtin location)\n"
" - line 10, column 3">>])
, ?TYPE_ERROR(type_errors,
[<<?Pos(17, 23)
"Unbound variable `zz`">>,
<<?Pos(26, 9)
"Cannot unify `int` and `list(int)`\n"
"when checking the application of\n"
" `(::) : (int, list(int)) => list(int)`\n"
"to arguments\n"
" `x : int`\n"
" `x : int`">>,
<<?Pos(9, 48)
"Cannot unify `string` and `int`\n"
"when checking the assignment of the field `x : map(string, string)` "
"to the old value `__x` and the new value `__x {[\"foo\"] @ x = x + 1} : map(string, int)`">>,
<<?Pos(34, 47)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `1 : int` "
"against the expected type `string`">>,
<<?Pos(34, 52)
"Cannot unify `string` and `int`\n"
"when checking the type of the expression `\"bla\" : string` "
"against the expected type `int`">>,
<<?Pos(32, 18)
"Cannot unify `string` and `int`\n"
"when checking the type of the expression `\"x\" : string` "
"against the expected type `int`">>,
<<?Pos(11, 58)
"Cannot unify `string` and `int`\n"
"when checking the type of the expression `\"foo\" : string` "
"against the expected type `int`">>,
<<?Pos(38, 13)
"Cannot unify `int` and `string`\n"
"when comparing the types of the if-branches\n"
" - w : int (at line 38, column 13)\n"
" - z : string (at line 39, column 10)">>,
<<?Pos(22, 40)
"Not a record type: `string`\n"
"arising from the projection of the field `y`">>,
<<?Pos(21, 44)
"Not a record type: `string`\n"
"arising from an assignment of the field `y`">>,
<<?Pos(20, 40)
"Not a record type: `string`\n"
"arising from an assignment of the field `y`">>,
<<?Pos(19, 37)
"Not a record type: `string`\n"
"arising from an assignment of the field `y`">>,
<<?Pos(13, 27)
"Ambiguous record type with field `y` could be one of\n"
" - `r` (at line 4, column 10)\n"
" - `r'` (at line 5, column 10)">>,
<<?Pos(26, 7)
"Repeated name `x` in the pattern `x :: x`">>,
<<?Pos(44, 14)
"Repeated names `x`, `y` in the pattern `(x : int, y, x : string, y : bool)`">>,
<<?Pos(44, 39)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `x : int` "
"against the expected type `string`">>,
<<?Pos(44, 72)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `x : int` "
"against the expected type `string`">>,
<<?Pos(14, 24)
"No record type with fields `y`, `z`">>,
<<?Pos(15, 26)
"The field `z` is missing when constructing an element of type `r2`">>,
<<?Pos(15, 24)
"Record type `r2` does not have field `y`">>,
<<?Pos(47, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(50, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(54, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(58, 5)
"Let binding must be followed by an expression.">>,
<<?Pos(63, 5)
"Cannot unify `int` and `bool`\n"
"when checking the type of the expression `id(n) : int` "
"against the expected type `bool`">>])
, ?TYPE_ERROR(init_type_error,
[<<?Pos(7, 3)
"Cannot unify `string` and `map(int, int)`\n"
"when checking that `init` returns a value of type `state`">>])
, ?TYPE_ERROR(missing_state_type,
[<<?Pos(5, 3)
"Cannot unify `string` and `unit`\n"
"when checking that `init` returns a value of type `state`">>])
, ?TYPE_ERROR(missing_fields_in_record_expression,
[<<?Pos(7, 42)
"The field `x` is missing when constructing an element of type `r('a)`">>,
<<?Pos(8, 42)
"The field `y` is missing when constructing an element of type `r(int)`">>,
<<?Pos(6, 42)
"The fields `y`, `z` are missing when constructing an element of type `r('a)`">>])
, ?TYPE_ERROR(namespace_clash_builtin,
[<<?Pos(4, 10)
"The contract `Call` has the same name as a namespace at (builtin location)">>])
, ?TYPE_ERROR(namespace_clash_included,
[<<?Pos(5, 11)
"The namespace `BLS12_381` has the same name as a namespace at line 1, column 11 in BLS12_381.aes">>])
, ?TYPE_ERROR(namespace_clash_same_file,
[<<?Pos(4, 11)
"The namespace `Nmsp` has the same name as a namespace at line 1, column 11">>])
, ?TYPE_ERROR(bad_events,
[<<?Pos(9, 25)
"The indexed type `string` is not a word type">>,
<<?Pos(10, 25)
"The indexed type `alias_string` equals `string` which is not a word type">>])
, ?TYPE_ERROR(bad_events2,
[<<?Pos(9, 7)
"The event constructor `BadEvent1` has too many non-indexed values (max 1)">>,
<<?Pos(10, 7)
"The event constructor `BadEvent2` has too many indexed values (max 3)">>])
, ?TYPE_ERROR(type_clash,
[<<?Pos(12, 42)
"Cannot unify `int` and `string`\n"
"when checking the type of the expression `r.foo() : map(int, string)` "
"against the expected type `map(string, int)`">>])
, ?TYPE_ERROR(not_toplevel_include,
[<<?Pos(2, 11)
"Include of `included.aes` is not allowed, include only allowed at top level.">>])
, ?TYPE_ERROR(not_toplevel_namespace,
[<<?Pos(2, 13)
"Nested namespaces are not allowed. Namespace `Foo` is not defined at top level.">>])
, ?TYPE_ERROR(not_toplevel_contract,
[<<?Pos(2, 12)
"Nested contracts are not allowed. Contract `Con` is not defined at top level.">>])
, ?TYPE_ERROR(bad_address_literals,
[<<?Pos(11, 5)
"Cannot unify `address` and `oracle(int, bool)`\n"
"when checking the type of the expression `ak_2gx9MEFxKvY9vMG5YnqnXWv1hCsX7rgnfvBLJS4aQurustR1rt : address` "
"against the expected type `oracle(int, bool)`">>,
<<?Pos(9, 5)
"Cannot unify `address` and `Remote`\n"
"when checking the type of the expression `ak_2gx9MEFxKvY9vMG5YnqnXWv1hCsX7rgnfvBLJS4aQurustR1rt : address` "
"against the expected type `Remote`">>,
<<?Pos(7, 5)
"Cannot unify `address` and `bytes(32)`\n"
"when checking the type of the expression `ak_2gx9MEFxKvY9vMG5YnqnXWv1hCsX7rgnfvBLJS4aQurustR1rt : address` "
"against the expected type `bytes(32)`">>,
<<?Pos(14, 5)
"Cannot unify `oracle('a, 'b)` and `oracle_query(int, bool)`\n"
"when checking the type of the expression "
"`ok_2YNyxd6TRJPNrTcEDCe9ra59SVUdp9FR9qWC5msKZWYD9bP9z5 : oracle('a, 'b)` "
"against the expected type `oracle_query(int, bool)`">>,
<<?Pos(16, 5)
"Cannot unify `oracle('c, 'd)` and `bytes(32)`\n"
"when checking the type of the expression "
"`ok_2YNyxd6TRJPNrTcEDCe9ra59SVUdp9FR9qWC5msKZWYD9bP9z5 : oracle('c, 'd)` "
"against the expected type `bytes(32)`">>,
<<?Pos(18, 5)
"Cannot unify `oracle('e, 'f)` and `Remote`\n"
"when checking the type of the expression "
"`ok_2YNyxd6TRJPNrTcEDCe9ra59SVUdp9FR9qWC5msKZWYD9bP9z5 : oracle('e, 'f)` "
"against the expected type `Remote`">>,
<<?Pos(21, 5)
"Cannot unify `oracle_query('g, 'h)` and `oracle(int, bool)`\n"
"when checking the type of the expression "
"`oq_2oRvyowJuJnEkxy58Ckkw77XfWJrmRgmGaLzhdqb67SKEL1gPY : oracle_query('g, 'h)` "
"against the expected type `oracle(int, bool)`">>,
<<?Pos(23, 5)
"Cannot unify `oracle_query('i, 'j)` and `bytes(32)`\n"
"when checking the type of the expression "
"`oq_2oRvyowJuJnEkxy58Ckkw77XfWJrmRgmGaLzhdqb67SKEL1gPY : oracle_query('i, 'j)` "
"against the expected type `bytes(32)`">>,
<<?Pos(25, 5)
"Cannot unify `oracle_query('k, 'l)` and `Remote`\n"
"when checking the type of the expression "
"`oq_2oRvyowJuJnEkxy58Ckkw77XfWJrmRgmGaLzhdqb67SKEL1gPY : oracle_query('k, 'l)` "
"against the expected type `Remote`">>,
<<?Pos(28, 5)
"The type `address` is not a contract type\n"
"when checking that the contract literal "
"`ct_Ez6MyeTMm17YnTnDdHTSrzMEBKmy7Uz2sXu347bTDPgVH2ifJ` "
"has the type `address`">>,
<<?Pos(30, 5)
"The type `oracle(int, bool)` is not a contract type\n"
"when checking that the contract literal "
"`ct_Ez6MyeTMm17YnTnDdHTSrzMEBKmy7Uz2sXu347bTDPgVH2ifJ` "
"has the type `oracle(int, bool)`">>,
<<?Pos(32, 5)
"The type `bytes(32)` is not a contract type\n"
"when checking that the contract literal "
"`ct_Ez6MyeTMm17YnTnDdHTSrzMEBKmy7Uz2sXu347bTDPgVH2ifJ` "
"has the type `bytes(32)`">>,
<<?Pos(34, 5),
"The type `address` is not a contract type\n"
"when checking that the call to `Address.to_contract` "
"has the type `address`">>])
, ?TYPE_ERROR(stateful,
[<<?Pos(13, 35)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail1`.">>,
<<?Pos(14, 35)
"Cannot reference stateful function `local_spend` in the definition of non-stateful function `fail2`.">>,
<<?Pos(16, 15)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail3`.">>,
<<?Pos(20, 31)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail4`.">>,
<<?Pos(35, 47)
"Cannot reference stateful function `Chain.spend` in the definition of non-stateful function `fail5`.">>,
<<?Pos(48, 57)
"Cannot pass non-zero value argument `1000` in the definition of non-stateful function `fail6`.">>,
<<?Pos(49, 56)
"Cannot pass non-zero value argument `1000` in the definition of non-stateful function `fail7`.">>,
<<?Pos(52, 17)
"Cannot pass non-zero value argument `1000` in the definition of non-stateful function `fail8`.">>])
, ?TYPE_ERROR(bad_init_state_access,
[<<?Pos(11, 5)
"The `init` function should return the initial state as its result and cannot write the state, "
"but it calls\n"
" - `set_state` (at line 11, column 5), which calls\n"
" - `roundabout` (at line 8, column 38), which calls\n"
" - `put` (at line 7, column 39)">>,
<<?Pos(12, 5)
"The `init` function should return the initial state as its result and cannot read the state, "
"but it calls\n"
" - `new_state` (at line 12, column 5), which calls\n"
" - `state` (at line 5, column 29)">>,
<<?Pos(13, 13)
"The `init` function should return the initial state as its result and cannot read the state, "
"but it calls\n"
" - `state` (at line 13, column 13)">>])
, ?TYPE_ERROR(modifier_checks,
[<<?Pos(11, 3)
"The function `all_the_things` cannot be both public and private.">>,
<<?Pos(3, 3)
"Namespaces cannot contain entrypoints. Use `function` instead.">>,
<<?Pos(5, 10)
"The contract `Remote` has no entrypoints. Since Sophia version 3.2, "
"public contract functions must be declared with the `entrypoint` "
"keyword instead of `function`.">>,
<<?Pos(12, 3)
"The entrypoint `wha` cannot be private. Use `function` instead.">>,
<<?Pos(6, 3)
"Use `entrypoint` for declaration of `foo`: `entrypoint foo : () => unit`">>,
<<?Pos(10, 3)
"Use `entrypoint` instead of `function` for public function `foo`: `entrypoint foo() = ()`">>,
<<?Pos(6, 3)
"Use `entrypoint` instead of `function` for public function `foo`: `entrypoint foo : () => unit`">>])
, ?TYPE_ERROR(list_comp_not_a_list,
[<<?Pos(2, 36)
"Cannot unify `int` and `list('a)`\n"
"when checking rvalue of list comprehension binding `1 : int` against type `list('a)`">>
])
, ?TYPE_ERROR(list_comp_if_not_bool,
[<<?Pos(2, 44)
"Cannot unify `int` and `bool`\n"
"when checking the type of the expression `3 : int` against the expected type `bool`">>
])
, ?TYPE_ERROR(list_comp_bad_shadow,
[<<?Pos(2, 53)
"Cannot unify `string` and `int`\n"
"when checking the type of the pattern `x : int` against the expected type `string`">>
])
, ?TYPE_ERROR(map_as_map_key,
[<<?Pos(5, 47)
"Invalid key type `map(int, int)`\n"
"Map keys cannot contain other maps.">>,
<<?Pos(6, 31)
"Invalid key type `list(map(int, int))`\n"
"Map keys cannot contain other maps.">>,
<<?Pos(6, 31)
"Invalid key type `lm`\n"
"Map keys cannot contain other maps.">>])
, ?TYPE_ERROR(calling_init_function,
[<<?Pos(7, 28)
"The 'init' function is called exclusively by the create contract transaction "
"and cannot be called from the contract code.">>])
, ?TYPE_ERROR(bad_top_level_decl,
[<<?Pos(1, 1) "The definition of 'square' must appear inside a contract or namespace.">>])
, ?TYPE_ERROR(missing_event_type,
[<<?Pos(3, 5)
"Unbound variable `Chain.event`\n"
"Did you forget to define the event type?">>])
, ?TYPE_ERROR(bad_bytes_concat,
[<<?Pos(12, 40)
"Failed to resolve byte array lengths in call to Bytes.concat with arguments of type\n"
" - 'g (at line 12, column 20)\n"
" - 'h (at line 12, column 23)\n"
"and result type\n"
" - bytes(10) (at line 12, column 28)">>,
<<?Pos(13, 28)
"Failed to resolve byte array lengths in call to Bytes.concat with arguments of type\n"
" - 'd (at line 13, column 20)\n"
" - 'e (at line 13, column 23)\n"
"and result type\n"
" - 'f (at line 13, column 14)">>,
<<?Pos(15, 5)
"Cannot unify `bytes(26)` and `bytes(25)`\n"
"when checking the type of the expression `Bytes.concat(x, y) : bytes(26)` "
"against the expected type `bytes(25)`">>,
<<?Pos(17, 5)
"Failed to resolve byte array lengths in call to Bytes.concat with arguments of type\n"
" - bytes(6) (at line 16, column 24)\n"
" - 'b (at line 16, column 34)\n"
"and result type\n"
" - 'c (at line 16, column 39)">>,
<<?Pos(19, 25)
"Cannot resolve length of byte array.">>])
, ?TYPE_ERROR(bad_bytes_split,
[<<?Pos(13, 5)
"Failed to resolve byte array lengths in call to Bytes.split with argument of type\n"
" - 'f (at line 12, column 20)\n"
"and result types\n"
" - 'e (at line 12, column 25)\n"
" - bytes(20) (at line 12, column 29)">>,
<<?Pos(16, 5)
"Failed to resolve byte array lengths in call to Bytes.split with argument of type\n"
" - bytes(15) (at line 15, column 24)\n"
"and result types\n"
" - 'c (at line 16, column 5)\n"
" - 'd (at line 16, column 5)">>,
<<?Pos(19, 5)
"Failed to resolve byte array lengths in call to Bytes.split with argument of type\n"
" - 'b (at line 18, column 20)\n"
"and result types\n"
" - bytes(20) (at line 18, column 25)\n"
" - 'a (at line 18, column 37)">>])
, ?TYPE_ERROR(wrong_compiler_version,
[<<?Pos(1, 1)
"Cannot compile with this version of the compiler, "
"because it does not satisfy the constraint ", Version/binary, " < 1.0">>,
<<?Pos(2, 1)
"Cannot compile with this version of the compiler, "
"because it does not satisfy the constraint ", Version/binary, " == 9.9.9">>])
, ?TYPE_ERROR(interface_with_defs,
[<<?Pos(2, 3)
"Contract interfaces cannot contain defined functions or entrypoints.\n"
"Fix: replace the definition of `foo` by a type signature.">>])
, ?TYPE_ERROR(contract_as_namespace,
[<<?Pos(5, 28)
"Invalid call to contract entrypoint `Foo.foo`.\n"
"It must be called as `c.foo` for some `c : Foo`.">>])
, ?TYPE_ERROR(toplevel_let,
[<<?Pos(2, 7)
"Toplevel \"let\" definitions are not supported. "
"Value `this_is_illegal` could be replaced by 0-argument function.">>])
, ?TYPE_ERROR(empty_typedecl,
[<<?Pos(2, 8)
"Empty type declarations are not supported. "
"Type `t` lacks a definition">>])
, ?TYPE_ERROR(higher_kinded_type,
[<<?Pos(2, 35)
"Type `'m` is a higher kinded type variable "
"(takes another type as an argument)">>])
, ?TYPE_ERROR(bad_arity,
[<<?Pos(3, 20)
"Arity for id doesn't match. Expected 1, got 0">>,
<<?Pos(3, 25)
"Cannot unify `int` and `id`\n"
"when checking the type of the expression `123 : int` "
"against the expected type `id`">>,
<<?Pos(4, 20)
"Arity for id doesn't match. Expected 1, got 2">>,
<<?Pos(4, 35)
"Cannot unify `int` and `id(int, int)`\n"
"when checking the type of the expression `123 : int` "
"against the expected type `id(int, int)`">>])
, ?TYPE_ERROR(bad_unnamed_map_update_default,
[<<?Pos(4, 17)
"Invalid map update with default">>])
, ?TYPE_ERROR(non_functional_entrypoint,
[<<?Pos(2, 14)
"`f` was declared with an invalid type `int`. "
"Entrypoints and functions must have functional types">>])
, ?TYPE_ERROR(bad_records,
[<<?Pos(3, 16)
"Mixed record fields and map keys in `{x = 0, [0] = 1}`">>,
<<?Pos(4, 6)
"Mixed record fields and map keys in `r {x = 0, [0] = 1}`">>,
<<?Pos(5, 6)
"Empty record/map update `r {}`">>
])
, ?TYPE_ERROR(bad_protected_call,
[<<?Pos(6, 22)
"Invalid `protected` argument `(0 : int) == (1 : int) : bool`. "
"It must be either `true` or `false`.">>
])
, ?TYPE_ERROR(bad_function_block,
[<<?Pos(4, 5)
"Mismatch in the function block. Expected implementation/type declaration of g function">>,
<<?Pos(5, 5)
"Mismatch in the function block. Expected implementation/type declaration of g function">>
])
, ?TYPE_ERROR(just_an_empty_file,
[<<?Pos(0, 0)
"Empty contract">>
])
, ?TYPE_ERROR(bad_number_of_args,
[<<?Pos(3, 39)
"Cannot unify `() => unit` and `(int) => 'a`\n",
"when checking the application of\n"
" `f : () => unit`\n"
"to arguments\n"
" `1 : int`">>,
<<?Pos(4, 20)
"Cannot unify `(int, string) => 'e` and `(int) => 'd`\n"
"when checking the application of\n"
" `g : (int, string) => 'e`\n"
"to arguments\n"
" `1 : int`">>,
<<?Pos(5, 20)
"Cannot unify `(int, string) => 'c` and `(string) => 'b`\n"
"when checking the application of\n"
" `g : (int, string) => 'c`\n"
"to arguments\n"
" `\"Litwo, ojczyzno moja\" : string`">>
])
, ?TYPE_ERROR(bad_state,
[<<?Pos(4, 16)
"Conflicting updates for field 'foo'">>])
, ?TYPE_ERROR(factories_type_errors,
[<<?Pos(10,18)
"Chain.clone requires `ref` named argument of contract type.">>,
<<?Pos(11,18)
"Cannot unify `(gas : int, value : int, protected : bool) => if(protected, option(void), void)` and `(gas : int, value : int, protected : bool, int, bool) => if(protected, option(void), void)`\n"
"when checking contract construction of type\n"
" (gas : int, value : int, protected : bool) =>\n"
" if(protected, option(void), void) (at line 11, column 18)\n"
"against the expected type\n"
" (gas : int, value : int, protected : bool, int, bool) =>\n"
" if(protected, option(void), void)">>,
<<?Pos(11,18)
"Cannot unify `Bakoom` and `Kaboom`\n"
"when checking that contract construction of type\n"
" Bakoom\n"
"arising from resolution of variadic function `Chain.clone`\n"
"matches the expected type\n"
" Kaboom">>,
<<?Pos(12,37)
"Cannot unify `int` and `bool`\n"
"when checking named argument `gas : int` against inferred type `bool`">>,
<<?Pos(13,18),
"Kaboom is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>,
<<?Pos(18,18)
"Cannot unify `(gas : int, value : int, protected : bool, int, bool) => if(protected, option(void), void)` and `(gas : int, value : int, protected : bool) => 'a`\n"
"when checking contract construction of type\n (gas : int, value : int, protected : bool, int, bool) =>\n if(protected, option(void), void) (at line 18, column 18)\nagainst the expected type\n (gas : int, value : int, protected : bool) => 'a">>,
<<?Pos(19,42),
"Named argument `protected` is not one of the expected named arguments\n - `value : int`">>,
<<?Pos(20,42),
"Cannot unify `int` and `bool`\n"
"when checking named argument `value : int` against inferred type `bool`">>
])
, ?TYPE_ERROR(ambiguous_main,
[<<?Pos(1,1)
"Could not deduce the main contract. You can point it out manually with the `main` keyword.">>
])
, ?TYPE_ERROR(no_main_contract,
[<<?Pos(0,0)
"No contract defined.">>
])
, ?TYPE_ERROR(multiple_main_contracts,
[<<?Pos(1,6)
"Only one main contract can be defined.">>
])
, ?TYPE_ERROR(using_namespace_ambiguous_name,
[ <<?Pos(13,23)
"Ambiguous name `A.f` could be one of\n"
" - `Xa.f` (at line 2, column 3)\n"
" - `Xb.f` (at line 5, column 3)">>
, <<?Pos(13,23)
"Unbound variable `A.f`">>
])
, ?TYPE_ERROR(using_namespace_wrong_scope,
[ <<?Pos(19,5)
"Unbound variable `f`">>
, <<?Pos(21,23)
"Unbound variable `f`">>
])
, ?TYPE_ERROR(using_namespace_undefined,
[<<?Pos(2,3)
"Cannot use undefined namespace MyUndefinedNamespace">>
])
, ?TYPE_ERROR(using_namespace_undefined_parts,
[<<?Pos(5,3)
"The namespace Nsp does not define the following names: a">>
])
, ?TYPE_ERROR(using_namespace_hidden_parts,
[<<?Pos(8,23)
"Unbound variable `g`">>
])
, ?TYPE_ERROR(stateful_pattern_guard,
[<<?Pos(8,12)
"Cannot reference stateful function `g` in a pattern guard.">>
])
, ?TYPE_ERROR(non_boolean_pattern_guard,
[<<?Pos(4,24)
"Cannot unify `string` and `bool`\n"
"when checking the type of the expression `\"y\" : string` "
"against the expected type `bool`">>
])
, ?TYPE_ERROR(empty_record_definition,
[<<?Pos(2,5)
"Empty record definitions are not allowed. Cannot define the record `r`">>
])
, ?TYPE_ERROR(operator_lambdas,
[<<?Pos(9,32)
"Cannot unify `(int, int) => int` and `(int) => 'a`\n"
"when checking the application of\n"
" `(l : _, r : _) => l + r : (int, int) => int`\n"
"to arguments\n"
" `1 : int`">>
])
, ?TYPE_ERROR(warnings,
[<<?Pos(0, 0)
"The file `Triple.aes` is included but not used.">>,
<<?Pos(13, 3)
"The function `h` is defined but never used.">>,
<<?Pos(19, 3)
"The type `unused_type` is defined but never used.">>,
<<?Pos(23, 54)
"Negative spend.">>,
<<?Pos(27, 9)
"The definition of `x` shadows an older definition at line 26, column 9.">>,
<<?Pos(30, 36)
"Division by zero.">>,
<<?Pos(32, 3)
"The function `unused_stateful` is unnecessarily marked as stateful.">>,
<<?Pos(35, 31)
"The variable `unused_arg` is defined but never used.">>,
<<?Pos(36, 9)
"The variable `unused_var` is defined but never used.">>,
<<?Pos(41, 3)
"The function `unused_function` is defined but never used.">>,
<<?Pos(42, 3)
"The function `recursive_unused_function` is defined but never used.">>,
<<?Pos(43, 3)
"The function `called_unused_function1` is defined but never used.">>,
<<?Pos(44, 3)
"The function `called_unused_function2` is defined but never used.">>,
<<?Pos(48, 5)
"Unused return value.">>,
<<?Pos(60, 5)
"The function `dec` is defined but never used.">>
])
, ?TYPE_ERROR(polymorphism_contract_interface_recursive,
[<<?Pos(1,24)
"Trying to implement or extend an undefined interface `Z`">>
])
, ?TYPE_ERROR(polymorphism_contract_interface_same_name_different_type,
[<<?Pos(9,5)
"Duplicate definitions of `f` at\n"
" - line 8, column 5\n"
" - line 9, column 5">>])
, ?TYPE_ERROR(polymorphism_contract_missing_implementation,
[<<?Pos(4,20)
"Unimplemented entrypoint `f` from the interface `I1` in the contract `I2`">>
])
, ?TYPE_ERROR(polymorphism_contract_same_decl_multi_interface,
[<<?Pos(7,10)
"Both interfaces `I` and `J` implemented by the contract `C` have a function called `f`">>
])
, ?TYPE_ERROR(polymorphism_contract_undefined_interface,
[<<?Pos(1,14)
"Trying to implement or extend an undefined interface `I`">>
])
, ?TYPE_ERROR(polymorphism_contract_same_name_different_type_multi_interface,
[<<?Pos(7,10)
"Both interfaces `I` and `J` implemented by the contract `C` have a function called `f`">>
])
, ?TYPE_ERROR(polymorphism_contract_interface_undefined_interface,
[<<?Pos(1,24)
"Trying to implement or extend an undefined interface `H`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching,
[<<?Pos(36,49)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the application of\n"
" `g2 : (Cat) => Cat`\n"
"to arguments\n"
" `x : Animal`">>,
<<?Pos(39,43)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `g3(x) : Animal` against the expected type `Cat`">>,
<<?Pos(48,55)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the application of\n"
" `g5 : ((Animal) => Animal) => Cat`\n"
"to arguments\n"
" `x : (Cat) => Cat`">>,
<<?Pos(52,44)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `f6() : option(Animal)` against the expected type `option(Cat)`">>,
<<?Pos(73,43)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `some_animal : Animal` against the expected type `Cat`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_custom_types,
[<<?Pos(56,39)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_CONTRA(f_c_to_u) : dt_contra(Cat)` against the expected type `dt_contra(Animal)`">>,
<<?Pos(62,35)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `DT_CO(f_u_to_a) : dt_co(Animal)` against the expected type `dt_co(Cat)`">>,
<<?Pos(67,36)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the application of\n `DT_INV : ((Cat) => Cat) => dt_inv(Cat)`\nto arguments\n `f_c_to_a : (Cat) => Animal`">>,
<<?Pos(68,36)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the expression `DT_INV(f_c_to_c) : dt_inv(Cat)` against the expected type `dt_inv(Animal)`">>,
<<?Pos(69,36)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV(f_a_to_a) : dt_inv(Animal)` against the expected type `dt_inv(Cat)`">>,
<<?Pos(70,36)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV(f_a_to_c) : dt_inv(Animal)` against the expected type `dt_inv(Cat)`">>,
<<?Pos(71,36)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the application of\n `DT_INV : ((Cat) => Cat) => dt_inv(Cat)`\nto arguments\n `f_c_to_a : (Cat) => Animal`">>,
<<?Pos(80,40)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_A(f_c_to_u) : dt_inv_sep(Cat)` against the expected type `dt_inv_sep(Animal)`">>,
<<?Pos(82,40)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_B(f_u_to_c) : dt_inv_sep(Cat)` against the expected type `dt_inv_sep(Animal)`">>,
<<?Pos(83,40)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_A(f_a_to_u) : dt_inv_sep(Animal)` against the expected type `dt_inv_sep(Cat)`">>,
<<?Pos(85,40)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the expression `DT_INV_SEP_B(f_u_to_a) : dt_inv_sep(Animal)` against the expected type `dt_inv_sep(Cat)`">>,
<<?Pos(90,42)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `DT_CO_NEST_A(f_dt_contra_a_to_u) : dt_co_nest_a(Animal)` against the expected type `dt_co_nest_a(Cat)`">>,
<<?Pos(94,46)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_CONTRA_NEST_A(f_dt_co_c_to_u) : dt_contra_nest_a(Cat)` against the expected type `dt_contra_nest_a(Animal)`">>,
<<?Pos(99,46)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_CONTRA_NEST_B(f_u_to_dt_contra_c) : dt_contra_nest_b(Cat)` against the expected type `dt_contra_nest_b(Animal)`">>,
<<?Pos(105,42)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the expression `DT_CO_NEST_B(f_u_to_dt_co_a) : dt_co_nest_b(Animal)` against the expected type `dt_co_nest_b(Cat)`">>,
<<?Pos(110,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `vj3 : dt_co_twice(Cat)` against the expected type `dt_co_twice(Animal)`">>,
<<?Pos(114,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_a_to_c_to_u) : dt_a_contra_b_contra(Animal, Cat)` against the expected type `dt_a_contra_b_contra(Animal, Animal)`">>,
<<?Pos(115,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_a_to_u) : dt_a_contra_b_contra(Cat, Animal)` against the expected type `dt_a_contra_b_contra(Animal, Animal)`">>,
<<?Pos(116,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_c_to_u) : dt_a_contra_b_contra(Cat, Cat)` against the expected type `dt_a_contra_b_contra(Animal, Animal)`">>,
<<?Pos(119,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_a_to_u) : dt_a_contra_b_contra(Cat, Animal)` against the expected type `dt_a_contra_b_contra(Animal, Cat)`">>,
<<?Pos(120,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_c_to_u) : dt_a_contra_b_contra(Cat, Cat)` against the expected type `dt_a_contra_b_contra(Animal, Cat)`">>,
<<?Pos(122,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_a_to_c_to_u) : dt_a_contra_b_contra(Animal, Cat)` against the expected type `dt_a_contra_b_contra(Cat, Animal)`">>,
<<?Pos(124,59)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the expression `DT_A_CONTRA_B_CONTRA(f_c_to_c_to_u) : dt_a_contra_b_contra(Cat, Cat)` against the expected type `dt_a_contra_b_contra(Cat, Animal)`">>,
<<?Pos(131,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `vl2 : dt_contra_twice(Animal)` against the expected type `dt_contra_twice(Cat)`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_records,
[<<?Pos(27,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `r03 : rec_co(Cat)` against the expected type `Main.rec_co(Animal)`">>,
<<?Pos(33,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `r06 : rec_contra(Animal)` against the expected type `Main.rec_contra(Cat)`">>,
<<?Pos(40,13)
"Cannot unify `Cat` and `Animal` in a invariant context\n"
"when checking the type of the pattern `r10 : rec_inv(Animal)` against the expected type `Main.rec_inv(Cat)`">>,
<<?Pos(41,13)
"Cannot unify `Animal` and `Cat` in a invariant context\n"
"when checking the type of the pattern `r11 : rec_inv(Cat)` against the expected type `Main.rec_inv(Animal)`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_oracles,
[<<?Pos(15,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o03 : oracle(Animal, Animal)` against the expected type `oracle(Cat, Animal)`">>,
<<?Pos(16,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o04 : oracle(Animal, Animal)` against the expected type `oracle(Cat, Cat)`">>,
<<?Pos(17,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `o05 : oracle(Animal, Cat)` against the expected type `oracle(Animal, Animal)`">>,
<<?Pos(19,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o07 : oracle(Animal, Cat)` against the expected type `oracle(Cat, Animal)`">>,
<<?Pos(20,13)
"Cannot unify `Cat` and `Animal` in a contravariant context\n"
"when checking the type of the pattern `o08 : oracle(Animal, Cat)` against the expected type `oracle(Cat, Cat)`">>,
<<?Pos(25,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `o13 : oracle(Cat, Cat)` against the expected type `oracle(Animal, Animal)`">>,
<<?Pos(27,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `o15 : oracle(Cat, Cat)` against the expected type `oracle(Cat, Animal)`">>,
<<?Pos(34,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q05 : oracle_query(Animal, Cat)` against the expected type `oracle_query(Animal, Animal)`">>,
<<?Pos(36,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q07 : oracle_query(Animal, Cat)` against the expected type `oracle_query(Cat, Animal)`">>,
<<?Pos(38,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q09 : oracle_query(Cat, Animal)` against the expected type `oracle_query(Animal, Animal)`">>,
<<?Pos(39,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q10 : oracle_query(Cat, Animal)` against the expected type `oracle_query(Animal, Cat)`">>,
<<?Pos(42,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q13 : oracle_query(Cat, Cat)` against the expected type `oracle_query(Animal, Animal)`">>,
<<?Pos(43,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q14 : oracle_query(Cat, Cat)` against the expected type `oracle_query(Animal, Cat)`">>,
<<?Pos(44,13)
"Cannot unify `Animal` and `Cat` in a covariant context\n"
"when checking the type of the pattern `q15 : oracle_query(Cat, Cat)` against the expected type `oracle_query(Cat, Animal)`">>
])
, ?TYPE_ERROR(polymorphism_variance_switching_chain_create_fail,
[<<?Pos(9,22)
"I is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>,
<<?Pos(10,13)
"Cannot unify `I` and `C` in a covariant context\n"
"when checking the type of the pattern `c2 : C` against the expected type `I`">>,
<<?Pos(10,22)
"I is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>,
<<?Pos(11,22)
"I is not implemented.\n"
"when resolving arguments of variadic function `Chain.create`">>
])
, ?TYPE_ERROR(missing_definition,
[<<?Pos(2,14)
"Missing definition of function `foo`">>
])
, ?TYPE_ERROR(child_with_decls,
[<<?Pos(2,14)
"Missing definition of function `f`">>
])
, ?TYPE_ERROR(parameterised_state,
[<<?Pos(3,8)
"The state type cannot be parameterized">>
])
, ?TYPE_ERROR(parameterised_event,
[<<?Pos(3,12)
"The event type cannot be parameterized">>
])
, ?TYPE_ERROR(missing_init_fun_alias_to_type,
[<<?Pos(1,10)
"Missing `init` function for the contract `AliasToType`.\n"
"The `init` function can only be omitted if the state type is `unit`">>
])
, ?TYPE_ERROR(missing_init_fun_alias_to_alias_to_type,
[<<?Pos(1,10)
"Missing `init` function for the contract `AliasToAliasToType`.\n"
"The `init` function can only be omitted if the state type is `unit`">>
])
, ?TYPE_ERROR(higher_order_entrypoint,
[<<?Pos(2,20)
"The argument\n"
" `f : (int) => int`\n"
"of entrypoint `apply` has a higher-order (contains function types) type">>
])
, ?TYPE_ERROR(higher_order_entrypoint_return,
[<<?Pos(2,3)
"The return type\n"
" `(int) => int`\n"
"of entrypoint `add` is higher-order (contains function types)">>
])
, ?TYPE_ERROR(polymorphic_aens_resolve,
[<<?Pos(4,5)
"Invalid return type of `AENS.resolve`:\n"
" `'a`\n"
"It must be a `string` or a pubkey type (`address`, `oracle`, etc)">>
])
, ?TYPE_ERROR(bad_aens_resolve,
[<<?Pos(6,5)
"Invalid return type of `AENS.resolve`:\n"
" `list(int)`\n"
"It must be a `string` or a pubkey type (`address`, `oracle`, etc)">>
])
, ?TYPE_ERROR(bad_aens_resolve_using,
[<<?Pos(7,5)
"Invalid return type of `AENS.resolve`:\n"
" `list(int)`\n"
"It must be a `string` or a pubkey type (`address`, `oracle`, etc)">>
])
, ?TYPE_ERROR(polymorphic_query_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle('a, 'b)`\n"
"The query type must not be polymorphic (contain type variables)">>,
<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle('a, 'b)`\n"
"The response type must not be polymorphic (contain type variables)">>
])
, ?TYPE_ERROR(polymorphic_response_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle(string, 'r)`\n"
"The response type must not be polymorphic (contain type variables)">>
])
, ?TYPE_ERROR(higher_order_query_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle((int) => int, string)`\n"
"The query type must not be higher-order (contain function types)">>
])
, ?TYPE_ERROR(higher_order_response_type,
[<<?Pos(3,5)
"Invalid oracle type\n"
" `oracle(string, (int) => int)`\n"
"The response type must not be higher-order (contain function types)">>
])
, ?TYPE_ERROR(var_args_unify_let,
[<<?Pos(3,9)
"Cannot infer types for variable argument list.\n"
"when checking the type of the pattern `x : 'a` against the expected type `(gas : int, value : int, protected : bool, ref : 'b, var_args) => 'b`">>
])
, ?TYPE_ERROR(var_args_unify_fun_call,
[<<?Pos(6,5)
"Cannot infer types for variable argument list.\n"
"when checking the application of\n"
" `g : (() => 'b) => 'b`\n"
"to arguments\n"
" `Chain.create : (value : int, var_args) => 'c`">>
])
, ?TYPE_ERROR(polymorphism_add_stateful_entrypoint,
[<<?Pos(5,25)
"`f` cannot be stateful because the entrypoint `f` in the interface `I` is not stateful">>
])
, ?TYPE_ERROR(polymorphism_change_entrypoint_to_function,
[<<?Pos(6,14)
"`f` must be declared as an entrypoint instead of a function in order to implement the entrypoint `f` from the interface `I`">>
])
, ?TYPE_ERROR(polymorphism_non_payable_contract_implement_payable,
[<<?Pos(4,10)
"Non-payable contract `C` cannot implement payable interface `I`">>
])
, ?TYPE_ERROR(polymorphism_non_payable_interface_implement_payable,
[<<?Pos(4,20)
"Non-payable interface `H` cannot implement payable interface `I`">>
])
, ?TYPE_ERROR(polymorphism_remove_payable_entrypoint,
[<<?Pos(5,16)
"`f` must be payable because the entrypoint `f` in the interface `I` is payable">>
])
, ?TYPE_ERROR(calling_child_contract_entrypoint,
[<<?Pos(5,20)
"Invalid call to contract entrypoint `F.g`.\n"
"It must be called as `c.g` for some `c : F`.">>])
, ?TYPE_ERROR(using_contract_as_namespace,
[<<?Pos(5,3)
"Cannot use undefined namespace F">>])
, ?TYPE_ERROR(hole_expression,
[<<?Pos(5,13)
"Found a hole of type `bool`">>,
<<?Pos(6,17)
"Found a hole of type `string`">>,
<<?Pos(9,37)
"Found a hole of type `(int) => int`">>,
<<?Pos(13,20)
"Found a hole of type `'a`">>
])
].
%% EUnit test generator: every pair from validation_fails/0 must be
%% rejected by validate/2 with exactly the expected errors, and every
%% compilable contract must validate successfully against itself.
validation_test_() ->
    Failing =
        [ {"Validation fail: " ++ A ++ " /= " ++ B,
           fun() ->
                   Got = case validate(A, B) of
                             {error, Errors} -> Errors;
                             ok              -> #{}
                         end,
                   check_errors(Expected, Got)
           end}
          || {A, B, Expected} <- validation_fails() ],
    Succeeding =
        [ {"Validation of " ++ Contract,
           fun() -> ?assertEqual(ok, validate(Contract, Contract)) end}
          || Contract <- compilable_contracts() ],
    Failing ++ Succeeding.
%% Table of {ByteCodeContract, SourceContract, ExpectedErrors} triples.
%% Each pair is expected to FAIL byte-code/source validation with the
%% given error message binaries (consumed by validation_test_/0).
validation_fails() ->
[{"deadcode", "nodeadcode",
[<<"Data error:\n"
"Byte code does not match source code.\n"
"- Functions in the source code but not in the byte code:\n"
" .MyList.map2">>]},
{"validation_test1", "validation_test2",
[<<"Data error:\n"
"Byte code does not match source code.\n"
"- The implementation of the function code_fail is different.\n"
"- The attributes of the function attr_fail differ:\n"
" Byte code: payable\n"
" Source code: \n"
"- The type of the function type_fail differs:\n"
" Byte code: integer => integer\n"
" Source code: {tvar,0} => {tvar,0}">>]},
{"validation_test1", "validation_test3",
[<<"Data error:\n"
"Byte code contract is not payable, but source code contract is.">>]}].
%% Compile Contract1 to byte code (with the init function stripped) and
%% check it against the source of Contract2 via
%% aeso_compiler:validate_byte_code/3.  Compilation errors are printed
%% and re-raised by print_and_throw/1.
validate(Contract1, Contract2) ->
    case compile(Contract1) of
        ByteCode = #{ fate_code := FCode } ->
            Stripped = aeb_fate_code:serialize(aeb_fate_code:strip_init_function(FCode)),
            Source   = aeso_test_utils:read_contract(Contract2),
            DebugOpts =
                case lists:member(Contract2, debug_mode_contracts()) of
                    true  -> [debug_mode];
                    false -> []
                end,
            IncludeOpts = [{include, {file_system, [aeso_test_utils:contract_path()]}}],
            aeso_compiler:validate_byte_code(
                ByteCode#{ byte_code := Stripped }, Source,
                DebugOpts ++ IncludeOpts);
        Error ->
            print_and_throw(Error)
    end.
%% Print a compilation failure and abort.  A binary is printed verbatim
%% and raised as the error reason; a list of error values is
%% pretty-printed and raised as `compilation_error'.
print_and_throw(Err) when is_binary(Err) ->
    io:format("\n~s", [Err]),
    error(Err);
print_and_throw(Errors) ->
    io:format("Compilation error:\n~s",
              [string:join([aeso_errors:pp(E) || E <- Errors], "\n\n")]),
    error(compilation_error).
|
0ce594b1c2431715ec3cb15f49977c6eda01e7d777861dbc1883bacebe85e2ec | redclawtech/vmq_cloudwatch_metrics | vmq_cloudwatch_metrics_app.erl | Copyright 2018 Dairon ( )
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @doc OTP `application' behaviour callback module for the
%% vmq_cloudwatch_metrics plugin: started and stopped by the Erlang
%% application controller.
-module(vmq_cloudwatch_metrics_app).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
%%====================================================================
%% API
%%====================================================================
%% @doc Application `start/2' callback.  Delegates to the plugin's
%% top-level supervisor and returns its `{ok, Pid}' result.
start(_StartType, _StartArgs) ->
vmq_cloudwatch_metrics_sup:start_link().
stop(_State) ->
ok. | null | https://raw.githubusercontent.com/redclawtech/vmq_cloudwatch_metrics/8392c3c16358b591fe0fc92fc16ac44143565d00/src/vmq_cloudwatch_metrics_app.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Application callbacks
====================================================================
API
==================================================================== | Copyright 2018 Dairon ( )
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(vmq_cloudwatch_metrics_app).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
vmq_cloudwatch_metrics_sup:start_link().
stop(_State) ->
ok. |
5df7edda09daedb1b82d192cb3717eeed4fb2e6c453760487ab21c5742c52359 | PeterDWhite/Osker | CoreData.hs | Copyright ( c ) , 2002 , 2003
Copyright ( c ) OHSU , 2002 , 2003
module CoreData
( FSData (..) -- The state maintained by the file system core
, initFSData -- Initializer for the file system core state
) where
----------------------------------------------------------------------
-- The file system core device driver data structures
----------------------------------------------------------------------
Osker imports
import qualified DirectoryCache as DC
-- | The data maintained by the file system core device driver: the
-- file system's name together with its directory cache.
data FSData = FSData { fileSystem :: String -- ^ Name of the file system
, directoryCache :: DC.DirectoryCache -- ^ Cache of directory entries
}
-- | Build the initial core state for a file system with the given
-- name; the directory cache starts out as DC.dirInitCache.
initFSData :: String -> FSData
initFSData name =
    FSData { fileSystem     = name
           , directoryCache = DC.dirInitCache
           }
| null | https://raw.githubusercontent.com/PeterDWhite/Osker/301e1185f7c08c62c2929171cc0469a159ea802f/Kernel/FileSystem/CoreData.hs | haskell | The state maintained by the file system core
Initializer for the file system core state
--------------------------------------------------------------------
The file system core device driver data structures
--------------------------------------------------------------------
The data maintained by the file system core device driver | Copyright ( c ) , 2002 , 2003
Copyright ( c ) OHSU , 2002 , 2003
module CoreData
) where
Osker imports
import qualified DirectoryCache as DC
data FSData = FSData { fileSystem :: String
, directoryCache :: DC.DirectoryCache
}
initFSData :: String -> FSData
initFSData fsName =
FSData { fileSystem = fsName
, directoryCache = DC.dirInitCache
}
|
d97d9979d5258286b0daeea9909bb296a403f2ac6d49d27cac3a6326986ad8fb | TorXakis/TorXakis | SMTInternal.hs |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
-- ----------------------------------------------------------------------------------------- --
{-# LANGUAGE OverloadedStrings #-}
module SMTInternal
-- ------------------------------------------------------------
SMT Internal should not be included directly in production code .
SMT Internal contains all non - interface SMT functions .
-- Some of these functions are used for test purposes
-- ----------------------------------------------------------------------------------------- --
-- import
where
import Control.Concurrent
import Control.Exception (onException)
import Control.Monad.State (get, gets, lift, modify, unless)
import qualified Data.List as List
import qualified Data.Map as Map
import Data.Monoid
import Data.String.Utils (endswith, replace, startswith, strip)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import System.Exit
import System.IO
import System.Process
import Constant
import SMT2TXS
import SMTAlex
import SMTData
import SMTHappy
import SolveDefs
import TXS2SMT
import ValExpr
import Variable
-- ----------------------------------------------------------------------------------------- --
opens a connection to the SMTLIB interactive shell
-- defines the logic and sets the appropriate options
and returns a handle for SMT
-- ----------------------------------------------------------------------------------------- --
-- | Hand-shake with the freshly started solver: query its name and
-- version, send the initial configuration ('SMTInternal.init') and
-- open the first assertion scope ('push').
-- Returns a human-readable "name [version]" banner.
openSolver :: SMT String
openSolver = do
n <- getInfo "name"
v <- getInfo "version"
SMTInternal.init
push
return $ n ++ " [" ++ v ++ "]"
-- ----------------------------------------------------------------------------------------- --
close the connection to the SMTLIB interactive shell
-- ----------------------------------------------------------------------------------------- --
-- | Send "(exit)" to the solver, close the log file (if one was
-- opened) and wait for the solver process to terminate; raises via
-- 'error' if the process exits with a non-zero code.
close :: SMT ()
close = do
put "(exit)"
st <- get
case logFileHandle st of
Nothing -> return ()
Just lfh -> lift $ hClose lfh
ec <- lift $ waitForProcess (smtProcessHandle st)
case ec of
ExitSuccess -> return ()
ExitFailure c -> error ("Smt Solver exited with error code " ++ show c)
-- ----------------------------------------------------------------------------------------- --
-- push
-- ----------------------------------------------------------------------------------------- --
-- | Open a new assertion scope in the solver ("(push 1)").
push :: SMT ()
push = put "(push 1)"
-- ----------------------------------------------------------------------------------------- --
-- pop
-- ----------------------------------------------------------------------------------------- --
-- | Discard the most recent assertion scope ("(pop 1)").
pop :: SMT ()
pop = put "(pop 1)"
-- ----------------------------------------------------------------------------------------- --
SMT communication functions via process fork
-- ----------------------------------------------------------------------------------------- --
-- | Spawn the external SMT solver process with piped
-- stdin/stdout/stderr, make the handles unbuffered latin1, optionally
-- open a timestamped log file, start a thread that forwards the
-- solver's stderr to our stderr, and return the initial 'SmtEnv'.
createSMTEnv :: CreateProcess -> Bool -> IO SmtEnv
createSMTEnv cmd lgFlag = do
(Just hin, Just hout, Just herr, ph) <- createProcess cmd
{ std_out = CreatePipe
, std_in = CreatePipe
, std_err = CreatePipe
}
hSetBuffering hin NoBuffering -- alternative: LineBuffering
hSetBuffering hout NoBuffering
hSetBuffering herr NoBuffering
hSetEncoding hin latin1
hSetEncoding hout latin1
hSetEncoding herr latin1
-- open SMT logging file; name derived from the local start time with
-- characters unsafe in file names replaced by '-'
lg <- if lgFlag
then do timeZone <- getCurrentTimeZone
startTime <- getCurrentTime
let timeString = replace ":" "-" $
replace " " "-" $
show (utcToLocalTime timeZone startTime) in do
h <- openFile ("logSMT." ++ timeString ++ ".smt2") WriteMode
hSetBuffering h NoBuffering
hSetEncoding h latin1
return $ Just h
else return Nothing
--ein <- hGetEncoding hin
--eout <- hGetEncoding hout
-- NOTE(review): the following lines appear to be disabled debug/newline
-- experiments whose comment markers were lost in extraction — confirm.
-- Trace.trace ( " hin encoding = " + + show ein + + " \n " + +
-- "hout encoding = " ++ show eout ++ "\n" ++
-- " encoding = " + + show eerr + + " \n " )
-- ( NewlineMode { inputNL = LF , outputNL = CRLF } )
-- hSetNewlineMode hout ( NewlineMode { inputNL = CRLF , outputNL = LF } )
-- handle warning messages of ( over herr )
-- note : errors in SMT are reported over hout with the response " ( error < string > ) "
_ <- forkIO (showErrors herr "SMT WARN >> ")
return (SmtEnv hin
hout
ph
lg
initialEnvNames
(EnvDefs Map.empty Map.empty Map.empty)
)
-- ----------------------------------------------------------------------------------------- --
-- addDefinitions
-- ----------------------------------------------------------------------------------------- --
-- | Declare the sorts, constructors and functions of the given
-- definitions in the solver, skipping anything whose name is already
-- known, then record the extended name mapping and the merged
-- definitions in the state.
addDefinitions :: EnvDefs -> SMT ()
addDefinitions edefs = do
enames <- gets envNames
-- exclude earlier defined sorts, e.g. for the pre-defined data types,
-- such as , Int , and , because we use the equivalent SMTLIB built - in types
let newSorts = Map.filterWithKey (\k _ -> Map.notMember k (sortNames enames)) (sortDefs edefs)
let snames = foldr insertSort enames (Map.toList newSorts)
-- constructors of sort introduce functions
let newCstrs = Map.filterWithKey (\k _ -> Map.notMember k (cstrNames snames)) (cstrDefs edefs)
let cnames = foldr insertCstr snames (Map.toList newCstrs)
putT ( sortdefsToSMT cnames (EnvDefs newSorts newCstrs Map.empty) )
put "\n\n"
let newFuncs = Map.filterWithKey (\k _ -> Map.notMember k (funcNames cnames)) (funcDefs edefs)
let fnames = foldr insertFunc cnames (Map.toList newFuncs)
putT ( funcdefsToSMT fnames newFuncs )
put "\n\n"
original <- gets envDefs
modify ( \e -> e { envNames = fnames
, envDefs = EnvDefs (Map.union (sortDefs original) (sortDefs edefs))
(Map.union (cstrDefs original) (cstrDefs edefs))
(Map.union (funcDefs original) (funcDefs edefs))
}
)
-- use union to be certain all definitions remain included
-- --------------------------------------------------------------------------------------------
-- addDeclarations
-- --------------------------------------------------------------------------------------------
-- | Emit SMT variable declarations for the given variables; a no-op
-- for an empty list.
addDeclarations :: (Variable v) => [v] -> SMT ()
addDeclarations []   = return ()
addDeclarations vars = do
    names <- gets envNames
    putT (declarationsToSMT names vars)
-- ----------------------------------------------------------------------------------------- --
addAssertions
-- ----------------------------------------------------------------------------------------- --
-- | Assert the given boolean value expressions in the solver's
-- current scope.
addAssertions :: (Variable v) => [ValExpr v] -> SMT ()
addAssertions exprs = do
    names <- gets envNames
    putT (assertionsToSMT names exprs)
-- ----------------------------------------------------------------------------------------- --
-- ----------------------------------------------------------------------------------------- --
-- | Send "(check-sat)" and map the solver's textual answer onto
-- 'SolvableProblem'; any unexpected answer is a fatal error.
getSolvable :: SMT SolvableProblem
getSolvable = do
    put "(check-sat)"
    answer <- getSMTresponse
    case answer of
      "sat"     -> return Sat
      "unsat"   -> return Unsat
      "unknown" -> return Unknown
      other     -> error ("SMT checkSat: Unexpected result '" ++ other ++ "'")
-- ----------------------------------------------------------------------------------------- --
-- getSolution
-- ----------------------------------------------------------------------------------------- --
-- | Ask the solver for the values of the given variables (meaningful
-- after a sat answer) and translate each SMT value back into a
-- 'Constant' of the variable's sort.  Empty input yields the empty map.
getSolution :: (Variable v) => [v] -> SMT (Solution v)
getSolution [] = return Map.empty
getSolution vs = do
putT ("(get-value (" <> T.intercalate " " (map vname vs) <>"))")
s <- getSMTresponse
-- parse the response into a map from variable name to SMT value
let vnameSMTValueMap = Map.mapKeys T.pack . smtParser . smtLexer $ s
edefs <- gets envDefs
return $ Map.fromList (map (toConst edefs vnameSMTValueMap) vs)
where
-- look up one variable's SMT value and convert it to a Constant;
-- errors if the solver omitted the variable or the value won't parse
toConst :: (Variable v) => EnvDefs -> Map.Map Text SMTValue -> v -> (v, Constant)
toConst edefs mp v = case Map.lookup (vname v) mp of
Just smtValue -> case smtValueToValExpr smtValue (cstrDefs edefs) (vsort v) of
Left t -> error $ "getSolution - SMT parse error:\n" ++ t
Right val -> (v,val)
Nothing -> error "getSolution - SMT hasn't returned the value of requested variable."
-- ------------------------------------------
get SMT info
-- ------------------------------------------
-- | Send (get-info :<info>) and extract the quoted value from the
-- response, which must have the shape (:<info> "<value>"); any
-- deviation from that shape is reported via 'error'.
getInfo :: String -> SMT String
getInfo info = do
put ("(get-info :" ++ info ++ ")")
s <- getSMTresponse
let list = strip s in
if startswith "(" list && endswith ")" list
then
-- drop the outer parentheses, then the ":<info>" keyword
let tuple = strip (List.init . tail $ list) in
case List.stripPrefix (":" ++ info) tuple of
Just res -> let str = strip res in
if startswith "\"" str && endswith "\"" str
then return $ List.init . tail $ str
else error ("SMT response violates quotes in pattern.\nExpected (:" ++ info ++ " \"<name>\")\nActual "++ list)
Nothing -> error ("SMT response violates info in pattern.\nExpected (:" ++ info ++ " \"<name>\")\nActual "++ list)
else
error ("SMT response violates brackets in pattern.\nExpected (:" ++ info ++ " \"<name>\")\nActual "++ list)
-- ----------------------------------------------------------------------------------------- --
-- init
-- ----------------------------------------------------------------------------------------- --
-- | Send the initial solver configuration: enable model production,
-- select the ALL logic, declare the SMT-LIB version, and emit the
-- shared basic definitions.
init :: SMT ()
init = do
put "(set-option :produce-models true)"
put "(set-logic ALL)"
put "(set-info :smt-lib-version 2.5)"
putT basicDefinitionsSMT
return ()
-- | Send one command line to the SMT solver's stdin, echoing it to
-- the log file first when logging is enabled.
put :: String -> SMT ()
put cmd = do
    env <- get
    case logFileHandle env of
      Nothing -> return ()
      Just h  -> lift (hPutStrLn h cmd)
    lift (hPutStrLn (inHandle env) cmd)
-- | 'put' for strict 'Text': unpack and send to the solver.
putT :: Text -> SMT ()
putT = put . T.unpack
-- | Render a value expression as SMT-LIB text using the current
-- name mapping from the state.
valExprToString :: Variable v => ValExpr v -> SMT Text
valExprToString expr = do
    names <- gets envNames
    return (valexprToSMT names expr)
-- ----------------------------------------------------------------------------------------- --
-- | Copy every line readable from the given handle to stderr,
-- prefixed with the given tag; stops at end-of-file.  Used to surface
-- the solver's warning/error stream.
showErrors :: Handle -> String -> IO ()
showErrors h prefix = loop
  where
    loop = do
        atEof <- hIsEOF h
        unless atEof $ do
            msg <- hGetLine h
            hPutStrLn stderr (prefix ++ msg)
            loop
-- ----------------------------------------------------------------------------------------- --
-- | Read lines from the handle until the parentheses seen so far
-- balance out (parentheses inside SMT string literals do not count,
-- see 'countBracket').  Lines are concatenated without separators;
-- blocks until data can be read.
getResponse :: Handle -> Integer -> IO String
getResponse h open = do
    line <- hGetLine h
    case open + countBracket line of
      0     -> return line
      open' -> do
          rest <- getResponse h open'
          return (line ++ rest)
-- | Read one complete (bracket-balanced) response from the solver's
-- stdout; if reading fails, report the solver's exit status and abort.
getSMTresponse :: SMT String
getSMTresponse = do
hout <- gets outHandle
ph <- gets smtProcessHandle
lift $ getResponse hout 0 `onException` exitWithError ph
where
exitWithError :: ProcessHandle -> IO ()
exitWithError procHandle = do
ec <- getProcessExitCode procHandle
-- The output and error handles of the SMT process are closed when this
-- error occurs (maybe because they're pipes?) so no information can be
-- given to the user, other than this.
putStrLn $ "getSMTresponse: SMT exited with status: " ++ show ec
error "getSMTresponse: Could not get a response from the solver"
-- | Net parenthesis count of an SMT response fragment: +1 per '(',
-- -1 per ')'.  Parentheses inside SMT-LIB string literals are ignored;
-- an opening double quote switches to string-scanning mode.
countBracket :: String -> Integer
countBracket ('"':rest) = skipCountInsideString rest -- entering a string literal
countBracket ('(':rest) = countBracket rest + 1
countBracket (')':rest) = countBracket rest - 1
countBracket (_  :rest) = countBracket rest
countBracket []         = 0

-- | Scan the remainder of an SMT-LIB string literal.  A doubled quote
-- ("") is the SMT-LIB escape for a quote character and stays inside
-- the string; a single quote ends it and bracket counting resumes.
skipCountInsideString :: String -> Integer
skipCountInsideString ('"':'"':rest) = skipCountInsideString rest
skipCountInsideString ('"':rest)     = countBracket rest
skipCountInsideString (_  :rest)     = skipCountInsideString rest
skipCountInsideString []             = 0
| null | https://raw.githubusercontent.com/TorXakis/TorXakis/038463824b3d358df6b6b3ff08732335b7dbdb53/sys/solve/src/SMTInternal.hs | haskell | ----------------------------------------------------------------------------------------- --
# LANGUAGE OverloadedStrings #
------------------------------------------------------------
Some of these functions are used for test purposes
----------------------------------------------------------------------------------------- --
import
----------------------------------------------------------------------------------------- --
defines the logic and sets the appropriate options
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
push
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
pop
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
alternative: LineBuffering
ein <- hGetEncoding hin
eout <- hGetEncoding hout
"hout encoding = " ++ show eout ++ "\n" ++
----------------------------------------------------------------------------------------- --
addDefinitions
----------------------------------------------------------------------------------------- --
exclude earlier defined sorts, e.g. for the pre-defined data types,
constructors of sort introduce functions
use union to be certain all definitions remain included
--------------------------------------------------------------------------------------------
addDeclarations
--------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
getSolution
----------------------------------------------------------------------------------------- --
------------------------------------------
------------------------------------------
----------------------------------------------------------------------------------------- --
init
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
return error messages if any are present
where the given prefix is prepended to every line
----------------------------------------------------------------------------------------- --
read the response (as lines) from the handle
this operation is blocking until some data can be read
error occurs (maybe because they're pipes?) so no information can be
given to the user, other than this.
ignore brackets inside strings
escape quote, stay in string
outside string |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
module SMTInternal
SMT Internal should not be included directly in production code .
SMT Internal contains all non - interface SMT functions .
where
import Control.Concurrent
import Control.Exception (onException)
import Control.Monad.State (get, gets, lift, modify, unless)
import qualified Data.List as List
import qualified Data.Map as Map
import Data.Monoid
import Data.String.Utils (endswith, replace, startswith, strip)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import System.Exit
import System.IO
import System.Process
import Constant
import SMT2TXS
import SMTAlex
import SMTData
import SMTHappy
import SolveDefs
import TXS2SMT
import ValExpr
import Variable
opens a connection to the SMTLIB interactive shell
and returns a handle for SMT
openSolver :: SMT String
openSolver = do
n <- getInfo "name"
v <- getInfo "version"
SMTInternal.init
push
return $ n ++ " [" ++ v ++ "]"
close the connection to the SMTLIB interactive shell
close :: SMT ()
close = do
put "(exit)"
st <- get
case logFileHandle st of
Nothing -> return ()
Just lfh -> lift $ hClose lfh
ec <- lift $ waitForProcess (smtProcessHandle st)
case ec of
ExitSuccess -> return ()
ExitFailure c -> error ("Smt Solver exited with error code " ++ show c)
push :: SMT ()
push = put "(push 1)"
pop :: SMT ()
pop = put "(pop 1)"
SMT communication functions via process fork
createSMTEnv :: CreateProcess -> Bool -> IO SmtEnv
createSMTEnv cmd lgFlag = do
(Just hin, Just hout, Just herr, ph) <- createProcess cmd
{ std_out = CreatePipe
, std_in = CreatePipe
, std_err = CreatePipe
}
hSetBuffering hout NoBuffering
hSetBuffering herr NoBuffering
hSetEncoding hin latin1
hSetEncoding hout latin1
hSetEncoding herr latin1
open SMT logging file
lg <- if lgFlag
then do timeZone <- getCurrentTimeZone
startTime <- getCurrentTime
let timeString = replace ":" "-" $
replace " " "-" $
show (utcToLocalTime timeZone startTime) in do
h <- openFile ("logSMT." ++ timeString ++ ".smt2") WriteMode
hSetBuffering h NoBuffering
hSetEncoding h latin1
return $ Just h
else return Nothing
Trace.trace ( " hin encoding = " + + show ein + + " \n " + +
" encoding = " + + show eerr + + " \n " )
( NewlineMode { inputNL = LF , outputNL = CRLF } )
hSetNewlineMode hout ( NewlineMode { inputNL = CRLF , outputNL = LF } )
handle warning messages of ( over herr )
note : errors in SMT are reported over hout with the response " ( error < string > ) "
_ <- forkIO (showErrors herr "SMT WARN >> ")
return (SmtEnv hin
hout
ph
lg
initialEnvNames
(EnvDefs Map.empty Map.empty Map.empty)
)
addDefinitions :: EnvDefs -> SMT ()
addDefinitions edefs = do
enames <- gets envNames
such as , Int , and , because we use the equivalent SMTLIB built - in types
let newSorts = Map.filterWithKey (\k _ -> Map.notMember k (sortNames enames)) (sortDefs edefs)
let snames = foldr insertSort enames (Map.toList newSorts)
let newCstrs = Map.filterWithKey (\k _ -> Map.notMember k (cstrNames snames)) (cstrDefs edefs)
let cnames = foldr insertCstr snames (Map.toList newCstrs)
putT ( sortdefsToSMT cnames (EnvDefs newSorts newCstrs Map.empty) )
put "\n\n"
let newFuncs = Map.filterWithKey (\k _ -> Map.notMember k (funcNames cnames)) (funcDefs edefs)
let fnames = foldr insertFunc cnames (Map.toList newFuncs)
putT ( funcdefsToSMT fnames newFuncs )
put "\n\n"
original <- gets envDefs
modify ( \e -> e { envNames = fnames
, envDefs = EnvDefs (Map.union (sortDefs original) (sortDefs edefs))
(Map.union (cstrDefs original) (cstrDefs edefs))
(Map.union (funcDefs original) (funcDefs edefs))
}
)
addDeclarations :: (Variable v) => [v] -> SMT ()
addDeclarations [] = return ()
addDeclarations vs = do
mapI <- gets envNames
putT ( declarationsToSMT mapI vs )
return ()
addAssertions
addAssertions :: (Variable v) => [ValExpr v] -> SMT ()
addAssertions vexps = do
mapI <- gets envNames
putT ( assertionsToSMT mapI vexps )
return ()
getSolvable :: SMT SolvableProblem
getSolvable = do
put "(check-sat)"
s <- getSMTresponse
return $ case s of
"sat" -> Sat
"unsat" -> Unsat
"unknown" -> Unknown
_ -> error ("SMT checkSat: Unexpected result '"++ s ++ "'")
getSolution :: (Variable v) => [v] -> SMT (Solution v)
getSolution [] = return Map.empty
getSolution vs = do
putT ("(get-value (" <> T.intercalate " " (map vname vs) <>"))")
s <- getSMTresponse
let vnameSMTValueMap = Map.mapKeys T.pack . smtParser . smtLexer $ s
edefs <- gets envDefs
return $ Map.fromList (map (toConst edefs vnameSMTValueMap) vs)
where
toConst :: (Variable v) => EnvDefs -> Map.Map Text SMTValue -> v -> (v, Constant)
toConst edefs mp v = case Map.lookup (vname v) mp of
Just smtValue -> case smtValueToValExpr smtValue (cstrDefs edefs) (vsort v) of
Left t -> error $ "getSolution - SMT parse error:\n" ++ t
Right val -> (v,val)
Nothing -> error "getSolution - SMT hasn't returned the value of requested variable."
get SMT info
getInfo :: String -> SMT String
getInfo info = do
put ("(get-info :" ++ info ++ ")")
s <- getSMTresponse
let list = strip s in
if startswith "(" list && endswith ")" list
then
let tuple = strip (List.init . tail $ list) in
case List.stripPrefix (":" ++ info) tuple of
Just res -> let str = strip res in
if startswith "\"" str && endswith "\"" str
then return $ List.init . tail $ str
else error ("SMT response violates quotes in pattern.\nExpected (:" ++ info ++ " \"<name>\")\nActual "++ list)
Nothing -> error ("SMT response violates info in pattern.\nExpected (:" ++ info ++ " \"<name>\")\nActual "++ list)
else
error ("SMT response violates brackets in pattern.\nExpected (:" ++ info ++ " \"<name>\")\nActual "++ list)
init :: SMT ()
init = do
put "(set-option :produce-models true)"
put "(set-logic ALL)"
put "(set-info :smt-lib-version 2.5)"
putT basicDefinitionsSMT
return ()
| execute the SMT command given as a string
put :: String -> SMT ()
put cmd = do
st <- get
let lg = logFileHandle st
hin = inHandle st
in do
case lg of
Nothing -> return ()
Just h -> lift $ hPutStrLn h cmd
lift $ hPutStrLn hin cmd
putT :: Text -> SMT ()
putT = put . T.unpack
| Transform value expression to an SMT string .
valExprToString :: Variable v => ValExpr v -> SMT Text
valExprToString v = do
mapI <- gets envNames
return $ valexprToSMT mapI v
showErrors :: Handle -> String -> IO ()
showErrors h prefix = do
s <- hIsEOF h
unless s $ do
msg <- hGetLine h
hPutStrLn stderr (prefix ++ msg)
showErrors h prefix
getResponse :: Handle -> Integer -> IO String
getResponse h count = do
s <- hGetLine h
let newCount = count + countBracket s in
if 0 == newCount
then return s
else do
tl <- getResponse h newCount
return $ s ++ tl
getSMTresponse :: SMT String
getSMTresponse = do
hout <- gets outHandle
ph <- gets smtProcessHandle
lift $ getResponse hout 0 `onException` exitWithError ph
where
exitWithError :: ProcessHandle -> IO ()
exitWithError procHandle = do
ec <- getProcessExitCode procHandle
The output and error handles of the SMT process are closed when this
putStrLn $ "getSMTresponse: SMT exited with status: " ++ show ec
error "getSMTresponse: Could not get a response from the solver"
countBracket :: String -> Integer
countBracket ('(':xs) = 1 + countBracket xs
countBracket (')':xs) = -1 + countBracket xs
countBracket (_:xs) = countBracket xs
countBracket [] = 0
skipCountInsideString :: String -> Integer
skipCountInsideString (_:xs) = skipCountInsideString xs
skipCountInsideString [] = 0
|
391d628358e0e4dde18a51c442d2deb402923289f9222546d54cf20878b002e3 | mzp/coq-for-ipad | odoc_dot.ml | (***********************************************************************)
(* Ocamldoc *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : odoc_dot.ml 7619 2006 - 09 - 20 11:14:37Z doligez $
(** Definition of a class which outputs a dot file showing
top modules dependencies.*)
open Odoc_info
module F = Format
(** This class generates a dot file showing the top modules dependencies. *)
class dot =
object (self)
(** Colors already assigned to source locations (directories) of
modules: association list from location to dot color name. *)
val mutable loc_colors = []
(** The list of modules we know; used to restrict printed dependencies. *)
val mutable modules = []
(** Colors still available for newly encountered module locations. *)
val mutable colors = !Args.dot_colors
(** Graph header. *)
method header =
"digraph G {\n"^
" size=\"10,7.5\";\n"^
" ratio=\"fill\";\n"^
" rotate=90;\n"^
" fontsize=\"12pt\";\n"^
" rankdir = TB ;\n"
(** Take the next available color, if any, removing it from [colors]. *)
method get_one_color =
match colors with
[] -> None
| h :: q ->
colors <- q ;
Some h
(** Color for location [s]: reuse a remembered association, otherwise
draw a fresh color and remember it. *)
method node_color s =
try Some (List.assoc s loc_colors)
with
Not_found ->
match self#get_one_color with
None -> None
| Some c ->
loc_colors <- (s, c) :: loc_colors ;
Some c
(** Print the dot node attributes (fill color) for module [m]. *)
method print_module_atts fmt m =
match self#node_color (Filename.dirname m.Module.m_file) with
None -> ()
| Some col -> F.fprintf fmt "\"%s\" [style=filled, color=%s];\n" m.Module.m_name col
(** Print the dot node attributes (fill color) for type [t]. *)
method print_type_atts fmt t =
match self#node_color (Name.father t.Type.ty_name) with
None -> ()
| Some col -> F.fprintf fmt "\"%s\" [style=filled, color=%s];\n" t.Type.ty_name col
(** Print one dot edge from [src] to [dest]. *)
method print_one_dep fmt src dest =
F.fprintf fmt "\"%s\" -> \"%s\";\n" src dest
(** Print the node of module [m] and its outgoing edges; dependencies
are kept only if they are known modules, unless dot_include_all is set. *)
method generate_for_module fmt m =
let l = List.filter
(fun n ->
!Args.dot_include_all or
(List.exists (fun m -> m.Module.m_name = n) modules))
m.Module.m_top_deps
in
self#print_module_atts fmt m;
List.iter (self#print_one_dep fmt m.Module.m_name) l
(** Print the node of type [t] and one edge per dependency in [l]. *)
method generate_for_type fmt (t, l) =
self#print_type_atts fmt t;
List.iter
(self#print_one_dep fmt t.Type.ty_name)
l
(** Write the dot graph of type dependencies to the output file;
[Sys_error] is re-raised as [Failure]. *)
method generate_types types =
try
let oc = open_out !Args.out_file in
let fmt = F.formatter_of_out_channel oc in
F.fprintf fmt "%s" self#header;
let graph = Odoc_info.Dep.deps_of_types
~kernel: !Args.dot_reduce
types
in
List.iter (self#generate_for_type fmt) graph;
F.fprintf fmt "}\n" ;
F.pp_print_flush fmt ();
close_out oc
with
Sys_error s ->
raise (Failure s)
(** Write the dot graph of module dependencies to the output file;
[Sys_error] is re-raised as [Failure]. *)
method generate_modules modules_list =
try
modules <- modules_list ;
let oc = open_out !Args.out_file in
let fmt = F.formatter_of_out_channel oc in
F.fprintf fmt "%s" self#header;
if !Args.dot_reduce then
Odoc_info.Dep.kernel_deps_of_modules modules_list;
List.iter (self#generate_for_module fmt) modules_list;
F.fprintf fmt "}\n" ;
F.pp_print_flush fmt ();
close_out oc
with
Sys_error s ->
raise (Failure s)
(** Generate the dot code in the output file: type dependencies when
dot_types is set, module dependencies otherwise. *)
method generate (modules_list : Odoc_info.Module.t_module list) =
colors <- !Args.dot_colors;
if !Args.dot_types then
self#generate_types (Odoc_info.Search.types modules_list)
else
self#generate_modules modules_list
end
| null | https://raw.githubusercontent.com/mzp/coq-for-ipad/4fb3711723e2581a170ffd734e936f210086396e/src/ocaml-3.12.0/ocamldoc/odoc_dot.ml | ocaml | *********************************************************************
Ocamldoc
*********************************************************************
* Definition of a class which outputs a dot file showing
top modules dependencies.
* This class generates a dot file showing the top modules dependencies.
* To store the colors associated to locations of modules.
* the list of modules we know.
* Colors to use when finding new locations of modules.
* Graph header. | , projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : odoc_dot.ml 7619 2006 - 09 - 20 11:14:37Z doligez $
open Odoc_info
module F = Format
class dot =
object (self)
val mutable loc_colors = []
val mutable modules = []
val mutable colors = !Args.dot_colors
method header =
"digraph G {\n"^
" size=\"10,7.5\";\n"^
" ratio=\"fill\";\n"^
" rotate=90;\n"^
" fontsize=\"12pt\";\n"^
" rankdir = TB ;\n"
method get_one_color =
match colors with
[] -> None
| h :: q ->
colors <- q ;
Some h
method node_color s =
try Some (List.assoc s loc_colors)
with
Not_found ->
match self#get_one_color with
None -> None
| Some c ->
loc_colors <- (s, c) :: loc_colors ;
Some c
method print_module_atts fmt m =
match self#node_color (Filename.dirname m.Module.m_file) with
None -> ()
| Some col -> F.fprintf fmt "\"%s\" [style=filled, color=%s];\n" m.Module.m_name col
method print_type_atts fmt t =
match self#node_color (Name.father t.Type.ty_name) with
None -> ()
| Some col -> F.fprintf fmt "\"%s\" [style=filled, color=%s];\n" t.Type.ty_name col
method print_one_dep fmt src dest =
F.fprintf fmt "\"%s\" -> \"%s\";\n" src dest
method generate_for_module fmt m =
let l = List.filter
(fun n ->
!Args.dot_include_all or
(List.exists (fun m -> m.Module.m_name = n) modules))
m.Module.m_top_deps
in
self#print_module_atts fmt m;
List.iter (self#print_one_dep fmt m.Module.m_name) l
method generate_for_type fmt (t, l) =
self#print_type_atts fmt t;
List.iter
(self#print_one_dep fmt t.Type.ty_name)
l
method generate_types types =
try
let oc = open_out !Args.out_file in
let fmt = F.formatter_of_out_channel oc in
F.fprintf fmt "%s" self#header;
let graph = Odoc_info.Dep.deps_of_types
~kernel: !Args.dot_reduce
types
in
List.iter (self#generate_for_type fmt) graph;
F.fprintf fmt "}\n" ;
F.pp_print_flush fmt ();
close_out oc
with
Sys_error s ->
raise (Failure s)
method generate_modules modules_list =
try
modules <- modules_list ;
let oc = open_out !Args.out_file in
let fmt = F.formatter_of_out_channel oc in
F.fprintf fmt "%s" self#header;
if !Args.dot_reduce then
Odoc_info.Dep.kernel_deps_of_modules modules_list;
List.iter (self#generate_for_module fmt) modules_list;
F.fprintf fmt "}\n" ;
F.pp_print_flush fmt ();
close_out oc
with
Sys_error s ->
raise (Failure s)
* Generate the dot code in the file { ! } .
method generate (modules_list : Odoc_info.Module.t_module list) =
colors <- !Args.dot_colors;
if !Args.dot_types then
self#generate_types (Odoc_info.Search.types modules_list)
else
self#generate_modules modules_list
end
|
d3e5b6ca7ff4c48c499f9e44a25640a03517a219ec75a8a020d702567e3bc63e | oliyh/re-partee | macros.cljc | (ns re-partee.fake.macros
#?(:clj (:require [clojure.edn :as edn]
[clojure.java.io :as io])))
#?(:clj (defmacro defdata [sym resource-name]
(let [data (edn/read-string (slurp (io/resource resource-name)))]
`(def ~sym ~data))))
| null | https://raw.githubusercontent.com/oliyh/re-partee/8ab2b4301c518ee9448157f4249e33ad09ff2fc7/src/re_partee/fake/macros.cljc | clojure | (ns re-partee.fake.macros
#?(:clj (:require [clojure.edn :as edn]
[clojure.java.io :as io])))
#?(:clj (defmacro defdata [sym resource-name]
(let [data (edn/read-string (slurp (io/resource resource-name)))]
`(def ~sym ~data))))
| |
83c6afec668425d35c52b947b82c7e0619084bd272e306d09f50b716424a020a | ku-fpg/haskino | IntExample.hs | -------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Haskino.SamplePrograms.Deep.intExample
Copyright : ( c ) University of Kansas
-- License : BSD3
-- Stability : experimental
--
This is an example of using semaphores to communicate between two tasks .
One task gives a semaphore then delays for 2 seconds . The other task
waits for the semaphore then blinks the led rapidly 3 times .
-------------------------------------------------------------------------------
module System.Hardware.Haskino.SamplePrograms.Deep.IntExample where
import Prelude hiding ((<*))
import Control.Concurrent (threadDelay)
import Control.Monad.Trans (liftIO)
import Data.Boolean
import Data.Boolean.Numbers
import Data.Word
import System.Hardware.Haskino
blinkDelay :: Expr Word32
blinkDelay = 125
semId :: Expr Word8
semId = 0
myTask :: Expr Word8 -> Arduino ()
myTask led =
loopE $ do
takeSemE semId
digitalWriteE led true
delayMillisE blinkDelay
digitalWriteE led false
delayMillisE blinkDelay
intTask :: Arduino ()
intTask = giveSemE semId
initIntExample :: Arduino ()
initIntExample = do
let led = 13
setPinModeE led OUTPUT
let button = 2
setPinModeE button INPUT
let myTaskId = 1
let intTaskId = 2
-- Create the tasks
createTaskE myTaskId (myTask led)
createTaskE intTaskId intTask
Schedule the task to start in 50ms , the second starting after the first
scheduleTaskE myTaskId 50
attachIntE button intTaskId FALLING
intExample :: IO ()
intExample = withArduino True "/dev/cu.usbmodem1421" $ do
initIntExample
-- Query to confirm task creation
tasks <- queryAllTasksE
liftIO $ print tasks
task1 <- queryTaskE 1
liftIO $ print task1
task2 <- queryTaskE 2
liftIO $ print task2
| null | https://raw.githubusercontent.com/ku-fpg/haskino/9a0709c92c2da9b9371e292b00fd076e5539eb18/legacy/Deep/IntExample.hs | haskell | -----------------------------------------------------------------------------
|
Module : System.Hardware.Haskino.SamplePrograms.Deep.intExample
License : BSD3
Stability : experimental
-----------------------------------------------------------------------------
Create the tasks
Query to confirm task creation | Copyright : ( c ) University of Kansas
This is an example of using semaphores to communicate between two tasks .
One task gives a semaphore then delays for 2 seconds . The other task
waits for the semaphore then blinks the led rapidly 3 times .
module System.Hardware.Haskino.SamplePrograms.Deep.IntExample where
import Prelude hiding ((<*))
import Control.Concurrent (threadDelay)
import Control.Monad.Trans (liftIO)
import Data.Boolean
import Data.Boolean.Numbers
import Data.Word
import System.Hardware.Haskino
blinkDelay :: Expr Word32
blinkDelay = 125
semId :: Expr Word8
semId = 0
myTask :: Expr Word8 -> Arduino ()
myTask led =
loopE $ do
takeSemE semId
digitalWriteE led true
delayMillisE blinkDelay
digitalWriteE led false
delayMillisE blinkDelay
intTask :: Arduino ()
intTask = giveSemE semId
initIntExample :: Arduino ()
initIntExample = do
let led = 13
setPinModeE led OUTPUT
let button = 2
setPinModeE button INPUT
let myTaskId = 1
let intTaskId = 2
createTaskE myTaskId (myTask led)
createTaskE intTaskId intTask
Schedule the task to start in 50ms , the second starting after the first
scheduleTaskE myTaskId 50
attachIntE button intTaskId FALLING
intExample :: IO ()
intExample = withArduino True "/dev/cu.usbmodem1421" $ do
initIntExample
tasks <- queryAllTasksE
liftIO $ print tasks
task1 <- queryTaskE 1
liftIO $ print task1
task2 <- queryTaskE 2
liftIO $ print task2
|
b5404fc7f9bc635bae7600dc693dd9d85d3717bd99e89fc9c05fef2c1f04b992 | zefhemel/adia | application.clj | (ns application
(:require wiki.index)
(:use adia.servlet))
(start-server 8080)
| null | https://raw.githubusercontent.com/zefhemel/adia/12d1f47ab7e891f417fa87ce6016269fee6e5dd2/examples/wiki/application.clj | clojure | (ns application
(:require wiki.index)
(:use adia.servlet))
(start-server 8080)
| |
be701266220ec8b54758f47ad0b85f886f7bf2d56f6fd56bb81c59539bcf6a66 | clojure-interop/java-jdk | NavigationFilter$FilterBypass.clj | (ns javax.swing.text.NavigationFilter$FilterBypass
"Used as a way to circumvent calling back into the caret to
position the cursor. Caret implementations that wish to support
a NavigationFilter must provide an implementation that will
not callback into the NavigationFilter."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text NavigationFilter$FilterBypass]))
(defn ->filter-bypass
"Constructor."
(^NavigationFilter$FilterBypass []
(new NavigationFilter$FilterBypass )))
(defn get-caret
"Returns the Caret that is changing.
returns: Caret that is changing - `javax.swing.text.Caret`"
(^javax.swing.text.Caret [^NavigationFilter$FilterBypass this]
(-> this (.getCaret))))
(defn set-dot
"Sets the caret location, bypassing the NavigationFilter.
dot - the position >= 0 - `int`
bias - Bias to place the dot at - `javax.swing.text.Position$Bias`"
([^NavigationFilter$FilterBypass this ^Integer dot ^javax.swing.text.Position$Bias bias]
(-> this (.setDot dot bias))))
(defn move-dot
"Moves the caret location, bypassing the NavigationFilter.
dot - the position >= 0 - `int`
bias - Bias for new location - `javax.swing.text.Position$Bias`"
([^NavigationFilter$FilterBypass this ^Integer dot ^javax.swing.text.Position$Bias bias]
(-> this (.moveDot dot bias))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/text/NavigationFilter%24FilterBypass.clj | clojure | (ns javax.swing.text.NavigationFilter$FilterBypass
"Used as a way to circumvent calling back into the caret to
position the cursor. Caret implementations that wish to support
a NavigationFilter must provide an implementation that will
not callback into the NavigationFilter."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text NavigationFilter$FilterBypass]))
(defn ->filter-bypass
"Constructor."
(^NavigationFilter$FilterBypass []
(new NavigationFilter$FilterBypass )))
(defn get-caret
"Returns the Caret that is changing.
returns: Caret that is changing - `javax.swing.text.Caret`"
(^javax.swing.text.Caret [^NavigationFilter$FilterBypass this]
(-> this (.getCaret))))
(defn set-dot
"Sets the caret location, bypassing the NavigationFilter.
dot - the position >= 0 - `int`
bias - Bias to place the dot at - `javax.swing.text.Position$Bias`"
([^NavigationFilter$FilterBypass this ^Integer dot ^javax.swing.text.Position$Bias bias]
(-> this (.setDot dot bias))))
(defn move-dot
"Moves the caret location, bypassing the NavigationFilter.
dot - the position >= 0 - `int`
bias - Bias for new location - `javax.swing.text.Position$Bias`"
([^NavigationFilter$FilterBypass this ^Integer dot ^javax.swing.text.Position$Bias bias]
(-> this (.moveDot dot bias))))
| |
617ba5a6b10084cca3da1b833482ab7da925fbfcbf5428e4147883aa739c3217 | rudymatela/express | instances.hs | Copyright ( c ) 2017 - 2021 .
-- Distributed under the 3-Clause BSD licence (see the file LICENSE).
# LANGUAGE NoMonomorphismRestriction #
import Test
main :: IO ()
main = mainTest tests 5040
tests :: Int -> [Bool]
tests n =
[ True
, eval undefined (eqFor (undefined :: Int) :$ one :$ one) == True
, eval undefined (eqFor (undefined :: Int) :$ one :$ two) == False
, eval undefined (lessEqFor (undefined :: Int) :$ one :$ two) == True
, eval undefined (lessEqFor (undefined :: Int) :$ one :$ one) == True
, eval undefined (lessEqFor (undefined :: Int) :$ two :$ one) == False
, eval undefined (lessFor (undefined :: Int) :$ one :$ two) == True
, eval undefined (lessFor (undefined :: Int) :$ one :$ one) == False
, eval undefined (lessFor (undefined :: Int) :$ two :$ one) == False
-- for the time being, compare has been removed from reifyOrd's result
, eval undefined ( compareFor ( undefined : : Int ) : $ one : $ two ) = = LT
, eval undefined ( compareFor ( undefined : : Int ) : $ one : $ one ) = = EQ
, eval undefined ( compareFor ( undefined : : Int ) : $ two : $ one ) = = GT
, eval undefined (nameFor (undefined :: Int) :$ xx) == "x"
, eval undefined (nameFor (undefined :: Int) :$ yy) == "x"
, eval undefined (nameFor (undefined :: Bool) :$ pp) == "p"
, eval undefined (nameFor (undefined :: Bool) :$ qq) == "p"
, length (validApps functions one) == 5
-- when lookupName does not find a name instance,
it defaults to x , xs , xss , xsss , ...
-- depending on the number of list nestings
, lookupName [] (val (0::Int)) == "x"
, lookupName [] (val [0::Int]) == "xs"
, lookupName [] (val [[0::Int]]) == "xss"
, lookupName [] (val [[[0::Int]]]) == "xsss"
, lookupName [] (val False) == "x"
, lookupName [] (val [False]) == "xs"
, lookupName [] (val [[False]]) == "xss"
, lookupName [] (val [[[False]]]) == "xsss"
, lookupName [] (val (0::A)) == "x"
, lookupName [] (val [0::A]) == "xs"
, lookupName [] (val [[0::A]]) == "xss"
, lookupName [] (val [[[0::A]]]) == "xsss"
, lookupName preludeNameInstances (val False) == "p"
, lookupName preludeNameInstances (val [False]) == "ps"
, lookupName preludeNameInstances (val [[False]]) == "xss" -- XXX: caveat
, lookupName preludeNameInstances (val [[[False]]]) == "xsss" -- XXX: caveat
, lookupName preludeNameInstances (val (0::A)) == "x"
, lookupName preludeNameInstances (val [0::A]) == "xs"
, lookupName preludeNameInstances (val [[0::A]]) == "xss"
, lookupName preludeNameInstances (val [[[0::A]]]) == "xsss"
]
where
eqFor = head . reifyEq
lessEqFor = head . reifyOrd
lessFor = head . tail . reifyOrd
--compareFor = head . reifyOrd
nameFor = head . reifyName
functions :: [Expr]
functions = concat
[ reifyEq (undefined :: Int)
, reifyEq (undefined :: Bool)
, reifyOrd (undefined :: Int)
, reifyOrd (undefined :: Bool)
, reifyName (undefined :: Int)
, reifyName (undefined :: Bool)
]
| null | https://raw.githubusercontent.com/rudymatela/express/07e5ad577a0fca83d82ab32394e1cd17be7ac41f/test/instances.hs | haskell | Distributed under the 3-Clause BSD licence (see the file LICENSE).
for the time being, compare has been removed from reifyOrd's result
when lookupName does not find a name instance,
depending on the number of list nestings
XXX: caveat
XXX: caveat
compareFor = head . reifyOrd | Copyright ( c ) 2017 - 2021 .
# LANGUAGE NoMonomorphismRestriction #
import Test
main :: IO ()
main = mainTest tests 5040
tests :: Int -> [Bool]
tests n =
[ True
, eval undefined (eqFor (undefined :: Int) :$ one :$ one) == True
, eval undefined (eqFor (undefined :: Int) :$ one :$ two) == False
, eval undefined (lessEqFor (undefined :: Int) :$ one :$ two) == True
, eval undefined (lessEqFor (undefined :: Int) :$ one :$ one) == True
, eval undefined (lessEqFor (undefined :: Int) :$ two :$ one) == False
, eval undefined (lessFor (undefined :: Int) :$ one :$ two) == True
, eval undefined (lessFor (undefined :: Int) :$ one :$ one) == False
, eval undefined (lessFor (undefined :: Int) :$ two :$ one) == False
, eval undefined ( compareFor ( undefined : : Int ) : $ one : $ two ) = = LT
, eval undefined ( compareFor ( undefined : : Int ) : $ one : $ one ) = = EQ
, eval undefined ( compareFor ( undefined : : Int ) : $ two : $ one ) = = GT
, eval undefined (nameFor (undefined :: Int) :$ xx) == "x"
, eval undefined (nameFor (undefined :: Int) :$ yy) == "x"
, eval undefined (nameFor (undefined :: Bool) :$ pp) == "p"
, eval undefined (nameFor (undefined :: Bool) :$ qq) == "p"
, length (validApps functions one) == 5
it defaults to x , xs , xss , xsss , ...
, lookupName [] (val (0::Int)) == "x"
, lookupName [] (val [0::Int]) == "xs"
, lookupName [] (val [[0::Int]]) == "xss"
, lookupName [] (val [[[0::Int]]]) == "xsss"
, lookupName [] (val False) == "x"
, lookupName [] (val [False]) == "xs"
, lookupName [] (val [[False]]) == "xss"
, lookupName [] (val [[[False]]]) == "xsss"
, lookupName [] (val (0::A)) == "x"
, lookupName [] (val [0::A]) == "xs"
, lookupName [] (val [[0::A]]) == "xss"
, lookupName [] (val [[[0::A]]]) == "xsss"
, lookupName preludeNameInstances (val False) == "p"
, lookupName preludeNameInstances (val [False]) == "ps"
, lookupName preludeNameInstances (val (0::A)) == "x"
, lookupName preludeNameInstances (val [0::A]) == "xs"
, lookupName preludeNameInstances (val [[0::A]]) == "xss"
, lookupName preludeNameInstances (val [[[0::A]]]) == "xsss"
]
where
eqFor = head . reifyEq
lessEqFor = head . reifyOrd
lessFor = head . tail . reifyOrd
nameFor = head . reifyName
functions :: [Expr]
functions = concat
[ reifyEq (undefined :: Int)
, reifyEq (undefined :: Bool)
, reifyOrd (undefined :: Int)
, reifyOrd (undefined :: Bool)
, reifyName (undefined :: Int)
, reifyName (undefined :: Bool)
]
|
054e4c390d617348583634d8947ea7707adf08f54ac3c35b23d507917769722a | FranklinChen/hugs98-plus-Sep2006 | FiniteMap.hs | -- | Simple Finite Maps.
This implementation provides several useful methods that Data .
-- does not.
module Data.Graph.Inductive.Internal.FiniteMap(
-- * Type
FiniteMap(..),
-- * Operations
emptyFM,addToFM,delFromFM,
updFM,
accumFM,
splitFM,
isEmptyFM,sizeFM,lookupFM,elemFM,
rangeFM,
minFM,maxFM,predFM,succFM,
splitMinFM,
fmToList
) where
import Data.Maybe (isJust)
data Ord a => FiniteMap a b =
Empty | Node Int (FiniteMap a b) (a,b) (FiniteMap a b)
deriving (Eq)
----------------------------------------------------------------------
UTILITIES
----------------------------------------------------------------------
-- pretty printing
--
showsMap :: (Show a,Show b,Ord a) => FiniteMap a b -> ShowS
showsMap Empty = id
showsMap (Node _ l (i,x) r) = showsMap l . (' ':) .
shows i . ("->"++) . shows x . showsMap r
instance (Show a,Show b,Ord a) => Show (FiniteMap a b) where
showsPrec _ m = showsMap m
-- other
--
splitMax :: Ord a => FiniteMap a b -> (FiniteMap a b,(a,b))
splitMax (Node _ l x Empty) = (l,x)
splitMax (Node _ l x r) = (avlBalance l x m,y) where (m,y) = splitMax r
splitMax Empty = error "splitMax on empty FiniteMap"
merge :: Ord a => FiniteMap a b -> FiniteMap a b -> FiniteMap a b
merge l Empty = l
merge Empty r = r
merge l r = avlBalance l' x r where (l',x) = splitMax l
----------------------------------------------------------------------
-- MAIN FUNCTIONS
----------------------------------------------------------------------
emptyFM :: Ord a => FiniteMap a b
emptyFM = Empty
addToFM :: Ord a => FiniteMap a b -> a -> b -> FiniteMap a b
addToFM Empty i x = node Empty (i,x) Empty
addToFM (Node h l (j,y) r) i x
| i<j = avlBalance (addToFM l i x) (j,y) r
| i>j = avlBalance l (j,y) (addToFM r i x)
| otherwise = Node h l (j,x) r
-- | applies function to stored entry
updFM :: Ord a => FiniteMap a b -> a -> (b -> b) -> FiniteMap a b
updFM Empty _ _ = Empty
updFM (Node h l (j,x) r) i f
| i<j = let l' = updFM l i f in l' `seq` Node h l' (j,x) r
| i>j = let r' = updFM r i f in r' `seq` Node h l (j,x) r'
| otherwise = Node h l (j,f x) r
-- | defines or aggregates entries
accumFM :: Ord a => FiniteMap a b -> a -> (b -> b -> b) -> b -> FiniteMap a b
accumFM Empty i _ x = node Empty (i,x) Empty
accumFM (Node h l (j,y) r) i f x
| i<j = avlBalance (accumFM l i f x) (j,y) r
| i>j = avlBalance l (j,y) (accumFM r i f x)
| otherwise = Node h l (j,f x y) r
delFromFM :: Ord a => FiniteMap a b -> a -> FiniteMap a b
delFromFM Empty _ = Empty
delFromFM (Node _ l (j,x) r) i
| i<j = avlBalance (delFromFM l i) (j,x) r
| i>j = avlBalance l (j,x) (delFromFM r i)
| otherwise = merge l r
isEmptyFM :: FiniteMap a b -> Bool
isEmptyFM Empty = True
isEmptyFM _ = False
sizeFM :: Ord a => FiniteMap a b -> Int
sizeFM Empty = 0
sizeFM (Node _ l _ r) = sizeFM l + 1 + sizeFM r
lookupFM :: Ord a => FiniteMap a b -> a -> Maybe b
lookupFM Empty _ = Nothing
lookupFM (Node _ l (j,x) r) i | i<j = lookupFM l i
| i>j = lookupFM r i
| otherwise = Just x
-- | applies lookup to an interval
rangeFM :: Ord a => FiniteMap a b -> a -> a -> [b]
rangeFM m i j = rangeFMa m i j []
--
rangeFMa Empty _ _ a = a
rangeFMa (Node _ l (k,x) r) i j a
| k<i = rangeFMa r i j a
| k>j = rangeFMa l i j a
| otherwise = rangeFMa l i j (x:rangeFMa r i j a)
minFM :: Ord a => FiniteMap a b -> Maybe (a,b)
minFM Empty = Nothing
minFM (Node _ Empty x _) = Just x
minFM (Node _ l _ _) = minFM l
maxFM :: Ord a => FiniteMap a b -> Maybe (a,b)
maxFM Empty = Nothing
maxFM (Node _ _ x Empty) = Just x
maxFM (Node _ _ _ r) = maxFM r
predFM :: Ord a => FiniteMap a b -> a -> Maybe (a,b)
predFM m i = predFM' m i Nothing
--
predFM' Empty _ p = p
predFM' (Node _ l (j,x) r) i p | i<j = predFM' l i p
| i>j = predFM' r i (Just (j,x))
| isJust ml = ml
| otherwise = p
where ml = maxFM l
succFM :: Ord a => FiniteMap a b -> a -> Maybe (a,b)
succFM m i = succFM' m i Nothing
--
succFM' Empty _ p = p
succFM' (Node _ l (j,x) r) i p | i<j = succFM' l i (Just (j,x))
| i>j = succFM' r i p
| isJust mr = mr
| otherwise = p
where mr = minFM r
elemFM :: Ord a => FiniteMap a b -> a -> Bool
elemFM m i = case lookupFM m i of {Nothing -> False; _ -> True}
-- | combines delFrom and lookup
splitFM :: Ord a => FiniteMap a b -> a -> Maybe (FiniteMap a b,(a,b))
splitFM Empty _ = Nothing
splitFM (Node _ l (j,x) r) i =
if i<j then
case splitFM l i of
Just (l',y) -> Just (avlBalance l' (j,x) r,y)
Nothing -> Nothing else
if i>j then
case splitFM r i of
Just (r',y) -> Just (avlBalance l (j,x) r',y)
Nothing -> Nothing
else {- i==j -} Just (merge l r,(j,x))
| combines splitFM and
splitMinFM :: Ord a => FiniteMap a b -> Maybe (FiniteMap a b,(a,b))
splitMinFM Empty = Nothing
splitMinFM (Node _ Empty x r) = Just (r,x)
splitMinFM (Node _ l x r) = Just (avlBalance l' x r,y)
where Just (l',y) = splitMinFM l
fmToList :: Ord a => FiniteMap a b -> [(a,b)]
fmToList m = scan m []
where scan Empty xs = xs
scan (Node _ l x r) xs = scan l (x:(scan r xs))
----------------------------------------------------------------------
AVL tree helper functions
----------------------------------------------------------------------
height :: Ord a => FiniteMap a b -> Int
height Empty = 0
height (Node h _ _ _) = h
node :: Ord a => FiniteMap a b -> (a,b) -> FiniteMap a b -> FiniteMap a b
node l val r = Node h l val r
where h=1+(height l `max` height r)
avlBalance :: Ord a => FiniteMap a b -> (a,b) -> FiniteMap a b -> FiniteMap a b
avlBalance l (i,x) r
| (hr + 1 < hl) && (bias l < 0) = rotr (node (rotl l) (i,x) r)
| (hr + 1 < hl) = rotr (node l (i,x) r)
| (hl + 1 < hr) && (0 < bias r) = rotl (node l (i,x) (rotr r))
| (hl + 1 < hr) = rotl (node l (i,x) r)
| otherwise = node l (i,x) r
where hl=height l; hr=height r
bias :: Ord a => FiniteMap a b -> Int
bias (Node _ l _ r) = height l - height r
bias Empty = 0
rotr :: Ord a => FiniteMap a b -> FiniteMap a b
rotr Empty = Empty
rotr (Node _ (Node _ l1 v1 r1) v2 r2) = node l1 v1 (node r1 v2 r2)
rotr (Node _ Empty _ _) = error "rotr on invalid FiniteMap"
rotl :: Ord a => FiniteMap a b -> FiniteMap a b
rotl Empty = Empty
rotl (Node _ l1 v1 (Node _ l2 v2 r2)) = node (node l1 v1 l2) v2 r2
rotl (Node _ _ _ Empty) = error "rotl on invalid FiniteMap"
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/fgl/Data/Graph/Inductive/Internal/FiniteMap.hs | haskell | | Simple Finite Maps.
does not.
* Type
* Operations
--------------------------------------------------------------------
--------------------------------------------------------------------
pretty printing
other
--------------------------------------------------------------------
MAIN FUNCTIONS
--------------------------------------------------------------------
| applies function to stored entry
| defines or aggregates entries
| applies lookup to an interval
| combines delFrom and lookup
i==j
--------------------------------------------------------------------
-------------------------------------------------------------------- | This implementation provides several useful methods that Data .
module Data.Graph.Inductive.Internal.FiniteMap(
FiniteMap(..),
emptyFM,addToFM,delFromFM,
updFM,
accumFM,
splitFM,
isEmptyFM,sizeFM,lookupFM,elemFM,
rangeFM,
minFM,maxFM,predFM,succFM,
splitMinFM,
fmToList
) where
import Data.Maybe (isJust)
data Ord a => FiniteMap a b =
Empty | Node Int (FiniteMap a b) (a,b) (FiniteMap a b)
deriving (Eq)
UTILITIES
showsMap :: (Show a,Show b,Ord a) => FiniteMap a b -> ShowS
showsMap Empty = id
showsMap (Node _ l (i,x) r) = showsMap l . (' ':) .
shows i . ("->"++) . shows x . showsMap r
instance (Show a,Show b,Ord a) => Show (FiniteMap a b) where
showsPrec _ m = showsMap m
splitMax :: Ord a => FiniteMap a b -> (FiniteMap a b,(a,b))
splitMax (Node _ l x Empty) = (l,x)
splitMax (Node _ l x r) = (avlBalance l x m,y) where (m,y) = splitMax r
splitMax Empty = error "splitMax on empty FiniteMap"
merge :: Ord a => FiniteMap a b -> FiniteMap a b -> FiniteMap a b
merge l Empty = l
merge Empty r = r
merge l r = avlBalance l' x r where (l',x) = splitMax l
emptyFM :: Ord a => FiniteMap a b
emptyFM = Empty
addToFM :: Ord a => FiniteMap a b -> a -> b -> FiniteMap a b
addToFM Empty i x = node Empty (i,x) Empty
addToFM (Node h l (j,y) r) i x
| i<j = avlBalance (addToFM l i x) (j,y) r
| i>j = avlBalance l (j,y) (addToFM r i x)
| otherwise = Node h l (j,x) r
updFM :: Ord a => FiniteMap a b -> a -> (b -> b) -> FiniteMap a b
updFM Empty _ _ = Empty
updFM (Node h l (j,x) r) i f
| i<j = let l' = updFM l i f in l' `seq` Node h l' (j,x) r
| i>j = let r' = updFM r i f in r' `seq` Node h l (j,x) r'
| otherwise = Node h l (j,f x) r
accumFM :: Ord a => FiniteMap a b -> a -> (b -> b -> b) -> b -> FiniteMap a b
accumFM Empty i _ x = node Empty (i,x) Empty
accumFM (Node h l (j,y) r) i f x
| i<j = avlBalance (accumFM l i f x) (j,y) r
| i>j = avlBalance l (j,y) (accumFM r i f x)
| otherwise = Node h l (j,f x y) r
delFromFM :: Ord a => FiniteMap a b -> a -> FiniteMap a b
delFromFM Empty _ = Empty
delFromFM (Node _ l (j,x) r) i
| i<j = avlBalance (delFromFM l i) (j,x) r
| i>j = avlBalance l (j,x) (delFromFM r i)
| otherwise = merge l r
isEmptyFM :: FiniteMap a b -> Bool
isEmptyFM Empty = True
isEmptyFM _ = False
sizeFM :: Ord a => FiniteMap a b -> Int
sizeFM Empty = 0
sizeFM (Node _ l _ r) = sizeFM l + 1 + sizeFM r
lookupFM :: Ord a => FiniteMap a b -> a -> Maybe b
lookupFM Empty _ = Nothing
lookupFM (Node _ l (j,x) r) i | i<j = lookupFM l i
| i>j = lookupFM r i
| otherwise = Just x
rangeFM :: Ord a => FiniteMap a b -> a -> a -> [b]
rangeFM m i j = rangeFMa m i j []
rangeFMa Empty _ _ a = a
rangeFMa (Node _ l (k,x) r) i j a
| k<i = rangeFMa r i j a
| k>j = rangeFMa l i j a
| otherwise = rangeFMa l i j (x:rangeFMa r i j a)
minFM :: Ord a => FiniteMap a b -> Maybe (a,b)
minFM Empty = Nothing
minFM (Node _ Empty x _) = Just x
minFM (Node _ l _ _) = minFM l
maxFM :: Ord a => FiniteMap a b -> Maybe (a,b)
maxFM Empty = Nothing
maxFM (Node _ _ x Empty) = Just x
maxFM (Node _ _ _ r) = maxFM r
predFM :: Ord a => FiniteMap a b -> a -> Maybe (a,b)
predFM m i = predFM' m i Nothing
predFM' Empty _ p = p
predFM' (Node _ l (j,x) r) i p | i<j = predFM' l i p
| i>j = predFM' r i (Just (j,x))
| isJust ml = ml
| otherwise = p
where ml = maxFM l
succFM :: Ord a => FiniteMap a b -> a -> Maybe (a,b)
succFM m i = succFM' m i Nothing
succFM' Empty _ p = p
succFM' (Node _ l (j,x) r) i p | i<j = succFM' l i (Just (j,x))
| i>j = succFM' r i p
| isJust mr = mr
| otherwise = p
where mr = minFM r
elemFM :: Ord a => FiniteMap a b -> a -> Bool
elemFM m i = case lookupFM m i of {Nothing -> False; _ -> True}
splitFM :: Ord a => FiniteMap a b -> a -> Maybe (FiniteMap a b,(a,b))
splitFM Empty _ = Nothing
splitFM (Node _ l (j,x) r) i =
if i<j then
case splitFM l i of
Just (l',y) -> Just (avlBalance l' (j,x) r,y)
Nothing -> Nothing else
if i>j then
case splitFM r i of
Just (r',y) -> Just (avlBalance l (j,x) r',y)
Nothing -> Nothing
| combines splitFM and
splitMinFM :: Ord a => FiniteMap a b -> Maybe (FiniteMap a b,(a,b))
splitMinFM Empty = Nothing
splitMinFM (Node _ Empty x r) = Just (r,x)
splitMinFM (Node _ l x r) = Just (avlBalance l' x r,y)
where Just (l',y) = splitMinFM l
fmToList :: Ord a => FiniteMap a b -> [(a,b)]
fmToList m = scan m []
where scan Empty xs = xs
scan (Node _ l x r) xs = scan l (x:(scan r xs))
AVL tree helper functions
height :: Ord a => FiniteMap a b -> Int
height Empty = 0
height (Node h _ _ _) = h
node :: Ord a => FiniteMap a b -> (a,b) -> FiniteMap a b -> FiniteMap a b
node l val r = Node h l val r
where h=1+(height l `max` height r)
avlBalance :: Ord a => FiniteMap a b -> (a,b) -> FiniteMap a b -> FiniteMap a b
avlBalance l (i,x) r
| (hr + 1 < hl) && (bias l < 0) = rotr (node (rotl l) (i,x) r)
| (hr + 1 < hl) = rotr (node l (i,x) r)
| (hl + 1 < hr) && (0 < bias r) = rotl (node l (i,x) (rotr r))
| (hl + 1 < hr) = rotl (node l (i,x) r)
| otherwise = node l (i,x) r
where hl=height l; hr=height r
bias :: Ord a => FiniteMap a b -> Int
bias (Node _ l _ r) = height l - height r
bias Empty = 0
rotr :: Ord a => FiniteMap a b -> FiniteMap a b
rotr Empty = Empty
rotr (Node _ (Node _ l1 v1 r1) v2 r2) = node l1 v1 (node r1 v2 r2)
rotr (Node _ Empty _ _) = error "rotr on invalid FiniteMap"
rotl :: Ord a => FiniteMap a b -> FiniteMap a b
rotl Empty = Empty
rotl (Node _ l1 v1 (Node _ l2 v2 r2)) = node (node l1 v1 l2) v2 r2
rotl (Node _ _ _ Empty) = error "rotl on invalid FiniteMap"
|
343845b9c4b8133532a35232809680b46ce542c53f299ec23433984c13f36cb6 | rufus-lang/rufus | rufus_parse_throw_test.erl | -module(rufus_parse_throw_test).
-include_lib("eunit/include/eunit.hrl").
parse_function_that_throws_an_atom_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw :kaboom\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{atom_lit, #{
line => 2,
spec => kaboom,
type => {type, #{line => 2, spec => atom}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_bool_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw true\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{bool_lit, #{
line => 2,
spec => true,
type => {type, #{line => 2, spec => bool}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_float_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw 42.0\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{float_lit, #{
line => 2,
spec => 42.0,
type => {type, #{line => 2, spec => float}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_an_int_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw 42\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{int_lit, #{
line => 2,
spec => 42,
type => {type, #{line => 2, spec => int}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_string_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw \"kaboom\"\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{string_lit, #{
line => 2,
spec => <<"kaboom">>,
type => {type, #{line => 2, spec => string}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_cons_expression_test() ->
RufusText =
"func Explode() atom {\n"
" throw list[int]{2|tail}\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{cons, #{
head =>
{int_lit, #{
line => 2,
spec => 2,
type => {type, #{line => 2, spec => int}}
}},
line => 2,
tail => {identifier, #{line => 2, spec => tail}},
type =>
{type, #{
element_type =>
{type, #{line => 2, spec => int}},
kind => list,
line => 2,
spec => 'list[int]'
}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_an_identifier_test() ->
RufusText =
"func Explode() atom {\n"
" throw kaboom\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr => {identifier, #{line => 2, spec => kaboom}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_match_op_expression_test() ->
RufusText =
"func Explode() atom {\n"
" throw a = b\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{match_op, #{
left => {identifier, #{line => 2, spec => a}},
line => 2,
right => {identifier, #{line => 2, spec => b}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
%% Failure mode tests
parse_function_with_throw_used_in_another_expression_test() ->
RufusText =
"func Explode() atom {\n"
" a = throw :kaboom\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
?assertMatch({error, _}, rufus_parse:parse(Tokens)).
| null | https://raw.githubusercontent.com/rufus-lang/rufus/fb3ec8f67c06e8d656c6aa6c4297461559ee168f/rf/test/rufus_parse_throw_test.erl | erlang | Failure mode tests | -module(rufus_parse_throw_test).
-include_lib("eunit/include/eunit.hrl").
parse_function_that_throws_an_atom_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw :kaboom\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{atom_lit, #{
line => 2,
spec => kaboom,
type => {type, #{line => 2, spec => atom}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_bool_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw true\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{bool_lit, #{
line => 2,
spec => true,
type => {type, #{line => 2, spec => bool}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_float_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw 42.0\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{float_lit, #{
line => 2,
spec => 42.0,
type => {type, #{line => 2, spec => float}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_an_int_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw 42\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{int_lit, #{
line => 2,
spec => 42,
type => {type, #{line => 2, spec => int}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_string_literal_test() ->
RufusText =
"func Explode() atom {\n"
" throw \"kaboom\"\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{string_lit, #{
line => 2,
spec => <<"kaboom">>,
type => {type, #{line => 2, spec => string}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_cons_expression_test() ->
RufusText =
"func Explode() atom {\n"
" throw list[int]{2|tail}\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{cons, #{
head =>
{int_lit, #{
line => 2,
spec => 2,
type => {type, #{line => 2, spec => int}}
}},
line => 2,
tail => {identifier, #{line => 2, spec => tail}},
type =>
{type, #{
element_type =>
{type, #{line => 2, spec => int}},
kind => list,
line => 2,
spec => 'list[int]'
}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_an_identifier_test() ->
RufusText =
"func Explode() atom {\n"
" throw kaboom\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr => {identifier, #{line => 2, spec => kaboom}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_that_throws_a_match_op_expression_test() ->
RufusText =
"func Explode() atom {\n"
" throw a = b\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
{ok, Forms} = rufus_parse:parse(Tokens),
Expected = [
{func, #{
exprs => [
{throw, #{
expr =>
{match_op, #{
left => {identifier, #{line => 2, spec => a}},
line => 2,
right => {identifier, #{line => 2, spec => b}}
}},
line => 2
}}
],
line => 1,
params => [],
return_type => {type, #{line => 1, spec => atom}},
spec => 'Explode'
}}
],
?assertEqual(Expected, Forms).
parse_function_with_throw_used_in_another_expression_test() ->
RufusText =
"func Explode() atom {\n"
" a = throw :kaboom\n"
"}\n",
{ok, Tokens} = rufus_tokenize:string(RufusText),
?assertMatch({error, _}, rufus_parse:parse(Tokens)).
|
9f8a9772b6fc28e64f8a9ddd705107697d1d3b249528f7ac37eb9189d1ffd96c | swaywm/chicken-wlroots | wlr-matrix.scm | Copyright 2019
;;
;; Permission is hereby granted, free of charge, to any person obtaining a copy
;; of this software and associated documentation files (the "Software"), to
deal in the Software without restriction , including without limitation the
;; rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software , and to permit persons to whom the Software is
;; furnished to do so, subject to the following conditions:
;;
;; The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
;; IN THE SOFTWARE.
(foreign-declare "#include <wlr/types/wlr_matrix.h>")
(include "wlroots-types.scm")
(module (wlr types wlr-matrix)
(make-wlr-matrix
wlr-matrix-ref
wlr-matrix-set
wlr-matrix-identity
wlr-matrix-transpose
wlr-matrix-translate
wlr-matrix-scale
wlr-matrix-rotate
wlr-matrix-transform
wlr-matrix-projection
wlr-matrix-project-box)
(import (scheme)
(chicken base)
(chicken gc)
(chicken memory))
(include "ffi-helpers.scm")
(bind-file "include/bind/wlr/types/wlr_matrix.h")
(define-foreign-type wlr-matrix (c-pointer float))
(define (make-wlr-matrix)
; allocate a float[9]
XXX : assumes 32 - bit float
(let ((mem (allocate (* 9 4))))
(set-finalizer! mem free)
mem))
(define wlr-matrix-ref
(foreign-lambda* float ((wlr-matrix matrix) (int i))
"C_return(matrix[i]);"))
(define wlr-matrix-set
(foreign-lambda* void ((wlr-matrix matrix) (int i) (float v))
"matrix[i] = v;")))
| null | https://raw.githubusercontent.com/swaywm/chicken-wlroots/649f200126102b1247ba638eb797d14b46bafc0b/src/wlr/types/wlr-matrix.scm | scheme |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
allocate a float[9] | Copyright 2019
deal in the Software without restriction , including without limitation the
sell copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(foreign-declare "#include <wlr/types/wlr_matrix.h>")
(include "wlroots-types.scm")
(module (wlr types wlr-matrix)
(make-wlr-matrix
wlr-matrix-ref
wlr-matrix-set
wlr-matrix-identity
wlr-matrix-transpose
wlr-matrix-translate
wlr-matrix-scale
wlr-matrix-rotate
wlr-matrix-transform
wlr-matrix-projection
wlr-matrix-project-box)
(import (scheme)
(chicken base)
(chicken gc)
(chicken memory))
(include "ffi-helpers.scm")
(bind-file "include/bind/wlr/types/wlr_matrix.h")
(define-foreign-type wlr-matrix (c-pointer float))
(define (make-wlr-matrix)
XXX : assumes 32 - bit float
(let ((mem (allocate (* 9 4))))
(set-finalizer! mem free)
mem))
(define wlr-matrix-ref
(foreign-lambda* float ((wlr-matrix matrix) (int i))
"C_return(matrix[i]);"))
(define wlr-matrix-set
(foreign-lambda* void ((wlr-matrix matrix) (int i) (float v))
"matrix[i] = v;")))
|
7b0230704f386bc1530a49d1a718cbdde73e8cb4639bf0e73267cc057687fb4a | elaforge/karya | DiffPerformance.hs | Copyright 2013
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
| Functions to compare a performance against a previous \"known good\ " one .
-- This is used to detect when code changes cause a performance to change.
module Cmd.DiffPerformance (
-- * save and load
load_midi, save_midi, midi_magic
-- * diff
, diff_lilypond
, diff_im
, diff_midi
-- * util
, show_midi
, diff_lines
) where
import qualified Control.Exception as Exception
import qualified Data.List as List
import qualified Data.Text as Text
import qualified Data.Vector as Vector
import qualified System.Directory as Directory
import System.FilePath ((</>))
import qualified System.IO.Error as IO.Error
import qualified System.Process as Process
import qualified Util.File as File
import qualified Util.Seq as Seq
import qualified Util.Serialize as Serialize
import qualified Midi.Encode as Encode
import Midi.Instances ()
import qualified Midi.Midi as Midi
import qualified Perform.RealTime as RealTime
import qualified Synth.Shared.Note as Shared.Note
import qualified Ui.UiConfig as UiConfig
import Global
type Messages = Vector.Vector Midi.WriteMessage
-- * save and load
load_midi :: FilePath -> IO (Either Text Messages)
load_midi fname =
first ((("loading " <> showt fname <> ": ") <>) . pretty) <$>
Serialize.unserialize midi_magic fname
-- | Perform the input score and save the midi msgs to the output file.
-- This creates the -perf files.
save_midi :: FilePath -> Messages -> IO ()
save_midi fn = void . Serialize.serialize midi_magic fn
-- | Saved MIDI performance.
midi_magic :: Serialize.Magic (Vector.Vector Midi.WriteMessage)
midi_magic = Serialize.Magic 'm' 'i' 'd' 'i'
-- * diff
diff_lilypond :: String -> FilePath -> UiConfig.LilypondPerformance -> Text
-> IO (Maybe Text, [FilePath])
diff_lilypond = diff_performance Text.lines
diff_im :: String -> FilePath -> UiConfig.ImPerformance -> [Shared.Note.Note]
-> IO (Maybe Text, [FilePath])
diff_im name dir performance =
diff_performance show_im name dir (Vector.toList <$> performance)
where show_im = map pretty
diff_midi :: String -> FilePath -> UiConfig.MidiPerformance
-> [Midi.WriteMessage] -> IO (Maybe Text, [FilePath])
diff_midi name dir performance =
diff_performance show_midi name dir (Vector.toList <$> performance)
diff_performance :: (events -> [Text]) -> String -> FilePath
-> UiConfig.Performance events -> events -> IO (Maybe Text, [FilePath])
diff_performance show_events name dir performance events =
first (fmap (info<>)) <$> diff_lines name dir
(show_events (UiConfig.perf_events performance))
(show_events events)
where
info = Text.unlines
[ "Diffs from " <> pretty (UiConfig.perf_creation performance)
, "Commit: " <> UiConfig.perf_commit performance
]
-- | Write files in the given directory and run the @diff@ command on them.
diff_lines :: String -> FilePath -> [Text] -> [Text]
-> IO (Maybe Text, [FilePath])
-- ^ (abbreviated_diff, wrote_files)
diff_lines name dir expected got = do
Directory.createDirectoryIfMissing True dir
File.writeLines expected_fn expected
File.writeLines got_fn got
(_code, diff, stderr) <- Process.readProcessWithExitCode
"diff" [expected_fn, got_fn] ""
unless (null stderr) $
Exception.throwIO $ IO.Error.userError $ "diff failed: " ++ stderr
let abbreviated
| null diff = Nothing
| otherwise = Just $ show_diffs (txt diff)
return (abbreviated, [expected_fn, got_fn])
where
expected_fn = dir </> name ++ ".expected"
got_fn = dir </> name ++ ".got"
show_diffs :: Text -> Text
show_diffs diff = Text.unlines (limit 50 (Text.lines diff))
limit :: Int -> [Text] -> [Text]
limit n xs = pre ++ if null post then [] else [msg]
where
msg = "... trimmed (" <> showt (length xs) <> " lines)"
(pre, post) = splitAt n xs
show_midi :: [Midi.WriteMessage] -> [Text]
show_midi = map pretty . normalize
-- | To better approximate audible differences, I strip excessive time
-- precision and ensure notes happening at the same time are in a consistent
-- order.
normalize :: [Midi.WriteMessage] -> [Midi.WriteMessage]
normalize = concatMap List.sort . Seq.group_adjacent Midi.wmsg_ts . map strip
where
strip wmsg = wmsg
{ Midi.wmsg_ts = strip_time (Midi.wmsg_ts wmsg)
, Midi.wmsg_msg = strip_msg (Midi.wmsg_msg wmsg)
}
-- It'll be rounded again by the pretty instance, since I actually diff
-- pretty output, so this is likely unnecessary.
strip_time = RealTime.seconds . round_to 3 . RealTime.to_seconds
PitchBends are serialized as 14 - bit numbers , so when they get
-- deserialized they change.
strip_msg = Encode.decode . Encode.encode
round_to :: RealFrac d => Int -> d -> d
round_to n = (/ 10^n) . fromIntegral . round . (* 10^n)
| null | https://raw.githubusercontent.com/elaforge/karya/471a2131f5a68b3b10b1a138e6f9ed1282980a18/Cmd/DiffPerformance.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
This is used to detect when code changes cause a performance to change.
* save and load
* diff
* util
* save and load
| Perform the input score and save the midi msgs to the output file.
This creates the -perf files.
| Saved MIDI performance.
* diff
| Write files in the given directory and run the @diff@ command on them.
^ (abbreviated_diff, wrote_files)
| To better approximate audible differences, I strip excessive time
precision and ensure notes happening at the same time are in a consistent
order.
It'll be rounded again by the pretty instance, since I actually diff
pretty output, so this is likely unnecessary.
deserialized they change. | Copyright 2013
| Functions to compare a performance against a previous \"known good\ " one .
module Cmd.DiffPerformance (
load_midi, save_midi, midi_magic
, diff_lilypond
, diff_im
, diff_midi
, show_midi
, diff_lines
) where
import qualified Control.Exception as Exception
import qualified Data.List as List
import qualified Data.Text as Text
import qualified Data.Vector as Vector
import qualified System.Directory as Directory
import System.FilePath ((</>))
import qualified System.IO.Error as IO.Error
import qualified System.Process as Process
import qualified Util.File as File
import qualified Util.Seq as Seq
import qualified Util.Serialize as Serialize
import qualified Midi.Encode as Encode
import Midi.Instances ()
import qualified Midi.Midi as Midi
import qualified Perform.RealTime as RealTime
import qualified Synth.Shared.Note as Shared.Note
import qualified Ui.UiConfig as UiConfig
import Global
type Messages = Vector.Vector Midi.WriteMessage
load_midi :: FilePath -> IO (Either Text Messages)
load_midi fname =
first ((("loading " <> showt fname <> ": ") <>) . pretty) <$>
Serialize.unserialize midi_magic fname
save_midi :: FilePath -> Messages -> IO ()
save_midi fn = void . Serialize.serialize midi_magic fn
midi_magic :: Serialize.Magic (Vector.Vector Midi.WriteMessage)
midi_magic = Serialize.Magic 'm' 'i' 'd' 'i'
diff_lilypond :: String -> FilePath -> UiConfig.LilypondPerformance -> Text
-> IO (Maybe Text, [FilePath])
diff_lilypond = diff_performance Text.lines
diff_im :: String -> FilePath -> UiConfig.ImPerformance -> [Shared.Note.Note]
-> IO (Maybe Text, [FilePath])
diff_im name dir performance =
diff_performance show_im name dir (Vector.toList <$> performance)
where show_im = map pretty
diff_midi :: String -> FilePath -> UiConfig.MidiPerformance
-> [Midi.WriteMessage] -> IO (Maybe Text, [FilePath])
diff_midi name dir performance =
diff_performance show_midi name dir (Vector.toList <$> performance)
diff_performance :: (events -> [Text]) -> String -> FilePath
-> UiConfig.Performance events -> events -> IO (Maybe Text, [FilePath])
diff_performance show_events name dir performance events =
first (fmap (info<>)) <$> diff_lines name dir
(show_events (UiConfig.perf_events performance))
(show_events events)
where
info = Text.unlines
[ "Diffs from " <> pretty (UiConfig.perf_creation performance)
, "Commit: " <> UiConfig.perf_commit performance
]
diff_lines :: String -> FilePath -> [Text] -> [Text]
-> IO (Maybe Text, [FilePath])
diff_lines name dir expected got = do
Directory.createDirectoryIfMissing True dir
File.writeLines expected_fn expected
File.writeLines got_fn got
(_code, diff, stderr) <- Process.readProcessWithExitCode
"diff" [expected_fn, got_fn] ""
unless (null stderr) $
Exception.throwIO $ IO.Error.userError $ "diff failed: " ++ stderr
let abbreviated
| null diff = Nothing
| otherwise = Just $ show_diffs (txt diff)
return (abbreviated, [expected_fn, got_fn])
where
expected_fn = dir </> name ++ ".expected"
got_fn = dir </> name ++ ".got"
show_diffs :: Text -> Text
show_diffs diff = Text.unlines (limit 50 (Text.lines diff))
limit :: Int -> [Text] -> [Text]
limit n xs = pre ++ if null post then [] else [msg]
where
msg = "... trimmed (" <> showt (length xs) <> " lines)"
(pre, post) = splitAt n xs
show_midi :: [Midi.WriteMessage] -> [Text]
show_midi = map pretty . normalize
normalize :: [Midi.WriteMessage] -> [Midi.WriteMessage]
normalize = concatMap List.sort . Seq.group_adjacent Midi.wmsg_ts . map strip
where
strip wmsg = wmsg
{ Midi.wmsg_ts = strip_time (Midi.wmsg_ts wmsg)
, Midi.wmsg_msg = strip_msg (Midi.wmsg_msg wmsg)
}
strip_time = RealTime.seconds . round_to 3 . RealTime.to_seconds
PitchBends are serialized as 14 - bit numbers , so when they get
strip_msg = Encode.decode . Encode.encode
round_to :: RealFrac d => Int -> d -> d
round_to n = (/ 10^n) . fromIntegral . round . (* 10^n)
|
289d966115f9d03dabb6a3008f89e32259d76f4a4d0e8b6d0088615090b9cd05 | janestreet/universe | example.ml | open Core
(* Examples from ../README.md *)
let _basic_use =
[%sexp { a = "hello" ; b = (Time.now () : Time.t) } ]
let _expressions_with_their_evaluations ~x ~y ~z =
[%sexp ~~(x : int), (y + z : int), "literal"]
let _recomended_use_for_errors ~tmpfile ~dst =
try Unix.rename ~src:tmpfile ~dst
with exn ->
raise_s
[%sexp "Error while renaming file",
{ source = (tmpfile : string)
; dest = (dst : string)
; exn = (exn : exn )
}]
| null | https://raw.githubusercontent.com/janestreet/universe/b6cb56fdae83f5d55f9c809f1c2a2b50ea213126/ppx_sexp_value/example/example.ml | ocaml | Examples from ../README.md | open Core
let _basic_use =
[%sexp { a = "hello" ; b = (Time.now () : Time.t) } ]
let _expressions_with_their_evaluations ~x ~y ~z =
[%sexp ~~(x : int), (y + z : int), "literal"]
let _recomended_use_for_errors ~tmpfile ~dst =
try Unix.rename ~src:tmpfile ~dst
with exn ->
raise_s
[%sexp "Error while renaming file",
{ source = (tmpfile : string)
; dest = (dst : string)
; exn = (exn : exn )
}]
|
c3c68190fa0153511216eedda3418113f6e28f62d662dd5a808328ce7322c6f5 | MinaProtocol/mina | snark_util.ml | open Core_kernel
module Make (Impl : Snarky_backendless.Snark_intf.S) = struct
open Impl
open Let_syntax
let pack_int bs =
assert (List.length bs < 62) ;
let rec go pt acc = function
| [] ->
acc
| b :: bs ->
if b then go (2 * pt) (pt + acc) bs else go (2 * pt) acc bs
in
go 1 0 bs
let boolean_assert_lte (x : Boolean.var) (y : Boolean.var) =
x < = y
y = = 1 or x = 0
( y - 1 ) * x = 0
x <= y
y == 1 or x = 0
(y - 1) * x = 0
*)
assert_r1cs
Field.Var.(sub (y :> Field.Var.t) (constant Field.one))
(x :> Field.Var.t)
(Field.Var.constant Field.zero)
let assert_decreasing : Boolean.var list -> unit Checked.t =
let rec go prev (bs0 : Boolean.var list) =
match bs0 with
| [] ->
return ()
| b :: bs ->
let%bind () = boolean_assert_lte b prev in
go b bs
in
function [] -> return () | b :: bs -> go b bs
let nth_bit x ~n = (x lsr n) land 1 = 1
let apply_mask mask bs = Checked.all (List.map2_exn mask bs ~f:Boolean.( && ))
let pack_unsafe (bs0 : Boolean.var list) =
let n = List.length bs0 in
assert (n <= Field.size_in_bits) ;
let rec go acc two_to_the_i = function
| b :: bs ->
go
(Field.Var.add acc (Field.Var.scale b two_to_the_i))
(Field.add two_to_the_i two_to_the_i)
bs
| [] ->
acc
in
go (Field.Var.constant Field.zero) Field.one (bs0 :> Field.Var.t list)
type _ Snarky_backendless.Request.t +=
| N_ones : bool list Snarky_backendless.Request.t
let n_ones ~total_length n =
let%bind bs =
exists
(Typ.list ~length:total_length Boolean.typ)
~request:(As_prover.return N_ones)
~compute:
As_prover.(
map (read_var n) ~f:(fun n ->
List.init total_length ~f:(fun i ->
Bigint.(
compare (of_field (Field.of_int i)) (of_field n) < 0) ) ))
in
let%map () =
Field.Checked.Assert.equal
(Field.Var.sum (bs :> Field.Var.t list))
(* This can't overflow since the field is huge *)
n
and () = assert_decreasing bs in
bs
let assert_num_bits_upper_bound bs u =
let total_length = List.length bs in
assert (total_length < Field.size_in_bits) ;
let%bind mask = n_ones ~total_length u in
let%bind masked = apply_mask mask bs in
with_label __LOC__ (fun () ->
Field.Checked.Assert.equal (pack_unsafe masked) (pack_unsafe bs) )
let num_bits_int =
let rec go acc n = if n = 0 then acc else go (1 + acc) (n lsr 1) in
go 0
let size_in_bits_size_in_bits = num_bits_int Field.size_in_bits
type _ Snarky_backendless.Request.t +=
| Num_bits_upper_bound : Field.t Snarky_backendless.Request.t
let num_bits_upper_bound_unchecked x =
let num_bits =
match
List.find_mapi
(List.rev (Field.unpack x))
~f:(fun i x -> if x then Some i else None)
with
| Some leading_zeroes ->
Field.size_in_bits - leading_zeroes
| None ->
0
in
num_bits
(* Someday: this could definitely be made more efficient *)
let num_bits_upper_bound_unpacked : Boolean.var list -> Field.Var.t Checked.t
=
fun x_unpacked ->
let%bind res =
exists Typ.field
~request:(As_prover.return Num_bits_upper_bound)
~compute:
As_prover.(
map
(read_var (Field.Var.project x_unpacked))
~f:(fun x -> Field.of_int (num_bits_upper_bound_unchecked x)))
in
let%map () = assert_num_bits_upper_bound x_unpacked res in
res
let num_bits_upper_bound ~max_length (x : Field.Var.t) : Field.Var.t Checked.t
=
Field.Checked.unpack x ~length:max_length >>= num_bits_upper_bound_unpacked
let%test_module "Snark_util" =
( module struct
let () = Random.init 123456789
let random_bitstring length =
List.init length ~f:(fun _ -> Random.bool ())
let random_n_bit_field_elt n = Field.project (random_bitstring n)
let%test_unit "compare" =
let bit_length = Field.size_in_bits - 2 in
let random () = random_n_bit_field_elt bit_length in
let test () =
let x = random () in
let y = random () in
let less, less_or_equal =
run_and_check
(let%map { less; less_or_equal } =
Field.Checked.compare ~bit_length (Field.Var.constant x)
(Field.Var.constant y)
in
As_prover.(
map2 (read Boolean.typ less)
(read Boolean.typ less_or_equal)
~f:Tuple2.create) )
|> Or_error.ok_exn
in
let r = Bigint.(compare (of_field x) (of_field y)) in
assert (Bool.equal less (r < 0)) ;
assert (Bool.equal less_or_equal (r <= 0))
in
for _i = 0 to 100 do
test ()
done
let%test_unit "boolean_assert_lte" =
Or_error.ok_exn
(check
(Checked.all_unit
[ boolean_assert_lte Boolean.false_ Boolean.false_
; boolean_assert_lte Boolean.false_ Boolean.true_
; boolean_assert_lte Boolean.true_ Boolean.true_
] ) ) ;
assert (
Or_error.is_error
(check (boolean_assert_lte Boolean.true_ Boolean.false_)) )
let%test_unit "assert_decreasing" =
let decreasing bs =
check (assert_decreasing (List.map ~f:Boolean.var_of_value bs))
in
Or_error.ok_exn (decreasing [ true; true; true; false ]) ;
Or_error.ok_exn (decreasing [ true; true; false; false ]) ;
assert (Or_error.is_error (decreasing [ true; true; false; true ]))
let%test_unit "n_ones" =
let total_length = 6 in
let test n =
let t () =
n_ones ~total_length (Field.Var.constant (Field.of_int n))
in
let handle_with (resp : bool list) =
handle t (fun (With { request; respond }) ->
match request with
| N_ones ->
respond (Provide resp)
| _ ->
unhandled )
in
let correct = Int.pow 2 n - 1 in
let to_bits k =
List.init total_length ~f:(fun i -> (k lsr i) land 1 = 1)
in
for i = 0 to Int.pow 2 total_length - 1 do
if i = correct then
Or_error.ok_exn (check (handle_with (to_bits i)))
else assert (Or_error.is_error (check (handle_with (to_bits i))))
done
in
for n = 0 to total_length do
test n
done
let%test_unit "num_bits_int" =
assert (num_bits_int 1 = 1) ;
assert (num_bits_int 5 = 3) ;
assert (num_bits_int 17 = 5)
let%test_unit "num_bits_upper_bound_unchecked" =
let f k bs =
assert (num_bits_upper_bound_unchecked (Field.project bs) = k)
in
f 3 [ true; true; true; false; false ] ;
f 4 [ true; true; true; true; false ] ;
f 3 [ true; false; true; false; false ] ;
f 5 [ true; false; true; false; true ]
let%test_unit " num_bits_upper_bound " =
let = Field.size_in_bits - 1 in
let test x =
let handle_with resp =
handle
( num_bits_upper_bound ( Field.Var.constant x ) )
( fun ( With { request ; respond } ) - >
match request with
| Num_bits_upper_bound - > respond ( Field.of_int resp )
| _ - > unhandled )
in
let true_answer = num_bits_upper_bound_unchecked x in
for i = 0 to true_answer - 1 do
if check ( handle_with i ) ( )
then begin
let n = Bigint.of_field x in
failwithf ! " Should n't have passed : x=%s , i=%d "
( String.init > if Bigint.test_bit n j then ' 1 ' else ' 0 ' ) )
i ( ) ;
end ;
done ;
assert ( check ( handle_with true_answer ) ( ) )
in
test ( random_n_bit_field_elt )
let max_length = Field.size_in_bits - 1 in
let test x =
let handle_with resp =
handle
(num_bits_upper_bound ~max_length (Field.Var.constant x))
(fun (With {request; respond}) ->
match request with
| Num_bits_upper_bound -> respond (Field.of_int resp)
| _ -> unhandled)
in
let true_answer = num_bits_upper_bound_unchecked x in
for i = 0 to true_answer - 1 do
if check (handle_with i) ()
then begin
let n = Bigint.of_field x in
failwithf !"Shouldn't have passed: x=%s, i=%d"
(String.init max_length ~f:(fun j -> if Bigint.test_bit n j then '1' else '0'))
i ();
end;
done;
assert (check (handle_with true_answer) ())
in
test (random_n_bit_field_elt max_length)*)
end )
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/774ee06e0aa9472f9eb8f71f346c13b7e283af4b/src/lib/snark_params/snark_util.ml | ocaml | This can't overflow since the field is huge
Someday: this could definitely be made more efficient | open Core_kernel
module Make (Impl : Snarky_backendless.Snark_intf.S) = struct
open Impl
open Let_syntax
let pack_int bs =
assert (List.length bs < 62) ;
let rec go pt acc = function
| [] ->
acc
| b :: bs ->
if b then go (2 * pt) (pt + acc) bs else go (2 * pt) acc bs
in
go 1 0 bs
let boolean_assert_lte (x : Boolean.var) (y : Boolean.var) =
x < = y
y = = 1 or x = 0
( y - 1 ) * x = 0
x <= y
y == 1 or x = 0
(y - 1) * x = 0
*)
assert_r1cs
Field.Var.(sub (y :> Field.Var.t) (constant Field.one))
(x :> Field.Var.t)
(Field.Var.constant Field.zero)
let assert_decreasing : Boolean.var list -> unit Checked.t =
let rec go prev (bs0 : Boolean.var list) =
match bs0 with
| [] ->
return ()
| b :: bs ->
let%bind () = boolean_assert_lte b prev in
go b bs
in
function [] -> return () | b :: bs -> go b bs
let nth_bit x ~n = (x lsr n) land 1 = 1
let apply_mask mask bs = Checked.all (List.map2_exn mask bs ~f:Boolean.( && ))
let pack_unsafe (bs0 : Boolean.var list) =
let n = List.length bs0 in
assert (n <= Field.size_in_bits) ;
let rec go acc two_to_the_i = function
| b :: bs ->
go
(Field.Var.add acc (Field.Var.scale b two_to_the_i))
(Field.add two_to_the_i two_to_the_i)
bs
| [] ->
acc
in
go (Field.Var.constant Field.zero) Field.one (bs0 :> Field.Var.t list)
type _ Snarky_backendless.Request.t +=
| N_ones : bool list Snarky_backendless.Request.t
let n_ones ~total_length n =
let%bind bs =
exists
(Typ.list ~length:total_length Boolean.typ)
~request:(As_prover.return N_ones)
~compute:
As_prover.(
map (read_var n) ~f:(fun n ->
List.init total_length ~f:(fun i ->
Bigint.(
compare (of_field (Field.of_int i)) (of_field n) < 0) ) ))
in
let%map () =
Field.Checked.Assert.equal
(Field.Var.sum (bs :> Field.Var.t list))
n
and () = assert_decreasing bs in
bs
let assert_num_bits_upper_bound bs u =
let total_length = List.length bs in
assert (total_length < Field.size_in_bits) ;
let%bind mask = n_ones ~total_length u in
let%bind masked = apply_mask mask bs in
with_label __LOC__ (fun () ->
Field.Checked.Assert.equal (pack_unsafe masked) (pack_unsafe bs) )
let num_bits_int =
let rec go acc n = if n = 0 then acc else go (1 + acc) (n lsr 1) in
go 0
let size_in_bits_size_in_bits = num_bits_int Field.size_in_bits
type _ Snarky_backendless.Request.t +=
| Num_bits_upper_bound : Field.t Snarky_backendless.Request.t
let num_bits_upper_bound_unchecked x =
let num_bits =
match
List.find_mapi
(List.rev (Field.unpack x))
~f:(fun i x -> if x then Some i else None)
with
| Some leading_zeroes ->
Field.size_in_bits - leading_zeroes
| None ->
0
in
num_bits
let num_bits_upper_bound_unpacked : Boolean.var list -> Field.Var.t Checked.t
=
fun x_unpacked ->
let%bind res =
exists Typ.field
~request:(As_prover.return Num_bits_upper_bound)
~compute:
As_prover.(
map
(read_var (Field.Var.project x_unpacked))
~f:(fun x -> Field.of_int (num_bits_upper_bound_unchecked x)))
in
let%map () = assert_num_bits_upper_bound x_unpacked res in
res
let num_bits_upper_bound ~max_length (x : Field.Var.t) : Field.Var.t Checked.t
=
Field.Checked.unpack x ~length:max_length >>= num_bits_upper_bound_unpacked
let%test_module "Snark_util" =
( module struct
let () = Random.init 123456789
let random_bitstring length =
List.init length ~f:(fun _ -> Random.bool ())
let random_n_bit_field_elt n = Field.project (random_bitstring n)
let%test_unit "compare" =
let bit_length = Field.size_in_bits - 2 in
let random () = random_n_bit_field_elt bit_length in
let test () =
let x = random () in
let y = random () in
let less, less_or_equal =
run_and_check
(let%map { less; less_or_equal } =
Field.Checked.compare ~bit_length (Field.Var.constant x)
(Field.Var.constant y)
in
As_prover.(
map2 (read Boolean.typ less)
(read Boolean.typ less_or_equal)
~f:Tuple2.create) )
|> Or_error.ok_exn
in
let r = Bigint.(compare (of_field x) (of_field y)) in
assert (Bool.equal less (r < 0)) ;
assert (Bool.equal less_or_equal (r <= 0))
in
for _i = 0 to 100 do
test ()
done
let%test_unit "boolean_assert_lte" =
Or_error.ok_exn
(check
(Checked.all_unit
[ boolean_assert_lte Boolean.false_ Boolean.false_
; boolean_assert_lte Boolean.false_ Boolean.true_
; boolean_assert_lte Boolean.true_ Boolean.true_
] ) ) ;
assert (
Or_error.is_error
(check (boolean_assert_lte Boolean.true_ Boolean.false_)) )
let%test_unit "assert_decreasing" =
let decreasing bs =
check (assert_decreasing (List.map ~f:Boolean.var_of_value bs))
in
Or_error.ok_exn (decreasing [ true; true; true; false ]) ;
Or_error.ok_exn (decreasing [ true; true; false; false ]) ;
assert (Or_error.is_error (decreasing [ true; true; false; true ]))
let%test_unit "n_ones" =
let total_length = 6 in
let test n =
let t () =
n_ones ~total_length (Field.Var.constant (Field.of_int n))
in
let handle_with (resp : bool list) =
handle t (fun (With { request; respond }) ->
match request with
| N_ones ->
respond (Provide resp)
| _ ->
unhandled )
in
let correct = Int.pow 2 n - 1 in
let to_bits k =
List.init total_length ~f:(fun i -> (k lsr i) land 1 = 1)
in
for i = 0 to Int.pow 2 total_length - 1 do
if i = correct then
Or_error.ok_exn (check (handle_with (to_bits i)))
else assert (Or_error.is_error (check (handle_with (to_bits i))))
done
in
for n = 0 to total_length do
test n
done
let%test_unit "num_bits_int" =
assert (num_bits_int 1 = 1) ;
assert (num_bits_int 5 = 3) ;
assert (num_bits_int 17 = 5)
let%test_unit "num_bits_upper_bound_unchecked" =
let f k bs =
assert (num_bits_upper_bound_unchecked (Field.project bs) = k)
in
f 3 [ true; true; true; false; false ] ;
f 4 [ true; true; true; true; false ] ;
f 3 [ true; false; true; false; false ] ;
f 5 [ true; false; true; false; true ]
let%test_unit " num_bits_upper_bound " =
let = Field.size_in_bits - 1 in
let test x =
let handle_with resp =
handle
( num_bits_upper_bound ( Field.Var.constant x ) )
( fun ( With { request ; respond } ) - >
match request with
| Num_bits_upper_bound - > respond ( Field.of_int resp )
| _ - > unhandled )
in
let true_answer = num_bits_upper_bound_unchecked x in
for i = 0 to true_answer - 1 do
if check ( handle_with i ) ( )
then begin
let n = Bigint.of_field x in
failwithf ! " Should n't have passed : x=%s , i=%d "
( String.init > if Bigint.test_bit n j then ' 1 ' else ' 0 ' ) )
i ( ) ;
end ;
done ;
assert ( check ( handle_with true_answer ) ( ) )
in
test ( random_n_bit_field_elt )
let max_length = Field.size_in_bits - 1 in
let test x =
let handle_with resp =
handle
(num_bits_upper_bound ~max_length (Field.Var.constant x))
(fun (With {request; respond}) ->
match request with
| Num_bits_upper_bound -> respond (Field.of_int resp)
| _ -> unhandled)
in
let true_answer = num_bits_upper_bound_unchecked x in
for i = 0 to true_answer - 1 do
if check (handle_with i) ()
then begin
let n = Bigint.of_field x in
failwithf !"Shouldn't have passed: x=%s, i=%d"
(String.init max_length ~f:(fun j -> if Bigint.test_bit n j then '1' else '0'))
i ();
end;
done;
assert (check (handle_with true_answer) ())
in
test (random_n_bit_field_elt max_length)*)
end )
end
|
b2ac18682da85c488701038dc2f5862030bcb9d587b3a439391a1a7c800b8a5e | gilith/hol-light | minisat_resolve.ml | open satCommonTools
functions for replaying minisat proof LCF - style .
Called from minisatProve.ml after proof log has
been parsed .
Called from minisatProve.ml after proof log has
been parsed. *)
(* p is a literal *)
let toVar p =
if is_neg p
then rand p
else p;;
let (NOT_NOT_ELIM,NOT_NOT_CONV) =
let t = mk_var("t",bool_ty) in
let NOT_NOT2 = SPEC_ALL NOT_NOT in
((fun th -> EQ_MP (INST [rand(rand(concl th)),t] NOT_NOT2) th),
(fun tm -> INST [rand(rand tm),t] NOT_NOT2));;
let l2hh = function
h0::h1::t -> (h0,h1,t)
| _ -> failwith("Match failure in l2hh");;
+1 because minisat var numbers start at 0 , dimacsTools at 1
let mk_sat_var lfn n =
let rv = lookup_sat_num (n+1) in
tryapplyd lfn rv rv;;
let get_var_num lfn v = lookup_sat_var v - 1;;
(* mcth maps clause term t to thm of the form cnf |- t, *)
(* where t is a clause of the cnf term *)
let dualise =
let pth_and = TAUT `F \/ F <=> F` and pth_not = TAUT `~T <=> F` in
let rec REFUTE_DISJ tm =
match tm with
Comb(Comb(Const("\\/",_) as op,l),r) ->
TRANS (MK_COMB(AP_TERM op (REFUTE_DISJ l),REFUTE_DISJ r)) pth_and
| Comb(Const("~",_) as l,r) ->
TRANS (AP_TERM l (EQT_INTRO(ASSUME r))) pth_not
| _ ->
ASSUME(mk_iff(tm,f_tm)) in
fun lfn -> let INSTANTIATE_ALL_UNDERLYING th =
let fvs = thm_frees th in
let tms = map (fun v -> tryapplyd lfn v v) fvs in
INST (zip tms fvs) th in
fun mcth t ->
EQ_MP (INSTANTIATE_ALL_UNDERLYING(REFUTE_DISJ t))
(Termmap.find t mcth),t_tm,TRUTH;;
convert clause term to dualised thm form on first use
let prepareRootClause lfn mcth cl (t,lns) ci =
let (th,dl,cdef) = dualise lfn mcth t in
let _ = Array.set cl ci (Root (Rthm (th,lns,dl,cdef))) in
(th,lns);;
(* will return clause info at index ci *)
exception Fn_get_clause__match;;
exception Fn_get_root_clause__match;;
(* will return clause info at index ci *)
let getRootClause cl ci =
let res =
match (Array.get cl ci) with
Root (Rthm (t,lns,dl,cdef)) -> (t,lns,dl,cdef)
| _ -> raise Fn_get_root_clause__match in
res;;
(* will return clause thm at index ci *)
let getClause lfn mcth cl ci =
let res =
match (Array.get cl ci) with
Root (Ll (t,lns)) -> prepareRootClause lfn mcth cl (t,lns) ci
| Root (Rthm (t,lns,dl,cdef)) -> (t,lns)
| Chain _ -> raise Fn_get_clause__match
| Learnt (th,lns) -> (th,lns)
| Blank -> raise Fn_get_clause__match in
res;;
(* ground resolve clauses c0 and c1 on v,
where v is the only var that occurs with opposite signs in c0 and c1 *)
if n0 then v negated in c0
(* (but remember we are working with dualised clauses) *)
let resolve =
let pth = UNDISCH(TAUT `F ==> p`) in
let p = concl pth
and f_tm = hd(hyp pth) in
fun v n0 rth0 rth1 ->
let th0 = DEDUCT_ANTISYM_RULE (INST [v,p] pth) (if n0 then rth0 else rth1)
and th1 = DEDUCT_ANTISYM_RULE (INST [mk_iff(v,f_tm),p] pth)
(if n0 then rth1 else rth0) in
EQ_MP th1 th0;;
resolve c0 against v
let resolveClause lfn mcth cl vi rci (c0i,c1i) =
let ((rth0,lns0),(rth1,lns1)) = pair_map (getClause lfn mcth cl) (c0i,c1i) in
let piv = mk_sat_var lfn vi in
let n0 = mem piv (hyp rth0) in
let rth = resolve piv n0 rth0 rth1 in
let _ = Array.set cl rci (Learnt (rth,lns0)) in
();;
let resolveChain lfn mcth cl rci =
let (nl,lnl) =
match (Array.get cl rci) with
Chain (l,ll) -> (l,ll)
| _ -> failwith("resolveChain") in
let (vil,cil) = unzip nl in
let vil = tl vil in (* first pivot var is actually dummy value -1 *)
let (c0i,c1i,cilt) = l2hh cil in
let _ = resolveClause lfn mcth cl (List.hd vil) rci (c0i,c1i) in
let _ =
List.iter
(fun (vi,ci) ->
resolveClause lfn mcth cl vi rci (ci,rci))
(tl (tl nl)) in
();;
(* rth should be A |- F, where A contains all and only *)
(* the root clauses used in the proof *)
let unsatProveResolve lfn mcth (cl,sk,srl) =
let _ = List.iter (resolveChain lfn mcth cl) (List.rev sk) in
let rth =
match (Array.get cl (srl-1)) with
Learnt (th,_) -> th
| _ -> failwith("unsatProveTrace") in
rth;;
| null | https://raw.githubusercontent.com/gilith/hol-light/f3f131963f2298b4d65ee5fead6e986a4a14237a/Minisat/minisat_resolve.ml | ocaml | p is a literal
mcth maps clause term t to thm of the form cnf |- t,
where t is a clause of the cnf term
will return clause info at index ci
will return clause info at index ci
will return clause thm at index ci
ground resolve clauses c0 and c1 on v,
where v is the only var that occurs with opposite signs in c0 and c1
(but remember we are working with dualised clauses)
first pivot var is actually dummy value -1
rth should be A |- F, where A contains all and only
the root clauses used in the proof | open satCommonTools
functions for replaying minisat proof LCF - style .
Called from minisatProve.ml after proof log has
been parsed .
Called from minisatProve.ml after proof log has
been parsed. *)
let toVar p =
if is_neg p
then rand p
else p;;
let (NOT_NOT_ELIM,NOT_NOT_CONV) =
let t = mk_var("t",bool_ty) in
let NOT_NOT2 = SPEC_ALL NOT_NOT in
((fun th -> EQ_MP (INST [rand(rand(concl th)),t] NOT_NOT2) th),
(fun tm -> INST [rand(rand tm),t] NOT_NOT2));;
let l2hh = function
h0::h1::t -> (h0,h1,t)
| _ -> failwith("Match failure in l2hh");;
+1 because minisat var numbers start at 0 , dimacsTools at 1
let mk_sat_var lfn n =
let rv = lookup_sat_num (n+1) in
tryapplyd lfn rv rv;;
let get_var_num lfn v = lookup_sat_var v - 1;;
let dualise =
let pth_and = TAUT `F \/ F <=> F` and pth_not = TAUT `~T <=> F` in
let rec REFUTE_DISJ tm =
match tm with
Comb(Comb(Const("\\/",_) as op,l),r) ->
TRANS (MK_COMB(AP_TERM op (REFUTE_DISJ l),REFUTE_DISJ r)) pth_and
| Comb(Const("~",_) as l,r) ->
TRANS (AP_TERM l (EQT_INTRO(ASSUME r))) pth_not
| _ ->
ASSUME(mk_iff(tm,f_tm)) in
fun lfn -> let INSTANTIATE_ALL_UNDERLYING th =
let fvs = thm_frees th in
let tms = map (fun v -> tryapplyd lfn v v) fvs in
INST (zip tms fvs) th in
fun mcth t ->
EQ_MP (INSTANTIATE_ALL_UNDERLYING(REFUTE_DISJ t))
(Termmap.find t mcth),t_tm,TRUTH;;
convert clause term to dualised thm form on first use
let prepareRootClause lfn mcth cl (t,lns) ci =
let (th,dl,cdef) = dualise lfn mcth t in
let _ = Array.set cl ci (Root (Rthm (th,lns,dl,cdef))) in
(th,lns);;
exception Fn_get_clause__match;;
exception Fn_get_root_clause__match;;
let getRootClause cl ci =
let res =
match (Array.get cl ci) with
Root (Rthm (t,lns,dl,cdef)) -> (t,lns,dl,cdef)
| _ -> raise Fn_get_root_clause__match in
res;;
let getClause lfn mcth cl ci =
let res =
match (Array.get cl ci) with
Root (Ll (t,lns)) -> prepareRootClause lfn mcth cl (t,lns) ci
| Root (Rthm (t,lns,dl,cdef)) -> (t,lns)
| Chain _ -> raise Fn_get_clause__match
| Learnt (th,lns) -> (th,lns)
| Blank -> raise Fn_get_clause__match in
res;;
if n0 then v negated in c0
let resolve =
let pth = UNDISCH(TAUT `F ==> p`) in
let p = concl pth
and f_tm = hd(hyp pth) in
fun v n0 rth0 rth1 ->
let th0 = DEDUCT_ANTISYM_RULE (INST [v,p] pth) (if n0 then rth0 else rth1)
and th1 = DEDUCT_ANTISYM_RULE (INST [mk_iff(v,f_tm),p] pth)
(if n0 then rth1 else rth0) in
EQ_MP th1 th0;;
resolve c0 against v
let resolveClause lfn mcth cl vi rci (c0i,c1i) =
let ((rth0,lns0),(rth1,lns1)) = pair_map (getClause lfn mcth cl) (c0i,c1i) in
let piv = mk_sat_var lfn vi in
let n0 = mem piv (hyp rth0) in
let rth = resolve piv n0 rth0 rth1 in
let _ = Array.set cl rci (Learnt (rth,lns0)) in
();;
let resolveChain lfn mcth cl rci =
let (nl,lnl) =
match (Array.get cl rci) with
Chain (l,ll) -> (l,ll)
| _ -> failwith("resolveChain") in
let (vil,cil) = unzip nl in
let (c0i,c1i,cilt) = l2hh cil in
let _ = resolveClause lfn mcth cl (List.hd vil) rci (c0i,c1i) in
let _ =
List.iter
(fun (vi,ci) ->
resolveClause lfn mcth cl vi rci (ci,rci))
(tl (tl nl)) in
();;
let unsatProveResolve lfn mcth (cl,sk,srl) =
let _ = List.iter (resolveChain lfn mcth cl) (List.rev sk) in
let rth =
match (Array.get cl (srl-1)) with
Learnt (th,_) -> th
| _ -> failwith("unsatProveTrace") in
rth;;
|
f410f761aa6f00d547d664178e1231b35e4251a698a3a7fd62a8a7f75c4f0e81 | lipas-liikuntapaikat/lipas | lists.cljs | (ns lipas.ui.components.lists
(:require
["react-virtualized" :refer [AutoSizer List InfiniteLoader]]
[goog.object :as gobj]
[lipas.ui.mui :as mui]
[reagent.core :as r]))
(defn row-renderer
[{:keys [items label-fn label2-fn on-item-click]} js-opts]
(let [key (gobj/get js-opts "key")
idx (gobj/get js-opts "index")
style (gobj/get js-opts "style")
item (get items idx)]
(r/as-element
^{:key key}
[mui/grid {:item true :xs 12 :style style}
[mui/list-item
{:button (some? item) :divider (some? item)
:on-click #(when item (on-item-click item))}
[mui/list-item-text
{:primary (label-fn item)
:secondary (label2-fn item)
:primary-typography-props {:no-wrap true}
:secondary-typography-props {:no-wrap true}}]]])))
(defn virtualized-list [{:keys [items] :as props}]
[:> AutoSizer
(fn [m]
(let [row-height 64
width (gobj/get m "width")
height (max (gobj/get m "height") (* 5 row-height))]
(r/as-element
[:> List
{:row-width width
:width width
:height height
:row-height row-height
:row-renderer (partial row-renderer props)
:row-count (inc (count items))}])))])
(defn inifinite-list [{:keys [items] :as props}]
[:> InfiniteLoader
(fn [m]
(let [row-height 64
width (gobj/get m "width")
height (gobj/get m "height")]
(r/as-element
[:> List
{:row-width width
:width width
:height height
:row-height row-height
:row-renderer (partial row-renderer props)
:row-count (count items)}])))])
| null | https://raw.githubusercontent.com/lipas-liikuntapaikat/lipas/779934b723ec1e0cf52d6a7778b0b7e9f5d15c6b/webapp/src/cljs/lipas/ui/components/lists.cljs | clojure | (ns lipas.ui.components.lists
(:require
["react-virtualized" :refer [AutoSizer List InfiniteLoader]]
[goog.object :as gobj]
[lipas.ui.mui :as mui]
[reagent.core :as r]))
(defn row-renderer
[{:keys [items label-fn label2-fn on-item-click]} js-opts]
(let [key (gobj/get js-opts "key")
idx (gobj/get js-opts "index")
style (gobj/get js-opts "style")
item (get items idx)]
(r/as-element
^{:key key}
[mui/grid {:item true :xs 12 :style style}
[mui/list-item
{:button (some? item) :divider (some? item)
:on-click #(when item (on-item-click item))}
[mui/list-item-text
{:primary (label-fn item)
:secondary (label2-fn item)
:primary-typography-props {:no-wrap true}
:secondary-typography-props {:no-wrap true}}]]])))
(defn virtualized-list [{:keys [items] :as props}]
[:> AutoSizer
(fn [m]
(let [row-height 64
width (gobj/get m "width")
height (max (gobj/get m "height") (* 5 row-height))]
(r/as-element
[:> List
{:row-width width
:width width
:height height
:row-height row-height
:row-renderer (partial row-renderer props)
:row-count (inc (count items))}])))])
(defn inifinite-list [{:keys [items] :as props}]
[:> InfiniteLoader
(fn [m]
(let [row-height 64
width (gobj/get m "width")
height (gobj/get m "height")]
(r/as-element
[:> List
{:row-width width
:width width
:height height
:row-height row-height
:row-renderer (partial row-renderer props)
:row-count (count items)}])))])
| |
6a615a842cd2e6372a8572a9f40a1c95e44084150259b1f665e1690e4fdf3ee1 | abridgewater/nq-clim | port-discovery.lisp | ;;;
nq - clim / port / port - discovery
;;;
Part of CLIM II 9.2 .
;;;
(cl:defpackage :nq-clim/port/port-discovery
(:use :cl
:nq-clim/port/port
:nq-clim/port/port-protocol)
(:export
"*DEFAULT-SERVER-PATH*"
"FIND-PORT"
"MAP-OVER-PORTS"
;; The following are nq-clim specific implementation hooks.
"RESOLVE-SERVER-PATH"
"CREATE-PORT"))
(cl:in-package :nq-clim/port/port-discovery)
(defvar *all-ports* nil "All currently-open ports.")
(defun map-over-ports (function)
(dolist (port *all-ports*)
(funcall function port)))
(defmethod destroy-port :after (port)
(setf *all-ports* (remove port *all-ports*)))
(defparameter *default-server-path* '(:clx))
(defgeneric resolve-server-path (port-type &key)
(:documentation "To be implemented by each port type, convert the
spread server path passed as arguments to what would be returned from
PORT-SERVER-PATH called on the port should a port be opened with this
path designator."))
(defgeneric create-port (port-type &key)
(:documentation "To be implemented by each port type, create a port
object corresponding to the server path passed as parameters."))
(defun find-port (&rest initargs
&key (server-path *default-server-path*)
&allow-other-keys)
(let ((server-path (apply #'resolve-server-path server-path)))
(map-over-ports
(lambda (port)
(when (equal server-path (port-server-path port))
(return-from find-port port))))
(let ((port (apply #'create-port (car server-path)
:server-path server-path
initargs)))
(push port *all-ports*)
port)))
EOF
| null | https://raw.githubusercontent.com/abridgewater/nq-clim/11d339fd0ac77b6d624fc5537b170294a191a3de/port/port-discovery.lisp | lisp |
The following are nq-clim specific implementation hooks. | nq - clim / port / port - discovery
Part of CLIM II 9.2 .
(cl:defpackage :nq-clim/port/port-discovery
(:use :cl
:nq-clim/port/port
:nq-clim/port/port-protocol)
(:export
"*DEFAULT-SERVER-PATH*"
"FIND-PORT"
"MAP-OVER-PORTS"
"RESOLVE-SERVER-PATH"
"CREATE-PORT"))
(cl:in-package :nq-clim/port/port-discovery)
(defvar *all-ports* nil "All currently-open ports.")
(defun map-over-ports (function)
(dolist (port *all-ports*)
(funcall function port)))
(defmethod destroy-port :after (port)
(setf *all-ports* (remove port *all-ports*)))
(defparameter *default-server-path* '(:clx))
(defgeneric resolve-server-path (port-type &key)
(:documentation "To be implemented by each port type, convert the
spread server path passed as arguments to what would be returned from
PORT-SERVER-PATH called on the port should a port be opened with this
path designator."))
(defgeneric create-port (port-type &key)
(:documentation "To be implemented by each port type, create a port
object corresponding to the server path passed as parameters."))
(defun find-port (&rest initargs
&key (server-path *default-server-path*)
&allow-other-keys)
(let ((server-path (apply #'resolve-server-path server-path)))
(map-over-ports
(lambda (port)
(when (equal server-path (port-server-path port))
(return-from find-port port))))
(let ((port (apply #'create-port (car server-path)
:server-path server-path
initargs)))
(push port *all-ports*)
port)))
EOF
|
31b3b003f9753ccfdd762f29642c3f1709ae211d46633549f2102ecaf329598b | zalora/zerobin | Main.hs | # LANGUAGE QuasiQuotes #
module Main where
import Data.Version (showVersion)
import Paths_zerobin (version) -- from cabal
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.IO (stderr, hPutStrLn)
import Text.RawString.QQ (r)
import Web.ZeroBin (share, Expiration(..))
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C
import qualified System.Console.Docopt.NoTH as O
usage :: String
usage = "zerobin " ++ showVersion version
++ " pastes to 0bin services" ++ [r|
zerobin prints URI to be shared or error message
See and
Usage:
zerobin [options] TEXT
Options:
0bin service [ default : ]
file Paste the content of file TEXT ( " - " for stdin )
expire = E Set expiration of paste : once , day , week , month [ default : day ]
-h, --help Show this message
Examples:
zerobin hello paste "hello" for a day
zerobin -f /etc/fstab paste file /etc/fstab for a day
cat /etc/fstab | zerobin -f - likewise
zerobin -e once hello paste "hello", it will burn after reading
zerobin -b hello paste to 0bin.net
|]
getExpiration :: String -> Maybe Expiration
getExpiration e =
case e of
"once" -> Just Once
"day" -> Just Day
"week" -> Just Week
"month" -> Just Month
_ -> Nothing
die :: String -> IO ()
die msg = do
hPutStrLn stderr $ "zerobin: " ++ msg
exitFailure
getContent :: Bool -> String -> IO BS.ByteString
getContent isFile text
| isFile && (text == "-") = BS.getContents
| isFile = BS.readFile text
| otherwise = return $ C.pack text
main :: IO ()
main = do
doco <- O.parseUsageOrExit usage
args <- O.parseArgsOrExit doco =<< getArgs
if args `O.isPresent` O.longOption "help"
then putStrLn $ O.usage doco
else do
let get = O.getArgOrExitWith doco
bin <- args `get` O.longOption "bin"
expire <- args `get` O.longOption "expire"
text <- args `get` O.argument "TEXT"
cnt <- getContent (args `O.isPresent` O.longOption "file") text
case getExpiration expire of
Nothing -> die "invalid value for expiration"
Just e -> share bin e cnt >>= putStrLn
| null | https://raw.githubusercontent.com/zalora/zerobin/3c3da3eca80e2343fa196c864d06653784169c03/cli/Main.hs | haskell | from cabal
help Show this message | # LANGUAGE QuasiQuotes #
module Main where
import Data.Version (showVersion)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.IO (stderr, hPutStrLn)
import Text.RawString.QQ (r)
import Web.ZeroBin (share, Expiration(..))
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C
import qualified System.Console.Docopt.NoTH as O
usage :: String
usage = "zerobin " ++ showVersion version
++ " pastes to 0bin services" ++ [r|
zerobin prints URI to be shared or error message
See and
Usage:
zerobin [options] TEXT
Options:
0bin service [ default : ]
file Paste the content of file TEXT ( " - " for stdin )
expire = E Set expiration of paste : once , day , week , month [ default : day ]
Examples:
zerobin hello paste "hello" for a day
zerobin -f /etc/fstab paste file /etc/fstab for a day
cat /etc/fstab | zerobin -f - likewise
zerobin -e once hello paste "hello", it will burn after reading
zerobin -b hello paste to 0bin.net
|]
getExpiration :: String -> Maybe Expiration
getExpiration e =
case e of
"once" -> Just Once
"day" -> Just Day
"week" -> Just Week
"month" -> Just Month
_ -> Nothing
die :: String -> IO ()
die msg = do
hPutStrLn stderr $ "zerobin: " ++ msg
exitFailure
getContent :: Bool -> String -> IO BS.ByteString
getContent isFile text
| isFile && (text == "-") = BS.getContents
| isFile = BS.readFile text
| otherwise = return $ C.pack text
main :: IO ()
main = do
doco <- O.parseUsageOrExit usage
args <- O.parseArgsOrExit doco =<< getArgs
if args `O.isPresent` O.longOption "help"
then putStrLn $ O.usage doco
else do
let get = O.getArgOrExitWith doco
bin <- args `get` O.longOption "bin"
expire <- args `get` O.longOption "expire"
text <- args `get` O.argument "TEXT"
cnt <- getContent (args `O.isPresent` O.longOption "file") text
case getExpiration expire of
Nothing -> die "invalid value for expiration"
Just e -> share bin e cnt >>= putStrLn
|
af06602d624e456dd7a1180d40e15ec76a4fb7dd12668b4aee19fe36ff7e8e2d | cuplv/raz.ocaml | batList.ml |
* BatList - additional and modified functions for lists .
* Copyright ( C ) 2003
* Copyright ( C ) 2003
* Copyright ( C ) 2008 Red Hat Inc.
* Copyright ( C ) 2008 , LIFO , Universite d'Orleans
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* BatList - additional and modified functions for lists.
* Copyright (C) 2003 Brian Hurt
* Copyright (C) 2003 Nicolas Cannasse
* Copyright (C) 2008 Red Hat Inc.
* Copyright (C) 2008 David Rajchenbach-Teller, LIFO, Universite d'Orleans
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
: : VH : : GLUE with StdLib
let merge = List.merge
let fast_sort = List.fast_sort
let stable_sort = List.stable_sort
let sort = List.sort
let assq = List.assq
let assoc = List.assoc
let find = List.find
let exists = List.exists
let for_all = List.for_all
let fold_left = List.fold_left
let rev_map = List.rev_map
let iter = List.iter
let rev_append = List.rev_append
let rev = List.rev
let length = List.length
let tl = List.tl
let hd = List.hd
let mem = List.mem
let memq = List.memq
let mem_assq = List.mem_assq
let mem_assoc = List.mem_assoc
let rev_map2 = List.rev_map2
: : VH : : END GLUE
Thanks to for suggesting the following structure
type 'a mut_list = {
hd: 'a;
mutable tl: 'a list
}
type 'a t = 'a list
type 'a enumerable = 'a t
type 'a mappable = 'a t
external inj : 'a mut_list -> 'a list = "%identity"
module Acc = struct
let dummy () =
{ hd = Obj.magic (); tl = [] }
let create x =
{ hd = x; tl = [] }
let accum acc x =
let cell = create x in
acc.tl <- inj cell;
cell
end
let cons h t = h::t
let is_empty = function
| [] -> true
| _ -> false
(*$T is_empty
is_empty []
not (is_empty [1])
*)
let at_negative_index_msg = "Negative index not allowed"
let at_after_end_msg = "Index past end of list"
let nth l index =
if index < 0 then invalid_arg at_negative_index_msg;
let rec loop n = function
| [] -> invalid_arg at_after_end_msg;
| h :: t ->
if n = 0 then h else loop (n - 1) t
in
loop index l
let at = nth
$ T at
try ignore ( at [ ] 0 ) ; false with Invalid_argument _ - > true
try ignore ( at [ 1;2;3 ] ( -1 ) ) ; false with Invalid_argument _ - > true
at [ 1;2;3 ] 2 = 3
try ignore (at [] 0); false with Invalid_argument _ -> true
try ignore (at [1;2;3] (-1)); false with Invalid_argument _ -> true
at [1;2;3] 2 = 3
*)
let mem_cmp cmp x l =
exists (fun y -> cmp x y = 0) l
$ T mem_cmp
mem_cmp Pervasives.compare 0 [ ] = false
mem_cmp Pervasives.compare 0 [ 1 ; 2 ] = false
mem_cmp Pervasives.compare 1 [ 1 ; 2 ] = true
mem_cmp Pervasives.compare 2 [ 1 ; 2 ] = true
mem_cmp Pervasives.compare 0 [] = false
mem_cmp Pervasives.compare 0 [1; 2] = false
mem_cmp Pervasives.compare 1 [1; 2] = true
mem_cmp Pervasives.compare 2 [1; 2] = true
*)
let append l1 l2 =
match l1 with
| [] -> l2
| h :: t ->
let rec loop dst = function
| [] ->
dst.tl <- l2
| h :: t ->
loop (Acc.accum dst h) t
in
let r = Acc.create h in
loop r t;
inj r
$ T append
append [ ] [ ] = [ ]
append [ ] [ 1 ] = [ 1 ]
append [ 1 ] [ ] = [ 1 ]
append [ 1 ] [ 2 ] = [ 1 ; 2 ]
append [ 1 ; 2 ] [ 3 ] = [ 1 ; 2 ; 3 ]
append [ 1 ] [ 2 ; 3 ] = [ 1 ; 2 ; 3 ]
append [] [] = []
append [] [1] = [1]
append [1] [] = [1]
append [1] [2] = [1; 2]
append [1; 2] [3] = [1; 2; 3]
append [1] [2; 3] = [1; 2; 3]
*)
let flatten l =
let rec inner dst = function
| [] -> dst
| h :: t ->
inner (Acc.accum dst h) t
in
let rec outer dst = function
| [] -> ()
| h :: t -> outer (inner dst h) t
in
let r = Acc.dummy () in
outer r l;
r.tl
let concat = flatten
$ T flatten
flatten [ [ 1;2];[3];[];[4;5;6 ] ] = [ 1;2;3;4;5;6 ]
flatten [ [ ] ] = [ ]
flatten [[1;2];[3];[];[4;5;6]] = [1;2;3;4;5;6]
flatten [[]] = []
*)
let singleton x = [x]
$ Q singleton
Q.int ( fun x - > let s = singleton x in hd s = x & & length s = 1 )
Q.int (fun x -> let s = singleton x in hd s = x && length s = 1)
*)
let map f = function
| [] -> []
| h :: t ->
let rec loop dst = function
| [] -> ()
| h :: t ->
loop (Acc.accum dst (f h)) t
in
let r = Acc.create (f h) in
loop r t;
inj r
$ Q map
( Q.pair ( Q.fun1 Q.int Q.int ) ( Q.list Q.small_int ) ) \
( fun ( f , l ) - > map f l = List.map f l )
(Q.pair (Q.fun1 Q.int Q.int) (Q.list Q.small_int)) \
(fun (f,l) -> map f l = List.map f l)
*)
let rec drop n = function
| _ :: l when n > 0 -> drop (n-1) l
| l -> l
$ = drop & ~printer:(IO.to_string ( List.print Int.print ) )
( drop 0 [ 1;2;3 ] ) [ 1;2;3 ]
( drop 3 [ 1;2;3 ] ) [ ]
( drop 4 [ 1;2;3 ] ) [ ]
( drop 1 [ 1;2;3 ] ) [ 2;3 ]
(drop 0 [1;2;3]) [1;2;3]
(drop 3 [1;2;3]) []
(drop 4 [1;2;3]) []
(drop 1 [1;2;3]) [2;3]
*)
let take n l =
let rec loop n dst = function
| h :: t when n > 0 ->
loop (n - 1) (Acc.accum dst h) t
| _ ->
()
in
let dummy = Acc.dummy () in
loop n dummy l;
dummy.tl
$ = take & ~printer:(IO.to_string ( List.print Int.print ) )
( take 0 [ 1;2;3 ] ) [ ]
( take 3 [ 1;2;3 ] ) [ 1;2;3 ]
( take 4 [ 1;2;3 ] ) [ 1;2;3 ]
( take 1 [ 1;2;3 ] ) [ 1 ]
(take 0 [1;2;3]) []
(take 3 [1;2;3]) [1;2;3]
(take 4 [1;2;3]) [1;2;3]
(take 1 [1;2;3]) [1]
*)
let takedrop n l =
let rec loop n dst = function
| h :: t when n > 0 -> loop (n - 1) (Acc.accum dst h) t
| rest -> rest
in
let dummy = Acc.dummy () in
let rest = loop n dummy l in
(dummy.tl, rest)
$ T takedrop
takedrop 0 [ 1 ; 2 ; 3 ] = ( [ ] , [ 1 ; 2 ; 3 ] )
takedrop 3 [ 1 ; 2 ; 3 ] = ( [ 1 ; 2 ; 3 ] , [ ] )
takedrop 4 [ 1 ; 2 ; 3 ] = ( [ 1 ; 2 ; 3 ] , [ ] )
takedrop 1 [ 1 ; 2 ; 3 ] = ( [ 1 ] , [ 2 ; 3 ] )
takedrop 0 [1; 2; 3] = ([], [1; 2; 3])
takedrop 3 [1; 2; 3] = ([1; 2; 3], [])
takedrop 4 [1; 2; 3] = ([1; 2; 3], [])
takedrop 1 [1; 2; 3] = ([1], [2; 3])
*)
let ntake n l =
if n < 1 then invalid_arg "BatList.ntake";
let took, left = takedrop n l in
let acc = Acc.create took in
let rec loop dst = function
| [] -> inj acc
| li -> let taken, rest = takedrop n li in
loop (Acc.accum dst taken) rest
in
loop acc left
$ T 2 [ ] = [ [ ] ]
ntake 2 [ 1 ] = [ [ 1 ] ]
ntake 2 [ 1 ; 2 ] = [ [ 1 ; 2 ] ]
ntake 2 [ 1 ; 2 ; 3 ] = [ [ 1 ; 2 ] ; [ 3 ] ]
ntake 2 [ 1 ; 2 ; 3 ; 4 ] = [ [ 1 ; 2 ] ; [ 3 ; 4 ] ]
ntake 2 [] = [[]]
ntake 2 [1] = [[1]]
ntake 2 [1; 2] = [[1; 2]]
ntake 2 [1; 2; 3] = [[1; 2]; [3]]
ntake 2 [1; 2; 3; 4] = [[1; 2]; [3; 4]]
*)
let take_while p li =
let rec loop dst = function
| [] -> ()
| x :: xs ->
if p x then
loop (Acc.accum dst x) xs in
let dummy = Acc.dummy () in
loop dummy li;
dummy.tl
$ = take_while & ~printer:(IO.to_string ( List.print Int.print ) )
( take_while ( (= ) 3 ) [ 3;3;4;3;3 ] ) [ 3;3 ]
( take_while ( (= ) 3 ) [ 3 ] ) [ 3 ]
( take_while ( (= ) 3 ) [ 4 ] ) [ ]
( take_while ( (= ) 3 ) [ ] ) [ ]
( take_while ( (= ) 2 ) [ 2 ; 2 ] ) [ 2 ; 2 ]
(take_while ((=) 3) [3;3;4;3;3]) [3;3]
(take_while ((=) 3) [3]) [3]
(take_while ((=) 3) [4]) []
(take_while ((=) 3) []) []
(take_while ((=) 2) [2; 2]) [2; 2]
*)
let rec drop_while f = function
| [] -> []
| x :: xs when f x -> drop_while f xs
| xs -> xs
$ = drop_while & ~printer:(IO.to_string ( List.print Int.print ) )
( drop_while ( (= ) 3 ) [ 3;3;4;3;3 ] ) [ 4;3;3 ]
( drop_while ( (= ) 3 ) [ 3 ] ) [ ]
(drop_while ((=) 3) [3;3;4;3;3]) [4;3;3]
(drop_while ((=) 3) [3]) []
*)
let span p li =
let rec loop dst = function
| [] -> []
| x :: xs as l ->
if p x then
loop (Acc.accum dst x) xs
else l
in
let dummy = Acc.dummy () in
let xs = loop dummy li in
(dummy.tl , xs)
$ = span
( span ( (= ) 3 ) [ 3;3;4;3;3 ] ) ( [ 3;3],[4;3;3 ] )
( span ( (= ) 3 ) [ 3 ] ) ( [ 3 ] , [ ] )
( span ( (= ) 3 ) [ 4 ] ) ( [ ] , [ 4 ] )
( span ( (= ) 3 ) [ ] ) ( [ ] , [ ] )
( span ( (= ) 2 ) [ 2 ; 2 ] ) ( [ 2 ; 2 ] , [ ] )
(span ((=) 3) [3;3;4;3;3]) ([3;3],[4;3;3])
(span ((=) 3) [3]) ([3],[])
(span ((=) 3) [4]) ([],[4])
(span ((=) 3) []) ([],[])
(span ((=) 2) [2; 2]) ([2; 2],[])
*)
(* [nsplit p l] splits [l] into chunks separated by elements that
   satisfy [p]; the separators themselves are dropped. *)
let nsplit p = function
  | [] -> []
  (* Note that returning [] on empty inputs is an arbitrary choice
     that is made for consistence with the behavior of
     BatString.nsplit. Not having this hardcoded case would have
     `nsplit p []` return `[[]]`, which is also a semantically valid
     return value (in fact the two are equivalent, but `[[]]` would be
     a more natural choice as it allows to enforce the simply
     invariant that `nsplit` return values are always non-empty).
     If that was to redo from scratch, `[[]]` would be a better return
     value for both `BatList.nsplit` and `BatString.nsplit`. *)
  | li ->
    let not_p x = not (p x) in
    let rec loop dst l =
      (* [ok]: chunk before the next separator; [rest]: separator and tail *)
      let ok, rest = span not_p l in
      let r = Acc.accum dst ok in
      match rest with
      | [] -> ()
      | x :: xs -> loop r xs (* [x] is the separator being dropped *)
    in
    let dummy = Acc.dummy () in
    loop dummy li;
    dummy.tl
(*$T nsplit
nsplit ((=) 0) [] = []
nsplit ((=) 0) [0] = [[]; []]
nsplit ((=) 0) [1; 0] = [[1]; []]
nsplit ((=) 0) [0; 1] = [[]; [1]]
nsplit ((=) 0) [1; 2; 0; 0; 3; 4; 0; 5] = [[1; 2]; []; [3; 4]; [5]]
*)
(*$Q nsplit & ~count:10
(Q.list (Q.list Q.pos_int)) (fun xss -> \
let join sep xss = flatten (interleave [sep] xss) in \
(* normalize: the return type of nsplit \
is quotiented by the equivalence []~[[]] *) \
let normalize = function [] -> [[]] | li -> li in \
let neg = -1 in \
normalize xss = normalize (nsplit ((=) neg) (join neg xss)) \
)
(Q.pair Q.small_int (Q.list Q.small_int)) (fun (sep,xs) -> \
let join sep xss = flatten (interleave [sep] xss) in \
xs = join sep (nsplit ((=) sep) xs) \
)
*)
(* nsplit ((=) sep) la @ nsplit ((=) sep) lb = nsplit ((=) sep) (la @ [sep] @ lb) *)
(* [group_consecutive p l] groups runs of consecutive elements: each
   element [x] opens a group containing [x] and the immediately
   following elements [y] for which [p x y] holds.  Tail-recursive. *)
let group_consecutive p l =
  let rec loop dst = function
    | [] -> ()
    | x :: rest ->
      (* [xs]: the run belonging to [x]'s group; [rest]: what follows *)
      let xs, rest = span (p x) rest in
      loop (Acc.accum dst (x :: xs)) rest
  in
  let dummy = Acc.dummy () in
  loop dummy l;
  dummy.tl
(*$= group_consecutive & ~printer:(IO.to_string (List.print (List.print Int.print)))
(group_consecutive (=) [3; 3; 4; 3; 3]) [[3; 3]; [4]; [3; 3]]
(group_consecutive (=) [3]) [[3]]
(group_consecutive (=) []) []
(group_consecutive (=) [2; 2]) [[2; 2]]
*)
let takewhile = take_while
let dropwhile = drop_while
(* Insert [sep] between consecutive elements of [l]; optionally place
   [first] in front of and [last] behind the whole result. *)
let interleave ?first ?last (sep:'a) (l:'a list) =
  let prepend opt tail = match opt with
    | Some v -> v :: tail
    | None -> tail
  in
  (* build the separated list back-to-front, then reverse once *)
  let rec build acc = function
    | [] -> acc
    | y :: ys ->
      (match acc with
       | [] -> build [y] ys
       | _ :: _ -> build (y :: sep :: acc) ys)
  in
  prepend first (List.rev (prepend last (build [] l)))
(*$= interleave & ~printer:(IO.to_string (List.print Int.print))
(interleave 0 [1;2;3]) [1;0;2;0;3]
(interleave 0 [1]) [1]
(interleave 0 []) []
(interleave ~first:(-1) 0 [1;2;3]) [-1;1;0;2;0;3]
(interleave ~first:(-1) 0 [1]) [-1;1]
(interleave ~first:(-1) 0 []) [-1]
(interleave ~last:(-2) 0 [1;2;3]) [1;0;2;0;3;-2]
(interleave ~last:(-2) 0 [1]) [1;-2]
(interleave ~last:(-2) 0 []) [-2]
(interleave ~first:(-1) ~last:(-2) 0 [1;2;3]) [-1;1;0;2;0;3;-2]
(interleave ~first:(-1) ~last:(-2) 0 [1]) [-1;1;-2]
(interleave ~first:(-1) ~last:(-2) 0 []) [-1;-2]
*)
(* [unique ?eq l] removes duplicates w.r.t. [eq], keeping the LAST
   occurrence of each element: an element is dropped whenever an
   [eq]-equal one still exists later in the list.
   O(n^2) comparisons. *)
let unique ?(eq = ( = )) l =
  let rec loop dst = function
    | [] -> ()
    | h :: t ->
      match exists (eq h) t with
      | true -> loop dst t
      | false ->
        loop (Acc.accum dst h) t
  in
  let dummy = Acc.dummy () in
  loop dummy l;
  dummy.tl
(* FIXME BAD TESTS: RESULT IS SPECIFIC TO IMPLEMENTATION *)
(*$= unique & ~printer:(IO.to_string (List.print Int.print))
[1;2;3;4;5;6] (unique [1;1;2;2;3;3;4;5;6;4;5;6])
[1] (unique [1;1;1;1;1;1;1;1;1;1])
[1;2] (unique ~eq:(fun x y -> x land 1 = y land 1) [2;2;2;4;6;8;3;1;2])
*)
(* Remove duplicates w.r.t. [cmp], keeping the FIRST occurrence of
   each element.  O(n log n) via a set of already-seen values. *)
let unique_cmp ?(cmp = Pervasives.compare) l =
  let seen = ref (BatMap.PMap.create cmp) in
  (* stateful predicate: true exactly once per equivalence class *)
  let first_occurrence x =
    if BatMap.PMap.mem x !seen then false
    else begin
      seen := BatMap.PMap.add x true !seen;
      true
    end
  in
  List.filter first_occurrence l
(*$= unique_cmp & ~printer:(IO.to_string (List.print Int.print))
[1;2;3;4;5;6] (unique_cmp [1;1;2;2;3;3;4;5;6;4;5;6])
[1] (unique_cmp [1;1;1;1;1;1;1;1;1;1])
[2;3] (unique_cmp ~cmp:(fun x y -> Int.compare (x land 1) (y land 1)) [2;2;2;4;6;8;3;1;2])
*)
let unique_hash (type et) ?(hash = Hashtbl.hash) ?(eq = (=)) (l : et list) =
let module HT = Hashtbl.Make(struct type t = et let equal = eq let hash = hash end) in
let ht = HT.create (List.length l) in
let rec loop dst = function
| h::t when not (HT.mem ht h) ->
HT.add ht h (); (* put h in hash table *)
loop
(Acc.accum dst h) (* and to output list *)
t
| _::t -> (* if already in hashtable then don't add to output list *)
loop dst t
| [] -> ()
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
(*$= unique_hash & ~printer:(IO.to_string (List.print Int.print))
[1;2;3;4;5;6] (unique_hash [1;1;2;2;3;3;4;5;6;4;5;6])
[1] (unique_hash [1;1;1;1;1;1;1;1;1;1])
[2;3] (unique_hash ~hash:(fun x -> Hashtbl.hash (x land 1)) ~eq:(fun x y -> x land 1 = y land 1) [2;2;2;4;6;8;3;1;2])
*)
(* [filter_map f l] applies [f] to every element and keeps the
   payloads of the [Some] results, preserving order.
   Tail-recursive via the [Acc] destination accumulator. *)
let filter_map f l =
  let rec loop dst = function
    | [] -> ()
    | h :: t ->
      match f h with
      | None -> loop dst t
      | Some x ->
        loop (Acc.accum dst x) t
  in
  let dummy = Acc.dummy () in
  loop dummy l;
  dummy.tl
let filteri_map f l =
let rec loop i dst = function
| [] -> ()
| h :: t ->
match f i h with
| None -> loop (succ i) dst t
| Some x ->
loop (succ i) (Acc.accum dst x) t
in
let dummy = Acc.dummy () in
loop 0 dummy l;
dummy.tl
(*$T filteri_map
(let r = ref (-1) in filteri_map (fun i _ -> incr r; if i = !r then Some i else None) [5; 4; 8] = [0; 1; 2])
filteri_map (fun _ x -> if x > 4 then Some (x, string_of_int x) else None) [5; 4; 8] = [(5, "5"); (8, "8")]
filteri_map (fun _ _ -> Some ()) [] = []
filteri_map (fun _ _ -> None) [1; 2] = []
*)
(* First [Some]-result of [f] over the list, unwrapped.
   @raise Not_found if [f] yields [None] on every element. *)
let rec find_map f = function
  | [] -> raise Not_found
  | hd :: tl ->
    (match f hd with
     | Some v -> v
     | None -> find_map f tl)
let fold_right_max = 1000
let fold_right f l init =
let rec tail_loop acc = function
| [] -> acc
| h :: t -> tail_loop (f h acc) t
in
let rec loop n = function
| [] -> init
| h :: t ->
if n < fold_right_max then
f h (loop (n+1) t)
else
f h (tail_loop init (rev t))
in
loop 0 l
(* Tail-recursive [map2]: builds the result through the [Acc]
   destination accumulator instead of stacking frames.
   @raise Invalid_argument if the two lists have different lengths. *)
let map2 f l1 l2 =
  let rec loop dst src1 src2 =
    match src1, src2 with
    | [], [] -> ()
    | h1 :: t1, h2 :: t2 ->
      loop (Acc.accum dst (f h1 h2)) t1 t2
    | _ -> invalid_arg "map2: Different_list_size"
  in
  let dummy = Acc.dummy () in
  loop dummy l1 l2;
  dummy.tl
(* Apply [f] pairwise to two lists of equal length, for side effects.
   @raise Invalid_argument if the lengths differ. *)
let rec iter2 f l1 l2 =
  match l1, l2 with
  | a :: rest1, b :: rest2 ->
    f a b;
    iter2 f rest1 rest2
  | [], [] -> ()
  | _ -> invalid_arg "iter2: Different_list_size"
(* Left fold over two lists of equal length.
   @raise Invalid_argument if the lengths differ. *)
let rec fold_left2 f accum l1 l2 =
  match l1, l2 with
  | x :: xs, y :: ys -> fold_left2 f (f accum x y) xs ys
  | [], [] -> accum
  | _ -> invalid_arg "fold_left2: Different_list_size"
(* [fold_right2 f l1 l2 init]: right fold over two equal-length lists.
   Uses real (non-tail) recursion for up to [fold_right_max] elements,
   then switches to a tail-recursive loop over the reversed tails to
   avoid stack overflow on long inputs.
   @raise Invalid_argument if the lists have different lengths. *)
let fold_right2 f l1 l2 init =
  let rec tail_loop acc l1 l2 =
    match l1, l2 with
    | [] , [] -> acc
    | h1 :: t1 , h2 :: t2 -> tail_loop (f h1 h2 acc) t1 t2
    (* fixed: this error previously named "fold_left2" *)
    | _ -> invalid_arg "fold_right2: Different_list_size"
  in
  let rec loop n l1 l2 =
    match l1, l2 with
    | [], [] -> init
    | h1 :: t1, h2 :: t2 ->
      if n < fold_right_max then
        f h1 h2 (loop (n+1) t1 t2)
      else
        f h1 h2 (tail_loop init (rev t1) (rev t2))
    | _ -> invalid_arg "fold_right2: Different_list_size"
  in
  loop 0 l1 l2
(* Does [p] hold for every aligned pair of the two lists?
   Short-circuits on the first failing pair.
   @raise Invalid_argument if the lengths differ (and no pair failed
   before the mismatch was reached). *)
let for_all2 p l1 l2 =
  let rec check = function
    | h1 :: t1, h2 :: t2 -> p h1 h2 && check (t1, t2)
    | [], [] -> true
    | _ -> invalid_arg "for_all2: Different_list_size"
  in
  check (l1, l2)
(* Does [p] hold for at least one aligned pair of the two lists?
   Short-circuits on the first succeeding pair.
   @raise Invalid_argument if the lengths differ (and no pair succeeded
   before the mismatch was reached). *)
let exists2 p l1 l2 =
  let rec scan = function
    | h1 :: t1, h2 :: t2 -> p h1 h2 || scan (t1, t2)
    | [], [] -> false
    | _ -> invalid_arg "exists2: Different_list_size"
  in
  scan (l1, l2)
let remove_assoc x lst =
let rec loop dst = function
| [] -> ()
| (a, _ as pair) :: t ->
if a = x then
dst.tl <- t
else
loop (Acc.accum dst pair) t
in
let dummy = Acc.dummy () in
loop dummy lst;
dummy.tl
let remove_assq x lst =
let rec loop dst = function
| [] -> ()
| (a, _ as pair) :: t ->
if a == x then
dst.tl <- t
else
loop (Acc.accum dst pair) t
in
let dummy = Acc.dummy () in
loop dummy lst;
dummy.tl
(* [remove_at i lst]: [lst] without its [i]-th (0-based) element.
   @raise Invalid_argument if [i] is negative or past the end. *)
let remove_at i lst =
  let rec loop dst i = function
    | [] -> invalid_arg "BatList.remove_at"
    | x :: xs ->
      if i = 0 then
        dst.tl <- xs (* splice the tail past the removed element *)
      else
        loop (Acc.accum dst x) (i - 1) xs
  in
  if i < 0 then
    invalid_arg "BatList.remove_at"
  else
    let dummy = Acc.dummy () in
    loop dummy i lst;
    dummy.tl
(*$T remove_at
try ignore (remove_at 0 []) ; false with Invalid_argument _ -> true
try ignore (remove_at 1 [0]); false with Invalid_argument _ -> true
remove_at 0 [0] = []
remove_at 0 [0; 1; 2] = [1; 2]
remove_at 1 [0; 1; 2] = [0; 2]
remove_at 2 [0; 1; 2] = [0; 1]
*)
let rfind p l = find p (rev l)
let find_all p l =
let rec findnext dst = function
| [] -> ()
| h :: t ->
if p h then
findnext (Acc.accum dst h) t
else
findnext dst t
in
let dummy = Acc.dummy () in
findnext dummy l;
dummy.tl
(* [findi p l]: first (index, element) pair with [p index element].
   @raise Not_found when no element matches.
   Note: the outer functions below were all declared [let rec] although
   only their inner [loop] recurses; the spurious [rec] flags (which
   trigger warning 39 when enabled) have been removed. *)
let findi p l =
  let rec loop n = function
    | [] -> raise Not_found
    | h :: t ->
      if p n h then (n,h) else loop (n+1) t
  in
  loop 0 l

(* [index_of e l]: [Some i] for the first index [i] whose element
   equals [e] structurally, [None] otherwise. *)
let index_of e l =
  let rec loop n = function
    | [] -> None
    | h::_ when h = e -> Some n
    | _::t -> loop ( n + 1 ) t
  in loop 0 l

(* Same as [index_of] but with physical equality ( == ). *)
let index_ofq e l =
  let rec loop n = function
    | [] -> None
    | h::_ when h == e -> Some n
    | _::t -> loop ( n + 1 ) t
  in loop 0 l

(* [rindex_of e l]: index of the LAST structural occurrence of [e]. *)
let rindex_of e l =
  let rec loop n acc = function
    | [] -> acc
    | h::t when h = e -> loop ( n + 1) ( Some n ) t
    | _::t -> loop ( n + 1 ) acc t
  in loop 0 None l

(* Same as [rindex_of] but with physical equality ( == ). *)
let rindex_ofq e l =
  let rec loop n acc = function
    | [] -> acc
    | h::t when h == e -> loop ( n + 1) ( Some n ) t
    | _::t -> loop ( n + 1 ) acc t
  in loop 0 None l
let filter = find_all
(* [filteri f l]: like [filter], but the predicate also receives the
   element's 0-based position.  Not tail-recursive. *)
let filteri f =
  let rec aux i = function
    | [] -> []
    | x::xs when f i x -> x :: aux (succ i) xs
    | _::xs -> aux (succ i) xs (* rejected element: skip it *)
  in
  aux 0
(*$T filteri
(let r = ref (-1) in filteri (fun i _ -> incr r; i = !r) [5; 4; 8] = [5; 4; 8])
filteri (fun _ x -> x > 4) [5; 4; 8] = [5; 8]
filteri (fun _ _ -> true) [] = []
*)
let partition p lst =
let rec loop yesdst nodst = function
| [] -> ()
| h :: t ->
if p h then
loop (Acc.accum yesdst h) nodst t
else
loop yesdst (Acc.accum nodst h) t
in
let yesdummy = Acc.dummy ()
and nodummy = Acc.dummy ()
in
loop yesdummy nodummy lst;
(yesdummy.tl, nodummy.tl)
let split lst =
let rec loop adst bdst = function
| [] -> ()
| (a, b) :: t ->
loop (Acc.accum adst a) (Acc.accum bdst b) t
in
let adummy = Acc.dummy ()
and bdummy = Acc.dummy ()
in
loop adummy bdummy lst;
adummy.tl, bdummy.tl
(* Tail-recursive [combine]: zips [l1] and [l2] into a list of pairs
   via the [Acc] destination accumulator.
   @raise Invalid_argument if the lists have different lengths. *)
let combine l1 l2 =
  (* pre-built exception so both failure points raise the same value *)
  let list_sizes_differ = Invalid_argument "combine: Different_list_size" in
  match l1, l2 with
  | [], [] -> []
  | x :: xs, y :: ys ->
    let acc = Acc.create (x, y) in
    let rec loop dst l1 l2 = match l1, l2 with
      | [], [] -> inj acc
      | h1 :: t1, h2 :: t2 -> loop (Acc.accum dst (h1, h2)) t1 t2
      | _, _ -> raise list_sizes_differ
    in loop acc xs ys
  | _, _ -> raise list_sizes_differ
(*$T combine
combine [] [] = []
combine [1] [2] = [(1, 2)]
combine [1; 3] [2; 4] = [(1, 2); (3, 4)]
*)
let init size f =
if size = 0 then []
else if size < 0 then invalid_arg "BatList.init"
else
let rec loop dst n =
if n < size then
loop (Acc.accum dst (f n)) (n+1)
in
let r = Acc.create (f 0) in
loop r 1;
inj r
let unfold_exc f =
let rec loop dst =
loop (Acc.accum dst (f ()))
in
let acc = Acc.dummy () in
try
loop acc
with exn -> (acc.tl, exn)
(*$T unfold_exc
let exc () = raise End_of_file in \
unfold_exc exc = ([], End_of_file)
let state = ref 0 in \
let just_zero () = \
if !state = 1 then raise End_of_file \
else let _ = incr state in 0 \
in \
unfold_exc just_zero = ([0], End_of_file)
*)
(* [make n x]: a list of [n] copies of [x].
   @raise Invalid_argument if [n] is negative. *)
let make i x =
  if i < 0 then invalid_arg "List.make";
  let rec build acc = function
    | 0 -> acc
    | n -> build (x :: acc) (n - 1)
  in
  build [] i
(* [range i `To j] = [i; i+1; ...; j];
   [range i `Downto j] = [i; i-1; ...; j].
   @raise Invalid_argument when the bounds are in the wrong order. *)
let range i dir j =
  let step = match dir with
    | `To ->
      if i > j then
        invalid_arg (Printf.sprintf "List.range %d `To %d" i j);
      pred
    | `Downto ->
      if i < j then
        invalid_arg (Printf.sprintf "List.range %d `Downto %d" i j);
      succ
  in
  (* walk from [j] back toward [i], consing as we go *)
  let rec walk acc k =
    if k = i then k :: acc
    else walk (k :: acc) (step k)
  in
  walk [] j
(*$T range
range 1 `To 3 = [1; 2; 3]
range 1 `To 1 = [1]
range 3 `Downto 1 = [3; 2; 1]
range 3 `Downto 3 = [3]
try ignore(range 1 `To 0); true with Invalid_argument _ -> true
try ignore(range 1 `Downto 2); true with Invalid_argument _ -> true
*)
let mapi f = function
| [] -> []
| h :: t ->
let rec loop dst n = function
| [] -> ()
| h :: t ->
loop (Acc.accum dst (f n h)) (n + 1) t
in
let r = Acc.create (f 0 h) in
loop r 1 t;
inj r
(* [iteri f l]: apply [f index element] to each element, 0-based,
   left to right, for side effects. *)
let iteri f l =
  let rec go idx = function
    | [] -> ()
    | x :: xs ->
      f idx x;
      go (idx + 1) xs
  in
  go 0 l
(* Left fold that also passes each element's 0-based index to [f]. *)
let fold_lefti f init l =
  let rec go idx acc = function
    | [] -> acc
    | h :: t -> go (idx + 1) (f acc idx h) t
  in
  go 0 init l
(*$T fold_lefti
fold_lefti (fun acc i x -> (i, x) :: acc) [] [] = []
fold_lefti (fun acc i x -> (i, x) :: acc) [] [0.] = [(0, 0.)]
fold_lefti (fun acc i x -> (i, x) :: acc) [] [0.; 1.] = [(1, 1.); (0, 0.)]
*)
let fold_righti f l init =
let xis =
(* reverse the list and index its elements *)
fold_lefti (fun acc i x -> (i, x) :: acc) [] l
in
fold_left
(fun acc (i, x) -> f i x acc)
init
xis
(*$T fold_righti
fold_righti (fun i x acc -> (i, x) :: acc) [] [] = []
fold_righti (fun i x acc -> (i, x) :: acc) [0.] [] = [(0, 0.)]
fold_righti (fun i x acc -> (i, x) :: acc) [0.; 1.] [] = [(0, 0.); (1, 1.)]
*)
let first = hd
let rec last = function
| [] -> invalid_arg "Empty List"
| h :: [] -> h
| _ :: t -> last t
let split_nth index = function
| [] -> if index = 0 then [],[] else invalid_arg at_after_end_msg
| (h :: t as l) ->
if index = 0 then [],l
else if index < 0 then invalid_arg at_negative_index_msg
else
let rec loop n dst l =
if n = 0 then l else
match l with
| [] -> invalid_arg at_after_end_msg
| h :: t ->
loop (n - 1) (Acc.accum dst h) t
in
let r = Acc.create h in
inj r, loop (index-1) r t
let split_at = split_nth
let find_exn f e l =
try
find f l
with
Not_found -> raise e
let remove l x =
let rec loop dst = function
| [] -> ()
| h :: t ->
if x = h then
dst.tl <- t
else
loop (Acc.accum dst h) t
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
let remove_if f lst =
let rec loop dst = function
| [] -> ()
| x :: l ->
if f x then
dst.tl <- l
else
loop (Acc.accum dst x) l
in
let dummy = Acc.dummy () in
loop dummy lst;
dummy.tl
let remove_all l x =
let rec loop dst = function
| [] -> ()
| h :: t ->
if x = h then
loop dst t
else
loop (Acc.accum dst h) t
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
(* [transpose m]: matrix transposition on lists of lists.
   [transpose [] = []]; all inner rows must have the same length
   ([map2] raises Invalid_argument otherwise). *)
let transpose = function
  | [] -> []
  | [x] -> List.map (fun x -> [x]) x
  | x::xs ->
    (* one accumulator per output row, seeded with the first input row *)
    let heads = List.map Acc.create x in
    ignore ( fold_left
        (fun acc x ->
           map2
             (fun x xs -> Acc.accum xs x)
             x acc)
        heads xs);
    (* restored: the result expression had been lost; each accumulator
       now holds one row of the transposed matrix *)
    List.map inj heads
(*$T transpose
transpose [ [1; 2; 3;]; [4; 5; 6;]; [7; 8; 9;] ] = [[1;4;7];[2;5;8];[3;6;9]]
transpose [] = []
transpose [ [1] ] = [ [1] ]
*)
let enum l =
let rec make lr count =
BatEnum.make
~next:(fun () ->
match !lr with
| [] -> raise BatEnum.No_more_elements
| h :: t ->
decr count;
lr := t;
h
)
~count:(fun () ->
if !count < 0 then count := length !lr;
!count
)
~clone:(fun () ->
make (ref !lr) (ref !count)
)
in
make (ref l) (ref (-1))
let of_enum e =
let h = Acc.dummy () in
let _ = BatEnum.fold Acc.accum h e in
h.tl
(* [backwards l]: enumeration over the elements of [l], last first.
   Implemented by reversing, then enumerating. *)
let backwards l = enum (rev l) (*TODO: should we make it more efficient?*)
(* Alternative that needs only one pass but is actually less lazy:
   let backwards l =
     let rec aux acc = function
       | [] -> acc
       | h::t -> aux (BatEnum.append (BatEnum.singleton h) acc) t
     in aux (BatEnum.empty ()) l *)
let of_backwards e =
let rec aux acc = match BatEnum.get e with
| Some h -> aux (h::acc)
| None -> acc
in aux []
(* [assoc_inv e l]: the key bound to value [e] (structural equality
   on the second component of each pair).
   @raise Not_found if no pair has [e] as its value. *)
let assoc_inv e l =
  let rec search = function
    | [] -> raise Not_found
    | (k, v) :: rest -> if v = e then k else search rest
  in search l

(* Same as [assoc_inv], but values are compared physically ( == ). *)
let assq_inv e l =
  let rec search = function
    | [] -> raise Not_found
    | (k, v) :: rest -> if v == e then k else search rest
  in search l
(* [modify_opt a f l]: association-list update.  [f] receives
   [Some b] when [a] is bound to [b] in [l], [None] otherwise; it
   returns [Some b'] to (re)bind [a] to [b'], or [None] to remove the
   binding.  A brand-new binding is appended at the END of the list.
   [Exit] is used internally to return [l] physically unchanged when
   [f None = None]. *)
let modify_opt a f l =
  let rec aux p = function
    | [] ->
      (match f None with
       | None -> raise Exit (* no change: keep the original list *)
       | Some v -> rev ((a,v)::p)) (* append the new binding *)
    | (a',b)::t when a' = a ->
      (match f (Some b) with
       | None -> rev_append p t (* drop the binding *)
       | Some b' -> rev_append ((a,b')::p) t) (* replace it *)
    | p'::t ->
      aux (p'::p) t
  in
  try aux [] l with Exit -> l
(*$= modify_opt & ~printer:(IO.to_string (List.print (fun fmt (a,b) -> Printf.fprintf fmt "%d,%d" a b)))
(* to modify a value *) \
(modify_opt 5 (function Some 1 -> Some 2 | _ -> assert false) [ 1,0 ; 5,1 ; 8,2 ]) \
[ 1,0 ; 5,2 ; 8,2 ]
(* to add a value *) \
(modify_opt 5 (function None -> Some 2 | _ -> assert false) [ 1,0 ; 8,2 ]) \
[ 1,0 ; 8,2 ; 5,2 ]
(* to remove a value *) \
(modify_opt 5 (function Some 1 -> None | _ -> assert false) [ 1,0 ; 5,1 ; 8,2 ]) \
[ 1,0 ; 8,2 ]
*)
let modify a f l =
let f' = function
| None -> raise Not_found
| Some b -> Some (f b)
in
modify_opt a f' l
(*$= modify & ~printer:(IO.to_string (List.print (fun fmt (a,b) -> Printf.fprintf fmt "%d,%d" a b)))
(modify 5 succ [ 1,0 ; 5,1 ; 8,2 ]) [ 1,0 ; 5,2 ; 8,2 ]
*)
(*$T modify
try ignore (modify 5 succ [ 1,0 ; 8,2 ]); false with Not_found -> true
*)
let modify_def dfl a f l =
let f' = function
| None -> Some (f dfl)
| Some b -> Some (f b)
in
modify_opt a f' l
(*$= modify_def & ~printer:(IO.to_string (List.print (fun fmt (a,b) -> Printf.fprintf fmt "%d,%d" a b)))
(modify_def 0 5 succ [ 1,0 ; 5,1 ; 8,2 ]) [ 1,0 ; 5,2 ; 8,2 ]
(modify_def 0 5 succ [ 1,0 ; 8,2 ]) [ 1,0 ; 8,2 ; 5,1 ]
*)
let modify_opt_at n f l =
if n < 0 then invalid_arg at_negative_index_msg;
let rec loop acc n = function
| [] -> invalid_arg at_after_end_msg
| h :: t ->
if n <> 0 then loop (h :: acc) (n - 1) t
else match f h with
| None -> rev_append acc t
| Some v -> rev_append acc (v :: t)
in
loop [] n l
(*$T modify_opt_at
modify_opt_at 2 (fun n -> Some (n*n)) [1;2;3;4;5] = [1;2;9;4;5]
modify_opt_at 2 (fun _ -> None) [1;2;3;4;5] = [1;2;4;5]
try ignore (modify_opt_at 0 (fun _ -> None) []); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at 2 (fun _ -> None) []); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at (-1) (fun _ -> None) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at 5 (fun _ -> None) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at 3 (fun _ -> None) [1;2;3]); false \
with Invalid_argument _ -> true
*)
let modify_at n f l =
modify_opt_at n (fun x -> Some (f x)) l
(*$T modify_at
modify_at 2 ((+) 1) [1;2;3;4] = [1;2;4;4]
try ignore (modify_at 0 ((+) 1) []); false \
with Invalid_argument _ -> true
try ignore (modify_at 2 ((+) 1) []); false \
with Invalid_argument _ -> true
try ignore (modify_at (-1) ((+) 1) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_at 5 ((+) 1) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_at 3 ((+) 1) [1;2;3]); false \
with Invalid_argument _ -> true
*)
(* [sort_unique cmp lst]: sort by [cmp], then collapse each run of
   [cmp]-equal elements down to a single representative. *)
let sort_unique cmp lst =
  match List.sort cmp lst with
  | [] -> []
  | first :: rest ->
    let rev_result, _last =
      List.fold_left
        (fun (acc, prev) elem ->
           if cmp prev elem = 0 then (acc, elem)
           else (elem :: acc, elem))
        ([first], first)
        rest
    in
    List.rev rev_result
let sort_uniq = List.sort_uniq
(* [group cmp lst]: sort by [cmp], then partition into runs of
   [cmp]-equal elements; returns the groups in ascending order. *)
let group cmp lst =
  match List.sort cmp lst with
  | [] -> []
  | first :: rest ->
    let finished, current, _last =
      List.fold_left
        (fun (done_groups, cur, prev) elem ->
           if cmp prev elem = 0 then (done_groups, elem :: cur, elem)
           else (cur :: done_groups, [elem], elem))
        ([], [first], first)
        rest
    in
    List.rev_map List.rev (current :: finished)
(*$T group
group Pervasives.compare [] = []
group Pervasives.compare [1] = [[1]]
group Pervasives.compare [2; 2] = [[2; 2]]
group Pervasives.compare [5; 4; 4; 2; 1; 6] = [[1]; [2]; [4; 4]; [5]; [6]]
*)
(* All pairs (a, b) with [a] drawn from [l1] and [b] from [l2],
   in row-major order. *)
let cartesian_product l1 l2 =
  let rows = List.map (fun a -> List.map (fun b -> (a, b)) l2) l1 in
  List.concat rows
(*$T cartesian_product as cp
cp [1;2;3] ['x';'y'] = [1,'x';1,'y';2,'x';2,'y';3,'x';3,'y']
*)
(* Cartesian product of a list of lists: every way of choosing one
   element from each inner list, in lexicographic-by-position order.
   The product of no lists is [[[]]]; any empty inner list yields []. *)
let rec n_cartesian_product = function
  | [] -> [[]]
  | choices :: others ->
    let tails = n_cartesian_product others in
    List.concat
      (List.map (fun pick -> List.map (fun tail -> pick :: tail) tails) choices)
(*$T n_cartesian_product as ncp
ncp [] = [[]]
ncp [[]] = []
ncp [[1]; [2]; [3]] = [[1;2;3]]
ncp [[1;2;3]] = [[1]; [2]; [3]]
ncp [[1;2;3]; []] = []
ncp [[1;2;3]; [4;5]] = [[1;4]; [1;5]; [2;4]; [2;5]; [3;4]; [3;5]]
*)
let print ?(first="[") ?(last="]") ?(sep="; ") print_a out = function
| [] ->
BatInnerIO.nwrite out first;
BatInnerIO.nwrite out last
| [h] ->
BatInnerIO.nwrite out first;
print_a out h;
BatInnerIO.nwrite out last
| h::t ->
BatInnerIO.nwrite out first;
print_a out h;
iter (fun x -> BatInnerIO.nwrite out sep; print_a out x) t;
BatInnerIO.nwrite out last
let t_printer a_printer _paren out x = print (a_printer false) out x
let reduce f = function [] -> invalid_arg "Empty List"
| h::t -> fold_left f h t
let min l = reduce Pervasives.min l
let max l = reduce Pervasives.max l
let sum l = reduce (+) l
let fsum l = reduce (+.) l
(* Compensated (Kahan) summation of a float list: tracks the rounding
   error of each addition in [err] and feeds it back, giving much
   better accuracy than naive summation. *)
let kahan_sum li =
  (* This algorithm is written in a particularly untasteful imperative
     style to benefit from the nice unboxing of float references that
     is harder to obtain with recursive functions today. See the
     definition of kahan sum on arrays, on which this one is directly
     modeled. *)
  let li = ref li in
  let continue = ref (!li <> []) in
  let sum = ref 0. in
  let err = ref 0. in (* running compensation for lost low-order bits *)
  while !continue do
    match !li with
    | [] -> continue := false
    | x::xs ->
      li := xs;
      let x = x -. !err in
      let new_sum = !sum +. x in
      err := (new_sum -. !sum) -. x;
      sum := new_sum +. 0.; (* the [+. 0.] relates to the float-unboxing
                               concern described above — see note *)
  done;
  !sum +. 0.
(*$T kahan_sum
kahan_sum [ ] = 0.
kahan_sum [ 1.; 2. ] = 3.
let n, x = 1_000, 1.1 in \
Float.approx_equal (float n *. x) \
(kahan_sum (List.make n x))
*)
(* [min_max ?cmp l]: the pair (smallest, largest) element of [l],
   computed in a single pass.  Only the SIGN of [cmp]'s result is
   inspected, so any comparator following the usual
   negative/zero/positive convention works (the previous code tested
   [= 1] and [= -1], which silently misbehaved for comparators that
   return other magnitudes).
   @raise Invalid_argument on the empty list. *)
let min_max ?cmp:(cmp = Pervasives.compare) = function
  | [] -> invalid_arg "List.min_max: Empty List"
  | x :: xs ->
    List.fold_left
      (fun (curr_min, curr_max) y ->
         let new_min =
           if cmp curr_min y > 0
           then y
           else curr_min
         in
         let new_max =
           if cmp curr_max y < 0
           then y
           else curr_max
         in
         (new_min, new_max)
      )
      (x, x)
      xs
(*$T min_max
min_max [1] = (1, 1)
min_max [1; 1] = (1, 1)
min_max [1; -2; 3; 4; 5; 60; 7; 8] = (-2, 60)
*)
(* [unfold b f]: build a list from seed [b].  While [f v] returns
   [Some (a, v')], element [a] is appended and unfolding continues
   with seed [v']; [None] stops and the accumulated list is returned.
   Tail-recursive via the [Acc] destination accumulator. *)
let unfold b f =
  let acc = Acc.dummy () in
  let rec loop dst v =
    match f v with
    | None -> acc.tl
    | Some (a, v) -> loop (Acc.accum dst a) v
  in loop acc b
(*$T unfold
unfold 1 (fun x -> None) = []
unfold 0 (fun x -> if x > 3 then None else Some (x, succ x)) = [0;1;2;3]
*)
let subset cmp l l' = for_all (fun x -> mem_cmp cmp x l') l
(*$T subset
subset Pervasives.compare [1;2;3;4] [1;2;3] = false
subset Pervasives.compare [1;2;3] [1;2;3] = true
subset Pervasives.compare [3;2;1] [1;2;3] = true
subset Pervasives.compare [1;2] [1;2;3] = true
*)
module Exceptionless = struct
let rfind p l =
try Some (rfind p l)
with Not_found -> None
let find p l =
try Some (find p l)
with Not_found -> None
let findi p l =
try Some (findi p l)
with Not_found -> None
let split_at n l =
try `Ok (split_at n l)
with Invalid_argument s -> `Invalid_argument s
let at n l =
try `Ok (at n l)
with Invalid_argument s -> `Invalid_argument s
let assoc e l =
try Some (assoc e l)
with Not_found -> None
let assq e l =
try Some (assq e l)
with Not_found -> None
let assoc_inv e l =
try Some (assoc_inv e l)
with Not_found -> None
let find_map f l =
try Some(find_map f l)
with Not_found -> None
let hd l =
try Some (hd l)
with Failure _ -> None
let tl l =
try Some (tl l)
with Failure _ -> None
let rec last = function
| [] -> None
| [x] -> Some x
| _ :: l -> last l
end
module Labels = struct
type 'a t = 'a list
let init i ~f = init i f
let make n x = make n x
let iteri ~f l = iteri f l
let map ~f l = map f l
let mapi ~f l = mapi f l
let rfind ~f l = rfind f l
let find ~f l = find f l
let findi ~f = findi f
let find_exn ~f = find_exn f
let filter_map ~f = filter_map f
let remove_if ~f = remove_if f
let take_while ~f = take_while f
let drop_while ~f = drop_while f
let map2 ~f = map2 f
let iter2 ~f = iter2 f
let exists2 ~f = exists2 f
let fold_left ~f ~init = fold_left f init
let fold_right ~f l ~init = fold_right f l init
let fold_left2 ~f ~init = fold_left2 f init
let fold_right2 ~f l1 l2 ~init = fold_right2 f l1 l2 init
let filter ~f = filter f
let find_all ~f = find_all f
let partition ~f = partition f
let rev_map ~f = rev_map f
let rev_map2 ~f = rev_map2 f
let iter ~f = iter f
let for_all ~f = for_all f
let for_all2 ~f = for_all2 f
let exists ~f = exists f
let subset ~cmp = subset cmp
let stable_sort ?(cmp=compare) = stable_sort cmp
let fast_sort ?(cmp=compare) = fast_sort cmp
let sort ?(cmp=compare) = sort cmp
let merge ?(cmp=compare) = merge cmp
module LExceptionless = struct
include Exceptionless
let rfind ~f l = rfind f l
let find ~f l = find f l
let findi ~f l = findi f l
end
end
let ( @ ) = List.append
module Infix = struct
let ( @ ) = ( @ )
end
open BatOrd
let rec eq eq_elt l1 l2 =
match l1 with
| [] -> (match l2 with [] -> true | _ -> false)
| hd1::tl1 ->
(match l2 with
| [] -> false
| hd2::tl2 -> bin_eq eq_elt hd1 hd2 (eq eq_elt) tl1 tl2)
let rec ord ord_elt l1 l2 =
match l1 with
| [] -> (match l2 with [] -> Eq | _::_ -> Lt)
| hd1::tl1 ->
(match l2 with
| [] -> Gt
| hd2::tl2 -> bin_ord ord_elt hd1 hd2 (ord ord_elt) tl1 tl2)
let rec compare comp_elt l1 l2 =
match l1 with
| [] -> (match l2 with [] -> 0 | _::_ -> -1)
| hd1::tl1 ->
(match l2 with
| [] -> 1
| hd2::tl2 -> bin_comp comp_elt hd1 hd2 (compare comp_elt) tl1 tl2)
module Eq (T : Eq) = struct
type t = T.t list
let eq = eq T.eq
end
module Ord (T : Ord) = struct
type t = T.t list
let ord = ord T.ord
end
module Comp (T : Comp) = struct
type t = T.t list
let compare = compare T.compare
end | null | https://raw.githubusercontent.com/cuplv/raz.ocaml/b9b42d87d95cb1db34b8b35b3e30b4d4da41dea6/batList.ml | ocaml | $T is_empty
is_empty []
not (is_empty [1])
normalize: the return type of nsplit \
is quotiented by the equivalence []~[[]]
FIXME BAD TESTS: RESULT IS SPECIFIC TO IMPLEMENTATION
use a stateful filter to remove duplicate elements
put h in hash table
and to output list
if already in hashtable then don't add to output list
reverse the list and index its elements
TODO: should we make it more efficient?
to modify a value
to add a value
to remove a value
$T cartesian_product as cp
cp [1;2;3] ['x';'y'] = [1,'x';1,'y';2,'x';2,'y';3,'x';3,'y']
|
* BatList - additional and modified functions for lists .
* Copyright ( C ) 2003
* Copyright ( C ) 2003
* Copyright ( C ) 2008 Red Hat Inc.
* Copyright ( C ) 2008 , LIFO , Universite d'Orleans
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* BatList - additional and modified functions for lists.
* Copyright (C) 2003 Brian Hurt
* Copyright (C) 2003 Nicolas Cannasse
* Copyright (C) 2008 Red Hat Inc.
* Copyright (C) 2008 David Rajchenbach-Teller, LIFO, Universite d'Orleans
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
: : VH : : GLUE with StdLib
let merge = List.merge
let fast_sort = List.fast_sort
let stable_sort = List.stable_sort
let sort = List.sort
let assq = List.assq
let assoc = List.assoc
let find = List.find
let exists = List.exists
let for_all = List.for_all
let fold_left = List.fold_left
let rev_map = List.rev_map
let iter = List.iter
let rev_append = List.rev_append
let rev = List.rev
let length = List.length
let tl = List.tl
let hd = List.hd
let mem = List.mem
let memq = List.memq
let mem_assq = List.mem_assq
let mem_assoc = List.mem_assoc
let rev_map2 = List.rev_map2
: : VH : : END GLUE
Thanks to for suggesting the following structure
type 'a mut_list = {
hd: 'a;
mutable tl: 'a list
}
type 'a t = 'a list
type 'a enumerable = 'a t
type 'a mappable = 'a t
external inj : 'a mut_list -> 'a list = "%identity"
module Acc = struct
let dummy () =
{ hd = Obj.magic (); tl = [] }
let create x =
{ hd = x; tl = [] }
let accum acc x =
let cell = create x in
acc.tl <- inj cell;
cell
end
let cons h t = h::t
let is_empty = function
| [] -> true
| _ -> false
let at_negative_index_msg = "Negative index not allowed"
let at_after_end_msg = "Index past end of list"
let nth l index =
if index < 0 then invalid_arg at_negative_index_msg;
let rec loop n = function
| [] -> invalid_arg at_after_end_msg;
| h :: t ->
if n = 0 then h else loop (n - 1) t
in
loop index l
let at = nth
$ T at
try ignore ( at [ ] 0 ) ; false with Invalid_argument _ - > true
try ignore ( at [ 1;2;3 ] ( -1 ) ) ; false with Invalid_argument _ - > true
at [ 1;2;3 ] 2 = 3
try ignore (at [] 0); false with Invalid_argument _ -> true
try ignore (at [1;2;3] (-1)); false with Invalid_argument _ -> true
at [1;2;3] 2 = 3
*)
let mem_cmp cmp x l =
exists (fun y -> cmp x y = 0) l
$ T mem_cmp
mem_cmp Pervasives.compare 0 [ ] = false
mem_cmp Pervasives.compare 0 [ 1 ; 2 ] = false
mem_cmp Pervasives.compare 1 [ 1 ; 2 ] = true
mem_cmp Pervasives.compare 2 [ 1 ; 2 ] = true
mem_cmp Pervasives.compare 0 [] = false
mem_cmp Pervasives.compare 0 [1; 2] = false
mem_cmp Pervasives.compare 1 [1; 2] = true
mem_cmp Pervasives.compare 2 [1; 2] = true
*)
let append l1 l2 =
match l1 with
| [] -> l2
| h :: t ->
let rec loop dst = function
| [] ->
dst.tl <- l2
| h :: t ->
loop (Acc.accum dst h) t
in
let r = Acc.create h in
loop r t;
inj r
$ T append
append [ ] [ ] = [ ]
append [ ] [ 1 ] = [ 1 ]
append [ 1 ] [ ] = [ 1 ]
append [ 1 ] [ 2 ] = [ 1 ; 2 ]
append [ 1 ; 2 ] [ 3 ] = [ 1 ; 2 ; 3 ]
append [ 1 ] [ 2 ; 3 ] = [ 1 ; 2 ; 3 ]
append [] [] = []
append [] [1] = [1]
append [1] [] = [1]
append [1] [2] = [1; 2]
append [1; 2] [3] = [1; 2; 3]
append [1] [2; 3] = [1; 2; 3]
*)
let flatten l =
let rec inner dst = function
| [] -> dst
| h :: t ->
inner (Acc.accum dst h) t
in
let rec outer dst = function
| [] -> ()
| h :: t -> outer (inner dst h) t
in
let r = Acc.dummy () in
outer r l;
r.tl
let concat = flatten
$ T flatten
flatten [ [ 1;2];[3];[];[4;5;6 ] ] = [ 1;2;3;4;5;6 ]
flatten [ [ ] ] = [ ]
flatten [[1;2];[3];[];[4;5;6]] = [1;2;3;4;5;6]
flatten [[]] = []
*)
let singleton x = [x]
$ Q singleton
Q.int ( fun x - > let s = singleton x in hd s = x & & length s = 1 )
Q.int (fun x -> let s = singleton x in hd s = x && length s = 1)
*)
let map f = function
| [] -> []
| h :: t ->
let rec loop dst = function
| [] -> ()
| h :: t ->
loop (Acc.accum dst (f h)) t
in
let r = Acc.create (f h) in
loop r t;
inj r
$ Q map
( Q.pair ( Q.fun1 Q.int Q.int ) ( Q.list Q.small_int ) ) \
( fun ( f , l ) - > map f l = List.map f l )
(Q.pair (Q.fun1 Q.int Q.int) (Q.list Q.small_int)) \
(fun (f,l) -> map f l = List.map f l)
*)
let rec drop n = function
| _ :: l when n > 0 -> drop (n-1) l
| l -> l
$ = drop & ~printer:(IO.to_string ( List.print Int.print ) )
( drop 0 [ 1;2;3 ] ) [ 1;2;3 ]
( drop 3 [ 1;2;3 ] ) [ ]
( drop 4 [ 1;2;3 ] ) [ ]
( drop 1 [ 1;2;3 ] ) [ 2;3 ]
(drop 0 [1;2;3]) [1;2;3]
(drop 3 [1;2;3]) []
(drop 4 [1;2;3]) []
(drop 1 [1;2;3]) [2;3]
*)
let take n l =
let rec loop n dst = function
| h :: t when n > 0 ->
loop (n - 1) (Acc.accum dst h) t
| _ ->
()
in
let dummy = Acc.dummy () in
loop n dummy l;
dummy.tl
$ = take & ~printer:(IO.to_string ( List.print Int.print ) )
( take 0 [ 1;2;3 ] ) [ ]
( take 3 [ 1;2;3 ] ) [ 1;2;3 ]
( take 4 [ 1;2;3 ] ) [ 1;2;3 ]
( take 1 [ 1;2;3 ] ) [ 1 ]
(take 0 [1;2;3]) []
(take 3 [1;2;3]) [1;2;3]
(take 4 [1;2;3]) [1;2;3]
(take 1 [1;2;3]) [1]
*)
let takedrop n l =
let rec loop n dst = function
| h :: t when n > 0 -> loop (n - 1) (Acc.accum dst h) t
| rest -> rest
in
let dummy = Acc.dummy () in
let rest = loop n dummy l in
(dummy.tl, rest)
$ T takedrop
takedrop 0 [ 1 ; 2 ; 3 ] = ( [ ] , [ 1 ; 2 ; 3 ] )
takedrop 3 [ 1 ; 2 ; 3 ] = ( [ 1 ; 2 ; 3 ] , [ ] )
takedrop 4 [ 1 ; 2 ; 3 ] = ( [ 1 ; 2 ; 3 ] , [ ] )
takedrop 1 [ 1 ; 2 ; 3 ] = ( [ 1 ] , [ 2 ; 3 ] )
takedrop 0 [1; 2; 3] = ([], [1; 2; 3])
takedrop 3 [1; 2; 3] = ([1; 2; 3], [])
takedrop 4 [1; 2; 3] = ([1; 2; 3], [])
takedrop 1 [1; 2; 3] = ([1], [2; 3])
*)
let ntake n l =
if n < 1 then invalid_arg "BatList.ntake";
let took, left = takedrop n l in
let acc = Acc.create took in
let rec loop dst = function
| [] -> inj acc
| li -> let taken, rest = takedrop n li in
loop (Acc.accum dst taken) rest
in
loop acc left
$ T 2 [ ] = [ [ ] ]
ntake 2 [ 1 ] = [ [ 1 ] ]
ntake 2 [ 1 ; 2 ] = [ [ 1 ; 2 ] ]
ntake 2 [ 1 ; 2 ; 3 ] = [ [ 1 ; 2 ] ; [ 3 ] ]
ntake 2 [ 1 ; 2 ; 3 ; 4 ] = [ [ 1 ; 2 ] ; [ 3 ; 4 ] ]
ntake 2 [] = [[]]
ntake 2 [1] = [[1]]
ntake 2 [1; 2] = [[1; 2]]
ntake 2 [1; 2; 3] = [[1; 2]; [3]]
ntake 2 [1; 2; 3; 4] = [[1; 2]; [3; 4]]
*)
let take_while p li =
let rec loop dst = function
| [] -> ()
| x :: xs ->
if p x then
loop (Acc.accum dst x) xs in
let dummy = Acc.dummy () in
loop dummy li;
dummy.tl
$ = take_while & ~printer:(IO.to_string ( List.print Int.print ) )
( take_while ( (= ) 3 ) [ 3;3;4;3;3 ] ) [ 3;3 ]
( take_while ( (= ) 3 ) [ 3 ] ) [ 3 ]
( take_while ( (= ) 3 ) [ 4 ] ) [ ]
( take_while ( (= ) 3 ) [ ] ) [ ]
( take_while ( (= ) 2 ) [ 2 ; 2 ] ) [ 2 ; 2 ]
(take_while ((=) 3) [3;3;4;3;3]) [3;3]
(take_while ((=) 3) [3]) [3]
(take_while ((=) 3) [4]) []
(take_while ((=) 3) []) []
(take_while ((=) 2) [2; 2]) [2; 2]
*)
let rec drop_while f = function
| [] -> []
| x :: xs when f x -> drop_while f xs
| xs -> xs
$ = drop_while & ~printer:(IO.to_string ( List.print Int.print ) )
( drop_while ( (= ) 3 ) [ 3;3;4;3;3 ] ) [ 4;3;3 ]
( drop_while ( (= ) 3 ) [ 3 ] ) [ ]
(drop_while ((=) 3) [3;3;4;3;3]) [4;3;3]
(drop_while ((=) 3) [3]) []
*)
let span p li =
let rec loop dst = function
| [] -> []
| x :: xs as l ->
if p x then
loop (Acc.accum dst x) xs
else l
in
let dummy = Acc.dummy () in
let xs = loop dummy li in
(dummy.tl , xs)
$ = span
( span ( (= ) 3 ) [ 3;3;4;3;3 ] ) ( [ 3;3],[4;3;3 ] )
( span ( (= ) 3 ) [ 3 ] ) ( [ 3 ] , [ ] )
( span ( (= ) 3 ) [ 4 ] ) ( [ ] , [ 4 ] )
( span ( (= ) 3 ) [ ] ) ( [ ] , [ ] )
( span ( (= ) 2 ) [ 2 ; 2 ] ) ( [ 2 ; 2 ] , [ ] )
(span ((=) 3) [3;3;4;3;3]) ([3;3],[4;3;3])
(span ((=) 3) [3]) ([3],[])
(span ((=) 3) [4]) ([],[4])
(span ((=) 3) []) ([],[])
(span ((=) 2) [2; 2]) ([2; 2],[])
*)
let nsplit p = function
| [] -> []
note that returning [ ] on empty inputs is an arbitrary choice
that is made for consistence with the behavior of
BatString.nsplit . Not having this hardcoded case would have
` nsplit p [ ] ` return ` [ [ ] ] ` , which is also a semantically valid
return value ( in fact the two are equivalent , but ` [ [ ] ] ` would be
a more natural choice as it allows to enforce the simply
invariant that ` ` return values are always non - empty ) .
If that was to redo from scratch , ` [ [ ] ] ` would be a better return
value for both ` BatList.nsplit ` and ` BatString.nsplit ` .
that is made for consistence with the behavior of
BatString.nsplit. Not having this hardcoded case would have
`nsplit p []` return `[[]]`, which is also a semantically valid
return value (in fact the two are equivalent, but `[[]]` would be
a more natural choice as it allows to enforce the simply
invariant that `nsplit` return values are always non-empty).
If that was to redo from scratch, `[[]]` would be a better return
value for both `BatList.nsplit` and `BatString.nsplit`.
*)
| li ->
let not_p x = not (p x) in
let rec loop dst l =
let ok, rest = span not_p l in
let r = Acc.accum dst ok in
match rest with
| [] -> ()
| x :: xs -> loop r xs
in
let dummy = Acc.dummy () in
loop dummy li;
dummy.tl
$ T ( (= ) 0 ) [ ] = [ ]
nsplit ( (= ) 0 ) [ 0 ] = [ [ ] ; [ ] ]
nsplit ( (= ) 0 ) [ 1 ; 0 ] = [ [ 1 ] ; [ ] ]
nsplit ( (= ) 0 ) [ 0 ; 1 ] = [ [ ] ; [ 1 ] ]
nsplit ( (= ) 0 ) [ 1 ; 2 ; 0 ; 0 ; 3 ; 4 ; 0 ; 5 ] = [ [ 1 ; 2 ] ; [ ] ; [ 3 ; 4 ] ; [ 5 ] ]
nsplit ((=) 0) [] = []
nsplit ((=) 0) [0] = [[]; []]
nsplit ((=) 0) [1; 0] = [[1]; []]
nsplit ((=) 0) [0; 1] = [[]; [1]]
nsplit ((=) 0) [1; 2; 0; 0; 3; 4; 0; 5] = [[1; 2]; []; [3; 4]; [5]]
*)
$ Q nsplit & ~count:10
( Q.list ( Q.list Q.pos_int ) ) ( fun xss - > \
let join sep xss = flatten ( interleave [ sep ] xss ) in \
( * normalize : the return type of \
is quotiented by the equivalence [ ] ~ [ [ ] ]
(Q.list (Q.list Q.pos_int)) (fun xss -> \
let join sep xss = flatten (interleave [sep] xss) in \
let normalize = function [] -> [[]] | li -> li in \
let neg = -1 in \
normalize xss = normalize (nsplit ((=) neg) (join neg xss)) \
)
(Q.pair Q.small_int (Q.list Q.small_int)) (fun (sep,xs) -> \
let join sep xss = flatten (interleave [sep] xss) in \
xs = join sep (nsplit ((=) sep) xs) \
)
*)
nsplit ( (= ) sep ) la @ ( (= ) sep ) lb = ( (= ) sep ) ( la @ [ sep ] @ lb )
let group_consecutive p l =
let rec loop dst = function
| [] -> ()
| x :: rest ->
let xs, rest = span (p x) rest in
loop (Acc.accum dst (x :: xs)) rest
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
$ = group_consecutive & ~printer:(IO.to_string ( List.print ( List.print Int.print ) ) )
( group_consecutive ( =) [ 3 ; 3 ; 4 ; 3 ; 3 ] ) [ [ 3 ; 3 ] ; [ 4 ] ; [ 3 ; 3 ] ]
( group_consecutive ( =) [ 3 ] ) [ [ 3 ] ]
( group_consecutive ( =) [ ] ) [ ]
( group_consecutive ( =) [ 2 ; 2 ] ) [ [ 2 ; 2 ] ]
(group_consecutive (=) [3; 3; 4; 3; 3]) [[3; 3]; [4]; [3; 3]]
(group_consecutive (=) [3]) [[3]]
(group_consecutive (=) []) []
(group_consecutive (=) [2; 2]) [[2; 2]]
*)
let takewhile = take_while
let dropwhile = drop_while
let interleave ?first ?last (sep:'a) (l:'a list) =
let may_prepend maybe_x lst = match maybe_x with
| None -> lst
| Some x -> x :: lst
in
let rec loop acc = function
| [] -> acc
| x :: xs ->
match acc with
| [] -> loop [x] xs
| _ -> loop (x :: sep :: acc) xs
in
let res = loop [] l in
may_prepend first (rev (may_prepend last res))
$ = interleave & ~printer:(IO.to_string ( List.print Int.print ) )
( interleave 0 [ 1;2;3 ] ) [ 1;0;2;0;3 ]
( interleave 0 [ 1 ] ) [ 1 ]
( interleave 0 [ ] ) [ ]
( interleave ~first:(-1 ) 0 [ 1;2;3 ] ) [ -1;1;0;2;0;3 ]
( interleave ~first:(-1 ) 0 [ 1 ] ) [ -1;1 ]
( interleave ~first:(-1 ) 0 [ ] ) [ -1 ]
( interleave ~last:(-2 ) 0 [ 1;2;3 ] ) [ 1;0;2;0;3;-2 ]
( interleave ~last:(-2 ) 0 [ 1 ] ) [ 1;-2 ]
( interleave ~last:(-2 ) 0 [ ] ) [ -2 ]
( interleave ~first:(-1 ) ~last:(-2 ) 0 [ 1;2;3 ] ) [ -1;1;0;2;0;3;-2 ]
( interleave ~first:(-1 ) ~last:(-2 ) 0 [ 1 ] ) [ -1;1;-2 ]
( interleave ~first:(-1 ) ~last:(-2 ) 0 [ ] ) [ -1;-2 ]
(interleave 0 [1;2;3]) [1;0;2;0;3]
(interleave 0 [1]) [1]
(interleave 0 []) []
(interleave ~first:(-1) 0 [1;2;3]) [-1;1;0;2;0;3]
(interleave ~first:(-1) 0 [1]) [-1;1]
(interleave ~first:(-1) 0 []) [-1]
(interleave ~last:(-2) 0 [1;2;3]) [1;0;2;0;3;-2]
(interleave ~last:(-2) 0 [1]) [1;-2]
(interleave ~last:(-2) 0 []) [-2]
(interleave ~first:(-1) ~last:(-2) 0 [1;2;3]) [-1;1;0;2;0;3;-2]
(interleave ~first:(-1) ~last:(-2) 0 [1]) [-1;1;-2]
(interleave ~first:(-1) ~last:(-2) 0 []) [-1;-2]
*)
let unique ?(eq = ( = )) l =
let rec loop dst = function
| [] -> ()
| h :: t ->
match exists (eq h) t with
| true -> loop dst t
| false ->
loop (Acc.accum dst h) t
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
$ = unique & ~printer:(IO.to_string ( List.print Int.print ) )
[ 1;2;3;4;5;6 ] ( unique [ 1;1;2;2;3;3;4;5;6;4;5;6 ] )
[ 1 ] ( unique [ 1;1;1;1;1;1;1;1;1;1 ] )
[ 1;2 ] ( unique ~eq:(fun x y - > x land 1 = y land 1 ) [ 2;2;2;4;6;8;3;1;2 ] )
[1;2;3;4;5;6] (unique [1;1;2;2;3;3;4;5;6;4;5;6])
[1] (unique [1;1;1;1;1;1;1;1;1;1])
[1;2] (unique ~eq:(fun x y -> x land 1 = y land 1) [2;2;2;4;6;8;3;1;2])
*)
let unique_cmp ?(cmp = Pervasives.compare) l =
let set = ref (BatMap.PMap.create cmp) in
let should_keep x =
if BatMap.PMap.mem x !set then false
else ( set := BatMap.PMap.add x true !set; true )
in
List.filter should_keep l
$ = unique_cmp & ~printer:(IO.to_string ( List.print Int.print ) )
[ 1;2;3;4;5;6 ] ( unique_cmp [ 1;1;2;2;3;3;4;5;6;4;5;6 ] )
[ 1 ] ( unique_cmp [ 1;1;1;1;1;1;1;1;1;1 ] )
[ 2;3 ] ( unique_cmp ~cmp:(fun x y - > Int.compare ( x land 1 ) ( y land 1 ) ) [ 2;2;2;4;6;8;3;1;2 ] )
[1;2;3;4;5;6] (unique_cmp [1;1;2;2;3;3;4;5;6;4;5;6])
[1] (unique_cmp [1;1;1;1;1;1;1;1;1;1])
[2;3] (unique_cmp ~cmp:(fun x y -> Int.compare (x land 1) (y land 1)) [2;2;2;4;6;8;3;1;2])
*)
let unique_hash (type et) ?(hash = Hashtbl.hash) ?(eq = (=)) (l : et list) =
let module HT = Hashtbl.Make(struct type t = et let equal = eq let hash = hash end) in
let ht = HT.create (List.length l) in
let rec loop dst = function
| h::t when not (HT.mem ht h) ->
loop
t
loop dst t
| [] -> ()
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
$ = unique_hash & ~printer:(IO.to_string ( List.print Int.print ) )
[ 1;2;3;4;5;6 ] ( unique_hash [ 1;1;2;2;3;3;4;5;6;4;5;6 ] )
[ 1 ] ( unique_hash [ 1;1;1;1;1;1;1;1;1;1 ] )
[ 2;3 ] ( unique_hash ~hash:(fun x - > Hashtbl.hash ( x land 1 ) ) ~eq:(fun x y - > x land 1 = y land 1 ) [ 2;2;2;4;6;8;3;1;2 ] )
[1;2;3;4;5;6] (unique_hash [1;1;2;2;3;3;4;5;6;4;5;6])
[1] (unique_hash [1;1;1;1;1;1;1;1;1;1])
[2;3] (unique_hash ~hash:(fun x -> Hashtbl.hash (x land 1)) ~eq:(fun x y -> x land 1 = y land 1) [2;2;2;4;6;8;3;1;2])
*)
let filter_map f l =
let rec loop dst = function
| [] -> ()
| h :: t ->
match f h with
| None -> loop dst t
| Some x ->
loop (Acc.accum dst x) t
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
let filteri_map f l =
let rec loop i dst = function
| [] -> ()
| h :: t ->
match f i h with
| None -> loop (succ i) dst t
| Some x ->
loop (succ i) (Acc.accum dst x) t
in
let dummy = Acc.dummy () in
loop 0 dummy l;
dummy.tl
$ T filteri_map
( let r = ref ( -1 ) in filteri_map ( fun i _ - > incr r ; if i = ! r then Some i else None ) [ 5 ; 4 ; 8 ] = [ 0 ; 1 ; 2 ] )
filteri_map ( fun _ x - > if x > 4 then Some ( x , string_of_int x ) else None ) [ 5 ; 4 ; 8 ] = [ ( 5 , " 5 " ) ; ( 8 , " 8 " ) ]
filteri_map ( fun _ _ - > Some ( ) ) [ ] = [ ]
filteri_map ( fun _ _ - > None ) [ 1 ; 2 ] = [ ]
(let r = ref (-1) in filteri_map (fun i _ -> incr r; if i = !r then Some i else None) [5; 4; 8] = [0; 1; 2])
filteri_map (fun _ x -> if x > 4 then Some (x, string_of_int x) else None) [5; 4; 8] = [(5, "5"); (8, "8")]
filteri_map (fun _ _ -> Some ()) [] = []
filteri_map (fun _ _ -> None) [1; 2] = []
*)
let rec find_map f = function
| [] -> raise Not_found
| x :: xs ->
match f x with
| Some y -> y
| None -> find_map f xs
let fold_right_max = 1000
let fold_right f l init =
let rec tail_loop acc = function
| [] -> acc
| h :: t -> tail_loop (f h acc) t
in
let rec loop n = function
| [] -> init
| h :: t ->
if n < fold_right_max then
f h (loop (n+1) t)
else
f h (tail_loop init (rev t))
in
loop 0 l
let map2 f l1 l2 =
let rec loop dst src1 src2 =
match src1, src2 with
| [], [] -> ()
| h1 :: t1, h2 :: t2 ->
loop (Acc.accum dst (f h1 h2)) t1 t2
| _ -> invalid_arg "map2: Different_list_size"
in
let dummy = Acc.dummy () in
loop dummy l1 l2;
dummy.tl
let rec iter2 f l1 l2 =
match l1, l2 with
| [], [] -> ()
| h1 :: t1, h2 :: t2 -> f h1 h2; iter2 f t1 t2
| _ -> invalid_arg "iter2: Different_list_size"
let rec fold_left2 f accum l1 l2 =
match l1, l2 with
| [], [] -> accum
| h1 :: t1, h2 :: t2 -> fold_left2 f (f accum h1 h2) t1 t2
| _ -> invalid_arg "fold_left2: Different_list_size"
let fold_right2 f l1 l2 init =
let rec tail_loop acc l1 l2 =
match l1, l2 with
| [] , [] -> acc
| h1 :: t1 , h2 :: t2 -> tail_loop (f h1 h2 acc) t1 t2
| _ -> invalid_arg "fold_left2: Different_list_size"
in
let rec loop n l1 l2 =
match l1, l2 with
| [], [] -> init
| h1 :: t1, h2 :: t2 ->
if n < fold_right_max then
f h1 h2 (loop (n+1) t1 t2)
else
f h1 h2 (tail_loop init (rev t1) (rev t2))
| _ -> invalid_arg "fold_right2: Different_list_size"
in
loop 0 l1 l2
let for_all2 p l1 l2 =
let rec loop l1 l2 =
match l1, l2 with
| [], [] -> true
| h1 :: t1, h2 :: t2 -> if p h1 h2 then loop t1 t2 else false
| _ -> invalid_arg "for_all2: Different_list_size"
in
loop l1 l2
let exists2 p l1 l2 =
let rec loop l1 l2 =
match l1, l2 with
| [], [] -> false
| h1 :: t1, h2 :: t2 -> if p h1 h2 then true else loop t1 t2
| _ -> invalid_arg "exists2: Different_list_size"
in
loop l1 l2
let remove_assoc x lst =
let rec loop dst = function
| [] -> ()
| (a, _ as pair) :: t ->
if a = x then
dst.tl <- t
else
loop (Acc.accum dst pair) t
in
let dummy = Acc.dummy () in
loop dummy lst;
dummy.tl
let remove_assq x lst =
let rec loop dst = function
| [] -> ()
| (a, _ as pair) :: t ->
if a == x then
dst.tl <- t
else
loop (Acc.accum dst pair) t
in
let dummy = Acc.dummy () in
loop dummy lst;
dummy.tl
let remove_at i lst =
let rec loop dst i = function
| [] -> invalid_arg "BatList.remove_at"
| x :: xs ->
if i = 0 then
dst.tl <- xs
else
loop (Acc.accum dst x) (i - 1) xs
in
if i < 0 then
invalid_arg "BatList.remove_at"
else
let dummy = Acc.dummy () in
loop dummy i lst;
dummy.tl
$ T remove_at
try ignore ( remove_at 0 [ ] ) ; false with Invalid_argument _ - > true
try ignore ( remove_at 1 [ 0 ] ) ; false with Invalid_argument _ - > true
remove_at 0 [ 0 ] = [ ]
remove_at 0 [ 0 ; 1 ; 2 ] = [ 1 ; 2 ]
remove_at 1 [ 0 ; 1 ; 2 ] = [ 0 ; 2 ]
remove_at 2 [ 0 ; 1 ; 2 ] = [ 0 ; 1 ]
try ignore (remove_at 0 []) ; false with Invalid_argument _ -> true
try ignore (remove_at 1 [0]); false with Invalid_argument _ -> true
remove_at 0 [0] = []
remove_at 0 [0; 1; 2] = [1; 2]
remove_at 1 [0; 1; 2] = [0; 2]
remove_at 2 [0; 1; 2] = [0; 1]
*)
let rfind p l = find p (rev l)
let find_all p l =
let rec findnext dst = function
| [] -> ()
| h :: t ->
if p h then
findnext (Acc.accum dst h) t
else
findnext dst t
in
let dummy = Acc.dummy () in
findnext dummy l;
dummy.tl
let rec findi p l =
let rec loop n = function
| [] -> raise Not_found
| h :: t ->
if p n h then (n,h) else loop (n+1) t
in
loop 0 l
let rec index_of e l =
let rec loop n = function
| [] -> None
| h::_ when h = e -> Some n
| _::t -> loop ( n + 1 ) t
in loop 0 l
let rec index_ofq e l =
let rec loop n = function
| [] -> None
| h::_ when h == e -> Some n
| _::t -> loop ( n + 1 ) t
in loop 0 l
let rec rindex_of e l =
let rec loop n acc = function
| [] -> acc
| h::t when h = e -> loop ( n + 1) ( Some n ) t
| _::t -> loop ( n + 1 ) acc t
in loop 0 None l
let rec rindex_ofq e l =
let rec loop n acc = function
| [] -> acc
| h::t when h == e -> loop ( n + 1) ( Some n ) t
| _::t -> loop ( n + 1 ) acc t
in loop 0 None l
let filter = find_all
let filteri f =
let rec aux i = function
| [] -> []
| x::xs when f i x -> x :: aux (succ i) xs
| x::xs -> aux (succ i) xs
in
aux 0
$ T filteri
( let r = ref ( -1 ) in filteri ( fun i _ - > incr r ; i = ! r ) [ 5 ; 4 ; 8 ] = [ 5 ; 4 ; 8 ] )
filteri ( fun _ x - > x > 4 ) [ 5 ; 4 ; 8 ] = [ 5 ; 8 ]
filteri ( fun _ _ - > true ) [ ] = [ ]
(let r = ref (-1) in filteri (fun i _ -> incr r; i = !r) [5; 4; 8] = [5; 4; 8])
filteri (fun _ x -> x > 4) [5; 4; 8] = [5; 8]
filteri (fun _ _ -> true) [] = []
*)
let partition p lst =
let rec loop yesdst nodst = function
| [] -> ()
| h :: t ->
if p h then
loop (Acc.accum yesdst h) nodst t
else
loop yesdst (Acc.accum nodst h) t
in
let yesdummy = Acc.dummy ()
and nodummy = Acc.dummy ()
in
loop yesdummy nodummy lst;
(yesdummy.tl, nodummy.tl)
let split lst =
let rec loop adst bdst = function
| [] -> ()
| (a, b) :: t ->
loop (Acc.accum adst a) (Acc.accum bdst b) t
in
let adummy = Acc.dummy ()
and bdummy = Acc.dummy ()
in
loop adummy bdummy lst;
adummy.tl, bdummy.tl
let combine l1 l2 =
let list_sizes_differ = Invalid_argument "combine: Different_list_size" in
match l1, l2 with
| [], [] -> []
| x :: xs, y :: ys ->
let acc = Acc.create (x, y) in
let rec loop dst l1 l2 = match l1, l2 with
| [], [] -> inj acc
| h1 :: t1, h2 :: t2 -> loop (Acc.accum dst (h1, h2)) t1 t2
| _, _ -> raise list_sizes_differ
in loop acc xs ys
| _, _ -> raise list_sizes_differ
$ T combine
combine [ ] [ ] = [ ]
combine [ 1 ] [ 2 ] = [ ( 1 , 2 ) ]
combine [ 1 ; 3 ] [ 2 ; 4 ] = [ ( 1 , 2 ) ; ( 3 , 4 ) ]
combine [] [] = []
combine [1] [2] = [(1, 2)]
combine [1; 3] [2; 4] = [(1, 2); (3, 4)]
*)
let init size f =
if size = 0 then []
else if size < 0 then invalid_arg "BatList.init"
else
let rec loop dst n =
if n < size then
loop (Acc.accum dst (f n)) (n+1)
in
let r = Acc.create (f 0) in
loop r 1;
inj r
let unfold_exc f =
let rec loop dst =
loop (Acc.accum dst (f ()))
in
let acc = Acc.dummy () in
try
loop acc
with exn -> (acc.tl, exn)
$ T unfold_exc
let exc ( ) = raise End_of_file in \
unfold_exc exc = ( [ ] , End_of_file )
let state = ref 0 in \
let ( ) = \
if ! state = 1 then raise End_of_file \
else let _ = incr state in 0 \
in \
unfold_exc just_zero = ( [ 0 ] , End_of_file )
let exc () = raise End_of_file in \
unfold_exc exc = ([], End_of_file)
let state = ref 0 in \
let just_zero () = \
if !state = 1 then raise End_of_file \
else let _ = incr state in 0 \
in \
unfold_exc just_zero = ([0], End_of_file)
*)
let make i x =
if i < 0 then invalid_arg "List.make";
let rec loop x acc = function
| 0 -> acc
| i -> loop x (x::acc) (i-1)
in
loop x [] i
let range i dir j =
let op = match dir with
| `To ->
if i > j
then invalid_arg (Printf.sprintf "List.range %d `To %d" i j)
else pred
| `Downto ->
if i < j
then invalid_arg (Printf.sprintf "List.range %d `Downto %d" i j)
else succ
in
let rec loop acc k =
if i = k then
k :: acc
else
loop (k :: acc) (op k)
in
loop [] j
$ T range
range 1 ` To 3 = [ 1 ; 2 ; 3 ]
range 1 ` To 1 = [ 1 ]
range 3 ` Downto 1 = [ 3 ; 2 ; 1 ]
range 3 ` Downto 3 = [ 3 ]
try ignore(range 1 ` To 0 ) ; true with > true
try ignore(range 1 ` Downto 2 ) ; true with > true
range 1 `To 3 = [1; 2; 3]
range 1 `To 1 = [1]
range 3 `Downto 1 = [3; 2; 1]
range 3 `Downto 3 = [3]
try ignore(range 1 `To 0); true with Invalid_argument _ -> true
try ignore(range 1 `Downto 2); true with Invalid_argument _ -> true
*)
let mapi f = function
| [] -> []
| h :: t ->
let rec loop dst n = function
| [] -> ()
| h :: t ->
loop (Acc.accum dst (f n h)) (n + 1) t
in
let r = Acc.create (f 0 h) in
loop r 1 t;
inj r
let iteri f l =
let rec loop n = function
| [] -> ()
| h :: t ->
f n h;
loop (n+1) t
in
loop 0 l
let fold_lefti f init l =
let rec loop i acc = function
| [] -> acc
| x :: xs -> loop (i + 1) (f acc i x) xs
in
loop 0 init l
$ T fold_lefti
fold_lefti ( fun acc i x - > ( i , x ) : : acc ) [ ] [ ] = [ ]
fold_lefti ( fun acc i x - > ( i , x ) : : acc ) [ ] [ 0 . ] = [ ( 0 , 0 . ) ]
fold_lefti ( fun acc i x - > ( i , x ) : : acc ) [ ] [ 0 . ; 1 . ] = [ ( 1 , 1 . ) ; ( 0 , 0 . ) ]
fold_lefti (fun acc i x -> (i, x) :: acc) [] [] = []
fold_lefti (fun acc i x -> (i, x) :: acc) [] [0.] = [(0, 0.)]
fold_lefti (fun acc i x -> (i, x) :: acc) [] [0.; 1.] = [(1, 1.); (0, 0.)]
*)
let fold_righti f l init =
let xis =
fold_lefti (fun acc i x -> (i, x) :: acc) [] l
in
fold_left
(fun acc (i, x) -> f i x acc)
init
xis
$ T fold_righti
fold_righti ( fun i x acc - > ( i , x ) : : acc ) [ ] [ ] = [ ]
fold_righti ( fun i x acc - > ( i , x ) : : acc ) [ 0 . ] [ ] = [ ( 0 , 0 . ) ]
fold_righti ( fun i x acc - > ( i , x ) : : acc ) [ 0 . ; 1 . ] [ ] = [ ( 0 , 0 . ) ; ( 1 , 1 . ) ]
fold_righti (fun i x acc -> (i, x) :: acc) [] [] = []
fold_righti (fun i x acc -> (i, x) :: acc) [0.] [] = [(0, 0.)]
fold_righti (fun i x acc -> (i, x) :: acc) [0.; 1.] [] = [(0, 0.); (1, 1.)]
*)
let first = hd
let rec last = function
| [] -> invalid_arg "Empty List"
| h :: [] -> h
| _ :: t -> last t
let split_nth index = function
| [] -> if index = 0 then [],[] else invalid_arg at_after_end_msg
| (h :: t as l) ->
if index = 0 then [],l
else if index < 0 then invalid_arg at_negative_index_msg
else
let rec loop n dst l =
if n = 0 then l else
match l with
| [] -> invalid_arg at_after_end_msg
| h :: t ->
loop (n - 1) (Acc.accum dst h) t
in
let r = Acc.create h in
inj r, loop (index-1) r t
let split_at = split_nth
let find_exn f e l =
try
find f l
with
Not_found -> raise e
let remove l x =
let rec loop dst = function
| [] -> ()
| h :: t ->
if x = h then
dst.tl <- t
else
loop (Acc.accum dst h) t
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
let remove_if f lst =
let rec loop dst = function
| [] -> ()
| x :: l ->
if f x then
dst.tl <- l
else
loop (Acc.accum dst x) l
in
let dummy = Acc.dummy () in
loop dummy lst;
dummy.tl
let remove_all l x =
let rec loop dst = function
| [] -> ()
| h :: t ->
if x = h then
loop dst t
else
loop (Acc.accum dst h) t
in
let dummy = Acc.dummy () in
loop dummy l;
dummy.tl
let transpose = function
| [] -> []
| [x] -> List.map (fun x -> [x]) x
| x::xs ->
let heads = List.map Acc.create x in
ignore ( fold_left
(fun acc x ->
map2
(fun x xs -> Acc.accum xs x)
x acc)
heads xs);
equivalent to List.map inj heads , but without creating a new list
$ T transpose
transpose [ [ 1 ; 2 ; 3 ; ] ; [ 4 ; 5 ; 6 ; ] ; [ 7 ; 8 ; 9 ; ] ] = [ [ 1;4;7];[2;5;8];[3;6;9 ] ]
transpose [ ] = [ ]
transpose [ [ 1 ] ] = [ [ 1 ] ]
transpose [ [1; 2; 3;]; [4; 5; 6;]; [7; 8; 9;] ] = [[1;4;7];[2;5;8];[3;6;9]]
transpose [] = []
transpose [ [1] ] = [ [1] ]
*)
let enum l =
let rec make lr count =
BatEnum.make
~next:(fun () ->
match !lr with
| [] -> raise BatEnum.No_more_elements
| h :: t ->
decr count;
lr := t;
h
)
~count:(fun () ->
if !count < 0 then count := length !lr;
!count
)
~clone:(fun () ->
make (ref !lr) (ref !count)
)
in
make (ref l) (ref (-1))
let of_enum e =
let h = Acc.dummy () in
let _ = BatEnum.fold Acc.accum h e in
h.tl
let backwards l = ( * This version only needs one pass but is actually less lazy
let rec aux acc = function
| [] -> acc
| h::t -> aux BatEnum.append (BatEnum.singleton h) acc
in aux l*)
let of_backwards e =
let rec aux acc = match BatEnum.get e with
| Some h -> aux (h::acc)
| None -> acc
in aux []
let assoc_inv e l =
let rec aux = function
| [] -> raise Not_found
| (a,b)::_ when b = e -> a
| _::t -> aux t
in aux l
let assq_inv e l =
let rec aux = function
| [] -> raise Not_found
| (a,b)::_ when b == e -> a
| _::t -> aux t
in aux l
let modify_opt a f l =
let rec aux p = function
| [] ->
(match f None with
| None -> raise Exit
| Some v -> rev ((a,v)::p))
| (a',b)::t when a' = a ->
(match f (Some b) with
| None -> rev_append p t
| Some b' -> rev_append ((a,b')::p) t)
| p'::t ->
aux (p'::p) t
in
try aux [] l with Exit -> l
$ = modify_opt & ~printer:(IO.to_string ( List.print ( fun fmt ( a , b ) - > Printf.fprintf fmt " % d,%d " a b ) ) )
( * to modify a value
(modify_opt 5 (function Some 1 -> Some 2 | _ -> assert false) [ 1,0 ; 5,1 ; 8,2 ]) \
[ 1,0 ; 5,2 ; 8,2 ]
(modify_opt 5 (function None -> Some 2 | _ -> assert false) [ 1,0 ; 8,2 ]) \
[ 1,0 ; 8,2 ; 5,2 ]
(modify_opt 5 (function Some 1 -> None | _ -> assert false) [ 1,0 ; 5,1 ; 8,2 ]) \
[ 1,0 ; 8,2 ]
*)
let modify a f l =
let f' = function
| None -> raise Not_found
| Some b -> Some (f b)
in
modify_opt a f' l
$ = modify & ~printer:(IO.to_string ( List.print ( fun fmt ( a , b ) - > Printf.fprintf fmt " % d,%d " a b ) ) )
( modify 5 succ [ 1,0 ; 5,1 ; 8,2 ] ) [ 1,0 ; 5,2 ; 8,2 ]
(modify 5 succ [ 1,0 ; 5,1 ; 8,2 ]) [ 1,0 ; 5,2 ; 8,2 ]
*)
$ T modify
try ignore ( modify 5 succ [ 1,0 ; 8,2 ] ) ; false with Not_found - > true
try ignore (modify 5 succ [ 1,0 ; 8,2 ]); false with Not_found -> true
*)
let modify_def dfl a f l =
let f' = function
| None -> Some (f dfl)
| Some b -> Some (f b)
in
modify_opt a f' l
$ = modify_def & ~printer:(IO.to_string ( List.print ( fun fmt ( a , b ) - > Printf.fprintf fmt " % d,%d " a b ) ) )
( modify_def 0 5 succ [ 1,0 ; 5,1 ; 8,2 ] ) [ 1,0 ; 5,2 ; 8,2 ]
( modify_def 0 5 succ [ 1,0 ; 8,2 ] ) [ 1,0 ; 8,2 ; 5,1 ]
(modify_def 0 5 succ [ 1,0 ; 5,1 ; 8,2 ]) [ 1,0 ; 5,2 ; 8,2 ]
(modify_def 0 5 succ [ 1,0 ; 8,2 ]) [ 1,0 ; 8,2 ; 5,1 ]
*)
let modify_opt_at n f l =
if n < 0 then invalid_arg at_negative_index_msg;
let rec loop acc n = function
| [] -> invalid_arg at_after_end_msg
| h :: t ->
if n <> 0 then loop (h :: acc) (n - 1) t
else match f h with
| None -> rev_append acc t
| Some v -> rev_append acc (v :: t)
in
loop [] n l
$ T modify_opt_at
modify_opt_at 2 ( fun n - > Some ( n*n ) ) [ 1;2;3;4;5 ] = [ 1;2;9;4;5 ]
modify_opt_at 2 ( fun _ - > None ) [ 1;2;3;4;5 ] = [ 1;2;4;5 ]
try ignore ( modify_opt_at 0 ( fun _ - > None ) [ ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_opt_at 2 ( fun _ - > None ) [ ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_opt_at ( -1 ) ( fun _ - > None ) [ 1;2;3 ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_opt_at 5 ( fun _ - > None ) [ 1;2;3 ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_opt_at 3 ( fun _ - > None ) [ 1;2;3 ] ) ; false \
with Invalid_argument _ - > true
modify_opt_at 2 (fun n -> Some (n*n)) [1;2;3;4;5] = [1;2;9;4;5]
modify_opt_at 2 (fun _ -> None) [1;2;3;4;5] = [1;2;4;5]
try ignore (modify_opt_at 0 (fun _ -> None) []); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at 2 (fun _ -> None) []); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at (-1) (fun _ -> None) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at 5 (fun _ -> None) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_opt_at 3 (fun _ -> None) [1;2;3]); false \
with Invalid_argument _ -> true
*)
let modify_at n f l =
modify_opt_at n (fun x -> Some (f x)) l
$ T modify_at
modify_at 2 ( ( + ) 1 ) [ 1;2;3;4 ] = [ 1;2;4;4 ]
try ignore ( modify_at 0 ( ( + ) 1 ) [ ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_at 2 ( ( + ) 1 ) [ ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_at ( -1 ) ( ( + ) 1 ) [ 1;2;3 ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_at 5 ( ( + ) 1 ) [ 1;2;3 ] ) ; false \
with Invalid_argument _ - > true
try ignore ( modify_at 3 ( ( + ) 1 ) [ 1;2;3 ] ) ; false \
with Invalid_argument _ - > true
modify_at 2 ((+) 1) [1;2;3;4] = [1;2;4;4]
try ignore (modify_at 0 ((+) 1) []); false \
with Invalid_argument _ -> true
try ignore (modify_at 2 ((+) 1) []); false \
with Invalid_argument _ -> true
try ignore (modify_at (-1) ((+) 1) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_at 5 ((+) 1) [1;2;3]); false \
with Invalid_argument _ -> true
try ignore (modify_at 3 ((+) 1) [1;2;3]); false \
with Invalid_argument _ -> true
*)
let sort_unique cmp lst =
let sorted = List.sort cmp lst in
let fold first rest = List.fold_left
(fun (acc, last) elem ->
if (cmp last elem) = 0 then (acc, elem)
else (elem::acc, elem)
)
([first], first)
rest
in
match sorted with
| [] -> []
| hd::tl ->
begin
let rev_result, _ = fold hd tl in
List.rev rev_result
end
let sort_uniq = List.sort_uniq
let group cmp lst =
let sorted = List.sort cmp lst in
let fold first rest = List.fold_left
(fun (acc, agr, last) elem ->
if (cmp last elem) = 0 then (acc, elem::agr, elem)
else (agr::acc, [elem], elem)
)
([], [first], first)
rest
in
match sorted with
| [] -> []
| hd::tl ->
begin
let groups, lastgr, _ = fold hd tl in
List.rev_map List.rev (lastgr::groups)
end
$ T group
group Pervasives.compare [ ] = [ ]
group Pervasives.compare [ 1 ] = [ [ 1 ] ]
group Pervasives.compare [ 2 ; 2 ] = [ [ 2 ; 2 ] ]
group Pervasives.compare [ 5 ; 4 ; 4 ; 2 ; 1 ; 6 ] = [ [ 1 ] ; [ 2 ] ; [ 4 ; 4 ] ; [ 5 ] ; [ 6 ] ]
group Pervasives.compare [] = []
group Pervasives.compare [1] = [[1]]
group Pervasives.compare [2; 2] = [[2; 2]]
group Pervasives.compare [5; 4; 4; 2; 1; 6] = [[1]; [2]; [4; 4]; [5]; [6]]
*)
let cartesian_product l1 l2 =
List.concat (List.map (fun i -> List.map (fun j -> (i,j)) l2) l1)
let rec n_cartesian_product = function
| [] -> [[]]
| h :: t ->
let rest = n_cartesian_product t in
List.concat (List.map (fun i -> List.map (fun r -> i :: r) rest) h)
$ T n_cartesian_product as ncp
ncp [ ] = [ [ ] ]
ncp [ [ ] ] = [ ]
ncp [ [ 1 ] ; [ 2 ] ; [ 3 ] ] = [ [ 1;2;3 ] ]
ncp [ [ 1;2;3 ] ] = [ [ 1 ] ; [ 2 ] ; [ 3 ] ]
ncp [ [ 1;2;3 ] ; [ ] ] = [ ]
ncp [ [ 1;2;3 ] ; [ 4;5 ] ] = [ [ 1;4 ] ; [ 1;5 ] ; [ 2;4 ] ; [ 2;5 ] ; [ 3;4 ] ; [ 3;5 ] ]
ncp [] = [[]]
ncp [[]] = []
ncp [[1]; [2]; [3]] = [[1;2;3]]
ncp [[1;2;3]] = [[1]; [2]; [3]]
ncp [[1;2;3]; []] = []
ncp [[1;2;3]; [4;5]] = [[1;4]; [1;5]; [2;4]; [2;5]; [3;4]; [3;5]]
*)
let print ?(first="[") ?(last="]") ?(sep="; ") print_a out = function
| [] ->
BatInnerIO.nwrite out first;
BatInnerIO.nwrite out last
| [h] ->
BatInnerIO.nwrite out first;
print_a out h;
BatInnerIO.nwrite out last
| h::t ->
BatInnerIO.nwrite out first;
print_a out h;
iter (fun x -> BatInnerIO.nwrite out sep; print_a out x) t;
BatInnerIO.nwrite out last
let t_printer a_printer _paren out x = print (a_printer false) out x
let reduce f = function [] -> invalid_arg "Empty List"
| h::t -> fold_left f h t
let min l = reduce Pervasives.min l
let max l = reduce Pervasives.max l
let sum l = reduce (+) l
let fsum l = reduce (+.) l
let kahan_sum li =
This algorithm is written in a particularly untasteful imperative
style to benefit from the nice unboxing of float references that
is harder to obtain with recursive functions today . See the
definition of sum on arrays , on which this one is directly
modeled .
style to benefit from the nice unboxing of float references that
is harder to obtain with recursive functions today. See the
definition of kahan sum on arrays, on which this one is directly
modeled. *)
let li = ref li in
let continue = ref (!li <> []) in
let sum = ref 0. in
let err = ref 0. in
while !continue do
match !li with
| [] -> continue := false
| x::xs ->
li := xs;
let x = x -. !err in
let new_sum = !sum +. x in
err := (new_sum -. !sum) -. x;
sum := new_sum +. 0.;
done;
!sum +. 0.
$ T kahan_sum
kahan_sum [ ] = 0 .
kahan_sum [ 1 . ; 2 . ] = 3 .
let n , x = 1_000 , 1.1 in \
Float.approx_equal ( float n * . x ) \
( kahan_sum ( List.make n x ) )
kahan_sum [ ] = 0.
kahan_sum [ 1.; 2. ] = 3.
let n, x = 1_000, 1.1 in \
Float.approx_equal (float n *. x) \
(kahan_sum (List.make n x))
*)
let min_max ?cmp:(cmp = Pervasives.compare) = function
| [] -> invalid_arg "List.min_max: Empty List"
| x :: xs ->
fold_left
(fun (curr_min, curr_max) y ->
let new_min =
if cmp curr_min y = 1
then y
else curr_min
in
let new_max =
if cmp curr_max y = -1
then y
else curr_max
in
(new_min, new_max)
)
(x, x)
xs
$ T min_max
min_max [ 1 ] = ( 1 , 1 )
min_max [ 1 ; 1 ] = ( 1 , 1 )
min_max [ 1 ; -2 ; 3 ; 4 ; 5 ; 60 ; 7 ; 8 ] = ( -2 , 60 )
min_max [1] = (1, 1)
min_max [1; 1] = (1, 1)
min_max [1; -2; 3; 4; 5; 60; 7; 8] = (-2, 60)
*)
let unfold b f =
let acc = Acc.dummy () in
let rec loop dst v =
match f v with
| None -> acc.tl
| Some (a, v) -> loop (Acc.accum dst a) v
in loop acc b
$ T unfold
unfold 1 ( fun x - > None ) = [ ]
unfold 0 ( fun x - > if x > 3 then None else Some ( x , succ x ) ) = [ 0;1;2;3 ]
unfold 1 (fun x -> None) = []
unfold 0 (fun x -> if x > 3 then None else Some (x, succ x)) = [0;1;2;3]
*)
let subset cmp l l' = for_all (fun x -> mem_cmp cmp x l') l
$ T subset
subset Pervasives.compare [ 1;2;3;4 ] [ 1;2;3 ] = false
subset Pervasives.compare [ 1;2;3 ] [ 1;2;3 ] = true
subset Pervasives.compare [ 3;2;1 ] [ 1;2;3 ] = true
subset Pervasives.compare [ 1;2 ] [ 1;2;3 ] = true
subset Pervasives.compare [1;2;3;4] [1;2;3] = false
subset Pervasives.compare [1;2;3] [1;2;3] = true
subset Pervasives.compare [3;2;1] [1;2;3] = true
subset Pervasives.compare [1;2] [1;2;3] = true
*)
module Exceptionless = struct
let rfind p l =
try Some (rfind p l)
with Not_found -> None
let find p l =
try Some (find p l)
with Not_found -> None
let findi p l =
try Some (findi p l)
with Not_found -> None
let split_at n l =
try `Ok (split_at n l)
with Invalid_argument s -> `Invalid_argument s
let at n l =
try `Ok (at n l)
with Invalid_argument s -> `Invalid_argument s
let assoc e l =
try Some (assoc e l)
with Not_found -> None
let assq e l =
try Some (assq e l)
with Not_found -> None
let assoc_inv e l =
try Some (assoc_inv e l)
with Not_found -> None
let find_map f l =
try Some(find_map f l)
with Not_found -> None
let hd l =
try Some (hd l)
with Failure _ -> None
let tl l =
try Some (tl l)
with Failure _ -> None
let rec last = function
| [] -> None
| [x] -> Some x
| _ :: l -> last l
end
module Labels = struct
type 'a t = 'a list
let init i ~f = init i f
let make n x = make n x
let iteri ~f l = iteri f l
let map ~f l = map f l
let mapi ~f l = mapi f l
let rfind ~f l = rfind f l
let find ~f l = find f l
let findi ~f = findi f
let find_exn ~f = find_exn f
let filter_map ~f = filter_map f
let remove_if ~f = remove_if f
let take_while ~f = take_while f
let drop_while ~f = drop_while f
let map2 ~f = map2 f
let iter2 ~f = iter2 f
let exists2 ~f = exists2 f
let fold_left ~f ~init = fold_left f init
let fold_right ~f l ~init = fold_right f l init
let fold_left2 ~f ~init = fold_left2 f init
let fold_right2 ~f l1 l2 ~init = fold_right2 f l1 l2 init
let filter ~f = filter f
let find_all ~f = find_all f
let partition ~f = partition f
let rev_map ~f = rev_map f
let rev_map2 ~f = rev_map2 f
let iter ~f = iter f
let for_all ~f = for_all f
let for_all2 ~f = for_all2 f
let exists ~f = exists f
let subset ~cmp = subset cmp
let stable_sort ?(cmp=compare) = stable_sort cmp
let fast_sort ?(cmp=compare) = fast_sort cmp
let sort ?(cmp=compare) = sort cmp
let merge ?(cmp=compare) = merge cmp
module LExceptionless = struct
include Exceptionless
let rfind ~f l = rfind f l
let find ~f l = find f l
let findi ~f l = findi f l
end
end
let ( @ ) = List.append
module Infix = struct
let ( @ ) = ( @ )
end
open BatOrd
let rec eq eq_elt l1 l2 =
match l1 with
| [] -> (match l2 with [] -> true | _ -> false)
| hd1::tl1 ->
(match l2 with
| [] -> false
| hd2::tl2 -> bin_eq eq_elt hd1 hd2 (eq eq_elt) tl1 tl2)
let rec ord ord_elt l1 l2 =
match l1 with
| [] -> (match l2 with [] -> Eq | _::_ -> Lt)
| hd1::tl1 ->
(match l2 with
| [] -> Gt
| hd2::tl2 -> bin_ord ord_elt hd1 hd2 (ord ord_elt) tl1 tl2)
let rec compare comp_elt l1 l2 =
match l1 with
| [] -> (match l2 with [] -> 0 | _::_ -> -1)
| hd1::tl1 ->
(match l2 with
| [] -> 1
| hd2::tl2 -> bin_comp comp_elt hd1 hd2 (compare comp_elt) tl1 tl2)
module Eq (T : Eq) = struct
type t = T.t list
let eq = eq T.eq
end
module Ord (T : Ord) = struct
type t = T.t list
let ord = ord T.ord
end
module Comp (T : Comp) = struct
type t = T.t list
let compare = compare T.compare
end |
9f56aa75683ea217c428cfd4fb08d17c4edc66cd3a5ff123c450a5531f25d239 | slipstream/SlipStreamServer | session_test.cljc | (ns com.sixsq.slipstream.ssclj.resources.spec.session-test
(:require
[clojure.test :refer [deftest is]]
[com.sixsq.slipstream.ssclj.resources.session :refer :all]
[com.sixsq.slipstream.ssclj.resources.spec.session :as session]
[com.sixsq.slipstream.ssclj.resources.spec.spec-test-utils :as stu]))
(def valid-acl {:owner {:principal "ADMIN"
:type "ROLE"}
:rules [{:type "ROLE",
:principal "ADMIN",
:right "ALL"}]})
(deftest check-session-schema
(let [timestamp "1964-08-25T10:00:00.0Z"
cfg {:id (str resource-url "/internal")
:resourceURI resource-uri
:created timestamp
:updated timestamp
:acl valid-acl
:username "ssuser"
:method "internal"
:expiry timestamp
:server "nuv.la"
:clientIP "127.0.0.1"
:redirectURI ""
:sessionTemplate {:href "session-template/internal"}}]
(stu/is-valid ::session/session cfg)
(doseq [attr #{:id :resourceURI :created :updated :acl :method :expiry :sessionTemplate}]
(stu/is-invalid ::session/session (dissoc cfg attr)))
(doseq [attr #{:username :server :clientIP}]
(stu/is-valid ::session/session (dissoc cfg attr)))))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi/test/com/sixsq/slipstream/ssclj/resources/spec/session_test.cljc | clojure | (ns com.sixsq.slipstream.ssclj.resources.spec.session-test
(:require
[clojure.test :refer [deftest is]]
[com.sixsq.slipstream.ssclj.resources.session :refer :all]
[com.sixsq.slipstream.ssclj.resources.spec.session :as session]
[com.sixsq.slipstream.ssclj.resources.spec.spec-test-utils :as stu]))
(def valid-acl {:owner {:principal "ADMIN"
:type "ROLE"}
:rules [{:type "ROLE",
:principal "ADMIN",
:right "ALL"}]})
(deftest check-session-schema
(let [timestamp "1964-08-25T10:00:00.0Z"
cfg {:id (str resource-url "/internal")
:resourceURI resource-uri
:created timestamp
:updated timestamp
:acl valid-acl
:username "ssuser"
:method "internal"
:expiry timestamp
:server "nuv.la"
:clientIP "127.0.0.1"
:redirectURI ""
:sessionTemplate {:href "session-template/internal"}}]
(stu/is-valid ::session/session cfg)
(doseq [attr #{:id :resourceURI :created :updated :acl :method :expiry :sessionTemplate}]
(stu/is-invalid ::session/session (dissoc cfg attr)))
(doseq [attr #{:username :server :clientIP}]
(stu/is-valid ::session/session (dissoc cfg attr)))))
| |
e921bf20b07a547c0b9dcca508598a5757fd5cebbf21072f44a93a23c8d103d2 | ksseono/the-joy-of-clojure | promises.clj | (ns joy.promises
(:require [joy.mutation :refer (dothreads!)])
(:require [joy.futures :refer (feed-children)]))
(def x (promise))
(def y (promise))
(def z (promise))
(comment
(dothreads! #(deliver z (+ @x @y)))
(dothreads!
#(do (Thread/sleep 2000) (deliver x 52)))
(dothreads!
#(do (Thread/sleep 4000) (deliver y 86)))
(time @z)
" Elapsed time : 3115.154625 msecs "
138
)
;;
Listing 11.8
;;
(defmacro with-promises [[n tasks _ as] & body]
(when as
`(let [tasks# ~tasks
n# (count tasks#)
promises# (take n# (repeatedly promise))]
(dotimes [i# n#]
(dothreads!
(fn []
(deliver (nth promises# i#)
((nth tasks# i#))))))
(let [~n tasks#
~as promises#]
~@body))))
;;
Listing 11.9
;;
(defrecord TestRun [run passed failed])
(defn pass [] true)
(defn fail [] false)
(defn run-tests [& all-tests]
(with-promises
[tests all-tests :as results]
(into (TestRun. 0 0 0)
(reduce #(merge-with + %1 %2) {}
(for [r results]
(if @r
{:run 1 :passed 1}
{:run 1 :failed 1}))))))
(comment
(run-tests pass fail fail fail pass)
= > # joy.promises . TestRun{:run 5 , : passed 2 , : failed 3 }
)
(defn feed-items [k feed]
(k
(for [item (filter (comp #{:entry :item} :tag)
(feed-children feed))]
(-> item :content first :content))))
(comment
(feed-items
count
"/")
= > 5
(let [p (promise)]
(feed-items #(deliver p (count %))
"/")
@p)
= > 5
)
;;
Listing 11.10
;;
(defn cps->fn [f k]
(fn [& args]
(let [p (promise)]
(apply f (fn [x] (deliver p (k x))) args)
@p)))
(def count-items (cps->fn feed-items count))
(comment
(count-items "/")
= > 5
)
| null | https://raw.githubusercontent.com/ksseono/the-joy-of-clojure/4fee3fb2750236b85b59ae9d7a83e0929040e4f0/src/clj/ch11/joy/promises.clj | clojure | (ns joy.promises
(:require [joy.mutation :refer (dothreads!)])
(:require [joy.futures :refer (feed-children)]))
(def x (promise))
(def y (promise))
(def z (promise))
(comment
(dothreads! #(deliver z (+ @x @y)))
(dothreads!
#(do (Thread/sleep 2000) (deliver x 52)))
(dothreads!
#(do (Thread/sleep 4000) (deliver y 86)))
(time @z)
" Elapsed time : 3115.154625 msecs "
138
)
Listing 11.8
(defmacro with-promises [[n tasks _ as] & body]
(when as
`(let [tasks# ~tasks
n# (count tasks#)
promises# (take n# (repeatedly promise))]
(dotimes [i# n#]
(dothreads!
(fn []
(deliver (nth promises# i#)
((nth tasks# i#))))))
(let [~n tasks#
~as promises#]
~@body))))
Listing 11.9
(defrecord TestRun [run passed failed])
(defn pass [] true)
(defn fail [] false)
(defn run-tests [& all-tests]
(with-promises
[tests all-tests :as results]
(into (TestRun. 0 0 0)
(reduce #(merge-with + %1 %2) {}
(for [r results]
(if @r
{:run 1 :passed 1}
{:run 1 :failed 1}))))))
(comment
(run-tests pass fail fail fail pass)
= > # joy.promises . TestRun{:run 5 , : passed 2 , : failed 3 }
)
(defn feed-items [k feed]
(k
(for [item (filter (comp #{:entry :item} :tag)
(feed-children feed))]
(-> item :content first :content))))
(comment
(feed-items
count
"/")
= > 5
(let [p (promise)]
(feed-items #(deliver p (count %))
"/")
@p)
= > 5
)
Listing 11.10
(defn cps->fn [f k]
(fn [& args]
(let [p (promise)]
(apply f (fn [x] (deliver p (k x))) args)
@p)))
(def count-items (cps->fn feed-items count))
(comment
(count-items "/")
= > 5
)
| |
24fde590b87d80424827ed4400c8496cd81cb0a730c8dff998105d6fa491d12c | jimcrayne/jhc | Demand.hs | module E.Demand(
Demand(..),
DemandSignature(..),
DemandType(..),
SubDemand(..),
analyzeProgram,
absSig,
lazy
) where
import Control.Monad.Reader
import Control.Monad.Writer hiding(Product(..))
import Data.Binary
import Data.List hiding(union,delete)
import Data.Typeable
--import Debug.Trace
import DataConstructors
import Doc.DocLike
import Doc.PPrint
import E.E
import E.Program
import GenUtil
import Info.Types
import Name.Id
import Util.HasSize
import Util.SetLike
import qualified Info.Info as Info
trace _ x = x
data Demand =
Bottom -- always diverges
| L !SubDemand -- lazy
| S !SubDemand -- strict
| Error !SubDemand -- diverges, might use arguments
| Absent -- Not used
deriving(Eq,Ord,Typeable)
{-! derive: Binary !-}
data SubDemand = None | Product ![Demand]
deriving(Eq,Ord)
{-! derive: Binary !-}
data DemandEnv = DemandEnv !(IdMap Demand) !Demand
deriving(Eq,Ord)
{-! derive: Binary !-}
data DemandType = (:=>) !DemandEnv ![Demand]
deriving(Eq,Ord)
{-! derive: Binary !-}
data DemandSignature = DemandSignature {-# UNPACK #-} !Int !DemandType
deriving(Eq,Ord,Typeable)
{-! derive: Binary !-}
idGlb = Absent
absType = (DemandEnv mempty idGlb) :=> []
botType = ( DemandEnv ) : = > [ ]
botType = (DemandEnv mempty Bottom) :=> []
lazyType = ( DemandEnv lazy ) : = > [ ]
--lazySig = DemandSignature 0 lazyType
absSig = DemandSignature 0 absType
class Lattice a where
glb :: a -> a -> a
lub :: a -> a -> a
-- Sp [L .. L] = S
-- Sp [.. _|_ ..] = _|_
sp [] = S None
sp xs = S (allLazy xs) -- None
l None = L None
l (Product xs) = lp xs
s None = S None
s (Product xs) = sp xs
allLazy xs | all (== lazy) xs = None
allLazy xs = Product xs
lp [] = L None
lp xs = L (allLazy (map f xs)) where
f (S None) = lazy
f (S (Product ys)) = lp ys
f Bottom = Absent
f (Error None) = lazy
f (Error (Product xs)) = lp xs
f x = x
{-
sp s = sp' True s where
sp' True [] = S None
sp' False [] = S (Product s)
sp' allLazy (L _:rs) = sp' allLazy rs
sp' _ (Bottom:_) = Error (Product s)
sp' _ (_:rs) = sp' False rs
-}
instance Lattice DemandType where
lub (env :=> ts) (env' :=> ts') | length ts < length ts' = (env `lub` env') :=> strictList (zipWith lub (ts ++ repeat lazy) ts')
| otherwise = (env `lub` env') :=> strictList (zipWith lub ts (ts' ++ repeat lazy))
glb (env :=> ts) (env' :=> ts') | length ts < length ts' = (env `glb` env') :=> strictList (zipWith glb (ts ++ repeat lazy) ts')
| otherwise = (env `glb` env') :=> strictList (zipWith glb ts (ts' ++ repeat lazy))
lazy = L None
strict = S None
err = Error None
strictList (x:xs) = x `seq` xs' `seq` (x:xs') where
xs' = strictList xs
strictList [] = []
comb _ None None = None
comb f None (Product xs) = Product $ zipWith f (repeat lazy) xs
comb f (Product xs) None = Product $ zipWith f xs (repeat lazy)
comb f (Product xs) (Product ys) = Product $ zipWith f xs ys
instance Lattice Demand where
lub Bottom s = s
lub s Bottom = s
lub Absent Absent = Absent
lub (S x) Absent = l x
lub Absent (S x) = l x
lub (L x) Absent = l x
lub Absent (L x) = l x
lub Absent sa = lazy
lub sa Absent = lazy
lub (S x) (S y) = s (comb lub x y)
lub (L x) (L y) = l (comb lub x y)
lub (Error x) (Error y) = Error (comb lub x y)
lub (S x) (L y) = l (comb lub x y)
lub (L x) (S y) = l (comb lub x y)
lub (S x) (Error y) = s (comb lub x y)
lub (Error x) (S y) = s (comb lub x y)
lub (L x) (Error y) = lazy
lub (Error x) (L y) = lazy
glb Bottom Bottom = Bottom
glb Absent sa = sa
glb sa Absent = sa
glb Bottom _ = err
glb _ Bottom = err
glb (S x) (S y) = s (comb glb x y)
glb (L x) (L y) = l (comb glb x y)
glb (Error x) (Error y) = Error (comb glb x y)
glb (S _) (Error _) = err
glb (Error _) (S _) = err
glb (S x) (L y) = s (comb glb x y)
glb (L x) (S y) = s (comb glb x y)
glb (L _) (Error _) = err
glb (Error _) (L _) = err
lenv e (DemandEnv m r) = case mlookup e m of
Nothing -> r
Just x -> x
demandEnvSingleton :: TVr -> Demand -> DemandEnv
demandEnvSingleton _ Absent = DemandEnv mempty idGlb
demandEnvSingleton t d = DemandEnv (msingleton (tvrIdent t) d) idGlb
demandEnvMinus :: DemandEnv -> TVr -> DemandEnv
demandEnvMinus (DemandEnv m r) x = DemandEnv (delete (tvrIdent x) m) r
instance Lattice DemandEnv where
lub d1@(DemandEnv m1 r1) d2@(DemandEnv m2 r2) = DemandEnv m (r1 `lub` r2) where
m = fromList [ (x,lenv x d1 `lub` lenv x d2) | x <- keys (m1 `union` m2)]
glb d1@(DemandEnv m1 r1) d2@(DemandEnv m2 r2) = DemandEnv m (r1 `glb` r2) where
m = fromList [ (x,lenv x d1 `glb` lenv x d2) | x <- keys (m1 `union` m2)]
newtype IM a = IM (Reader (Env,DataTable) a)
deriving(Monad,Functor,MonadReader (Env,DataTable))
type Env = IdMap (Either DemandSignature E)
getEnv :: IM Env
getEnv = asks fst
extEnv TVr { tvrIdent = i } _ | isEmptyId i = id
extEnv t e = local (\ (env,dt) -> (minsert (tvrIdent t) (Left e) env,dt))
extEnvE TVr { tvrIdent = i } _ | isEmptyId i = id
extEnvE t e = local (\ (env,dt) -> (minsert (tvrIdent t) (Right e) env,dt))
instance DataTableMonad IM where
getDataTable = asks snd
runIM :: Monad m => IM a -> DataTable -> m a
runIM (IM im) dt = return $ runReader im (mempty,dt)
-- returns the demand type and whether it was found in the local environment or guessed
determineDemandType :: TVr -> Demand -> IM (Either DemandType E)
determineDemandType tvr demand = do
let g (DemandSignature n dt@(DemandEnv phi _ :=> _)) = f n demand where
f 0 (S _) = dt
f n (S (Product [s])) = f (n - 1) s
f _ _ = lazify (DemandEnv phi Absent) :=> []
env <- getEnv
case mlookup (tvrIdent tvr) env of
Just (Left ds) -> return (Left $ g ds)
Just (Right e) -> return (Right e)
Nothing -> case Info.lookup (tvrInfo tvr) of
Nothing -> return (Left absType)
Just ds -> return (Left $ g ds)
extendSig (DemandSignature n1 t1) (DemandSignature n2 t2) = DemandSignature (max n1 n2) (glb t1 t2)
splitSigma [] = (lazy,[])
splitSigma (x:xs) = (x,xs)
analyze :: E -> Demand -> IM (E,DemandType)
analyze e Absent = return (e,absType)
analyze (EVar v) s = do
ddt <- determineDemandType v s
(phi :=> sigma) <- case ddt of
Left dt -> return dt
Right e -> liftM snd $ analyze e s
return (EVar v,(phi `glb` (demandEnvSingleton v s)) :=> sigma)
analyze (EAp e1 e2) s = do
(e1',phi1 :=> sigma1') <- analyze e1 (sp [s])
let (sa,sigma1) = splitSigma sigma1'
(e2',phi2 :=> sigma2) <- analyze e2 sa
return $ (EAp e1' e2',(phi1 `glb` phi2) :=> sigma1)
analyze el@(ELit lc@LitCons { litName = h, litArgs = ts@(_:_) }) (S (Product ss)) | length ss == length ts = do
dataTable <- getDataTable
case onlyChild dataTable h of
True -> do -- product type
envs <- flip mapM (zip ts ss) $ \(a,s) -> do
(_,env :=> _) <- analyze a s
return env
return (el,foldr1 glb envs :=> [])
_ -> do
rts <- mapM (\e -> analyze e lazy) ts
return (ELit lc { litArgs = fsts rts }, foldr glb absType (snds rts))
analyze (ELit lc@LitCons { litArgs = ts }) _s = do
rts <- mapM (\e -> analyze e lazy) ts
return (ELit lc { litArgs = fsts rts }, foldr glb absType (snds rts))
analyze e s | Just (t1,t2,pt) <- from_dependingOn e = do
(t1',dt1) <- analyze t1 s
(t2',dt2) <- analyze t2 lazy
return (EPrim p_dependingOn [t1',t2'] pt,dt1 `glb` dt2)
analyze (EPrim ap ts pt) _s = do
rts <- mapM (\e -> analyze e lazy) ts
return (EPrim ap (fsts rts) pt, foldr glb absType (snds rts))
analyze (EPi tvr@TVr { tvrType = t1 } t2) _s = do
(t1',dt1) <- analyze t1 lazy
(t2',dt2) <- analyze t2 lazy
return (EPi tvr { tvrType = t1' } t2',dt1 `glb` dt2)
analyze (ELam x@TVr { tvrIdent = eid } e) (S (Product [s])) | eid == emptyId = do
(e',phi :=> sigma) <- analyze e s
let sx = Absent
return (ELam (tvrInfo_u (Info.insert $! sx) x) e',demandEnvMinus phi x :=> (sx:sigma))
analyze (ELam x e) (S (Product [s])) = do
(e',phi :=> sigma) <- analyze e s
let sx = lenv (tvrIdent x) phi
return (ELam (tvrInfo_u (Info.insert $! sx) x) e',demandEnvMinus phi x :=> (sx:sigma))
analyze (ELam x e) (L (Product [s])) = do
(e',phi :=> sigma) <- analyze e s
let sx = lenv (tvrIdent x) phi
return (ELam (tvrInfo_u (Info.insert $! sx) x) e',lazify (demandEnvMinus phi x) :=> (sx:sigma))
analyze (ELam x e) (S None) = analyze (ELam x e) (S (Product [lazy])) -- simply to ensure binder is annotated
analyze (ELam x e) (L None) = analyze (ELam x e) (L (Product [lazy])) -- simply to ensure binder is annotated
analyze (ELam x e) (Error None) = analyze (ELam x e) (Error (Product [lazy])) -- simply to ensure binder is annotated
analyze e@EError {} (S _) = return (e,botType)
analyze e@EError {} (L _) = return (e,absType)
analyze ec@ECase { eCaseBind = b, eCaseAlts = [Alt lc@LitCons { litName = h, litArgs = ts } alt], eCaseDefault = Nothing } s = do
dataTable <- getDataTable
case onlyChild dataTable h of
True -> do -- product type
(alt',enva :=> siga) <- extEnvE b (eCaseScrutinee ec) $ analyze alt s
(e',enve :=> []) <- analyze (eCaseScrutinee ec) (sp [ lenv (tvrIdent t) enva | t <- ts])
let nenv = enve `glb` foldr denvDelete enva (b:ts)
return (caseUpdate $ ec { eCaseScrutinee = e', eCaseAlts = [Alt lc alt'] }, nenv :=> siga)
_ -> analyzeCase ec s
analyze ec@ECase {} s = analyzeCase ec s
analyze ELetRec { eDefs = ds, eBody = b } s = f (decomposeDs ds) [] where
f [] ds' = do
(b',phi :=> sig) <- analyze b s
let g (t,e) = (tvrInfo_u (Info.insert $! (lenv (tvrIdent t) phi)) t,e)
return (ELetRec (map g ds') b', foldr denvDelete phi (fsts ds) :=> sig)
f (Left (t,e):rs) fs =
solveDs' (Just False) [(t,e)] fixupDemandSignature (\nn -> f rs (nn ++ fs))
f (Right rg:rs) fs = do
solveDs' (Just True) rg fixupDemandSignature (\nn -> f rs (nn ++ fs))
analyze Unknown _ = return (Unknown,absType)
analyze es@ESort {} _ = return (es,absType)
analyze es@(ELit LitInt {}) _ = return (es,absType)
analyze e x = fail $ "analyze: " ++ show (e,x)
from_dependingOn (EPrim don [t1,t2] pt) | don == p_dependingOn = return (t1,t2,pt)
from_dependingOn _ = fail "not dependingOn"
lazify (DemandEnv x r) = DemandEnv (fmap f x) Absent where
f (S xs) = l xs
f Absent = Absent
f (L xs) = l xs
f Bottom = Absent
f (Error xs) = l xs
analyzeCase ec s = do
(ec',dts) <- extEnvE (eCaseBind ec) (eCaseScrutinee ec) $ runWriterT $ flip caseBodiesMapM ec $ \e -> do
(ne,dt) <- lift $ analyze e s
tell [dt]
return ne
(ecs,env :=> _) <- analyze (eCaseScrutinee ec') strict
let enva :=> siga = foldr1 lub dts
let nenv = foldr denvDelete (glb enva env) (caseBinds ec')
return (caseUpdate $ ec' {eCaseScrutinee = ecs},nenv :=> siga)
denvDelete x (DemandEnv m r) = DemandEnv (delete (tvrIdent x) m) r
topAnalyze :: TVr -> E -> IM (E,DemandSignature)
topAnalyze tvr e | getProperty prop_PLACEHOLDER tvr = return (e,DemandSignature 0 absType)
topAnalyze _tvr e = clam e strict 0 where
clam (ELam _ x) s n = clam x (sp [s]) (n + 1)
clam _ s n = do
(e,dt) <- analyze e s
return (e,DemandSignature n dt)
fixupDemandSignature (DemandSignature n (DemandEnv _ r :=> dt)) = DemandSignature n (DemandEnv mempty r :=> dt)
shouldBind ELit {} = True
shouldBind EVar {} = True
shouldBind EPi {} = True
shouldBind _ = False
solveDs' :: (Maybe Bool) -> [(TVr,E)] -> (DemandSignature -> DemandSignature) -> ([(TVr,E)] -> IM a) -> IM a
solveDs' Nothing ds fixup wdone = do
let f (Left d:rs) xs = solveDs' (Just False) [d] fixup (\nds -> f rs (nds ++ xs))
f (Right ds:rs) xs = solveDs' (Just True) ds fixup (\nds -> f rs (nds ++ xs))
f [] xs = wdone xs
f (decomposeDs ds) []
solveDs' (Just False) [(t,e)] fixup wdone | shouldBind e = do
(ne,ds) <- topAnalyze t e
extEnvE t e $ wdone [(tvrInfo_u (Info.insert (fixup ds)) t,ne)]
solveDs' (Just False) [(t,e)] fixup wdone = do
(ne,ds) <- topAnalyze t e
extEnv t ds $ wdone [(tvrInfo_u (Info.insert (fixup ds)) t,ne)]
--solveDs' (Just False) ds fixup wdone = solveDs' Nothing ds fixup wdone
solveDs' (Just False) ds fixup wdone = error "solveDs' (Just False) called with more than one definition"
solveDs' (Just True) ds fixup wdone = trace "solveDs': jt" $ do
let ds' = [ ((t,e),sig) | (t,e) <- ds, let sig = maybe absSig id (Info.lookup (tvrInfo t))]
g 0 _ [] ds = trace "gdonetout" $ wdone [ (tvrInfo_u (Info.insert $! (fixup sig)) t,e) | ((t,e),sig) <- ds ]
g _ False [] ds = trace "gdone1" $ wdone [ (tvrInfo_u (Info.insert $! (fixup sig)) t,e) | ((t,e),sig) <- ds ]
g n True [] ds = do
(oe,dt) <- ask
let nenv = fromList [ (tvrIdent t,Left s) | ((t,_),s) <- ds, not (isEmptyId (tvrIdent t))] `Util.SetLike.union` oe
local (const (nenv,dt)) $ trace ("grepeating: " ++ show (length ds)) $ g (n - 1) False ds []
g n ch (((t,e),sig):rs) fs = do
(ne,sig') <- topAnalyze t e
let sig'' = sig `extendSig` sig'
--(if sig'' /= sig then trace ("signe: " ++ show(tvrIdent t,sig)) else id) $
g n (ch || (sig'' /= sig)) rs (((t,ne),sig''):fs)
g (5::Int) True [] ds'
# NOINLINE analyzeProgram #
analyzeProgram prog = do
let ds = programDs prog
nds <- runIM (solveDs' Nothing ds fixupDemandSignature return) (progDataTable prog)
--flip mapM_ nds $ \ (t,_) ->
" strictness : " + + pprint t + + " : " + + show ( maybe absSig i d $ Info.lookup ( tvrInfo t ) )
return $ programSetDs' nds prog
----------------------------
-- show and pprint instances
----------------------------
instance Show Demand where
showsPrec _ Bottom = ("_|_" ++)
showsPrec _ Absent = ('A':)
showsPrec _ (L None) = ('L':)
showsPrec _ (L (Product ds)) = showString "L(" . foldr (.) id (map shows ds) . showString ")"
showsPrec _ (S None) = ('S':)
showsPrec _ (S (Product ds)) = showString "S(" . foldr (.) id (map shows ds) . showString ")"
showsPrec _ (Error None) = showString "Err"
showsPrec _ (Error (Product ds)) = showString "Err(" . foldr (.) id (map shows ds) . showString ")"
instance DocLike d => PPrint d Demand where
pprint demand = tshow demand
instance Show DemandType where
showsPrec _ (DemandEnv e Absent :=> d) | isEmpty e = shows d
showsPrec _ (env :=> ds) = shows env . showString " :=> " . shows ds
instance Show DemandEnv where
showsPrec _ (DemandEnv m Absent) = showString "{" . foldr (.) id (intersperse (showString ",") [ showString (pprint t) . showString " -> " . shows v | (t,v) <- idMapToList m]) . showString "}"
showsPrec _ (DemandEnv _ Bottom) = showString "_|_"
showsPrec _ (DemandEnv m demand) = showString "{" . shows demand . showString " - " . foldr (.) id (intersperse (showString ",") [ showString (pprint t) . showString " -> " . shows v | (t,v) <- idMapToList m]) . showString "}"
instance Show DemandSignature where
showsPrec _ (DemandSignature n dt) = showString "<" . shows n . showString "," . shows dt . showString ">"
| null | https://raw.githubusercontent.com/jimcrayne/jhc/1ff035af3d697f9175f8761c8d08edbffde03b4e/src/E/Demand.hs | haskell | import Debug.Trace
always diverges
lazy
strict
diverges, might use arguments
Not used
! derive: Binary !
! derive: Binary !
! derive: Binary !
! derive: Binary !
# UNPACK #
! derive: Binary !
lazySig = DemandSignature 0 lazyType
Sp [L .. L] = S
Sp [.. _|_ ..] = _|_
None
sp s = sp' True s where
sp' True [] = S None
sp' False [] = S (Product s)
sp' allLazy (L _:rs) = sp' allLazy rs
sp' _ (Bottom:_) = Error (Product s)
sp' _ (_:rs) = sp' False rs
returns the demand type and whether it was found in the local environment or guessed
product type
simply to ensure binder is annotated
simply to ensure binder is annotated
simply to ensure binder is annotated
product type
solveDs' (Just False) ds fixup wdone = solveDs' Nothing ds fixup wdone
(if sig'' /= sig then trace ("signe: " ++ show(tvrIdent t,sig)) else id) $
flip mapM_ nds $ \ (t,_) ->
--------------------------
show and pprint instances
-------------------------- | module E.Demand(
Demand(..),
DemandSignature(..),
DemandType(..),
SubDemand(..),
analyzeProgram,
absSig,
lazy
) where
import Control.Monad.Reader
import Control.Monad.Writer hiding(Product(..))
import Data.Binary
import Data.List hiding(union,delete)
import Data.Typeable
import DataConstructors
import Doc.DocLike
import Doc.PPrint
import E.E
import E.Program
import GenUtil
import Info.Types
import Name.Id
import Util.HasSize
import Util.SetLike
import qualified Info.Info as Info
trace _ x = x
data Demand =
deriving(Eq,Ord,Typeable)
data SubDemand = None | Product ![Demand]
deriving(Eq,Ord)
data DemandEnv = DemandEnv !(IdMap Demand) !Demand
deriving(Eq,Ord)
data DemandType = (:=>) !DemandEnv ![Demand]
deriving(Eq,Ord)
deriving(Eq,Ord,Typeable)
idGlb = Absent
absType = (DemandEnv mempty idGlb) :=> []
botType = ( DemandEnv ) : = > [ ]
botType = (DemandEnv mempty Bottom) :=> []
lazyType = ( DemandEnv lazy ) : = > [ ]
absSig = DemandSignature 0 absType
class Lattice a where
glb :: a -> a -> a
lub :: a -> a -> a
sp [] = S None
l None = L None
l (Product xs) = lp xs
s None = S None
s (Product xs) = sp xs
allLazy xs | all (== lazy) xs = None
allLazy xs = Product xs
lp [] = L None
lp xs = L (allLazy (map f xs)) where
f (S None) = lazy
f (S (Product ys)) = lp ys
f Bottom = Absent
f (Error None) = lazy
f (Error (Product xs)) = lp xs
f x = x
instance Lattice DemandType where
lub (env :=> ts) (env' :=> ts') | length ts < length ts' = (env `lub` env') :=> strictList (zipWith lub (ts ++ repeat lazy) ts')
| otherwise = (env `lub` env') :=> strictList (zipWith lub ts (ts' ++ repeat lazy))
glb (env :=> ts) (env' :=> ts') | length ts < length ts' = (env `glb` env') :=> strictList (zipWith glb (ts ++ repeat lazy) ts')
| otherwise = (env `glb` env') :=> strictList (zipWith glb ts (ts' ++ repeat lazy))
lazy = L None
strict = S None
err = Error None
strictList (x:xs) = x `seq` xs' `seq` (x:xs') where
xs' = strictList xs
strictList [] = []
comb _ None None = None
comb f None (Product xs) = Product $ zipWith f (repeat lazy) xs
comb f (Product xs) None = Product $ zipWith f xs (repeat lazy)
comb f (Product xs) (Product ys) = Product $ zipWith f xs ys
instance Lattice Demand where
lub Bottom s = s
lub s Bottom = s
lub Absent Absent = Absent
lub (S x) Absent = l x
lub Absent (S x) = l x
lub (L x) Absent = l x
lub Absent (L x) = l x
lub Absent sa = lazy
lub sa Absent = lazy
lub (S x) (S y) = s (comb lub x y)
lub (L x) (L y) = l (comb lub x y)
lub (Error x) (Error y) = Error (comb lub x y)
lub (S x) (L y) = l (comb lub x y)
lub (L x) (S y) = l (comb lub x y)
lub (S x) (Error y) = s (comb lub x y)
lub (Error x) (S y) = s (comb lub x y)
lub (L x) (Error y) = lazy
lub (Error x) (L y) = lazy
glb Bottom Bottom = Bottom
glb Absent sa = sa
glb sa Absent = sa
glb Bottom _ = err
glb _ Bottom = err
glb (S x) (S y) = s (comb glb x y)
glb (L x) (L y) = l (comb glb x y)
glb (Error x) (Error y) = Error (comb glb x y)
glb (S _) (Error _) = err
glb (Error _) (S _) = err
glb (S x) (L y) = s (comb glb x y)
glb (L x) (S y) = s (comb glb x y)
glb (L _) (Error _) = err
glb (Error _) (L _) = err
lenv e (DemandEnv m r) = case mlookup e m of
Nothing -> r
Just x -> x
demandEnvSingleton :: TVr -> Demand -> DemandEnv
demandEnvSingleton _ Absent = DemandEnv mempty idGlb
demandEnvSingleton t d = DemandEnv (msingleton (tvrIdent t) d) idGlb
demandEnvMinus :: DemandEnv -> TVr -> DemandEnv
demandEnvMinus (DemandEnv m r) x = DemandEnv (delete (tvrIdent x) m) r
instance Lattice DemandEnv where
lub d1@(DemandEnv m1 r1) d2@(DemandEnv m2 r2) = DemandEnv m (r1 `lub` r2) where
m = fromList [ (x,lenv x d1 `lub` lenv x d2) | x <- keys (m1 `union` m2)]
glb d1@(DemandEnv m1 r1) d2@(DemandEnv m2 r2) = DemandEnv m (r1 `glb` r2) where
m = fromList [ (x,lenv x d1 `glb` lenv x d2) | x <- keys (m1 `union` m2)]
newtype IM a = IM (Reader (Env,DataTable) a)
deriving(Monad,Functor,MonadReader (Env,DataTable))
type Env = IdMap (Either DemandSignature E)
getEnv :: IM Env
getEnv = asks fst
extEnv TVr { tvrIdent = i } _ | isEmptyId i = id
extEnv t e = local (\ (env,dt) -> (minsert (tvrIdent t) (Left e) env,dt))
extEnvE TVr { tvrIdent = i } _ | isEmptyId i = id
extEnvE t e = local (\ (env,dt) -> (minsert (tvrIdent t) (Right e) env,dt))
instance DataTableMonad IM where
getDataTable = asks snd
runIM :: Monad m => IM a -> DataTable -> m a
runIM (IM im) dt = return $ runReader im (mempty,dt)
determineDemandType :: TVr -> Demand -> IM (Either DemandType E)
determineDemandType tvr demand = do
let g (DemandSignature n dt@(DemandEnv phi _ :=> _)) = f n demand where
f 0 (S _) = dt
f n (S (Product [s])) = f (n - 1) s
f _ _ = lazify (DemandEnv phi Absent) :=> []
env <- getEnv
case mlookup (tvrIdent tvr) env of
Just (Left ds) -> return (Left $ g ds)
Just (Right e) -> return (Right e)
Nothing -> case Info.lookup (tvrInfo tvr) of
Nothing -> return (Left absType)
Just ds -> return (Left $ g ds)
extendSig (DemandSignature n1 t1) (DemandSignature n2 t2) = DemandSignature (max n1 n2) (glb t1 t2)
splitSigma [] = (lazy,[])
splitSigma (x:xs) = (x,xs)
analyze :: E -> Demand -> IM (E,DemandType)
analyze e Absent = return (e,absType)
analyze (EVar v) s = do
ddt <- determineDemandType v s
(phi :=> sigma) <- case ddt of
Left dt -> return dt
Right e -> liftM snd $ analyze e s
return (EVar v,(phi `glb` (demandEnvSingleton v s)) :=> sigma)
analyze (EAp e1 e2) s = do
(e1',phi1 :=> sigma1') <- analyze e1 (sp [s])
let (sa,sigma1) = splitSigma sigma1'
(e2',phi2 :=> sigma2) <- analyze e2 sa
return $ (EAp e1' e2',(phi1 `glb` phi2) :=> sigma1)
analyze el@(ELit lc@LitCons { litName = h, litArgs = ts@(_:_) }) (S (Product ss)) | length ss == length ts = do
dataTable <- getDataTable
case onlyChild dataTable h of
envs <- flip mapM (zip ts ss) $ \(a,s) -> do
(_,env :=> _) <- analyze a s
return env
return (el,foldr1 glb envs :=> [])
_ -> do
rts <- mapM (\e -> analyze e lazy) ts
return (ELit lc { litArgs = fsts rts }, foldr glb absType (snds rts))
analyze (ELit lc@LitCons { litArgs = ts }) _s = do
rts <- mapM (\e -> analyze e lazy) ts
return (ELit lc { litArgs = fsts rts }, foldr glb absType (snds rts))
analyze e s | Just (t1,t2,pt) <- from_dependingOn e = do
(t1',dt1) <- analyze t1 s
(t2',dt2) <- analyze t2 lazy
return (EPrim p_dependingOn [t1',t2'] pt,dt1 `glb` dt2)
analyze (EPrim ap ts pt) _s = do
rts <- mapM (\e -> analyze e lazy) ts
return (EPrim ap (fsts rts) pt, foldr glb absType (snds rts))
analyze (EPi tvr@TVr { tvrType = t1 } t2) _s = do
(t1',dt1) <- analyze t1 lazy
(t2',dt2) <- analyze t2 lazy
return (EPi tvr { tvrType = t1' } t2',dt1 `glb` dt2)
analyze (ELam x@TVr { tvrIdent = eid } e) (S (Product [s])) | eid == emptyId = do
(e',phi :=> sigma) <- analyze e s
let sx = Absent
return (ELam (tvrInfo_u (Info.insert $! sx) x) e',demandEnvMinus phi x :=> (sx:sigma))
analyze (ELam x e) (S (Product [s])) = do
(e',phi :=> sigma) <- analyze e s
let sx = lenv (tvrIdent x) phi
return (ELam (tvrInfo_u (Info.insert $! sx) x) e',demandEnvMinus phi x :=> (sx:sigma))
analyze (ELam x e) (L (Product [s])) = do
(e',phi :=> sigma) <- analyze e s
let sx = lenv (tvrIdent x) phi
return (ELam (tvrInfo_u (Info.insert $! sx) x) e',lazify (demandEnvMinus phi x) :=> (sx:sigma))
analyze e@EError {} (S _) = return (e,botType)
analyze e@EError {} (L _) = return (e,absType)
analyze ec@ECase { eCaseBind = b, eCaseAlts = [Alt lc@LitCons { litName = h, litArgs = ts } alt], eCaseDefault = Nothing } s = do
dataTable <- getDataTable
case onlyChild dataTable h of
(alt',enva :=> siga) <- extEnvE b (eCaseScrutinee ec) $ analyze alt s
(e',enve :=> []) <- analyze (eCaseScrutinee ec) (sp [ lenv (tvrIdent t) enva | t <- ts])
let nenv = enve `glb` foldr denvDelete enva (b:ts)
return (caseUpdate $ ec { eCaseScrutinee = e', eCaseAlts = [Alt lc alt'] }, nenv :=> siga)
_ -> analyzeCase ec s
analyze ec@ECase {} s = analyzeCase ec s
analyze ELetRec { eDefs = ds, eBody = b } s = f (decomposeDs ds) [] where
f [] ds' = do
(b',phi :=> sig) <- analyze b s
let g (t,e) = (tvrInfo_u (Info.insert $! (lenv (tvrIdent t) phi)) t,e)
return (ELetRec (map g ds') b', foldr denvDelete phi (fsts ds) :=> sig)
f (Left (t,e):rs) fs =
solveDs' (Just False) [(t,e)] fixupDemandSignature (\nn -> f rs (nn ++ fs))
f (Right rg:rs) fs = do
solveDs' (Just True) rg fixupDemandSignature (\nn -> f rs (nn ++ fs))
analyze Unknown _ = return (Unknown,absType)
analyze es@ESort {} _ = return (es,absType)
analyze es@(ELit LitInt {}) _ = return (es,absType)
analyze e x = fail $ "analyze: " ++ show (e,x)
from_dependingOn (EPrim don [t1,t2] pt) | don == p_dependingOn = return (t1,t2,pt)
from_dependingOn _ = fail "not dependingOn"
lazify (DemandEnv x r) = DemandEnv (fmap f x) Absent where
f (S xs) = l xs
f Absent = Absent
f (L xs) = l xs
f Bottom = Absent
f (Error xs) = l xs
analyzeCase ec s = do
(ec',dts) <- extEnvE (eCaseBind ec) (eCaseScrutinee ec) $ runWriterT $ flip caseBodiesMapM ec $ \e -> do
(ne,dt) <- lift $ analyze e s
tell [dt]
return ne
(ecs,env :=> _) <- analyze (eCaseScrutinee ec') strict
let enva :=> siga = foldr1 lub dts
let nenv = foldr denvDelete (glb enva env) (caseBinds ec')
return (caseUpdate $ ec' {eCaseScrutinee = ecs},nenv :=> siga)
denvDelete x (DemandEnv m r) = DemandEnv (delete (tvrIdent x) m) r
topAnalyze :: TVr -> E -> IM (E,DemandSignature)
topAnalyze tvr e | getProperty prop_PLACEHOLDER tvr = return (e,DemandSignature 0 absType)
topAnalyze _tvr e = clam e strict 0 where
clam (ELam _ x) s n = clam x (sp [s]) (n + 1)
clam _ s n = do
(e,dt) <- analyze e s
return (e,DemandSignature n dt)
fixupDemandSignature (DemandSignature n (DemandEnv _ r :=> dt)) = DemandSignature n (DemandEnv mempty r :=> dt)
shouldBind ELit {} = True
shouldBind EVar {} = True
shouldBind EPi {} = True
shouldBind _ = False
solveDs' :: (Maybe Bool) -> [(TVr,E)] -> (DemandSignature -> DemandSignature) -> ([(TVr,E)] -> IM a) -> IM a
solveDs' Nothing ds fixup wdone = do
let f (Left d:rs) xs = solveDs' (Just False) [d] fixup (\nds -> f rs (nds ++ xs))
f (Right ds:rs) xs = solveDs' (Just True) ds fixup (\nds -> f rs (nds ++ xs))
f [] xs = wdone xs
f (decomposeDs ds) []
solveDs' (Just False) [(t,e)] fixup wdone | shouldBind e = do
(ne,ds) <- topAnalyze t e
extEnvE t e $ wdone [(tvrInfo_u (Info.insert (fixup ds)) t,ne)]
solveDs' (Just False) [(t,e)] fixup wdone = do
(ne,ds) <- topAnalyze t e
extEnv t ds $ wdone [(tvrInfo_u (Info.insert (fixup ds)) t,ne)]
solveDs' (Just False) ds fixup wdone = error "solveDs' (Just False) called with more than one definition"
solveDs' (Just True) ds fixup wdone = trace "solveDs': jt" $ do
let ds' = [ ((t,e),sig) | (t,e) <- ds, let sig = maybe absSig id (Info.lookup (tvrInfo t))]
g 0 _ [] ds = trace "gdonetout" $ wdone [ (tvrInfo_u (Info.insert $! (fixup sig)) t,e) | ((t,e),sig) <- ds ]
g _ False [] ds = trace "gdone1" $ wdone [ (tvrInfo_u (Info.insert $! (fixup sig)) t,e) | ((t,e),sig) <- ds ]
g n True [] ds = do
(oe,dt) <- ask
let nenv = fromList [ (tvrIdent t,Left s) | ((t,_),s) <- ds, not (isEmptyId (tvrIdent t))] `Util.SetLike.union` oe
local (const (nenv,dt)) $ trace ("grepeating: " ++ show (length ds)) $ g (n - 1) False ds []
g n ch (((t,e),sig):rs) fs = do
(ne,sig') <- topAnalyze t e
let sig'' = sig `extendSig` sig'
g n (ch || (sig'' /= sig)) rs (((t,ne),sig''):fs)
g (5::Int) True [] ds'
# NOINLINE analyzeProgram #
analyzeProgram prog = do
let ds = programDs prog
nds <- runIM (solveDs' Nothing ds fixupDemandSignature return) (progDataTable prog)
" strictness : " + + pprint t + + " : " + + show ( maybe absSig i d $ Info.lookup ( tvrInfo t ) )
return $ programSetDs' nds prog
instance Show Demand where
showsPrec _ Bottom = ("_|_" ++)
showsPrec _ Absent = ('A':)
showsPrec _ (L None) = ('L':)
showsPrec _ (L (Product ds)) = showString "L(" . foldr (.) id (map shows ds) . showString ")"
showsPrec _ (S None) = ('S':)
showsPrec _ (S (Product ds)) = showString "S(" . foldr (.) id (map shows ds) . showString ")"
showsPrec _ (Error None) = showString "Err"
showsPrec _ (Error (Product ds)) = showString "Err(" . foldr (.) id (map shows ds) . showString ")"
instance DocLike d => PPrint d Demand where
pprint demand = tshow demand
instance Show DemandType where
showsPrec _ (DemandEnv e Absent :=> d) | isEmpty e = shows d
showsPrec _ (env :=> ds) = shows env . showString " :=> " . shows ds
instance Show DemandEnv where
showsPrec _ (DemandEnv m Absent) = showString "{" . foldr (.) id (intersperse (showString ",") [ showString (pprint t) . showString " -> " . shows v | (t,v) <- idMapToList m]) . showString "}"
showsPrec _ (DemandEnv _ Bottom) = showString "_|_"
showsPrec _ (DemandEnv m demand) = showString "{" . shows demand . showString " - " . foldr (.) id (intersperse (showString ",") [ showString (pprint t) . showString " -> " . shows v | (t,v) <- idMapToList m]) . showString "}"
instance Show DemandSignature where
showsPrec _ (DemandSignature n dt) = showString "<" . shows n . showString "," . shows dt . showString ">"
|
5049d52b7ad95d63bd52bca54efab9a52a3e7cb4c9e4facf44e68e5b96dc8950 | tweag/lagoon | ColumnSpec.hs | Copyright 2020 Pfizer Inc.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
-- -2.0
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE GeneralizedNewtypeDeriving #
module Lagoon.Interface.ColumnSpec (
ColumnIx(..)
, ColumnSpec(..)
, Column_(..)
, Column
, ColumnType(..)
, ForeignSpec(..)
) where
import Data.Aeson
import Data.Text (Text)
import Web.HttpApiData
import qualified Text.PrettyPrint as PP
import Lagoon.Interface.ColumnType
import Lagoon.Interface.DB
import Lagoon.Interface.Pretty
------------------------------------------------------------------------------
Column specification
------------------------------------------------------------------------------
Column specification
-------------------------------------------------------------------------------}
newtype ColumnIx = ColumnIx Ix
deriving (Show, ToJSON, FromJSON, FromHttpApiData, ToHttpApiData)
-- | Information about the columns in a view
newtype ColumnSpec = ColumnSpec { columnSpecToList :: [Column] }
deriving Eq
-- | TODO: Maybe move this out of here
data ForeignSpec = ForeignSpec {
pointingColumn :: ColumnName
, referencedTable :: TableName
, referencedColumn :: ColumnName }
-- | Information about a single column
--
-- The type argument @a@ is used for the column view name; we use this for
-- sanitization.
data Column_ a = Column {
columnName :: ColumnName
, columnHeader :: Maybe Text
, columnType :: ColumnType
, columnInView :: a
}
deriving (Functor, Eq)
type Column = Column_ ColumnName
{-------------------------------------------------------------------------------
Pretty-printing
-------------------------------------------------------------------------------}
instance Pretty ColumnSpec where
pretty (ColumnSpec spec) = PP.vcat $ "\tType\tName" : map aux spec
where
aux :: Column -> Doc
aux Column{..} = PP.hcat $ PP.punctuate "\t" [
pretty columnName
, pretty columnType
, case columnHeader of
Nothing -> "(no header)"
Just hdr -> pretty hdr
PP.<+> PP.parens (pretty columnInView)
]
{-------------------------------------------------------------------------------
JSON
-------------------------------------------------------------------------------}
instance ToJSON a => ToJSON (Column_ a) where
toJSON Column{..} = object [
"name" .= columnName
, "header" .= columnHeader
, "type" .= columnType
, "inView" .= columnInView
]
instance FromJSON a => FromJSON (Column_ a) where
parseJSON (Object o) =
Column <$> o .: "name"
<*> o .: "header"
<*> o .: "type"
<*> o .: "inView"
parseJSON _ = fail "(ColumnSpec.hs) Column_: no parse"
instance ToJSON ColumnSpec where
toJSON (ColumnSpec cols) = toJSON cols
instance FromJSON ColumnSpec where
parseJSON v = ColumnSpec <$> (parseJSON v)
| null | https://raw.githubusercontent.com/tweag/lagoon/2ef0440db810f4f45dbed160b369daf41d92bfa4/src/interface/src/Lagoon/Interface/ColumnSpec.hs | haskell | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
# LANGUAGE OverloadedStrings #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
| Information about the columns in a view
| TODO: Maybe move this out of here
| Information about a single column
The type argument @a@ is used for the column view name; we use this for
sanitization.
------------------------------------------------------------------------------
Pretty-printing
------------------------------------------------------------------------------
------------------------------------------------------------------------------
JSON
------------------------------------------------------------------------------ | Copyright 2020 Pfizer Inc.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
# LANGUAGE GeneralizedNewtypeDeriving #
module Lagoon.Interface.ColumnSpec (
ColumnIx(..)
, ColumnSpec(..)
, Column_(..)
, Column
, ColumnType(..)
, ForeignSpec(..)
) where
import Data.Aeson
import Data.Text (Text)
import Web.HttpApiData
import qualified Text.PrettyPrint as PP
import Lagoon.Interface.ColumnType
import Lagoon.Interface.DB
import Lagoon.Interface.Pretty
Column specification
Column specification
newtype ColumnIx = ColumnIx Ix
deriving (Show, ToJSON, FromJSON, FromHttpApiData, ToHttpApiData)
newtype ColumnSpec = ColumnSpec { columnSpecToList :: [Column] }
deriving Eq
data ForeignSpec = ForeignSpec {
pointingColumn :: ColumnName
, referencedTable :: TableName
, referencedColumn :: ColumnName }
data Column_ a = Column {
columnName :: ColumnName
, columnHeader :: Maybe Text
, columnType :: ColumnType
, columnInView :: a
}
deriving (Functor, Eq)
type Column = Column_ ColumnName
instance Pretty ColumnSpec where
pretty (ColumnSpec spec) = PP.vcat $ "\tType\tName" : map aux spec
where
aux :: Column -> Doc
aux Column{..} = PP.hcat $ PP.punctuate "\t" [
pretty columnName
, pretty columnType
, case columnHeader of
Nothing -> "(no header)"
Just hdr -> pretty hdr
PP.<+> PP.parens (pretty columnInView)
]
instance ToJSON a => ToJSON (Column_ a) where
toJSON Column{..} = object [
"name" .= columnName
, "header" .= columnHeader
, "type" .= columnType
, "inView" .= columnInView
]
instance FromJSON a => FromJSON (Column_ a) where
parseJSON (Object o) =
Column <$> o .: "name"
<*> o .: "header"
<*> o .: "type"
<*> o .: "inView"
parseJSON _ = fail "(ColumnSpec.hs) Column_: no parse"
instance ToJSON ColumnSpec where
toJSON (ColumnSpec cols) = toJSON cols
instance FromJSON ColumnSpec where
parseJSON v = ColumnSpec <$> (parseJSON v)
|
5260c06d1f9a7dff2bdacb3a547966a3aef0856cf0bcfcb019f5d1c13794ee6b | celsobonutti/real_world_ocaml | nested.ml | open Base
module Username : sig
type t
val of_string : string -> t
val to_string : t -> string
val (=) : t -> t -> bool
end = struct
type t = string
let of_string x = x
let to_string x = x
let (=) = String.(=)
end
| null | https://raw.githubusercontent.com/celsobonutti/real_world_ocaml/b829900f33575deb9a358f6defff7c949071be0e/ch4/nested.ml | ocaml | open Base
module Username : sig
type t
val of_string : string -> t
val to_string : t -> string
val (=) : t -> t -> bool
end = struct
type t = string
let of_string x = x
let to_string x = x
let (=) = String.(=)
end
| |
0ccf59004b19aa6419cf57529be6b026f86ed73cae0c5322bf8f22ee727c489b | cgohla/pureshell | Lower.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE KindSignatures #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PolyKinds #
# LANGUAGE TypeApplications #
module Language.PureShell.Combinatory.Lower where
import qualified Language.PureShell.Combinatory.CodeGen as C
import qualified Language.PureShell.Combinatory.Context as C
import qualified Language.PureShell.Combinatory.IR as C
import qualified Language.PureShell.Identifiers as Ids
import qualified Language.PureShell.Procedural.IR as P
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C8 (pack)
import Data.List.Extra (snoc)
import Data.Singletons
import qualified Data.Text as T (pack)
import qualified Data.Text.Encoding as T (encodeUtf8)
import Polysemy (Member, Sem, run)
import Polysemy.Writer (Writer, runWriter,
tell)
type TopLevelFunDefs = [P.FunDef ByteString]
-- | Presumably the main entry point in this module
lowerModule :: ( Ids.IdsKind ids)
=> C.Module ids (ss :: [ids]) -> P.Module ByteString
lowerModule = C.moduleFold f
where
f n b = n <> (P.Module $ lowerOneTopLevelDefn b)
lowerOneTopLevelDefn :: forall ids (s :: ids). (Ids.IdsKind ids) => C.TopLevelBind ids s -> TopLevelFunDefs
lowerOneTopLevelDefn = uncurry snoc . run . runWriter @TopLevelFunDefs . Ids.runLocalNames @Ids.SimpleBashFunName . lowerTopLevelBind
-- NOTE the function names effect might need to be run at the top of
-- the module, to guarantee function names are unique throughout it.
lowerTopLevelBind :: ( Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.TopLevelBind ids s -> Sem r (P.FunDef ByteString)
lowerTopLevelBind (C.Bind i e) = do
fn <- Ids.mkName @Ids.SimpleBashFunName $ C.simpleBashFunName i
let lowerExpr' = Ids.runLocalNames @Ids.LocalBashVarName . lowerExpr
case e of
C.Abs c f -> P.FunDef fn <$> ps <*> lowerExpr' f
where
ps = Ids.runLocalNames @Ids.LocalBashVarName $ varNamesFromContext c
_ -> P.FunDef fn [] <$> t
where
t = lowerExpr' e
lowerExprLiteral :: ( Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Literal ids c -> Sem r (P.Sequence ByteString)
lowerExprLiteral = \case
C.StringLiteral s -> literal s
C.NumericLiteral (Left n) -> literal $ T.pack $ show n
_ -> error "not implemented"
where
literal = C.expression . P.Literal . T.encodeUtf8
lowerExprApp :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Expr ids c -> C.ExprList ids d -> Sem r (P.Sequence ByteString)
lowerExprApp e es = do
TODO this produces wrong results in the case of Prim
TODO if an es is a Var then we should use it directly
TODO if e is a Prim we may want to use the literal name
C.sequence (a:as) b
exprEvalAssign :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Expr ids c -> Sem r (Ids.LocalBashVarName, P.Assignment ByteString)
exprEvalAssign e = do
s <- lowerExpr e
v <- Ids.mkName @Ids.LocalBashVarName "r"
pure $ (v, P.Assignment v s)
chainExprEval :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> Sem r ([Ids.LocalBashVarName], [P.Assignment ByteString])
-> C.Expr ids c -> Sem r ([Ids.LocalBashVarName], [P.Assignment ByteString])
chainExprEval as e = do
(vs, bs) <- as
(v, a) <- exprEvalAssign e
pure $ ([v] <> vs, [a] <> bs)
lowerExprPrim : : String - > Sem r ( P.Sequence ByteString )
-- lowerExprPrim n = pure $ P.Sequence [] $ P.Application (P.ClosureFromName $ Ids.SimpleBashFunName n') []
-- where
-- n' = C8.pack n -- This is very wrong
lowerExprPrim :: String -> Sem r (P.Sequence ByteString)
lowerExprPrim n = pure $ P.Sequence [] $ P.Literal n'
where
n' = C8.pack n -- This is very wrong
TODO this change does n't solve the problem . We need a Prim value in
-- Procedural as well.
TODO It might be worthwhile to decouple the type for value literals
and parametrize Combinatory and Procedural over it . ( This is a
-- separate issue from the above).
varNamesFromContext :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Ids.IdsKind ids)
=> Sing (c :: C.Context ids)
-> Sem r [Ids.LocalBashVarName]
varNamesFromContext c = do
let mkName' ns s = ns <> [Ids.mkName @Ids.LocalBashVarName $ C.localBashVarName s]
sequence $ C.contextFoldl mkName' [] c
lowerExprAbs :: ( Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> Sing (c :: C.Context ids) -> C.Expr ids d -> Sem r (P.Sequence ByteString)
-- NOTE we are not actually enforcing the binding
-- constraint here anymore. maybe there is an easy way to
-- do that
lowerExprAbs c e = Ids.runLocalNames @Ids.LocalBashVarName $ do
TODO this might be bad , i.e. , running a concrete implementation
-- in business code. Use 'bracket' and 'resources' here.
n <- Ids.mkName @Ids.SimpleBashFunName "lambda"
TODO generalize , so we can include a better name
vs <- varNamesFromContext c
s <- lowerExpr e
let f = P.FunDef n vs s
tell @TopLevelFunDefs [f]
TODO the empty list seems wrong
C.sequence [] a
lowerExprLet :: ( Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Bind ids (s :: ids) c -> C.Expr ids d -> Sem r (P.Sequence ByteString)
lowerExprLet (C.Bind i e) f = do
let mkName' = Ids.mkName @Ids.LocalBashVarName . C.localBashVarName
n <- mkName' i
b <- P.Assignment n <$> lowerExpr e
P.Sequence s a <- lowerExpr f
TODO this is not quite right . f needs to be able to capture
-- i. but if i is already taken, we will get a different name back,
-- and the capute would fail.
--
-- the question seems to be how to shadow correctly. we could use a
-- naming effect to make sure that variable references in f are
-- resolved correctly.
C.sequence (b:s) a
TODO implement a renaming effect
lowerExprVar :: (Ids.IdsKind ids) => Sing (s :: ids) -> Sem r (P.Sequence ByteString)
lowerExprVar = C.sequence [] . P.Variable . C.localBashVarName
lowerExpr :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Expr ids c -> Sem r (P.Sequence ByteString)
lowerExpr = \case
C.Var n -> lowerExprVar n
C.Lit l -> lowerExprLiteral l
C.App e es -> lowerExprApp e es
C.Abs c e -> lowerExprAbs c e
C.Prim n -> lowerExprPrim n
C.Let b e -> lowerExprLet b e
| null | https://raw.githubusercontent.com/cgohla/pureshell/caa3ea63da2b73765779c6389f205a5155599dd7/pureshell/Language/PureShell/Combinatory/Lower.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE GADTs #
# LANGUAGE ImpredicativeTypes #
# LANGUAGE KindSignatures #
# LANGUAGE OverloadedStrings #
| Presumably the main entry point in this module
NOTE the function names effect might need to be run at the top of
the module, to guarantee function names are unique throughout it.
lowerExprPrim n = pure $ P.Sequence [] $ P.Application (P.ClosureFromName $ Ids.SimpleBashFunName n') []
where
n' = C8.pack n -- This is very wrong
This is very wrong
Procedural as well.
separate issue from the above).
NOTE we are not actually enforcing the binding
constraint here anymore. maybe there is an easy way to
do that
in business code. Use 'bracket' and 'resources' here.
i. but if i is already taken, we will get a different name back,
and the capute would fail.
the question seems to be how to shadow correctly. we could use a
naming effect to make sure that variable references in f are
resolved correctly. | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE PolyKinds #
# LANGUAGE TypeApplications #
module Language.PureShell.Combinatory.Lower where
import qualified Language.PureShell.Combinatory.CodeGen as C
import qualified Language.PureShell.Combinatory.Context as C
import qualified Language.PureShell.Combinatory.IR as C
import qualified Language.PureShell.Identifiers as Ids
import qualified Language.PureShell.Procedural.IR as P
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C8 (pack)
import Data.List.Extra (snoc)
import Data.Singletons
import qualified Data.Text as T (pack)
import qualified Data.Text.Encoding as T (encodeUtf8)
import Polysemy (Member, Sem, run)
import Polysemy.Writer (Writer, runWriter,
tell)
type TopLevelFunDefs = [P.FunDef ByteString]
lowerModule :: ( Ids.IdsKind ids)
=> C.Module ids (ss :: [ids]) -> P.Module ByteString
lowerModule = C.moduleFold f
where
f n b = n <> (P.Module $ lowerOneTopLevelDefn b)
lowerOneTopLevelDefn :: forall ids (s :: ids). (Ids.IdsKind ids) => C.TopLevelBind ids s -> TopLevelFunDefs
lowerOneTopLevelDefn = uncurry snoc . run . runWriter @TopLevelFunDefs . Ids.runLocalNames @Ids.SimpleBashFunName . lowerTopLevelBind
lowerTopLevelBind :: ( Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.TopLevelBind ids s -> Sem r (P.FunDef ByteString)
lowerTopLevelBind (C.Bind i e) = do
fn <- Ids.mkName @Ids.SimpleBashFunName $ C.simpleBashFunName i
let lowerExpr' = Ids.runLocalNames @Ids.LocalBashVarName . lowerExpr
case e of
C.Abs c f -> P.FunDef fn <$> ps <*> lowerExpr' f
where
ps = Ids.runLocalNames @Ids.LocalBashVarName $ varNamesFromContext c
_ -> P.FunDef fn [] <$> t
where
t = lowerExpr' e
lowerExprLiteral :: ( Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Literal ids c -> Sem r (P.Sequence ByteString)
lowerExprLiteral = \case
C.StringLiteral s -> literal s
C.NumericLiteral (Left n) -> literal $ T.pack $ show n
_ -> error "not implemented"
where
literal = C.expression . P.Literal . T.encodeUtf8
lowerExprApp :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Expr ids c -> C.ExprList ids d -> Sem r (P.Sequence ByteString)
lowerExprApp e es = do
TODO this produces wrong results in the case of Prim
TODO if an es is a Var then we should use it directly
TODO if e is a Prim we may want to use the literal name
C.sequence (a:as) b
exprEvalAssign :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> C.Expr ids c -> Sem r (Ids.LocalBashVarName, P.Assignment ByteString)
exprEvalAssign e = do
s <- lowerExpr e
v <- Ids.mkName @Ids.LocalBashVarName "r"
pure $ (v, P.Assignment v s)
chainExprEval :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> Sem r ([Ids.LocalBashVarName], [P.Assignment ByteString])
-> C.Expr ids c -> Sem r ([Ids.LocalBashVarName], [P.Assignment ByteString])
chainExprEval as e = do
(vs, bs) <- as
(v, a) <- exprEvalAssign e
pure $ ([v] <> vs, [a] <> bs)
lowerExprPrim : : String - > Sem r ( P.Sequence ByteString )
lowerExprPrim :: String -> Sem r (P.Sequence ByteString)
lowerExprPrim n = pure $ P.Sequence [] $ P.Literal n'
where
TODO this change does n't solve the problem . We need a Prim value in
TODO It might be worthwhile to decouple the type for value literals
and parametrize Combinatory and Procedural over it . ( This is a
varNamesFromContext :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
, Ids.IdsKind ids)
=> Sing (c :: C.Context ids)
-> Sem r [Ids.LocalBashVarName]
varNamesFromContext c = do
let mkName' ns s = ns <> [Ids.mkName @Ids.LocalBashVarName $ C.localBashVarName s]
sequence $ C.contextFoldl mkName' [] c
lowerExprAbs :: ( Member (Ids.LocalNames Ids.SimpleBashFunName) r
, Member (Writer TopLevelFunDefs) r
, Ids.IdsKind ids)
=> Sing (c :: C.Context ids) -> C.Expr ids d -> Sem r (P.Sequence ByteString)
lowerExprAbs c e = Ids.runLocalNames @Ids.LocalBashVarName $ do
TODO this might be bad , i.e. , running a concrete implementation
n <- Ids.mkName @Ids.SimpleBashFunName "lambda"
TODO generalize , so we can include a better name
vs <- varNamesFromContext c
s <- lowerExpr e
let f = P.FunDef n vs s
tell @TopLevelFunDefs [f]
TODO the empty list seems wrong
C.sequence [] a
-- | Lower a let binding: evaluate the bound expression into an assignment
-- to a variable named after the binder, then prepend that assignment to
-- the statements of the lowered body.
lowerExprLet :: ( Member (Ids.LocalNames Ids.SimpleBashFunName) r
                , Member (Ids.LocalNames Ids.LocalBashVarName) r
                , Member (Writer TopLevelFunDefs) r
                , Ids.IdsKind ids)
             => C.Bind ids (s :: ids) c -> C.Expr ids d -> Sem r (P.Sequence ByteString)
lowerExprLet (C.Bind i e) f = do
  let mkName' = Ids.mkName @Ids.LocalBashVarName . C.localBashVarName
  n <- mkName' i
  b <- P.Assignment n <$> lowerExpr e
  P.Sequence s a <- lowerExpr f
  -- TODO this is not quite right. f needs to be able to capture
  C.sequence (b:s) a
-- TODO implement a renaming effect
-- | Lower a variable reference to a bare bash variable expansion.
lowerExprVar :: (Ids.IdsKind ids) => Sing (s :: ids) -> Sem r (P.Sequence ByteString)
lowerExprVar v = C.sequence [] (P.Variable (C.localBashVarName v))
-- | Dispatch lowering over each combinatory expression constructor.
lowerExpr :: ( Member (Ids.LocalNames Ids.LocalBashVarName) r
             , Member (Ids.LocalNames Ids.SimpleBashFunName) r
             , Member (Writer TopLevelFunDefs) r
             , Ids.IdsKind ids)
          => C.Expr ids c -> Sem r (P.Sequence ByteString)
lowerExpr expr = case expr of
  C.Var n    -> lowerExprVar n
  C.Lit l    -> lowerExprLiteral l
  C.App e es -> lowerExprApp e es
  C.Abs c e  -> lowerExprAbs c e
  C.Prim n   -> lowerExprPrim n
  C.Let b e  -> lowerExprLet b e
|
e874407b67bcd355ddb32501f92b5dae86b1981bdae1747d2a4231775cc394b2 | PLSysSec/FaCT | pseudocode.ml | open Util
open Pos
open Err
open Tast
(* Local shorthands for the formatting helpers used throughout this file. *)
let sprintf = Printf.sprintf
let concat = String.concat
(* Pretty-printer that renders a typed FaCT module ([fact_module]) as
   human-readable pseudocode.  One [method] per AST node family; [visit]
   is the self name used for recursive dispatch.  [_indent] tracks the
   current block nesting depth for layout. *)
class pseudocode (m : fact_module) =
  object (visit)
    (* Nesting depth; starts at -1 so the outermost scope prints at 0. *)
    val mutable _indent : int = -1
    (* Module metadata, extracted once from [m]. *)
    val _minfo : module_info =
      let Module(_,_,minfo) = m in minfo

    (* Newline plus two spaces per indent level, offset by [n]. *)
    method _prindent n =
      "\n" ^ (String.make ((_indent + n) * 2) ' ')

    (* Render the whole module: struct declarations, then functions,
       each group separated by blank lines. *)
    method fact_module () =
      let Module(sdecs,fdecs,_) = m in
      let sdecs' = List.map visit#sdec sdecs in
      let sdecs' = concat "\n\n" sdecs' in
      let fdecs' = List.map visit#fdec fdecs in
      let fdecs' = concat "\n\n" fdecs' in
      sdecs' ^ "\n\n" ^ fdecs'

    (* Struct declaration, C-style. *)
    method sdec =
      xwrap @@ fun p ->
      fun (StructDef (name,fields)) ->
        let fields' = List.map visit#field fields in
        sprintf
          "struct %s {
%s
};"
          name.data
          (concat "\n" fields')

    (* One struct field: "<type> <name>;". *)
    method field =
      xwrap @@ fun p ->
      fun (Field (x,bty)) ->
        sprintf
          " %s %s;"
          (visit#bty bty)
          x.data

    (* Function declarations: ordinary definitions, C externs, and
       stdlib-provided functions. *)
    method fdec =
      xwrap @@ fun p -> function
        | FunDec(fn,ft,rt,params,body) ->
          let params' = concat "," @@ List.map visit#param params in
          let body' = visit#scoped body in
          sprintf "%s%s %s(%s) %s"
            (visit#fnattr ft)
            (visit#rty rt)
            fn.data
            params'
            body'
        | CExtern(fn,ft,rt,params) ->
          let params' = concat "," @@ List.map visit#param params in
          sprintf "extern %s%s %s(%s);"
            (visit#cfnattr ft)
            (visit#rty rt)
            fn.data
            params'
        | StdlibFn(code,ft,rt,params) ->
          let params' = concat "," @@ List.map visit#param params in
          sprintf "stdlib %s%s %s(%s);"
            (visit#fnattr ft)
            (visit#rty rt)
            (Cstdlib.name_of code).data
            params'

    (* Function attributes: export plus inlining hint. *)
    method fnattr { export; inline; } =
      sprintf "%s%s"
        (if export then "export " else "")
        (match inline with
         | Default -> ""
         | Always -> "inline "
         | Never -> "noinline ")

    (* Attributes for C extern declarations. *)
    method cfnattr { benign; } =
      sprintf "%s"
        (if benign then "benign " else "")

    (* Return type; [None] prints as void. *)
    method rty = function
      | None -> "void"
      | Some bt -> visit#bty bt

    (* One formal parameter, each on its own line. *)
    method param =
      xwrap @@ fun p -> function
        | Param (x,bty) ->
          sprintf "\n %s %s"
            (visit#bty bty)
            x.data

    (* Security label. *)
    method lbl =
      xwrap @@ fun p -> function
        | Public -> "public"
        | Secret -> "secret"

    (* Mutability of a reference. *)
    method mut =
      xwrap @@ fun p -> function
        | R -> "R"
        | W -> "W"
        | RW -> "RW"

    (* Base types with their security label; "X[bty]X" marks
       unhandled constructors. *)
    method bty =
      xwrap @@ fun p -> function
        | Bool l -> sprintf "%s bool" (visit#lbl l)
        | UInt (s,l) -> sprintf "%s uint%d" (visit#lbl l) s
        | Int (s,l) -> sprintf "%s int%d" (visit#lbl l) s
        | Ref (bt,m) -> sprintf "%s@%s" (visit#bty bt) (visit#mut m)
        | Arr (bt,lexpr,vattr) -> sprintf "%s%s[%s]" (visit#vattr vattr) (visit#bty bt) (visit#lexpr lexpr)
        | UVec (s,n,l) -> sprintf "%s uint%d<%d>" (visit#lbl l) s n
        | Struct s -> sprintf "%s" s.data
        | _ -> "X[bty]X"

    (* Same as [bty] but without the security label (used for casts). *)
    method bty_nolbl =
      xwrap @@ fun p -> function
        | Bool l -> "bool"
        | UInt (s,l) -> sprintf "uint%d" s
        | Int (s,l) -> sprintf "int%d" s
        | Ref (bt,m) -> sprintf "%s*%s" (visit#bty bt) (visit#mut m)
        | Arr (bt,lexpr,vattr) -> sprintf "%s%s[%s]" (visit#vattr vattr) (visit#bty bt) (visit#lexpr lexpr)
        | UVec (s,n,l) -> sprintf "uint%d<%d>" s n
        | _ -> "X[bty]X"

    (* Length expressions: literal or named dynamic length. *)
    method lexpr =
      xwrap @@ fun p -> function
        | LIntLiteral n -> string_of_int n
        | LDynamic x -> x.data

    (* Array variable attributes (cache alignment). *)
    method vattr = function
      | { cache_aligned=true } -> "cacheline "
      | _ -> ""

    (* Render a block inside braces at one deeper indent level. *)
    method scoped blk =
      _indent <- _indent + 1;
      let stms = visit#block blk in
      let res = sprintf "{%s%s}"
          stms
          (if stms = "" then " " else visit#_prindent 0)
      in
      _indent <- _indent - 1;
      res

    (* Block continuation: a following block, a return, or the end. *)
    method next nxt =
      match nxt.data with
      | Block blk -> visit#block blk
      | Return e ->
        let e' = visit#expr e in
        sprintf "%sreturn %s;"
          (visit#_prindent 1)
          e'
      | VoidReturn -> (visit#_prindent 1) ^ "return;"
      | End -> ""

    (* A block is one structured statement plus its continuation. *)
    method block (blk,next) =
      match blk.data with
      | Scope blk ->
        let scoped = visit#scoped blk in
        let next = visit#next next in
        scoped ^ next
      | ListOfStuff stms ->
        let stms' = visit#stms stms in
        let next = visit#next next in
        stms' ^ next
      | If (cond,thens,elses) ->
        let cond' = visit#expr cond in
        let thens' = visit#scoped thens in
        let elses' = visit#scoped elses in
        let next = visit#next next in
        sprintf "%sif (%s) %s%s%s"
          (visit#_prindent 1)
          cond'
          thens'
          (* An empty else branch prints as "{ }" and is suppressed. *)
          (if elses' = "{ }" then "" else " else " ^ elses')
          next
      | RangeFor (x,bty,e1,e2,blk) ->
        let e1' = visit#expr e1 in
        let e2' = visit#expr e2 in
        let blk' = visit#scoped blk in
        let next = visit#next next in
        sprintf "%sfor (%s %s from %s to %s) %s%s"
          (visit#_prindent 1)
          (visit#bty bty)
          x.data
          e1'
          e2'
          blk'
          next
      | ArrayFor (x,bty,e,blk) ->
        let e' = visit#expr e in
        let blk' = visit#scoped blk in
        let next = visit#next next in
        sprintf "%sfor (%s %s in %s) %s%s"
          (visit#_prindent 1)
          (visit#bty bty)
          x.data
          e'
          blk'
          next

    (* A run of simple statements, each on its own indented line. *)
    method stms stms_ =
      concat ""
        (List.map
           (fun stm -> visit#_prindent 1 ^ visit#stm stm)
           stms_)

    (* Simple (non-structured) statements. *)
    method stm stm_ =
      match stm_.data with
      | VarDec (x,bty,e) ->
        let e' = visit#expr e in
        sprintf "%s %s = %s;"
          (visit#bty bty)
          x.data
          e'
      | FnCall (x,bty,fn,args) ->
        let args' = List.map visit#expr args in
        sprintf "%s %s = %s(%s);"
          (visit#bty bty)
          x.data
          fn.data
          (concat ", " args')
      | VoidFnCall (fn,args) ->
        let args' = List.map visit#expr args in
        sprintf "%s(%s);"
          fn.data
          (concat ", " args')
      | Assign (e1,e2) ->
        let e1' = visit#expr e1 in
        let e2' = visit#expr e2 in
        sprintf "%s := %s;"
          e1' e2'
      | Cmov (e1,cond,e2) ->
        let e1' = visit#expr e1 in
        let cond' = visit#expr cond in
        let e2' = visit#expr e2 in
        (* Conditional move: assignment guarded by cond, written "??". *)
        sprintf "%s := %s ?? %s;"
          e1' cond' e2'
      | Assume e ->
        let e' = visit#expr e in
        sprintf "assume(%s);" e'

    (* Expressions.  The type component of the pair is ignored here;
       only the syntactic form is printed. *)
    method expr (e_,_) =
      match e_.data with
      | True -> "true"
      | False -> "false"
      | IntLiteral n -> string_of_int n
      | Variable x -> x.data
      | Cast (bty,e) ->
        let e' = visit#expr e in
        sprintf "%s(%s)"
          (visit#bty_nolbl bty)
          e'
      | UnOp (op,e) ->
        let e' = visit#expr e in
        sprintf "%s%s"
          (visit#unop op)
          e'
      | BinOp (op,e1,e2) ->
        let e1' = visit#expr e1 in
        let e2' = visit#expr e2 in
        sprintf "(%s %s %s)"
          e1'
          (visit#binop op)
          e2'
      | TernOp (e1,e2,e3) ->
        let e1' = visit#expr e1 in
        let e2' = visit#expr e2 in
        let e3' = visit#expr e3 in
        sprintf "(%s ? %s : %s)"
          e1' e2' e3'
      | Select (e1,e2,e3) ->
        let e1' = visit#expr e1 in
        let e2' = visit#expr e2 in
        let e3' = visit#expr e3 in
        (* Constant-time select, written with doubled punctuation. *)
        sprintf "(%s ?? %s :: %s)"
          e1' e2' e3'
      | Declassify e ->
        let e' = visit#expr e in
        sprintf "declassify(%s)" e'
      | Classify e ->
        let e' = visit#expr e in
        sprintf "classify(%s)" e'
      | Enref e ->
        sprintf "ref %s" (visit#expr e)
      | Deref e ->
        let e' = visit#expr e in
        sprintf "*%s" e'
      | ArrayGet (e,lexpr) ->
        let e' = visit#expr e in
        let lexpr' = visit#lexpr lexpr in
        sprintf "%s[%s]"
          e' lexpr'
      | ArrayLit es ->
        let es' = List.map visit#expr es in
        sprintf "[%s]"
          (concat ", " es')
      | ArrayZeros lexpr ->
        sprintf "zeros(%s)" (visit#lexpr lexpr)
      | ArrayCopy e ->
        sprintf "clone(%s)" (visit#expr e)
      | ArrayView (e,index,len) ->
        sprintf "view(%s, %s, %s)"
          (visit#expr e)
          (visit#lexpr index)
          (visit#lexpr len)
      | VectorLit ns ->
        sprintf "<%s>"
          (concat ", " @@ List.map string_of_int ns)
      | Shuffle (e,ns) ->
        sprintf "%s<%s>"
          (visit#expr e)
          (concat "," @@ List.map string_of_int ns)
      | StructLit entries ->
        (* Struct literals are not rendered; placeholder marker. *)
        "X[structlit]X"
      | StructGet (e,field) ->
        sprintf "%s->%s"
          (visit#expr e)
          field.data
      | StringLiteral s -> sprintf "\"%s\"" s

    (* Unary operator spellings. *)
    method unop = function
      | Ast.Neg -> "-"
      | Ast.LogicalNot -> "!"
      | Ast.BitwiseNot -> "~"

    (* Binary operator spellings. *)
    method binop = function
      | Ast.Plus -> "+"
      | Ast.Minus -> "-"
      | Ast.Multiply -> "*"
      | Ast.Divide -> "/"
      | Ast.Modulo -> "%"
      | Ast.Equal -> "=="
      | Ast.NEqual -> "!="
      | Ast.GT -> ">"
      | Ast.GTE -> ">="
      | Ast.LT -> "<"
      | Ast.LTE -> "<="
      | Ast.LogicalAnd -> "&&"
      | Ast.LogicalOr -> "||"
      | Ast.BitwiseAnd -> "&"
      | Ast.BitwiseOr -> "|"
      | Ast.BitwiseXor -> "^"
      | Ast.LeftShift -> "<<"
      | Ast.RightShift -> ">>"
      | Ast.LeftRotate -> "<<<"
      | Ast.RightRotate -> ">>>"
  end
(* NOTE(review): a printer instance over an empty module; it is not used
   anywhere in this file as far as visible here — confirm whether it is
   dead code before removing. *)
let ps = new pseudocode (Module ([],[],{fmap=[]}))
(* Entry point: render an entire FaCT module as pseudocode text. *)
let transform m = (new pseudocode m)#fact_module ()
| null | https://raw.githubusercontent.com/PLSysSec/FaCT/b6820cf764de0a0f70bd54db0399ff7436bca231/src/pseudocode.ml | ocaml | open Util
open Pos
open Err
open Tast
let sprintf = Printf.sprintf
let concat = String.concat
class pseudocode (m : fact_module) =
object (visit)
val mutable _indent : int = -1
val _minfo : module_info =
let Module(_,_,minfo) = m in minfo
method _prindent n =
"\n" ^ (String.make ((_indent + n) * 2) ' ')
method fact_module () =
let Module(sdecs,fdecs,_) = m in
let sdecs' = List.map visit#sdec sdecs in
let sdecs' = concat "\n\n" sdecs' in
let fdecs' = List.map visit#fdec fdecs in
let fdecs' = concat "\n\n" fdecs' in
sdecs' ^ "\n\n" ^ fdecs'
method sdec =
xwrap @@ fun p ->
fun (StructDef (name,fields)) ->
let fields' = List.map visit#field fields in
sprintf
"struct %s {
%s
};"
name.data
(concat "\n" fields')
method field =
xwrap @@ fun p ->
fun (Field (x,bty)) ->
sprintf
" %s %s;"
(visit#bty bty)
x.data
method fdec =
xwrap @@ fun p -> function
| FunDec(fn,ft,rt,params,body) ->
let params' = concat "," @@ List.map visit#param params in
let body' = visit#scoped body in
sprintf "%s%s %s(%s) %s"
(visit#fnattr ft)
(visit#rty rt)
fn.data
params'
body'
| CExtern(fn,ft,rt,params) ->
let params' = concat "," @@ List.map visit#param params in
sprintf "extern %s%s %s(%s);"
(visit#cfnattr ft)
(visit#rty rt)
fn.data
params'
| StdlibFn(code,ft,rt,params) ->
let params' = concat "," @@ List.map visit#param params in
sprintf "stdlib %s%s %s(%s);"
(visit#fnattr ft)
(visit#rty rt)
(Cstdlib.name_of code).data
params'
method fnattr { export; inline; } =
sprintf "%s%s"
(if export then "export " else "")
(match inline with
| Default -> ""
| Always -> "inline "
| Never -> "noinline ")
method cfnattr { benign; } =
sprintf "%s"
(if benign then "benign " else "")
method rty = function
| None -> "void"
| Some bt -> visit#bty bt
method param =
xwrap @@ fun p -> function
| Param (x,bty) ->
sprintf "\n %s %s"
(visit#bty bty)
x.data
method lbl =
xwrap @@ fun p -> function
| Public -> "public"
| Secret -> "secret"
method mut =
xwrap @@ fun p -> function
| R -> "R"
| W -> "W"
| RW -> "RW"
method bty =
xwrap @@ fun p -> function
| Bool l -> sprintf "%s bool" (visit#lbl l)
| UInt (s,l) -> sprintf "%s uint%d" (visit#lbl l) s
| Int (s,l) -> sprintf "%s int%d" (visit#lbl l) s
| Ref (bt,m) -> sprintf "%s@%s" (visit#bty bt) (visit#mut m)
| Arr (bt,lexpr,vattr) -> sprintf "%s%s[%s]" (visit#vattr vattr) (visit#bty bt) (visit#lexpr lexpr)
| UVec (s,n,l) -> sprintf "%s uint%d<%d>" (visit#lbl l) s n
| Struct s -> sprintf "%s" s.data
| _ -> "X[bty]X"
method bty_nolbl =
xwrap @@ fun p -> function
| Bool l -> "bool"
| UInt (s,l) -> sprintf "uint%d" s
| Int (s,l) -> sprintf "int%d" s
| Ref (bt,m) -> sprintf "%s*%s" (visit#bty bt) (visit#mut m)
| Arr (bt,lexpr,vattr) -> sprintf "%s%s[%s]" (visit#vattr vattr) (visit#bty bt) (visit#lexpr lexpr)
| UVec (s,n,l) -> sprintf "uint%d<%d>" s n
| _ -> "X[bty]X"
method lexpr =
xwrap @@ fun p -> function
| LIntLiteral n -> string_of_int n
| LDynamic x -> x.data
method vattr = function
| { cache_aligned=true } -> "cacheline "
| _ -> ""
method scoped blk =
_indent <- _indent + 1;
let stms = visit#block blk in
let res = sprintf "{%s%s}"
stms
(if stms = "" then " " else visit#_prindent 0)
in
_indent <- _indent - 1;
res
method next nxt =
match nxt.data with
| Block blk -> visit#block blk
| Return e ->
let e' = visit#expr e in
sprintf "%sreturn %s;"
(visit#_prindent 1)
e'
| VoidReturn -> (visit#_prindent 1) ^ "return;"
| End -> ""
method block (blk,next) =
match blk.data with
| Scope blk ->
let scoped = visit#scoped blk in
let next = visit#next next in
scoped ^ next
| ListOfStuff stms ->
let stms' = visit#stms stms in
let next = visit#next next in
stms' ^ next
| If (cond,thens,elses) ->
let cond' = visit#expr cond in
let thens' = visit#scoped thens in
let elses' = visit#scoped elses in
let next = visit#next next in
sprintf "%sif (%s) %s%s%s"
(visit#_prindent 1)
cond'
thens'
(if elses' = "{ }" then "" else " else " ^ elses')
next
| RangeFor (x,bty,e1,e2,blk) ->
let e1' = visit#expr e1 in
let e2' = visit#expr e2 in
let blk' = visit#scoped blk in
let next = visit#next next in
sprintf "%sfor (%s %s from %s to %s) %s%s"
(visit#_prindent 1)
(visit#bty bty)
x.data
e1'
e2'
blk'
next
| ArrayFor (x,bty,e,blk) ->
let e' = visit#expr e in
let blk' = visit#scoped blk in
let next = visit#next next in
sprintf "%sfor (%s %s in %s) %s%s"
(visit#_prindent 1)
(visit#bty bty)
x.data
e'
blk'
next
method stms stms_ =
concat ""
(List.map
(fun stm -> visit#_prindent 1 ^ visit#stm stm)
stms_)
method stm stm_ =
match stm_.data with
| VarDec (x,bty,e) ->
let e' = visit#expr e in
sprintf "%s %s = %s;"
(visit#bty bty)
x.data
e'
| FnCall (x,bty,fn,args) ->
let args' = List.map visit#expr args in
sprintf "%s %s = %s(%s);"
(visit#bty bty)
x.data
fn.data
(concat ", " args')
| VoidFnCall (fn,args) ->
let args' = List.map visit#expr args in
sprintf "%s(%s);"
fn.data
(concat ", " args')
| Assign (e1,e2) ->
let e1' = visit#expr e1 in
let e2' = visit#expr e2 in
sprintf "%s := %s;"
e1' e2'
| Cmov (e1,cond,e2) ->
let e1' = visit#expr e1 in
let cond' = visit#expr cond in
let e2' = visit#expr e2 in
sprintf "%s := %s ?? %s;"
e1' cond' e2'
| Assume e ->
let e' = visit#expr e in
sprintf "assume(%s);" e'
method expr (e_,_) =
match e_.data with
| True -> "true"
| False -> "false"
| IntLiteral n -> string_of_int n
| Variable x -> x.data
| Cast (bty,e) ->
let e' = visit#expr e in
sprintf "%s(%s)"
(visit#bty_nolbl bty)
e'
| UnOp (op,e) ->
let e' = visit#expr e in
sprintf "%s%s"
(visit#unop op)
e'
| BinOp (op,e1,e2) ->
let e1' = visit#expr e1 in
let e2' = visit#expr e2 in
sprintf "(%s %s %s)"
e1'
(visit#binop op)
e2'
| TernOp (e1,e2,e3) ->
let e1' = visit#expr e1 in
let e2' = visit#expr e2 in
let e3' = visit#expr e3 in
sprintf "(%s ? %s : %s)"
e1' e2' e3'
| Select (e1,e2,e3) ->
let e1' = visit#expr e1 in
let e2' = visit#expr e2 in
let e3' = visit#expr e3 in
sprintf "(%s ?? %s :: %s)"
e1' e2' e3'
| Declassify e ->
let e' = visit#expr e in
sprintf "declassify(%s)" e'
| Classify e ->
let e' = visit#expr e in
sprintf "classify(%s)" e'
| Enref e ->
sprintf "ref %s" (visit#expr e)
| Deref e ->
let e' = visit#expr e in
sprintf "*%s" e'
| ArrayGet (e,lexpr) ->
let e' = visit#expr e in
let lexpr' = visit#lexpr lexpr in
sprintf "%s[%s]"
e' lexpr'
| ArrayLit es ->
let es' = List.map visit#expr es in
sprintf "[%s]"
(concat ", " es')
| ArrayZeros lexpr ->
sprintf "zeros(%s)" (visit#lexpr lexpr)
| ArrayCopy e ->
sprintf "clone(%s)" (visit#expr e)
| ArrayView (e,index,len) ->
sprintf "view(%s, %s, %s)"
(visit#expr e)
(visit#lexpr index)
(visit#lexpr len)
| VectorLit ns ->
sprintf "<%s>"
(concat ", " @@ List.map string_of_int ns)
| Shuffle (e,ns) ->
sprintf "%s<%s>"
(visit#expr e)
(concat "," @@ List.map string_of_int ns)
| StructLit entries ->
"X[structlit]X"
| StructGet (e,field) ->
sprintf "%s->%s"
(visit#expr e)
field.data
| StringLiteral s -> sprintf "\"%s\"" s
method unop = function
| Ast.Neg -> "-"
| Ast.LogicalNot -> "!"
| Ast.BitwiseNot -> "~"
method binop = function
| Ast.Plus -> "+"
| Ast.Minus -> "-"
| Ast.Multiply -> "*"
| Ast.Divide -> "/"
| Ast.Modulo -> "%"
| Ast.Equal -> "=="
| Ast.NEqual -> "!="
| Ast.GT -> ">"
| Ast.GTE -> ">="
| Ast.LT -> "<"
| Ast.LTE -> "<="
| Ast.LogicalAnd -> "&&"
| Ast.LogicalOr -> "||"
| Ast.BitwiseAnd -> "&"
| Ast.BitwiseOr -> "|"
| Ast.BitwiseXor -> "^"
| Ast.LeftShift -> "<<"
| Ast.RightShift -> ">>"
| Ast.LeftRotate -> "<<<"
| Ast.RightRotate -> ">>>"
end
let ps = new pseudocode (Module ([],[],{fmap=[]}))
let transform m =
let visit = new pseudocode m in
visit#fact_module ()
| |
5312c77ef76b7cffa50cca1b74aa286ddbf66764ccf763e7638cca127ca68513 | minoki/haskell-floating-point | Conversion.hs | # LANGUAGE DataKinds #
# LANGUAGE HexFloatLiterals #
# LANGUAGE NumericUnderscores #
# OPTIONS_GHC -Wno - type - defaults #
module Conversion (benchmark) where
import Data.Bits
import Data.Functor.Product
import Data.Int
import Data.Ratio
import Data.Word
import Gauge.Benchmark
import Numeric.Floating.IEEE
import qualified Numeric.Floating.IEEE.Internal as IEEE.Internal
import Numeric.Rounded.Hardware
import qualified Numeric.Rounded.Hardware.Backend.C as C
import Numeric.Rounded.Hardware.Class
import Numeric.Rounded.Hardware.Interval
-- | Convert a 'Word64' to 'Double' under an explicit rounding mode.
--
-- A 'Double' carries a 53-bit significand, so only the top 53 significant
-- bits of the input can be kept.  With @z = countLeadingZeros x@, the mask
-- @0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z@ is 53 one-bits aligned to the
-- most significant set bit; bits below it determine the rounding direction.
--
-- NOTE(review): for @x == 0@, @z@ is 64 and the shift amount is out of
-- range for 'unsafeShiftR'; the result still works out to 0 because
-- @0 .&. _ == 0@ and the final mask cancels any added rounding bits —
-- worth confirming on non-x86 targets.
word64ToDouble :: RoundingMode -> Word64 -> Double
word64ToDouble ToNearest x
  -- Anything at or above 2^64 - 2^10 rounds (to nearest) up to 2^64.
  | x >= 0xFFFF_FFFF_FFFF_FC00 = 0x1p64
  | otherwise = let z = countLeadingZeros x
                    -- Round half to even: the tested bit is the lowest
                    -- *kept* bit; if it is 0 add just under half a ulp
                    -- (ties fall down to the even value), otherwise add
                    -- exactly half a ulp (ties go up to the even value).
                    y = if x .&. (0x0000_0000_0000_0800 `unsafeShiftR` z) == 0
                        then x + (0x0000_0000_0000_03FF `unsafeShiftR` z)
                        else x + (0x0000_0000_0000_0400 `unsafeShiftR` z)
                in fromIntegral (y .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
word64ToDouble TowardInf x
  -- Anything at or above 2^64 - 2^11 rounds up to 2^64.
  | x >= 0xFFFF_FFFF_FFFF_F800 = 0x1p64
  | otherwise = let z = countLeadingZeros x
                    -- Add all-ones below the kept bits so any nonzero
                    -- remainder bumps the kept prefix up by one ulp.
                    y = x + (0x0000_0000_0000_07FF `unsafeShiftR` z)
                in fromIntegral (y .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
-- For a non-negative input, rounding toward -inf and toward zero both
-- truncate: just drop the bits below the 53-bit prefix.
word64ToDouble TowardNegInf x = let z = countLeadingZeros x
                                in fromIntegral (x .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
word64ToDouble TowardZero x = let z = countLeadingZeros x
                              in fromIntegral (x .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
-- | Convert an 'Int64' to 'Double' under an explicit rounding mode, in
-- terms of 'word64ToDouble'.  Negative inputs are converted via their
-- magnitude with the directed rounding modes mirrored (rounding a
-- negative toward +inf is rounding its magnitude toward -inf, and vice
-- versa); 'ToNearest' and 'TowardZero' are symmetric and pass through.
int64ToDouble :: RoundingMode -> Int64 -> Double
int64ToDouble r x
  | x >= 0    = word64ToDouble r (fromIntegral x)
  | otherwise = negate (word64ToDouble mirrored (fromIntegral (negate x)))
  where
    mirrored = case r of
      TowardInf    -> TowardNegInf
      TowardNegInf -> TowardInf
      _            -> r
-- | Micro-benchmarks comparing integer- and rational-to-'Double'
-- conversion routines: GHC's built-in 'fromInteger'\/'fromIntegral'\/
-- 'fromRational', the 'Rounded' newtype instances, explicit
-- 'roundedFromInteger', the fp-ieee directed conversions, the local
-- bit-twiddling implementations above, and several ways of producing an
-- interval (lower\/upper bound pair) in one pass.
benchmark :: Benchmark
benchmark = bgroup "Conversion"
  [ -- Integer -> Double at three magnitudes (the "large" case exceeds 64 bits).
    bgroup "fromInteger/to Double"
    [ bgroup name $ map ($ value)
      [ bench "plain" . nf (fromInteger :: Integer -> Double)
      , bench "Rounded/ToNearest" . nf (fromInteger :: Integer -> Rounded 'ToNearest Double)
      , bench "Rounded/TowardInf" . nf (fromInteger :: Integer -> Rounded 'TowardInf Double)
      , bench "roundedFromInteger/ToNearest" . nf (roundedFromInteger ToNearest :: Integer -> Double)
      , bench "roundedFromInteger/TowardInf" . nf (roundedFromInteger TowardInf :: Integer -> Double)
      , bench "fp-ieee/ToNearest" . nf (fromIntegerTiesToEven :: Integer -> Double)
      , bench "fp-ieee/TowardInf" . nf (fromIntegerTowardPositive :: Integer -> Double)
      , bench "Interval/default" . nf (fromInteger :: Integer -> Interval Double)
      , bench "Interval/individual" . nf (\n -> (fromIntegerTowardNegative n, fromIntegerTowardPositive n) :: (Double, Double))
      , bench "Interval/fromIntegerR" . nf (\n -> case IEEE.Internal.fromIntegerR n of
                                                    Pair (IEEE.Internal.RoundTowardNegative x) (IEEE.Internal.RoundTowardPositive y) -> (x, y) :: (Double, Double)
                                           )
      ]
    | (name, value) <- [ ("small", -2^50 + 2^13 + 127)
                       , ("medium", -2^60 + 42 * 2^53 - 137 * 2^24 + 3)
                       , ("large", -2^100 - 37 * 2^80 + 2^13 + 127)
                       ] :: [(String, Integer)]
    ]
  , -- Int64 -> Double, including the local int64ToDouble and the C backend.
    bgroup "fromIntegral/Int64->Double"
    [ bgroup name $ map ($ value)
      [ bench "plain" . nf (fromIntegral :: Int64 -> Double)
      , bench "Rounded/ToNearest" . nf (fromIntegral :: Int64 -> Rounded 'ToNearest Double)
      , bench "Rounded/TowardInf" . nf (fromIntegral :: Int64 -> Rounded 'TowardInf Double)
      , bench "roundedFromInteger/ToNearest" . nf (roundedFromInteger ToNearest . fromIntegral :: Int64 -> Double)
      , bench "roundedFromInteger/TowardInf" . nf (roundedFromInteger TowardInf . fromIntegral :: Int64 -> Double)
      , bench "fp-ieee/ToNearest" . nf (fromIntegralTiesToEven :: Int64 -> Double)
      , bench "fp-ieee/TowardInf" . nf (fromIntegralTowardPositive :: Int64 -> Double)
      , bench "int64ToDouble/ToNearest" . nf (int64ToDouble ToNearest :: Int64 -> Double)
      , bench "int64ToDouble/TowardInf" . nf (int64ToDouble TowardInf :: Int64 -> Double)
      , bench "Interval/default" . nf (fromIntegral :: Int64 -> Interval Double)
      , bench "Interval/individual" . nf (\n -> (fromIntegralTowardNegative n, fromIntegralTowardPositive n) :: (Double, Double))
      , bench "Interval/fromIntegralR" . nf (\n -> case IEEE.Internal.fromIntegralR n of
                                                     Pair (IEEE.Internal.RoundTowardNegative x) (IEEE.Internal.RoundTowardPositive y) -> (x, y) :: (Double, Double)
                                            )
      , bench "Interval/individual/C" . nf (\n -> (C.roundedDoubleFromInt64 TowardNegInf n, C.roundedDoubleFromInt64 TowardInf n))
      ]
    | (name, value) <- [ ("small", -2^50 + 2^13 + 127)
                       , ("medium", -2^60 + 42 * 2^53 - 137 * 2^24 + 3)
                       ] :: [(String, Int64)]
    ]
  , -- Word64 -> Double, including the local word64ToDouble and the C backend.
    bgroup "fromIntegral/Word64->Double"
    [ bgroup name $ map ($ value)
      [ bench "plain" . nf (fromIntegral :: Word64 -> Double)
      , bench "Rounded/ToNearest" . nf (fromIntegral :: Word64 -> Rounded 'ToNearest Double)
      , bench "Rounded/TowardInf" . nf (fromIntegral :: Word64 -> Rounded 'TowardInf Double)
      , bench "roundedFromInteger/ToNearest" . nf (roundedFromInteger ToNearest . fromIntegral :: Word64 -> Double)
      , bench "roundedFromInteger/TowardInf" . nf (roundedFromInteger TowardInf . fromIntegral :: Word64 -> Double)
      , bench "fp-ieee/ToNearest" . nf (fromIntegralTiesToEven :: Word64 -> Double)
      , bench "fp-ieee/TowardInf" . nf (fromIntegralTowardPositive :: Word64 -> Double)
      , bench "word64ToDouble/ToNearest" . nf (word64ToDouble ToNearest :: Word64 -> Double)
      , bench "word64ToDouble/TowardInf" . nf (word64ToDouble TowardInf :: Word64 -> Double)
      , bench "Interval/default" . nf (fromIntegral :: Word64 -> Interval Double)
      , bench "Interval/individual" . nf (\n -> (fromIntegralTowardNegative n, fromIntegralTowardPositive n) :: (Double, Double))
      , bench "Interval/fromIntegralR" . nf (\n -> case IEEE.Internal.fromIntegralR n of
                                                     Pair (IEEE.Internal.RoundTowardNegative x) (IEEE.Internal.RoundTowardPositive y) -> (x, y) :: (Double, Double)
                                            )
      , bench "Interval/individual/C" . nf (\n -> (C.roundedDoubleFromWord64 TowardNegInf n, C.roundedDoubleFromWord64 TowardInf n))
      ]
    | (name, value) <- [ ("small", 2^50 + 2^13 + 127)
                       , ("medium", 2^63 + 42 * 2^53 - 137 * 2^24 + 3)
                       ] :: [(String, Word64)]
    ]
  , -- Rational -> Double for decimal, hex-float, small and large fractions.
    bgroup "fromRational/to Double"
    [ bgroup name $ map ($ value)
      [ bench "plain" . nf (fromRational :: Rational -> Double)
      , bench "Rounded/ToNearest" . nf (fromRational :: Rational -> Rounded 'ToNearest Double)
      , bench "Rounded/TowardInf" . nf (fromRational :: Rational -> Rounded 'TowardInf Double)
      , bench "fp-ieee/ToNearest" . nf (fromRationalTiesToEven :: Rational -> Double)
      , bench "fp-ieee/TowardInf" . nf (fromRationalTowardPositive :: Rational -> Double)
      , bench "Interval/default" . nf (fromRational :: Rational -> Interval Double)
      , bench "Interval/individual" . nf (\x -> (fromRationalTowardNegative x :: Double, fromRationalTowardPositive x :: Double))
      , bench "Interval/fromRationalR" . nf (\x -> case IEEE.Internal.fromRationalR x of
                                                     Pair (IEEE.Internal.RoundTowardNegative a) (IEEE.Internal.RoundTowardPositive b) -> (a, b) :: (Double, Double)
                                            )
      ]
    | (name, value) <- [ ("decimal", 3.14159265358979323846264338327950)
                       , ("binary", 0xcafec0ffeecafec0ffeep-177)
                       , ("small", 22 % 7)
                       , ("large", 78326489123342523452342137498719847192 % 348912374981749170413424213275017)
                       ] :: [(String, Rational)]
    ]
  ]
| null | https://raw.githubusercontent.com/minoki/haskell-floating-point/7d7bb31bb2b07c637a5eaeda92fc622566e9b141/rounded-hw/benchmark/Conversion.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE HexFloatLiterals #
# LANGUAGE NumericUnderscores #
# OPTIONS_GHC -Wno - type - defaults #
module Conversion (benchmark) where
import Data.Bits
import Data.Functor.Product
import Data.Int
import Data.Ratio
import Data.Word
import Gauge.Benchmark
import Numeric.Floating.IEEE
import qualified Numeric.Floating.IEEE.Internal as IEEE.Internal
import Numeric.Rounded.Hardware
import qualified Numeric.Rounded.Hardware.Backend.C as C
import Numeric.Rounded.Hardware.Class
import Numeric.Rounded.Hardware.Interval
word64ToDouble :: RoundingMode -> Word64 -> Double
word64ToDouble ToNearest x
| x >= 0xFFFF_FFFF_FFFF_FC00 = 0x1p64
| otherwise = let z = countLeadingZeros x
y = if x .&. (0x0000_0000_0000_0800 `unsafeShiftR` z) == 0
then x + (0x0000_0000_0000_03FF `unsafeShiftR` z)
else x + (0x0000_0000_0000_0400 `unsafeShiftR` z)
in fromIntegral (y .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
word64ToDouble TowardInf x
| x >= 0xFFFF_FFFF_FFFF_F800 = 0x1p64
| otherwise = let z = countLeadingZeros x
y = x + (0x0000_0000_0000_07FF `unsafeShiftR` z)
in fromIntegral (y .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
word64ToDouble TowardNegInf x = let z = countLeadingZeros x
in fromIntegral (x .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
word64ToDouble TowardZero x = let z = countLeadingZeros x
in fromIntegral (x .&. (0xFFFF_FFFF_FFFF_F800 `unsafeShiftR` z))
int64ToDouble :: RoundingMode -> Int64 -> Double
int64ToDouble r x | x >= 0 = word64ToDouble r (fromIntegral x)
| r == TowardInf = - word64ToDouble TowardNegInf (fromIntegral (-x))
| r == TowardNegInf = - word64ToDouble TowardInf (fromIntegral (-x))
| otherwise = - word64ToDouble r (fromIntegral (-x))
benchmark :: Benchmark
benchmark = bgroup "Conversion"
[ bgroup "fromInteger/to Double"
[ bgroup name $ map ($ value)
[ bench "plain" . nf (fromInteger :: Integer -> Double)
, bench "Rounded/ToNearest" . nf (fromInteger :: Integer -> Rounded 'ToNearest Double)
, bench "Rounded/TowardInf" . nf (fromInteger :: Integer -> Rounded 'TowardInf Double)
, bench "roundedFromInteger/ToNearest" . nf (roundedFromInteger ToNearest :: Integer -> Double)
, bench "roundedFromInteger/TowardInf" . nf (roundedFromInteger TowardInf :: Integer -> Double)
, bench "fp-ieee/ToNearest" . nf (fromIntegerTiesToEven :: Integer -> Double)
, bench "fp-ieee/TowardInf" . nf (fromIntegerTowardPositive :: Integer -> Double)
, bench "Interval/default" . nf (fromInteger :: Integer -> Interval Double)
, bench "Interval/individual" . nf (\n -> (fromIntegerTowardNegative n, fromIntegerTowardPositive n) :: (Double, Double))
, bench "Interval/fromIntegerR" . nf (\n -> case IEEE.Internal.fromIntegerR n of
Pair (IEEE.Internal.RoundTowardNegative x) (IEEE.Internal.RoundTowardPositive y) -> (x, y) :: (Double, Double)
)
]
| (name, value) <- [ ("small", -2^50 + 2^13 + 127)
, ("medium", -2^60 + 42 * 2^53 - 137 * 2^24 + 3)
, ("large", -2^100 - 37 * 2^80 + 2^13 + 127)
] :: [(String, Integer)]
]
, bgroup "fromIntegral/Int64->Double"
[ bgroup name $ map ($ value)
[ bench "plain" . nf (fromIntegral :: Int64 -> Double)
, bench "Rounded/ToNearest" . nf (fromIntegral :: Int64 -> Rounded 'ToNearest Double)
, bench "Rounded/TowardInf" . nf (fromIntegral :: Int64 -> Rounded 'TowardInf Double)
, bench "roundedFromInteger/ToNearest" . nf (roundedFromInteger ToNearest . fromIntegral :: Int64 -> Double)
, bench "roundedFromInteger/TowardInf" . nf (roundedFromInteger TowardInf . fromIntegral :: Int64 -> Double)
, bench "fp-ieee/ToNearest" . nf (fromIntegralTiesToEven :: Int64 -> Double)
, bench "fp-ieee/TowardInf" . nf (fromIntegralTowardPositive :: Int64 -> Double)
, bench "int64ToDouble/ToNearest" . nf (int64ToDouble ToNearest :: Int64 -> Double)
, bench "int64ToDouble/TowardInf" . nf (int64ToDouble TowardInf :: Int64 -> Double)
, bench "Interval/default" . nf (fromIntegral :: Int64 -> Interval Double)
, bench "Interval/individual" . nf (\n -> (fromIntegralTowardNegative n, fromIntegralTowardPositive n) :: (Double, Double))
, bench "Interval/fromIntegralR" . nf (\n -> case IEEE.Internal.fromIntegralR n of
Pair (IEEE.Internal.RoundTowardNegative x) (IEEE.Internal.RoundTowardPositive y) -> (x, y) :: (Double, Double)
)
, bench "Interval/individual/C" . nf (\n -> (C.roundedDoubleFromInt64 TowardNegInf n, C.roundedDoubleFromInt64 TowardInf n))
]
| (name, value) <- [ ("small", -2^50 + 2^13 + 127)
, ("medium", -2^60 + 42 * 2^53 - 137 * 2^24 + 3)
] :: [(String, Int64)]
]
, bgroup "fromIntegral/Word64->Double"
[ bgroup name $ map ($ value)
[ bench "plain" . nf (fromIntegral :: Word64 -> Double)
, bench "Rounded/ToNearest" . nf (fromIntegral :: Word64 -> Rounded 'ToNearest Double)
, bench "Rounded/TowardInf" . nf (fromIntegral :: Word64 -> Rounded 'TowardInf Double)
, bench "roundedFromInteger/ToNearest" . nf (roundedFromInteger ToNearest . fromIntegral :: Word64 -> Double)
, bench "roundedFromInteger/TowardInf" . nf (roundedFromInteger TowardInf . fromIntegral :: Word64 -> Double)
, bench "fp-ieee/ToNearest" . nf (fromIntegralTiesToEven :: Word64 -> Double)
, bench "fp-ieee/TowardInf" . nf (fromIntegralTowardPositive :: Word64 -> Double)
, bench "word64ToDouble/ToNearest" . nf (word64ToDouble ToNearest :: Word64 -> Double)
, bench "word64ToDouble/TowardInf" . nf (word64ToDouble TowardInf :: Word64 -> Double)
, bench "Interval/default" . nf (fromIntegral :: Word64 -> Interval Double)
, bench "Interval/individual" . nf (\n -> (fromIntegralTowardNegative n, fromIntegralTowardPositive n) :: (Double, Double))
, bench "Interval/fromIntegralR" . nf (\n -> case IEEE.Internal.fromIntegralR n of
Pair (IEEE.Internal.RoundTowardNegative x) (IEEE.Internal.RoundTowardPositive y) -> (x, y) :: (Double, Double)
)
, bench "Interval/individual/C" . nf (\n -> (C.roundedDoubleFromWord64 TowardNegInf n, C.roundedDoubleFromWord64 TowardInf n))
]
| (name, value) <- [ ("small", 2^50 + 2^13 + 127)
, ("medium", 2^63 + 42 * 2^53 - 137 * 2^24 + 3)
] :: [(String, Word64)]
]
, bgroup "fromRational/to Double"
[ bgroup name $ map ($ value)
[ bench "plain" . nf (fromRational :: Rational -> Double)
, bench "Rounded/ToNearest" . nf (fromRational :: Rational -> Rounded 'ToNearest Double)
, bench "Rounded/TowardInf" . nf (fromRational :: Rational -> Rounded 'TowardInf Double)
, bench "fp-ieee/ToNearest" . nf (fromRationalTiesToEven :: Rational -> Double)
, bench "fp-ieee/TowardInf" . nf (fromRationalTowardPositive :: Rational -> Double)
, bench "Interval/default" . nf (fromRational :: Rational -> Interval Double)
, bench "Interval/individual" . nf (\x -> (fromRationalTowardNegative x :: Double, fromRationalTowardPositive x :: Double))
, bench "Interval/fromRationalR" . nf (\x -> case IEEE.Internal.fromRationalR x of
Pair (IEEE.Internal.RoundTowardNegative a) (IEEE.Internal.RoundTowardPositive b) -> (a, b) :: (Double, Double)
)
]
| (name, value) <- [ ("decimal", 3.14159265358979323846264338327950)
, ("binary", 0xcafec0ffeecafec0ffeep-177)
, ("small", 22 % 7)
, ("large", 78326489123342523452342137498719847192 % 348912374981749170413424213275017)
] :: [(String, Rational)]
]
]
| |
d880c95d6787ab63de1c7524478f609ff205224c0c67ce2e49009563116142fb | flipstone/orville | Plan.hs | {-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
module Database.Orville.PostgreSQL.Plan
( Plan
, Planned
, Execute
, Explain
, askParam
-- * Using a Plan after it is constructed
, execute
, explain
-- * Making a Plan to find rows in the database
, findMaybeOne
, findMaybeOneWhere
, findOne
, findOneShowVia
, findOneWhere
, findOneWhereShowVia
, findAll
, findAllWhere
-- * Creating a multi-step Plan from other Plan values
, bind
, use
, using
, chain
, apply
, planMany
, planList
, focusParam
, planEither
, planMaybe
* from other types into Plan
, Op.AssertionFailed
, assert
, planSelect
, planOperation
) where
import Data.Either (partitionEithers)
import qualified Control.Monad.Catch as Catch
import qualified Database.Orville.PostgreSQL.Core as Core
import Database.Orville.PostgreSQL.Internal.MappendCompat ((<>))
import Database.Orville.PostgreSQL.Plan.Many (Many)
import qualified Database.Orville.PostgreSQL.Plan.Many as Many
import qualified Database.Orville.PostgreSQL.Plan.Operation as Op
import qualified Database.Orville.PostgreSQL.Plan.Explanation as Exp
import Database.Orville.PostgreSQL.Select (Select)
{-|
A 'Plan' is an executable set of queries that can be executed to load data
from the database, using the results of prior queries as input parameters to
following queries in controlled ways. In particular, the "controlled" aspect
of this allows plans that take a single input to be adapted to take multiple
input parameters in a list without the resulting plan executing N+1 queries.
This restriction means that while query results can be used as input
parameters to later queries, they cannot be used to decide to run completely
different queries based on other query results. Allowing this would prevent
the 'Plan' structure from eliminating N+1 query loops.

Note that during execution queries are never combined across tables to form
joins or subqueries. Queries are still executed in the same sequence as
specified in the plan, just on all the inputs at once rather than in a loop.
If you need to do a join with a plan, you can always construct your
own custom 'Op.Operation' and use 'planOperation' to incorporate it into a
plan.

The @param@ type variable indicates what type of value is expected as input
when the plan is executed.

The @result@ type for a plan indicates what Haskell type is produced
when the plan is executed.

The @scope@ type is used internally by Orville to track whether the plan is
currently executed against a single input or multiple inputs. This type
parameter should never be specified as a concrete type in user code, but must
be exposed as a variable to ensure that execution scope is tracked correctly
through usages of 'bind'.
-}
data Plan scope param result where
  -- | Lifts a primitive 'Op.Operation' into a single plan step.
  PlanOp :: Op.Operation param result -> Plan scope param result

  -- | Adapts a single-input plan to execute against many inputs at once.
  -- The inner plan is universally quantified over its scope so it can be
  -- run in the many-input context regardless of the outer scope.
  PlanMany :: (forall manyScope. Plan manyScope param result)
           -> Plan scope [param] (Many param result)

  -- | Branches execution between two plans based on an 'Either' input.
  PlanEither :: Plan scope leftParam leftResult
             -> Plan scope rightParam rightResult
             -> Plan scope (Either leftParam rightParam) (Either leftResult rightResult)

  -- | Sequences a plan with a continuation that receives its results
  -- wrapped as a 'Planned' value. See 'bind'.
  Bind :: Plan scope param a
       -> (Planned scope param a -> Plan scope param result)
       -> Plan scope param result

  -- | Produces a previously retrieved 'Planned' value, ignoring the input.
  Use :: Planned scope param a -> Plan scope param a

  -- | Produces a constant value without consulting the input.
  Pure :: a -> Plan scope param a

  -- | Applies a plan-produced function to a plan-produced value.
  Apply :: Plan scope param (a -> b)
        -> Plan scope param a
        -> Plan scope param b

  -- | Feeds the output of the first plan as input to the second.
  Chain :: Plan scope a b
        -> Plan scope b c
        -> Plan scope a c
-- | 'fmap' applies a pure function to the eventual result of the plan.
instance Functor (Plan scope param) where
  fmap = Apply . Pure
-- | 'pure' produces a constant result; '<*>' runs both plans against the
-- same input and applies the resulting function to the resulting value.
instance Applicative (Plan scope param) where
  pure = Pure
  (<*>) = Apply
{-|
'Execute' is a tag type used by as the 'scope' variable for
'Plan' values when executing them via the 'execute' function.
-}
data Execute
{-|
'ExecuteMany' is an internal tag type used as the 'scope' variable for
'Plan' values when executing them against multiple inputs via the
'executeMany' internal function.
-}
data ExecuteMany
{-|
A 'Planned' value is a wrapper around the results of previous run queries
when using the 'bind' function. At the time that you are writing a plan you
do not know whether the 'Plan' will be run with a single input or multiple
inputs. A 'Planned' value may end up being either an individual item or a
list of items. Due to this, your ability to interact with the value is
limited to the use of 'fmap' to extract (or build) other values from the
results. 'Planned' values can be used together with the 'use' function to
make a 'Plan' that produces the extracted value.
Note that while 'Planned' could provide an 'Applicative' instance as well, it
does not to avoid confusion with 'Applicative' instance for 'Plan' itself.
If you need to build a value from several 'Planned' values using
'Applicative', you should call 'use' on each of the values and use the
'Applicative' instance for 'Plan'.
-}
data Planned scope param a where
  -- | A single result, produced when executing with one input.
  PlannedOne :: a -> Planned Execute param a

  -- | Per-key results, produced when executing with many inputs.
  PlannedMany :: Many k a -> Planned ExecuteMany k a

  -- | A placeholder used while explaining a plan; no value is available.
  PlannedExplain :: Planned Explain param a
instance Functor (Planned scope param) where
fmap = mapPlanned
{-|
'mapPlanned' applies a function to what value or values have been produced by
the plan. This function can also be called as 'fmap' or '<$>' thorugh the
'Functor' instance for 'Planned'.
-}
mapPlanned :: (a -> b) -> Planned scope param a -> Planned scope param b
mapPlanned f (PlannedOne a)   = PlannedOne (f a)
mapPlanned f (PlannedMany as) = PlannedMany (fmap f as)
-- No value exists while explaining, so there is nothing to map over.
mapPlanned _ PlannedExplain   = PlannedExplain
{-|
'resolveOne' resolves a 'Planned' value that is known to be in the 'One'
scope to its single wrapped value.
-}
resolveOne :: Planned Execute param a -> a
-- The GADT refines 'Planned Execute' to the 'PlannedOne' constructor only,
-- so this single-clause match is total.
resolveOne (PlannedOne a) = a
{-|
'resolveMany resolves a 'Planned' value that is known to be in the 'Many'
scope to the 'Many' value wrapped inside it.
-}
resolveMany :: Planned ExecuteMany k a -> Many k a
-- The GADT refines 'Planned ExecuteMany' to the 'PlannedMany' constructor
-- only, so this single-clause match is total.
resolveMany (PlannedMany as) = as
{-|
'planOperation' allows any primitive 'Op.Operation' to be used as an atomic step
in a plan. When the plan is executed, the appropriate 'Op.Operation' functions
will be used depending on the execution context.
-}
planOperation :: Op.Operation param result
-> Plan scope param result
planOperation =
PlanOp
{-|
'planSelect' allows any Orville 'Select' query to be incorporated into a
plan. Note that the 'Select' cannot depend on the plan's input parameters in
this case. If the plan is executed with multiple inputs the same set of all
the results will be used as the results for each of the input parameters.
-}
planSelect :: Select row -> Plan scope () [row]
planSelect select =
  planOperation (Op.findSelect select)
{-|
'askParam' allows the input parameter for the plan to be retrieved as the
result of the plan. Together with 'bind' you can use this to get access to
the input parameter as a 'Planned' value.
-}
askParam :: Plan scope param param
askParam =
planOperation Op.askParam
{-|
'findMaybeOne' constructs a 'Plan' that will find at most one row from
the given table where the plan's input value matches the given database
field.
-}
findMaybeOne :: Ord fieldValue
             => Core.TableDefinition readEntity writeEntity key
             -> Core.FieldDefinition nullability fieldValue
             -> Plan scope fieldValue (Maybe readEntity)
findMaybeOne tableDef fieldDef =
  -- Delegates to the primitive 'Op.findOne' operation, which yields at most
  -- one row per input value.
  planOperation (Op.findOne tableDef fieldDef)
{-|
'findMaybeOneWhere' is similar to 'findMaybeOne', but allows a
'WhereCondition' to be specified to restrict which rows are matched by the
database query.
-}
findMaybeOneWhere :: Ord fieldValue
                  => Core.TableDefinition readEntity writeEntity key
                  -> Core.FieldDefinition nullability fieldValue
                  -> Core.WhereCondition
                  -> Plan scope fieldValue (Maybe readEntity)
findMaybeOneWhere tableDef fieldDef cond =
  -- Same as 'findMaybeOne', with the extra condition pushed down into the
  -- primitive operation.
  planOperation (Op.findOneWhere tableDef fieldDef cond)
{-|
'findOneShowVia' is similar to 'findMaybeOne', but it expects that there will
always be a row found matching the plan's input value. If no row is found an
'Op.AssertionFailed' exception will be thrown. This is a useful convenience
when looking up foreign-key associations that are expected to be enforced by
the database itself.
-}
findOneShowVia :: Ord fieldValue
               => (fieldValue -> String)
               -> Core.TableDefinition readEntity writeEntity key
               -> Core.FieldDefinition nullability fieldValue
               -> Plan scope fieldValue readEntity
findOneShowVia showParam tableDef fieldDef =
  assert check find
  where
    -- Turns a 'Nothing' result into an assertion failure whose message is
    -- built from the table, field, and shown parameter value.
    check = assertFound showParam tableDef fieldDef
    find  = findMaybeOne tableDef fieldDef
{-|
'findOne' is an alias for 'findOneShowVia' that uses the 'Show' instance of
'fieldValue' when producing a failure message in the case that the entity
cannot be found.
-}
findOne :: (Show fieldValue, Ord fieldValue)
        => Core.TableDefinition readEntity writeEntity key
        -> Core.FieldDefinition nullability fieldValue
        -> Plan scope fieldValue readEntity
-- Delegating keeps this consistent with 'findOneWhere', which is likewise
-- defined as @findOneWhereShowVia show@, instead of duplicating the
-- assert/findMaybeOne wiring here.
findOne = findOneShowVia show
{-|
'findOneWhereShowVia' is similar to 'findOneShowVia', but allows a
'WhereCondition' to be specified to restrict which rows are matched by the
database query.
-}
findOneWhereShowVia :: Ord fieldValue
                    => (fieldValue -> String)
                    -> Core.TableDefinition readEntity writeEntity key
                    -> Core.FieldDefinition nullability fieldValue
                    -> Core.WhereCondition
                    -> Plan scope fieldValue readEntity
findOneWhereShowVia showParam tableDef fieldDef cond =
  assert
    (assertFound showParam tableDef fieldDef)
    (findMaybeOneWhere tableDef fieldDef cond)
{-|
'findOneWhere' is an alias to 'findOneWhereShowVia' that uses the 'Show' instance of
'fieldValue' when producing a failure message in the result the entity cannot
be found.
-}
findOneWhere :: (Show fieldValue, Ord fieldValue)
=> Core.TableDefinition readEntity writeEntity key
-> Core.FieldDefinition nullability fieldValue
-> Core.WhereCondition
-> Plan scope fieldValue readEntity
findOneWhere = findOneWhereShowVia show
{-|
'assertFound' is an internal helper that checks that a row was found where
one was expected, producing a human-readable error message when it was not.
-}
assertFound :: (fieldValue -> String)
            -> Core.TableDefinition readEntity writeEntity key
            -> Core.FieldDefinition nullability fieldValue
            -> fieldValue
            -> Maybe result
            -> Either String result
assertFound showParam tableDef fieldDef param maybeRecord =
  case maybeRecord of
    Just a ->
      Right a

    Nothing ->
      -- 'unwords' inserts single spaces between the items itself, so the
      -- "=" must carry no padding of its own (padded " = " would render
      -- with doubled spaces in the message).
      Left $
        unwords
          [ "Failed to find record in table"
          , Core.tableName tableDef
          , "where"
          , Core.fieldName fieldDef
          , "="
          , showParam param
          ]
{-|
'findAll' constructs a 'Plan' that will find all the rows from the given
table there the plan's input value matches the given database field.
-}
findAll :: Ord fieldValue
        => Core.TableDefinition readEntity writeEntity key
        -> Core.FieldDefinition nullability fieldValue
        -> Plan scope fieldValue [readEntity]
findAll tableDef fieldDef =
  -- Delegates to the primitive 'Op.findAll' operation.
  planOperation (Op.findAll tableDef fieldDef)
{-|
'findAllWhere' is similar to 'findAll', but allows a 'WhereCondition' to be
specified to restrict which rows are matched by the database query.
-}
findAllWhere :: Ord fieldValue
             => Core.TableDefinition readEntity writeEntity key
             -> Core.FieldDefinition nullability fieldValue
             -> Core.WhereCondition
             -> Plan scope fieldValue [readEntity]
findAllWhere tableDef fieldDef cond =
  -- Same as 'findAll', with the extra condition pushed down into the
  -- primitive operation.
  planOperation (Op.findAllWhere tableDef fieldDef cond)
{-|
'planMany' adapts a plan that takes a single input parameter to work on
multiple input parameters. When the new plan is executed each query will
execute in the same basic order, but with adjusted conditions to find all the
rows for all inputs at once rather than running the planned queries once for
each input.
-}
planMany :: (forall manyScope. Plan manyScope param result)
         -> Plan scope [param] (Many param result)
planMany =
  -- Wraps the plan in the internal 'PlanMany' constructor; the executors
  -- interpret it against all inputs at once.
  PlanMany
{-|
'planList' lifts a plan so both its param and result become lists.
This saves you from having to fmap in 'Many.elems' when all you want back
from a 'Many' is the list of results inside it.
-}
planList :: (forall scope. Plan scope param result)
         -> Plan listScope [param] [result]
planList plan =
  -- Unwrap the 'Many' produced by 'planMany' down to just its elements.
  fmap Many.elems (planMany plan)
{-|
'focusParam' builds a plan from a function and an existing plan taking the
result of that function as input. This is especially useful when there is some
structure, and a plan that only needs a part of that structure as input. The
function argument can access part of the structure for the plan argument to use,
so the final returned plan can take the entire structure as input.
-}
focusParam :: (a -> b)
           -> Plan scope b result
           -> Plan scope a result
focusParam focuser =
  -- Transform the incoming parameter first, then chain into the given plan.
  chain (fmap focuser askParam)
{-|
'planEither' lets you construct a plan that branches by executing a different
plan for the 'Left' and 'Right' sides of an 'Either' value. When used with a
single input parameter only one of the two plans will be used, based on the
input parameter. When used on multiple input parameters, each of the two
plans will be executed only once with all the 'Left' and 'Right' values
provided as input parameters respectively.
-}
planEither :: Plan scope leftParam leftResult
           -> Plan scope rightParam rightResult
           -> Plan scope (Either leftParam rightParam) (Either leftResult rightResult)
planEither =
  PlanEither
{-|
'planMaybe' lifts a plan so both its param and result become 'Maybe's. This is
useful when modifying an existing plan to deal with optionality. Writing just
one plan can then easily produce both the required and optional versions.
-}
planMaybe :: Plan scope a b -> Plan scope (Maybe a) (Maybe b)
planMaybe plan =
  -- 'Nothing' is encoded as @Left ()@ so 'planEither' can short-circuit it
  -- to a constant 'Nothing' result, while 'Just' values run the wrapped
  -- plan; 'either id id' then collapses both branches back to 'Maybe b'.
  focusParam (maybe (Left ()) Right) $
    either id id <$> planEither (pure Nothing) (Just <$> plan)
{-|
'bind' gives access to the results of a plan to use as input values to future
plans. The plan result is given as the input parameter to the provided
function, which must produce the remaining 'Plan' to be executed. The value
will be wrapped in the 'Planned' type, which may represent either a single
result or multiple results, depending on whether the plan is currently being
executed with one or multiple input parameters. This ensures that the caller
produces only a single remaining 'Plan' to be used for all inputs when there
are multiple, to eliminate the need to possibly run different queries for
different inputs (which would introduce N+1 query execution).

The 'Planned' value (or values) provided by 'bind' have actually been
retrieved from the database, so the value can be used multiple times when
constructing the remaining 'Plan' without fear of causing the query to run
multiple times.

Also see 'use' for how to lift a 'Planned' value back into a 'Plan'.
-}
bind :: Plan scope param a
     -> (Planned scope param a -> Plan scope param result)
     -> Plan scope param result
bind =
  Bind
{-|
'use' constructs a 'Plan' that always produces the 'Planned' value
as its result, regardless of the parameter given as input to the plan.
-}
use :: Planned scope param a -> Plan scope param a
use =
  -- Wraps the planned value in the internal 'Use' constructor; the plan's
  -- own input is ignored at execution time.
  Use
{-|
'using' uses a 'Planned' value in the input to another 'Plan'. The
resulting plan will ignore its input and use the 'Planned' value as
the input to produce its result instead.
-}
using :: Planned scope param a
      -> Plan scope a b
      -> Plan scope param b
using planned plan =
  -- Replace the incoming parameter with the planned value, then run the
  -- given plan on it.
  chain (use planned) plan
{-|
'apply' applies a function produced by a plan to the value produced
by another plan. This is usually used via the '<*>' operator through
the 'Applicative' instance for 'Plan'.
-}
apply :: Plan scope param (a -> b)
      -> Plan scope param a
      -> Plan scope param b
apply =
  -- Direct alias for the internal 'Apply' constructor.
  Apply
{-|
'chain' connects the output of one plan to the input of another to form a
larger plan that will execute the first followed by the second.
-}
chain :: Plan scope a b
      -> Plan scope b c
      -> Plan scope a c
chain =
  Chain
{-|
'assert' allows you to make an assertion about a plan's result that will
throw an 'Op.AssertionFailed' exception during execution if it proves to be
false. The first parameter is the assertion function, which should return
either an error message to be given in the exception or the value to be used
as the plan's result.
-}
assert :: (param -> a -> Either String b)
       -> Plan scope param a
       -> Plan scope param b
assert assertion aPlan =
  chain checkedPlan (PlanOp Op.assertRight)
  where
    -- Pair the plan's own input with its result and apply the assertion,
    -- producing an 'Either' for 'Op.assertRight' to unwrap or throw on.
    checkedPlan =
      assertion <$> askParam <*> aPlan
{-|
'execute' accepts the input parameter (or parameters) expected by a 'Plan'
and runs the plan to completion, either throwing an 'Op.AssertionFailed'
exception in the monad 'm' or producing the expected result.

If you have a plan that takes one input and want to provide a list of
inputs, use 'planMany' to adapt it to a multiple-input plan before calling
'execute'.
-}
execute :: Core.MonadOrville conn m
        => Plan Execute param result
        -> param
        -> m result
execute =
  -- The 'Execute' scope in the type fixes the single-input interpreter.
  executeOne
{-|
'executeOne' is an internal helper that executes a 'Plan' with a concrete
'scope' type to ensure all 'Planned' values are built with 'PlannedOne'.
-}
executeOne :: Core.MonadOrville conn m
           => Plan Execute param result
           -> param
           -> m result
executeOne plan param =
  case plan of
    PlanOp operation -> do
      -- Run the primitive operation for a single input, rethrowing any
      -- failure it reports as an exception.
      opResult <- Op.executeOperationOne operation param
      case opResult of
        Left err ->
          Catch.throwM err
        Right result ->
          pure result

    PlanMany manyPlan ->
      -- The input is itself a list here, so switch to the many-input
      -- interpreter for the inner plan.
      executeMany manyPlan param

    PlanEither leftPlan rightPlan ->
      -- With a single input only one of the two branches ever runs.
      case param of
        Left leftParam ->
          Left <$> executeOne leftPlan leftParam
        Right rightParam ->
          Right <$> executeOne rightPlan rightParam

    Bind intermPlan continue -> do
      -- Execute the intermediate plan, then hand its (already retrieved)
      -- result to the continuation wrapped as a 'PlannedOne'.
      interm <- executeOne intermPlan param
      executeOne
        (continue (PlannedOne interm))
        param

    Use planned ->
      pure . resolveOne $ planned

    Pure a ->
      pure a

    Apply planF planA ->
      -- Both sub-plans receive the same input parameter.
      executeOne planF param <*> executeOne planA param

    Chain planAB planBC -> do
      -- The output of the first plan becomes the input of the second.
      b <- executeOne planAB param
      executeOne planBC b
{-|
'executeMany' is an internal helper that executes a 'Plan' with a concrete
@scope@ type to ensure all 'Planned' values are built with 'PlannedMany'.
-}
executeMany :: Core.MonadOrville conn m
            => Plan ExecuteMany param result
            -> [param]
            -> m (Many.Many param result)
executeMany plan params =
  case plan of
    PlanOp operation -> do
      -- Run the primitive operation once for all inputs, rethrowing any
      -- failure it reports as an exception.
      opResult <- Op.executeOperationMany operation params
      case opResult of
        Left err ->
          Catch.throwM $ err
        Right results ->
          pure results

    PlanMany manyPlan -> do
      -- Flatten all the input lists so the inner plan still runs its
      -- queries only once, then restrict the combined results back down to
      -- each original sublist of inputs.
      let
        flatParams = concat params

      allResults <- executeMany manyPlan flatParams

      let
        restrictResults subParams =
          Many.fromKeys subParams (\k -> Many.lookup k allResults)

      pure $ Many.fromKeys params (Right . restrictResults)

    PlanEither leftPlan rightPlan -> do
      -- Partition the inputs so each branch executes exactly once with all
      -- of its inputs, then reassemble the results keyed by the original
      -- 'Either' parameters.
      let
        (leftParams, rightParams) = partitionEithers params

      leftResults <- executeMany leftPlan leftParams
      rightResults <- executeMany rightPlan rightParams

      let
        eitherResult eitherK =
          case eitherK of
            Left k ->
              Left <$> Many.lookup k leftResults
            Right k ->
              Right <$> Many.lookup k rightResults

      pure $ Many.fromKeys params eitherResult

    Bind intermPlan continue -> do
      -- Execute the intermediate plan for all inputs, then hand the results
      -- to the continuation wrapped as a 'PlannedMany'.
      interms <- executeMany intermPlan params
      executeMany
        (continue (PlannedMany interms))
        params

    Use planned ->
      pure . resolveMany $ planned

    Pure a ->
      -- A constant result is associated with every input key.
      pure $ Many.fromKeys params (const (Right a))

    Apply planF planA -> do
      manyFs <- executeMany planF params
      manyAs <- executeMany planA params
      pure (Many.apply manyFs manyAs)

    Chain planAB planBC -> do
      -- Feed all intermediate results to the second plan at once, then
      -- compose the lookups so results are keyed by the original params.
      bs <- executeMany planAB params
      cs <- executeMany planBC (Many.elems bs)
      pure $ Many.compose cs bs
{-|
'Explain' is an tag type used as the 'scope' variable when explaining a
'Plan' via the 'explain' function.
-}
data Explain
= ExplainOne
| ExplainMany
{-|
'explain' produces a textual description of the steps outlined by
a 'Plan' -- in most cases example SQL queries. If you want to see
the explanation of how the plan will run with multiple input parameters,
you can use 'planMany' to adapt it before calling 'explain'.
-}
explain :: Plan Explain param result -> [String]
explain =
  -- Explanation always starts in the single-input context; 'planMany'
  -- sections switch it to 'ExplainMany' internally.
  Exp.explanationSteps . explainPlan ExplainOne
{-|
'explainPlan' is an internal helper to executes a plan with the
'scope' type fixed to 'Explain' to ensure that all 'Planned'
values are constructed with the 'PlannedExplain' constructor.
-}
explainPlan :: Explain
            -> Plan Explain param result
            -> Exp.Explanation
-- Note: the redundant @do@ keywords the original wrapped around several of
-- these single pure expressions have been removed; they implied monadic
-- sequencing where none occurs.
explainPlan mult plan =
  case plan of
    PlanOp operation ->
      -- A primitive operation explains itself differently depending on
      -- whether it would run against one input or many.
      case mult of
        ExplainOne ->
          Op.explainOperationOne operation
        ExplainMany ->
          Op.explainOperationMany operation

    PlanMany manyPlan ->
      -- Inside 'PlanMany' every step executes in the many-input mode.
      explainPlan ExplainMany manyPlan

    PlanEither leftPlan rightPlan ->
      explainPlan mult leftPlan <> explainPlan mult rightPlan

    Bind intermPlan continue ->
      -- No real results exist while explaining, so the continuation is
      -- probed with the 'PlannedExplain' placeholder.
      let
        nextPlan = continue PlannedExplain
      in
        explainPlan mult intermPlan <> explainPlan mult nextPlan

    Use _ ->
      Exp.noExplanation

    Pure _ ->
      Exp.noExplanation

    Apply planF planA ->
      explainPlan mult planF <> explainPlan mult planA

    Chain planAB planBC ->
      explainPlan mult planAB <> explainPlan mult planBC
| null | https://raw.githubusercontent.com/flipstone/orville/832fa1f2a24f35f05653e6d7e659988dbf1a4ae3/orville-postgresql/src/Database/Orville/PostgreSQL/Plan.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
* Using a Plan after it is constructed
* Making a Plan to find rows in the database
* Creating a multi-step Plan from other Plan values
|
'Execute' is a tag type used by as the 'scope' variable for
'Plan' values when executing them via the 'execute' function.
|
A 'Planned' value is a wrapper around the results of previous run queries
when using the 'bind' function. At the time that you are writing a plan you
do not know whether the 'Plan' will be run with a single input or multiple
inputs. A 'Planned' value may end up being either an individual item or a
list of items. Due to this, your ability to interact with the value is
limited to the use of 'fmap' to extract (or build) other values from the
results. 'Planned' values can be used together with the 'use' function to
make a 'Plan' that produces the extracted value.
Note that while 'Planned' could provide an 'Applicative' instance as well, it
does not to avoid confusion with 'Applicative' instance for 'Plan' itself.
If you need to build a value from several 'Planned' values using
'Applicative', you should call 'use' on each of the values and use the
'Applicative' instance for 'Plan'.
|
'mapPlanned' applies a function to what value or values have been produced by
the plan. This function can also be called as 'fmap' or '<$>' thorugh the
'Functor' instance for 'Planned'.
|
'resolveOne' resolves a 'Planned' value that is known to be in the 'One'
scope to its single wrapped value.
|
'resolveMany resolves a 'Planned' value that is known to be in the 'Many'
scope to the 'Many' value wrapped inside it.
|
'planOperation' allows any primitive 'Op.Operation' to be used as an atomic step
in a plan. When the plan is executed, the appropriate 'Op.Operation' functions
will be used depending on the execution context.
|
'askParam' allows the input parameter for the plan to be retrieved as the
result of the plan. Together with 'bind' you can use this to get access to
the input parameter as a 'Planned' value.
|
'findMaybeOne' constructs a 'Plan' that will find at most one row from
the given table where the plan's input value matches the given database
field.
|
'findMaybeOneWhere' is similar to 'findMaybeOne', but allows a
'WhereCondition' to be specified to restrict which rows are matched by the
database query.
|
'findOneWhere' is an alias to 'findOneWhereShowVia' that uses the 'Show' instance of
'fieldValue' when producing a failure message in the result the entity cannot
be found.
|
'findAll' constructs a 'Plan' that will find all the rows from the given
table there the plan's input value matches the given database field.
|
'findAllWhere' is similar to 'findAll', but allows a 'WhereCondition' to be
specified to restrict which rows are matched by the database query.
|
'planMany' adapts a plan that takes a single input parameter to work on
multiple input parameters. When the new plan is executed each query will
execute in the same basic order, but with adjusted conditions to find all the
rows for all inputs at once rather than running the planned queries once for
each input.
|
'planList' lifts a plan so both its param and result become lists.
This saves you from having to fmap in 'Many.elems' when all you want back
from a 'Many' is the list of results inside it.
|
'focusParam' builds a plan from a function and an existing plan taking the
result of that function as input. This is especially useful when there is some
structure, and a plan that only needs a part of that structure as input. The
function argument can access part of the structure for the plan argument to use,
so the final returned plan can take the entire structure as input.
|
'use' constructs a 'Plan' that always produces the 'Planned' value
as its result, regardless of the parameter given as input to the plan.
|
'using' uses a 'Planned' value in the input to another 'Plan'. The
resulting plan will ignore its input and use the 'Planned' value as
the input to produce its result instead.
|
'apply' applies a function produced by a plan to the value produced
by another plan. This is usually used via the '<*>' operator through
the 'Applicative' instance for 'Plan'.
|
'executeOne' is an internal helper that executes a 'Plan' with a concrete
'scope' type to ensure all 'Planned' values are built with 'PlannedOne'.
|
'executeMany' is an internal helper that executes a 'Plan' with a concrete
@scope@ type to ensure all 'Planned' values are built with 'PlannedMany'.
|
'Explain' is an tag type used as the 'scope' variable when explaining a
'Plan' via the 'explain' function.
|
'explain' produces a textual description of the steps outlined by
a 'Plan' -- in most cases example SQL queries. If you want to see
the explanation of how the plan will run with multiple input parameters,
you can use 'planMany' to adapt it before calling 'explain'.
|
'explainPlan' is an internal helper to executes a plan with the
'scope' type fixed to 'Explain' to ensure that all 'Planned'
values are constructed with the 'PlannedExplain' constructor.
| module Database.Orville.PostgreSQL.Plan
( Plan
, Planned
, Execute
, Explain
, askParam
, execute
, explain
, findMaybeOne
, findMaybeOneWhere
, findOne
, findOneShowVia
, findOneWhere
, findOneWhereShowVia
, findAll
, findAllWhere
, bind
, use
, using
, chain
, apply
, planMany
, planList
, focusParam
, planEither
, planMaybe
* from other types into Plan
, Op.AssertionFailed
, assert
, planSelect
, planOperation
) where
import Data.Either (partitionEithers)
import qualified Control.Monad.Catch as Catch
import qualified Database.Orville.PostgreSQL.Core as Core
import Database.Orville.PostgreSQL.Internal.MappendCompat ((<>))
import Database.Orville.PostgreSQL.Plan.Many (Many)
import qualified Database.Orville.PostgreSQL.Plan.Many as Many
import qualified Database.Orville.PostgreSQL.Plan.Operation as Op
import qualified Database.Orville.PostgreSQL.Plan.Explanation as Exp
import Database.Orville.PostgreSQL.Select (Select)
|
A ' Plan ' is an executable set of queries that can be executed to load data
from the database , using the results of prior queries as input parameters to
following queries in controlled ways . In particular , the " controlled " aspect
of this allows plans that take a single input to be adapted to take multiple
input parameters in a list without the resulting plan executing N+1 queries .
This restriction means that while query results can be used as input
parameters to later queries , they can not be used to decide to run completely
different queries based on other query results . Allowing this would prevent
the ' Plan ' structure from eliminating N+1 query loops .
Note that during execution queries are never combined across tables to form
joins or subqueries . Queries are still executed in the same sequence as
specified in the plan , just on all the inputs at once rather than in a loop .
If you need to do a join with a plan , you can always construction your
own custom ' Op . Operation ' and use ' planOperation ' to incorporate into a plan .
The @param@ type variable indicates what type of value is expected as input
when the plan is executed .
The @result@ type for a plan indicates what type is produced
when the plan is executed .
The @scope@ type is used internally by Orville to track the plan is currently
executed against a single input or multiple inputs . This type parameter
should never specified as a concrete type in user code , but must be exposed
as a variable to ensure that execute scope is tracked correctly through
usages of ' bind ' .
A 'Plan' is an executable set of queries that can be executed to load data
from the database, using the results of prior queries as input parameters to
following queries in controlled ways. In particular, the "controlled" aspect
of this allows plans that take a single input to be adapted to take multiple
input parameters in a list without the resulting plan executing N+1 queries.
This restriction means that while query results can be used as input
parameters to later queries, they cannot be used to decide to run completely
different queries based on other query results. Allowing this would prevent
the 'Plan' structure from eliminating N+1 query loops.
Note that during execution queries are never combined across tables to form
joins or subqueries. Queries are still executed in the same sequence as
specified in the plan, just on all the inputs at once rather than in a loop.
If you need to do a join with a plan, you can always construct your
own custom 'Op.Operation' and use 'planOperation' to incorporate into a plan.
The @param@ type variable indicates what type of value is expected as input
when the plan is executed.
The @result@ type for a plan indicates what Haskell type is produced
when the plan is executed.
The @scope@ type is used internally by Orville to track the plan is currently
executed against a single input or multiple inputs. This type parameter
should never be specified as a concrete type in user code, but must be exposed
as a variable to ensure that execute scope is tracked correctly through
usages of 'bind'.
-}
-- | The core plan AST. Each constructor either wraps a raw database
-- operation or combines sub-plans. No constructor lets a later query be
-- chosen based on an earlier query's results, which is what allows a
-- plan to be executed against many inputs without N+1 queries.
data Plan scope param result where
  -- Leaf node: a single database operation.
  PlanOp :: Op.Operation param result -> Plan scope param result
  -- Lifts a single-input plan to run against a list of inputs at once.
  -- The rank-2 scope variable keeps 'Planned' values from leaking
  -- between single- and multi-input execution contexts.
  PlanMany :: (forall manyScope. Plan manyScope param result)
           -> Plan scope [param] (Many param result)
  -- Branches on an 'Either' input; each side's plan sees only the
  -- inputs belonging to that side.
  PlanEither :: Plan scope leftParam leftResult
             -> Plan scope rightParam rightResult
             -> Plan scope (Either leftParam rightParam) (Either leftResult rightResult)
  -- Feeds an intermediate result (wrapped as 'Planned') to a function
  -- that produces the remainder of the plan.
  Bind :: Plan scope param a
       -> (Planned scope param a -> Plan scope param result)
       -> Plan scope param result
  -- Lifts a previously-bound 'Planned' value back into a plan.
  Use :: Planned scope param a -> Plan scope param a
  -- A constant result; executes no queries.
  Pure :: a -> Plan scope param a
  -- Applicative application of one plan's result to another's.
  Apply :: Plan scope param (a -> b)
        -> Plan scope param a
        -> Plan scope param b
  -- Sequential composition: the first plan's output becomes the
  -- second plan's input.
  Chain :: Plan scope a b
        -> Plan scope b c
        -> Plan scope a c
-- | Mapping over a plan's result is applicative application of a pure function.
instance Functor (Plan scope param) where
  fmap f = Apply (Pure f)
-- | Plans combine applicatively via the 'Pure' and 'Apply' constructors;
-- both sides of '<*>' are executed against the same input parameter.
instance Applicative (Plan scope param) where
  pure = Pure
  (<*>) = Apply
-- | Internal tag used as the 'scope' variable while a plan is executed
-- against a single input via 'executeOne'. Uninhabited by design.
data Execute
|
' ExecuteMany ' is an internal tag type used by as the ' scope ' variable for
' Plan ' values when executing them against multiple inputs via the
' executeMany ' internal function .
'ExecuteMany' is an internal tag type used by as the 'scope' variable for
'Plan' values when executing them against multiple inputs via the
'executeMany' internal function.
-}
-- | Internal tag used as the 'scope' variable while a plan is executed
-- against multiple inputs via 'executeMany'. Uninhabited by design.
data ExecuteMany
-- | A value (or values) produced mid-plan by 'bind'. The 'scope' index
-- records whether execution is single ('Execute'), multi ('ExecuteMany'),
-- or explanation-only ('Explain'), so the correct shape can be resolved
-- without a runtime check.
data Planned scope param a where
  PlannedOne :: a -> Planned Execute param a
  PlannedMany :: Many k a -> Planned ExecuteMany k a
  -- No value exists while merely explaining a plan.
  PlannedExplain :: Planned Explain param a
-- | Delegates to 'mapPlanned', which dispatches on the scope-specific shape.
instance Functor (Planned scope param) where
  fmap = mapPlanned
-- | Applies a function to the value (or values) held by a 'Planned',
-- preserving its execution scope.
mapPlanned :: (a -> b) -> Planned scope param a -> Planned scope param b
mapPlanned g wrapped =
  case wrapped of
    PlannedOne value ->
      PlannedOne (g value)
    PlannedMany values ->
      PlannedMany (fmap g values)
    PlannedExplain ->
      PlannedExplain
-- | Extracts the single value from a 'Planned' in single-input scope.
-- Total despite the single clause: 'PlannedOne' is the only constructor
-- inhabiting @Planned Execute param a@.
resolveOne :: Planned Execute param a -> a
resolveOne (PlannedOne a) = a
-- | Extracts the 'Many' values from a 'Planned' in multi-input scope.
-- Total despite the single clause: 'PlannedMany' is the only constructor
-- inhabiting @Planned ExecuteMany k a@.
resolveMany :: Planned ExecuteMany k a -> Many k a
resolveMany (PlannedMany as) = as
-- | Incorporates a raw 'Op.Operation' (e.g. a custom join) into a plan.
planOperation :: Op.Operation param result
              -> Plan scope param result
planOperation operation =
  PlanOp operation
|
' planSelect ' allows any ' Select ' query to be incorporated into a
plan . Note that the ' Select ' can not depend on the plan 's input parameters in
this case . If the plan is executed with multiple inputs the same set of all
the results will be used as the results for each of the input parameters .
'planSelect' allows any Orville 'Select' query to be incorporated into a
plan. Note that the 'Select' cannot depend on the plan's input parameters in
this case. If the plan is executed with multiple inputs the same set of all
the results will be used as the results for each of the input parameters.
-}
-- | Incorporates an arbitrary 'Select' into a plan. The query cannot
-- depend on the plan's input; with multiple inputs the same full result
-- set is shared by every input.
planSelect :: Select row -> Plan scope () [row]
planSelect =
  planOperation . Op.findSelect
-- | A plan that simply returns its input parameter; runs no queries.
askParam :: Plan scope param param
askParam =
  planOperation Op.askParam
-- | Looks up at most one row whose given field equals the plan's input,
-- producing 'Nothing' when no row matches.
findMaybeOne :: Ord fieldValue
             => Core.TableDefinition readEntity writeEntity key
             -> Core.FieldDefinition nullability fieldValue
             -> Plan scope fieldValue (Maybe readEntity)
findMaybeOne table field =
  planOperation (Op.findOne table field)
-- | Like 'findMaybeOne', with an extra 'Core.WhereCondition' restricting
-- which rows may match.
findMaybeOneWhere :: Ord fieldValue
                  => Core.TableDefinition readEntity writeEntity key
                  -> Core.FieldDefinition nullability fieldValue
                  -> Core.WhereCondition
                  -> Plan scope fieldValue (Maybe readEntity)
findMaybeOneWhere table field condition =
  planOperation (Op.findOneWhere table field condition)
|
' ' is similar to ' findMaybeOne , but it expects that there will
always be a row found matching the plan 's input value . If no row is found an
' Op . AssertionFailed ' exception will be thrown . This is a useful convenience
when looking up foreign - key associations that are expected to be enforced by
the database itself .
'findOneShowVia' is similar to 'findMaybeOne, but it expects that there will
always be a row found matching the plan's input value. If no row is found an
'Op.AssertionFailed' exception will be thrown. This is a useful convenience
when looking up foreign-key associations that are expected to be enforced by
the database itself.
-}
-- | Like 'findMaybeOne' but asserts a row is found, throwing
-- 'Op.AssertionFailed' otherwise; the given function renders the missing
-- value in the failure message. Useful for enforced foreign keys.
findOneShowVia :: Ord fieldValue
               => (fieldValue -> String)
               -> Core.TableDefinition readEntity writeEntity key
               -> Core.FieldDefinition nullability fieldValue
               -> Plan scope fieldValue readEntity
findOneShowVia renderParam tableDef fieldDef =
  assert (assertFound renderParam tableDef fieldDef) (findMaybeOne tableDef fieldDef)
|
' findOne ' is an alias to ' ' that uses the ' Show ' instance of
' fieldValue ' when producing a failure message in the result the entity can not
be found .
'findOne' is an alias to 'findOneShowVia' that uses the 'Show' instance of
'fieldValue' when producing a failure message in the result the entity cannot
be found.
-}
-- | 'findOne' is 'findOneShowVia' specialized to render the missing
-- value with 'show'. Defined as a direct alias (mirroring 'findOneWhere')
-- rather than re-assembling the assertion by hand, so the two stay
-- consistent.
findOne :: (Show fieldValue, Ord fieldValue)
        => Core.TableDefinition readEntity writeEntity key
        -> Core.FieldDefinition nullability fieldValue
        -> Plan scope fieldValue readEntity
findOne = findOneShowVia show
|
' findOneWhereShowVia ' is similar to ' ' , but allows a ' WhereCondition ' to be
specified to restrict which rows are matched by the database query .
'findOneWhereShowVia' is similar to 'findOneShowVia', but allows a 'WhereCondition' to be
specified to restrict which rows are matched by the database query.
-}
-- | Like 'findOneShowVia', with an extra 'Core.WhereCondition'
-- restricting which rows may match the database query.
findOneWhereShowVia :: Ord fieldValue
                    => (fieldValue -> String)
                    -> Core.TableDefinition readEntity writeEntity key
                    -> Core.FieldDefinition nullability fieldValue
                    -> Core.WhereCondition
                    -> Plan scope fieldValue readEntity
findOneWhereShowVia showParam tableDef fieldDef cond =
  assert
    (assertFound showParam tableDef fieldDef)
    (findMaybeOneWhere tableDef fieldDef cond)
-- | 'findOneWhereShowVia' specialized to render the missing value with
-- 'show' in the failure message.
findOneWhere :: (Show fieldValue, Ord fieldValue)
             => Core.TableDefinition readEntity writeEntity key
             -> Core.FieldDefinition nullability fieldValue
             -> Core.WhereCondition
             -> Plan scope fieldValue readEntity
findOneWhere = findOneWhereShowVia show
|
' ' is an internal helper that checks that row was found where
one was expected .
'assertFound' is an internal helper that checks that row was found where
one was expected.
-}
-- | Internal helper: converts a 'Maybe' lookup result into an 'Either',
-- building a descriptive message when no row was found for the given
-- parameter value.
assertFound :: (fieldValue -> String)
            -> Core.TableDefinition readEntity writeEntity key
            -> Core.FieldDefinition nullability fieldValue
            -> fieldValue
            -> Maybe result
            -> Either String result
assertFound showParam tableDef fieldDef param maybeRecord =
  case maybeRecord of
    Just a ->
      Right a
    Nothing ->
      Left $
        unwords
          -- 'unwords' inserts single spaces between parts, so the "="
          -- must not carry its own padding (previously " = " produced
          -- doubled spaces in the message).
          [ "Failed to find record in table"
          , Core.tableName tableDef
          , "where"
          , Core.fieldName fieldDef
          , "="
          , showParam param
          ]
-- | Loads every row whose given field equals the plan's input value.
findAll :: Ord fieldValue
        => Core.TableDefinition readEntity writeEntity key
        -> Core.FieldDefinition nullability fieldValue
        -> Plan scope fieldValue [readEntity]
findAll table field =
  planOperation (Op.findAll table field)
-- | Like 'findAll', with an extra 'Core.WhereCondition' restricting
-- which rows may match.
findAllWhere :: Ord fieldValue
             => Core.TableDefinition readEntity writeEntity key
             -> Core.FieldDefinition nullability fieldValue
             -> Core.WhereCondition
             -> Plan scope fieldValue [readEntity]
findAllWhere tableDef fieldDef cond =
  planOperation (Op.findAllWhere tableDef fieldDef cond)
-- | Adapts a single-input plan to accept a list of inputs, producing a
-- 'Many' keyed by input. The rank-2 scope keeps 'Planned' values from
-- escaping the adapted plan.
planMany :: (forall manyScope. Plan manyScope param result)
         -> Plan scope [param] (Many param result)
planMany =
  PlanMany
-- | Like 'planMany' but discards the input-to-result association and
-- returns only the list of results.
planList :: (forall scope. Plan scope param result)
         -> Plan listScope [param] [result]
planList plan =
  fmap Many.elems (planMany plan)
-- | Adapts a plan expecting a @b@ to accept an @a@ by applying the
-- given projection to the input first.
focusParam :: (a -> b)
           -> Plan scope b result
           -> Plan scope a result
focusParam focuser =
  chain (fmap focuser askParam)
|
' ' lets you construct a plan that branches by executing a different
plan for the ' Left ' and ' Right ' sides of an ' Either ' value . When used with a
single input parameter only one of the two plans will be used , based on the
input parameter . When used on multiple input parameters , each of the two
plans will be executed only once with all the ' Left ' and ' Right ' values
provided as input parameters respectively .
'planEither' lets you construct a plan that branches by executing a different
plan for the 'Left' and 'Right' sides of an 'Either' value. When used with a
single input parameter only one of the two plans will be used, based on the
input parameter. When used on multiple input parameters, each of the two
plans will be executed only once with all the 'Left' and 'Right' values
provided as input parameters respectively.
-}
-- | Branches a plan on an 'Either' input. With multiple inputs each
-- side's plan is executed exactly once, over all the 'Left' (resp.
-- 'Right') values collectively.
planEither :: Plan scope leftParam leftResult
           -> Plan scope rightParam rightResult
           -> Plan scope (Either leftParam rightParam) (Either leftResult rightResult)
planEither =
  PlanEither
|
' planMaybe ' lifts a plan so both its param and result become ' Maybe 's . This is
useful when modifying an existing plan to deal with optionality . Writing just
one plan can then easily produce both the required and optional versions .
'planMaybe' lifts a plan so both its param and result become 'Maybe's. This is
useful when modifying an existing plan to deal with optionality. Writing just
one plan can then easily produce both the required and optional versions.
-}
-- | Lifts a plan so both its input and result become 'Maybe's:
-- 'Nothing' inputs short-circuit to a 'Nothing' result via the pure
-- left branch; 'Just' inputs run the wrapped plan.
planMaybe :: Plan scope a b -> Plan scope (Maybe a) (Maybe b)
planMaybe plan =
  focusParam (maybe (Left ()) Right) $
    either id id <$> planEither (pure Nothing) (Just <$> plan)
|
' bind ' gives access to the results of a plan to use as input values to future
plans . The plan result is given the input parameter to the provided function ,
which must produce the remaining ' Plan ' to be executed . The value will be
wrapped in the ' Planned ' type , which may represent either a result or
multiple results , depending on whether one plan is currently be executed with
one and multiple input parameters . This ensures that the caller produces only
a single remaining ' Plan ' to be used for all inputs when there are multiple
to eliminate the need to possibly run different queries for different inputs
( which would an introduce N+1 query execution ) .
The ' Planned ' value ( or values ) provided by ' bind ' have actually been
retrieved from the database , so the value can be used multiple times when
constructing the remaining ' Plan ' without fear of causing the query to run
multiple times .
Also see ' use ' for how to lift a ' Planned ' value back into a ' Plan ' .
'bind' gives access to the results of a plan to use as input values to future
plans. The plan result is given the input parameter to the provided function,
which must produce the remaining 'Plan' to be executed. The value will be
wrapped in the 'Planned' type, which may represent either a result or
multiple results, depending on whether the plan is currently being executed with
one and multiple input parameters. This ensures that the caller produces only
a single remaining 'Plan' to be used for all inputs when there are multiple
to eliminate the need to possibly run different queries for different inputs
(which would an introduce N+1 query execution).
The 'Planned' value (or values) provided by 'bind' have actually been
retrieved from the database, so the value can be used multiple times when
constructing the remaining 'Plan' without fear of causing the query to run
multiple times.
Also see 'use' for how to lift a 'Planned' value back into a 'Plan'.
-}
-- | Exposes a plan's (already-fetched) result to the function building
-- the rest of the plan, wrapped as 'Planned' so only one continuation
-- plan is produced even under multi-input execution.
bind :: Plan scope param a
     -> (Planned scope param a -> Plan scope param result)
     -> Plan scope param result
bind =
  Bind
-- | Lifts a 'Planned' value obtained from 'bind' back into a plan.
use :: Planned scope param a -> Plan scope param a
use =
  Use
-- | Runs a plan with a previously-bound 'Planned' value as its input,
-- ignoring the outer plan's own input parameter.
using :: Planned scope param a
      -> Plan scope a b
      -> Plan scope param b
using planned =
  chain (use planned)
-- | Named synonym for '<*>' on plans.
apply :: Plan scope param (a -> b)
      -> Plan scope param a
      -> Plan scope param b
apply =
  Apply
|
' chain ' connects the output of one plan to the input of another to form a
larger plan that will execute the first followed by the second .
'chain' connects the output of one plan to the input of another to form a
larger plan that will execute the first followed by the second.
-}
-- | Sequences two plans: the first plan's output becomes the second
-- plan's input.
chain :: Plan scope a b
      -> Plan scope b c
      -> Plan scope a c
chain =
  Chain
|
' assert ' allows you to make an assertion about a plans result that will throw
an ' Op . AssertionFailed ' failed exception during execution if it proves to be
false . The first parameter is the assertion function , which should return
either an error message to be given in the exception or the value to be used
as the plan 's result .
'assert' allows you to make an assertion about a plans result that will throw
an 'Op.AssertionFailed' failed exception during execution if it proves to be
false. The first parameter is the assertion function, which should return
either an error message to be given in the exception or the value to be used
as the plan's result.
-}
-- | Attaches an assertion to a plan's result. The assertion sees both
-- the input parameter and the result; a 'Left' message causes
-- 'Op.AssertionFailed' to be thrown during execution.
assert :: (param -> a -> Either String b)
       -> Plan scope param a
       -> Plan scope param b
assert assertion aPlan =
  chain checkedPlan (PlanOp Op.assertRight)
  where
    checkedPlan =
      assertion
        <$> askParam
        <*> aPlan
|
' execute ' accepts the input parameter ( or parameters ) expected by a ' Plan '
and runs the plan to completion , either throwing an ' Op . AssertionFailed '
exception in the monad ' m ' or producing the expected result .
If you have a plan that takes one input and want to provide a list of
input , use ' planMany ' to adapt it to a multiple - input plan before calling
' execute ' .
'execute' accepts the input parameter (or parameters) expected by a 'Plan'
and runs the plan to completion, either throwing an 'Op.AssertionFailed'
exception in the monad 'm' or producing the expected result.
If you have a plan that takes one input and want to provide a list of
input, use 'planMany' to adapt it to a multiple-input plan before calling
'execute'.
-}
-- | Runs a plan against a single input in the Orville monad, throwing
-- 'Op.AssertionFailed' on failed assertions. Use 'planMany' first to
-- run against a list of inputs. Currently a direct alias for
-- 'executeOne'.
execute :: Core.MonadOrville conn m
        => Plan Execute param result
        -> param
        -> m result
execute =
  executeOne
-- | Interprets a plan against a single input parameter, throwing any
-- operation error via 'Catch.throwM'.
executeOne :: Core.MonadOrville conn m
           => Plan Execute param result
           -> param
           -> m result
executeOne plan param =
  case plan of
    PlanOp operation -> do
      opResult <- Op.executeOperationOne operation param
      case opResult of
        Left err ->
          Catch.throwM err
        Right result ->
          pure result
    -- A nested multi plan: the single input is itself the input list.
    PlanMany manyPlan ->
      executeMany manyPlan param
    PlanEither leftPlan rightPlan ->
      case param of
        Left leftParam ->
          Left <$> executeOne leftPlan leftParam
        Right rightParam ->
          Right <$> executeOne rightPlan rightParam
    -- Run the intermediate plan, then the continuation with the
    -- realized value wrapped as 'PlannedOne'.
    Bind intermPlan continue -> do
      interm <- executeOne intermPlan param
      executeOne
        (continue (PlannedOne interm))
        param
    Use planned ->
      pure . resolveOne $ planned
    Pure a ->
      pure a
    -- Both sides run against the same input parameter.
    Apply planF planA ->
      executeOne planF param <*> executeOne planA param
    -- The first plan's output feeds the second plan as its input.
    Chain planAB planBC -> do
      b <- executeOne planAB param
      executeOne planBC b
-- | Interprets a plan against a list of inputs at once, producing a
-- 'Many' mapping each input to its result. This is what keeps a
-- 'planMany'-adapted plan from running N+1 queries.
executeMany :: Core.MonadOrville conn m
            => Plan ExecuteMany param result
            -> [param]
            -> m (Many.Many param result)
executeMany plan params =
  case plan of
    PlanOp operation -> do
      opResult <- Op.executeOperationMany operation params
      case opResult of
        Left err ->
          Catch.throwM $ err
        Right results ->
          pure results
    -- Nested multi plan: flatten all the input lists, run once over the
    -- union, then restrict the shared results back to each input list.
    PlanMany manyPlan -> do
      let
        flatParams = concat params
      allResults <- executeMany manyPlan flatParams
      let
        restrictResults subParams =
          Many.fromKeys subParams (\k -> Many.lookup k allResults)
      pure $ Many.fromKeys params (Right . restrictResults)
    -- Partition the inputs and run each branch once over its own side,
    -- then reassemble results keyed by the original 'Either' inputs.
    PlanEither leftPlan rightPlan -> do
      let
        (leftParams, rightParams) = partitionEithers params
      leftResults <- executeMany leftPlan leftParams
      rightResults <- executeMany rightPlan rightParams
      let
        eitherResult eitherK =
          case eitherK of
            Left k ->
              Left <$> Many.lookup k leftResults
            Right k ->
              Right <$> Many.lookup k rightResults
      pure $ Many.fromKeys params eitherResult
    -- Run the intermediate plan over all inputs, then the continuation
    -- once with every result wrapped as 'PlannedMany'.
    Bind intermPlan continue -> do
      interms <- executeMany intermPlan params
      executeMany
        (continue (PlannedMany interms))
        params
    Use planned ->
      pure . resolveMany $ planned
    -- A constant result shared by every input key.
    Pure a ->
      pure $ Many.fromKeys params (const (Right a))
    Apply planF planA -> do
      manyFs <- executeMany planF params
      manyAs <- executeMany planA params
      pure (Many.apply manyFs manyAs)
    -- Feed the first plan's result values in as the second plan's
    -- inputs, then compose the keyed results back to the original keys.
    Chain planAB planBC -> do
      bs <- executeMany planAB params
      cs <- executeMany planBC (Many.elems bs)
      pure $ Many.compose cs bs
-- | Tag used as the 'scope' variable when explaining a plan rather than
-- executing it; tracks whether the single- or multi-input form of each
-- operation should be explained.
data Explain
  = ExplainOne
  | ExplainMany
-- | Produces a textual description of the queries a plan would run,
-- starting in single-input mode, without touching the database.
explain :: Plan Explain param result -> [String]
explain plan =
  Exp.explanationSteps $
    explainPlan ExplainOne plan
-- | Walks the plan AST collecting each operation's explanation,
-- switching to 'ExplainMany' under a 'PlanMany' node. Redundant @do@
-- blocks around these pure case branches have been removed (hlint
-- "Redundant do").
explainPlan :: Explain
            -> Plan Explain param result
            -> Exp.Explanation
explainPlan mult plan =
  case plan of
    PlanOp operation ->
      case mult of
        ExplainOne ->
          Op.explainOperationOne operation
        ExplainMany ->
          Op.explainOperationMany operation
    PlanMany manyPlan ->
      explainPlan ExplainMany manyPlan
    PlanEither leftPlan rightPlan ->
      explainPlan mult leftPlan <> explainPlan mult rightPlan
    -- 'PlannedExplain' stands in for the unavailable bind result.
    Bind intermPlan continue ->
      let
        nextPlan = continue PlannedExplain
      in
        explainPlan mult intermPlan <> explainPlan mult nextPlan
    Use _ ->
      Exp.noExplanation
    Pure _ ->
      Exp.noExplanation
    Apply planF planA ->
      explainPlan mult planF <> explainPlan mult planA
    Chain planAB planBC ->
      explainPlan mult planAB <> explainPlan mult planBC
|
8244b3c8047b022c73b20debe3fb2482fe70ec2df390a9330812750f4bfd291d | basho-labs/riak_explorer | re_wm.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2015 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(re_wm).
-export([resources/0,
routes/0,
dispatch/0,
dispatch/1,
base_route/0]).
-export([rd_url/1,
rd_accepts/2,
add_content/2,
add_error/2,
rd_content/2,
rd_cluster_exists/1,
rd_cluster/1,
rd_node_exists/1,
rd_node/1,
maybe_atomize/1,
maybe_to_list/1,
url_decode/1
]).
-export([init/1,
service_available/2,
allowed_methods/2,
content_types_provided/2,
content_types_accepted/2,
resource_exists/2,
provide_content/2,
delete_resource/2,
process_post/2,
provide_text_content/2,
provide_static_content/2,
accept_content/2,
post_is_create/2,
create_path/2,
last_modified/2]).
-include_lib("webmachine/include/webmachine.hrl").
-include("re_wm.hrl").
-record(ctx, {
proxy :: {module(), atom()} | undefined,
route :: route()
}).
%%%===================================================================
%%% API
%%%===================================================================
%%% Routing
%% @doc Resource modules that contribute routes. Routes are matched in
%% this order (see get_route/3), so the static resource comes last as
%% the catch-all.
-spec resources() -> [module()].
resources() ->
    [
     re_wm_explore,
     re_wm_control,
     re_wm_proxy,
     re_wm_static
    ].
%% @doc Collects the route definitions contributed by every resource
%% module, preserving the order of resources().
-spec routes() -> [route()].
routes() ->
    lists:append([Resource:routes() || Resource <- resources()]).
%% @doc Builds the webmachine dispatch table with no extra init args.
-spec dispatch() -> [{[string() | atom], module(), [term()]}].
dispatch() ->
    dispatch([]).

%% @doc Builds the webmachine dispatch table, appending Args to each
%% route entry's init arguments (e.g. a proxy callback).
-spec dispatch([term()]) -> [{[string() | atom], module(), [term()]}].
dispatch(Args) ->
    WmRoutes = build_wm_routes(base_route(), routes(), []),
    [ {R, M, A ++ Args} || {R, M, A} <- WmRoutes ].
%% @doc Path prefix under which all routes are mounted: empty when
%% running standalone, ["admin"] when embedded in a Riak node.
-spec base_route() -> string().
base_route() ->
    case riak_explorer:is_riak() of
        false -> [];
        true -> ["admin"]
    end.
%%% Utility
%% @doc Absolute base URL for this service, including the base route
%% prefix (if any) and a trailing slash.
-spec rd_url(#wm_reqdata{}) -> string().
rd_url(ReqData) ->
    BaseUrl = wrq:base_uri(ReqData),
    case base_route() of
        [] ->
            BaseUrl ++ "/";
        [R] ->
            BaseUrl ++ "/" ++ R ++ "/"
    end.
%% @doc Returns true when the request's Accept header is absent or
%% contains the given content type as a substring.
-spec rd_accepts(string(), #wm_reqdata{}) -> boolean().
rd_accepts(ContentType, ReqData) ->
    case wrq:get_req_header("Accept", ReqData) of
        undefined ->
            true;
        AcceptHeader ->
            string:str(AcceptHeader, ContentType) > 0
    end.
%% @doc Appends a JSON body keyed by the final request-path segment, or
%% maps error results to HTTP halts (404 for not_found, 500 otherwise).
-spec add_content(term(), #wm_reqdata{}) -> {boolean(), #wm_reqdata{}}.
add_content({error, not_found}, ReqData) ->
    {{halt, 404}, ReqData};
add_content({error, Reason}, ReqData) ->
    {{halt, 500}, add_error(Reason, ReqData)};
add_content(ok, ReqData) ->
    {true, ReqData};
add_content(Content, ReqData) ->
    %% lists:last/1 replaces the hand-rolled lists:nth(length(T), T).
    Last = lists:last(string:tokens(wrq:path(ReqData), "/")),
    {true, wrq:append_to_response_body(mochijson2:encode([{list_to_binary(Last), Content}]), ReqData)}.
%% @doc Appends a JSON error object ({"error": formatted term}) to the
%% response body.
-spec add_error(term(), #wm_reqdata{}) -> #wm_reqdata{}.
add_error(Error, ReqData) ->
    wrq:append_to_response_body(mochijson2:encode([{error, list_to_binary(io_lib:format("~p", [Error]))}]), ReqData).
%% @doc Wraps content as a proplist keyed by the final request-path
%% segment (for later encoding), or maps error results to HTTP halts.
-spec rd_content(term(), #wm_reqdata{}) ->
                        {[{binary(), term()}], #wm_reqdata{}}.
rd_content({error, not_found}, ReqData) ->
    {{halt, 404}, ReqData};
rd_content({error, Reason}, ReqData) ->
    {{halt, 500}, add_error(Reason, ReqData)};
rd_content(Content, ReqData) ->
    %% lists:last/1 replaces the hand-rolled lists:nth(length(T), T).
    Last = lists:last(string:tokens(wrq:path(ReqData), "/")),
    {[{list_to_binary(Last), Content}], ReqData}.
%% @doc True when the cluster named in the request path is known to
%% re_cluster.
-spec rd_cluster_exists(#wm_reqdata{}) -> {boolean(), #wm_reqdata{}}.
rd_cluster_exists(ReqData) ->
    C = rd_cluster(ReqData),
    {re_cluster:exists(C), ReqData}.

%% @doc Cluster name from the request path, atomized (undefined when the
%% path has no cluster segment).
-spec rd_cluster(#wm_reqdata{}) -> re_cluster:re_cluster().
rd_cluster(ReqData) ->
    maybe_atomize(wrq:path_info(cluster, ReqData)).
%% @doc True when both the cluster and the node named in the request
%% path exist; false as soon as either lookup fails.
-spec rd_node_exists(#wm_reqdata{}) -> {boolean(), #wm_reqdata{}}.
rd_node_exists(ReqData) ->
    case rd_cluster_exists(ReqData) of
        {true,_} ->
            case rd_node(ReqData) of
                {error, not_found} ->
                    {false, ReqData};
                N ->
                    {re_node:exists(N), ReqData}
            end;
        _ ->
            {false, ReqData}
    end.
%% @doc Node name from the request path, URL-decoded and atomized. When
%% the path has no node segment, falls back to the named cluster's riak
%% node (which may be {error, not_found}).
-spec rd_node(#wm_reqdata{}) -> {error, not_found} | re_node:re_node().
rd_node(ReqData) ->
    N = url_decode(wrq:path_info(node, ReqData)),
    case maybe_atomize(N) of
        undefined ->
            %% rd_cluster/1 already atomizes its result, so the second
            %% maybe_atomize/1 call the original made was redundant.
            re_cluster:riak_node(rd_cluster(ReqData));
        Node ->
            Node
    end.
%% @doc Coerces an atom to a string; strings pass through unchanged.
maybe_to_list(Data) when is_list(Data) -> Data;
maybe_to_list(Data) when is_atom(Data) -> atom_to_list(Data).

%% @doc Coerces a string to an atom; atoms pass through unchanged.
%% NOTE(review): list_to_atom/1 on request-derived strings can grow the
%% atom table on untrusted input -- confirm inputs are bounded.
maybe_atomize(Data) when is_list(Data) -> list_to_atom(Data);
maybe_atomize(Data) when is_atom(Data) -> Data.

%% @doc Reverses percent-encoding of "@" ("%40") in node names.
url_decode(Data) ->
    re:replace(maybe_to_list(Data), "%40", "@", [{return, list}]).
%%%===================================================================
Webmachine Callbacks
%%%===================================================================
%% @doc Webmachine init callback: records the optional {Module, Function}
%% proxy callback from the dispatch args in the request context.
init(Args) ->
    Proxy = proplists:get_value(proxy, Args),
    Ctx = case Proxy of
              undefined ->
                  #ctx{};
              {Mod, Fun} ->
                  #ctx{proxy = {Mod, Fun}}
          end,
    {ok, Ctx}.
%% @doc Webmachine entry point: resolves the matching route (falling
%% back to the static resource when none matches), evaluates its
%% availability check, and stores the route in the context for the
%% later callbacks.
service_available(ReqData, Ctx) ->
    Route = case get_route(base_route(), routes(), ReqData) of
                #route{}=R ->
                    R;
                _ ->
                    %% No route matched: serve via the static resource.
                    [R] = re_wm_static:routes(),
                    R
            end,
    {Available, ReqData1} =
        case Route#route.available of
            {M, F} -> maybe_proxy_request(M, F, ReqData, Ctx);
            Bool -> {Bool, ReqData}
        end,
    {Available, ReqData1, Ctx#ctx{route = Route}}.
%% @doc HTTP methods allowed for the resolved route.
allowed_methods(ReqData, Ctx = #ctx{route = Route}) ->
    {Route#route.methods, ReqData, Ctx}.

%% @doc Content types provided; either a static list on the route or a
%% {Module, Function} callback (possibly proxied) computing them.
content_types_provided(ReqData, Ctx = #ctx{route = Route}) ->
    case Route#route.provides of
        {M, F} ->
            {CTs, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
            {CTs, ReqData1, Ctx};
        Provides ->
            {Provides, ReqData, Ctx}
    end.

%% @doc Content types accepted, taken directly from the route.
content_types_accepted(ReqData, Ctx = #ctx{route = Route}) ->
    {Route#route.accepts, ReqData, Ctx}.
%% @doc Existence check for the route: either a {Module, Function}
%% callback (possibly proxied) or a boolean literal.
resource_exists(ReqData, Ctx = #ctx{route = #route{exists = {M, F}}}) ->
    {Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    {Success, ReqData1, Ctx};
resource_exists(ReqData, Ctx = #ctx{route = #route{exists = Exists}})
  when is_boolean(Exists) ->
    {Exists, ReqData, Ctx}.

%% @doc Delegates DELETE handling to the route's delete callback.
delete_resource(ReqData, Ctx = #ctx{route = #route{delete = {M, F}}}) ->
    {Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    {Success, ReqData1, Ctx}.
%% @doc Renders the route's content callback result as JSON, passing
%% {halt, _} results through untouched.
provide_content(ReqData, Ctx = #ctx{route = #route{content = {M, F}}}) ->
    case maybe_proxy_request(M, F, ReqData, Ctx) of
        {{halt,_}=Body, ReqData1} ->
            {Body, ReqData1, Ctx};
        {Body, ReqData1} ->
            {mochijson2:encode(Body), ReqData1, Ctx}
    end.
%% @doc Renders a result as plain text: binaries become strings, and
%% proplist bodies carrying exactly one of lines/keys/buckets are joined
%% into newline-separated text; anything else is JSON-encoded.
provide_text_content(ReqData, Ctx = #ctx{route = #route{content = {M, F}}}) ->
    {Body, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    case Body of
        %% NOTE(review): ReqData1 is already bound here, so this clause
        %% only matches when Body is a 2-tuple whose second element
        %% equals that exact value; it looks unreachable given Body is
        %% the tuple's first element above -- confirm intent.
        {{halt,_}=B, ReqData1} ->
            {B, ReqData1, Ctx};
        B when is_binary(B) ->
            {binary_to_list(B), ReqData1, Ctx};
        [{_,Props}]=B ->
            %% TODO: Improve
            %% A body where more than one of lines/keys/buckets is set
            %% falls through no clause and crashes with case_clause.
            case {proplists:get_value(lines, Props),
                  proplists:get_value(keys, Props),
                  proplists:get_value(buckets, Props)} of
                {undefined, undefined, undefined} ->
                    {mochijson2:encode(B), ReqData1, Ctx};
                {Values, undefined, undefined} ->
                    Lines = [binary_to_list(L) || L <- Values],
                    {string:join(Lines, io_lib:nl()), ReqData1, Ctx};
                {undefined, Values, undefined} ->
                    Lines = [binary_to_list(L) || L <- Values],
                    {string:join(Lines, io_lib:nl()), ReqData1, Ctx};
                {undefined, undefined, Values} ->
                    Lines = [binary_to_list(L) || L <- Values],
                    {string:join(Lines, io_lib:nl()), ReqData1, Ctx}
            end
    end.
%% @doc Returns the content callback's result verbatim (static assets).
provide_static_content(ReqData, Ctx = #ctx{route = #route{content = {M, F}}}) ->
    {Body, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    {Body, ReqData1, Ctx}.

%% @doc Delegates request-body handling to the route's accept callback;
%% routes without one reject the request.
accept_content(ReqData, Ctx = #ctx{route = #route{accept = {M, F}}}) ->
    {Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    {Success, ReqData1, Ctx};
accept_content(ReqData, Ctx = #ctx{route = #route{accept = undefined}}) ->
    {false, ReqData, Ctx}.

%% @doc POST handling reuses the route's accept callback.
process_post(ReqData, Ctx = #ctx{route = #route{accept = {M, F}}}) ->
    {Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    {Success, ReqData1, Ctx}.
%% @doc Whether POST creates a new resource, as declared on the route.
post_is_create(ReqData, Ctx = #ctx{route = #route{post_create = PostCreate}}) ->
    {PostCreate, ReqData, Ctx}.

%% @doc New-resource path, computed by the route's post_path callback.
create_path(ReqData, Ctx = #ctx{route = #route{post_path = {M, F}}}) ->
    {Path, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    {Path, ReqData1, Ctx}.

%% @doc Last-modified timestamp: undefined, or the route's callback result.
last_modified(ReqData, Ctx = #ctx{route = #route{last_modified = undefined}}) ->
    {undefined, ReqData, Ctx};
last_modified(ReqData, Ctx = #ctx{route = #route{last_modified = {M, F}}}) ->
    {LM, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    {LM, ReqData1, Ctx}.
%% ====================================================================
%% Private
%% ====================================================================
%% @doc Returns the first route whose expanded base/path combination
%% matches the request path, or undefined when none match. Routes with
%% an empty base (or empty path) are matched on the non-empty part only.
get_route(_, [], _ReqData) ->
    undefined;
get_route(BaseRoute, [Route=#route{base=[],path=Paths} | Rest], ReqData) ->
    case get_route_path(BaseRoute, [], Paths, Route, ReqData) of
        undefined ->
            get_route(BaseRoute, Rest, ReqData);
        R -> R
    end;
get_route(BaseRoute, [Route=#route{base=Bases,path=[]} | Rest], ReqData) ->
    case get_route_path(BaseRoute, [], Bases, Route, ReqData) of
        undefined ->
            get_route(BaseRoute, Rest, ReqData);
        R -> R
    end;
get_route(BaseRoute, [Route=#route{base=Bases,path=Paths} | Rest], ReqData) ->
    case get_route_base(BaseRoute, Bases, Paths, Route, ReqData) of
        undefined ->
            get_route(BaseRoute, Rest, ReqData);
        R -> R
    end.
%% @doc Tries each base prefix of a route in turn against the request
%% path; undefined when none of them (with any path) match.
get_route_base(_, [], _, _, _) ->
    undefined;
get_route_base(BaseRoute, [Base|Rest], Paths, Route, ReqData) ->
    case get_route_path(BaseRoute, Base, Paths, Route, ReqData) of
        undefined ->
            get_route_base(BaseRoute, Rest, Paths, Route, ReqData);
        R -> R
    end.
%% @doc Tries each path template of a route under a fixed base; a route
%% matches when its expanded template equals the tokenized request path.
get_route_path(_, _, [], _, _) ->
    undefined;
get_route_path(BaseRoute, Base, [Path|Rest], Route, ReqData) ->
    ReqPath = string:tokens(wrq:path(ReqData), "/"),
    case expand_path(BaseRoute ++ Base ++ Path, ReqData, []) of
        ReqPath ->
            Route;
        _ ->
            get_route_path(BaseRoute, Base, Rest, Route, ReqData)
    end.
%% @doc Expands a route template against the actual request: string
%% segments pass through, the '*' wildcard absorbs all remaining request
%% tokens (or fails the match when the template is already longer than
%% the request), and atom segments are replaced by their bound path-info
%% values.
expand_path([], _ReqData, Acc) ->
    lists:reverse(Acc);
expand_path([Part|Rest], ReqData, Acc) when is_list(Part) ->
    expand_path(Rest, ReqData, [Part | Acc]);
expand_path(['*'|Rest], ReqData, Acc) ->
    Tokens = string:tokens(wrq:path(ReqData), "/"),
    case length(Acc) > length(Tokens) of
        true ->
            undefined;
        false ->
            expand_path(Rest, ReqData, lists:reverse(lists:nthtail(length(Acc), Tokens)) ++ Acc)
    end;
expand_path([Part|Rest], ReqData, Acc) when is_atom(Part) ->
    expand_path(Rest, ReqData, [wrq:path_info(Part, ReqData) | Acc]).
%% @doc Expands every route's base x path combinations into flat
%% webmachine dispatch entries {PathSpec, ?MODULE, []}.
build_wm_routes(_BaseRoute, [], Acc) ->
    lists:reverse(lists:flatten(Acc));
build_wm_routes(BaseRoute, [#route{base = [], path = Paths} | Rest], Acc) ->
    build_wm_routes(BaseRoute, Rest, [build_wm_route(BaseRoute, [], Paths, []) | Acc]);
build_wm_routes(BaseRoute, [#route{base = Bases, path = []} | Rest], Acc) ->
    build_wm_routes(BaseRoute, Rest, [build_wm_route(BaseRoute, [], Bases, []) | Acc]);
build_wm_routes(BaseRoute, [#route{base = Bases, path = Paths} | Rest], Acc) ->
    build_wm_routes(BaseRoute, Rest, [build_wm_routes(BaseRoute, Bases, Paths, []) | Acc]).

%% @doc Cross product helper: one dispatch entry per base.
build_wm_routes(_BaseRoute, [], _, Acc) ->
    Acc;
build_wm_routes(BaseRoute, [Base|Rest], Paths, Acc) ->
    build_wm_routes(BaseRoute, Rest, Paths, [build_wm_route(BaseRoute, Base, Paths, [])|Acc]).

%% @doc Cross product helper: one dispatch entry per path under a base.
build_wm_route(_, _, [], Acc) ->
    Acc;
build_wm_route(BaseRoute, Base, [Path|Rest], Acc) ->
    build_wm_route(BaseRoute, Base, Rest, [{BaseRoute ++ Base ++ Path, ?MODULE, []}|Acc]).
%% @doc Invokes M:F(ReqData) directly when no proxy is configured;
%% otherwise consults the proxy callback, which may answer itself
%% ({ok, Result}), forward to the local implementation, or redirect the
%% request to another location.
maybe_proxy_request(M, F, ReqData, #ctx{proxy = undefined}) ->
    M:F(ReqData);
maybe_proxy_request(M, F, ReqData, #ctx{proxy = {PM, PF}}) ->
    case PM:PF(M, F, ReqData) of
        {ok, Result} ->
            Result;
        {forward, local} ->
            M:F(ReqData);
        {forward, {location, Location, Path, NewPath}} ->
            re_wm_proxy:send_proxy_request(Location, Path, NewPath, ReqData)
    end.
| null | https://raw.githubusercontent.com/basho-labs/riak_explorer/3d06ca2b14a57ed2850e56efc280d7e5b5f4bbbc/src/re_wm.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
===================================================================
API
===================================================================
Routing
Utility
===================================================================
===================================================================
TODO: Improve
====================================================================
Private
==================================================================== | Copyright ( c ) 2015 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(re_wm).
-export([resources/0,
routes/0,
dispatch/0,
dispatch/1,
base_route/0]).
-export([rd_url/1,
rd_accepts/2,
add_content/2,
add_error/2,
rd_content/2,
rd_cluster_exists/1,
rd_cluster/1,
rd_node_exists/1,
rd_node/1,
maybe_atomize/1,
maybe_to_list/1,
url_decode/1
]).
-export([init/1,
service_available/2,
allowed_methods/2,
content_types_provided/2,
content_types_accepted/2,
resource_exists/2,
provide_content/2,
delete_resource/2,
process_post/2,
provide_text_content/2,
provide_static_content/2,
accept_content/2,
post_is_create/2,
create_path/2,
last_modified/2]).
-include_lib("webmachine/include/webmachine.hrl").
-include("re_wm.hrl").
-record(ctx, {
proxy :: {module(), atom()} | undefined,
route :: route()
}).
-spec resources() -> [module()].
resources() ->
[
re_wm_explore,
re_wm_control,
re_wm_proxy,
re_wm_static
].
-spec routes() -> [route()].
routes() ->
routes(resources(), []).
-spec routes([module()], [route()]) -> [route()].
routes([], Routes) ->
Routes;
routes([Resource|Rest], Routes) ->
routes(Rest, Routes ++ Resource:routes()).
-spec dispatch() -> [{[string() | atom], module(), [term()]}].
dispatch() ->
dispatch([]).
-spec dispatch([term()]) -> [{[string() | atom], module(), [term()]}].
dispatch(Args) ->
WmRoutes = build_wm_routes(base_route(), routes(), []),
[ {R, M, A ++ Args} || {R, M, A} <- WmRoutes ].
-spec base_route() -> string().
base_route() ->
case riak_explorer:is_riak() of
false -> [];
true -> ["admin"]
end.
-spec rd_url(#wm_reqdata{}) -> string().
rd_url(ReqData) ->
BaseUrl = wrq:base_uri(ReqData),
case base_route() of
[] ->
BaseUrl ++ "/";
[R] ->
BaseUrl ++ "/" ++ R ++ "/"
end.
-spec rd_accepts(string(), #wm_reqdata{}) -> boolean().
rd_accepts(CT, ReqData) ->
case wrq:get_req_header("Accept", ReqData) of
undefined ->
true;
Accept ->
string:str(Accept,CT) > 0
end.
-spec add_content(term(), #wm_reqdata{}) -> {boolean(), #wm_reqdata{}}.
add_content({error, not_found}, ReqData) ->
{{halt, 404}, ReqData};
add_content({error, Reason}, ReqData) ->
{{halt, 500}, add_error(Reason, ReqData)};
add_content(ok, ReqData) ->
{true, ReqData};
add_content(Content, ReqData) ->
Tokens = string:tokens(wrq:path(ReqData), "/"),
Last = lists:nth(length(Tokens), Tokens),
{true, wrq:append_to_response_body(mochijson2:encode([{list_to_binary(Last), Content}]), ReqData)}.
-spec add_error(term(), #wm_reqdata{}) -> #wm_reqdata{}.
add_error(Error, ReqData) ->
wrq:append_to_response_body(mochijson2:encode([{error, list_to_binary(io_lib:format("~p", [Error]))}]), ReqData).
-spec rd_content(term(), #wm_reqdata{}) ->
{[{binary(), term()}], #wm_reqdata{}}.
rd_content({error, not_found}, ReqData) ->
{{halt, 404}, ReqData};
rd_content({error, Reason}, ReqData) ->
{{halt, 500}, add_error(Reason, ReqData)};
rd_content(Content, ReqData) ->
Tokens = string:tokens(wrq:path(ReqData), "/"),
Last = lists:nth(length(Tokens), Tokens),
{[{list_to_binary(Last), Content}], ReqData}.
-spec rd_cluster_exists(#wm_reqdata{}) -> {boolean(), #wm_reqdata{}}.
rd_cluster_exists(ReqData) ->
C = rd_cluster(ReqData),
{re_cluster:exists(C), ReqData}.
-spec rd_cluster(#wm_reqdata{}) -> re_cluster:re_cluster().
rd_cluster(ReqData) ->
maybe_atomize(wrq:path_info(cluster, ReqData)).
-spec rd_node_exists(#wm_reqdata{}) -> {boolean(), #wm_reqdata{}}.
rd_node_exists(ReqData) ->
case rd_cluster_exists(ReqData) of
{true,_} ->
case rd_node(ReqData) of
{error, not_found} ->
{false, ReqData};
N ->
{re_node:exists(N), ReqData}
end;
_ ->
{false, ReqData}
end.
-spec rd_node(#wm_reqdata{}) -> {error, not_found} | re_node:re_node().
rd_node(ReqData) ->
N = url_decode(wrq:path_info(node, ReqData)),
N1 = maybe_atomize(N),
case N1 of
undefined ->
C = rd_cluster(ReqData),
re_cluster:riak_node(maybe_atomize(C));
N2 ->
N2
end.
maybe_to_list(Data) when is_list(Data) -> Data;
maybe_to_list(Data) when is_atom(Data) -> atom_to_list(Data).
maybe_atomize(Data) when is_list(Data) -> list_to_atom(Data);
maybe_atomize(Data) when is_atom(Data) -> Data.
url_decode(Data) ->
re:replace(maybe_to_list(Data), "%40", "@", [{return, list}]).
Webmachine Callbacks
%% webmachine init/1: stash the optional {Module, Function} proxy
%% callback from the dispatch args into the request context.
init(Args) ->
Ctx =
case proplists:get_value(proxy, Args) of
undefined ->
#ctx{};
{PM, PF} ->
#ctx{proxy = {PM, PF}}
end,
{ok, Ctx}.
%% Pick the route matching the request path, falling back to the single
%% static-file route. The route's 'available' field is either a boolean
%% or an {M, F} callback (possibly proxied). The chosen route is cached
%% in the context for all later webmachine callbacks.
service_available(ReqData, Ctx) ->
Route = case get_route(base_route(), routes(), ReqData) of
#route{}=R ->
R;
_ ->
[R] = re_wm_static:routes(),
R
end,
{Available, ReqData1} =
case Route#route.available of
{M, F} -> maybe_proxy_request(M, F, ReqData, Ctx);
Bool -> {Bool, ReqData}
end,
{Available, ReqData1, Ctx#ctx{route = Route}}.
%% Methods come straight from the route definition.
allowed_methods(ReqData, Ctx = #ctx{route = Route}) ->
{Route#route.methods, ReqData, Ctx}.
%% Provided content types are either a static list on the route or the
%% result of an {M, F} callback (possibly proxied).
content_types_provided(ReqData, Ctx = #ctx{route = Route}) ->
case Route#route.provides of
{M, F} ->
{CTs, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{CTs, ReqData1, Ctx};
Provides ->
{Provides, ReqData, Ctx}
end.
%% Accepted content types are always static on the route.
content_types_accepted(ReqData, Ctx = #ctx{route = Route}) ->
{Route#route.accepts, ReqData, Ctx}.
%% Existence check: either an {M, F} callback (possibly proxied) or a
%% literal boolean on the route.
resource_exists(ReqData, Ctx = #ctx{route = #route{exists = {M, F}}}) ->
{Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{Success, ReqData1, Ctx};
resource_exists(ReqData, Ctx = #ctx{route = #route{exists = Exists}})
when is_boolean(Exists) ->
{Exists, ReqData, Ctx}.
%% DELETE is delegated to the route's delete callback; routes without
%% one fail with function_clause (DELETE should not be in their methods).
delete_resource(ReqData, Ctx = #ctx{route = #route{delete = {M, F}}}) ->
{Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{Success, ReqData1, Ctx}.
%% JSON body producer. A {halt, Code} result from the content callback is
%% passed through untouched so webmachine stops the request; anything else
%% is JSON-encoded.
provide_content(ReqData, Ctx = #ctx{route = #route{content = {M, F}}}) ->
case maybe_proxy_request(M, F, ReqData, Ctx) of
{{halt,_}=Body, ReqData1} ->
{Body, ReqData1, Ctx};
{Body, ReqData1} ->
{mochijson2:encode(Body), ReqData1, Ctx}
end.
%% Plain-text body producer. Binaries are returned as strings; a proplist
%% whose single entry carries lines/keys/buckets is rendered one value per
%% line; any other proplist is JSON-encoded; {halt, Code} passes through.
provide_text_content(ReqData, Ctx = #ctx{route = #route{content = {M, F}}}) ->
    {Body, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
    case Body of
        {halt, _} = B ->
            %% Bug fix: maybe_proxy_request/4 has already split the pair,
            %% so a halt result is Body itself. The old clause matched
            %% {{halt,_}=B, ReqData1} — a 2-tuple against the already-bound
            %% ReqData1 — which could never succeed, so error responses
            %% crashed with a case_clause instead of halting.
            {B, ReqData1, Ctx};
        B when is_binary(B) ->
            {binary_to_list(B), ReqData1, Ctx};
        [{_, Props}] = B ->
            case {proplists:get_value(lines, Props),
                  proplists:get_value(keys, Props),
                  proplists:get_value(buckets, Props)} of
                {undefined, undefined, undefined} ->
                    {mochijson2:encode(B), ReqData1, Ctx};
                {Lines, undefined, undefined} ->
                    {join_binaries(Lines), ReqData1, Ctx};
                {undefined, Keys, undefined} ->
                    {join_binaries(Keys), ReqData1, Ctx};
                {undefined, undefined, Buckets} ->
                    {join_binaries(Buckets), ReqData1, Ctx}
            end
    end.

%% Render a list of binaries as newline-separated text.
join_binaries(Values) ->
    Lines = [binary_to_list(V) || V <- Values],
    string:join(Lines, io_lib:nl()).
%% Static files: the content callback's body is served verbatim.
provide_static_content(ReqData, Ctx = #ctx{route = #route{content = {M, F}}}) ->
{Body, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{Body, ReqData1, Ctx}.
%% PUT/POST body acceptance; routes with no accept callback refuse.
accept_content(ReqData, Ctx = #ctx{route = #route{accept = {M, F}}}) ->
{Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{Success, ReqData1, Ctx};
accept_content(ReqData, Ctx = #ctx{route = #route{accept = undefined}}) ->
{false, ReqData, Ctx}.
%% POST handling reuses the route's accept callback.
process_post(ReqData, Ctx = #ctx{route = #route{accept = {M, F}}}) ->
{Success, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{Success, ReqData1, Ctx}.
%% Whether POST creates a resource, straight from the route definition.
post_is_create(ReqData, Ctx = #ctx{route = #route{post_create = PostCreate}}) ->
{PostCreate, ReqData, Ctx}.
%% Path for a created resource, from the route's post_path callback.
create_path(ReqData, Ctx = #ctx{route = #route{post_path = {M, F}}}) ->
{Path, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{Path, ReqData1, Ctx}.
%% Last-Modified: undefined, or an {M, F} callback (possibly proxied).
last_modified(ReqData, Ctx = #ctx{route = #route{last_modified = undefined}}) ->
{undefined, ReqData, Ctx};
last_modified(ReqData, Ctx = #ctx{route = #route{last_modified = {M, F}}}) ->
{LM, ReqData1} = maybe_proxy_request(M, F, ReqData, Ctx),
{LM, ReqData1, Ctx}.
%% Find the first #route{} whose expanded base+path equals the request
%% path; returns 'undefined' when nothing matches. Routes with no bases
%% try each path alone; routes with no paths try each base alone.
get_route(_, [], _ReqData) ->
undefined;
get_route(BaseRoute, [Route=#route{base=[],path=Paths} | Rest], ReqData) ->
case get_route_path(BaseRoute, [], Paths, Route, ReqData) of
undefined ->
get_route(BaseRoute, Rest, ReqData);
R -> R
end;
get_route(BaseRoute, [Route=#route{base=Bases,path=[]} | Rest], ReqData) ->
%% No paths: the bases themselves are matched as paths.
case get_route_path(BaseRoute, [], Bases, Route, ReqData) of
undefined ->
get_route(BaseRoute, Rest, ReqData);
R -> R
end;
get_route(BaseRoute, [Route=#route{base=Bases,path=Paths} | Rest], ReqData) ->
case get_route_base(BaseRoute, Bases, Paths, Route, ReqData) of
undefined ->
get_route(BaseRoute, Rest, ReqData);
R -> R
end.
%% Try every base of a route against every path (cartesian search).
get_route_base(_, [], _, _, _) ->
undefined;
get_route_base(BaseRoute, [Base|Rest], Paths, Route, ReqData) ->
case get_route_path(BaseRoute, Base, Paths, Route, ReqData) of
undefined ->
get_route_base(BaseRoute, Rest, Paths, Route, ReqData);
R -> R
end.
%% Compare one expanded candidate (BaseRoute ++ Base ++ Path, with
%% atoms/wildcards substituted) against the tokenized request path.
get_route_path(_, _, [], _, _) ->
undefined;
get_route_path(BaseRoute, Base, [Path|Rest], Route, ReqData) ->
ReqPath = string:tokens(wrq:path(ReqData), "/"),
case expand_path(BaseRoute ++ Base ++ Path, ReqData, []) of
ReqPath ->
Route;
_ ->
get_route_path(BaseRoute, Base, Rest, Route, ReqData)
end.
%% Expand a route template against the actual request: literal strings
%% stay, atoms are replaced by their path_info bindings, and '*' swallows
%% the remainder of the real request path so the later equality check in
%% get_route_path/5 succeeds. Acc is built in reverse.
%% Clause order matters: the '*' clause must precede the generic atom
%% clause, since '*' is itself an atom.
expand_path([], _ReqData, Acc) ->
lists:reverse(Acc);
expand_path([Part|Rest], ReqData, Acc) when is_list(Part) ->
expand_path(Rest, ReqData, [Part | Acc]);
expand_path(['*'|Rest], ReqData, Acc) ->
Tokens = string:tokens(wrq:path(ReqData), "/"),
%% If the request path is shorter than what we have already matched,
%% this template cannot apply.
case length(Acc) > length(Tokens) of
true ->
undefined;
false ->
expand_path(Rest, ReqData, lists:reverse(lists:nthtail(length(Acc), Tokens)) ++ Acc)
end;
expand_path([Part|Rest], ReqData, Acc) when is_atom(Part) ->
expand_path(Rest, ReqData, [wrq:path_info(Part, ReqData) | Acc]).
%% Turn #route{} records into webmachine dispatch entries
%% {PathSpec, ?MODULE, []}. Mirrors get_route/3's base/path handling:
%% no bases -> paths alone; no paths -> bases alone; otherwise the
%% cartesian product of bases and paths.
build_wm_routes(_BaseRoute, [], Acc) ->
lists:reverse(lists:flatten(Acc));
build_wm_routes(BaseRoute, [#route{base = [], path = Paths} | Rest], Acc) ->
build_wm_routes(BaseRoute, Rest, [build_wm_route(BaseRoute, [], Paths, []) | Acc]);
build_wm_routes(BaseRoute, [#route{base = Bases, path = []} | Rest], Acc) ->
build_wm_routes(BaseRoute, Rest, [build_wm_route(BaseRoute, [], Bases, []) | Acc]);
build_wm_routes(BaseRoute, [#route{base = Bases, path = Paths} | Rest], Acc) ->
build_wm_routes(BaseRoute, Rest, [build_wm_routes(BaseRoute, Bases, Paths, []) | Acc]).
%% One dispatch sublist per base.
build_wm_routes(_BaseRoute, [], _, Acc) ->
Acc;
build_wm_routes(BaseRoute, [Base|Rest], Paths, Acc) ->
build_wm_routes(BaseRoute, Rest, Paths, [build_wm_route(BaseRoute, Base, Paths, [])|Acc]).
%% One dispatch entry per path under a fixed base.
build_wm_route(_, _, [], Acc) ->
Acc;
build_wm_route(BaseRoute, Base, [Path|Rest], Acc) ->
build_wm_route(BaseRoute, Base, Rest, [{BaseRoute ++ Base ++ Path, ?MODULE, []}|Acc]).
%% Invoke M:F(ReqData), optionally routed through the proxy callback from
%% init/1. The proxy may answer directly ({ok, Result}), tell us to run
%% locally ({forward, local}), or redirect to another location.
maybe_proxy_request(M, F, ReqData, #ctx{proxy = undefined}) ->
M:F(ReqData);
maybe_proxy_request(M, F, ReqData, #ctx{proxy = {PM, PF}}) ->
case PM:PF(M, F, ReqData) of
{ok, Result} ->
Result;
{forward, local} ->
M:F(ReqData);
{forward, {location, Location, Path, NewPath}} ->
re_wm_proxy:send_proxy_request(Location, Path, NewPath, ReqData)
end.
|
eacfb8dec35bffd7fd4115c385bd6eb5bf1c692911da005a5f35345b29b34206 | maxcountryman/flake | test_utils.clj | (ns flake.test_utils
(:require [clojure.test :refer [deftest is]]
[flake.utils :as utils]))
;; base62-encode: 100 is expected to render as "1c" (presumably a
;; lowercase-first base62 alphabet — see flake.utils for the alphabet).
(deftest test-base62-encode
(is (= "1c" (utils/base62-encode 100))))
;; now-from-epoch should stay within ~1 ms of System/currentTimeMillis
;; relative to the custom epoch, sampled 100 times.
;; NOTE(review): (>= 1 diff) asserts 1 >= drift — the argument order
;; reads backwards at first glance; confirm it is intended.
(deftest test-now-from-epoch
(let [epoch (utils/epoch-mean 10)]
(dotimes [_ 100]
(Thread/sleep 1)
(is (>= 1 (- (System/currentTimeMillis)
(utils/now-from-epoch epoch)))))))
;; with-timeout: a non-terminating body must raise TimeoutException after
;; at least timeout-ms, while a fast body returns its value unchanged.
(deftest test-with-timeout
(let [start (utils/now)
timeout-ms 10]
(is (thrown? java.util.concurrent.TimeoutException
(utils/with-timeout timeout-ms (while true))))
(is (>= (- (utils/now) start) timeout-ms))
(is (= (utils/with-timeout timeout-ms (identity :foo)) :foo))))
| null | https://raw.githubusercontent.com/maxcountryman/flake/5aba7aac4dcc7ad08c245f70b21ae0e6639fa373/test/flake/test_utils.clj | clojure | (ns flake.test_utils
(:require [clojure.test :refer [deftest is]]
[flake.utils :as utils]))
(deftest test-base62-encode
(is (= "1c" (utils/base62-encode 100))))
(deftest test-now-from-epoch
(let [epoch (utils/epoch-mean 10)]
(dotimes [_ 100]
(Thread/sleep 1)
(is (>= 1 (- (System/currentTimeMillis)
(utils/now-from-epoch epoch)))))))
(deftest test-with-timeout
(let [start (utils/now)
timeout-ms 10]
(is (thrown? java.util.concurrent.TimeoutException
(utils/with-timeout timeout-ms (while true))))
(is (>= (- (utils/now) start) timeout-ms))
(is (= (utils/with-timeout timeout-ms (identity :foo)) :foo))))
| |
813e382245be9ed93389c9fba35186c8ef9c7d8ea9904c8a85fa2c63e3fbeb35 | comby-tools/comby | pipeline.mli | open Comby_kernel
open Configuration
open Command_input
type output =
| Matches of (Match.t list * int)
| Replacement of (Replacement.t list * string * int)
| Nothing
val process_single_source
: (module Matchers.Matcher.S)
-> ?fast_offset_conversion:bool
-> ?verbose:bool
-> ?timeout:int
-> ?metasyntax:Matchers.Metasyntax.t
-> ?fresh:(unit -> string)
-> ?substitute_in_place:bool
-> Matchers.Configuration.t
-> single_source
-> Matchers.Specification.t
-> output
val execute
: (module Matchers.Matcher.S)
-> ?timeout:int
-> ?metasyntax:Matchers.Metasyntax.t
-> ?fresh:(unit -> string)
-> ?configuration:Matchers.Configuration.t
-> ?substitute_in_place:bool
-> single_source
-> Matchers.Specification.t
-> output
val with_timeout : int -> Command_input.single_source -> f:(unit -> 'a list) -> 'a list
val run : Command_configuration.t -> unit
| null | https://raw.githubusercontent.com/comby-tools/comby/a36c63fb1e686adaff3e90aed00e88404f8cda78/lib/app/pipeline/pipeline.mli | ocaml | open Comby_kernel
open Configuration
open Command_input
type output =
| Matches of (Match.t list * int)
| Replacement of (Replacement.t list * string * int)
| Nothing
val process_single_source
: (module Matchers.Matcher.S)
-> ?fast_offset_conversion:bool
-> ?verbose:bool
-> ?timeout:int
-> ?metasyntax:Matchers.Metasyntax.t
-> ?fresh:(unit -> string)
-> ?substitute_in_place:bool
-> Matchers.Configuration.t
-> single_source
-> Matchers.Specification.t
-> output
val execute
: (module Matchers.Matcher.S)
-> ?timeout:int
-> ?metasyntax:Matchers.Metasyntax.t
-> ?fresh:(unit -> string)
-> ?configuration:Matchers.Configuration.t
-> ?substitute_in_place:bool
-> single_source
-> Matchers.Specification.t
-> output
val with_timeout : int -> Command_input.single_source -> f:(unit -> 'a list) -> 'a list
val run : Command_configuration.t -> unit
| |
1a96675faaf9a2d7f3793775648902d931ead41ad843af626c1e41715c301d8e | OCamlPro/ez_search | reStr.ml | Fork of Re . to add ? len option to search_forward
(***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* linking exception. *)
(* *)
(***********************************************************************)
Modified by for integration in RE
$ I d : re_str.ml , v 1.3 2002/07/03 15:47:54 vouillon Exp $
(*module Re = Core *)
(* A compiled Str-style regexp: the raw Re AST plus two lazily compiled
forms — one anchored at the start (for matching), one unanchored (for
searching). *)
type regexp =
{ re: Re.t
; mtch: Re.re Lazy.t
; srch: Re.re Lazy.t }
(* Build a regexp from an Emacs-syntax pattern; [c] enables case folding. *)
let compile_regexp s c =
let re = Re.Emacs.re ~case:(not c) s in
{ re
; mtch = lazy (Re.compile (Re.seq [Re.start; re]))
; srch = lazy (Re.compile re) }
(* Last match result, consulted by the group accessors below. Global
mutable state, exactly like the stdlib Str module. *)
let state = ref None
(* Anchored match of [re] in [s] at position [p]; records the result so
the group accessors can read it afterwards. *)
let string_match re s p =
try
state := Some (Re.exec ~pos:p (Lazy.force re.mtch) s);
true
with Not_found ->
state := None;
false
(* Like [string_match], but also true when a prefix of a possible match
runs to the end of [s]. *)
let string_partial_match re s p =
match
Re.exec_partial ~pos:p (Lazy.force re.mtch) s
with
`Full -> string_match re s p
| `Partial -> true
| `Mismatch -> false
(* Search forward from [p] for [re] in [s]. The optional [len] bounds the
search window — this is the fork's addition over stdlib Str. Returns the
start offset of the match (recording it for the group accessors) or
raises Not_found. *)
let search_forward ?len re s p =
try
let res = Re.exec ~pos:p ?len (Lazy.force re.srch) s in
state := Some res;
fst (Re.Group.offset res 0)
with Not_found ->
state := None;
raise Not_found
(* Search backward: try an anchored match at [p], then [p-1], ... down to
0; returns the position where the match begins, or raises Not_found. *)
let rec search_backward re s p =
try
let res = Re.exec ~pos:p (Lazy.force re.mtch) s in
state := Some res;
p
with Not_found ->
state := None;
if p = 0 then
raise Not_found
else
search_backward re s (p - 1)
(* A group number is valid when it is in 0..9 and the last recorded match
actually has that many groups. *)
let valid_group n =
n >= 0 && n < 10 && (
match !state with
| None -> false
| Some m -> n < Re.Group.nb_groups m
)
(* (start, end) offsets of group [i] in the last match; Not_found when no
match is recorded. *)
let offset_group i =
match !state with
| Some m -> Re.Group.offset m i
| None -> raise Not_found
(* Length of group [i]; 0 when the group did not participate in the match. *)
let group_len i =
try
let (b, e) = offset_group i in
e - b
with Not_found ->
0
(* Length of the expansion of template [repl]: [p] scans the template,
[q] accumulates the output length. "\\\\" counts 1, "\\0".."\\9" counts
the matched group's length, and any other backslash sequence keeps both
characters (2). *)
let rec repl_length repl p q len =
if p < len then begin
if repl.[p] <> '\\' then
repl_length repl (p + 1) (q + 1) len
else begin
let p = p + 1 in
if p = len then failwith "Str.replace: illegal backslash sequence";
let q =
match repl.[p] with
| '\\' -> q + 1
| '0' .. '9' as c -> q + group_len (Char.code c - Char.code '0')
| _ -> q + 2 in
repl_length repl (p + 1) q len
end
end else
q
(* Write the expansion of [repl] into [res] starting at [q], copying
matched groups out of [orig]. Must mirror repl_length's accounting
exactly so the buffer is filled without overflow. *)
let rec replace orig repl p res q len =
if p < len then begin
let c = repl.[p] in
if c <> '\\' then begin
Bytes.set res q c;
replace orig repl (p + 1) res (q + 1) len
end else begin
match repl.[p + 1] with
'\\' ->
Bytes.set res q '\\';
replace orig repl (p + 2) res (q + 1) len
| '0' .. '9' as c ->
let d =
try
let (b, e) = offset_group (Char.code c - Char.code '0') in
let d = e - b in
if d > 0 then String.blit orig b res q d;
d
with Not_found ->
0
in
replace orig repl (p + 2) res (q + d) len
| c ->
Bytes.set res q '\\';
Bytes.set res (q + 1) c;
replace orig repl (p + 2) res (q + 2) len
end
end
(* Expand replacement template [repl] against the last recorded match in
[orig]. *)
let replacement_text repl orig =
let len = String.length repl in
let res = Bytes.create (repl_length repl 0 0 len) in
replace orig repl 0 res 0 (String.length repl);
Bytes.unsafe_to_string res
(* [quote s] returns [s] with every Emacs-regexp metacharacter preceded
   by a backslash, so the result matches [s] literally. *)
let quote s =
  let len = String.length s in
  let buf = Buffer.create (2 * len) in
  let escape c =
    (match c with
     | '[' | ']' | '*' | '.' | '\\' | '?' | '+' | '^' | '$' ->
         Buffer.add_char buf '\\'
     | _ -> ());
    Buffer.add_char buf c
  in
  String.iter escape s;
  Buffer.contents buf
(* Prefix of [s] of length [n]. *)
let string_before s n = String.sub s 0 n

(* Suffix of [s] starting at position [n]. *)
let string_after s n =
  let len = String.length s in
  String.sub s n (len - n)

(* The first [n] characters of [s] (alias of [string_before]). *)
let first_chars s n = String.sub s 0 n

(* The last [n] characters of [s]. *)
let last_chars s n =
  let len = String.length s in
  String.sub s (len - n) n
(* Str-compatible constructors: case-sensitive / case-folding, and
literal-string variants built on [quote]. *)
let regexp e = compile_regexp e false
let regexp_case_fold e = compile_regexp e true
let regexp_string s = compile_regexp (quote s) false
let regexp_string_case_fold s = compile_regexp (quote s) true
(* Start offset of group [n]; Invalid_argument on a bad group number,
Not_found when the group did not match (offset -1). *)
let group_beginning n =
if not (valid_group n) then invalid_arg "Str.group_beginning";
let pos = fst (offset_group n) in
if pos = -1 then
raise Not_found
else
pos
(* End offset of group [n]; same error conventions as group_beginning. *)
let group_end n =
if not (valid_group n) then invalid_arg "Str.group_end";
let pos = snd (offset_group n) in
if pos = -1 then
raise Not_found
else
pos
(* Text of group [n] of the last match, taken out of [txt] — the caller
must pass the same string that was matched. *)
let matched_group n txt =
let (b, e) = offset_group n in
String.sub txt b (e - b)
let replace_matched repl matched = replacement_text repl matched
(* Group 0 shorthands: the whole match. *)
let match_beginning () = group_beginning 0
and match_end () = group_end 0
and matched_string txt = matched_group 0 txt
(* Replace the first match of [expr] in [text] with [repl_fun text];
returns [text] unchanged when there is no match. *)
let substitute_first expr repl_fun text =
try
let pos = search_forward expr text 0 in
String.concat "" [string_before text pos;
repl_fun text;
string_after text (match_end ())]
with Not_found ->
text
(* Replace every match. [last_was_empty] forces progress past
zero-length matches by restarting one character later. *)
let global_substitute expr repl_fun text =
let rec replace accu start last_was_empty =
try
let startpos = if last_was_empty then start + 1 else start in
if startpos > String.length text then raise Not_found;
let pos = search_forward expr text startpos in
let end_pos = match_end () in
let repl_text = repl_fun text in
replace (repl_text :: String.sub text start (pos-start) :: accu)
end_pos (end_pos = pos)
with Not_found ->
(string_after text start) :: accu in
String.concat "" (List.rev (replace [] 0 false))
(* Template-based variants of the two substitutions above. *)
let global_replace expr repl text =
global_substitute expr (replacement_text repl) text
and replace_first expr repl text =
substitute_first expr (replacement_text repl) text
(* Like search_forward, but guarantees the match ends strictly after [p]
so that split loops always make progress. *)
let search_forward_progress re s p =
let pos = search_forward re s p in
if match_end () > p then
pos
else if p < String.length s then
search_forward re s (p + 1)
else
raise Not_found
(* Split [text] on matches of [expr] into at most [num] pieces
(0 = unbounded). A leading delimiter is skipped, and empty fields are
dropped — matching stdlib Str semantics. *)
let bounded_split expr text num =
let start =
if string_match expr text 0 then match_end () else 0 in
let rec split accu start n =
if start >= String.length text then
accu
else if n = 1 then
(string_after text start) :: accu
else
try
let pos = search_forward_progress expr text start in
split ((String.sub text start (pos-start)) :: accu)
(match_end ()) (n - 1)
with Not_found ->
(string_after text start) :: accu in
List.rev (split [] start num)
let split expr text = bounded_split expr text 0
(* Like bounded_split, but keeps empty fields (note the [>] bound and the
special case for the empty string). *)
let bounded_split_delim expr text num =
let rec split accu start n =
if start > String.length text then
accu
else if n = 1 then
(string_after text start) :: accu
else
try
let pos = search_forward_progress expr text start in
split (String.sub text start (pos-start) :: accu)
(match_end ()) (n - 1)
with Not_found ->
(string_after text start) :: accu in
if text = "" then
[]
else
List.rev (split [] 0 num)
let split_delim expr text = bounded_split_delim expr text 0
type split_result = Text of string | Delim of string
(* Split keeping the delimiters: alternating Text/Delim pieces; a Delim
adjacent to the previous piece produces no empty Text. *)
let bounded_full_split expr text num =
let rec split accu start n =
if start >= String.length text then
accu
else if n = 1 then
Text (string_after text start) :: accu
else
try
let pos = search_forward_progress expr text start in
let s = matched_string text in
if pos > start then
split (Delim (s) ::
Text (String.sub text start (pos - start)) ::
accu)
(match_end ()) (n - 1)
else
split (Delim (s) :: accu)
(match_end ()) (n - 1)
with Not_found ->
Text (string_after text start) :: accu in
List.rev (split [] 0 num)
let full_split expr text = bounded_full_split expr text 0
| null | https://raw.githubusercontent.com/OCamlPro/ez_search/509b5b9433ba4d95f9402c215e5e2762fb4d2a7c/src/ez_search/reStr.ml | ocaml | *********************************************************************
Objective Caml
linking exception.
*********************************************************************
module Re = Core | Fork of Re . to add ? len option to search_forward
, projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
Modified by for integration in RE
$ I d : re_str.ml , v 1.3 2002/07/03 15:47:54 vouillon Exp $
type regexp =
{ re: Re.t
; mtch: Re.re Lazy.t
; srch: Re.re Lazy.t }
let compile_regexp s c =
let re = Re.Emacs.re ~case:(not c) s in
{ re
; mtch = lazy (Re.compile (Re.seq [Re.start; re]))
; srch = lazy (Re.compile re) }
let state = ref None
let string_match re s p =
try
state := Some (Re.exec ~pos:p (Lazy.force re.mtch) s);
true
with Not_found ->
state := None;
false
let string_partial_match re s p =
match
Re.exec_partial ~pos:p (Lazy.force re.mtch) s
with
`Full -> string_match re s p
| `Partial -> true
| `Mismatch -> false
let search_forward ?len re s p =
try
let res = Re.exec ~pos:p ?len (Lazy.force re.srch) s in
state := Some res;
fst (Re.Group.offset res 0)
with Not_found ->
state := None;
raise Not_found
let rec search_backward re s p =
try
let res = Re.exec ~pos:p (Lazy.force re.mtch) s in
state := Some res;
p
with Not_found ->
state := None;
if p = 0 then
raise Not_found
else
search_backward re s (p - 1)
let valid_group n =
n >= 0 && n < 10 && (
match !state with
| None -> false
| Some m -> n < Re.Group.nb_groups m
)
let offset_group i =
match !state with
| Some m -> Re.Group.offset m i
| None -> raise Not_found
let group_len i =
try
let (b, e) = offset_group i in
e - b
with Not_found ->
0
let rec repl_length repl p q len =
if p < len then begin
if repl.[p] <> '\\' then
repl_length repl (p + 1) (q + 1) len
else begin
let p = p + 1 in
if p = len then failwith "Str.replace: illegal backslash sequence";
let q =
match repl.[p] with
| '\\' -> q + 1
| '0' .. '9' as c -> q + group_len (Char.code c - Char.code '0')
| _ -> q + 2 in
repl_length repl (p + 1) q len
end
end else
q
let rec replace orig repl p res q len =
if p < len then begin
let c = repl.[p] in
if c <> '\\' then begin
Bytes.set res q c;
replace orig repl (p + 1) res (q + 1) len
end else begin
match repl.[p + 1] with
'\\' ->
Bytes.set res q '\\';
replace orig repl (p + 2) res (q + 1) len
| '0' .. '9' as c ->
let d =
try
let (b, e) = offset_group (Char.code c - Char.code '0') in
let d = e - b in
if d > 0 then String.blit orig b res q d;
d
with Not_found ->
0
in
replace orig repl (p + 2) res (q + d) len
| c ->
Bytes.set res q '\\';
Bytes.set res (q + 1) c;
replace orig repl (p + 2) res (q + 2) len
end
end
let replacement_text repl orig =
let len = String.length repl in
let res = Bytes.create (repl_length repl 0 0 len) in
replace orig repl 0 res 0 (String.length repl);
Bytes.unsafe_to_string res
let quote s =
let len = String.length s in
let buf = Buffer.create (2 * len) in
for i = 0 to len - 1 do
match s.[i] with
'[' | ']' | '*' | '.' | '\\' | '?' | '+' | '^' | '$' as c ->
Buffer.add_char buf '\\';
Buffer.add_char buf c
| c -> Buffer.add_char buf c
done;
Buffer.contents buf
let string_before s n = String.sub s 0 n
let string_after s n = String.sub s n (String.length s - n)
let first_chars s n = String.sub s 0 n
let last_chars s n = String.sub s (String.length s - n) n
let regexp e = compile_regexp e false
let regexp_case_fold e = compile_regexp e true
let regexp_string s = compile_regexp (quote s) false
let regexp_string_case_fold s = compile_regexp (quote s) true
let group_beginning n =
if not (valid_group n) then invalid_arg "Str.group_beginning";
let pos = fst (offset_group n) in
if pos = -1 then
raise Not_found
else
pos
let group_end n =
if not (valid_group n) then invalid_arg "Str.group_end";
let pos = snd (offset_group n) in
if pos = -1 then
raise Not_found
else
pos
let matched_group n txt =
let (b, e) = offset_group n in
String.sub txt b (e - b)
let replace_matched repl matched = replacement_text repl matched
let match_beginning () = group_beginning 0
and match_end () = group_end 0
and matched_string txt = matched_group 0 txt
let substitute_first expr repl_fun text =
try
let pos = search_forward expr text 0 in
String.concat "" [string_before text pos;
repl_fun text;
string_after text (match_end ())]
with Not_found ->
text
let global_substitute expr repl_fun text =
let rec replace accu start last_was_empty =
try
let startpos = if last_was_empty then start + 1 else start in
if startpos > String.length text then raise Not_found;
let pos = search_forward expr text startpos in
let end_pos = match_end () in
let repl_text = repl_fun text in
replace (repl_text :: String.sub text start (pos-start) :: accu)
end_pos (end_pos = pos)
with Not_found ->
(string_after text start) :: accu in
String.concat "" (List.rev (replace [] 0 false))
let global_replace expr repl text =
global_substitute expr (replacement_text repl) text
and replace_first expr repl text =
substitute_first expr (replacement_text repl) text
let search_forward_progress re s p =
let pos = search_forward re s p in
if match_end () > p then
pos
else if p < String.length s then
search_forward re s (p + 1)
else
raise Not_found
let bounded_split expr text num =
let start =
if string_match expr text 0 then match_end () else 0 in
let rec split accu start n =
if start >= String.length text then
accu
else if n = 1 then
(string_after text start) :: accu
else
try
let pos = search_forward_progress expr text start in
split ((String.sub text start (pos-start)) :: accu)
(match_end ()) (n - 1)
with Not_found ->
(string_after text start) :: accu in
List.rev (split [] start num)
let split expr text = bounded_split expr text 0
let bounded_split_delim expr text num =
let rec split accu start n =
if start > String.length text then
accu
else if n = 1 then
(string_after text start) :: accu
else
try
let pos = search_forward_progress expr text start in
split (String.sub text start (pos-start) :: accu)
(match_end ()) (n - 1)
with Not_found ->
(string_after text start) :: accu in
if text = "" then
[]
else
List.rev (split [] 0 num)
let split_delim expr text = bounded_split_delim expr text 0
type split_result = Text of string | Delim of string
let bounded_full_split expr text num =
let rec split accu start n =
if start >= String.length text then
accu
else if n = 1 then
Text (string_after text start) :: accu
else
try
let pos = search_forward_progress expr text start in
let s = matched_string text in
if pos > start then
split (Delim (s) ::
Text (String.sub text start (pos - start)) ::
accu)
(match_end ()) (n - 1)
else
split (Delim (s) :: accu)
(match_end ()) (n - 1)
with Not_found ->
Text (string_after text start) :: accu in
List.rev (split [] 0 num)
let full_split expr text = bounded_full_split expr text 0
|
608d777ba8e9699338f78116632da4eada49a895ddeada77a7a6831ccab90632 | xh4/web-toolkit | frequency.lisp | (in-package :css)
(define-dimension frequency ())
(define-dimension-unit hz (frequency))
(define-dimension-unit khz (frequency))
| null | https://raw.githubusercontent.com/xh4/web-toolkit/e510d44a25b36ca8acd66734ed1ee9f5fe6ecd09/css/frequency.lisp | lisp | (in-package :css)
(define-dimension frequency ())
(define-dimension-unit hz (frequency))
(define-dimension-unit khz (frequency))
| |
7412c3b3668df97c2a6308ffe1ffa58bd96a17801149536ddba91714d9f0e9af | manuel-serrano/bigloo | gstelement.scm | ;*=====================================================================*/
;* .../project/bigloo/api/gstreamer/src/Llib/gstelement.scm */
;* ------------------------------------------------------------- */
;* Author : */
;* Creation : Sun Dec 30 15:46:10 2007 */
;* Last change : Tue Nov 15 16:56:12 2011 (serrano) */
;* Copyright : 2007-11 */
;* ------------------------------------------------------------- */
;* GstElement wrapper */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
;; Bigloo module declaration: C FFI bindings (extern), the gst-element
;; class with lazily-computed virtual fields, and the public API.
(module __gstreamer_gstelement
(include "gst.sch")
(use __gstreamer_gsterror
__gstreamer_gstobject
__gstreamer_gststructure
__gstreamer_gstcaps)
(import __gstreamer_gstpluginfeature
__gstreamer_gstelementfactory
__gstreamer_gstpad
__gstreamer_gstreamer)
;; C-side lock/unlock macros and the two functions exported to C.
(extern (macro %gst-lock!::obj () "bgl_gst_lock")
(macro %gst-unlock!::obj () "bgl_gst_unlock")
(export $gst-state->obj "bgl_gst_state_to_obj")
(export $make-gst-element "bgl_gst_element_new"))
;; gst-element wraps a native GstElement; element-factory and
;; interface-list are read-only getters over the C object, while
;; name reads and writes the native element name.
(export (class gst-element::gst-object
(element-factory::gst-element-factory
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($make-gst-element-factory
($gst-element-get-factory
($gst-element $builtin))
#f)))))
(interface-list::pair-nil
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-interface-list
($gst-element $builtin))))))
(name::string
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-get-name
($gst-element $builtin)))))
(set
(lambda (o v)
(with-access::gst-element o ($builtin)
($gst-element-set-name!
($gst-element $builtin) v))))))
($make-gst-element ::$gst-element ::obj)
(gst-element-state::symbol ::gst-element #!optional (timeout #l0))
(gst-element-state-set!::symbol ::gst-element ::symbol)
(gst-element-pad::obj ::gst-element ::bstring)
(gst-element-add-pad! ::gst-element ::gst-pad)
(gst-element-compatible-pad::obj ::gst-element ::gst-pad ::gst-caps)
(gst-element-query-position::llong ::gst-element)
(gst-element-query-duration::llong ::gst-element)
(gst-element-seek::bool ::gst-element ::llong)
(gst-element-link! ::gst-element ::gst-element . els)
(gst-element-link-filtered! ::gst-element ::gst-element ::gst-caps)
(gst-element-link-mime! ::gst-element ::gst-element ::bstring . ::obj)
(gst-element-unlink! ::gst-element ::gst-element . els)
($gst-state->obj::symbol ::$gst-state)))
;*---------------------------------------------------------------------*/
;* $make-gst-element ... */
;*---------------------------------------------------------------------*/
;; Wrap a native GstElement pointer in a gst-element instance; called
;; from C (exported as bgl_gst_element_new). finalizer is kept on the
;; object for later cleanup.
(define ($make-gst-element element::$gst-element finalizer::obj)
(instantiate::gst-element
($builtin ($gst-element->object element))
($finalizer finalizer)))
;*---------------------------------------------------------------------*/
;* object-display ::gst-element ... */
;*---------------------------------------------------------------------*/
;; Printed form: <type refcount=N name=S>, written to the optional port
;; (defaults to the current output port).
(define-method (object-display o::gst-element . port)
(with-access::gst-element o (name)
(let ((p (if (pair? port) (car port) (current-output-port))))
(display "<" p)
(display (find-runtime-type o) p)
(display " refcount=" p)
(with-access::gst-object o ($builtin)
(display ($gst-object-refcount $builtin) p))
(display " name=" p)
(display name p)
(display ">" p))))
;*---------------------------------------------------------------------*/
;* gst-element-query-position ... */
;*---------------------------------------------------------------------*/
;; Current playback position of the element, as an llong (per the
;; module export signature).
(define (gst-element-query-position el::gst-element)
(with-access::gst-element el ($builtin)
($gst-element-query-position ($gst-element $builtin))))
;*---------------------------------------------------------------------*/
;* gst-element-query-duration ... */
;*---------------------------------------------------------------------*/
;; Total duration of the element's stream, as an llong.
(define (gst-element-query-duration el::gst-element)
(with-access::gst-element el ($builtin)
($gst-element-query-duration ($gst-element $builtin))))
;*---------------------------------------------------------------------*/
;* gst-element-seek ... */
;*---------------------------------------------------------------------*/
;; Seek to time position v using a simple flushing, key-unit-aligned seek.
(define (gst-element-seek el::gst-element v)
(with-access::gst-element el ($builtin)
($gst-element-seek-simple
($gst-element $builtin)
$gst-format-time (bit-or $gst-seek-flag-flush $gst-seek-flag-key-unit) v)))
;*---------------------------------------------------------------------*/
;* gst-element-link! ... */
;*---------------------------------------------------------------------*/
;; Link a chain of elements pairwise: el0->el1->els... Raises &gst-error
;; when a link fails or when a non-gst-element is supplied.
(define (gst-element-link! el0::gst-element el1::gst-element . els)
(define (link! src dst)
(if (isa? dst gst-element)
(with-access::gst-element src ((src-builtin $builtin))
(with-access::gst-element dst ((dst-builtin $builtin))
(unless ($gst-element-link! ($gst-element src-builtin)
($gst-element dst-builtin))
(raise (instantiate::&gst-error
(proc 'gst-element-link!)
(msg "Element cannot be linked")
(obj (list src dst)))))))
(raise (instantiate::&gst-error
(proc 'gst-element-link!)
(msg "Illegal element ")
(obj dst)))))
(link! el0 el1)
(let loop ((src el1)
(els els))
(when (pair? els)
(link! src (car els))
(loop (car els) (cdr els))))
#unspecified)
;*---------------------------------------------------------------------*/
;* gst-element-link-filtered! ... */
;*---------------------------------------------------------------------*/
;; Link two elements restricted by the given caps; raises &gst-error
;; on failure.
(define (gst-element-link-filtered! e0 e1 caps)
(with-access::gst-element e0 ((e0-builtin $builtin))
(with-access::gst-element e1 ((e1-builtin $builtin))
(with-access::gst-caps caps ((caps-builtin $builtin))
(unless ($gst-element-link-filtered! ($gst-element e0-builtin)
($gst-element e1-builtin)
caps-builtin)
(raise (instantiate::&gst-error
(proc 'gst-element-link-filtered!)
(msg "Element cannot be linked")
(obj (list e0 e1 caps)))))))))
;*---------------------------------------------------------------------*/
;* gst-element-link-mime! ... */
;*---------------------------------------------------------------------*/
;; Convenience: build simple caps from a mime type plus properties, then
;; perform a filtered link.
(define (gst-element-link-mime! e0 e1 mime-type . props)
(let ((caps (apply gst-caps-new-simple mime-type props)))
(gst-element-link-filtered! e0 e1 caps)))
;*---------------------------------------------------------------------*/
;* gst-element-unlink! ... */
;*---------------------------------------------------------------------*/
;; Unlink a chain of elements pairwise (inverse of gst-element-link!).
(define (gst-element-unlink! el0::gst-element el1::gst-element . els)
(define (unlink! src dst)
(with-access::gst-element src ((src-builtin $builtin))
(with-access::gst-element dst ((dst-builtin $builtin))
($gst-element-unlink!
($gst-element src-builtin) ($gst-element dst-builtin)))))
(unlink! el0 el1)
(let loop ((src el1)
(els els))
(when (pair? els)
(unlink! src (car els))
(loop (car els) (cdr els))))
#unspecified)
;*---------------------------------------------------------------------*/
;* $gst-state ... */
;*---------------------------------------------------------------------*/
(define ($gst-state::$gst-state state::symbol)
(case state
((void-pending) $gst-state-void-pending)
((null) $gst-state-null)
((ready) $gst-state-ready)
((paused) $gst-state-paused)
((playing) $gst-state-playing)
(else (raise (instantiate::&gst-error
(proc '$gst-state)
(msg "Illegal state")
(obj state))))))
;*---------------------------------------------------------------------*/
;* $gst-state->obj ... */
;*---------------------------------------------------------------------*/
(define ($gst-state->obj::symbol state::$gst-state)
(cond
((=fx state $gst-state-void-pending) 'void-pending)
((=fx state $gst-state-null) 'null)
((=fx state $gst-state-ready) 'ready)
((=fx state $gst-state-paused) 'paused)
((=fx state $gst-state-playing) 'playing)
(else 'unknown)))
;*---------------------------------------------------------------------*/
;* $gst-state-change-return->obj ... */
;*---------------------------------------------------------------------*/
(define ($gst-state-change-return->obj::obj state::$gst-state-change-return)
(cond
((eq? state $gst-state-change-failure) 'failure)
((eq? state $gst-state-change-success) 'success)
((eq? state $gst-state-change-async) 'async)
((eq? state $gst-state-change-no-preroll) 'no-preroll)
(else 'unknown)))
;*---------------------------------------------------------------------*/
;* gst-element-state-set! ... */
;*---------------------------------------------------------------------*/
(define (gst-element-state-set! el state)
(%gst-lock!)
(%gst-thread-init!)
($gst-invoke-finalizers)
(%gst-unlock!)
(with-access::gst-element el ($builtin)
($gst-state-change-return->obj
($gst-element-set-state! ($gst-element $builtin)
($gst-state state)))))
;*---------------------------------------------------------------------*/
;* gst-element-state ... */
;*---------------------------------------------------------------------*/
(define (gst-element-state el #!optional (timeout #l0))
(with-access::gst-element el ($builtin)
($gst-state-change-return->obj
($gst-element-get-state ($gst-element $builtin)
0 0
(if (<=llong timeout #l0)
$gst-clock-time-none
timeout)))))
;*---------------------------------------------------------------------*/
;* gst-element-pad ... */
;*---------------------------------------------------------------------*/
(define (gst-element-pad el name)
(with-access::gst-element el ((el-builtin $builtin))
(let* (($el::$gst-element ($gst-element el-builtin))
($spad ($gst-element-get-static-pad $el name)))
(if ($gst-pad-null? $spad)
(let (($rpad ($gst-element-get-request-pad $el name)))
(unless ($gst-pad-null? $rpad)
(instantiate::gst-pad
($builtin ($gst-element->object $rpad))
($finalizer (lambda (o)
(with-access::gst-element o ((o-builtin $builtin))
(%gst-object-finalize-closures! o)
($gst-element-release-request-pad!
($gst-element el-builtin)
($gst-pad o-builtin))))))))
(instantiate::gst-pad
($builtin ($gst-element->object $spad))
($finalizer %gst-object-finalize!))))))
;*---------------------------------------------------------------------*/
;* gst-element-add-pad! ... */
;*---------------------------------------------------------------------*/
(define (gst-element-add-pad! el pad)
(with-access::gst-element el ((el-builtin $builtin))
(with-access::gst-pad pad ((pad-builtin $builtin))
(unless ($gst-element-add-pad! ($gst-element el-builtin)
($gst-pad pad-builtin))
(raise (instantiate::&gst-error
(proc 'gst-element-add-pad!)
(msg "Cannot add pad")
(obj (list el pad))))))))
;*---------------------------------------------------------------------*/
;* gst-element-compatible-pad ... */
;*---------------------------------------------------------------------*/
(define (gst-element-compatible-pad el pad caps)
(with-access::gst-element el ((el-builtin $builtin))
(with-access::gst-pad pad ((pad-builtin $builtin))
(with-access::gst-caps caps ((caps-builtin $builtin))
(let ((pad::$gst-pad ($gst-element-get-compatible-pad
($gst-element el-builtin)
($gst-pad pad-builtin)
caps-builtin)))
(unless ($gst-pad-null? pad)
(instantiate::gst-pad
($builtin ($gst-element->object pad))
($finalizer %gst-object-finalize!))))))))
| null | https://raw.githubusercontent.com/manuel-serrano/bigloo/eb650ed4429155f795a32465e009706bbf1b8d74/api/gstreamer/src/Llib/gstelement.scm | scheme | *=====================================================================*/
* .../project/bigloo/api/gstreamer/src/Llib/gstelement.scm */
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $make-gst-element ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* object-display ::gst-element ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-query-position ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-query-duration ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-seek ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link-filtered! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link-mime! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $gst-state ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $gst-state->obj ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $gst-state-change-return->obj ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-state-set! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-state ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-pad ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-add-pad! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-compatible-pad ... */
*---------------------------------------------------------------------*/ | * Author : * /
* Creation : Sun Dec 30 15:46:10 2007 * /
* Last change : Tue Nov 15 16:56:12 2011 ( serrano ) * /
* Copyright : 2007 - 11 * /
* GstElement wrapper * /
(module __gstreamer_gstelement
(include "gst.sch")
(use __gstreamer_gsterror
__gstreamer_gstobject
__gstreamer_gststructure
__gstreamer_gstcaps)
(import __gstreamer_gstpluginfeature
__gstreamer_gstelementfactory
__gstreamer_gstpad
__gstreamer_gstreamer)
(extern (macro %gst-lock!::obj () "bgl_gst_lock")
(macro %gst-unlock!::obj () "bgl_gst_unlock")
(export $gst-state->obj "bgl_gst_state_to_obj")
(export $make-gst-element "bgl_gst_element_new"))
(export (class gst-element::gst-object
(element-factory::gst-element-factory
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($make-gst-element-factory
($gst-element-get-factory
($gst-element $builtin))
#f)))))
(interface-list::pair-nil
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-interface-list
($gst-element $builtin))))))
(name::string
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-get-name
($gst-element $builtin)))))
(set
(lambda (o v)
(with-access::gst-element o ($builtin)
($gst-element-set-name!
($gst-element $builtin) v))))))
($make-gst-element ::$gst-element ::obj)
(gst-element-state::symbol ::gst-element #!optional (timeout #l0))
(gst-element-state-set!::symbol ::gst-element ::symbol)
(gst-element-pad::obj ::gst-element ::bstring)
(gst-element-add-pad! ::gst-element ::gst-pad)
(gst-element-compatible-pad::obj ::gst-element ::gst-pad ::gst-caps)
(gst-element-query-position::llong ::gst-element)
(gst-element-query-duration::llong ::gst-element)
(gst-element-seek::bool ::gst-element ::llong)
(gst-element-link! ::gst-element ::gst-element . els)
(gst-element-link-filtered! ::gst-element ::gst-element ::gst-caps)
(gst-element-link-mime! ::gst-element ::gst-element ::bstring . ::obj)
(gst-element-unlink! ::gst-element ::gst-element . els)
($gst-state->obj::symbol ::$gst-state)))
(define ($make-gst-element element::$gst-element finalizer::obj)
(instantiate::gst-element
($builtin ($gst-element->object element))
($finalizer finalizer)))
(define-method (object-display o::gst-element . port)
(with-access::gst-element o (name)
(let ((p (if (pair? port) (car port) (current-output-port))))
(display "<" p)
(display (find-runtime-type o) p)
(display " refcount=" p)
(with-access::gst-object o ($builtin)
(display ($gst-object-refcount $builtin) p))
(display " name=" p)
(display name p)
(display ">" p))))
(define (gst-element-query-position el::gst-element)
(with-access::gst-element el ($builtin)
($gst-element-query-position ($gst-element $builtin))))
(define (gst-element-query-duration el::gst-element)
(with-access::gst-element el ($builtin)
($gst-element-query-duration ($gst-element $builtin))))
(define (gst-element-seek el::gst-element v)
(with-access::gst-element el ($builtin)
($gst-element-seek-simple
($gst-element $builtin)
$gst-format-time (bit-or $gst-seek-flag-flush $gst-seek-flag-key-unit) v)))
(define (gst-element-link! el0::gst-element el1::gst-element . els)
(define (link! src dst)
(if (isa? dst gst-element)
(with-access::gst-element src ((src-builtin $builtin))
(with-access::gst-element dst ((dst-builtin $builtin))
(unless ($gst-element-link! ($gst-element src-builtin)
($gst-element dst-builtin))
(raise (instantiate::&gst-error
(proc 'gst-element-link!)
(msg "Element cannot be linked")
(obj (list src dst)))))))
(raise (instantiate::&gst-error
(proc 'gst-element-link!)
(msg "Illegal element ")
(obj dst)))))
(link! el0 el1)
(let loop ((src el1)
(els els))
(when (pair? els)
(link! src (car els))
(loop (car els) (cdr els))))
#unspecified)
(define (gst-element-link-filtered! e0 e1 caps)
(with-access::gst-element e0 ((e0-builtin $builtin))
(with-access::gst-element e1 ((e1-builtin $builtin))
(with-access::gst-caps caps ((caps-builtin $builtin))
(unless ($gst-element-link-filtered! ($gst-element e0-builtin)
($gst-element e1-builtin)
caps-builtin)
(raise (instantiate::&gst-error
(proc 'gst-element-link-filtered!)
(msg "Element cannot be linked")
(obj (list e0 e1 caps)))))))))
(define (gst-element-link-mime! e0 e1 mime-type . props)
(let ((caps (apply gst-caps-new-simple mime-type props)))
(gst-element-link-filtered! e0 e1 caps)))
(define (gst-element-unlink! el0::gst-element el1::gst-element . els)
(define (unlink! src dst)
(with-access::gst-element src ((src-builtin $builtin))
(with-access::gst-element dst ((dst-builtin $builtin))
($gst-element-unlink!
($gst-element src-builtin) ($gst-element dst-builtin)))))
(unlink! el0 el1)
(let loop ((src el1)
(els els))
(when (pair? els)
(unlink! src (car els))
(loop (car els) (cdr els))))
#unspecified)
(define ($gst-state::$gst-state state::symbol)
(case state
((void-pending) $gst-state-void-pending)
((null) $gst-state-null)
((ready) $gst-state-ready)
((paused) $gst-state-paused)
((playing) $gst-state-playing)
(else (raise (instantiate::&gst-error
(proc '$gst-state)
(msg "Illegal state")
(obj state))))))
(define ($gst-state->obj::symbol state::$gst-state)
(cond
((=fx state $gst-state-void-pending) 'void-pending)
((=fx state $gst-state-null) 'null)
((=fx state $gst-state-ready) 'ready)
((=fx state $gst-state-paused) 'paused)
((=fx state $gst-state-playing) 'playing)
(else 'unknown)))
(define ($gst-state-change-return->obj::obj state::$gst-state-change-return)
(cond
((eq? state $gst-state-change-failure) 'failure)
((eq? state $gst-state-change-success) 'success)
((eq? state $gst-state-change-async) 'async)
((eq? state $gst-state-change-no-preroll) 'no-preroll)
(else 'unknown)))
(define (gst-element-state-set! el state)
(%gst-lock!)
(%gst-thread-init!)
($gst-invoke-finalizers)
(%gst-unlock!)
(with-access::gst-element el ($builtin)
($gst-state-change-return->obj
($gst-element-set-state! ($gst-element $builtin)
($gst-state state)))))
(define (gst-element-state el #!optional (timeout #l0))
(with-access::gst-element el ($builtin)
($gst-state-change-return->obj
($gst-element-get-state ($gst-element $builtin)
0 0
(if (<=llong timeout #l0)
$gst-clock-time-none
timeout)))))
(define (gst-element-pad el name)
(with-access::gst-element el ((el-builtin $builtin))
(let* (($el::$gst-element ($gst-element el-builtin))
($spad ($gst-element-get-static-pad $el name)))
(if ($gst-pad-null? $spad)
(let (($rpad ($gst-element-get-request-pad $el name)))
(unless ($gst-pad-null? $rpad)
(instantiate::gst-pad
($builtin ($gst-element->object $rpad))
($finalizer (lambda (o)
(with-access::gst-element o ((o-builtin $builtin))
(%gst-object-finalize-closures! o)
($gst-element-release-request-pad!
($gst-element el-builtin)
($gst-pad o-builtin))))))))
(instantiate::gst-pad
($builtin ($gst-element->object $spad))
($finalizer %gst-object-finalize!))))))
(define (gst-element-add-pad! el pad)
(with-access::gst-element el ((el-builtin $builtin))
(with-access::gst-pad pad ((pad-builtin $builtin))
(unless ($gst-element-add-pad! ($gst-element el-builtin)
($gst-pad pad-builtin))
(raise (instantiate::&gst-error
(proc 'gst-element-add-pad!)
(msg "Cannot add pad")
(obj (list el pad))))))))
(define (gst-element-compatible-pad el pad caps)
(with-access::gst-element el ((el-builtin $builtin))
(with-access::gst-pad pad ((pad-builtin $builtin))
(with-access::gst-caps caps ((caps-builtin $builtin))
(let ((pad::$gst-pad ($gst-element-get-compatible-pad
($gst-element el-builtin)
($gst-pad pad-builtin)
caps-builtin)))
(unless ($gst-pad-null? pad)
(instantiate::gst-pad
($builtin ($gst-element->object pad))
($finalizer %gst-object-finalize!))))))))
|
3aa57fccaf960f5df9eab8ef3bf72feb4fad9e0a88208c7ea73fc122d5250840 | albertov/bindings-gdal | OGRInfo.hs | # LANGUAGE RecordWildCards #
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Control.Monad (forM_)
import Data.Conduit
import Data.Monoid ((<>))
import qualified Data.HashMap.Strict as HM
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.Environment (getArgs)
import GDAL
import OGR
main :: IO ()
main = withGDAL $ runGDAL_ $ do
[fname, nameStr] <- liftIO getArgs
let name = T.pack nameStr
ds <- OGR.openReadOnly fname
l <- getLayerByName name ds
schema <- layerFeatureDef l
extent <- layerExtent l
liftIO $ do
T.putStrLn "Extent:"
print extent
T.putStrLn "Schema:"
print schema
runOGR $ sourceLayer (getLayerByName name ds) $$ awaitForever $
\(mFid, Feature{..}) ->
liftIO $ do
T.putStrLn ""
T.putStrLn ""
putStrLn ("FID: " <> maybe ("<unknown>") show mFid)
T.putStrLn "Fields:"
forM_ (HM.toList fFields) $ \(fieldName, fieldValue) -> do
T.putStrLn (" " <> fieldName <> ":")
putStrLn (" " <> show fieldValue)
T.putStrLn ("Geometry:")
BS.putStrLn (maybe "" ((" "<>) . geomToWkt) fGeom)
| null | https://raw.githubusercontent.com/albertov/bindings-gdal/f91087e06a569fc6dc81b4c22e58b5c9a1dcdc73/exe/OGRInfo.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE RecordWildCards #
module Main (main) where
import Control.Monad (forM_)
import Data.Conduit
import Data.Monoid ((<>))
import qualified Data.HashMap.Strict as HM
import qualified Data.ByteString.Char8 as BS
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.Environment (getArgs)
import GDAL
import OGR
main :: IO ()
main = withGDAL $ runGDAL_ $ do
[fname, nameStr] <- liftIO getArgs
let name = T.pack nameStr
ds <- OGR.openReadOnly fname
l <- getLayerByName name ds
schema <- layerFeatureDef l
extent <- layerExtent l
liftIO $ do
T.putStrLn "Extent:"
print extent
T.putStrLn "Schema:"
print schema
runOGR $ sourceLayer (getLayerByName name ds) $$ awaitForever $
\(mFid, Feature{..}) ->
liftIO $ do
T.putStrLn ""
T.putStrLn ""
putStrLn ("FID: " <> maybe ("<unknown>") show mFid)
T.putStrLn "Fields:"
forM_ (HM.toList fFields) $ \(fieldName, fieldValue) -> do
T.putStrLn (" " <> fieldName <> ":")
putStrLn (" " <> show fieldValue)
T.putStrLn ("Geometry:")
BS.putStrLn (maybe "" ((" "<>) . geomToWkt) fGeom)
|
37638940c2b61e724181fd6ed36c5ae791508d8b92135fe164c0b9a658768eee | Flexiana/tiny-rbac | test.clj | (ns tiny-rbac.test
(:require [clojure.test :refer :all]
[tiny-rbac.builder :as b]
[tiny-rbac.core :as c]))
(declare thrown-with-msg?)
(deftest add-resource
(is (= #{:comment}
(-> (b/add-resource {} :comment)
(c/resources)))
"can add one resource")
(is (= #{:comment :post}
(-> (b/add-resource {} [:comment :post])
(c/resources)))
"can add multiple resources")
(is (= :post
(-> (b/add-resource {} [:comment :post])
(c/resource :post)))
"requesting a resource by keyword")
(is (= #{:post}
(-> (b/add-resource {} :post)
(b/add-resource :post)
c/resources))
"Cannot duplicate resources")
(is (= "post"
(-> (b/add-resource {} [:comment "post"])
(c/resource "post")))
"requesting a resource by string")
(is (= nil
(-> (b/add-resource {} [:comment :post])
(c/resource :tag)))
"nil when resource not found")
(is (= nil
(c/resource {} :tag))
"nil when no resources"))
(deftest delete-resource
(is (= #{:comment}
(-> (b/add-resource {} [:comment :post])
(b/delete-resource :post)
(c/resources)))
"can delete one resource")
(is (= #{:comment}
(-> (b/add-resource {} [:comment :post :tag])
(b/delete-resource [:post :tag])
(c/resources)))
"can delete multiple resources")
(is (= #{}
(-> (b/add-resource {} [:post :tag])
(b/delete-resource [:post :tag])
(c/resources)))
"can delete multiple resources by name")
(is (= #{}
(-> (b/add-resource {} [:post :tag])
(b/delete-resource ::b/all)
(c/resources)))
"can delete all resources")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/delete-resource {} :comment))
"Throws an Exception when resource not available")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(-> (b/add-resource {} [:post :comment])
(b/delete-resource [:comment :tag])))
"Throws an Exception when resource not available"))
(deftest add-action
(is (= #{:read}
(-> (b/add-resource {} :comment)
(b/add-action :comment :read)
(c/actions :comment)))
"Add an action to resource")
(is (= #{:read :write}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write])
(c/actions :comment)))
"Add multiple actions to resource")
(is (= #{:read :delete :write}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write])
(b/add-action :comment [:read :write :delete])
(c/actions :comment)))
"Add actions multiple times to resource")
(is (= :read
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write :delete])
(c/action :comment :read)))
"Get action by resource and action")
(is (= nil
(-> (b/add-resource {} :comment)
(b/add-action :comment [:write :delete])
(c/action :comment :read)))
"Response with nil if action missing from resource")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/add-action {} :comment :read))
"Throws an Exception when resource not available"))
(deftest delete-resource-deletes-actions
(is (= {::c/resources #{}, ::c/actions {}}
(-> (b/add-resource {} :comment)
(b/add-action :comment :read)
(b/delete-resource :comment)))
"deleting resources removes actions too")
(is (= {::c/resources #{}, ::c/actions {}}
(-> (b/add-resource {} [:comment :post])
(b/add-action :comment :read)
(b/add-action :post :read)
(b/delete-resource ::b/all)))
"deleting resources removes actions too"))
(deftest delete-action
(is (= #{:tag}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/delete-action :comment :read)
(c/actions :comment)))
"deleting action")
(is (= #{:tag}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write :tag])
(b/delete-action :comment [:read :write])
(c/actions :comment)))
"deleting multiple actions")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/delete-action {} :comment [:read :write]))
"Throwing error when resource not defined")
(is (thrown-with-msg? IllegalArgumentException
#"referred action does not exists"
(-> (b/add-resource {} :comment)
(b/delete-action :comment :read)))
"Throwing error when action not found")
(is (= #{}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write :tag])
(b/delete-action :comment ::b/all)
(c/actions :comment)))
"deleting all actions"))
(deftest add-role
(is (= {::c/roles {:poster {}}}
(b/add-role {} :poster)))
(is (= {::c/roles {:poster {}
:admin {}}}
(b/add-role {} [:poster :admin]))))
(deftest add-inheritance
(is (= #{:poster}
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-inheritance :reader :poster)
(c/inherit :reader)))
"Add role as inheritance")
(is (= #{:poster :admin}
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-role :admin)
(b/add-inheritance :reader [:poster :admin])
(c/inherit :reader)))
"Add roles as inheritance")
(is (= #{:poster :admin}
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-role :admin)
(b/add-inheritance :reader [:poster :admin])
(b/add-inheritance :reader :admin)
(c/inherit :reader)))
"add-inheritance does not overwrites given inheritances")
(is (thrown-with-msg? IllegalArgumentException
#"referred role does not exists"
(-> (b/add-role {} :reader)
(b/add-inheritance :reader :poster)))
"Add missing role as inheritance")
(is (thrown-with-msg? IllegalArgumentException
#"referred role does not exists"
(-> (b/add-role {} :reader)
(b/add-role :admin)
(b/add-inheritance :reader [:admin :poster])))
"Add missing role as inheritance")
(is (= #{:poster}
(-> (b/add-role {} :poster)
(b/add-inheritance :reader :poster)
(c/inherit :reader)))
"Creating role for only inheritance")
(is (= #{:poster :admin}
(-> (b/add-role {} :poster)
(b/add-role :admin)
(b/add-inheritance :reader [:poster :admin])
(c/inherit :reader)))
"Creating role with multiple inheritances"))
(deftest circular-inheritance
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :reader"
(-> (b/add-role {} :reader)
(b/add-inheritance :reader :reader)))
"direct circular inheritance detected")
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :reader"
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-inheritance :reader [:poster :reader])))
"direct circular inheritance detected")
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :reader"
(-> (b/add-role {} :reader)
(b/add-inheritance :poster :reader)
(b/add-inheritance :reader :poster)))
"indirect circular inheritance detected")
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :1"
(-> (b/add-role {} :1)
(b/add-inheritance :2 :1)
(b/add-inheritance :3 :2)
(b/add-inheritance :4 :3)
(b/add-inheritance :1 :4)))
"indirect circular inheritance detected"))
(deftest add-permission
(is (= #{::b/all}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read ::b/all)
(c/permissions :poster :post :read)))
"add single permission")
(is (= #{:own :friend}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read [:own :friend])
(c/permissions :poster :post :read)))
"add multiple permission")
(is (= #{:own :friend}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read :own)
(b/add-permission :poster :post :read :friend)
(c/permissions :poster :post :read)))
"add permission multiple times")
(is (thrown-with-msg? IllegalArgumentException
#"referred role does not exists"
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-permission :poster :post :read ::b/all)))
"Missing role")
(is (thrown-with-msg? IllegalArgumentException
#"referred action does not exists"
(-> (b/add-resource {} :post)
(b/add-action :post [:write])
(b/add-permission :poster :post :read ::b/all)))
"Missing action")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/add-permission {} :poster :post :read ::b/all))
"Missing resource"))
(deftest delete-permission
(is (= #{:friend}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read [:own :friend])
(b/delete-permission :poster :post :read :own)
(c/permissions :poster :post :read)))
"delete single permission")
(is (= #{}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read [:own :friend])
(b/delete-permission :poster :post :read [:own :friend])
(c/permissions :poster :post :read)))
"delete multi permission")
(is (thrown-with-msg? IllegalArgumentException
#"referred permission does not exists"
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read :own)
(b/delete-permission :poster :post :read [:own :friend])))
"delete missing permission"))
(deftest permission-by-inheritance
(let [role-set (-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader))]
(is (= #{:own :friend}
(c/permissions role-set :poster :post :read)))
(is (= #{:own :friend}
(c/permissions role-set :reader :post :read)))
(is (= #{:own}
(c/permissions role-set :poster :post :write)))
(is (= #{}
(c/permissions role-set :reader :post :write)))))
(deftest get-permission-via-map
(let [role-set (-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader))]
(is (= {::c/resources #{:post},
::c/actions {:post #{:read :write}},
::c/inherits {:poster #{:reader}}
::c/roles {:reader {:post
{:read
#{:own :friend}}}
:poster {:post
{:write
#{:own}}}}}
role-set))
(is (= #{:own :friend}
(c/permissions role-set {:role :poster
:resource :post
:action :read})))
(is (= #{:own :friend}
(c/permissions role-set {:role :reader
:resource :post
:action :read})))
(is (= #{:own}
(c/permissions role-set {:role :poster
:resource :post
:action :write})))
(is (= #{}
(c/permissions role-set {:role :reader
:resource :post
:action :write})))))
(deftest has-permission
(let [role-set (-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader))]
(is (true?
(c/has-permission role-set :reader :post :read))
"Have own permission")
(is (false?
(c/has-permission role-set :reader :post :write))
"Doesn't have permission")
(is (true?
(c/has-permission role-set :poster :post :read))
"Has inherited permission")
(is (true?
(c/has-permission role-set :poster :post :write))
"Has own permission")
(is (false?
(c/has-permission role-set :lurker :post :write))
"Doesn't have permission with invalid role")
(is (false?
(c/has-permission role-set :reader :comment :write))
"Doesn't have permission on invalid resource")
(is (false?
(c/has-permission role-set :reader :post :tag))
"Doesn't have permission for invalid action")))
(deftest building-role-set
(let [expected {::c/resources #{:post},
::c/actions {:post #{:read :write}},
::c/inherits {:poster #{:reader}}
::c/roles {:reader {:post {:read #{:own :friend}}}
:poster {:post {:write #{:own}}}}}]
(is (= expected
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader)))
"Build by code")
(is (= expected
(b/init {::c/resources :post
::c/actions {:post [:read :write]}
::c/inherits {:poster :reader}
::c/roles {:reader {:post {:read [:own :friend]}}
:poster {:post {:write :own}}}}))
"Build from one map")
(is (= expected
(b/init {:resources :post
:actions {:post [:read :write]}
:inherits {:poster :reader}
:roles {:reader {:post {:read [:own :friend]}}
:poster {:post {:write :own}}}}))
"Build from non namespace map")
(is (= expected
(-> (b/init {::c/resources :post})
(b/init {::c/actions {:post [:read :write]}})
(b/init {::c/roles {:reader {:post {:read #{:own :friend}}}}})
(b/init {::c/roles {:poster {:post {:write #{:own}}}}})
(b/init {::c/inherits {:poster :reader}})))
"Build from multiple maps")))
(deftest delete-resource-deletes-permissions
(is (= {::c/resources #{},
::c/actions {}
::c/roles {:member {}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment :read)
(b/add-role :member)
(b/add-permission :member :comment :read ::b/all)
(b/delete-resource :comment)))
"deleting resource removes permissions too"))
(deftest delete-action-deletes-permissions
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:tag}}
::c/roles {:guest {:comment {}}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role :guest)
(b/add-permission :guest :comment :read ::b/all)
(b/delete-action :comment :read)))
"deleting action removes permissions"))
(deftest delete-role
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:read :tag}}
::c/roles {}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role :guest)
(b/delete-role :guest)))
"deleting role removes it from role-set")
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:read :tag}}
::c/inherits {:admin #{}}
::c/roles {:admin {}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role :guest)
(b/add-inheritance :admin :guest)
(b/delete-role :guest)))
"deleting role removes it from inherits"))
(deftest delete-inheritance
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:read :tag}}
::c/roles {:guest {} :guest2 {} :admin {}}
::c/inherits {:admin #{:guest2}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role [:guest :guest2])
(b/add-inheritance :admin [:guest :guest2])
(b/delete-inheritance :admin :guest)))
"deleting inheritance removes it")) | null | https://raw.githubusercontent.com/Flexiana/tiny-rbac/66310b29406450e7e727818286a767db202d7639/test/tiny_rbac/test.clj | clojure | (ns tiny-rbac.test
(:require [clojure.test :refer :all]
[tiny-rbac.builder :as b]
[tiny-rbac.core :as c]))
(declare thrown-with-msg?)
(deftest add-resource
(is (= #{:comment}
(-> (b/add-resource {} :comment)
(c/resources)))
"can add one resource")
(is (= #{:comment :post}
(-> (b/add-resource {} [:comment :post])
(c/resources)))
"can add multiple resources")
(is (= :post
(-> (b/add-resource {} [:comment :post])
(c/resource :post)))
"requesting a resource by keyword")
(is (= #{:post}
(-> (b/add-resource {} :post)
(b/add-resource :post)
c/resources))
"Cannot duplicate resources")
(is (= "post"
(-> (b/add-resource {} [:comment "post"])
(c/resource "post")))
"requesting a resource by string")
(is (= nil
(-> (b/add-resource {} [:comment :post])
(c/resource :tag)))
"nil when resource not found")
(is (= nil
(c/resource {} :tag))
"nil when no resources"))
(deftest delete-resource
(is (= #{:comment}
(-> (b/add-resource {} [:comment :post])
(b/delete-resource :post)
(c/resources)))
"can delete one resource")
(is (= #{:comment}
(-> (b/add-resource {} [:comment :post :tag])
(b/delete-resource [:post :tag])
(c/resources)))
"can delete multiple resources")
(is (= #{}
(-> (b/add-resource {} [:post :tag])
(b/delete-resource [:post :tag])
(c/resources)))
"can delete multiple resources by name")
(is (= #{}
(-> (b/add-resource {} [:post :tag])
(b/delete-resource ::b/all)
(c/resources)))
"can delete all resources")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/delete-resource {} :comment))
"Throws an Exception when resource not available")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(-> (b/add-resource {} [:post :comment])
(b/delete-resource [:comment :tag])))
"Throws an Exception when resource not available"))
(deftest add-action
(is (= #{:read}
(-> (b/add-resource {} :comment)
(b/add-action :comment :read)
(c/actions :comment)))
"Add an action to resource")
(is (= #{:read :write}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write])
(c/actions :comment)))
"Add multiple actions to resource")
(is (= #{:read :delete :write}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write])
(b/add-action :comment [:read :write :delete])
(c/actions :comment)))
"Add actions multiple times to resource")
(is (= :read
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write :delete])
(c/action :comment :read)))
"Get action by resource and action")
(is (= nil
(-> (b/add-resource {} :comment)
(b/add-action :comment [:write :delete])
(c/action :comment :read)))
"Response with nil if action missing from resource")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/add-action {} :comment :read))
"Throws an Exception when resource not available"))
(deftest delete-resource-deletes-actions
(is (= {::c/resources #{}, ::c/actions {}}
(-> (b/add-resource {} :comment)
(b/add-action :comment :read)
(b/delete-resource :comment)))
"deleting resources removes actions too")
(is (= {::c/resources #{}, ::c/actions {}}
(-> (b/add-resource {} [:comment :post])
(b/add-action :comment :read)
(b/add-action :post :read)
(b/delete-resource ::b/all)))
"deleting resources removes actions too"))
(deftest delete-action
(is (= #{:tag}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/delete-action :comment :read)
(c/actions :comment)))
"deleting action")
(is (= #{:tag}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write :tag])
(b/delete-action :comment [:read :write])
(c/actions :comment)))
"deleting multiple actions")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/delete-action {} :comment [:read :write]))
"Throwing error when resource not defined")
(is (thrown-with-msg? IllegalArgumentException
#"referred action does not exists"
(-> (b/add-resource {} :comment)
(b/delete-action :comment :read)))
"Throwing error when action not found")
(is (= #{}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :write :tag])
(b/delete-action :comment ::b/all)
(c/actions :comment)))
"deleting all actions"))
(deftest add-role
(is (= {::c/roles {:poster {}}}
(b/add-role {} :poster)))
(is (= {::c/roles {:poster {}
:admin {}}}
(b/add-role {} [:poster :admin]))))
(deftest add-inheritance
(is (= #{:poster}
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-inheritance :reader :poster)
(c/inherit :reader)))
"Add role as inheritance")
(is (= #{:poster :admin}
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-role :admin)
(b/add-inheritance :reader [:poster :admin])
(c/inherit :reader)))
"Add roles as inheritance")
(is (= #{:poster :admin}
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-role :admin)
(b/add-inheritance :reader [:poster :admin])
(b/add-inheritance :reader :admin)
(c/inherit :reader)))
"add-inheritance does not overwrites given inheritances")
(is (thrown-with-msg? IllegalArgumentException
#"referred role does not exists"
(-> (b/add-role {} :reader)
(b/add-inheritance :reader :poster)))
"Add missing role as inheritance")
(is (thrown-with-msg? IllegalArgumentException
#"referred role does not exists"
(-> (b/add-role {} :reader)
(b/add-role :admin)
(b/add-inheritance :reader [:admin :poster])))
"Add missing role as inheritance")
(is (= #{:poster}
(-> (b/add-role {} :poster)
(b/add-inheritance :reader :poster)
(c/inherit :reader)))
"Creating role for only inheritance")
(is (= #{:poster :admin}
(-> (b/add-role {} :poster)
(b/add-role :admin)
(b/add-inheritance :reader [:poster :admin])
(c/inherit :reader)))
"Creating role with multiple inheritances"))
(deftest circular-inheritance
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :reader"
(-> (b/add-role {} :reader)
(b/add-inheritance :reader :reader)))
"direct circular inheritance detected")
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :reader"
(-> (b/add-role {} :reader)
(b/add-role :poster)
(b/add-inheritance :reader [:poster :reader])))
"direct circular inheritance detected")
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :reader"
(-> (b/add-role {} :reader)
(b/add-inheritance :poster :reader)
(b/add-inheritance :reader :poster)))
"indirect circular inheritance detected")
(is (thrown-with-msg? IllegalArgumentException
#"Circular inheritance detected for :1"
(-> (b/add-role {} :1)
(b/add-inheritance :2 :1)
(b/add-inheritance :3 :2)
(b/add-inheritance :4 :3)
(b/add-inheritance :1 :4)))
"indirect circular inheritance detected"))
(deftest add-permission
(is (= #{::b/all}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read ::b/all)
(c/permissions :poster :post :read)))
"add single permission")
(is (= #{:own :friend}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read [:own :friend])
(c/permissions :poster :post :read)))
"add multiple permission")
(is (= #{:own :friend}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read :own)
(b/add-permission :poster :post :read :friend)
(c/permissions :poster :post :read)))
"add permission multiple times")
(is (thrown-with-msg? IllegalArgumentException
#"referred role does not exists"
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-permission :poster :post :read ::b/all)))
"Missing role")
(is (thrown-with-msg? IllegalArgumentException
#"referred action does not exists"
(-> (b/add-resource {} :post)
(b/add-action :post [:write])
(b/add-permission :poster :post :read ::b/all)))
"Missing action")
(is (thrown-with-msg? IllegalArgumentException
#"referred resource does not exists"
(b/add-permission {} :poster :post :read ::b/all))
"Missing resource"))
(deftest delete-permission
(is (= #{:friend}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read [:own :friend])
(b/delete-permission :poster :post :read :own)
(c/permissions :poster :post :read)))
"delete single permission")
(is (= #{}
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read [:own :friend])
(b/delete-permission :poster :post :read [:own :friend])
(c/permissions :poster :post :read)))
"delete multi permission")
(is (thrown-with-msg? IllegalArgumentException
#"referred permission does not exists"
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :poster)
(b/add-permission :poster :post :read :own)
(b/delete-permission :poster :post :read [:own :friend])))
"delete missing permission"))
(deftest permission-by-inheritance
(let [role-set (-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader))]
(is (= #{:own :friend}
(c/permissions role-set :poster :post :read)))
(is (= #{:own :friend}
(c/permissions role-set :reader :post :read)))
(is (= #{:own}
(c/permissions role-set :poster :post :write)))
(is (= #{}
(c/permissions role-set :reader :post :write)))))
(deftest get-permission-via-map
(let [role-set (-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader))]
(is (= {::c/resources #{:post},
::c/actions {:post #{:read :write}},
::c/inherits {:poster #{:reader}}
::c/roles {:reader {:post
{:read
#{:own :friend}}}
:poster {:post
{:write
#{:own}}}}}
role-set))
(is (= #{:own :friend}
(c/permissions role-set {:role :poster
:resource :post
:action :read})))
(is (= #{:own :friend}
(c/permissions role-set {:role :reader
:resource :post
:action :read})))
(is (= #{:own}
(c/permissions role-set {:role :poster
:resource :post
:action :write})))
(is (= #{}
(c/permissions role-set {:role :reader
:resource :post
:action :write})))))
(deftest has-permission
(let [role-set (-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader))]
(is (true?
(c/has-permission role-set :reader :post :read))
"Have own permission")
(is (false?
(c/has-permission role-set :reader :post :write))
"Doesn't have permission")
(is (true?
(c/has-permission role-set :poster :post :read))
"Has inherited permission")
(is (true?
(c/has-permission role-set :poster :post :write))
"Has own permission")
(is (false?
(c/has-permission role-set :lurker :post :write))
"Doesn't have permission with invalid role")
(is (false?
(c/has-permission role-set :reader :comment :write))
"Doesn't have permission on invalid resource")
(is (false?
(c/has-permission role-set :reader :post :tag))
"Doesn't have permission for invalid action")))
(deftest building-role-set
(let [expected {::c/resources #{:post},
::c/actions {:post #{:read :write}},
::c/inherits {:poster #{:reader}}
::c/roles {:reader {:post {:read #{:own :friend}}}
:poster {:post {:write #{:own}}}}}]
(is (= expected
(-> (b/add-resource {} :post)
(b/add-action :post [:read :write])
(b/add-role :reader)
(b/add-role :poster)
(b/add-permission :reader :post :read [:own :friend])
(b/add-permission :poster :post :write :own)
(b/add-inheritance :poster :reader)))
"Build by code")
(is (= expected
(b/init {::c/resources :post
::c/actions {:post [:read :write]}
::c/inherits {:poster :reader}
::c/roles {:reader {:post {:read [:own :friend]}}
:poster {:post {:write :own}}}}))
"Build from one map")
(is (= expected
(b/init {:resources :post
:actions {:post [:read :write]}
:inherits {:poster :reader}
:roles {:reader {:post {:read [:own :friend]}}
:poster {:post {:write :own}}}}))
"Build from non namespace map")
(is (= expected
(-> (b/init {::c/resources :post})
(b/init {::c/actions {:post [:read :write]}})
(b/init {::c/roles {:reader {:post {:read #{:own :friend}}}}})
(b/init {::c/roles {:poster {:post {:write #{:own}}}}})
(b/init {::c/inherits {:poster :reader}})))
"Build from multiple maps")))
(deftest delete-resource-deletes-permissions
(is (= {::c/resources #{},
::c/actions {}
::c/roles {:member {}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment :read)
(b/add-role :member)
(b/add-permission :member :comment :read ::b/all)
(b/delete-resource :comment)))
"deleting resource removes permissions too"))
(deftest delete-action-deletes-permissions
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:tag}}
::c/roles {:guest {:comment {}}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role :guest)
(b/add-permission :guest :comment :read ::b/all)
(b/delete-action :comment :read)))
"deleting action removes permissions"))
(deftest delete-role
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:read :tag}}
::c/roles {}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role :guest)
(b/delete-role :guest)))
"deleting role removes it from role-set")
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:read :tag}}
::c/inherits {:admin #{}}
::c/roles {:admin {}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role :guest)
(b/add-inheritance :admin :guest)
(b/delete-role :guest)))
"deleting role removes it from inherits"))
(deftest delete-inheritance
(is (= {::c/resources #{:comment}
::c/actions {:comment #{:read :tag}}
::c/roles {:guest {} :guest2 {} :admin {}}
::c/inherits {:admin #{:guest2}}}
(-> (b/add-resource {} :comment)
(b/add-action :comment [:read :tag])
(b/add-role [:guest :guest2])
(b/add-inheritance :admin [:guest :guest2])
(b/delete-inheritance :admin :guest)))
"deleting inheritance removes it")) | |
46f3d00b7ab49ce8187d4e82f8aee2006c5f36ff424c5f64ea9561fb1a128b5f | cbaggers/varjo | variables-from-spec.lisp | (in-package :vari.glsl)
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *definitions-missing-from-glsl-spec*
'((:lisp-name "GL-IN" :name "gl_in" :type "{gl_PerVertex" :place-p t
:versions (150 330 400 410 420 430 440 450 460)
:stage :geometry)
)))
(defmacro populate-vars ()
(let ((vars (mapcar (lambda (_)
(destructuring-bind
(&key lisp-name name type place-p versions
(stage t) &allow-other-keys) _
(declare (ignore versions))
(assert lisp-name)
(let* ((lisp-name (intern lisp-name :vari.glsl))
(lisp-type (parse-gl-type-name type)))
`(,stage ,lisp-name ,name ,lisp-type ,place-p))))
(append *definitions-missing-from-glsl-spec*
glsl-spec:*variables*))))
`(progn
(setf varjo.internals::*glsl-variables*
',(mapcar (lambda (stage-name stage-type-name)
(cons stage-type-name
(mapcar #'rest (remove-if-not
(lambda (_) (eq stage-name _))
vars :key #'first))))
(cons t *stage-names*)
(cons t *stage-type-names*)))
(export ',(mapcar #'second vars) :vari.glsl))))
(populate-vars)
| null | https://raw.githubusercontent.com/cbaggers/varjo/9e77f30220053155d2ef8870ceba157f75e538d4/src/vari.glsl/variables-from-spec.lisp | lisp | (in-package :vari.glsl)
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *definitions-missing-from-glsl-spec*
'((:lisp-name "GL-IN" :name "gl_in" :type "{gl_PerVertex" :place-p t
:versions (150 330 400 410 420 430 440 450 460)
:stage :geometry)
)))
(defmacro populate-vars ()
(let ((vars (mapcar (lambda (_)
(destructuring-bind
(&key lisp-name name type place-p versions
(stage t) &allow-other-keys) _
(declare (ignore versions))
(assert lisp-name)
(let* ((lisp-name (intern lisp-name :vari.glsl))
(lisp-type (parse-gl-type-name type)))
`(,stage ,lisp-name ,name ,lisp-type ,place-p))))
(append *definitions-missing-from-glsl-spec*
glsl-spec:*variables*))))
`(progn
(setf varjo.internals::*glsl-variables*
',(mapcar (lambda (stage-name stage-type-name)
(cons stage-type-name
(mapcar #'rest (remove-if-not
(lambda (_) (eq stage-name _))
vars :key #'first))))
(cons t *stage-names*)
(cons t *stage-type-names*)))
(export ',(mapcar #'second vars) :vari.glsl))))
(populate-vars)
| |
3394d9c524ad4e18a7f8f69cfed8c5388301f00c3f3076824f8d97a820717381 | karlhof26/gimp-scheme | blackboard-effect_02.scm | * Copyright ( c ) 2007 for www.gimp.org.es
; * All rights reserved.
; *
; * Redistribution and use in source and binary forms, with or without
; * modification, are permitted provided that the following conditions
; * are met:
* 1 . Redistributions of source code must retain the above copyright
; * notice, this list of conditions and the following disclaimer.
* 2 . Redistributions in binary form must reproduce the above copyright
; * notice, this list of conditions and the following disclaimer in the
; * documentation and/or other materials provided with the distribution.
* 3 . Neither the name of copyright holders nor the names of its
; * contributors may be used to endorse or promote products derived
; * from this software without specific prior written permission.
; *
; * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ` ` AS IS '' AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED
; * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
; * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
; * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
; * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
; * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
; * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
; * POSSIBILITY OF SUCH DAMAGE.
(define (script-fu-simple-tizass img drawable copy aplanar)
(let*
(
;(image)
(testnum 0)
)
(define image (if (= copy TRUE)
(car (gimp-image-duplicate img))
img
)
)
; (if (= copy TRUE)
( set ! image ( car ( gimp - image - duplicate ) ) )
( set ! image )
; )
; (gimp-image-undo-disable image)
. / Start a undo group .
La imagen es aplanada / The image is flatened
a la imagen . / Create new layer and add to the image
(define shadow-layer (car (gimp-layer-copy drawable 1)))
(gimp-image-insert-layer image shadow-layer 0 -1)
(gimp-layer-set-name shadow-layer "Sat") ; Nombre de la capa / Layer's name
(gimp-layer-set-mode shadow-layer 12) ; Modo saturación / Saturation mode
; Create new layer and add to the image
(define shadow-layer2 (car (gimp-layer-copy drawable 1)))
(gimp-image-add-layer image shadow-layer2 -1)
(gimp-layer-set-name shadow-layer2 "Hue / Tono")
(gimp-layer-set-mode shadow-layer2 11)
a la capa base / to base layer
(plug-in-sobel 1 image drawable 1 1 0)
(gimp-drawable-equalize drawable 0)
(if (= aplanar TRUE)
(set! drawable (car (gimp-image-flatten image)))
()
)
(gimp-image-set-active-layer image drawable)
(if (= copy TRUE)
(gimp-display-new image)
)
; (gimp-image-undo-enable image)
(gimp-image-undo-group-end image)
(gimp-displays-flush)
)
)
(script-fu-register "script-fu-simple-tizass"
"Blackboard effect"
"It looks as if it had been drawn on a blackboard with colored chalk. This effect works poorly with excessively compressed JPEG photos. \n file: blackboard-effect_02.scm"
"Is "
"Pucelo (based on a Simon Budig sample script) for www.gimp.org.es"
"2007/4/21"
"RGB*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-TOGGLE "Work on copy" FALSE
SF-TOGGLE "Aplanar la imagen al final (Flatten image at finish)" TRUE
)
(script-fu-menu-register "script-fu-simple-tizass"
"<Toolbox>/Script-Fu/Artistic/")
;end of script | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/1199124f12bc4d0b5eb90ea4cffebdca28791581/blackboard-effect_02.scm | scheme | * All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* notice, this list of conditions and the following disclaimer.
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
(image)
(if (= copy TRUE)
)
(gimp-image-undo-disable image)
Nombre de la capa / Layer's name
Modo saturación / Saturation mode
Create new layer and add to the image
(gimp-image-undo-enable image)
end of script | * Copyright ( c ) 2007 for www.gimp.org.es
* 1 . Redistributions of source code must retain the above copyright
* 2 . Redistributions in binary form must reproduce the above copyright
* 3 . Neither the name of copyright holders nor the names of its
* ` ` AS IS '' AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED
* BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
* INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
(define (script-fu-simple-tizass img drawable copy aplanar)
(let*
(
(testnum 0)
)
(define image (if (= copy TRUE)
(car (gimp-image-duplicate img))
img
)
)
( set ! image ( car ( gimp - image - duplicate ) ) )
( set ! image )
. / Start a undo group .
La imagen es aplanada / The image is flatened
a la imagen . / Create new layer and add to the image
(define shadow-layer (car (gimp-layer-copy drawable 1)))
(gimp-image-insert-layer image shadow-layer 0 -1)
(define shadow-layer2 (car (gimp-layer-copy drawable 1)))
(gimp-image-add-layer image shadow-layer2 -1)
(gimp-layer-set-name shadow-layer2 "Hue / Tono")
(gimp-layer-set-mode shadow-layer2 11)
a la capa base / to base layer
(plug-in-sobel 1 image drawable 1 1 0)
(gimp-drawable-equalize drawable 0)
(if (= aplanar TRUE)
(set! drawable (car (gimp-image-flatten image)))
()
)
(gimp-image-set-active-layer image drawable)
(if (= copy TRUE)
(gimp-display-new image)
)
(gimp-image-undo-group-end image)
(gimp-displays-flush)
)
)
(script-fu-register "script-fu-simple-tizass"
"Blackboard effect"
"It looks as if it had been drawn on a blackboard with colored chalk. This effect works poorly with excessively compressed JPEG photos. \n file: blackboard-effect_02.scm"
"Is "
"Pucelo (based on a Simon Budig sample script) for www.gimp.org.es"
"2007/4/21"
"RGB*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-TOGGLE "Work on copy" FALSE
SF-TOGGLE "Aplanar la imagen al final (Flatten image at finish)" TRUE
)
(script-fu-menu-register "script-fu-simple-tizass"
"<Toolbox>/Script-Fu/Artistic/")
|
14b0c4fbef8a94a44cc7143dba96562cdf65a954a0b9d2252e8c062f364e64a4 | digitallyinduced/ihp | Controller.hs | module IHP.IDE.Data.Controller where
import IHP.ControllerPrelude
import IHP.IDE.ToolServer.Types
import IHP.IDE.Data.View.ShowDatabase
import IHP.IDE.Data.View.ShowTableRows
import IHP.IDE.Data.View.ShowQuery
import IHP.IDE.Data.View.NewRow
import IHP.IDE.Data.View.EditRow
import IHP.IDE.Data.View.EditValue
import IHP.IDE.Data.View.ShowForeignKeyHoverCard
import qualified Database.PostgreSQL.Simple as PG
import qualified Database.PostgreSQL.Simple.FromField as PG
import qualified Database.PostgreSQL.Simple.FromRow as PG
import qualified Database.PostgreSQL.Simple.ToField as PG
import qualified Database.PostgreSQL.Simple.Types as PG
import qualified Data.Text as T
import qualified Data.ByteString.Builder
import Data.Functor ((<&>))
instance Controller DataController where
action ShowDatabaseAction = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
PG.close connection
case headMay tableNames of
Just tableName -> jumpToAction ShowTableRowsAction { tableName }
Nothing -> render ShowDatabaseView { .. }
action ShowTableRowsAction { tableName } = do
let page :: Int = paramOrDefault @Int 1 "page"
let pageSize :: Int = paramOrDefault @Int 20 "rows"
connection <- connectToAppDb
tableNames <- fetchTableNames connection
primaryKeyFields <- tablePrimaryKeyFields connection tableName
rows :: [[DynamicField]] <- fetchRowsPage connection tableName page pageSize
tableCols <- fetchTableCols connection tableName
totalRows <- tableLength connection tableName
PG.close connection
render ShowTableRowsView { .. }
action NewQueryAction = do
let queryText = ""
let queryResult = Nothing
render ShowQueryView { .. }
action QueryAction = do
connection <- connectToAppDb
let queryText = param @Text "query"
when (isEmpty queryText) do
redirectTo NewQueryAction
let query = fromString $ cs queryText
queryResult :: Maybe (Either PG.SqlError SqlConsoleResult) <- Just <$> if isQuery queryText then
(Right . SelectQueryResult <$> PG.query_ connection query) `catch` (pure . Left)
else
(Right . InsertOrUpdateResult <$> PG.execute_ connection query) `catch` (pure . Left)
PG.close connection
render ShowQueryView { .. }
action DeleteEntryAction { primaryKey, tableName } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
primaryKeyFields <- tablePrimaryKeyFields connection tableName
let primaryKeyValues = T.splitOn "---" primaryKey
let query = "DELETE FROM " <> tableName <> " WHERE " <> intercalate " AND " ((<> " = ?") <$> primaryKeyFields)
PG.execute connection (PG.Query . cs $! query) primaryKeyValues
PG.close connection
redirectTo ShowTableRowsAction { .. }
action NewRowAction { tableName } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
rows :: [[DynamicField]] <- fetchRows connection tableName
tableCols <- fetchTableCols connection tableName
PG.close connection
render NewRowView { .. }
action CreateRowAction = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
let tableName = param "tableName"
tableCols <- fetchTableCols connection tableName
let values :: [PG.Action] = map (\col -> parseValues (param @Bool (cs (get #columnName col) <> "_")) (param @Bool (cs (get #columnName col) <> "-isBoolean")) (param @Text (cs (get #columnName col)))) tableCols
let query = "INSERT INTO " <> tableName <> " VALUES (" <> intercalate "," (map (const "?") values) <> ")"
PG.execute connection (PG.Query . cs $! query) values
PG.close connection
redirectTo ShowTableRowsAction { .. }
action EditRowAction { tableName, targetPrimaryKey } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
primaryKeyFields <- tablePrimaryKeyFields connection tableName
rows :: [[DynamicField]] <- fetchRows connection tableName
tableCols <- fetchTableCols connection tableName
let targetPrimaryKeyValues = T.splitOn "---" targetPrimaryKey
values <- fetchRow connection (cs tableName) targetPrimaryKeyValues
let (Just rowValues) = head values
PG.close connection
render EditRowView { .. }
action UpdateRowAction = do
let tableName = param "tableName"
connection <- connectToAppDb
tableNames <- fetchTableNames connection
tableCols <- fetchTableCols connection tableName
primaryKeyFields <- tablePrimaryKeyFields connection tableName
let values :: [PG.Action] = map (\col -> parseValues (param @Bool (cs (get #columnName col) <> "_")) (param @Bool (cs (get #columnName col) <> "-isBoolean")) (param @Text (cs (get #columnName col)))) tableCols
let columns :: [Text] = map (\col -> cs (get #columnName col)) tableCols
let primaryKeyValues = map (\pkey -> "'" <> (param @Text (cs pkey <> "-pk")) <> "'") primaryKeyFields
let query = "UPDATE " <> tableName <> " SET " <> intercalate ", " (updateValues (zip columns (map (const "?") values))) <> " WHERE " <> intercalate " AND " (updateValues (zip primaryKeyFields primaryKeyValues))
PG.execute connection (PG.Query . cs $! query) values
PG.close connection
redirectTo ShowTableRowsAction { .. }
action EditRowValueAction { tableName, targetName, id } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
rows :: [[DynamicField]] <- fetchRows connection tableName
let targetId = cs id
PG.close connection
render EditValueView { .. }
action ToggleBooleanFieldAction { tableName, targetName, targetPrimaryKey } = do
let id :: String = cs (param @Text "id")
let tableName = param "tableName"
connection <- connectToAppDb
tableNames <- fetchTableNames connection
tableCols <- fetchTableCols connection tableName
primaryKeyFields <- tablePrimaryKeyFields connection tableName
let targetPrimaryKeyValues = PG.Escape . cs <$> T.splitOn "---" targetPrimaryKey
let query = PG.Query . cs $! "UPDATE ? SET ? = NOT ? WHERE " <> intercalate " AND " ((<> " = ?") <$> primaryKeyFields)
let params = [PG.toField $ PG.Identifier tableName, PG.toField $ PG.Identifier targetName, PG.toField $ PG.Identifier targetName] <> targetPrimaryKeyValues
PG.execute connection query params
PG.close connection
redirectTo ShowTableRowsAction { .. }
action UpdateValueAction = do
let id :: String = cs (param @Text "id")
let tableName = param "tableName"
connection <- connectToAppDb
let targetCol = param "targetName"
let targetValue = param "targetValue"
let query = "UPDATE " <> tableName <> " SET " <> targetCol <> " = '" <> targetValue <> "' WHERE id = '" <> cs id <> "'"
PG.execute_ connection (PG.Query . cs $! query)
PG.close connection
redirectTo ShowTableRowsAction { .. }
action DeleteTableRowsAction { tableName } = do
connection <- connectToAppDb
let query = "TRUNCATE TABLE " <> tableName
PG.execute_ connection (PG.Query . cs $! query)
PG.close connection
redirectTo ShowTableRowsAction { .. }
action AutocompleteForeignKeyColumnAction { tableName, columnName, term } = do
connection <- connectToAppDb
rows :: Maybe [[DynamicField]] <- do
foreignKeyInfo <- fetchForeignKeyInfo connection tableName columnName
case foreignKeyInfo of
Just (foreignTable, foreignColumn) -> Just <$> fetchRowsPage connection foreignTable 1 50
Nothing -> pure Nothing
PG.close connection
case rows of
Just rows -> renderJson rows
Nothing -> renderNotFound
action ShowForeignKeyHoverCardAction { tableName, id, columnName } = do
connection <- connectToAppDb
hovercardData <- do
[Only (foreignId :: UUID)] <- PG.query connection "SELECT ? FROM ? WHERE id = ?" (PG.Identifier columnName, PG.Identifier tableName, id)
foreignKeyInfo <- fetchForeignKeyInfo connection tableName columnName
case foreignKeyInfo of
Just (foreignTable, foreignColumn) -> do
[record] <- PG.query connection "SELECT * FROM ? WHERE ? = ? LIMIT 1" (PG.Identifier foreignTable, PG.Identifier foreignColumn, foreignId)
pure $ Just (record, foreignTable)
Nothing -> pure Nothing
PG.close connection
case hovercardData of
Just (record, foreignTableName) -> render ShowForeignKeyHoverCardView { record, foreignTableName }
Nothing -> renderNotFound
connectToAppDb :: (?context :: ControllerContext) => IO PG.Connection
connectToAppDb = PG.connectPostgreSQL ?context.frameworkConfig.databaseUrl
fetchTableNames :: PG.Connection -> IO [Text]
fetchTableNames connection = do
values :: [[Text]] <- PG.query_ connection "SELECT tablename FROM pg_catalog.pg_tables where schemaname = 'public'"
pure (join values)
fetchTableCols :: PG.Connection -> Text -> IO [ColumnDefinition]
fetchTableCols connection tableName = do
PG.query connection "SELECT column_name,data_type,column_default,CASE WHEN is_nullable='YES' THEN true ELSE false END FROM information_schema.columns where table_name = ? ORDER BY ordinal_position" (PG.Only tableName)
fetchRow :: PG.Connection -> Text -> [Text] -> IO [[DynamicField]]
fetchRow connection tableName primaryKeyValues = do
pkFields <- tablePrimaryKeyFields connection tableName
let query = "SELECT * FROM " <> tableName <> " WHERE " <> intercalate " AND " ((<> " = ?") <$> pkFields)
PG.query connection (PG.Query . cs $! query) primaryKeyValues
instance PG.FromField DynamicField where
fromField field fieldValue = pure DynamicField { .. }
where
fieldName = fromMaybe "" (PG.name field)
instance PG.FromRow ColumnDefinition where
fromRow = ColumnDefinition <$> PG.field <*> PG.field <*> PG.field <*> PG.field
tablePrimaryKeyFields :: PG.Connection -> Text -> IO [Text]
tablePrimaryKeyFields connection tableName = do
fields <- PG.query connection "SELECT a.attname FROM pg_index i JOIN pg_attribute a ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey) WHERE i.indrelid = ?::regclass AND i.indisprimary" (PG.Only tableName) :: IO [PG.Only Text]
pure $ PG.fromOnly <$> fields
fetchRows :: FromRow r => PG.Connection -> Text -> IO [r]
fetchRows connection tableName = do
pkFields <- tablePrimaryKeyFields connection tableName
let query = "SELECT * FROM "
<> tableName
<> (if null pkFields
then ""
else " ORDER BY " <> intercalate ", " pkFields
)
PG.query_ connection (PG.Query . cs $! query)
fetchRowsPage :: FromRow r => PG.Connection -> Text -> Int -> Int -> IO [r]
fetchRowsPage connection tableName page rows = do
pkFields <- tablePrimaryKeyFields connection tableName
let slice = " OFFSET " <> show (page * rows - rows) <> " ROWS FETCH FIRST " <> show rows <> " ROWS ONLY"
let query = "SELECT * FROM "
<> tableName
<> (if null pkFields
then ""
else " ORDER BY " <> intercalate ", " pkFields
)
<> slice
PG.query_ connection (PG.Query . cs $! query)
tableLength :: PG.Connection -> Text -> IO Int
tableLength connection tableName = do
[Only count] <- PG.query connection "SELECT COUNT(*) FROM ?" [PG.Identifier tableName]
pure count
parseValues sqlMode isBoolField input
parseValues :: Bool -> Bool -> Text -> PG.Action
parseValues _ True "on" = PG.toField True
parseValues _ True "off" = PG.toField False
parseValues False _ text = PG.toField text
parseValues _ _ text = PG.Plain (Data.ByteString.Builder.byteString (cs text))
updateValues list = map (\elem -> fst elem <> " = " <> snd elem) list
isQuery sql = T.isInfixOf "SELECT" u
where u = T.toUpper sql
fetchForeignKeyInfo :: PG.Connection -> Text -> Text -> IO (Maybe (Text, Text))
fetchForeignKeyInfo connection tableName columnName = do
let sql = [plain|
SELECT
ccu.table_name AS foreign_table_name,
ccu.column_name AS foreign_column_name
FROM
information_schema.table_constraints AS tc
JOIN information_schema.key_column_usage AS kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage AS ccu
ON ccu.constraint_name = tc.constraint_name
AND ccu.table_schema = tc.table_schema
WHERE
tc.constraint_type = 'FOREIGN KEY'
AND tc.table_name = ?
AND kcu.column_name = ?
|]
let args = (tableName, columnName)
result <- PG.query connection (PG.Query $ cs sql) args
case result of
[(foreignTableName, foreignColumnName)] -> pure $ Just (foreignTableName, foreignColumnName)
otherwise -> pure $ Nothing
instance {-# OVERLAPS #-} ToJSON [DynamicField] where
toJSON fields = object (map (\DynamicField { fieldName, fieldValue } -> (cs fieldName) .= (fieldValueToJSON fieldValue)) fields)
where
fieldValueToJSON (Just bs) = toJSON ((cs bs) :: Text)
fieldValueToJSON Nothing = toJSON Null
toEncoding fields = pairs $ foldl' (<>) mempty (encodedFields)
where
encodedFields = (map (\DynamicField { fieldName, fieldValue } -> (cs fieldName) .= (fieldValueToJSON fieldValue)) fields)
fieldValueToJSON (Just bs) = toJSON ((cs bs) :: Text)
fieldValueToJSON Nothing = toJSON Null | null | https://raw.githubusercontent.com/digitallyinduced/ihp/ca4691970a130a1589506c66fe8257ac8670e905/IHP/IDE/Data/Controller.hs | haskell | # OVERLAPS # | module IHP.IDE.Data.Controller where
import IHP.ControllerPrelude
import IHP.IDE.ToolServer.Types
import IHP.IDE.Data.View.ShowDatabase
import IHP.IDE.Data.View.ShowTableRows
import IHP.IDE.Data.View.ShowQuery
import IHP.IDE.Data.View.NewRow
import IHP.IDE.Data.View.EditRow
import IHP.IDE.Data.View.EditValue
import IHP.IDE.Data.View.ShowForeignKeyHoverCard
import qualified Database.PostgreSQL.Simple as PG
import qualified Database.PostgreSQL.Simple.FromField as PG
import qualified Database.PostgreSQL.Simple.FromRow as PG
import qualified Database.PostgreSQL.Simple.ToField as PG
import qualified Database.PostgreSQL.Simple.Types as PG
import qualified Data.Text as T
import qualified Data.ByteString.Builder
import Data.Functor ((<&>))
instance Controller DataController where
action ShowDatabaseAction = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
PG.close connection
case headMay tableNames of
Just tableName -> jumpToAction ShowTableRowsAction { tableName }
Nothing -> render ShowDatabaseView { .. }
action ShowTableRowsAction { tableName } = do
let page :: Int = paramOrDefault @Int 1 "page"
let pageSize :: Int = paramOrDefault @Int 20 "rows"
connection <- connectToAppDb
tableNames <- fetchTableNames connection
primaryKeyFields <- tablePrimaryKeyFields connection tableName
rows :: [[DynamicField]] <- fetchRowsPage connection tableName page pageSize
tableCols <- fetchTableCols connection tableName
totalRows <- tableLength connection tableName
PG.close connection
render ShowTableRowsView { .. }
action NewQueryAction = do
let queryText = ""
let queryResult = Nothing
render ShowQueryView { .. }
action QueryAction = do
connection <- connectToAppDb
let queryText = param @Text "query"
when (isEmpty queryText) do
redirectTo NewQueryAction
let query = fromString $ cs queryText
queryResult :: Maybe (Either PG.SqlError SqlConsoleResult) <- Just <$> if isQuery queryText then
(Right . SelectQueryResult <$> PG.query_ connection query) `catch` (pure . Left)
else
(Right . InsertOrUpdateResult <$> PG.execute_ connection query) `catch` (pure . Left)
PG.close connection
render ShowQueryView { .. }
action DeleteEntryAction { primaryKey, tableName } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
primaryKeyFields <- tablePrimaryKeyFields connection tableName
let primaryKeyValues = T.splitOn "---" primaryKey
let query = "DELETE FROM " <> tableName <> " WHERE " <> intercalate " AND " ((<> " = ?") <$> primaryKeyFields)
PG.execute connection (PG.Query . cs $! query) primaryKeyValues
PG.close connection
redirectTo ShowTableRowsAction { .. }
action NewRowAction { tableName } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
rows :: [[DynamicField]] <- fetchRows connection tableName
tableCols <- fetchTableCols connection tableName
PG.close connection
render NewRowView { .. }
action CreateRowAction = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
let tableName = param "tableName"
tableCols <- fetchTableCols connection tableName
let values :: [PG.Action] = map (\col -> parseValues (param @Bool (cs (get #columnName col) <> "_")) (param @Bool (cs (get #columnName col) <> "-isBoolean")) (param @Text (cs (get #columnName col)))) tableCols
let query = "INSERT INTO " <> tableName <> " VALUES (" <> intercalate "," (map (const "?") values) <> ")"
PG.execute connection (PG.Query . cs $! query) values
PG.close connection
redirectTo ShowTableRowsAction { .. }
action EditRowAction { tableName, targetPrimaryKey } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
primaryKeyFields <- tablePrimaryKeyFields connection tableName
rows :: [[DynamicField]] <- fetchRows connection tableName
tableCols <- fetchTableCols connection tableName
let targetPrimaryKeyValues = T.splitOn "---" targetPrimaryKey
values <- fetchRow connection (cs tableName) targetPrimaryKeyValues
let (Just rowValues) = head values
PG.close connection
render EditRowView { .. }
action UpdateRowAction = do
let tableName = param "tableName"
connection <- connectToAppDb
tableNames <- fetchTableNames connection
tableCols <- fetchTableCols connection tableName
primaryKeyFields <- tablePrimaryKeyFields connection tableName
let values :: [PG.Action] = map (\col -> parseValues (param @Bool (cs (get #columnName col) <> "_")) (param @Bool (cs (get #columnName col) <> "-isBoolean")) (param @Text (cs (get #columnName col)))) tableCols
let columns :: [Text] = map (\col -> cs (get #columnName col)) tableCols
let primaryKeyValues = map (\pkey -> "'" <> (param @Text (cs pkey <> "-pk")) <> "'") primaryKeyFields
let query = "UPDATE " <> tableName <> " SET " <> intercalate ", " (updateValues (zip columns (map (const "?") values))) <> " WHERE " <> intercalate " AND " (updateValues (zip primaryKeyFields primaryKeyValues))
PG.execute connection (PG.Query . cs $! query) values
PG.close connection
redirectTo ShowTableRowsAction { .. }
action EditRowValueAction { tableName, targetName, id } = do
connection <- connectToAppDb
tableNames <- fetchTableNames connection
rows :: [[DynamicField]] <- fetchRows connection tableName
let targetId = cs id
PG.close connection
render EditValueView { .. }
action ToggleBooleanFieldAction { tableName, targetName, targetPrimaryKey } = do
let id :: String = cs (param @Text "id")
let tableName = param "tableName"
connection <- connectToAppDb
tableNames <- fetchTableNames connection
tableCols <- fetchTableCols connection tableName
primaryKeyFields <- tablePrimaryKeyFields connection tableName
let targetPrimaryKeyValues = PG.Escape . cs <$> T.splitOn "---" targetPrimaryKey
let query = PG.Query . cs $! "UPDATE ? SET ? = NOT ? WHERE " <> intercalate " AND " ((<> " = ?") <$> primaryKeyFields)
let params = [PG.toField $ PG.Identifier tableName, PG.toField $ PG.Identifier targetName, PG.toField $ PG.Identifier targetName] <> targetPrimaryKeyValues
PG.execute connection query params
PG.close connection
redirectTo ShowTableRowsAction { .. }
action UpdateValueAction = do
let id :: String = cs (param @Text "id")
let tableName = param "tableName"
connection <- connectToAppDb
let targetCol = param "targetName"
let targetValue = param "targetValue"
let query = "UPDATE " <> tableName <> " SET " <> targetCol <> " = '" <> targetValue <> "' WHERE id = '" <> cs id <> "'"
PG.execute_ connection (PG.Query . cs $! query)
PG.close connection
redirectTo ShowTableRowsAction { .. }
action DeleteTableRowsAction { tableName } = do
connection <- connectToAppDb
let query = "TRUNCATE TABLE " <> tableName
PG.execute_ connection (PG.Query . cs $! query)
PG.close connection
redirectTo ShowTableRowsAction { .. }
action AutocompleteForeignKeyColumnAction { tableName, columnName, term } = do
connection <- connectToAppDb
rows :: Maybe [[DynamicField]] <- do
foreignKeyInfo <- fetchForeignKeyInfo connection tableName columnName
case foreignKeyInfo of
Just (foreignTable, foreignColumn) -> Just <$> fetchRowsPage connection foreignTable 1 50
Nothing -> pure Nothing
PG.close connection
case rows of
Just rows -> renderJson rows
Nothing -> renderNotFound
action ShowForeignKeyHoverCardAction { tableName, id, columnName } = do
connection <- connectToAppDb
hovercardData <- do
[Only (foreignId :: UUID)] <- PG.query connection "SELECT ? FROM ? WHERE id = ?" (PG.Identifier columnName, PG.Identifier tableName, id)
foreignKeyInfo <- fetchForeignKeyInfo connection tableName columnName
case foreignKeyInfo of
Just (foreignTable, foreignColumn) -> do
[record] <- PG.query connection "SELECT * FROM ? WHERE ? = ? LIMIT 1" (PG.Identifier foreignTable, PG.Identifier foreignColumn, foreignId)
pure $ Just (record, foreignTable)
Nothing -> pure Nothing
PG.close connection
case hovercardData of
Just (record, foreignTableName) -> render ShowForeignKeyHoverCardView { record, foreignTableName }
Nothing -> renderNotFound
connectToAppDb :: (?context :: ControllerContext) => IO PG.Connection
connectToAppDb = PG.connectPostgreSQL ?context.frameworkConfig.databaseUrl
fetchTableNames :: PG.Connection -> IO [Text]
fetchTableNames connection = do
values :: [[Text]] <- PG.query_ connection "SELECT tablename FROM pg_catalog.pg_tables where schemaname = 'public'"
pure (join values)
fetchTableCols :: PG.Connection -> Text -> IO [ColumnDefinition]
fetchTableCols connection tableName = do
PG.query connection "SELECT column_name,data_type,column_default,CASE WHEN is_nullable='YES' THEN true ELSE false END FROM information_schema.columns where table_name = ? ORDER BY ordinal_position" (PG.Only tableName)
fetchRow :: PG.Connection -> Text -> [Text] -> IO [[DynamicField]]
fetchRow connection tableName primaryKeyValues = do
pkFields <- tablePrimaryKeyFields connection tableName
let query = "SELECT * FROM " <> tableName <> " WHERE " <> intercalate " AND " ((<> " = ?") <$> pkFields)
PG.query connection (PG.Query . cs $! query) primaryKeyValues
instance PG.FromField DynamicField where
fromField field fieldValue = pure DynamicField { .. }
where
fieldName = fromMaybe "" (PG.name field)
instance PG.FromRow ColumnDefinition where
fromRow = ColumnDefinition <$> PG.field <*> PG.field <*> PG.field <*> PG.field
tablePrimaryKeyFields :: PG.Connection -> Text -> IO [Text]
tablePrimaryKeyFields connection tableName = do
fields <- PG.query connection "SELECT a.attname FROM pg_index i JOIN pg_attribute a ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey) WHERE i.indrelid = ?::regclass AND i.indisprimary" (PG.Only tableName) :: IO [PG.Only Text]
pure $ PG.fromOnly <$> fields
fetchRows :: FromRow r => PG.Connection -> Text -> IO [r]
fetchRows connection tableName = do
pkFields <- tablePrimaryKeyFields connection tableName
let query = "SELECT * FROM "
<> tableName
<> (if null pkFields
then ""
else " ORDER BY " <> intercalate ", " pkFields
)
PG.query_ connection (PG.Query . cs $! query)
fetchRowsPage :: FromRow r => PG.Connection -> Text -> Int -> Int -> IO [r]
fetchRowsPage connection tableName page rows = do
pkFields <- tablePrimaryKeyFields connection tableName
let slice = " OFFSET " <> show (page * rows - rows) <> " ROWS FETCH FIRST " <> show rows <> " ROWS ONLY"
let query = "SELECT * FROM "
<> tableName
<> (if null pkFields
then ""
else " ORDER BY " <> intercalate ", " pkFields
)
<> slice
PG.query_ connection (PG.Query . cs $! query)
tableLength :: PG.Connection -> Text -> IO Int
tableLength connection tableName = do
[Only count] <- PG.query connection "SELECT COUNT(*) FROM ?" [PG.Identifier tableName]
pure count
parseValues sqlMode isBoolField input
parseValues :: Bool -> Bool -> Text -> PG.Action
parseValues _ True "on" = PG.toField True
parseValues _ True "off" = PG.toField False
parseValues False _ text = PG.toField text
parseValues _ _ text = PG.Plain (Data.ByteString.Builder.byteString (cs text))
updateValues list = map (\elem -> fst elem <> " = " <> snd elem) list
isQuery sql = T.isInfixOf "SELECT" u
where u = T.toUpper sql
fetchForeignKeyInfo :: PG.Connection -> Text -> Text -> IO (Maybe (Text, Text))
fetchForeignKeyInfo connection tableName columnName = do
let sql = [plain|
SELECT
ccu.table_name AS foreign_table_name,
ccu.column_name AS foreign_column_name
FROM
information_schema.table_constraints AS tc
JOIN information_schema.key_column_usage AS kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage AS ccu
ON ccu.constraint_name = tc.constraint_name
AND ccu.table_schema = tc.table_schema
WHERE
tc.constraint_type = 'FOREIGN KEY'
AND tc.table_name = ?
AND kcu.column_name = ?
|]
let args = (tableName, columnName)
result <- PG.query connection (PG.Query $ cs sql) args
case result of
[(foreignTableName, foreignColumnName)] -> pure $ Just (foreignTableName, foreignColumnName)
otherwise -> pure $ Nothing
toJSON fields = object (map (\DynamicField { fieldName, fieldValue } -> (cs fieldName) .= (fieldValueToJSON fieldValue)) fields)
where
fieldValueToJSON (Just bs) = toJSON ((cs bs) :: Text)
fieldValueToJSON Nothing = toJSON Null
toEncoding fields = pairs $ foldl' (<>) mempty (encodedFields)
where
encodedFields = (map (\DynamicField { fieldName, fieldValue } -> (cs fieldName) .= (fieldValueToJSON fieldValue)) fields)
fieldValueToJSON (Just bs) = toJSON ((cs bs) :: Text)
fieldValueToJSON Nothing = toJSON Null |
7b41bf4dd8ef350b82283cdb0c4e85585e26636fa9148e88fb9a85ee541b590d | janestreet/ecaml | funcall.mli | * A typeful interface for calling Elisp , as [ external ] does for
open! Core
open! Import
type 'a t
* [ Wrap ] wraps an Elisp function as an OCaml function . Idiomatic use looks like :
{ [
let not = . Wrap.("not " < : bool @- > return bool )
let about_emacs = . Wrap.("about - emacs " < : nullary @- > return nil )
] }
{[
let not = Funcall.Wrap.("not" <: bool @-> return bool)
let about_emacs = Funcall.Wrap.("about-emacs" <: nullary @-> return nil)
]} *)
module Wrap : sig
val return : 'a Value.Type.t -> 'a t
val nil : unit Value.Type.t
val nullary : unit Value.Type.t
val ( <: ) : string -> 'a t -> 'a
val ( @-> ) : 'a Value.Type.t -> 'b t -> ('a -> 'b) t
include Value.Type.S
end
module Private : sig
val apply : 'a t -> 'a -> Value.t list -> on_parse_error:(exn -> Value.t) -> Value.t
val wrap_unrolled : 'a t -> Value.t -> 'a
end
| null | https://raw.githubusercontent.com/janestreet/ecaml/7c16e5720ee1da04e0757cf185a074debf9088df/ecaml_value/src/funcall.mli | ocaml | * A typeful interface for calling Elisp , as [ external ] does for
open! Core
open! Import
type 'a t
* [ Wrap ] wraps an Elisp function as an OCaml function . Idiomatic use looks like :
{ [
let not = . Wrap.("not " < : bool @- > return bool )
let about_emacs = . Wrap.("about - emacs " < : nullary @- > return nil )
] }
{[
let not = Funcall.Wrap.("not" <: bool @-> return bool)
let about_emacs = Funcall.Wrap.("about-emacs" <: nullary @-> return nil)
]} *)
module Wrap : sig
val return : 'a Value.Type.t -> 'a t
val nil : unit Value.Type.t
val nullary : unit Value.Type.t
val ( <: ) : string -> 'a t -> 'a
val ( @-> ) : 'a Value.Type.t -> 'b t -> ('a -> 'b) t
include Value.Type.S
end
module Private : sig
val apply : 'a t -> 'a -> Value.t list -> on_parse_error:(exn -> Value.t) -> Value.t
val wrap_unrolled : 'a t -> Value.t -> 'a
end
| |
3fd8a0d2799541c5afec3c4483cbb8b53467c6e0857eb5bbe18badc736b7358a | racket/redex | typed-info.rkt | #lang racket/base
(require racket/runtime-path
"../util/info-util.rkt")
(provide (all-defined-out))
(define name "rvm")
(define fname (make-path-root 'rvm))
(define-runtime-path here ".")
(define (all-mods)
(all-mods/type 'typed here name fname)) | null | https://raw.githubusercontent.com/racket/redex/4c2dc96d90cedeb08ec1850575079b952c5ad396/redex-benchmark/redex/benchmark/models/rvm/typed-info.rkt | racket | #lang racket/base
(require racket/runtime-path
"../util/info-util.rkt")
(provide (all-defined-out))
(define name "rvm")
(define fname (make-path-root 'rvm))
(define-runtime-path here ".")
(define (all-mods)
(all-mods/type 'typed here name fname)) | |
551a9ea14a543b0fcfc21165521e88ea936fc491a6944a3ef5b7363b875c2da2 | balayette/blockchan | transaction.ml | type t =
{ data : Transaction_data.t;
hash : string;
timestamp : int;
}
let print_transaction t =
Printf.printf "TRANSACTION:\nhash : %s\ntimestamp : %d\n" t.hash t.timestamp;
Transaction_data.print_transaction_data t.data;
Printf.printf "-------------\n\n\n"
let create_transaction data hash timestamp =
{data; hash; timestamp}
let new_transaction data =
let hash = Crypto.transaction_hash data in
create_transaction data hash (int_of_float (Unix.time ()))
let get_hash tr = tr.hash
let get_data tr = tr.data
let get_timestamp tr = tr.timestamp
let transaction_of_json_ds tj =
let open Json_ds_t in
let t_data = Transaction_data.transaction_data_of_json_ds tj.data in
match t_data with
| None -> None
| Some x -> Some (create_transaction x tj.hash tj.timestamp)
let json_ds_of_transaction tr =
let open Json_ds_t in
{data = (Transaction_data.json_ds_of_transaction_data (get_data tr));
hash = (get_hash tr);
timestamp = (get_timestamp tr)
}
| null | https://raw.githubusercontent.com/balayette/blockchan/3b7e292ec9e9574eefcdebd0a6036698a80febdb/src/transaction.ml | ocaml | type t =
{ data : Transaction_data.t;
hash : string;
timestamp : int;
}
let print_transaction t =
Printf.printf "TRANSACTION:\nhash : %s\ntimestamp : %d\n" t.hash t.timestamp;
Transaction_data.print_transaction_data t.data;
Printf.printf "-------------\n\n\n"
let create_transaction data hash timestamp =
{data; hash; timestamp}
let new_transaction data =
let hash = Crypto.transaction_hash data in
create_transaction data hash (int_of_float (Unix.time ()))
let get_hash tr = tr.hash
let get_data tr = tr.data
let get_timestamp tr = tr.timestamp
let transaction_of_json_ds tj =
let open Json_ds_t in
let t_data = Transaction_data.transaction_data_of_json_ds tj.data in
match t_data with
| None -> None
| Some x -> Some (create_transaction x tj.hash tj.timestamp)
let json_ds_of_transaction tr =
let open Json_ds_t in
{data = (Transaction_data.json_ds_of_transaction_data (get_data tr));
hash = (get_hash tr);
timestamp = (get_timestamp tr)
}
| |
df2c3d8f574ef4c543cde52e4b822962ffc33ebfe7b536ff5873a5500945085c | arrdem/sad | stack.clj | (ns me.arrdem.sad.runtime.stack)
(def ^:dynamic rule-stack (atom '("toplevel")))
(def ^:dynamic indent-width (atom 0))
(def ^:dynamic prn-prefix (atom true))
(defn- stack []
(let [s (apply str (interpose "-> " (reverse @rule-stack)))]
(reset! indent-width (count s))
s))
(defn scope-pop! []
(swap! rule-stack clojure.core/pop)
(reset! prn-prefix true))
(defn scope-push! [s]
(swap! rule-stack conj s)
(reset! prn-prefix true))
(defn debug [& msg]
(let [prefix (stack)
indent (apply str (repeat @indent-width " "))]
(if @prn-prefix
(println prefix))
(reset! prn-prefix false)
(doseq [m msg]
(println indent m))))
| null | https://raw.githubusercontent.com/arrdem/sad/11aca91ec38009069d6d7f50de7fd2326cc627b8/src/me/arrdem/sad/runtime/stack.clj | clojure | (ns me.arrdem.sad.runtime.stack)
(def ^:dynamic rule-stack (atom '("toplevel")))
(def ^:dynamic indent-width (atom 0))
(def ^:dynamic prn-prefix (atom true))
(defn- stack []
(let [s (apply str (interpose "-> " (reverse @rule-stack)))]
(reset! indent-width (count s))
s))
(defn scope-pop! []
(swap! rule-stack clojure.core/pop)
(reset! prn-prefix true))
(defn scope-push! [s]
(swap! rule-stack conj s)
(reset! prn-prefix true))
(defn debug [& msg]
(let [prefix (stack)
indent (apply str (repeat @indent-width " "))]
(if @prn-prefix
(println prefix))
(reset! prn-prefix false)
(doseq [m msg]
(println indent m))))
| |
d628c351beea2d813102fd17be0ae4f06c750d2e91a94cc8f39cf702b4bdf2f6 | hammerlab/ketrew | myocamlbuild.ml | open Nonstd
open Solvuu_build.Std
let (//) = Filename.concat
let failwithf fmt = ksprintf failwith fmt
let project_name = "ketrew"
let version = "3.2.0+dev"
let build_tests =
try Sys.getenv "WITH_TESTS" = "true" with _ -> false
let jsoo_debug =
try Sys.getenv "JSOO_DEBUG_MODE" = "true" with _ -> false
let with_bisect =
try Sys.getenv "WITH_BISECT" = "true" with _ -> false
let with_postgresql =
Findlib.installed "postgresql"
let pure_lib_packages = [
"sosa"; "nonstd"; "docout"; "pvem"; "yojson"; "uri"; "cohttp";
"ppx_deriving_yojson"; "ppx_deriving.std"; "react"; "reactiveData";
] @ (if with_bisect then ["bisect_ppx"] else [])
(* Older versionso of Lwt build `lwt.react`, then Lwt ≥ 3.0.0 uses
`lwt_react` as a separate opam package. *)
let lwt_react =
if Findlib.installed "lwt_react"
then "lwt_react"
else "lwt.react"
let lwt_unix_lib_packages = pure_lib_packages @ [
"threads"; "pvem_lwt_unix"; "cmdliner"; "cohttp-lwt-unix"; "conduit";
"dynlink"; "findlib"; lwt_react;
]
@ (if with_postgresql then ["postgresql"] else [])
let joo_packages = pure_lib_packages @ [
"js_of_ocaml"; "js_of_ocaml-lwt"; "js_of_ocaml-ppx"; "js_of_ocaml-tyxml";
]
let ocaml_options (f : _ Project.with_options) =
f ~bin_annot:()
~short_paths:()
~g:()
~w:"+9"
~strict_sequence:()
~safe_string:()
let project_lib =
(ocaml_options Project.lib)
~build_plugin:true
let project_app =
(ocaml_options Project.app)
let meta_dot_ml = "src/pure/metadata.ml"
let generate_meta_data () =
let cmd_option cmd =
try
Some (
Ocamlbuild_pack.My_unix.run_and_read cmd
|> fun x -> String.sub x 0 (String.length x - 1)
)
with _ -> None in
let git_last_commit () = cmd_option "git rev-parse HEAD" in
let git_describe () = cmd_option "git describe --tags --long --dirty" in
let option_to_string =
Option.value_map ~default:"None" ~f:(sprintf "Some %S") in
Solvuu_build.Util.Rule.rule
~name:"meta-data-generation"
~prods:[meta_dot_ml]
~deps:[]
~insert:`bottom
begin fun env builder ->
let def name ~doc fmt =
ksprintf (fun s -> sprintf "\n(** %s *)\nlet %s = %s" doc name s) fmt in
let lines =
List.map ~f:(sprintf "%s\n") [
"(** Metadata Module Generated by the Build System *)";
def "version" ~doc:"Official version string of the current build"
"%S" version;
def "git_commit" ~doc:"Current Git commit (if avaiable at build-time)"
"%s" (git_last_commit () |> option_to_string);
def "git_description"
~doc:"Current result of [\"git describe\"] \
(if avaiable at build-time)"
"%s" (git_describe () |> option_to_string);
def "findlib_packages"
~doc:"List of find-lib packages linked in the [ketrew] binary."
"[%s]"
(List.map lwt_unix_lib_packages ~f:(sprintf "%S")
|> String.concat "; ");
def "with_postgresql"
~doc:"Whether the [ketrew.lwt_unix] (and hence the [ketrew] app) \
are linked with PostgreSQL (and hence “server”) support."
"%b" with_postgresql;
def "jsoo_debug"
~doc:"Whether the WebUI's code was build using a bunch of \
[js_of_ocaml] debug flags."
"%b" with_postgresql;
def "with_bisect"
~doc:"Whether the Ketrew was built with [bisect_ppx]."
"%b" with_bisect;
] in
let open Ocamlbuild_plugin in
Seq [
Echo (lines, meta_dot_ml);
]
end
let pure_lib : Project.item =
project_lib (project_name ^ ".pure")
~thread:()
~findlib_deps:pure_lib_packages
~ml_files:(`Add [Filename.basename meta_dot_ml])
~dir:"src/pure"
~style:(`Pack (project_name ^ "_pure" |> String.capitalize_ascii))
let js_lib : Project.item =
let name = project_name ^ ".client-joo" in
project_lib name
~install:`No
~dir:"src/client-joo/"
~internal_deps:[pure_lib]
~findlib_deps:joo_packages
~style:`Basic
let js_app : Project.item =
let name = project_name ^ "-client-joo" in
project_app name
~install:`No
~file:"src/client-joo/webapp.ml"
~internal_deps:[js_lib]
~findlib_deps:joo_packages
let cmdf fmt = ksprintf Ocamlbuild_plugin.(fun s -> Cmd (Sh s)) fmt
let gui_page = "src/lib/client_html.ml"
let make_gui_page () =
let client_dot_byte =
match js_app with
| Project.Lib _ -> assert false
| Project.App app -> Project.path_of_app ~suffix:".byte" app in
let jsoo_flags =
if jsoo_debug then "--pretty --no-inline --debug-info" else "" in
let css = "src/css/bootstrap_335_min.css" in
let template = "tools/template-gui.sh" in
let jsoo_debug_level = if jsoo_debug then 1 else 0 in
Solvuu_build.Util.Rule.rule
~name:"gui-page-generation"
~prods:[gui_page]
~deps:[client_dot_byte; template; css]
~insert:`bottom
begin fun env builder ->
let open Ocamlbuild_plugin in
Seq [
cmdf "cp ../%s style.css" css;
cmdf "js_of_ocaml %s +weak.js %s -o client.js"
jsoo_flags client_dot_byte;
cmdf "../%s \
gui-page.html client.js \
style.css '' %d" template jsoo_debug_level;
cmdf "ocamlify --var-string gui_page \
gui-page.html --output %s" gui_page;
]
end
let persistent_data_ml = "src/lib/persistent_data.ml"
let make_persistent_data () =
let src = "src/lib/persistent_data.cppo.ml" in
Solvuu_build.Util.Rule.rule
~name:"cppo-persistent-data"
~prods:[persistent_data_ml]
~deps:[src]
~insert:`top
begin fun env builder ->
let open Ocamlbuild_plugin in
Seq [
cmdf "cppo %s %s > %s"
(if with_postgresql then "-D WITH_POSTGRESQL=true" else "")
src persistent_data_ml;
]
end
let lwt_unix_lib : Project.item =
let dir = "src/lib" in
let ml_files = (* We need to override the files to remove the ".cppo.ml" *)
let all_files = try Sys.readdir dir |> Array.to_list with _ -> [] in
[
Filename.basename gui_page;
Filename.basename persistent_data_ml;
] @ List.filter all_files
~f:(fun f ->
not (Filename.check_suffix f ".mli")
&& not (Filename.check_suffix f ".cppo.ml"))
in
project_lib project_name
~thread:()
~findlib_deps:lwt_unix_lib_packages
~ml_files:(`Replace ml_files)
~dir
~internal_deps:[pure_lib]
~style:(`Pack project_name)
let app : Project.item =
project_app project_name
~thread:()
~file:"src/app/cli_main.ml"
~internal_deps:[lwt_unix_lib]
let test_apps : Project.item list =
if build_tests
then [
project_app (project_name ^ "-test")
~thread:()
~file:"src/test/main.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-workflow-examples")
~thread:()
~file:"src/test/Workflow_Examples.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-preconfigured-main-test")
~thread:()
~file:"src/test/preconfigured_main.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-persistance-test")
~thread:()
~file:"src/test/persistance.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-synth-workflows")
~thread:()
~file:"src/test/synthetic_workflows.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
] @ begin
let plugin =
project_lib (project_name ^ ".dummy-plugin")
~thread:()
~dir:"src/test/dummy-plugin/"
~install:`No
~internal_deps:[lwt_unix_lib]
~style:(`Pack "dummy_plugin_test_lib") in
[
plugin;
project_app (project_name ^ "-dummy-plugin-user")
~thread:()
~file:"src/test/dummy_plugin_user.ml"
~install:`No
~internal_deps:[plugin];
]
end
else []
let build_doc () =
let lib = (match lwt_unix_lib with Project.Lib l -> l | _ -> assert false) in
let paths d =
try Sys.readdir d
|> Array.to_list
|> List.filter ~f:(fun f ->
(Filename.check_suffix f ".mli"
|| Filename.check_suffix f ".ml")
)
|> List.map ~f:(fun p -> d // p)
with e ->
failwithf "Cannot read dir: %S, %s, from %s"
d (Printexc.to_string e) (Sys.getcwd ()) in
let deps =
["README.md"]
@ [Project.path_of_pack ~suffix:".cmo" lib]
@ paths "src/doc"
@ paths "src/test"
@ paths "src/test/dummy-plugin"
in
Solvuu_build.Util.Rule.rule
~name:"Build-doc"
~prods:["doc/index.html"]
~deps
~insert:`bottom
begin fun env builder ->
let open Ocamlbuild_plugin in
Seq [
cmdf "../tools/build-documentation.sh %s"
(String.concat "," lwt_unix_lib_packages);
]
end
let ocamlinit_postfix = [
sprintf "open %s_pure" (String.capitalize_ascii project_name);
sprintf "open %s" (String.capitalize_ascii project_name);
]
let () =
Project.basic1 ~project_name ~version ~ocamlinit_postfix
~additional_rules:[
generate_meta_data;
make_gui_page;
make_persistent_data;
We want the rules of the JSOO app but not the app
itself since it does n't build in ` native ` mode
itself since it doesn't build in `native` mode *)
let open Project in
match js_lib, js_app with
| Lib l, App a -> build_app a; build_lib l
| _, _ -> assert false
end;
build_doc;
]
([pure_lib; lwt_unix_lib; app;] @ test_apps)
| null | https://raw.githubusercontent.com/hammerlab/ketrew/8940d48fbe174709f076b7130974ecd0ed831d58/myocamlbuild.ml | ocaml | Older versionso of Lwt build `lwt.react`, then Lwt ≥ 3.0.0 uses
`lwt_react` as a separate opam package.
We need to override the files to remove the ".cppo.ml" | open Nonstd
open Solvuu_build.Std
let (//) = Filename.concat
let failwithf fmt = ksprintf failwith fmt
let project_name = "ketrew"
let version = "3.2.0+dev"
let build_tests =
try Sys.getenv "WITH_TESTS" = "true" with _ -> false
let jsoo_debug =
try Sys.getenv "JSOO_DEBUG_MODE" = "true" with _ -> false
let with_bisect =
try Sys.getenv "WITH_BISECT" = "true" with _ -> false
let with_postgresql =
Findlib.installed "postgresql"
let pure_lib_packages = [
"sosa"; "nonstd"; "docout"; "pvem"; "yojson"; "uri"; "cohttp";
"ppx_deriving_yojson"; "ppx_deriving.std"; "react"; "reactiveData";
] @ (if with_bisect then ["bisect_ppx"] else [])
let lwt_react =
if Findlib.installed "lwt_react"
then "lwt_react"
else "lwt.react"
let lwt_unix_lib_packages = pure_lib_packages @ [
"threads"; "pvem_lwt_unix"; "cmdliner"; "cohttp-lwt-unix"; "conduit";
"dynlink"; "findlib"; lwt_react;
]
@ (if with_postgresql then ["postgresql"] else [])
let joo_packages = pure_lib_packages @ [
"js_of_ocaml"; "js_of_ocaml-lwt"; "js_of_ocaml-ppx"; "js_of_ocaml-tyxml";
]
let ocaml_options (f : _ Project.with_options) =
f ~bin_annot:()
~short_paths:()
~g:()
~w:"+9"
~strict_sequence:()
~safe_string:()
let project_lib =
(ocaml_options Project.lib)
~build_plugin:true
let project_app =
(ocaml_options Project.app)
let meta_dot_ml = "src/pure/metadata.ml"
let generate_meta_data () =
let cmd_option cmd =
try
Some (
Ocamlbuild_pack.My_unix.run_and_read cmd
|> fun x -> String.sub x 0 (String.length x - 1)
)
with _ -> None in
let git_last_commit () = cmd_option "git rev-parse HEAD" in
let git_describe () = cmd_option "git describe --tags --long --dirty" in
let option_to_string =
Option.value_map ~default:"None" ~f:(sprintf "Some %S") in
Solvuu_build.Util.Rule.rule
~name:"meta-data-generation"
~prods:[meta_dot_ml]
~deps:[]
~insert:`bottom
begin fun env builder ->
let def name ~doc fmt =
ksprintf (fun s -> sprintf "\n(** %s *)\nlet %s = %s" doc name s) fmt in
let lines =
List.map ~f:(sprintf "%s\n") [
"(** Metadata Module Generated by the Build System *)";
def "version" ~doc:"Official version string of the current build"
"%S" version;
def "git_commit" ~doc:"Current Git commit (if avaiable at build-time)"
"%s" (git_last_commit () |> option_to_string);
def "git_description"
~doc:"Current result of [\"git describe\"] \
(if avaiable at build-time)"
"%s" (git_describe () |> option_to_string);
def "findlib_packages"
~doc:"List of find-lib packages linked in the [ketrew] binary."
"[%s]"
(List.map lwt_unix_lib_packages ~f:(sprintf "%S")
|> String.concat "; ");
def "with_postgresql"
~doc:"Whether the [ketrew.lwt_unix] (and hence the [ketrew] app) \
are linked with PostgreSQL (and hence “server”) support."
"%b" with_postgresql;
def "jsoo_debug"
~doc:"Whether the WebUI's code was build using a bunch of \
[js_of_ocaml] debug flags."
"%b" with_postgresql;
def "with_bisect"
~doc:"Whether the Ketrew was built with [bisect_ppx]."
"%b" with_bisect;
] in
let open Ocamlbuild_plugin in
Seq [
Echo (lines, meta_dot_ml);
]
end
let pure_lib : Project.item =
project_lib (project_name ^ ".pure")
~thread:()
~findlib_deps:pure_lib_packages
~ml_files:(`Add [Filename.basename meta_dot_ml])
~dir:"src/pure"
~style:(`Pack (project_name ^ "_pure" |> String.capitalize_ascii))
let js_lib : Project.item =
let name = project_name ^ ".client-joo" in
project_lib name
~install:`No
~dir:"src/client-joo/"
~internal_deps:[pure_lib]
~findlib_deps:joo_packages
~style:`Basic
let js_app : Project.item =
let name = project_name ^ "-client-joo" in
project_app name
~install:`No
~file:"src/client-joo/webapp.ml"
~internal_deps:[js_lib]
~findlib_deps:joo_packages
let cmdf fmt = ksprintf Ocamlbuild_plugin.(fun s -> Cmd (Sh s)) fmt
let gui_page = "src/lib/client_html.ml"
let make_gui_page () =
let client_dot_byte =
match js_app with
| Project.Lib _ -> assert false
| Project.App app -> Project.path_of_app ~suffix:".byte" app in
let jsoo_flags =
if jsoo_debug then "--pretty --no-inline --debug-info" else "" in
let css = "src/css/bootstrap_335_min.css" in
let template = "tools/template-gui.sh" in
let jsoo_debug_level = if jsoo_debug then 1 else 0 in
Solvuu_build.Util.Rule.rule
~name:"gui-page-generation"
~prods:[gui_page]
~deps:[client_dot_byte; template; css]
~insert:`bottom
begin fun env builder ->
let open Ocamlbuild_plugin in
Seq [
cmdf "cp ../%s style.css" css;
cmdf "js_of_ocaml %s +weak.js %s -o client.js"
jsoo_flags client_dot_byte;
cmdf "../%s \
gui-page.html client.js \
style.css '' %d" template jsoo_debug_level;
cmdf "ocamlify --var-string gui_page \
gui-page.html --output %s" gui_page;
]
end
let persistent_data_ml = "src/lib/persistent_data.ml"
let make_persistent_data () =
let src = "src/lib/persistent_data.cppo.ml" in
Solvuu_build.Util.Rule.rule
~name:"cppo-persistent-data"
~prods:[persistent_data_ml]
~deps:[src]
~insert:`top
begin fun env builder ->
let open Ocamlbuild_plugin in
Seq [
cmdf "cppo %s %s > %s"
(if with_postgresql then "-D WITH_POSTGRESQL=true" else "")
src persistent_data_ml;
]
end
let lwt_unix_lib : Project.item =
let dir = "src/lib" in
let all_files = try Sys.readdir dir |> Array.to_list with _ -> [] in
[
Filename.basename gui_page;
Filename.basename persistent_data_ml;
] @ List.filter all_files
~f:(fun f ->
not (Filename.check_suffix f ".mli")
&& not (Filename.check_suffix f ".cppo.ml"))
in
project_lib project_name
~thread:()
~findlib_deps:lwt_unix_lib_packages
~ml_files:(`Replace ml_files)
~dir
~internal_deps:[pure_lib]
~style:(`Pack project_name)
let app : Project.item =
project_app project_name
~thread:()
~file:"src/app/cli_main.ml"
~internal_deps:[lwt_unix_lib]
let test_apps : Project.item list =
if build_tests
then [
project_app (project_name ^ "-test")
~thread:()
~file:"src/test/main.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-workflow-examples")
~thread:()
~file:"src/test/Workflow_Examples.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-preconfigured-main-test")
~thread:()
~file:"src/test/preconfigured_main.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-persistance-test")
~thread:()
~file:"src/test/persistance.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
project_app (project_name ^ "-synth-workflows")
~thread:()
~file:"src/test/synthetic_workflows.ml"
~install:`No
~internal_deps:[lwt_unix_lib];
] @ begin
let plugin =
project_lib (project_name ^ ".dummy-plugin")
~thread:()
~dir:"src/test/dummy-plugin/"
~install:`No
~internal_deps:[lwt_unix_lib]
~style:(`Pack "dummy_plugin_test_lib") in
[
plugin;
project_app (project_name ^ "-dummy-plugin-user")
~thread:()
~file:"src/test/dummy_plugin_user.ml"
~install:`No
~internal_deps:[plugin];
]
end
else []
let build_doc () =
let lib = (match lwt_unix_lib with Project.Lib l -> l | _ -> assert false) in
let paths d =
try Sys.readdir d
|> Array.to_list
|> List.filter ~f:(fun f ->
(Filename.check_suffix f ".mli"
|| Filename.check_suffix f ".ml")
)
|> List.map ~f:(fun p -> d // p)
with e ->
failwithf "Cannot read dir: %S, %s, from %s"
d (Printexc.to_string e) (Sys.getcwd ()) in
let deps =
["README.md"]
@ [Project.path_of_pack ~suffix:".cmo" lib]
@ paths "src/doc"
@ paths "src/test"
@ paths "src/test/dummy-plugin"
in
Solvuu_build.Util.Rule.rule
~name:"Build-doc"
~prods:["doc/index.html"]
~deps
~insert:`bottom
begin fun env builder ->
let open Ocamlbuild_plugin in
Seq [
cmdf "../tools/build-documentation.sh %s"
(String.concat "," lwt_unix_lib_packages);
]
end
let ocamlinit_postfix = [
sprintf "open %s_pure" (String.capitalize_ascii project_name);
sprintf "open %s" (String.capitalize_ascii project_name);
]
let () =
Project.basic1 ~project_name ~version ~ocamlinit_postfix
~additional_rules:[
generate_meta_data;
make_gui_page;
make_persistent_data;
We want the rules of the JSOO app but not the app
itself since it does n't build in ` native ` mode
itself since it doesn't build in `native` mode *)
let open Project in
match js_lib, js_app with
| Lib l, App a -> build_app a; build_lib l
| _, _ -> assert false
end;
build_doc;
]
([pure_lib; lwt_unix_lib; app;] @ test_apps)
|
f33dcf496538985bfebf9f0a7fefc787cf9640f432d02c88cebbba5590a72878 | camfort/fortran-src | Fortran2003Spec.hs | module Language.Fortran.Parser.Free.Fortran2003Spec ( spec ) where
import Prelude hiding (GT, EQ, exp, pred)
import Test.Hspec
import TestUtil
import Language.Fortran.Parser.Free.Common
import Language.Fortran.AST
import Language.Fortran.Version
import Language.Fortran.Parser
import Language.Fortran.Parser.Monad ( Parse )
import qualified Language.Fortran.Parser.Free.Fortran2003 as F2003
import qualified Language.Fortran.Parser.Free.Lexer as Free
import qualified Data.ByteString.Char8 as B
parseWith :: Parse Free.AlexInput Free.Token a -> String -> a
parseWith p = parseUnsafe (makeParserFree p Fortran2003) . B.pack
eParser :: String -> Expression ()
eParser = parseUnsafe p . B.pack
where p = makeParser initParseStateFreeExpr F2003.expressionParser Fortran2003
sParser :: String -> Statement ()
sParser = parseWith F2003.statementParser
bParser :: String -> Block ()
bParser = parseWith F2003.blockParser
fParser :: String -> ProgramUnit ()
fParser = parseWith F2003.functionParser
spec :: Spec
spec =
describe "Fortran 2003 Parser" $ do
describe "Modules" $ do
it "parses use statement, intrinsic module" $ do
let renames = fromList ()
[ UseRename () u (varGen "sprod") (varGen "prod")
, UseRename () u (varGen "a") (varGen "b") ]
st = StUse () u (varGen "mod") (Just ModIntrinsic) Permissive (Just renames)
sParser "use, intrinsic :: mod, sprod => prod, a => b" `shouldBe'` st
it "parses use statement, non_intrinsic module" $ do
let renames = fromList ()
[ UseRename () u (varGen "sprod") (varGen "prod")
, UseRename () u (varGen "a") (varGen "b") ]
st = StUse () u (varGen "mod") (Just ModNonIntrinsic) Exclusive (Just renames)
sParser "use, non_intrinsic :: mod, only: sprod => prod, a => b" `shouldBe'` st
it "parses use statement, unspecified nature of module" $ do
let renames = fromList ()
[ UseRename () u (varGen "sprod") (varGen "prod")
, UseRename () u (varGen "a") (varGen "b") ]
st = StUse () u (varGen "mod") Nothing Permissive (Just renames)
sParser "use :: mod, sprod => prod, a => b" `shouldBe'` st
it "parses procedure (interface-name, attribute, proc-decl)" $ do
let call = ExpFunctionCall () u (varGen "c") (aEmpty () u)
st = StProcedure () u (Just (ProcInterfaceName () u (varGen "a")))
(Just (AList () u [AttrSave () u]))
(AList () u [ProcDecl () u (varGen "b") (Just call)])
sParser "PROCEDURE(a), SAVE :: b => c()" `shouldBe'` st
it "parses procedure (class-star, bind-name, proc-decls)" $ do
let call = ExpFunctionCall () u (varGen "c") (aEmpty () u)
clas = TypeSpec () u ClassStar Nothing
st = StProcedure () u (Just (ProcInterfaceType () u clas))
(Just (AList () u [AttrSuffix () u (SfxBind () u (Just (ExpValue () u (ValString "e"))))]))
(AList () u [ProcDecl () u (varGen "b") (Just call)
,ProcDecl () u (varGen "d") (Just call)])
sParser "PROCEDURE(CLASS(*)), BIND(C, NAME=\"e\") :: b => c(), d => c()" `shouldBe'` st
it "parses procedure (class-custom, bind, proc-decls)" $ do
let call = ExpFunctionCall () u (varGen "c") (aEmpty () u)
clas = TypeSpec () u (ClassCustom "e") Nothing
st = StProcedure () u (Just (ProcInterfaceType () u clas))
(Just (AList () u [AttrSuffix () u (SfxBind () u Nothing)]))
(AList () u [ProcDecl () u (varGen "b") (Just call)
,ProcDecl () u (varGen "d") (Just call)])
sParser "PROCEDURE(CLASS(e)), BIND(C) :: b => c(), d => c()" `shouldBe'` st
it "import statements" $ do
let st = StImport () u (AList () u [varGen "a", varGen "b"])
sParser "import a, b" `shouldBe'` st
sParser "import :: a, b" `shouldBe'` st
it "parses function with bind" $ do
let puFunction = PUFunction () u
fType = Nothing
fPre = emptyPrefixes
fSuf = fromList' () [SfxBind () u (Just $ ExpValue () u (ValString "f"))]
fName = "f"
fArgs = Nothing
fRes = Nothing
fBody = []
fSub = Nothing
fStr = init $ unlines ["function f() bind(c,name=\"f\")"
, "end function f" ]
let expected = puFunction fType (fPre, fSuf) fName fArgs fRes fBody fSub
fParser fStr `shouldBe'` expected
it "parses asynchronous decl" $ do
let decls = [declVarGen "a", declVarGen "b"]
st = StAsynchronous () u (AList () u decls)
sParser "asynchronous a, b" `shouldBe'` st
sParser "asynchronous :: a, b" `shouldBe'` st
it "parses asynchronous attribute" $ do
let decls = [declVarGen "a", declVarGen "b"]
ty = TypeSpec () u TypeInteger Nothing
attrs = [AttrAsynchronous () u]
st = StDeclaration () u ty (Just (AList () u attrs)) (AList () u decls)
sParser "integer, asynchronous :: a, b" `shouldBe'` st
it "parses enumerators" $ do
let decls = [ declVariable () u (varGen "a") Nothing (Just (intGen 1))
, declVariable () u (varGen "b") Nothing Nothing ]
st = StEnumerator () u (AList () u decls)
sParser "enum, bind(c)" `shouldBe'` StEnum () u
sParser "enumerator :: a = 1, b" `shouldBe'` st
sParser "end enum" `shouldBe'` StEndEnum () u
it "parses allocate with type_spec" $ do
let sel = Selector () u (Just (ExpValue () u ValColon)) (Just (varGen "foo"))
ty = TypeSpec () u TypeCharacter (Just sel)
decls = AList () u [declVarGen "s"]
st = StDeclaration () u ty (Just (AList () u [AttrAllocatable () u])) decls
sParser "character(len=:,kind=foo), allocatable :: s" `shouldBe'` st
it "parses allocate with type_spec" $ do
let sel = Selector () u (Just (intGen 3)) (Just (varGen "foo"))
ty = TypeSpec () u TypeCharacter (Just sel)
st = StAllocate () u (Just ty) (AList () u [varGen "s"]) Nothing
sParser "allocate(character(len=3,kind=foo) :: s)" `shouldBe'` st
it "parses protected" $ do
let ty = TypeSpec () u TypeReal Nothing
decls = AList () u [declVarGen "x"]
st1 = StDeclaration () u ty (Just (AList () u [AttrProtected () u, AttrPublic () u])) decls
st2 = StProtected () u (Just (AList () u [varGen "x"]))
sParser "real, protected, public :: x" `shouldBe'` st1
sParser "protected x" `shouldBe'` st2
describe "labelled where" $ do
it "parses where construct statement" $
sParser "foo: where (.true.)" `shouldBe'` StWhereConstruct () u (Just "foo") valTrue
it "parses elsewhere statement" $
sParser "elsewhere ab101" `shouldBe'` StElsewhere () u (Just "ab101") Nothing
it "parses elsewhere statement" $ do
let exp = ExpBinary () u GT (varGen "a") (varGen "b")
sParser "elsewhere (a > b) A123" `shouldBe'` StElsewhere () u (Just "a123") (Just exp)
it "parses endwhere statement" $
sParser "endwhere foo1" `shouldBe'` StEndWhere () u (Just "foo1")
describe "associate block" $ do
it "parses multiple assignment associate block" $ do
let text = unlines [ "associate (x => a, y => (a * b))"
, " print *, x"
, " print *, y"
, "end associate" ]
expected = BlAssociate () u Nothing Nothing abbrevs body' Nothing
body' = [blStmtPrint "x", blStmtPrint "y"]
blStmtPrint x = BlStatement () u Nothing (stmtPrint x)
stmtPrint x = StPrint () u starVal (Just $ AList () u [ varGen x ])
abbrevs = AList () u [abbrev "x" (expValVar "a"), abbrev "y" (expBinVars Multiplication "a" "b")]
abbrev var expr = ATuple () u (expValVar var) expr
expValVar x = ExpValue () u (ValVariable x)
expBinVars op x1 x2 = ExpBinary () u op (expValVar x1) (expValVar x2)
bParser text `shouldBe'` expected
specFreeCommon bParser sParser eParser
| null | https://raw.githubusercontent.com/camfort/fortran-src/9229338d6b09a724d38e46bd852f76fe3329d64f/test/Language/Fortran/Parser/Free/Fortran2003Spec.hs | haskell | module Language.Fortran.Parser.Free.Fortran2003Spec ( spec ) where
import Prelude hiding (GT, EQ, exp, pred)
import Test.Hspec
import TestUtil
import Language.Fortran.Parser.Free.Common
import Language.Fortran.AST
import Language.Fortran.Version
import Language.Fortran.Parser
import Language.Fortran.Parser.Monad ( Parse )
import qualified Language.Fortran.Parser.Free.Fortran2003 as F2003
import qualified Language.Fortran.Parser.Free.Lexer as Free
import qualified Data.ByteString.Char8 as B
parseWith :: Parse Free.AlexInput Free.Token a -> String -> a
parseWith p = parseUnsafe (makeParserFree p Fortran2003) . B.pack
eParser :: String -> Expression ()
eParser = parseUnsafe p . B.pack
where p = makeParser initParseStateFreeExpr F2003.expressionParser Fortran2003
sParser :: String -> Statement ()
sParser = parseWith F2003.statementParser
bParser :: String -> Block ()
bParser = parseWith F2003.blockParser
fParser :: String -> ProgramUnit ()
fParser = parseWith F2003.functionParser
spec :: Spec
spec =
describe "Fortran 2003 Parser" $ do
describe "Modules" $ do
it "parses use statement, intrinsic module" $ do
let renames = fromList ()
[ UseRename () u (varGen "sprod") (varGen "prod")
, UseRename () u (varGen "a") (varGen "b") ]
st = StUse () u (varGen "mod") (Just ModIntrinsic) Permissive (Just renames)
sParser "use, intrinsic :: mod, sprod => prod, a => b" `shouldBe'` st
it "parses use statement, non_intrinsic module" $ do
let renames = fromList ()
[ UseRename () u (varGen "sprod") (varGen "prod")
, UseRename () u (varGen "a") (varGen "b") ]
st = StUse () u (varGen "mod") (Just ModNonIntrinsic) Exclusive (Just renames)
sParser "use, non_intrinsic :: mod, only: sprod => prod, a => b" `shouldBe'` st
it "parses use statement, unspecified nature of module" $ do
let renames = fromList ()
[ UseRename () u (varGen "sprod") (varGen "prod")
, UseRename () u (varGen "a") (varGen "b") ]
st = StUse () u (varGen "mod") Nothing Permissive (Just renames)
sParser "use :: mod, sprod => prod, a => b" `shouldBe'` st
it "parses procedure (interface-name, attribute, proc-decl)" $ do
let call = ExpFunctionCall () u (varGen "c") (aEmpty () u)
st = StProcedure () u (Just (ProcInterfaceName () u (varGen "a")))
(Just (AList () u [AttrSave () u]))
(AList () u [ProcDecl () u (varGen "b") (Just call)])
sParser "PROCEDURE(a), SAVE :: b => c()" `shouldBe'` st
it "parses procedure (class-star, bind-name, proc-decls)" $ do
let call = ExpFunctionCall () u (varGen "c") (aEmpty () u)
clas = TypeSpec () u ClassStar Nothing
st = StProcedure () u (Just (ProcInterfaceType () u clas))
(Just (AList () u [AttrSuffix () u (SfxBind () u (Just (ExpValue () u (ValString "e"))))]))
(AList () u [ProcDecl () u (varGen "b") (Just call)
,ProcDecl () u (varGen "d") (Just call)])
sParser "PROCEDURE(CLASS(*)), BIND(C, NAME=\"e\") :: b => c(), d => c()" `shouldBe'` st
it "parses procedure (class-custom, bind, proc-decls)" $ do
let call = ExpFunctionCall () u (varGen "c") (aEmpty () u)
clas = TypeSpec () u (ClassCustom "e") Nothing
st = StProcedure () u (Just (ProcInterfaceType () u clas))
(Just (AList () u [AttrSuffix () u (SfxBind () u Nothing)]))
(AList () u [ProcDecl () u (varGen "b") (Just call)
,ProcDecl () u (varGen "d") (Just call)])
sParser "PROCEDURE(CLASS(e)), BIND(C) :: b => c(), d => c()" `shouldBe'` st
it "import statements" $ do
let st = StImport () u (AList () u [varGen "a", varGen "b"])
sParser "import a, b" `shouldBe'` st
sParser "import :: a, b" `shouldBe'` st
it "parses function with bind" $ do
let puFunction = PUFunction () u
fType = Nothing
fPre = emptyPrefixes
fSuf = fromList' () [SfxBind () u (Just $ ExpValue () u (ValString "f"))]
fName = "f"
fArgs = Nothing
fRes = Nothing
fBody = []
fSub = Nothing
fStr = init $ unlines ["function f() bind(c,name=\"f\")"
, "end function f" ]
let expected = puFunction fType (fPre, fSuf) fName fArgs fRes fBody fSub
fParser fStr `shouldBe'` expected
it "parses asynchronous decl" $ do
let decls = [declVarGen "a", declVarGen "b"]
st = StAsynchronous () u (AList () u decls)
sParser "asynchronous a, b" `shouldBe'` st
sParser "asynchronous :: a, b" `shouldBe'` st
it "parses asynchronous attribute" $ do
let decls = [declVarGen "a", declVarGen "b"]
ty = TypeSpec () u TypeInteger Nothing
attrs = [AttrAsynchronous () u]
st = StDeclaration () u ty (Just (AList () u attrs)) (AList () u decls)
sParser "integer, asynchronous :: a, b" `shouldBe'` st
it "parses enumerators" $ do
let decls = [ declVariable () u (varGen "a") Nothing (Just (intGen 1))
, declVariable () u (varGen "b") Nothing Nothing ]
st = StEnumerator () u (AList () u decls)
sParser "enum, bind(c)" `shouldBe'` StEnum () u
sParser "enumerator :: a = 1, b" `shouldBe'` st
sParser "end enum" `shouldBe'` StEndEnum () u
it "parses allocate with type_spec" $ do
let sel = Selector () u (Just (ExpValue () u ValColon)) (Just (varGen "foo"))
ty = TypeSpec () u TypeCharacter (Just sel)
decls = AList () u [declVarGen "s"]
st = StDeclaration () u ty (Just (AList () u [AttrAllocatable () u])) decls
sParser "character(len=:,kind=foo), allocatable :: s" `shouldBe'` st
it "parses allocate with type_spec" $ do
let sel = Selector () u (Just (intGen 3)) (Just (varGen "foo"))
ty = TypeSpec () u TypeCharacter (Just sel)
st = StAllocate () u (Just ty) (AList () u [varGen "s"]) Nothing
sParser "allocate(character(len=3,kind=foo) :: s)" `shouldBe'` st
it "parses protected" $ do
let ty = TypeSpec () u TypeReal Nothing
decls = AList () u [declVarGen "x"]
st1 = StDeclaration () u ty (Just (AList () u [AttrProtected () u, AttrPublic () u])) decls
st2 = StProtected () u (Just (AList () u [varGen "x"]))
sParser "real, protected, public :: x" `shouldBe'` st1
sParser "protected x" `shouldBe'` st2
describe "labelled where" $ do
it "parses where construct statement" $
sParser "foo: where (.true.)" `shouldBe'` StWhereConstruct () u (Just "foo") valTrue
it "parses elsewhere statement" $
sParser "elsewhere ab101" `shouldBe'` StElsewhere () u (Just "ab101") Nothing
it "parses elsewhere statement" $ do
let exp = ExpBinary () u GT (varGen "a") (varGen "b")
sParser "elsewhere (a > b) A123" `shouldBe'` StElsewhere () u (Just "a123") (Just exp)
it "parses endwhere statement" $
sParser "endwhere foo1" `shouldBe'` StEndWhere () u (Just "foo1")
describe "associate block" $ do
it "parses multiple assignment associate block" $ do
let text = unlines [ "associate (x => a, y => (a * b))"
, " print *, x"
, " print *, y"
, "end associate" ]
expected = BlAssociate () u Nothing Nothing abbrevs body' Nothing
body' = [blStmtPrint "x", blStmtPrint "y"]
blStmtPrint x = BlStatement () u Nothing (stmtPrint x)
stmtPrint x = StPrint () u starVal (Just $ AList () u [ varGen x ])
abbrevs = AList () u [abbrev "x" (expValVar "a"), abbrev "y" (expBinVars Multiplication "a" "b")]
abbrev var expr = ATuple () u (expValVar var) expr
expValVar x = ExpValue () u (ValVariable x)
expBinVars op x1 x2 = ExpBinary () u op (expValVar x1) (expValVar x2)
bParser text `shouldBe'` expected
specFreeCommon bParser sParser eParser
| |
b65647d48b942aec24504665f4f8131348a44a79b87a73a402a818e7d4415bda | cicakhq/potato | flexichain-output-history.lisp | (cl:in-package #:climacs-flexichain-output-history)
(defclass flexichain-pane (clim:application-pane)
()
(:default-initargs :output-record (make-instance 'flexichain-output-history)
:display-time nil
:scroll-bars t))
(defclass flexichain-output-history
(clim:output-record clim:stream-output-history-mixin)
((%parent :initarg :parent :reader clim:output-record-parent)
(%lines :initform (make-instance 'flexichain:standard-flexichain)
:reader lines)
(%prefix-end :initform 0 :accessor prefix-end)
(%prefix-height :initform 0 :accessor prefix-height)
(%width :initform 0 :accessor width)
(%height :initform 0 :accessor height)))
(defmethod initialize-instance :after ((obj flexichain-pane) &key)
(setf (clim:stream-recording-p obj) nil)
(setf (clim:stream-end-of-line-action obj) nil)
(let ((history (clim:stream-output-history obj)))
(setf (slot-value history '%parent) obj)))
(defun forward (history)
(incf (prefix-height history)
(clim:bounding-rectangle-height
(flexichain:element* (lines history) (prefix-end history))))
(incf (prefix-end history)))
(defun backward (history)
(decf (prefix-end history))
(decf (prefix-height history)
(clim:bounding-rectangle-height
(flexichain:element* (lines history) (prefix-end history)))))
(defun adjust-prefix (history viewport-top)
;; If there are lines in the suffix that are entirely above the
;; viewport, then move them to the prefix.
(loop with lines = (lines history)
until (= (prefix-end history) (flexichain:nb-elements lines))
while (<= (+ (prefix-height history)
(clim:bounding-rectangle-height
(flexichain:element* lines (prefix-end history))))
viewport-top)
do (forward history))
;; If there are lines in the prefix that are not entirely above
;; the viewport, then move them to the suffix.
(loop until (zerop (prefix-end history))
while (> (prefix-height history) viewport-top)
do (backward history)))
(defmethod clim:replay-output-record
((record flexichain-output-history) stream &optional region x-offset y-offset)
(declare (ignore x-offset y-offset))
(multiple-value-bind (left top right bottom)
(clim:bounding-rectangle* (clim:pane-viewport-region stream))
(clim:medium-clear-area (clim:sheet-medium stream)
left top right bottom)
(adjust-prefix record top)
(loop with lines = (lines record)
with length = (flexichain:nb-elements lines)
for i from (prefix-end record) below length
for line = (flexichain:element* lines i)
for y = (prefix-height record) then (+ y height)
for height = (clim:bounding-rectangle-height line)
while (< y bottom)
do (setf (clim:output-record-position line) (values 0 y))
(clim:replay-output-record line stream region))))
(defmethod clim:bounding-rectangle* ((history flexichain-output-history))
(values 0 0 (width history) (height history)))
(defun history-insert (history record index)
(when (> (prefix-end history) index)
(incf (prefix-end history))
(incf (prefix-height history)
(clim:bounding-rectangle-height record)))
(incf (height history)
(clim:bounding-rectangle-height record))
(let ((width (clim:bounding-rectangle-width record)))
(when (> width (width history))
(setf (width history) width)))
(flexichain:insert* (lines history) index record))
(defun recompute-width (history)
(setf (width history)
(loop with lines = (lines history)
for i from 0 below (flexichain:nb-elements (lines history))
for record = (flexichain:element* lines i)
maximize (clim:bounding-rectangle-width record))))
(defun history-delete (history index)
(let ((existing (flexichain:element* (lines history) index)))
(when (> (prefix-end history) index)
(decf (prefix-height history)
(clim:bounding-rectangle-height existing))
(decf (prefix-end history)))
(decf (height history)
(clim:bounding-rectangle-height existing))
(flexichain:delete* (lines history) index)
(when (= (clim:bounding-rectangle-width existing) (width history))
(recompute-width history))))
(defun history-replace (history record index)
(let ((existing (flexichain:element* (lines history) index)))
(when (> (prefix-end history) index)
(incf (prefix-height history)
(- (clim:bounding-rectangle-height record)
(clim:bounding-rectangle-height existing))))
(incf (height history)
(- (clim:bounding-rectangle-height record)
(clim:bounding-rectangle-height existing)))
(setf (flexichain:element* (lines history) index) record)
(if (> (clim:bounding-rectangle-width record)
(clim:bounding-rectangle-width existing))
(when (> (clim:bounding-rectangle-width record) (width history))
(setf (width history)
(clim:bounding-rectangle-width record)))
(when (= (clim:bounding-rectangle-width existing) (width history))
(recompute-width history)))))
(defmethod clim:clear-output-record ((history flexichain-output-history))
(let ((chain (lines history)))
(flexichain:delete-elements* chain 0 (flexichain:nb-elements chain))))
(defmethod clim:add-output-record
((record clim:standard-updating-output-record)
(history flexichain-output-history))
(flexichain:push-end (lines history) record))
(defmethod clim:map-over-output-records-containing-position
(function
(history flexichain-output-history)
x y
&optional
x-offset
y-offset
&rest function-args)
(declare (ignore x-offset y-offset))
;; For now, loop over all the records. To do this better, do a
;; binary search.
(loop with lines = (lines history )
for index from 0 below (flexichain:nb-elements lines)
for record = (flexichain:element* lines index)
when (clim:region-contains-position-p record x y)
do (apply function record function-args)))
(defun change-space-requirements (output-history)
(clim:change-space-requirements
(clim:output-record-parent output-history)
:width (width output-history)
:height (height output-history)))
| null | https://raw.githubusercontent.com/cicakhq/potato/88b6c92dbbc80a6c9552435604f7b1ae6f2a4026/contrib/potato-client-clim/src/flexichain-output-history.lisp | lisp | If there are lines in the suffix that are entirely above the
viewport, then move them to the prefix.
If there are lines in the prefix that are not entirely above
the viewport, then move them to the suffix.
For now, loop over all the records. To do this better, do a
binary search. | (cl:in-package #:climacs-flexichain-output-history)
(defclass flexichain-pane (clim:application-pane)
()
(:default-initargs :output-record (make-instance 'flexichain-output-history)
:display-time nil
:scroll-bars t))
(defclass flexichain-output-history
(clim:output-record clim:stream-output-history-mixin)
((%parent :initarg :parent :reader clim:output-record-parent)
(%lines :initform (make-instance 'flexichain:standard-flexichain)
:reader lines)
(%prefix-end :initform 0 :accessor prefix-end)
(%prefix-height :initform 0 :accessor prefix-height)
(%width :initform 0 :accessor width)
(%height :initform 0 :accessor height)))
(defmethod initialize-instance :after ((obj flexichain-pane) &key)
(setf (clim:stream-recording-p obj) nil)
(setf (clim:stream-end-of-line-action obj) nil)
(let ((history (clim:stream-output-history obj)))
(setf (slot-value history '%parent) obj)))
(defun forward (history)
(incf (prefix-height history)
(clim:bounding-rectangle-height
(flexichain:element* (lines history) (prefix-end history))))
(incf (prefix-end history)))
(defun backward (history)
(decf (prefix-end history))
(decf (prefix-height history)
(clim:bounding-rectangle-height
(flexichain:element* (lines history) (prefix-end history)))))
(defun adjust-prefix (history viewport-top)
(loop with lines = (lines history)
until (= (prefix-end history) (flexichain:nb-elements lines))
while (<= (+ (prefix-height history)
(clim:bounding-rectangle-height
(flexichain:element* lines (prefix-end history))))
viewport-top)
do (forward history))
(loop until (zerop (prefix-end history))
while (> (prefix-height history) viewport-top)
do (backward history)))
(defmethod clim:replay-output-record
((record flexichain-output-history) stream &optional region x-offset y-offset)
(declare (ignore x-offset y-offset))
(multiple-value-bind (left top right bottom)
(clim:bounding-rectangle* (clim:pane-viewport-region stream))
(clim:medium-clear-area (clim:sheet-medium stream)
left top right bottom)
(adjust-prefix record top)
(loop with lines = (lines record)
with length = (flexichain:nb-elements lines)
for i from (prefix-end record) below length
for line = (flexichain:element* lines i)
for y = (prefix-height record) then (+ y height)
for height = (clim:bounding-rectangle-height line)
while (< y bottom)
do (setf (clim:output-record-position line) (values 0 y))
(clim:replay-output-record line stream region))))
(defmethod clim:bounding-rectangle* ((history flexichain-output-history))
(values 0 0 (width history) (height history)))
(defun history-insert (history record index)
(when (> (prefix-end history) index)
(incf (prefix-end history))
(incf (prefix-height history)
(clim:bounding-rectangle-height record)))
(incf (height history)
(clim:bounding-rectangle-height record))
(let ((width (clim:bounding-rectangle-width record)))
(when (> width (width history))
(setf (width history) width)))
(flexichain:insert* (lines history) index record))
(defun recompute-width (history)
(setf (width history)
(loop with lines = (lines history)
for i from 0 below (flexichain:nb-elements (lines history))
for record = (flexichain:element* lines i)
maximize (clim:bounding-rectangle-width record))))
(defun history-delete (history index)
(let ((existing (flexichain:element* (lines history) index)))
(when (> (prefix-end history) index)
(decf (prefix-height history)
(clim:bounding-rectangle-height existing))
(decf (prefix-end history)))
(decf (height history)
(clim:bounding-rectangle-height existing))
(flexichain:delete* (lines history) index)
(when (= (clim:bounding-rectangle-width existing) (width history))
(recompute-width history))))
(defun history-replace (history record index)
(let ((existing (flexichain:element* (lines history) index)))
(when (> (prefix-end history) index)
(incf (prefix-height history)
(- (clim:bounding-rectangle-height record)
(clim:bounding-rectangle-height existing))))
(incf (height history)
(- (clim:bounding-rectangle-height record)
(clim:bounding-rectangle-height existing)))
(setf (flexichain:element* (lines history) index) record)
(if (> (clim:bounding-rectangle-width record)
(clim:bounding-rectangle-width existing))
(when (> (clim:bounding-rectangle-width record) (width history))
(setf (width history)
(clim:bounding-rectangle-width record)))
(when (= (clim:bounding-rectangle-width existing) (width history))
(recompute-width history)))))
(defmethod clim:clear-output-record ((history flexichain-output-history))
(let ((chain (lines history)))
(flexichain:delete-elements* chain 0 (flexichain:nb-elements chain))))
(defmethod clim:add-output-record
((record clim:standard-updating-output-record)
(history flexichain-output-history))
(flexichain:push-end (lines history) record))
(defmethod clim:map-over-output-records-containing-position
(function
(history flexichain-output-history)
x y
&optional
x-offset
y-offset
&rest function-args)
(declare (ignore x-offset y-offset))
(loop with lines = (lines history )
for index from 0 below (flexichain:nb-elements lines)
for record = (flexichain:element* lines index)
when (clim:region-contains-position-p record x y)
do (apply function record function-args)))
(defun change-space-requirements (output-history)
(clim:change-space-requirements
(clim:output-record-parent output-history)
:width (width output-history)
:height (height output-history)))
|
11edaa9f42830e1f925ba9d8d84d30a13f7fa991465555146ca7a0cc44810ab5 | hiroshi-unno/coar | linked.ml | open Core
open Ast
open Ast.LogicOld
open CSyntax
exception Error of string
(** Mutable, linked control-flow representation of a [Statement.t].
    Each node stores its successor(s) in [ref] cells so that cyclic control
    flow (loops, [goto]) can be built by back-patching those cells.  Node
    identity is physical ([phys_equal]); traversals therefore carry a
    visited list in order to terminate on cycles. *)
module LinkedStatement : sig
  type t =
      IF of Formula.t * t ref * t ref
    | ASSIGN of string * Term.t * t ref
    | NONDET_ASSIGN of string * t ref
    | NONDET of t ref * t ref
    | ASSUME of Formula.t * t ref
    | NOP of t ref
    | EXIT

  val is_if: t -> bool
  val is_assign: t -> bool
  val is_nondet_assign: t -> bool
  val is_nondet: t -> bool
  val is_assume: t -> bool
  val is_nop: t -> bool
  val is_exit: t -> bool

  val mk_if: Formula.t -> t ref -> t ref -> t
  val mk_assign: string -> Term.t -> t ref -> t
  val mk_nondet_assign: string -> t ref -> t
  val mk_nondet: t ref -> t ref -> t
  val mk_assume: Formula.t -> t ref -> t
  val mk_nop: t ref -> t
  val mk_exit: unit -> t

  val let_if: t -> Formula.t * t ref * t ref
  val let_assign: t -> string * Term.t * t ref
  val let_nondet_assign: t -> string * t ref
  val let_assume: t -> Formula.t * t ref
  val let_nondet: t -> t ref * t ref
  val let_nop: t -> t ref

  val of_statement: Statement.t -> t

  val get_read_vars: t -> Variables.t
  val get_written_vars: t -> Variables.t
  val get_used_vars: t -> Variables.t
  val get_read_vars_from: t -> Variables.t
  val get_written_vars_from: t -> Variables.t
  val get_used_vars_from: t -> Variables.t

  val string_of: ?info:(t -> string) -> t -> string

  val get_next_statements: t -> t list
  val get_next_statements_ref: t -> t ref list
  val get_all_statements: t -> t list

  (** this returns a fresh stmt but this doesn't fix the other links *)
  val sub: Ident.tvar -> Term.t -> t -> t
end = struct
  type t =
      IF of Formula.t * t ref * t ref
    | ASSIGN of string * Term.t * t ref
    | NONDET_ASSIGN of string * t ref
    | NONDET of t ref * t ref
    | ASSUME of Formula.t * t ref
    | NOP of t ref
    | EXIT

  (* Constructor predicates. *)
  let is_if = function IF _ -> true | _ -> false
  let is_assign = function ASSIGN _ -> true | _ -> false
  let is_nondet_assign = function NONDET_ASSIGN _ -> true | _ -> false
  let is_nondet = function NONDET _ -> true | _ -> false
  let is_assume = function ASSUME _ -> true | _ -> false
  let is_nop = function NOP _ -> true | _ -> false
  let is_exit = function EXIT -> true | _ -> false

  (* Smart constructors. *)
  let mk_if cond_fml t_stmt f_stmt = IF (cond_fml, t_stmt, f_stmt)
  let mk_assign varname term nxt_stmt = ASSIGN (varname, term, nxt_stmt)
  let mk_nondet_assign varname nxt_stmt = NONDET_ASSIGN (varname, nxt_stmt)
  let mk_nondet stmt1 stmt2 = NONDET (stmt1, stmt2)
  let mk_assume fml nxt_stmt = ASSUME (fml, nxt_stmt)
  let mk_nop stmt = NOP stmt
  let mk_exit () = EXIT

  (* Destructors; assertion failure on a mismatched constructor. *)
  let let_if = function IF (cond_fml, t_stmt, f_stmt) -> cond_fml, t_stmt, f_stmt | _ -> assert false
  let let_assign = function ASSIGN (varname, term, nxt_stmt) -> varname, term, nxt_stmt | _ -> assert false
  let let_nondet_assign = function NONDET_ASSIGN (varname, nxt_stmt) -> varname, nxt_stmt | _ -> assert false
  let let_assume = function ASSUME (fml, nxt_stmt) -> fml, nxt_stmt | _ -> assert false
  let let_nondet = function NONDET (stmt1, stmt2) -> stmt1, stmt2 | _ -> assert false
  let let_nop = function NOP stmt -> stmt | _ -> assert false

  (* Successor cells of a statement (empty for EXIT). *)
  let get_next_statements_ref = function
    | ASSIGN (_, _, nxt_stmt) -> [nxt_stmt]
    | NONDET_ASSIGN (_, nxt_stmt) -> [nxt_stmt]
    | IF (_, t_stmt, f_stmt) -> [t_stmt; f_stmt]
    | NONDET (stmt1, stmt2) -> [stmt1; stmt2]
    | ASSUME (_, nxt_stmt) -> [nxt_stmt]
    | NOP stmt -> [stmt]
    | EXIT -> []

  let get_next_statements stmt =
    get_next_statements_ref stmt
    |> List.map ~f:(fun stmt' -> !stmt')

  (* Depth-first collection of reachable statements; [res] doubles as the
     visited list (physical equality), so cycles terminate. *)
  let rec get_all_statements_rep stmt res =
    if List.exists ~f:(fun stmt' -> phys_equal stmt' stmt) res then
      res
    else
      let res = stmt :: res in
      get_next_statements stmt
      |> List.fold_left
        ~f:(fun res nxt_stmt -> get_all_statements_rep nxt_stmt res)
        ~init:res

  let get_all_statements stmt =
    get_all_statements_rep stmt []
    |> List.rev

  (* Sentinel successor used to detect [break] outside of a loop. *)
  let dummy_stmt = ref (mk_exit ())

  (* Translate [stmt]: its fall-through successor is [nxt_stmt] and any
     [break] inside it jumps to [break_nxt_stmt].  [label_to_stmt] maps each
     label name to the ref cell that gets patched when the label definition
     is reached, so gotos may precede their labels. *)
  let rec of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt =
    if Statement.is_assign stmt then
      let varname, term = Statement.let_assign stmt in
      mk_assign varname term nxt_stmt
    else if Statement.is_break stmt then
      if phys_equal break_nxt_stmt dummy_stmt then
        raise (Error "break can use only in while loops")
      else
        mk_nop break_nxt_stmt
    else if Statement.is_compound stmt then
      (* Sequence: translate the tail first, then link the head to it. *)
      let stmt1, stmt2 = Statement.let_compound stmt in
      of_statement_rep label_to_stmt
        (ref (of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt2))
        break_nxt_stmt
        stmt1
    else if Statement.is_exit stmt then
      mk_exit ()
    else if Statement.is_if stmt then
      let cond_fml, t_stmt, f_stmt = Statement.let_if stmt in
      let t_stmt = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt t_stmt in
      let f_stmt = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt f_stmt in
      mk_if cond_fml (ref t_stmt) (ref f_stmt)
    else if Statement.is_loop stmt then
      (* The body's fall-through is [first_stmt], back-patched below to
         point at the body itself; [break] escapes to [nxt_stmt]. *)
      let first_stmt = ref (mk_exit ()) in
      let body = of_statement_rep label_to_stmt first_stmt nxt_stmt (Statement.let_loop stmt) in
      let body =
        if phys_equal body !first_stmt then
          (* Degenerate empty body: insert a self-looping NOP. *)
          (first_stmt := mk_nop first_stmt;
           !first_stmt)
        else
          body
      in
      first_stmt := body;
      body
    else if Statement.is_nondet stmt then
      let stmt1, stmt2 = Statement.let_nondet stmt in
      let stmt1 = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt1 in
      let stmt2 = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt2 in
      mk_nondet (ref stmt1) (ref stmt2)
    else if Statement.is_assume stmt then
      let fml = Statement.let_assume stmt in
      mk_assume fml nxt_stmt
    else if Statement.is_nondet_assign stmt then
      let varname = Statement.let_nondet_assign stmt in
      mk_nondet_assign varname nxt_stmt
    else if Statement.is_nop stmt then
      mk_nop nxt_stmt
    else if Statement.is_label stmt then
      (* Patch the label's cell so pending and future gotos reach here. *)
      let label_name = Statement.let_label stmt in
      let stmt = mk_nop nxt_stmt in
      Hashtbl.Poly.find_exn label_to_stmt label_name := stmt;
      stmt
    else if Statement.is_goto stmt then
      let label_name = Statement.let_goto stmt in
      Hashtbl.Poly.find_exn label_to_stmt label_name |> mk_nop
    else if Statement.is_vardecl stmt then
      (* TODO: variable declarations are currently ignored. *)
      mk_nop nxt_stmt
    else
      failwith @@ Printf.sprintf "LinkedStatement.of_statement_rep: not implemented: %s" @@ Statement.string_of stmt

  let of_statement stmt =
    let labels = Statement.get_all_labels stmt in
    let label_to_stmt = Hashtbl.Poly.create ~size:(List.length labels) () in
    let exit_stmt = ref (mk_exit ()) in
    List.iter ~f:(fun label -> Hashtbl.Poly.add_exn label_to_stmt ~key:label ~data:(ref (mk_nop exit_stmt))) labels;
    of_statement_rep label_to_stmt exit_stmt exit_stmt stmt

  (* Fold [get_vars_one] over every statement reachable from [stmt];
     [used_stmts] is the visited list that makes the recursion terminate. *)
  let rec get_vars_rep get_vars_one stmt (used_stmts, vars) =
    if List.exists ~f:(fun used_stmt -> phys_equal used_stmt stmt) used_stmts then
      used_stmts, vars
    else
      let used_stmts = stmt :: used_stmts in
      let vars =
        get_vars_one stmt
        |> Variables.union vars
      in
      get_next_statements stmt
      |> List.fold_left
        ~f:(fun (used_stmts, vars) nxt_stmt ->
            get_vars_rep get_vars_one nxt_stmt (used_stmts, vars))
        ~init:(used_stmts, vars)

  (* Variables read by one statement: condition/assume formulas and
     assignment right-hand sides. *)
  let get_read_vars = function
    | IF (fml, _, _) ->
      Formula.tvs_of fml
      |> Variables.of_tvarset
    | ASSIGN (_, term, _) ->
      Term.tvs_of term
      |> Variables.of_tvarset
    | ASSUME (fml, _) ->
      Formula.tvs_of fml
      |> Variables.of_tvarset
    | NONDET_ASSIGN _
    | NONDET _
    | NOP _
    | EXIT ->
      Variables.empty

  (* Variables written by one statement: assignment targets only. *)
  let get_written_vars = function
    | NONDET_ASSIGN (varname, _)
    | ASSIGN (varname, _, _) ->
      Variables.of_varname varname
    | ASSUME _
    | IF _
    | NONDET _
    | NOP _
    | EXIT ->
      Variables.empty

  let get_used_vars stmt =
    Variables.union (get_read_vars stmt) (get_written_vars stmt)

  (* Reachable-closure variants of the per-statement queries above. *)
  let get_read_vars_from stmt =
    let _, vars = get_vars_rep get_read_vars stmt ([], Variables.empty) in
    vars

  let get_written_vars_from stmt =
    let _, vars = get_vars_rep get_written_vars stmt ([], Variables.empty) in
    vars

  let get_used_vars_from stmt =
    let _, vars = get_vars_rep get_used_vars stmt ([], Variables.empty) in
    vars

  let string_of_indent n = String.make n ' '
  let string_of_labelid n = Printf.sprintf "L%d" n

  (* Pretty-print [stmt].  [used] associates already-printed statements with
     their label ids so that a revisit prints as "goto Lk;" instead of
     recursing forever on cyclic control flow. *)
  let rec string_of_stmt_rep ?info used indent stmt =
    let id_opt =
      List.find
        ~f:(fun (stmt', _) -> phys_equal stmt' stmt)
        used
    in
    match id_opt with
    | Some (_, id) ->
      used,
      Printf.sprintf "%sgoto %s;" (string_of_indent indent) (string_of_labelid id)
    | None ->
      let id = List.length used + 1 in
      let used = (stmt, id) :: used in
      let prefix =
        match info with
        | None ->
          Printf.sprintf "%s%s: "
            (string_of_indent indent)
            (string_of_labelid id)
        | Some to_s ->
          (* Extra per-statement info is printed as a // comment. *)
          Printf.sprintf "%s: // %s\n%s"
            (string_of_labelid id)
            (to_s stmt)
            (string_of_indent indent)
      in
      let used, bodystr =
        match stmt with
          IF (cond_fml, t_stmt, f_stmt) ->
          let used, t_stmt_str = string_of_stmt_rep ?info used (indent+2) !t_stmt in
          let used, f_stmt_str = string_of_stmt_rep ?info used (indent+2) !f_stmt in
          used,
          Printf.sprintf "if (%s) {\n%s\n%s}\n%selse {\n%s\n%s}"
            (Formula.str_of cond_fml)
            t_stmt_str
            (string_of_indent indent)
            (string_of_indent indent)
            f_stmt_str
            (string_of_indent indent)
        | ASSIGN (varname, term, nxt_stmt) ->
          let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
          used,
          Printf.sprintf "%s = %s;\n%s"
            varname
            (Term.str_of term)
            nxt_stmt_str
        | NONDET_ASSIGN (varname, nxt_stmt) ->
          let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
          used,
          Printf.sprintf "%s = nondet();\n%s"
            varname
            nxt_stmt_str
        | NONDET (stmt1, stmt2) ->
          let used, stmt1_str = string_of_stmt_rep ?info used (indent+2) !stmt1 in
          let used, stmt2_str = string_of_stmt_rep ?info used (indent+2) !stmt2 in
          used,
          Printf.sprintf "nondet {\n%s\n%s}\n%selse {\n%s\n%s}"
            stmt1_str
            (string_of_indent indent)
            (string_of_indent indent)
            stmt2_str
            (string_of_indent indent)
        | ASSUME (fml, nxt_stmt) ->
          let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
          used,
          Printf.sprintf "assume(%s);\n%s"
            (Formula.str_of fml)
            nxt_stmt_str
        | NOP nxt_stmt ->
          let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
          used,
          Printf.sprintf "nop\n%s"
            nxt_stmt_str
        | EXIT ->
          used,
          Printf.sprintf "exit 0;"
      in
      used,
      prefix ^ bodystr

  let string_of ?info stmt =
    let _, str = string_of_stmt_rep ?info [] 0 stmt in
    str

  (* Substitute [term] for [tvar] in the formula/term carried by [stmt]
     itself.  The result is a fresh node; successor cells are shared, and
     other nodes that point at [stmt] are NOT re-linked to the copy. *)
  let sub tvar term stmt =
    let subst = [tvar, term] |> Map.Poly.of_alist_exn in
    match stmt with
      IF (cond_fml, t_stmt, f_stmt) ->
      mk_if (Formula.subst subst cond_fml) t_stmt f_stmt
    | ASSIGN (varname, term, nxt_stmt) ->
      mk_assign varname (Term.subst subst term) nxt_stmt
    | NONDET_ASSIGN (varname, nxt_stmt) ->
      mk_nondet_assign varname nxt_stmt
    | NONDET (stmt1, stmt2) ->
      mk_nondet stmt1 stmt2
    | ASSUME (fml, nxt_stmt) ->
      mk_assume (Formula.subst subst fml) nxt_stmt
    | NOP stmt ->
      mk_nop stmt
    | EXIT ->
      mk_exit ()
end
(* Hash table keyed by the physical identity of linked statements: equality
   is [phys_equal], hashing is the structural [Hashtbl.hash].
   NOTE(review): statements can be cyclic; this relies on [Hashtbl.hash]'s
   built-in traversal limits to terminate -- confirm. *)
module LinkedStatementHashtbl =
    Stdlib.Hashtbl.Make(struct
        type t = LinkedStatement.t
        let equal = phys_equal
        let hash = Hashtbl.hash
    end)
| null | https://raw.githubusercontent.com/hiroshi-unno/coar/90a23a09332c68f380efd4115b3f6fdc825f413d/lib/c/linked.ml | ocaml | open Core
open Ast
open Ast.LogicOld
open CSyntax
exception Error of string
module LinkedStatement : sig
type t =
IF of Formula.t * t ref * t ref
| ASSIGN of string * Term.t * t ref
| NONDET_ASSIGN of string * t ref
| NONDET of t ref * t ref
| ASSUME of Formula.t * t ref
| NOP of t ref
| EXIT
val is_if: t -> bool
val is_assign: t -> bool
val is_nondet_assign: t -> bool
val is_nondet: t -> bool
val is_assume: t -> bool
val is_nop: t -> bool
val is_exit: t -> bool
val mk_if: Formula.t -> t ref -> t ref -> t
val mk_assign: string -> Term.t -> t ref -> t
val mk_nondet_assign: string -> t ref -> t
val mk_nondet: t ref -> t ref -> t
val mk_assume: Formula.t -> t ref -> t
val mk_nop: t ref -> t
val mk_exit: unit -> t
val let_if: t -> Formula.t * t ref * t ref
val let_assign: t -> string * Term.t * t ref
val let_nondet_assign: t -> string * t ref
val let_assume: t -> Formula.t * t ref
val let_nondet: t -> t ref * t ref
val let_nop: t -> t ref
val of_statement: Statement.t -> t
val get_read_vars: t -> Variables.t
val get_written_vars: t -> Variables.t
val get_used_vars: t -> Variables.t
val get_read_vars_from: t -> Variables.t
val get_written_vars_from: t -> Variables.t
val get_used_vars_from: t -> Variables.t
val string_of: ?info:(t -> string) -> t -> string
val get_next_statements: t -> t list
val get_next_statements_ref: t -> t ref list
val get_all_statements: t -> t list
* this returns fresh stmt but this does n't fix the other links
val sub: Ident.tvar -> Term.t -> t -> t
end = struct
type t =
IF of Formula.t * t ref * t ref
| ASSIGN of string * Term.t * t ref
| NONDET_ASSIGN of string * t ref
| NONDET of t ref * t ref
| ASSUME of Formula.t * t ref
| NOP of t ref
| EXIT
let is_if = function IF _ -> true | _ -> false
let is_assign = function ASSIGN _ -> true | _ -> false
let is_nondet_assign = function NONDET_ASSIGN _ -> true | _ -> false
let is_nondet = function NONDET _ -> true | _ -> false
let is_assume = function ASSUME _ -> true | _ -> false
let is_nop = function NOP _ -> true | _ -> false
let is_exit = function EXIT -> true | _ -> false
let mk_if cond_fml t_stmt f_stmt = IF (cond_fml, t_stmt, f_stmt)
let mk_assign varname term nxt_stmt = ASSIGN (varname, term, nxt_stmt)
let mk_nondet_assign varname nxt_stmt = NONDET_ASSIGN (varname, nxt_stmt)
let mk_nondet stmt1 stmt2 = NONDET (stmt1, stmt2)
let mk_assume fml nxt_stmt = ASSUME (fml, nxt_stmt)
let mk_nop stmt = NOP stmt
let mk_exit () = EXIT
let let_if = function IF (cond_fml, t_stmt, f_stmt) -> cond_fml, t_stmt, f_stmt | _ -> assert false
let let_assign = function ASSIGN (varname, term, nxt_stmt) -> varname, term, nxt_stmt | _ -> assert false
let let_nondet_assign = function NONDET_ASSIGN (varname, nxt_stmt) -> varname, nxt_stmt | _ -> assert false
let let_assume = function ASSUME (fml, nxt_stmt) -> fml, nxt_stmt | _ -> assert false
let let_nondet = function NONDET (stmt1, stmt2) -> stmt1, stmt2 | _ -> assert false
let let_nop = function NOP stmt -> stmt | _ -> assert false
let get_next_statements_ref = function
| ASSIGN (_, _, nxt_stmt) -> [nxt_stmt]
| NONDET_ASSIGN (_, nxt_stmt) -> [nxt_stmt]
| IF (_, t_stmt, f_stmt) -> [t_stmt; f_stmt]
| NONDET (stmt1, stmt2) -> [stmt1; stmt2]
| ASSUME (_, nxt_stmt) -> [nxt_stmt]
| NOP stmt -> [stmt]
| EXIT -> []
let get_next_statements stmt =
get_next_statements_ref stmt
|> List.map ~f:(fun stmt' -> !stmt')
let rec get_all_statements_rep stmt res =
if List.exists ~f:(fun stmt' -> phys_equal stmt' stmt) res then
res
else
let res = stmt :: res in
get_next_statements stmt
|> List.fold_left
~f:(fun res nxt_stmt -> get_all_statements_rep nxt_stmt res)
~init:res
let get_all_statements stmt =
get_all_statements_rep stmt []
|> List.rev
let dummy_stmt = ref (mk_exit ())
let rec of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt =
if Statement.is_assign stmt then
let varname, term = Statement.let_assign stmt in
mk_assign varname term nxt_stmt
else if Statement.is_break stmt then
if phys_equal break_nxt_stmt dummy_stmt then
raise (Error "break can use only in while loops")
else
mk_nop break_nxt_stmt
else if Statement.is_compound stmt then
let stmt1, stmt2 = Statement.let_compound stmt in
of_statement_rep label_to_stmt
(ref (of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt2))
break_nxt_stmt
stmt1
else if Statement.is_exit stmt then
mk_exit ()
else if Statement.is_if stmt then
let cond_fml, t_stmt, f_stmt = Statement.let_if stmt in
let t_stmt = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt t_stmt in
let f_stmt = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt f_stmt in
mk_if cond_fml (ref t_stmt) (ref f_stmt)
else if Statement.is_loop stmt then
let first_stmt = ref (mk_exit ()) in
let body = of_statement_rep label_to_stmt first_stmt nxt_stmt (Statement.let_loop stmt) in
let body =
if phys_equal body !first_stmt then
(first_stmt := mk_nop first_stmt;
!first_stmt)
else
body
in
first_stmt := body;
body
else if Statement.is_nondet stmt then
let stmt1, stmt2 = Statement.let_nondet stmt in
let stmt1 = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt1 in
let stmt2 = of_statement_rep label_to_stmt nxt_stmt break_nxt_stmt stmt2 in
mk_nondet (ref stmt1) (ref stmt2)
else if Statement.is_assume stmt then
let fml = Statement.let_assume stmt in
mk_assume fml nxt_stmt
else if Statement.is_nondet_assign stmt then
let varname = Statement.let_nondet_assign stmt in
mk_nondet_assign varname nxt_stmt
else if Statement.is_nop stmt then
mk_nop nxt_stmt
else if Statement.is_label stmt then
let label_name = Statement.let_label stmt in
let stmt = mk_nop nxt_stmt in
Hashtbl.Poly.find_exn label_to_stmt label_name := stmt;
stmt
else if Statement.is_goto stmt then
let label_name = Statement.let_goto stmt in
Hashtbl.Poly.find_exn label_to_stmt label_name |> mk_nop
else if Statement.is_vardecl stmt then
TODO
mk_nop nxt_stmt
else
failwith @@ Printf.sprintf "LinkedStatement.of_statement_rep: not implemented: %s" @@ Statement.string_of stmt
let of_statement stmt =
let labels = Statement.get_all_labels stmt in
let label_to_stmt = Hashtbl.Poly.create ~size:(List.length labels) () in
let exit_stmt = ref (mk_exit ()) in
List.iter ~f:(fun label -> Hashtbl.Poly.add_exn label_to_stmt ~key:label ~data:(ref (mk_nop exit_stmt))) labels;
of_statement_rep label_to_stmt exit_stmt exit_stmt stmt
let rec get_vars_rep get_vars_one stmt (used_stmts, vars) =
if List.exists ~f:(fun used_stmt -> phys_equal used_stmt stmt) used_stmts then
used_stmts, vars
else
let used_stmts = stmt :: used_stmts in
let vars =
get_vars_one stmt
|> Variables.union vars
in
get_next_statements stmt
|> List.fold_left
~f:(fun (used_stmts, vars) nxt_stmt ->
get_vars_rep get_vars_one nxt_stmt (used_stmts, vars))
~init:(used_stmts, vars)
let get_read_vars = function
| IF (fml, _, _) ->
Formula.tvs_of fml
|> Variables.of_tvarset
| ASSIGN (_, term, _) ->
Term.tvs_of term
|> Variables.of_tvarset
| ASSUME (fml, _) ->
Formula.tvs_of fml
|> Variables.of_tvarset
| NONDET_ASSIGN _
| NONDET _
| NOP _
| EXIT ->
Variables.empty
let get_written_vars = function
| NONDET_ASSIGN (varname, _)
| ASSIGN (varname, _, _) ->
Variables.of_varname varname
| ASSUME _
| IF _
| NONDET _
| NOP _
| EXIT ->
Variables.empty
let get_used_vars stmt =
Variables.union (get_read_vars stmt) (get_written_vars stmt)
let get_read_vars_from stmt =
let _, vars = get_vars_rep get_read_vars stmt ([], Variables.empty) in
vars
let get_written_vars_from stmt =
let _, vars = get_vars_rep get_written_vars stmt ([], Variables.empty) in
vars
let get_used_vars_from stmt =
let _, vars = get_vars_rep get_used_vars stmt ([], Variables.empty) in
vars
let string_of_indent n = String.make n ' '
let string_of_labelid n = Printf.sprintf "L%d" n
let rec string_of_stmt_rep ?info used indent stmt =
let id_opt =
List.find
~f:(fun (stmt', _) -> phys_equal stmt' stmt)
used
in
match id_opt with
| Some (_, id) ->
used,
Printf.sprintf "%sgoto %s;" (string_of_indent indent) (string_of_labelid id)
| None ->
let id = List.length used + 1 in
let used = (stmt, id) :: used in
let prefix =
match info with
| None ->
Printf.sprintf "%s%s: "
(string_of_indent indent)
(string_of_labelid id)
| Some to_s ->
Printf.sprintf "%s: // %s\n%s"
(string_of_labelid id)
(to_s stmt)
(string_of_indent indent)
in
let used, bodystr =
match stmt with
IF (cond_fml, t_stmt, f_stmt) ->
let used, t_stmt_str = string_of_stmt_rep ?info used (indent+2) !t_stmt in
let used, f_stmt_str = string_of_stmt_rep ?info used (indent+2) !f_stmt in
used,
Printf.sprintf "if (%s) {\n%s\n%s}\n%selse {\n%s\n%s}"
(Formula.str_of cond_fml)
t_stmt_str
(string_of_indent indent)
(string_of_indent indent)
f_stmt_str
(string_of_indent indent)
| ASSIGN (varname, term, nxt_stmt) ->
let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
used,
Printf.sprintf "%s = %s;\n%s"
varname
(Term.str_of term)
nxt_stmt_str
| NONDET_ASSIGN (varname, nxt_stmt) ->
let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
used,
Printf.sprintf "%s = nondet();\n%s"
varname
nxt_stmt_str
| NONDET (stmt1, stmt2) ->
let used, stmt1_str = string_of_stmt_rep ?info used (indent+2) !stmt1 in
let used, stmt2_str = string_of_stmt_rep ?info used (indent+2) !stmt2 in
used,
Printf.sprintf "nondet {\n%s\n%s}\n%selse {\n%s\n%s}"
stmt1_str
(string_of_indent indent)
(string_of_indent indent)
stmt2_str
(string_of_indent indent)
| ASSUME (fml, nxt_stmt) ->
let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
used,
Printf.sprintf "assume(%s);\n%s"
(Formula.str_of fml)
nxt_stmt_str
| NOP nxt_stmt ->
let used, nxt_stmt_str = string_of_stmt_rep ?info used indent !nxt_stmt in
used,
Printf.sprintf "nop\n%s"
nxt_stmt_str
| EXIT ->
used,
Printf.sprintf "exit 0;"
in
used,
prefix ^ bodystr
let string_of ?info stmt =
let _, str = string_of_stmt_rep ?info [] 0 stmt in
str
let sub tvar term stmt =
let subst = [tvar, term] |> Map.Poly.of_alist_exn in
match stmt with
IF (cond_fml, t_stmt, f_stmt) ->
mk_if (Formula.subst subst cond_fml) t_stmt f_stmt
| ASSIGN (varname, term, nxt_stmt) ->
mk_assign varname (Term.subst subst term) nxt_stmt
| NONDET_ASSIGN (varname, nxt_stmt) ->
mk_nondet_assign varname nxt_stmt
| NONDET (stmt1, stmt2) ->
mk_nondet stmt1 stmt2
| ASSUME (fml, nxt_stmt) ->
mk_assume (Formula.subst subst fml) nxt_stmt
| NOP stmt ->
mk_nop stmt
| EXIT ->
mk_exit ()
end
module LinkedStatementHashtbl =
Stdlib.Hashtbl.Make(struct
type t = LinkedStatement.t
let equal = phys_equal
let hash = Hashtbl.hash
end)
| |
8f00bcd828376d31c99119c494d45f4a6cdb4996d3414661355e006f67113d9c | Workiva/eva | protocols.clj | Copyright 2015 - 2019 Workiva Inc.
;;
;; Licensed under the Eclipse Public License 1.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://opensource.org/licenses/eclipse-1.0.php
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns eva.query.datalog.qsqr.protocols)

;; State threaded through QSQR (query/subquery) datalog evaluation: per-kind
;; logs of generalized queries already issued (rule / extensional /
;; evaluable), the facts derived so far, the current unifier bindings, and
;; the antecedent/rule selection strategy.
(defprotocol State
  (rule-log [state] [state queries] "Returns/sets a nested datastructure representing the generalized rule queries that have been seen already: { predicate-symbol { decoration generalized-pred }} The decoration structure is exactly what is returned by eva.query.datalog.protocols/decoration; the generalized-pred is an eva.query.datalog.predicate.GeneralizedPredicate.")
  (extension-log [state] [state queries] "Returns/sets a nested datastructure representing the generalized edb queries that have been seen already: { predicate-symbol { decoration generalized-pred }} The decoration structure is exactly what is returned by eva.query.datalog.protocols/decoration; the generalized-pred is an eva.query.datalog.predicate.GeneralizedPredicate.")
  (evaluation-log [state] [state queries] "Returns/sets a nested datastructure representing the generalized evaluable queries that have been seen already: { predicate-symbol { decoration generalized-pred }} The decoration structure is exactly what is returned by eva.query.datalog.protocols/decoration; the generalized-pred is an eva.query.datalog.predicate.GeneralizedPredicate.")
  (add-query [state query] "Adds this generalized predicate to the state blob; if a matching predicate/decoration combination already exists, this merges the two.")
  (derived [state] "Returns a nested datastructure representing the current derivations of constants for particular predicates: { relation-symbol #{ constant-tuples }}")
  (reset-bindings [state])
  (bindings [state] [state x] "Returns/sets a set of unifier maps: #{ unifier, unifier, unifier ... }. (unifier = { ?a ?b, ?b c ...} )")
  (select-antecedent [state predicates] "From the supplied sequence of predicates, selects one to evaluate next. Returns [selected others]. May throw an exception if, for any reason, a predicate cannot be selected.")
  (select-rule [state query rules] "From the supplied sequence of rules, selects one to employ next. Returns [selected others]. May throw an exception if, for any reason, a rule cannot be selected."))
| null | https://raw.githubusercontent.com/Workiva/eva/b7b8a6a5215cccb507a92aa67e0168dc777ffeac/core/src/eva/query/datalog/qsqr/protocols.clj | clojure |
Licensed under the Eclipse Public License 1.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-1.0.php
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2015 - 2019 Workiva Inc.
distributed under the License is distributed on an " AS IS " BASIS ,
(ns eva.query.datalog.qsqr.protocols)
(defprotocol State
(rule-log [state] [state queries] "Returns/sets a nested datastructure representing the generalized rule queries that have been seen already: { predicate-symbol { decoration generalized-pred }} The decoration structure is exactly what is returned by eva.query.datalog.protocols/decoration; the generalized-pred is an eva.query.datalog.predicate.GeneralizedPredicate.")
(extension-log [state] [state queries] "Returns/sets a nested datastructure representing the generalized edb queries that have been seen already: { predicate-symbol { decoration generalized-pred }} The decoration structure is exactly what is returned by eva.query.datalog.protocols/decoration; the generalized-pred is an eva.query.datalog.predicate.GeneralizedPredicate.")
(evaluation-log [state] [state queries] "Returns/sets a nested datastructure representing the generalized evaluable queries that have been seen already: { predicate-symbol { decoration generalized-pred }} The decoration structure is exactly what is returned by eva.query.datalog.protocols/decoration; the generalized-pred is an eva.query.datalog.predicate.GeneralizedPredicate.")
(add-query [state query] "Adds this generalized predicate to the state blob; if a matching predicate/decoration combination already exists, this merges the two.")
(derived [state] "Returns a nested datastructure representing the current derivations of constants for particular predicates: { relation-symbol #{ constant-tuples }}")
(reset-bindings [state])
(bindings [state] [state x] "Returns/sets a set of unifier maps: #{ unifier, unifier, unifier ... }. (unifier = { ?a ?b, ?b c ...} )")
(select-antecedent [state predicates] "From the supplied sequence of predicates, selects one to evaluate next. Returns [selected others]. May throw an exception if, for any reason, a predicate cannot be selected.")
(select-rule [state query rules] "From the supplied sequence of rules, selects one to employ next. Returns [selected others]. May throw an exception if, for any reason, a rule cannot be selected."))
|
b414dc85bf1ee8584ddefae7f8a6b905dd5fcab2c8c6a7ce4050ecc71c937b68 | kitnil/dotfiles | video.scm | (define-module (home services video)
#:use-module (gnu home services)
#:use-module (gnu home services shepherd)
#:use-module (guix gexp)
#:use-module (guix records)
#:use-module (gnu services)
#:use-module (home config)
#:export (home-mpv-service))
;; Guix Home service that installs mpv's input.conf and mpv.conf from this
;; dotfiles checkout (%project-directory) into ~/.config/mpv via the
;; home-files service.
(define home-mpv-service
  (simple-service 'mpv-config
                  home-files-service-type
                  (list `(".config/mpv/input.conf" ,(local-file (string-append %project-directory "/dot_config/mpv/input.conf")))
                        `(".config/mpv/mpv.conf" ,(local-file (string-append %project-directory "/dot_config/mpv/mpv.conf"))))))
| null | https://raw.githubusercontent.com/kitnil/dotfiles/354a101e7e2789ad37e8b0c9f4534e2a9fc55439/dotfiles/guixsd/modules/home/services/video.scm | scheme | (define-module (home services video)
#:use-module (gnu home services)
#:use-module (gnu home services shepherd)
#:use-module (guix gexp)
#:use-module (guix records)
#:use-module (gnu services)
#:use-module (home config)
#:export (home-mpv-service))
(define home-mpv-service
(simple-service 'mpv-config
home-files-service-type
(list `(".config/mpv/input.conf" ,(local-file (string-append %project-directory "/dot_config/mpv/input.conf")))
`(".config/mpv/mpv.conf" ,(local-file (string-append %project-directory "/dot_config/mpv/mpv.conf"))))))
| |
9b78d24fd06dddcdd670a4b07cc2cc998e784449f06ed2567d6ee54afa837249 | DaiF1/Oditor | oditor.ml | (*
file: oditor.ml
dependencies: editor.ml display.ml input.ml
Main file
*)
open Editor;;
open Display;;
open Input;;
open Default_keymaps;;
open Vim_keymaps;;
(* Keymap setup *)
(* Register both keymaps under names that [load_keymap] can later look up. *)
store_keymap "default" setup_defaultkeymaps;;
store_keymap "vim" setup_vimkeymaps;;
(* Main loop.
   Refresh the screen and process keys.  If [process_key] returns false,
   clear the screen, leave raw mode and exit the editor. *)
let rec loop () =
    refresh_screen ();
    if process_key () then loop ()
    else (clear_screen (); exit_raw ());;
(* Activate raw mode before starting main loop *)
let () = enter_raw (); load_keymap "default"; loop ();;
| null | https://raw.githubusercontent.com/DaiF1/Oditor/9f49ce05281f3253c166475b21c282a1e36c99f7/oditor.ml | ocaml |
file: oditor.ml
dependencies: editor.ml display.ml input.ml
Main file
Keymap setup |
open Editor;;
open Display;;
open Input;;
open Default_keymaps;;
open Vim_keymaps;;
store_keymap "default" setup_defaultkeymaps;;
store_keymap "vim" setup_vimkeymaps;;
Main loop
Refresh screen and process keys . If process returns false , exit editor
Refresh screen and process keys. If process returns false, exit editor *)
let rec loop () =
refresh_screen ();
if process_key () then loop ()
else (clear_screen (); exit_raw ());;
Activate raw mode before starting main loop
let () = enter_raw (); load_keymap "default"; loop ();;
|
b63b2fc044a8b4cbb141d5eb8f261d0918906ec0840aa0bbe695c820d1c98a7a | dgiot/dgiot | dgiot_charref.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 DGIOT Technologies Co., Ltd. All Rights Reserved.
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc Converts HTML 5 charrefs and entities to codepoints (or lists of code points).
-module(dgiot_charref).
-author("johnliu").
-export([charref/1]).
%% External API.
%% @doc Convert a decimal charref, hex charref, or html entity to a unicode
%% codepoint, or return undefined on failure.
%% The input should not include an ampersand or semicolon.
%% charref("#38") = 38, charref("#x26") = 38, charref("amp") = 38.
-spec charref(binary() | string()) -> integer() | [integer()] | undefined.
charref(Bin) when is_binary(Bin) ->
    %% Normalise binaries to character lists and retry.
    charref(binary_to_list(Bin));
charref([$#, X | Digits]) when X =:= $x orelse X =:= $X ->
    %% Hexadecimal numeric reference, e.g. "#x26".
    parse_codepoint(Digits, 16);
charref([$# | Digits]) ->
    %% Decimal numeric reference, e.g. "#38".
    parse_codepoint(Digits, 10);
charref(Name) ->
    %% Otherwise treat the input as a named entity, e.g. "amp".
    entity(Name).

%% Parse a list of digit characters in the given base; return undefined
%% when the characters do not form a valid integer in that base.
parse_codepoint(Digits, Base) ->
    try
        erlang:list_to_integer(Digits, Base)
    catch
        error:badarg -> undefined
    end.
%% Internal API.
%% [2011-10-14] Generated from:
%% -character-references.html
entity("AElig") -> 16#000C6;
entity("AMP") -> 16#00026;
entity("Aacute") -> 16#000C1;
entity("Abreve") -> 16#00102;
entity("Acirc") -> 16#000C2;
entity("Acy") -> 16#00410;
entity("Afr") -> 16#1D504;
entity("Agrave") -> 16#000C0;
entity("Alpha") -> 16#00391;
entity("Amacr") -> 16#00100;
entity("And") -> 16#02A53;
entity("Aogon") -> 16#00104;
entity("Aopf") -> 16#1D538;
entity("ApplyFunction") -> 16#02061;
entity("Aring") -> 16#000C5;
entity("Ascr") -> 16#1D49C;
entity("Assign") -> 16#02254;
entity("Atilde") -> 16#000C3;
entity("Auml") -> 16#000C4;
entity("Backslash") -> 16#02216;
entity("Barv") -> 16#02AE7;
entity("Barwed") -> 16#02306;
entity("Bcy") -> 16#00411;
entity("Because") -> 16#02235;
entity("Bernoullis") -> 16#0212C;
entity("Beta") -> 16#00392;
entity("Bfr") -> 16#1D505;
entity("Bopf") -> 16#1D539;
entity("Breve") -> 16#002D8;
entity("Bscr") -> 16#0212C;
entity("Bumpeq") -> 16#0224E;
entity("CHcy") -> 16#00427;
entity("COPY") -> 16#000A9;
entity("Cacute") -> 16#00106;
entity("Cap") -> 16#022D2;
entity("CapitalDifferentialD") -> 16#02145;
entity("Cayleys") -> 16#0212D;
entity("Ccaron") -> 16#0010C;
entity("Ccedil") -> 16#000C7;
entity("Ccirc") -> 16#00108;
entity("Cconint") -> 16#02230;
entity("Cdot") -> 16#0010A;
entity("Cedilla") -> 16#000B8;
entity("CenterDot") -> 16#000B7;
entity("Cfr") -> 16#0212D;
entity("Chi") -> 16#003A7;
entity("CircleDot") -> 16#02299;
entity("CircleMinus") -> 16#02296;
entity("CirclePlus") -> 16#02295;
entity("CircleTimes") -> 16#02297;
entity("ClockwiseContourIntegral") -> 16#02232;
entity("CloseCurlyDoubleQuote") -> 16#0201D;
entity("CloseCurlyQuote") -> 16#02019;
entity("Colon") -> 16#02237;
entity("Colone") -> 16#02A74;
entity("Congruent") -> 16#02261;
entity("Conint") -> 16#0222F;
entity("ContourIntegral") -> 16#0222E;
entity("Copf") -> 16#02102;
entity("Coproduct") -> 16#02210;
entity("CounterClockwiseContourIntegral") -> 16#02233;
entity("Cross") -> 16#02A2F;
entity("Cscr") -> 16#1D49E;
entity("Cup") -> 16#022D3;
entity("CupCap") -> 16#0224D;
entity("DD") -> 16#02145;
entity("DDotrahd") -> 16#02911;
entity("DJcy") -> 16#00402;
entity("DScy") -> 16#00405;
entity("DZcy") -> 16#0040F;
entity("Dagger") -> 16#02021;
entity("Darr") -> 16#021A1;
entity("Dashv") -> 16#02AE4;
entity("Dcaron") -> 16#0010E;
entity("Dcy") -> 16#00414;
entity("Del") -> 16#02207;
entity("Delta") -> 16#00394;
entity("Dfr") -> 16#1D507;
entity("DiacriticalAcute") -> 16#000B4;
entity("DiacriticalDot") -> 16#002D9;
entity("DiacriticalDoubleAcute") -> 16#002DD;
entity("DiacriticalGrave") -> 16#00060;
entity("DiacriticalTilde") -> 16#002DC;
entity("Diamond") -> 16#022C4;
entity("DifferentialD") -> 16#02146;
entity("Dopf") -> 16#1D53B;
entity("Dot") -> 16#000A8;
entity("DotDot") -> 16#020DC;
entity("DotEqual") -> 16#02250;
entity("DoubleContourIntegral") -> 16#0222F;
entity("DoubleDot") -> 16#000A8;
entity("DoubleDownArrow") -> 16#021D3;
entity("DoubleLeftArrow") -> 16#021D0;
entity("DoubleLeftRightArrow") -> 16#021D4;
entity("DoubleLeftTee") -> 16#02AE4;
entity("DoubleLongLeftArrow") -> 16#027F8;
entity("DoubleLongLeftRightArrow") -> 16#027FA;
entity("DoubleLongRightArrow") -> 16#027F9;
entity("DoubleRightArrow") -> 16#021D2;
entity("DoubleRightTee") -> 16#022A8;
entity("DoubleUpArrow") -> 16#021D1;
entity("DoubleUpDownArrow") -> 16#021D5;
entity("DoubleVerticalBar") -> 16#02225;
entity("DownArrow") -> 16#02193;
entity("DownArrowBar") -> 16#02913;
entity("DownArrowUpArrow") -> 16#021F5;
entity("DownBreve") -> 16#00311;
entity("DownLeftRightVector") -> 16#02950;
entity("DownLeftTeeVector") -> 16#0295E;
entity("DownLeftVector") -> 16#021BD;
entity("DownLeftVectorBar") -> 16#02956;
entity("DownRightTeeVector") -> 16#0295F;
entity("DownRightVector") -> 16#021C1;
entity("DownRightVectorBar") -> 16#02957;
entity("DownTee") -> 16#022A4;
entity("DownTeeArrow") -> 16#021A7;
entity("Downarrow") -> 16#021D3;
entity("Dscr") -> 16#1D49F;
entity("Dstrok") -> 16#00110;
entity("ENG") -> 16#0014A;
entity("ETH") -> 16#000D0;
entity("Eacute") -> 16#000C9;
entity("Ecaron") -> 16#0011A;
entity("Ecirc") -> 16#000CA;
entity("Ecy") -> 16#0042D;
entity("Edot") -> 16#00116;
entity("Efr") -> 16#1D508;
entity("Egrave") -> 16#000C8;
entity("Element") -> 16#02208;
entity("Emacr") -> 16#00112;
entity("EmptySmallSquare") -> 16#025FB;
entity("EmptyVerySmallSquare") -> 16#025AB;
entity("Eogon") -> 16#00118;
entity("Eopf") -> 16#1D53C;
entity("Epsilon") -> 16#00395;
entity("Equal") -> 16#02A75;
entity("EqualTilde") -> 16#02242;
entity("Equilibrium") -> 16#021CC;
entity("Escr") -> 16#02130;
entity("Esim") -> 16#02A73;
entity("Eta") -> 16#00397;
entity("Euml") -> 16#000CB;
entity("Exists") -> 16#02203;
entity("ExponentialE") -> 16#02147;
entity("Fcy") -> 16#00424;
entity("Ffr") -> 16#1D509;
entity("FilledSmallSquare") -> 16#025FC;
entity("FilledVerySmallSquare") -> 16#025AA;
entity("Fopf") -> 16#1D53D;
entity("ForAll") -> 16#02200;
entity("Fouriertrf") -> 16#02131;
entity("Fscr") -> 16#02131;
entity("GJcy") -> 16#00403;
entity("GT") -> 16#0003E;
entity("Gamma") -> 16#00393;
entity("Gammad") -> 16#003DC;
entity("Gbreve") -> 16#0011E;
entity("Gcedil") -> 16#00122;
entity("Gcirc") -> 16#0011C;
entity("Gcy") -> 16#00413;
entity("Gdot") -> 16#00120;
entity("Gfr") -> 16#1D50A;
entity("Gg") -> 16#022D9;
entity("Gopf") -> 16#1D53E;
entity("GreaterEqual") -> 16#02265;
entity("GreaterEqualLess") -> 16#022DB;
entity("GreaterFullEqual") -> 16#02267;
entity("GreaterGreater") -> 16#02AA2;
entity("GreaterLess") -> 16#02277;
entity("GreaterSlantEqual") -> 16#02A7E;
entity("GreaterTilde") -> 16#02273;
entity("Gscr") -> 16#1D4A2;
entity("Gt") -> 16#0226B;
entity("HARDcy") -> 16#0042A;
entity("Hacek") -> 16#002C7;
entity("Hat") -> 16#0005E;
entity("Hcirc") -> 16#00124;
entity("Hfr") -> 16#0210C;
entity("HilbertSpace") -> 16#0210B;
entity("Hopf") -> 16#0210D;
entity("HorizontalLine") -> 16#02500;
entity("Hscr") -> 16#0210B;
entity("Hstrok") -> 16#00126;
entity("HumpDownHump") -> 16#0224E;
entity("HumpEqual") -> 16#0224F;
entity("IEcy") -> 16#00415;
entity("IJlig") -> 16#00132;
entity("IOcy") -> 16#00401;
entity("Iacute") -> 16#000CD;
entity("Icirc") -> 16#000CE;
entity("Icy") -> 16#00418;
entity("Idot") -> 16#00130;
entity("Ifr") -> 16#02111;
entity("Igrave") -> 16#000CC;
entity("Im") -> 16#02111;
entity("Imacr") -> 16#0012A;
entity("ImaginaryI") -> 16#02148;
entity("Implies") -> 16#021D2;
entity("Int") -> 16#0222C;
entity("Integral") -> 16#0222B;
entity("Intersection") -> 16#022C2;
entity("InvisibleComma") -> 16#02063;
entity("InvisibleTimes") -> 16#02062;
entity("Iogon") -> 16#0012E;
entity("Iopf") -> 16#1D540;
entity("Iota") -> 16#00399;
entity("Iscr") -> 16#02110;
entity("Itilde") -> 16#00128;
entity("Iukcy") -> 16#00406;
entity("Iuml") -> 16#000CF;
entity("Jcirc") -> 16#00134;
entity("Jcy") -> 16#00419;
entity("Jfr") -> 16#1D50D;
entity("Jopf") -> 16#1D541;
entity("Jscr") -> 16#1D4A5;
entity("Jsercy") -> 16#00408;
entity("Jukcy") -> 16#00404;
entity("KHcy") -> 16#00425;
entity("KJcy") -> 16#0040C;
entity("Kappa") -> 16#0039A;
entity("Kcedil") -> 16#00136;
entity("Kcy") -> 16#0041A;
entity("Kfr") -> 16#1D50E;
entity("Kopf") -> 16#1D542;
entity("Kscr") -> 16#1D4A6;
entity("LJcy") -> 16#00409;
entity("LT") -> 16#0003C;
entity("Lacute") -> 16#00139;
entity("Lambda") -> 16#0039B;
entity("Lang") -> 16#027EA;
entity("Laplacetrf") -> 16#02112;
entity("Larr") -> 16#0219E;
entity("Lcaron") -> 16#0013D;
entity("Lcedil") -> 16#0013B;
entity("Lcy") -> 16#0041B;
entity("LeftAngleBracket") -> 16#027E8;
entity("LeftArrow") -> 16#02190;
entity("LeftArrowBar") -> 16#021E4;
entity("LeftArrowRightArrow") -> 16#021C6;
entity("LeftCeiling") -> 16#02308;
entity("LeftDoubleBracket") -> 16#027E6;
entity("LeftDownTeeVector") -> 16#02961;
entity("LeftDownVector") -> 16#021C3;
entity("LeftDownVectorBar") -> 16#02959;
entity("LeftFloor") -> 16#0230A;
entity("LeftRightArrow") -> 16#02194;
entity("LeftRightVector") -> 16#0294E;
entity("LeftTee") -> 16#022A3;
entity("LeftTeeArrow") -> 16#021A4;
entity("LeftTeeVector") -> 16#0295A;
entity("LeftTriangle") -> 16#022B2;
entity("LeftTriangleBar") -> 16#029CF;
entity("LeftTriangleEqual") -> 16#022B4;
entity("LeftUpDownVector") -> 16#02951;
entity("LeftUpTeeVector") -> 16#02960;
entity("LeftUpVector") -> 16#021BF;
entity("LeftUpVectorBar") -> 16#02958;
entity("LeftVector") -> 16#021BC;
entity("LeftVectorBar") -> 16#02952;
entity("Leftarrow") -> 16#021D0;
entity("Leftrightarrow") -> 16#021D4;
entity("LessEqualGreater") -> 16#022DA;
entity("LessFullEqual") -> 16#02266;
entity("LessGreater") -> 16#02276;
entity("LessLess") -> 16#02AA1;
entity("LessSlantEqual") -> 16#02A7D;
entity("LessTilde") -> 16#02272;
entity("Lfr") -> 16#1D50F;
entity("Ll") -> 16#022D8;
entity("Lleftarrow") -> 16#021DA;
entity("Lmidot") -> 16#0013F;
entity("LongLeftArrow") -> 16#027F5;
entity("LongLeftRightArrow") -> 16#027F7;
entity("LongRightArrow") -> 16#027F6;
entity("Longleftarrow") -> 16#027F8;
entity("Longleftrightarrow") -> 16#027FA;
entity("Longrightarrow") -> 16#027F9;
entity("Lopf") -> 16#1D543;
entity("LowerLeftArrow") -> 16#02199;
entity("LowerRightArrow") -> 16#02198;
entity("Lscr") -> 16#02112;
entity("Lsh") -> 16#021B0;
entity("Lstrok") -> 16#00141;
entity("Lt") -> 16#0226A;
entity("Map") -> 16#02905;
entity("Mcy") -> 16#0041C;
entity("MediumSpace") -> 16#0205F;
entity("Mellintrf") -> 16#02133;
entity("Mfr") -> 16#1D510;
entity("MinusPlus") -> 16#02213;
entity("Mopf") -> 16#1D544;
entity("Mscr") -> 16#02133;
entity("Mu") -> 16#0039C;
entity("NJcy") -> 16#0040A;
entity("Nacute") -> 16#00143;
entity("Ncaron") -> 16#00147;
entity("Ncedil") -> 16#00145;
entity("Ncy") -> 16#0041D;
entity("NegativeMediumSpace") -> 16#0200B;
entity("NegativeThickSpace") -> 16#0200B;
entity("NegativeThinSpace") -> 16#0200B;
entity("NegativeVeryThinSpace") -> 16#0200B;
entity("NestedGreaterGreater") -> 16#0226B;
entity("NestedLessLess") -> 16#0226A;
entity("NewLine") -> 16#0000A;
entity("Nfr") -> 16#1D511;
entity("NoBreak") -> 16#02060;
entity("NonBreakingSpace") -> 16#000A0;
entity("Nopf") -> 16#02115;
entity("Not") -> 16#02AEC;
entity("NotCongruent") -> 16#02262;
entity("NotCupCap") -> 16#0226D;
entity("NotDoubleVerticalBar") -> 16#02226;
entity("NotElement") -> 16#02209;
entity("NotEqual") -> 16#02260;
entity("NotEqualTilde") -> [16#02242, 16#00338];
entity("NotExists") -> 16#02204;
entity("NotGreater") -> 16#0226F;
entity("NotGreaterEqual") -> 16#02271;
entity("NotGreaterFullEqual") -> [16#02267, 16#00338];
entity("NotGreaterGreater") -> [16#0226B, 16#00338];
entity("NotGreaterLess") -> 16#02279;
entity("NotGreaterSlantEqual") -> [16#02A7E, 16#00338];
entity("NotGreaterTilde") -> 16#02275;
entity("NotHumpDownHump") -> [16#0224E, 16#00338];
entity("NotHumpEqual") -> [16#0224F, 16#00338];
entity("NotLeftTriangle") -> 16#022EA;
entity("NotLeftTriangleBar") -> [16#029CF, 16#00338];
entity("NotLeftTriangleEqual") -> 16#022EC;
entity("NotLess") -> 16#0226E;
entity("NotLessEqual") -> 16#02270;
entity("NotLessGreater") -> 16#02278;
entity("NotLessLess") -> [16#0226A, 16#00338];
entity("NotLessSlantEqual") -> [16#02A7D, 16#00338];
entity("NotLessTilde") -> 16#02274;
entity("NotNestedGreaterGreater") -> [16#02AA2, 16#00338];
entity("NotNestedLessLess") -> [16#02AA1, 16#00338];
entity("NotPrecedes") -> 16#02280;
entity("NotPrecedesEqual") -> [16#02AAF, 16#00338];
entity("NotPrecedesSlantEqual") -> 16#022E0;
entity("NotReverseElement") -> 16#0220C;
entity("NotRightTriangle") -> 16#022EB;
entity("NotRightTriangleBar") -> [16#029D0, 16#00338];
entity("NotRightTriangleEqual") -> 16#022ED;
entity("NotSquareSubset") -> [16#0228F, 16#00338];
entity("NotSquareSubsetEqual") -> 16#022E2;
entity("NotSquareSuperset") -> [16#02290, 16#00338];
entity("NotSquareSupersetEqual") -> 16#022E3;
entity("NotSubset") -> [16#02282, 16#020D2];
entity("NotSubsetEqual") -> 16#02288;
entity("NotSucceeds") -> 16#02281;
entity("NotSucceedsEqual") -> [16#02AB0, 16#00338];
entity("NotSucceedsSlantEqual") -> 16#022E1;
entity("NotSucceedsTilde") -> [16#0227F, 16#00338];
entity("NotSuperset") -> [16#02283, 16#020D2];
entity("NotSupersetEqual") -> 16#02289;
entity("NotTilde") -> 16#02241;
entity("NotTildeEqual") -> 16#02244;
entity("NotTildeFullEqual") -> 16#02247;
entity("NotTildeTilde") -> 16#02249;
entity("NotVerticalBar") -> 16#02224;
entity("Nscr") -> 16#1D4A9;
entity("Ntilde") -> 16#000D1;
entity("Nu") -> 16#0039D;
entity("OElig") -> 16#00152;
entity("Oacute") -> 16#000D3;
entity("Ocirc") -> 16#000D4;
entity("Ocy") -> 16#0041E;
entity("Odblac") -> 16#00150;
entity("Ofr") -> 16#1D512;
entity("Ograve") -> 16#000D2;
entity("Omacr") -> 16#0014C;
entity("Omega") -> 16#003A9;
entity("Omicron") -> 16#0039F;
entity("Oopf") -> 16#1D546;
entity("OpenCurlyDoubleQuote") -> 16#0201C;
entity("OpenCurlyQuote") -> 16#02018;
entity("Or") -> 16#02A54;
entity("Oscr") -> 16#1D4AA;
entity("Oslash") -> 16#000D8;
entity("Otilde") -> 16#000D5;
entity("Otimes") -> 16#02A37;
entity("Ouml") -> 16#000D6;
entity("OverBar") -> 16#0203E;
entity("OverBrace") -> 16#023DE;
entity("OverBracket") -> 16#023B4;
entity("OverParenthesis") -> 16#023DC;
entity("PartialD") -> 16#02202;
entity("Pcy") -> 16#0041F;
entity("Pfr") -> 16#1D513;
entity("Phi") -> 16#003A6;
entity("Pi") -> 16#003A0;
entity("PlusMinus") -> 16#000B1;
entity("Poincareplane") -> 16#0210C;
entity("Popf") -> 16#02119;
entity("Pr") -> 16#02ABB;
entity("Precedes") -> 16#0227A;
entity("PrecedesEqual") -> 16#02AAF;
entity("PrecedesSlantEqual") -> 16#0227C;
entity("PrecedesTilde") -> 16#0227E;
entity("Prime") -> 16#02033;
entity("Product") -> 16#0220F;
entity("Proportion") -> 16#02237;
entity("Proportional") -> 16#0221D;
entity("Pscr") -> 16#1D4AB;
entity("Psi") -> 16#003A8;
entity("QUOT") -> 16#00022;
entity("Qfr") -> 16#1D514;
entity("Qopf") -> 16#0211A;
entity("Qscr") -> 16#1D4AC;
entity("RBarr") -> 16#02910;
entity("REG") -> 16#000AE;
entity("Racute") -> 16#00154;
entity("Rang") -> 16#027EB;
entity("Rarr") -> 16#021A0;
entity("Rarrtl") -> 16#02916;
entity("Rcaron") -> 16#00158;
entity("Rcedil") -> 16#00156;
entity("Rcy") -> 16#00420;
entity("Re") -> 16#0211C;
entity("ReverseElement") -> 16#0220B;
entity("ReverseEquilibrium") -> 16#021CB;
entity("ReverseUpEquilibrium") -> 16#0296F;
entity("Rfr") -> 16#0211C;
entity("Rho") -> 16#003A1;
entity("RightAngleBracket") -> 16#027E9;
entity("RightArrow") -> 16#02192;
entity("RightArrowBar") -> 16#021E5;
entity("RightArrowLeftArrow") -> 16#021C4;
entity("RightCeiling") -> 16#02309;
entity("RightDoubleBracket") -> 16#027E7;
entity("RightDownTeeVector") -> 16#0295D;
entity("RightDownVector") -> 16#021C2;
entity("RightDownVectorBar") -> 16#02955;
entity("RightFloor") -> 16#0230B;
entity("RightTee") -> 16#022A2;
entity("RightTeeArrow") -> 16#021A6;
entity("RightTeeVector") -> 16#0295B;
entity("RightTriangle") -> 16#022B3;
entity("RightTriangleBar") -> 16#029D0;
entity("RightTriangleEqual") -> 16#022B5;
entity("RightUpDownVector") -> 16#0294F;
entity("RightUpTeeVector") -> 16#0295C;
entity("RightUpVector") -> 16#021BE;
entity("RightUpVectorBar") -> 16#02954;
entity("RightVector") -> 16#021C0;
entity("RightVectorBar") -> 16#02953;
entity("Rightarrow") -> 16#021D2;
entity("Ropf") -> 16#0211D;
entity("RoundImplies") -> 16#02970;
entity("Rrightarrow") -> 16#021DB;
entity("Rscr") -> 16#0211B;
entity("Rsh") -> 16#021B1;
entity("RuleDelayed") -> 16#029F4;
entity("SHCHcy") -> 16#00429;
entity("SHcy") -> 16#00428;
entity("SOFTcy") -> 16#0042C;
entity("Sacute") -> 16#0015A;
entity("Sc") -> 16#02ABC;
entity("Scaron") -> 16#00160;
entity("Scedil") -> 16#0015E;
entity("Scirc") -> 16#0015C;
entity("Scy") -> 16#00421;
entity("Sfr") -> 16#1D516;
entity("ShortDownArrow") -> 16#02193;
entity("ShortLeftArrow") -> 16#02190;
entity("ShortRightArrow") -> 16#02192;
entity("ShortUpArrow") -> 16#02191;
entity("Sigma") -> 16#003A3;
entity("SmallCircle") -> 16#02218;
entity("Sopf") -> 16#1D54A;
entity("Sqrt") -> 16#0221A;
entity("Square") -> 16#025A1;
entity("SquareIntersection") -> 16#02293;
entity("SquareSubset") -> 16#0228F;
entity("SquareSubsetEqual") -> 16#02291;
entity("SquareSuperset") -> 16#02290;
entity("SquareSupersetEqual") -> 16#02292;
entity("SquareUnion") -> 16#02294;
entity("Sscr") -> 16#1D4AE;
entity("Star") -> 16#022C6;
entity("Sub") -> 16#022D0;
entity("Subset") -> 16#022D0;
entity("SubsetEqual") -> 16#02286;
entity("Succeeds") -> 16#0227B;
entity("SucceedsEqual") -> 16#02AB0;
entity("SucceedsSlantEqual") -> 16#0227D;
entity("SucceedsTilde") -> 16#0227F;
entity("SuchThat") -> 16#0220B;
entity("Sum") -> 16#02211;
entity("Sup") -> 16#022D1;
entity("Superset") -> 16#02283;
entity("SupersetEqual") -> 16#02287;
entity("Supset") -> 16#022D1;
entity("THORN") -> 16#000DE;
entity("TRADE") -> 16#02122;
entity("TSHcy") -> 16#0040B;
entity("TScy") -> 16#00426;
entity("Tab") -> 16#00009;
entity("Tau") -> 16#003A4;
entity("Tcaron") -> 16#00164;
entity("Tcedil") -> 16#00162;
entity("Tcy") -> 16#00422;
entity("Tfr") -> 16#1D517;
entity("Therefore") -> 16#02234;
entity("Theta") -> 16#00398;
entity("ThickSpace") -> [16#0205F, 16#0200A];
entity("ThinSpace") -> 16#02009;
entity("Tilde") -> 16#0223C;
entity("TildeEqual") -> 16#02243;
entity("TildeFullEqual") -> 16#02245;
entity("TildeTilde") -> 16#02248;
entity("Topf") -> 16#1D54B;
entity("TripleDot") -> 16#020DB;
entity("Tscr") -> 16#1D4AF;
entity("Tstrok") -> 16#00166;
entity("Uacute") -> 16#000DA;
entity("Uarr") -> 16#0219F;
entity("Uarrocir") -> 16#02949;
entity("Ubrcy") -> 16#0040E;
entity("Ubreve") -> 16#0016C;
entity("Ucirc") -> 16#000DB;
entity("Ucy") -> 16#00423;
entity("Udblac") -> 16#00170;
entity("Ufr") -> 16#1D518;
entity("Ugrave") -> 16#000D9;
entity("Umacr") -> 16#0016A;
entity("UnderBar") -> 16#0005F;
entity("UnderBrace") -> 16#023DF;
entity("UnderBracket") -> 16#023B5;
entity("UnderParenthesis") -> 16#023DD;
entity("Union") -> 16#022C3;
entity("UnionPlus") -> 16#0228E;
entity("Uogon") -> 16#00172;
entity("Uopf") -> 16#1D54C;
entity("UpArrow") -> 16#02191;
entity("UpArrowBar") -> 16#02912;
entity("UpArrowDownArrow") -> 16#021C5;
entity("UpDownArrow") -> 16#02195;
entity("UpEquilibrium") -> 16#0296E;
entity("UpTee") -> 16#022A5;
entity("UpTeeArrow") -> 16#021A5;
entity("Uparrow") -> 16#021D1;
entity("Updownarrow") -> 16#021D5;
entity("UpperLeftArrow") -> 16#02196;
entity("UpperRightArrow") -> 16#02197;
entity("Upsi") -> 16#003D2;
entity("Upsilon") -> 16#003A5;
entity("Uring") -> 16#0016E;
entity("Uscr") -> 16#1D4B0;
entity("Utilde") -> 16#00168;
entity("Uuml") -> 16#000DC;
entity("VDash") -> 16#022AB;
entity("Vbar") -> 16#02AEB;
entity("Vcy") -> 16#00412;
entity("Vdash") -> 16#022A9;
entity("Vdashl") -> 16#02AE6;
entity("Vee") -> 16#022C1;
entity("Verbar") -> 16#02016;
entity("Vert") -> 16#02016;
entity("VerticalBar") -> 16#02223;
entity("VerticalLine") -> 16#0007C;
entity("VerticalSeparator") -> 16#02758;
entity("VerticalTilde") -> 16#02240;
entity("VeryThinSpace") -> 16#0200A;
entity("Vfr") -> 16#1D519;
entity("Vopf") -> 16#1D54D;
entity("Vscr") -> 16#1D4B1;
entity("Vvdash") -> 16#022AA;
entity("Wcirc") -> 16#00174;
entity("Wedge") -> 16#022C0;
entity("Wfr") -> 16#1D51A;
entity("Wopf") -> 16#1D54E;
entity("Wscr") -> 16#1D4B2;
entity("Xfr") -> 16#1D51B;
entity("Xi") -> 16#0039E;
entity("Xopf") -> 16#1D54F;
entity("Xscr") -> 16#1D4B3;
entity("YAcy") -> 16#0042F;
entity("YIcy") -> 16#00407;
entity("YUcy") -> 16#0042E;
entity("Yacute") -> 16#000DD;
entity("Ycirc") -> 16#00176;
entity("Ycy") -> 16#0042B;
entity("Yfr") -> 16#1D51C;
entity("Yopf") -> 16#1D550;
entity("Yscr") -> 16#1D4B4;
entity("Yuml") -> 16#00178;
entity("ZHcy") -> 16#00416;
entity("Zacute") -> 16#00179;
entity("Zcaron") -> 16#0017D;
entity("Zcy") -> 16#00417;
entity("Zdot") -> 16#0017B;
entity("ZeroWidthSpace") -> 16#0200B;
entity("Zeta") -> 16#00396;
entity("Zfr") -> 16#02128;
entity("Zopf") -> 16#02124;
entity("Zscr") -> 16#1D4B5;
entity("aacute") -> 16#000E1;
entity("abreve") -> 16#00103;
entity("ac") -> 16#0223E;
entity("acE") -> [16#0223E, 16#00333];
entity("acd") -> 16#0223F;
entity("acirc") -> 16#000E2;
entity("acute") -> 16#000B4;
entity("acy") -> 16#00430;
entity("aelig") -> 16#000E6;
entity("af") -> 16#02061;
entity("afr") -> 16#1D51E;
entity("agrave") -> 16#000E0;
entity("alefsym") -> 16#02135;
entity("aleph") -> 16#02135;
entity("alpha") -> 16#003B1;
entity("amacr") -> 16#00101;
entity("amalg") -> 16#02A3F;
entity("amp") -> 16#00026;
entity("and") -> 16#02227;
entity("andand") -> 16#02A55;
entity("andd") -> 16#02A5C;
entity("andslope") -> 16#02A58;
entity("andv") -> 16#02A5A;
entity("ang") -> 16#02220;
entity("ange") -> 16#029A4;
entity("angle") -> 16#02220;
entity("angmsd") -> 16#02221;
entity("angmsdaa") -> 16#029A8;
entity("angmsdab") -> 16#029A9;
entity("angmsdac") -> 16#029AA;
entity("angmsdad") -> 16#029AB;
entity("angmsdae") -> 16#029AC;
entity("angmsdaf") -> 16#029AD;
entity("angmsdag") -> 16#029AE;
entity("angmsdah") -> 16#029AF;
entity("angrt") -> 16#0221F;
entity("angrtvb") -> 16#022BE;
entity("angrtvbd") -> 16#0299D;
entity("angsph") -> 16#02222;
entity("angst") -> 16#000C5;
entity("angzarr") -> 16#0237C;
entity("aogon") -> 16#00105;
entity("aopf") -> 16#1D552;
entity("ap") -> 16#02248;
entity("apE") -> 16#02A70;
entity("apacir") -> 16#02A6F;
entity("ape") -> 16#0224A;
entity("apid") -> 16#0224B;
entity("apos") -> 16#00027;
entity("approx") -> 16#02248;
entity("approxeq") -> 16#0224A;
entity("aring") -> 16#000E5;
entity("ascr") -> 16#1D4B6;
entity("ast") -> 16#0002A;
entity("asymp") -> 16#02248;
entity("asympeq") -> 16#0224D;
entity("atilde") -> 16#000E3;
entity("auml") -> 16#000E4;
entity("awconint") -> 16#02233;
entity("awint") -> 16#02A11;
entity("bNot") -> 16#02AED;
entity("backcong") -> 16#0224C;
entity("backepsilon") -> 16#003F6;
entity("backprime") -> 16#02035;
entity("backsim") -> 16#0223D;
entity("backsimeq") -> 16#022CD;
entity("barvee") -> 16#022BD;
entity("barwed") -> 16#02305;
entity("barwedge") -> 16#02305;
entity("bbrk") -> 16#023B5;
entity("bbrktbrk") -> 16#023B6;
entity("bcong") -> 16#0224C;
entity("bcy") -> 16#00431;
entity("bdquo") -> 16#0201E;
entity("becaus") -> 16#02235;
entity("because") -> 16#02235;
entity("bemptyv") -> 16#029B0;
entity("bepsi") -> 16#003F6;
entity("bernou") -> 16#0212C;
entity("beta") -> 16#003B2;
entity("beth") -> 16#02136;
entity("between") -> 16#0226C;
entity("bfr") -> 16#1D51F;
entity("bigcap") -> 16#022C2;
entity("bigcirc") -> 16#025EF;
entity("bigcup") -> 16#022C3;
entity("bigodot") -> 16#02A00;
entity("bigoplus") -> 16#02A01;
entity("bigotimes") -> 16#02A02;
entity("bigsqcup") -> 16#02A06;
entity("bigstar") -> 16#02605;
entity("bigtriangledown") -> 16#025BD;
entity("bigtriangleup") -> 16#025B3;
entity("biguplus") -> 16#02A04;
entity("bigvee") -> 16#022C1;
entity("bigwedge") -> 16#022C0;
entity("bkarow") -> 16#0290D;
entity("blacklozenge") -> 16#029EB;
entity("blacksquare") -> 16#025AA;
entity("blacktriangle") -> 16#025B4;
entity("blacktriangledown") -> 16#025BE;
entity("blacktriangleleft") -> 16#025C2;
entity("blacktriangleright") -> 16#025B8;
entity("blank") -> 16#02423;
entity("blk12") -> 16#02592;
entity("blk14") -> 16#02591;
entity("blk34") -> 16#02593;
entity("block") -> 16#02588;
entity("bne") -> [16#0003D, 16#020E5];
entity("bnequiv") -> [16#02261, 16#020E5];
entity("bnot") -> 16#02310;
entity("bopf") -> 16#1D553;
entity("bot") -> 16#022A5;
entity("bottom") -> 16#022A5;
entity("bowtie") -> 16#022C8;
entity("boxDL") -> 16#02557;
entity("boxDR") -> 16#02554;
entity("boxDl") -> 16#02556;
entity("boxDr") -> 16#02553;
entity("boxH") -> 16#02550;
entity("boxHD") -> 16#02566;
entity("boxHU") -> 16#02569;
entity("boxHd") -> 16#02564;
entity("boxHu") -> 16#02567;
entity("boxUL") -> 16#0255D;
entity("boxUR") -> 16#0255A;
entity("boxUl") -> 16#0255C;
entity("boxUr") -> 16#02559;
entity("boxV") -> 16#02551;
entity("boxVH") -> 16#0256C;
entity("boxVL") -> 16#02563;
entity("boxVR") -> 16#02560;
entity("boxVh") -> 16#0256B;
entity("boxVl") -> 16#02562;
entity("boxVr") -> 16#0255F;
entity("boxbox") -> 16#029C9;
entity("boxdL") -> 16#02555;
entity("boxdR") -> 16#02552;
entity("boxdl") -> 16#02510;
entity("boxdr") -> 16#0250C;
entity("boxh") -> 16#02500;
entity("boxhD") -> 16#02565;
entity("boxhU") -> 16#02568;
entity("boxhd") -> 16#0252C;
entity("boxhu") -> 16#02534;
entity("boxminus") -> 16#0229F;
entity("boxplus") -> 16#0229E;
entity("boxtimes") -> 16#022A0;
entity("boxuL") -> 16#0255B;
entity("boxuR") -> 16#02558;
entity("boxul") -> 16#02518;
entity("boxur") -> 16#02514;
entity("boxv") -> 16#02502;
entity("boxvH") -> 16#0256A;
entity("boxvL") -> 16#02561;
entity("boxvR") -> 16#0255E;
entity("boxvh") -> 16#0253C;
entity("boxvl") -> 16#02524;
entity("boxvr") -> 16#0251C;
entity("bprime") -> 16#02035;
entity("breve") -> 16#002D8;
entity("brvbar") -> 16#000A6;
entity("bscr") -> 16#1D4B7;
entity("bsemi") -> 16#0204F;
entity("bsim") -> 16#0223D;
entity("bsime") -> 16#022CD;
entity("bsol") -> 16#0005C;
entity("bsolb") -> 16#029C5;
entity("bsolhsub") -> 16#027C8;
entity("bull") -> 16#02022;
entity("bullet") -> 16#02022;
entity("bump") -> 16#0224E;
entity("bumpE") -> 16#02AAE;
entity("bumpe") -> 16#0224F;
entity("bumpeq") -> 16#0224F;
entity("cacute") -> 16#00107;
entity("cap") -> 16#02229;
entity("capand") -> 16#02A44;
entity("capbrcup") -> 16#02A49;
entity("capcap") -> 16#02A4B;
entity("capcup") -> 16#02A47;
entity("capdot") -> 16#02A40;
entity("caps") -> [16#02229, 16#0FE00];
entity("caret") -> 16#02041;
entity("caron") -> 16#002C7;
entity("ccaps") -> 16#02A4D;
entity("ccaron") -> 16#0010D;
entity("ccedil") -> 16#000E7;
entity("ccirc") -> 16#00109;
entity("ccups") -> 16#02A4C;
entity("ccupssm") -> 16#02A50;
entity("cdot") -> 16#0010B;
entity("cedil") -> 16#000B8;
entity("cemptyv") -> 16#029B2;
entity("cent") -> 16#000A2;
entity("centerdot") -> 16#000B7;
entity("cfr") -> 16#1D520;
entity("chcy") -> 16#00447;
entity("check") -> 16#02713;
entity("checkmark") -> 16#02713;
entity("chi") -> 16#003C7;
entity("cir") -> 16#025CB;
entity("cirE") -> 16#029C3;
entity("circ") -> 16#002C6;
entity("circeq") -> 16#02257;
entity("circlearrowleft") -> 16#021BA;
entity("circlearrowright") -> 16#021BB;
entity("circledR") -> 16#000AE;
entity("circledS") -> 16#024C8;
entity("circledast") -> 16#0229B;
entity("circledcirc") -> 16#0229A;
entity("circleddash") -> 16#0229D;
entity("cire") -> 16#02257;
entity("cirfnint") -> 16#02A10;
entity("cirmid") -> 16#02AEF;
entity("cirscir") -> 16#029C2;
entity("clubs") -> 16#02663;
entity("clubsuit") -> 16#02663;
entity("colon") -> 16#0003A;
entity("colone") -> 16#02254;
entity("coloneq") -> 16#02254;
entity("comma") -> 16#0002C;
entity("commat") -> 16#00040;
entity("comp") -> 16#02201;
entity("compfn") -> 16#02218;
entity("complement") -> 16#02201;
entity("complexes") -> 16#02102;
entity("cong") -> 16#02245;
entity("congdot") -> 16#02A6D;
entity("conint") -> 16#0222E;
entity("copf") -> 16#1D554;
entity("coprod") -> 16#02210;
entity("copy") -> 16#000A9;
entity("copysr") -> 16#02117;
entity("crarr") -> 16#021B5;
entity("cross") -> 16#02717;
entity("cscr") -> 16#1D4B8;
entity("csub") -> 16#02ACF;
entity("csube") -> 16#02AD1;
entity("csup") -> 16#02AD0;
entity("csupe") -> 16#02AD2;
entity("ctdot") -> 16#022EF;
entity("cudarrl") -> 16#02938;
entity("cudarrr") -> 16#02935;
entity("cuepr") -> 16#022DE;
entity("cuesc") -> 16#022DF;
entity("cularr") -> 16#021B6;
entity("cularrp") -> 16#0293D;
entity("cup") -> 16#0222A;
entity("cupbrcap") -> 16#02A48;
entity("cupcap") -> 16#02A46;
entity("cupcup") -> 16#02A4A;
entity("cupdot") -> 16#0228D;
entity("cupor") -> 16#02A45;
entity("cups") -> [16#0222A, 16#0FE00];
entity("curarr") -> 16#021B7;
entity("curarrm") -> 16#0293C;
entity("curlyeqprec") -> 16#022DE;
entity("curlyeqsucc") -> 16#022DF;
entity("curlyvee") -> 16#022CE;
entity("curlywedge") -> 16#022CF;
entity("curren") -> 16#000A4;
entity("curvearrowleft") -> 16#021B6;
entity("curvearrowright") -> 16#021B7;
entity("cuvee") -> 16#022CE;
entity("cuwed") -> 16#022CF;
entity("cwconint") -> 16#02232;
entity("cwint") -> 16#02231;
entity("cylcty") -> 16#0232D;
entity("dArr") -> 16#021D3;
entity("dHar") -> 16#02965;
entity("dagger") -> 16#02020;
entity("daleth") -> 16#02138;
entity("darr") -> 16#02193;
entity("dash") -> 16#02010;
entity("dashv") -> 16#022A3;
entity("dbkarow") -> 16#0290F;
entity("dblac") -> 16#002DD;
entity("dcaron") -> 16#0010F;
entity("dcy") -> 16#00434;
entity("dd") -> 16#02146;
entity("ddagger") -> 16#02021;
entity("ddarr") -> 16#021CA;
entity("ddotseq") -> 16#02A77;
entity("deg") -> 16#000B0;
entity("delta") -> 16#003B4;
entity("demptyv") -> 16#029B1;
entity("dfisht") -> 16#0297F;
entity("dfr") -> 16#1D521;
entity("dharl") -> 16#021C3;
entity("dharr") -> 16#021C2;
entity("diam") -> 16#022C4;
entity("diamond") -> 16#022C4;
entity("diamondsuit") -> 16#02666;
entity("diams") -> 16#02666;
entity("die") -> 16#000A8;
entity("digamma") -> 16#003DD;
entity("disin") -> 16#022F2;
entity("div") -> 16#000F7;
entity("divide") -> 16#000F7;
entity("divideontimes") -> 16#022C7;
entity("divonx") -> 16#022C7;
entity("djcy") -> 16#00452;
entity("dlcorn") -> 16#0231E;
entity("dlcrop") -> 16#0230D;
entity("dollar") -> 16#00024;
entity("dopf") -> 16#1D555;
entity("dot") -> 16#002D9;
entity("doteq") -> 16#02250;
entity("doteqdot") -> 16#02251;
entity("dotminus") -> 16#02238;
entity("dotplus") -> 16#02214;
entity("dotsquare") -> 16#022A1;
entity("doublebarwedge") -> 16#02306;
entity("downarrow") -> 16#02193;
entity("downdownarrows") -> 16#021CA;
entity("downharpoonleft") -> 16#021C3;
entity("downharpoonright") -> 16#021C2;
entity("drbkarow") -> 16#02910;
entity("drcorn") -> 16#0231F;
entity("drcrop") -> 16#0230C;
entity("dscr") -> 16#1D4B9;
entity("dscy") -> 16#00455;
entity("dsol") -> 16#029F6;
entity("dstrok") -> 16#00111;
entity("dtdot") -> 16#022F1;
entity("dtri") -> 16#025BF;
entity("dtrif") -> 16#025BE;
entity("duarr") -> 16#021F5;
entity("duhar") -> 16#0296F;
entity("dwangle") -> 16#029A6;
entity("dzcy") -> 16#0045F;
entity("dzigrarr") -> 16#027FF;
entity("eDDot") -> 16#02A77;
entity("eDot") -> 16#02251;
entity("eacute") -> 16#000E9;
entity("easter") -> 16#02A6E;
entity("ecaron") -> 16#0011B;
entity("ecir") -> 16#02256;
entity("ecirc") -> 16#000EA;
entity("ecolon") -> 16#02255;
entity("ecy") -> 16#0044D;
entity("edot") -> 16#00117;
entity("ee") -> 16#02147;
entity("efDot") -> 16#02252;
entity("efr") -> 16#1D522;
entity("eg") -> 16#02A9A;
entity("egrave") -> 16#000E8;
entity("egs") -> 16#02A96;
entity("egsdot") -> 16#02A98;
entity("el") -> 16#02A99;
entity("elinters") -> 16#023E7;
entity("ell") -> 16#02113;
entity("els") -> 16#02A95;
entity("elsdot") -> 16#02A97;
entity("emacr") -> 16#00113;
entity("empty") -> 16#02205;
entity("emptyset") -> 16#02205;
entity("emptyv") -> 16#02205;
entity("emsp") -> 16#02003;
entity("emsp13") -> 16#02004;
entity("emsp14") -> 16#02005;
entity("eng") -> 16#0014B;
entity("ensp") -> 16#02002;
entity("eogon") -> 16#00119;
entity("eopf") -> 16#1D556;
entity("epar") -> 16#022D5;
entity("eparsl") -> 16#029E3;
entity("eplus") -> 16#02A71;
entity("epsi") -> 16#003B5;
entity("epsilon") -> 16#003B5;
entity("epsiv") -> 16#003F5;
entity("eqcirc") -> 16#02256;
entity("eqcolon") -> 16#02255;
entity("eqsim") -> 16#02242;
entity("eqslantgtr") -> 16#02A96;
entity("eqslantless") -> 16#02A95;
entity("equals") -> 16#0003D;
entity("equest") -> 16#0225F;
entity("equiv") -> 16#02261;
entity("equivDD") -> 16#02A78;
entity("eqvparsl") -> 16#029E5;
entity("erDot") -> 16#02253;
entity("erarr") -> 16#02971;
entity("escr") -> 16#0212F;
entity("esdot") -> 16#02250;
entity("esim") -> 16#02242;
entity("eta") -> 16#003B7;
entity("eth") -> 16#000F0;
entity("euml") -> 16#000EB;
entity("euro") -> 16#020AC;
entity("excl") -> 16#00021;
entity("exist") -> 16#02203;
entity("expectation") -> 16#02130;
entity("exponentiale") -> 16#02147;
entity("fallingdotseq") -> 16#02252;
entity("fcy") -> 16#00444;
entity("female") -> 16#02640;
entity("ffilig") -> 16#0FB03;
entity("fflig") -> 16#0FB00;
entity("ffllig") -> 16#0FB04;
entity("ffr") -> 16#1D523;
entity("filig") -> 16#0FB01;
entity("fjlig") -> [16#00066, 16#0006A];
entity("flat") -> 16#0266D;
entity("fllig") -> 16#0FB02;
entity("fltns") -> 16#025B1;
entity("fnof") -> 16#00192;
entity("fopf") -> 16#1D557;
entity("forall") -> 16#02200;
entity("fork") -> 16#022D4;
entity("forkv") -> 16#02AD9;
entity("fpartint") -> 16#02A0D;
entity("frac12") -> 16#000BD;
entity("frac13") -> 16#02153;
entity("frac14") -> 16#000BC;
entity("frac15") -> 16#02155;
entity("frac16") -> 16#02159;
entity("frac18") -> 16#0215B;
entity("frac23") -> 16#02154;
entity("frac25") -> 16#02156;
entity("frac34") -> 16#000BE;
entity("frac35") -> 16#02157;
entity("frac38") -> 16#0215C;
entity("frac45") -> 16#02158;
entity("frac56") -> 16#0215A;
entity("frac58") -> 16#0215D;
entity("frac78") -> 16#0215E;
entity("frasl") -> 16#02044;
entity("frown") -> 16#02322;
entity("fscr") -> 16#1D4BB;
entity("gE") -> 16#02267;
entity("gEl") -> 16#02A8C;
entity("gacute") -> 16#001F5;
entity("gamma") -> 16#003B3;
entity("gammad") -> 16#003DD;
entity("gap") -> 16#02A86;
entity("gbreve") -> 16#0011F;
entity("gcirc") -> 16#0011D;
entity("gcy") -> 16#00433;
entity("gdot") -> 16#00121;
entity("ge") -> 16#02265;
entity("gel") -> 16#022DB;
entity("geq") -> 16#02265;
entity("geqq") -> 16#02267;
entity("geqslant") -> 16#02A7E;
entity("ges") -> 16#02A7E;
entity("gescc") -> 16#02AA9;
entity("gesdot") -> 16#02A80;
entity("gesdoto") -> 16#02A82;
entity("gesdotol") -> 16#02A84;
entity("gesl") -> [16#022DB, 16#0FE00];
entity("gesles") -> 16#02A94;
entity("gfr") -> 16#1D524;
entity("gg") -> 16#0226B;
entity("ggg") -> 16#022D9;
entity("gimel") -> 16#02137;
entity("gjcy") -> 16#00453;
entity("gl") -> 16#02277;
entity("glE") -> 16#02A92;
entity("gla") -> 16#02AA5;
entity("glj") -> 16#02AA4;
entity("gnE") -> 16#02269;
entity("gnap") -> 16#02A8A;
entity("gnapprox") -> 16#02A8A;
entity("gne") -> 16#02A88;
entity("gneq") -> 16#02A88;
entity("gneqq") -> 16#02269;
entity("gnsim") -> 16#022E7;
entity("gopf") -> 16#1D558;
entity("grave") -> 16#00060;
entity("gscr") -> 16#0210A;
entity("gsim") -> 16#02273;
entity("gsime") -> 16#02A8E;
entity("gsiml") -> 16#02A90;
entity("gt") -> 16#0003E;
entity("gtcc") -> 16#02AA7;
entity("gtcir") -> 16#02A7A;
entity("gtdot") -> 16#022D7;
entity("gtlPar") -> 16#02995;
entity("gtquest") -> 16#02A7C;
entity("gtrapprox") -> 16#02A86;
entity("gtrarr") -> 16#02978;
entity("gtrdot") -> 16#022D7;
entity("gtreqless") -> 16#022DB;
entity("gtreqqless") -> 16#02A8C;
entity("gtrless") -> 16#02277;
entity("gtrsim") -> 16#02273;
entity("gvertneqq") -> [16#02269, 16#0FE00];
entity("gvnE") -> [16#02269, 16#0FE00];
entity("hArr") -> 16#021D4;
entity("hairsp") -> 16#0200A;
entity("half") -> 16#000BD;
entity("hamilt") -> 16#0210B;
entity("hardcy") -> 16#0044A;
entity("harr") -> 16#02194;
entity("harrcir") -> 16#02948;
entity("harrw") -> 16#021AD;
entity("hbar") -> 16#0210F;
entity("hcirc") -> 16#00125;
entity("hearts") -> 16#02665;
entity("heartsuit") -> 16#02665;
entity("hellip") -> 16#02026;
entity("hercon") -> 16#022B9;
entity("hfr") -> 16#1D525;
entity("hksearow") -> 16#02925;
entity("hkswarow") -> 16#02926;
entity("hoarr") -> 16#021FF;
entity("homtht") -> 16#0223B;
entity("hookleftarrow") -> 16#021A9;
entity("hookrightarrow") -> 16#021AA;
entity("hopf") -> 16#1D559;
entity("horbar") -> 16#02015;
entity("hscr") -> 16#1D4BD;
entity("hslash") -> 16#0210F;
entity("hstrok") -> 16#00127;
entity("hybull") -> 16#02043;
entity("hyphen") -> 16#02010;
entity("iacute") -> 16#000ED;
entity("ic") -> 16#02063;
entity("icirc") -> 16#000EE;
entity("icy") -> 16#00438;
entity("iecy") -> 16#00435;
entity("iexcl") -> 16#000A1;
entity("iff") -> 16#021D4;
entity("ifr") -> 16#1D526;
entity("igrave") -> 16#000EC;
entity("ii") -> 16#02148;
entity("iiiint") -> 16#02A0C;
entity("iiint") -> 16#0222D;
entity("iinfin") -> 16#029DC;
entity("iiota") -> 16#02129;
entity("ijlig") -> 16#00133;
entity("imacr") -> 16#0012B;
entity("image") -> 16#02111;
entity("imagline") -> 16#02110;
entity("imagpart") -> 16#02111;
entity("imath") -> 16#00131;
entity("imof") -> 16#022B7;
entity("imped") -> 16#001B5;
entity("in") -> 16#02208;
entity("incare") -> 16#02105;
entity("infin") -> 16#0221E;
entity("infintie") -> 16#029DD;
entity("inodot") -> 16#00131;
entity("int") -> 16#0222B;
entity("intcal") -> 16#022BA;
entity("integers") -> 16#02124;
entity("intercal") -> 16#022BA;
entity("intlarhk") -> 16#02A17;
entity("intprod") -> 16#02A3C;
entity("iocy") -> 16#00451;
entity("iogon") -> 16#0012F;
entity("iopf") -> 16#1D55A;
entity("iota") -> 16#003B9;
entity("iprod") -> 16#02A3C;
entity("iquest") -> 16#000BF;
entity("iscr") -> 16#1D4BE;
entity("isin") -> 16#02208;
entity("isinE") -> 16#022F9;
entity("isindot") -> 16#022F5;
entity("isins") -> 16#022F4;
entity("isinsv") -> 16#022F3;
entity("isinv") -> 16#02208;
entity("it") -> 16#02062;
entity("itilde") -> 16#00129;
entity("iukcy") -> 16#00456;
entity("iuml") -> 16#000EF;
entity("jcirc") -> 16#00135;
entity("jcy") -> 16#00439;
entity("jfr") -> 16#1D527;
entity("jmath") -> 16#00237;
entity("jopf") -> 16#1D55B;
entity("jscr") -> 16#1D4BF;
entity("jsercy") -> 16#00458;
entity("jukcy") -> 16#00454;
entity("kappa") -> 16#003BA;
entity("kappav") -> 16#003F0;
entity("kcedil") -> 16#00137;
entity("kcy") -> 16#0043A;
entity("kfr") -> 16#1D528;
entity("kgreen") -> 16#00138;
entity("khcy") -> 16#00445;
entity("kjcy") -> 16#0045C;
entity("kopf") -> 16#1D55C;
entity("kscr") -> 16#1D4C0;
entity("lAarr") -> 16#021DA;
entity("lArr") -> 16#021D0;
entity("lAtail") -> 16#0291B;
entity("lBarr") -> 16#0290E;
entity("lE") -> 16#02266;
entity("lEg") -> 16#02A8B;
entity("lHar") -> 16#02962;
entity("lacute") -> 16#0013A;
entity("laemptyv") -> 16#029B4;
entity("lagran") -> 16#02112;
entity("lambda") -> 16#003BB;
entity("lang") -> 16#027E8;
entity("langd") -> 16#02991;
entity("langle") -> 16#027E8;
entity("lap") -> 16#02A85;
entity("laquo") -> 16#000AB;
entity("larr") -> 16#02190;
entity("larrb") -> 16#021E4;
entity("larrbfs") -> 16#0291F;
entity("larrfs") -> 16#0291D;
entity("larrhk") -> 16#021A9;
entity("larrlp") -> 16#021AB;
entity("larrpl") -> 16#02939;
entity("larrsim") -> 16#02973;
entity("larrtl") -> 16#021A2;
entity("lat") -> 16#02AAB;
entity("latail") -> 16#02919;
entity("late") -> 16#02AAD;
entity("lates") -> [16#02AAD, 16#0FE00];
entity("lbarr") -> 16#0290C;
entity("lbbrk") -> 16#02772;
entity("lbrace") -> 16#0007B;
entity("lbrack") -> 16#0005B;
entity("lbrke") -> 16#0298B;
entity("lbrksld") -> 16#0298F;
entity("lbrkslu") -> 16#0298D;
entity("lcaron") -> 16#0013E;
entity("lcedil") -> 16#0013C;
entity("lceil") -> 16#02308;
entity("lcub") -> 16#0007B;
entity("lcy") -> 16#0043B;
entity("ldca") -> 16#02936;
entity("ldquo") -> 16#0201C;
entity("ldquor") -> 16#0201E;
entity("ldrdhar") -> 16#02967;
entity("ldrushar") -> 16#0294B;
entity("ldsh") -> 16#021B2;
entity("le") -> 16#02264;
entity("leftarrow") -> 16#02190;
entity("leftarrowtail") -> 16#021A2;
entity("leftharpoondown") -> 16#021BD;
entity("leftharpoonup") -> 16#021BC;
entity("leftleftarrows") -> 16#021C7;
entity("leftrightarrow") -> 16#02194;
entity("leftrightarrows") -> 16#021C6;
entity("leftrightharpoons") -> 16#021CB;
entity("leftrightsquigarrow") -> 16#021AD;
entity("leftthreetimes") -> 16#022CB;
entity("leg") -> 16#022DA;
entity("leq") -> 16#02264;
entity("leqq") -> 16#02266;
entity("leqslant") -> 16#02A7D;
entity("les") -> 16#02A7D;
entity("lescc") -> 16#02AA8;
entity("lesdot") -> 16#02A7F;
entity("lesdoto") -> 16#02A81;
entity("lesdotor") -> 16#02A83;
entity("lesg") -> [16#022DA, 16#0FE00];
entity("lesges") -> 16#02A93;
entity("lessapprox") -> 16#02A85;
entity("lessdot") -> 16#022D6;
entity("lesseqgtr") -> 16#022DA;
entity("lesseqqgtr") -> 16#02A8B;
entity("lessgtr") -> 16#02276;
entity("lesssim") -> 16#02272;
entity("lfisht") -> 16#0297C;
entity("lfloor") -> 16#0230A;
entity("lfr") -> 16#1D529;
entity("lg") -> 16#02276;
entity("lgE") -> 16#02A91;
entity("lhard") -> 16#021BD;
entity("lharu") -> 16#021BC;
entity("lharul") -> 16#0296A;
entity("lhblk") -> 16#02584;
entity("ljcy") -> 16#00459;
entity("ll") -> 16#0226A;
entity("llarr") -> 16#021C7;
entity("llcorner") -> 16#0231E;
entity("llhard") -> 16#0296B;
entity("lltri") -> 16#025FA;
entity("lmidot") -> 16#00140;
entity("lmoust") -> 16#023B0;
entity("lmoustache") -> 16#023B0;
entity("lnE") -> 16#02268;
entity("lnap") -> 16#02A89;
entity("lnapprox") -> 16#02A89;
entity("lne") -> 16#02A87;
entity("lneq") -> 16#02A87;
entity("lneqq") -> 16#02268;
entity("lnsim") -> 16#022E6;
entity("loang") -> 16#027EC;
entity("loarr") -> 16#021FD;
entity("lobrk") -> 16#027E6;
entity("longleftarrow") -> 16#027F5;
entity("longleftrightarrow") -> 16#027F7;
entity("longmapsto") -> 16#027FC;
entity("longrightarrow") -> 16#027F6;
entity("looparrowleft") -> 16#021AB;
entity("looparrowright") -> 16#021AC;
entity("lopar") -> 16#02985;
entity("lopf") -> 16#1D55D;
entity("loplus") -> 16#02A2D;
entity("lotimes") -> 16#02A34;
entity("lowast") -> 16#02217;
entity("lowbar") -> 16#0005F;
entity("loz") -> 16#025CA;
entity("lozenge") -> 16#025CA;
entity("lozf") -> 16#029EB;
entity("lpar") -> 16#00028;
entity("lparlt") -> 16#02993;
entity("lrarr") -> 16#021C6;
entity("lrcorner") -> 16#0231F;
entity("lrhar") -> 16#021CB;
entity("lrhard") -> 16#0296D;
entity("lrm") -> 16#0200E;
entity("lrtri") -> 16#022BF;
entity("lsaquo") -> 16#02039;
entity("lscr") -> 16#1D4C1;
entity("lsh") -> 16#021B0;
entity("lsim") -> 16#02272;
entity("lsime") -> 16#02A8D;
entity("lsimg") -> 16#02A8F;
entity("lsqb") -> 16#0005B;
entity("lsquo") -> 16#02018;
entity("lsquor") -> 16#0201A;
entity("lstrok") -> 16#00142;
entity("lt") -> 16#0003C;
entity("ltcc") -> 16#02AA6;
entity("ltcir") -> 16#02A79;
entity("ltdot") -> 16#022D6;
entity("lthree") -> 16#022CB;
entity("ltimes") -> 16#022C9;
entity("ltlarr") -> 16#02976;
entity("ltquest") -> 16#02A7B;
entity("ltrPar") -> 16#02996;
entity("ltri") -> 16#025C3;
entity("ltrie") -> 16#022B4;
entity("ltrif") -> 16#025C2;
entity("lurdshar") -> 16#0294A;
entity("luruhar") -> 16#02966;
entity("lvertneqq") -> [16#02268, 16#0FE00];
entity("lvnE") -> [16#02268, 16#0FE00];
entity("mDDot") -> 16#0223A;
entity("macr") -> 16#000AF;
entity("male") -> 16#02642;
entity("malt") -> 16#02720;
entity("maltese") -> 16#02720;
entity("map") -> 16#021A6;
entity("mapsto") -> 16#021A6;
entity("mapstodown") -> 16#021A7;
entity("mapstoleft") -> 16#021A4;
entity("mapstoup") -> 16#021A5;
entity("marker") -> 16#025AE;
entity("mcomma") -> 16#02A29;
entity("mcy") -> 16#0043C;
entity("mdash") -> 16#02014;
entity("measuredangle") -> 16#02221;
entity("mfr") -> 16#1D52A;
entity("mho") -> 16#02127;
entity("micro") -> 16#000B5;
entity("mid") -> 16#02223;
entity("midast") -> 16#0002A;
entity("midcir") -> 16#02AF0;
entity("middot") -> 16#000B7;
entity("minus") -> 16#02212;
entity("minusb") -> 16#0229F;
entity("minusd") -> 16#02238;
entity("minusdu") -> 16#02A2A;
entity("mlcp") -> 16#02ADB;
entity("mldr") -> 16#02026;
entity("mnplus") -> 16#02213;
entity("models") -> 16#022A7;
entity("mopf") -> 16#1D55E;
entity("mp") -> 16#02213;
entity("mscr") -> 16#1D4C2;
entity("mstpos") -> 16#0223E;
entity("mu") -> 16#003BC;
entity("multimap") -> 16#022B8;
entity("mumap") -> 16#022B8;
entity("nGg") -> [16#022D9, 16#00338];
entity("nGt") -> [16#0226B, 16#020D2];
entity("nGtv") -> [16#0226B, 16#00338];
entity("nLeftarrow") -> 16#021CD;
entity("nLeftrightarrow") -> 16#021CE;
entity("nLl") -> [16#022D8, 16#00338];
entity("nLt") -> [16#0226A, 16#020D2];
entity("nLtv") -> [16#0226A, 16#00338];
entity("nRightarrow") -> 16#021CF;
entity("nVDash") -> 16#022AF;
entity("nVdash") -> 16#022AE;
entity("nabla") -> 16#02207;
entity("nacute") -> 16#00144;
entity("nang") -> [16#02220, 16#020D2];
entity("nap") -> 16#02249;
entity("napE") -> [16#02A70, 16#00338];
entity("napid") -> [16#0224B, 16#00338];
entity("napos") -> 16#00149;
entity("napprox") -> 16#02249;
entity("natur") -> 16#0266E;
entity("natural") -> 16#0266E;
entity("naturals") -> 16#02115;
entity("nbsp") -> 16#000A0;
entity("nbump") -> [16#0224E, 16#00338];
entity("nbumpe") -> [16#0224F, 16#00338];
entity("ncap") -> 16#02A43;
entity("ncaron") -> 16#00148;
entity("ncedil") -> 16#00146;
entity("ncong") -> 16#02247;
entity("ncongdot") -> [16#02A6D, 16#00338];
entity("ncup") -> 16#02A42;
entity("ncy") -> 16#0043D;
entity("ndash") -> 16#02013;
entity("ne") -> 16#02260;
entity("neArr") -> 16#021D7;
entity("nearhk") -> 16#02924;
entity("nearr") -> 16#02197;
entity("nearrow") -> 16#02197;
entity("nedot") -> [16#02250, 16#00338];
entity("nequiv") -> 16#02262;
entity("nesear") -> 16#02928;
entity("nesim") -> [16#02242, 16#00338];
entity("nexist") -> 16#02204;
entity("nexists") -> 16#02204;
entity("nfr") -> 16#1D52B;
entity("ngE") -> [16#02267, 16#00338];
entity("nge") -> 16#02271;
entity("ngeq") -> 16#02271;
entity("ngeqq") -> [16#02267, 16#00338];
entity("ngeqslant") -> [16#02A7E, 16#00338];
entity("nges") -> [16#02A7E, 16#00338];
entity("ngsim") -> 16#02275;
entity("ngt") -> 16#0226F;
entity("ngtr") -> 16#0226F;
entity("nhArr") -> 16#021CE;
entity("nharr") -> 16#021AE;
entity("nhpar") -> 16#02AF2;
entity("ni") -> 16#0220B;
entity("nis") -> 16#022FC;
entity("nisd") -> 16#022FA;
entity("niv") -> 16#0220B;
entity("njcy") -> 16#0045A;
entity("nlArr") -> 16#021CD;
entity("nlE") -> [16#02266, 16#00338];
entity("nlarr") -> 16#0219A;
entity("nldr") -> 16#02025;
entity("nle") -> 16#02270;
entity("nleftarrow") -> 16#0219A;
entity("nleftrightarrow") -> 16#021AE;
entity("nleq") -> 16#02270;
entity("nleqq") -> [16#02266, 16#00338];
entity("nleqslant") -> [16#02A7D, 16#00338];
entity("nles") -> [16#02A7D, 16#00338];
entity("nless") -> 16#0226E;
entity("nlsim") -> 16#02274;
entity("nlt") -> 16#0226E;
entity("nltri") -> 16#022EA;
entity("nltrie") -> 16#022EC;
entity("nmid") -> 16#02224;
entity("nopf") -> 16#1D55F;
entity("not") -> 16#000AC;
entity("notin") -> 16#02209;
entity("notinE") -> [16#022F9, 16#00338];
entity("notindot") -> [16#022F5, 16#00338];
entity("notinva") -> 16#02209;
entity("notinvb") -> 16#022F7;
entity("notinvc") -> 16#022F6;
entity("notni") -> 16#0220C;
entity("notniva") -> 16#0220C;
entity("notnivb") -> 16#022FE;
entity("notnivc") -> 16#022FD;
entity("npar") -> 16#02226;
entity("nparallel") -> 16#02226;
entity("nparsl") -> [16#02AFD, 16#020E5];
entity("npart") -> [16#02202, 16#00338];
entity("npolint") -> 16#02A14;
entity("npr") -> 16#02280;
entity("nprcue") -> 16#022E0;
entity("npre") -> [16#02AAF, 16#00338];
entity("nprec") -> 16#02280;
entity("npreceq") -> [16#02AAF, 16#00338];
entity("nrArr") -> 16#021CF;
entity("nrarr") -> 16#0219B;
entity("nrarrc") -> [16#02933, 16#00338];
entity("nrarrw") -> [16#0219D, 16#00338];
entity("nrightarrow") -> 16#0219B;
entity("nrtri") -> 16#022EB;
entity("nrtrie") -> 16#022ED;
entity("nsc") -> 16#02281;
entity("nsccue") -> 16#022E1;
entity("nsce") -> [16#02AB0, 16#00338];
entity("nscr") -> 16#1D4C3;
entity("nshortmid") -> 16#02224;
entity("nshortparallel") -> 16#02226;
entity("nsim") -> 16#02241;
entity("nsime") -> 16#02244;
entity("nsimeq") -> 16#02244;
entity("nsmid") -> 16#02224;
entity("nspar") -> 16#02226;
entity("nsqsube") -> 16#022E2;
entity("nsqsupe") -> 16#022E3;
entity("nsub") -> 16#02284;
entity("nsubE") -> [16#02AC5, 16#00338];
entity("nsube") -> 16#02288;
entity("nsubset") -> [16#02282, 16#020D2];
entity("nsubseteq") -> 16#02288;
entity("nsubseteqq") -> [16#02AC5, 16#00338];
entity("nsucc") -> 16#02281;
entity("nsucceq") -> [16#02AB0, 16#00338];
entity("nsup") -> 16#02285;
entity("nsupE") -> [16#02AC6, 16#00338];
entity("nsupe") -> 16#02289;
entity("nsupset") -> [16#02283, 16#020D2];
entity("nsupseteq") -> 16#02289;
entity("nsupseteqq") -> [16#02AC6, 16#00338];
entity("ntgl") -> 16#02279;
entity("ntilde") -> 16#000F1;
entity("ntlg") -> 16#02278;
entity("ntriangleleft") -> 16#022EA;
entity("ntrianglelefteq") -> 16#022EC;
entity("ntriangleright") -> 16#022EB;
entity("ntrianglerighteq") -> 16#022ED;
entity("nu") -> 16#003BD;
entity("num") -> 16#00023;
entity("numero") -> 16#02116;
entity("numsp") -> 16#02007;
entity("nvDash") -> 16#022AD;
entity("nvHarr") -> 16#02904;
entity("nvap") -> [16#0224D, 16#020D2];
entity("nvdash") -> 16#022AC;
entity("nvge") -> [16#02265, 16#020D2];
entity("nvgt") -> [16#0003E, 16#020D2];
entity("nvinfin") -> 16#029DE;
entity("nvlArr") -> 16#02902;
entity("nvle") -> [16#02264, 16#020D2];
entity("nvlt") -> [16#0003C, 16#020D2];
entity("nvltrie") -> [16#022B4, 16#020D2];
entity("nvrArr") -> 16#02903;
entity("nvrtrie") -> [16#022B5, 16#020D2];
entity("nvsim") -> [16#0223C, 16#020D2];
entity("nwArr") -> 16#021D6;
entity("nwarhk") -> 16#02923;
entity("nwarr") -> 16#02196;
entity("nwarrow") -> 16#02196;
entity("nwnear") -> 16#02927;
entity("oS") -> 16#024C8;
entity("oacute") -> 16#000F3;
entity("oast") -> 16#0229B;
entity("ocir") -> 16#0229A;
entity("ocirc") -> 16#000F4;
entity("ocy") -> 16#0043E;
entity("odash") -> 16#0229D;
entity("odblac") -> 16#00151;
entity("odiv") -> 16#02A38;
entity("odot") -> 16#02299;
entity("odsold") -> 16#029BC;
entity("oelig") -> 16#00153;
entity("ofcir") -> 16#029BF;
entity("ofr") -> 16#1D52C;
entity("ogon") -> 16#002DB;
entity("ograve") -> 16#000F2;
entity("ogt") -> 16#029C1;
entity("ohbar") -> 16#029B5;
entity("ohm") -> 16#003A9;
entity("oint") -> 16#0222E;
entity("olarr") -> 16#021BA;
entity("olcir") -> 16#029BE;
entity("olcross") -> 16#029BB;
entity("oline") -> 16#0203E;
entity("olt") -> 16#029C0;
entity("omacr") -> 16#0014D;
entity("omega") -> 16#003C9;
entity("omicron") -> 16#003BF;
entity("omid") -> 16#029B6;
entity("ominus") -> 16#02296;
entity("oopf") -> 16#1D560;
entity("opar") -> 16#029B7;
entity("operp") -> 16#029B9;
entity("oplus") -> 16#02295;
entity("or") -> 16#02228;
entity("orarr") -> 16#021BB;
entity("ord") -> 16#02A5D;
entity("order") -> 16#02134;
entity("orderof") -> 16#02134;
entity("ordf") -> 16#000AA;
entity("ordm") -> 16#000BA;
entity("origof") -> 16#022B6;
entity("oror") -> 16#02A56;
entity("orslope") -> 16#02A57;
entity("orv") -> 16#02A5B;
entity("oscr") -> 16#02134;
entity("oslash") -> 16#000F8;
entity("osol") -> 16#02298;
entity("otilde") -> 16#000F5;
entity("otimes") -> 16#02297;
entity("otimesas") -> 16#02A36;
entity("ouml") -> 16#000F6;
entity("ovbar") -> 16#0233D;
entity("par") -> 16#02225;
entity("para") -> 16#000B6;
entity("parallel") -> 16#02225;
entity("parsim") -> 16#02AF3;
entity("parsl") -> 16#02AFD;
entity("part") -> 16#02202;
entity("pcy") -> 16#0043F;
entity("percnt") -> 16#00025;
entity("period") -> 16#0002E;
entity("permil") -> 16#02030;
entity("perp") -> 16#022A5;
entity("pertenk") -> 16#02031;
entity("pfr") -> 16#1D52D;
entity("phi") -> 16#003C6;
entity("phiv") -> 16#003D5;
entity("phmmat") -> 16#02133;
entity("phone") -> 16#0260E;
entity("pi") -> 16#003C0;
entity("pitchfork") -> 16#022D4;
entity("piv") -> 16#003D6;
entity("planck") -> 16#0210F;
entity("planckh") -> 16#0210E;
entity("plankv") -> 16#0210F;
entity("plus") -> 16#0002B;
entity("plusacir") -> 16#02A23;
entity("plusb") -> 16#0229E;
entity("pluscir") -> 16#02A22;
entity("plusdo") -> 16#02214;
entity("plusdu") -> 16#02A25;
entity("pluse") -> 16#02A72;
entity("plusmn") -> 16#000B1;
entity("plussim") -> 16#02A26;
entity("plustwo") -> 16#02A27;
entity("pm") -> 16#000B1;
entity("pointint") -> 16#02A15;
entity("popf") -> 16#1D561;
entity("pound") -> 16#000A3;
entity("pr") -> 16#0227A;
entity("prE") -> 16#02AB3;
entity("prap") -> 16#02AB7;
entity("prcue") -> 16#0227C;
entity("pre") -> 16#02AAF;
entity("prec") -> 16#0227A;
entity("precapprox") -> 16#02AB7;
entity("preccurlyeq") -> 16#0227C;
entity("preceq") -> 16#02AAF;
entity("precnapprox") -> 16#02AB9;
entity("precneqq") -> 16#02AB5;
entity("precnsim") -> 16#022E8;
entity("precsim") -> 16#0227E;
entity("prime") -> 16#02032;
entity("primes") -> 16#02119;
entity("prnE") -> 16#02AB5;
entity("prnap") -> 16#02AB9;
entity("prnsim") -> 16#022E8;
entity("prod") -> 16#0220F;
entity("profalar") -> 16#0232E;
entity("profline") -> 16#02312;
entity("profsurf") -> 16#02313;
entity("prop") -> 16#0221D;
entity("propto") -> 16#0221D;
entity("prsim") -> 16#0227E;
entity("prurel") -> 16#022B0;
entity("pscr") -> 16#1D4C5;
entity("psi") -> 16#003C8;
entity("puncsp") -> 16#02008;
entity("qfr") -> 16#1D52E;
entity("qint") -> 16#02A0C;
entity("qopf") -> 16#1D562;
entity("qprime") -> 16#02057;
entity("qscr") -> 16#1D4C6;
entity("quaternions") -> 16#0210D;
entity("quatint") -> 16#02A16;
entity("quest") -> 16#0003F;
entity("questeq") -> 16#0225F;
entity("quot") -> 16#00022;
entity("rAarr") -> 16#021DB;
entity("rArr") -> 16#021D2;
entity("rAtail") -> 16#0291C;
entity("rBarr") -> 16#0290F;
entity("rHar") -> 16#02964;
entity("race") -> [16#0223D, 16#00331];
entity("racute") -> 16#00155;
entity("radic") -> 16#0221A;
entity("raemptyv") -> 16#029B3;
entity("rang") -> 16#027E9;
entity("rangd") -> 16#02992;
entity("range") -> 16#029A5;
entity("rangle") -> 16#027E9;
entity("raquo") -> 16#000BB;
entity("rarr") -> 16#02192;
entity("rarrap") -> 16#02975;
entity("rarrb") -> 16#021E5;
entity("rarrbfs") -> 16#02920;
entity("rarrc") -> 16#02933;
entity("rarrfs") -> 16#0291E;
entity("rarrhk") -> 16#021AA;
entity("rarrlp") -> 16#021AC;
entity("rarrpl") -> 16#02945;
entity("rarrsim") -> 16#02974;
entity("rarrtl") -> 16#021A3;
entity("rarrw") -> 16#0219D;
entity("ratail") -> 16#0291A;
entity("ratio") -> 16#02236;
entity("rationals") -> 16#0211A;
entity("rbarr") -> 16#0290D;
entity("rbbrk") -> 16#02773;
entity("rbrace") -> 16#0007D;
entity("rbrack") -> 16#0005D;
entity("rbrke") -> 16#0298C;
entity("rbrksld") -> 16#0298E;
entity("rbrkslu") -> 16#02990;
entity("rcaron") -> 16#00159;
entity("rcedil") -> 16#00157;
entity("rceil") -> 16#02309;
entity("rcub") -> 16#0007D;
entity("rcy") -> 16#00440;
entity("rdca") -> 16#02937;
entity("rdldhar") -> 16#02969;
entity("rdquo") -> 16#0201D;
entity("rdquor") -> 16#0201D;
entity("rdsh") -> 16#021B3;
entity("real") -> 16#0211C;
entity("realine") -> 16#0211B;
entity("realpart") -> 16#0211C;
entity("reals") -> 16#0211D;
entity("rect") -> 16#025AD;
entity("reg") -> 16#000AE;
entity("rfisht") -> 16#0297D;
entity("rfloor") -> 16#0230B;
entity("rfr") -> 16#1D52F;
entity("rhard") -> 16#021C1;
entity("rharu") -> 16#021C0;
entity("rharul") -> 16#0296C;
entity("rho") -> 16#003C1;
entity("rhov") -> 16#003F1;
entity("rightarrow") -> 16#02192;
entity("rightarrowtail") -> 16#021A3;
entity("rightharpoondown") -> 16#021C1;
entity("rightharpoonup") -> 16#021C0;
entity("rightleftarrows") -> 16#021C4;
entity("rightleftharpoons") -> 16#021CC;
entity("rightrightarrows") -> 16#021C9;
entity("rightsquigarrow") -> 16#0219D;
entity("rightthreetimes") -> 16#022CC;
entity("ring") -> 16#002DA;
entity("risingdotseq") -> 16#02253;
entity("rlarr") -> 16#021C4;
entity("rlhar") -> 16#021CC;
entity("rlm") -> 16#0200F;
entity("rmoust") -> 16#023B1;
entity("rmoustache") -> 16#023B1;
entity("rnmid") -> 16#02AEE;
entity("roang") -> 16#027ED;
entity("roarr") -> 16#021FE;
entity("robrk") -> 16#027E7;
entity("ropar") -> 16#02986;
entity("ropf") -> 16#1D563;
entity("roplus") -> 16#02A2E;
entity("rotimes") -> 16#02A35;
entity("rpar") -> 16#00029;
entity("rpargt") -> 16#02994;
entity("rppolint") -> 16#02A12;
entity("rrarr") -> 16#021C9;
entity("rsaquo") -> 16#0203A;
entity("rscr") -> 16#1D4C7;
entity("rsh") -> 16#021B1;
entity("rsqb") -> 16#0005D;
entity("rsquo") -> 16#02019;
entity("rsquor") -> 16#02019;
entity("rthree") -> 16#022CC;
entity("rtimes") -> 16#022CA;
entity("rtri") -> 16#025B9;
entity("rtrie") -> 16#022B5;
entity("rtrif") -> 16#025B8;
entity("rtriltri") -> 16#029CE;
entity("ruluhar") -> 16#02968;
entity("rx") -> 16#0211E;
entity("sacute") -> 16#0015B;
entity("sbquo") -> 16#0201A;
entity("sc") -> 16#0227B;
entity("scE") -> 16#02AB4;
entity("scap") -> 16#02AB8;
entity("scaron") -> 16#00161;
entity("sccue") -> 16#0227D;
entity("sce") -> 16#02AB0;
entity("scedil") -> 16#0015F;
entity("scirc") -> 16#0015D;
entity("scnE") -> 16#02AB6;
entity("scnap") -> 16#02ABA;
entity("scnsim") -> 16#022E9;
entity("scpolint") -> 16#02A13;
entity("scsim") -> 16#0227F;
entity("scy") -> 16#00441;
entity("sdot") -> 16#022C5;
entity("sdotb") -> 16#022A1;
entity("sdote") -> 16#02A66;
entity("seArr") -> 16#021D8;
entity("searhk") -> 16#02925;
entity("searr") -> 16#02198;
entity("searrow") -> 16#02198;
entity("sect") -> 16#000A7;
entity("semi") -> 16#0003B;
entity("seswar") -> 16#02929;
entity("setminus") -> 16#02216;
entity("setmn") -> 16#02216;
entity("sext") -> 16#02736;
entity("sfr") -> 16#1D530;
entity("sfrown") -> 16#02322;
entity("sharp") -> 16#0266F;
entity("shchcy") -> 16#00449;
entity("shcy") -> 16#00448;
entity("shortmid") -> 16#02223;
entity("shortparallel") -> 16#02225;
entity("shy") -> 16#000AD;
entity("sigma") -> 16#003C3;
entity("sigmaf") -> 16#003C2;
entity("sigmav") -> 16#003C2;
entity("sim") -> 16#0223C;
entity("simdot") -> 16#02A6A;
entity("sime") -> 16#02243;
entity("simeq") -> 16#02243;
entity("simg") -> 16#02A9E;
entity("simgE") -> 16#02AA0;
entity("siml") -> 16#02A9D;
entity("simlE") -> 16#02A9F;
entity("simne") -> 16#02246;
entity("simplus") -> 16#02A24;
entity("simrarr") -> 16#02972;
entity("slarr") -> 16#02190;
entity("smallsetminus") -> 16#02216;
entity("smashp") -> 16#02A33;
entity("smeparsl") -> 16#029E4;
entity("smid") -> 16#02223;
entity("smile") -> 16#02323;
entity("smt") -> 16#02AAA;
entity("smte") -> 16#02AAC;
entity("smtes") -> [16#02AAC, 16#0FE00];
entity("softcy") -> 16#0044C;
entity("sol") -> 16#0002F;
entity("solb") -> 16#029C4;
entity("solbar") -> 16#0233F;
entity("sopf") -> 16#1D564;
entity("spades") -> 16#02660;
entity("spadesuit") -> 16#02660;
entity("spar") -> 16#02225;
entity("sqcap") -> 16#02293;
entity("sqcaps") -> [16#02293, 16#0FE00];
entity("sqcup") -> 16#02294;
entity("sqcups") -> [16#02294, 16#0FE00];
entity("sqsub") -> 16#0228F;
entity("sqsube") -> 16#02291;
entity("sqsubset") -> 16#0228F;
entity("sqsubseteq") -> 16#02291;
entity("sqsup") -> 16#02290;
entity("sqsupe") -> 16#02292;
entity("sqsupset") -> 16#02290;
entity("sqsupseteq") -> 16#02292;
entity("squ") -> 16#025A1;
entity("square") -> 16#025A1;
entity("squarf") -> 16#025AA;
entity("squf") -> 16#025AA;
entity("srarr") -> 16#02192;
entity("sscr") -> 16#1D4C8;
entity("ssetmn") -> 16#02216;
entity("ssmile") -> 16#02323;
entity("sstarf") -> 16#022C6;
entity("star") -> 16#02606;
entity("starf") -> 16#02605;
entity("straightepsilon") -> 16#003F5;
entity("straightphi") -> 16#003D5;
entity("strns") -> 16#000AF;
entity("sub") -> 16#02282;
entity("subE") -> 16#02AC5;
entity("subdot") -> 16#02ABD;
entity("sube") -> 16#02286;
entity("subedot") -> 16#02AC3;
entity("submult") -> 16#02AC1;
entity("subnE") -> 16#02ACB;
entity("subne") -> 16#0228A;
entity("subplus") -> 16#02ABF;
entity("subrarr") -> 16#02979;
entity("subset") -> 16#02282;
entity("subseteq") -> 16#02286;
entity("subseteqq") -> 16#02AC5;
entity("subsetneq") -> 16#0228A;
entity("subsetneqq") -> 16#02ACB;
entity("subsim") -> 16#02AC7;
entity("subsub") -> 16#02AD5;
entity("subsup") -> 16#02AD3;
entity("succ") -> 16#0227B;
entity("succapprox") -> 16#02AB8;
entity("succcurlyeq") -> 16#0227D;
entity("succeq") -> 16#02AB0;
entity("succnapprox") -> 16#02ABA;
entity("succneqq") -> 16#02AB6;
entity("succnsim") -> 16#022E9;
entity("succsim") -> 16#0227F;
entity("sum") -> 16#02211;
entity("sung") -> 16#0266A;
entity("sup") -> 16#02283;
entity("sup1") -> 16#000B9;
entity("sup2") -> 16#000B2;
entity("sup3") -> 16#000B3;
entity("supE") -> 16#02AC6;
entity("supdot") -> 16#02ABE;
entity("supdsub") -> 16#02AD8;
entity("supe") -> 16#02287;
entity("supedot") -> 16#02AC4;
entity("suphsol") -> 16#027C9;
entity("suphsub") -> 16#02AD7;
entity("suplarr") -> 16#0297B;
entity("supmult") -> 16#02AC2;
entity("supnE") -> 16#02ACC;
entity("supne") -> 16#0228B;
entity("supplus") -> 16#02AC0;
entity("supset") -> 16#02283;
entity("supseteq") -> 16#02287;
entity("supseteqq") -> 16#02AC6;
entity("supsetneq") -> 16#0228B;
entity("supsetneqq") -> 16#02ACC;
entity("supsim") -> 16#02AC8;
entity("supsub") -> 16#02AD4;
entity("supsup") -> 16#02AD6;
entity("swArr") -> 16#021D9;
entity("swarhk") -> 16#02926;
entity("swarr") -> 16#02199;
entity("swarrow") -> 16#02199;
entity("swnwar") -> 16#0292A;
entity("szlig") -> 16#000DF;
entity("target") -> 16#02316;
entity("tau") -> 16#003C4;
entity("tbrk") -> 16#023B4;
entity("tcaron") -> 16#00165;
entity("tcedil") -> 16#00163;
entity("tcy") -> 16#00442;
entity("tdot") -> 16#020DB;
entity("telrec") -> 16#02315;
entity("tfr") -> 16#1D531;
entity("there4") -> 16#02234;
entity("therefore") -> 16#02234;
entity("theta") -> 16#003B8;
entity("thetasym") -> 16#003D1;
entity("thetav") -> 16#003D1;
entity("thickapprox") -> 16#02248;
entity("thicksim") -> 16#0223C;
entity("thinsp") -> 16#02009;
entity("thkap") -> 16#02248;
entity("thksim") -> 16#0223C;
entity("thorn") -> 16#000FE;
entity("tilde") -> 16#002DC;
entity("times") -> 16#000D7;
entity("timesb") -> 16#022A0;
entity("timesbar") -> 16#02A31;
entity("timesd") -> 16#02A30;
entity("tint") -> 16#0222D;
entity("toea") -> 16#02928;
entity("top") -> 16#022A4;
entity("topbot") -> 16#02336;
entity("topcir") -> 16#02AF1;
entity("topf") -> 16#1D565;
entity("topfork") -> 16#02ADA;
entity("tosa") -> 16#02929;
entity("tprime") -> 16#02034;
entity("trade") -> 16#02122;
entity("triangle") -> 16#025B5;
entity("triangledown") -> 16#025BF;
entity("triangleleft") -> 16#025C3;
entity("trianglelefteq") -> 16#022B4;
entity("triangleq") -> 16#0225C;
entity("triangleright") -> 16#025B9;
entity("trianglerighteq") -> 16#022B5;
entity("tridot") -> 16#025EC;
entity("trie") -> 16#0225C;
entity("triminus") -> 16#02A3A;
entity("triplus") -> 16#02A39;
entity("trisb") -> 16#029CD;
entity("tritime") -> 16#02A3B;
entity("trpezium") -> 16#023E2;
entity("tscr") -> 16#1D4C9;
entity("tscy") -> 16#00446;
entity("tshcy") -> 16#0045B;
entity("tstrok") -> 16#00167;
entity("twixt") -> 16#0226C;
entity("twoheadleftarrow") -> 16#0219E;
entity("twoheadrightarrow") -> 16#021A0;
entity("uArr") -> 16#021D1;
entity("uHar") -> 16#02963;
entity("uacute") -> 16#000FA;
entity("uarr") -> 16#02191;
entity("ubrcy") -> 16#0045E;
entity("ubreve") -> 16#0016D;
entity("ucirc") -> 16#000FB;
entity("ucy") -> 16#00443;
entity("udarr") -> 16#021C5;
entity("udblac") -> 16#00171;
entity("udhar") -> 16#0296E;
entity("ufisht") -> 16#0297E;
entity("ufr") -> 16#1D532;
entity("ugrave") -> 16#000F9;
entity("uharl") -> 16#021BF;
entity("uharr") -> 16#021BE;
entity("uhblk") -> 16#02580;
entity("ulcorn") -> 16#0231C;
entity("ulcorner") -> 16#0231C;
entity("ulcrop") -> 16#0230F;
entity("ultri") -> 16#025F8;
entity("umacr") -> 16#0016B;
entity("uml") -> 16#000A8;
entity("uogon") -> 16#00173;
entity("uopf") -> 16#1D566;
entity("uparrow") -> 16#02191;
entity("updownarrow") -> 16#02195;
entity("upharpoonleft") -> 16#021BF;
entity("upharpoonright") -> 16#021BE;
entity("uplus") -> 16#0228E;
entity("upsi") -> 16#003C5;
entity("upsih") -> 16#003D2;
entity("upsilon") -> 16#003C5;
entity("upuparrows") -> 16#021C8;
entity("urcorn") -> 16#0231D;
entity("urcorner") -> 16#0231D;
entity("urcrop") -> 16#0230E;
entity("uring") -> 16#0016F;
entity("urtri") -> 16#025F9;
entity("uscr") -> 16#1D4CA;
entity("utdot") -> 16#022F0;
entity("utilde") -> 16#00169;
entity("utri") -> 16#025B5;
entity("utrif") -> 16#025B4;
entity("uuarr") -> 16#021C8;
entity("uuml") -> 16#000FC;
entity("uwangle") -> 16#029A7;
entity("vArr") -> 16#021D5;
entity("vBar") -> 16#02AE8;
entity("vBarv") -> 16#02AE9;
entity("vDash") -> 16#022A8;
entity("vangrt") -> 16#0299C;
entity("varepsilon") -> 16#003F5;
entity("varkappa") -> 16#003F0;
entity("varnothing") -> 16#02205;
entity("varphi") -> 16#003D5;
entity("varpi") -> 16#003D6;
entity("varpropto") -> 16#0221D;
entity("varr") -> 16#02195;
entity("varrho") -> 16#003F1;
entity("varsigma") -> 16#003C2;
entity("varsubsetneq") -> [16#0228A, 16#0FE00];
entity("varsubsetneqq") -> [16#02ACB, 16#0FE00];
entity("varsupsetneq") -> [16#0228B, 16#0FE00];
entity("varsupsetneqq") -> [16#02ACC, 16#0FE00];
entity("vartheta") -> 16#003D1;
entity("vartriangleleft") -> 16#022B2;
entity("vartriangleright") -> 16#022B3;
entity("vcy") -> 16#00432;
entity("vdash") -> 16#022A2;
entity("vee") -> 16#02228;
entity("veebar") -> 16#022BB;
entity("veeeq") -> 16#0225A;
entity("vellip") -> 16#022EE;
entity("verbar") -> 16#0007C;
entity("vert") -> 16#0007C;
entity("vfr") -> 16#1D533;
entity("vltri") -> 16#022B2;
entity("vnsub") -> [16#02282, 16#020D2];
entity("vnsup") -> [16#02283, 16#020D2];
entity("vopf") -> 16#1D567;
entity("vprop") -> 16#0221D;
entity("vrtri") -> 16#022B3;
entity("vscr") -> 16#1D4CB;
entity("vsubnE") -> [16#02ACB, 16#0FE00];
entity("vsubne") -> [16#0228A, 16#0FE00];
entity("vsupnE") -> [16#02ACC, 16#0FE00];
entity("vsupne") -> [16#0228B, 16#0FE00];
entity("vzigzag") -> 16#0299A;
entity("wcirc") -> 16#00175;
entity("wedbar") -> 16#02A5F;
entity("wedge") -> 16#02227;
entity("wedgeq") -> 16#02259;
entity("weierp") -> 16#02118;
entity("wfr") -> 16#1D534;
entity("wopf") -> 16#1D568;
entity("wp") -> 16#02118;
entity("wr") -> 16#02240;
entity("wreath") -> 16#02240;
entity("wscr") -> 16#1D4CC;
entity("xcap") -> 16#022C2;
entity("xcirc") -> 16#025EF;
entity("xcup") -> 16#022C3;
entity("xdtri") -> 16#025BD;
entity("xfr") -> 16#1D535;
entity("xhArr") -> 16#027FA;
entity("xharr") -> 16#027F7;
entity("xi") -> 16#003BE;
entity("xlArr") -> 16#027F8;
entity("xlarr") -> 16#027F5;
entity("xmap") -> 16#027FC;
entity("xnis") -> 16#022FB;
entity("xodot") -> 16#02A00;
entity("xopf") -> 16#1D569;
entity("xoplus") -> 16#02A01;
entity("xotime") -> 16#02A02;
entity("xrArr") -> 16#027F9;
entity("xrarr") -> 16#027F6;
entity("xscr") -> 16#1D4CD;
entity("xsqcup") -> 16#02A06;
entity("xuplus") -> 16#02A04;
entity("xutri") -> 16#025B3;
entity("xvee") -> 16#022C1;
entity("xwedge") -> 16#022C0;
entity("yacute") -> 16#000FD;
entity("yacy") -> 16#0044F;
entity("ycirc") -> 16#00177;
entity("ycy") -> 16#0044B;
entity("yen") -> 16#000A5;
entity("yfr") -> 16#1D536;
entity("yicy") -> 16#00457;
entity("yopf") -> 16#1D56A;
entity("yscr") -> 16#1D4CE;
entity("yucy") -> 16#0044E;
entity("yuml") -> 16#000FF;
entity("zacute") -> 16#0017A;
entity("zcaron") -> 16#0017E;
entity("zcy") -> 16#00437;
entity("zdot") -> 16#0017C;
entity("zeetrf") -> 16#02128;
entity("zeta") -> 16#003B6;
entity("zfr") -> 16#1D537;
entity("zhcy") -> 16#00436;
entity("zigrarr") -> 16#021DD;
entity("zopf") -> 16#1D56B;
entity("zscr") -> 16#1D4CF;
entity("zwj") -> 16#0200D;
entity("zwnj") -> 16#0200C;
entity(_) -> undefined.
%%
%% Tests
%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% @doc Round-trips every clause of entity/1: pulls the full
%% name->codepoint clause table and checks each mapping individually.
exhaustive_entity_test() ->
    Pairs = dgiot_cover:clause_lookup_table(?MODULE, entity),
    [?assertEqual(Expected, entity(Name)) || {Name, Expected} <- Pairs].
%% @doc Spot-checks charref/1 across its input forms: decimal charrefs,
%% hex charrefs (mixed case), named entities, and unknown names -- for
%% both list and binary arguments.
charref_test() ->
    ?assertEqual(1234, charref("#1234")),
    ?assertEqual(255, charref("#xfF")),
    ?assertEqual(255, charref(<<"#XFf">>)),
    ?assertEqual(38, charref("amp")),
    ?assertEqual(38, charref(<<"amp">>)),
    ?assertEqual(undefined, charref("not_an_entity")),
    ?assertEqual(undefined, charref("#not_an_entity")),
    ?assertEqual(undefined, charref("#xnot_an_entity")),
    ok.
-endif.
| null | https://raw.githubusercontent.com/dgiot/dgiot/c9f2f78af71692ba532e4806621b611db2afe0c9/apps/dgiot_api/src/utils/dgiot_charref.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
External API.
@doc Convert a decimal charref, hex charref, or html entity to a unicode
codepoint, or return undefined on failure.
The input should not include an ampersand or semicolon.
Internal API.
-character-references.html
Tests
| Copyright ( c ) 2020 - 2021 DGIOT Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@doc Converts HTML 5 charrefs and entities to codepoints ( or lists of code points ) .
-module(dgiot_charref).
-author("johnliu").
-export([charref/1]).
charref("#38 " ) = 38 , charref("#x26 " ) = 38 , charref("amp " ) = 38 .
-spec charref(binary() | string()) -> integer() | [integer()] | undefined.
charref(B) when is_binary(B) ->
charref(binary_to_list(B));
charref([$#, C | L]) when C =:= $x orelse C =:= $X ->
try erlang:list_to_integer(L, 16)
catch
error:badarg -> undefined
end;
charref([$# | L]) ->
try list_to_integer(L)
catch
error:badarg -> undefined
end;
charref(L) ->
entity(L).
[ 2011 - 10 - 14 ] Generated from :
entity("AElig") -> 16#000C6;
entity("AMP") -> 16#00026;
entity("Aacute") -> 16#000C1;
entity("Abreve") -> 16#00102;
entity("Acirc") -> 16#000C2;
entity("Acy") -> 16#00410;
entity("Afr") -> 16#1D504;
entity("Agrave") -> 16#000C0;
entity("Alpha") -> 16#00391;
entity("Amacr") -> 16#00100;
entity("And") -> 16#02A53;
entity("Aogon") -> 16#00104;
entity("Aopf") -> 16#1D538;
entity("ApplyFunction") -> 16#02061;
entity("Aring") -> 16#000C5;
entity("Ascr") -> 16#1D49C;
entity("Assign") -> 16#02254;
entity("Atilde") -> 16#000C3;
entity("Auml") -> 16#000C4;
entity("Backslash") -> 16#02216;
entity("Barv") -> 16#02AE7;
entity("Barwed") -> 16#02306;
entity("Bcy") -> 16#00411;
entity("Because") -> 16#02235;
entity("Bernoullis") -> 16#0212C;
entity("Beta") -> 16#00392;
entity("Bfr") -> 16#1D505;
entity("Bopf") -> 16#1D539;
entity("Breve") -> 16#002D8;
entity("Bscr") -> 16#0212C;
entity("Bumpeq") -> 16#0224E;
entity("CHcy") -> 16#00427;
entity("COPY") -> 16#000A9;
entity("Cacute") -> 16#00106;
entity("Cap") -> 16#022D2;
entity("CapitalDifferentialD") -> 16#02145;
entity("Cayleys") -> 16#0212D;
entity("Ccaron") -> 16#0010C;
entity("Ccedil") -> 16#000C7;
entity("Ccirc") -> 16#00108;
entity("Cconint") -> 16#02230;
entity("Cdot") -> 16#0010A;
entity("Cedilla") -> 16#000B8;
entity("CenterDot") -> 16#000B7;
entity("Cfr") -> 16#0212D;
entity("Chi") -> 16#003A7;
entity("CircleDot") -> 16#02299;
entity("CircleMinus") -> 16#02296;
entity("CirclePlus") -> 16#02295;
entity("CircleTimes") -> 16#02297;
entity("ClockwiseContourIntegral") -> 16#02232;
entity("CloseCurlyDoubleQuote") -> 16#0201D;
entity("CloseCurlyQuote") -> 16#02019;
entity("Colon") -> 16#02237;
entity("Colone") -> 16#02A74;
entity("Congruent") -> 16#02261;
entity("Conint") -> 16#0222F;
entity("ContourIntegral") -> 16#0222E;
entity("Copf") -> 16#02102;
entity("Coproduct") -> 16#02210;
entity("CounterClockwiseContourIntegral") -> 16#02233;
entity("Cross") -> 16#02A2F;
entity("Cscr") -> 16#1D49E;
entity("Cup") -> 16#022D3;
entity("CupCap") -> 16#0224D;
entity("DD") -> 16#02145;
entity("DDotrahd") -> 16#02911;
entity("DJcy") -> 16#00402;
entity("DScy") -> 16#00405;
entity("DZcy") -> 16#0040F;
entity("Dagger") -> 16#02021;
entity("Darr") -> 16#021A1;
entity("Dashv") -> 16#02AE4;
entity("Dcaron") -> 16#0010E;
entity("Dcy") -> 16#00414;
entity("Del") -> 16#02207;
entity("Delta") -> 16#00394;
entity("Dfr") -> 16#1D507;
entity("DiacriticalAcute") -> 16#000B4;
entity("DiacriticalDot") -> 16#002D9;
entity("DiacriticalDoubleAcute") -> 16#002DD;
entity("DiacriticalGrave") -> 16#00060;
entity("DiacriticalTilde") -> 16#002DC;
entity("Diamond") -> 16#022C4;
entity("DifferentialD") -> 16#02146;
entity("Dopf") -> 16#1D53B;
entity("Dot") -> 16#000A8;
entity("DotDot") -> 16#020DC;
entity("DotEqual") -> 16#02250;
entity("DoubleContourIntegral") -> 16#0222F;
entity("DoubleDot") -> 16#000A8;
entity("DoubleDownArrow") -> 16#021D3;
entity("DoubleLeftArrow") -> 16#021D0;
entity("DoubleLeftRightArrow") -> 16#021D4;
entity("DoubleLeftTee") -> 16#02AE4;
entity("DoubleLongLeftArrow") -> 16#027F8;
entity("DoubleLongLeftRightArrow") -> 16#027FA;
entity("DoubleLongRightArrow") -> 16#027F9;
entity("DoubleRightArrow") -> 16#021D2;
entity("DoubleRightTee") -> 16#022A8;
entity("DoubleUpArrow") -> 16#021D1;
entity("DoubleUpDownArrow") -> 16#021D5;
entity("DoubleVerticalBar") -> 16#02225;
entity("DownArrow") -> 16#02193;
entity("DownArrowBar") -> 16#02913;
entity("DownArrowUpArrow") -> 16#021F5;
entity("DownBreve") -> 16#00311;
entity("DownLeftRightVector") -> 16#02950;
entity("DownLeftTeeVector") -> 16#0295E;
entity("DownLeftVector") -> 16#021BD;
entity("DownLeftVectorBar") -> 16#02956;
entity("DownRightTeeVector") -> 16#0295F;
entity("DownRightVector") -> 16#021C1;
entity("DownRightVectorBar") -> 16#02957;
entity("DownTee") -> 16#022A4;
entity("DownTeeArrow") -> 16#021A7;
entity("Downarrow") -> 16#021D3;
entity("Dscr") -> 16#1D49F;
entity("Dstrok") -> 16#00110;
entity("ENG") -> 16#0014A;
entity("ETH") -> 16#000D0;
entity("Eacute") -> 16#000C9;
entity("Ecaron") -> 16#0011A;
entity("Ecirc") -> 16#000CA;
entity("Ecy") -> 16#0042D;
entity("Edot") -> 16#00116;
entity("Efr") -> 16#1D508;
entity("Egrave") -> 16#000C8;
entity("Element") -> 16#02208;
entity("Emacr") -> 16#00112;
entity("EmptySmallSquare") -> 16#025FB;
entity("EmptyVerySmallSquare") -> 16#025AB;
entity("Eogon") -> 16#00118;
entity("Eopf") -> 16#1D53C;
entity("Epsilon") -> 16#00395;
entity("Equal") -> 16#02A75;
entity("EqualTilde") -> 16#02242;
entity("Equilibrium") -> 16#021CC;
entity("Escr") -> 16#02130;
entity("Esim") -> 16#02A73;
entity("Eta") -> 16#00397;
entity("Euml") -> 16#000CB;
entity("Exists") -> 16#02203;
entity("ExponentialE") -> 16#02147;
entity("Fcy") -> 16#00424;
entity("Ffr") -> 16#1D509;
entity("FilledSmallSquare") -> 16#025FC;
entity("FilledVerySmallSquare") -> 16#025AA;
entity("Fopf") -> 16#1D53D;
entity("ForAll") -> 16#02200;
entity("Fouriertrf") -> 16#02131;
entity("Fscr") -> 16#02131;
entity("GJcy") -> 16#00403;
entity("GT") -> 16#0003E;
entity("Gamma") -> 16#00393;
entity("Gammad") -> 16#003DC;
entity("Gbreve") -> 16#0011E;
entity("Gcedil") -> 16#00122;
entity("Gcirc") -> 16#0011C;
entity("Gcy") -> 16#00413;
entity("Gdot") -> 16#00120;
entity("Gfr") -> 16#1D50A;
entity("Gg") -> 16#022D9;
entity("Gopf") -> 16#1D53E;
entity("GreaterEqual") -> 16#02265;
entity("GreaterEqualLess") -> 16#022DB;
entity("GreaterFullEqual") -> 16#02267;
entity("GreaterGreater") -> 16#02AA2;
entity("GreaterLess") -> 16#02277;
entity("GreaterSlantEqual") -> 16#02A7E;
entity("GreaterTilde") -> 16#02273;
entity("Gscr") -> 16#1D4A2;
entity("Gt") -> 16#0226B;
entity("HARDcy") -> 16#0042A;
entity("Hacek") -> 16#002C7;
entity("Hat") -> 16#0005E;
entity("Hcirc") -> 16#00124;
entity("Hfr") -> 16#0210C;
entity("HilbertSpace") -> 16#0210B;
entity("Hopf") -> 16#0210D;
entity("HorizontalLine") -> 16#02500;
entity("Hscr") -> 16#0210B;
entity("Hstrok") -> 16#00126;
entity("HumpDownHump") -> 16#0224E;
entity("HumpEqual") -> 16#0224F;
entity("IEcy") -> 16#00415;
entity("IJlig") -> 16#00132;
entity("IOcy") -> 16#00401;
entity("Iacute") -> 16#000CD;
entity("Icirc") -> 16#000CE;
entity("Icy") -> 16#00418;
entity("Idot") -> 16#00130;
entity("Ifr") -> 16#02111;
entity("Igrave") -> 16#000CC;
entity("Im") -> 16#02111;
entity("Imacr") -> 16#0012A;
entity("ImaginaryI") -> 16#02148;
entity("Implies") -> 16#021D2;
entity("Int") -> 16#0222C;
entity("Integral") -> 16#0222B;
entity("Intersection") -> 16#022C2;
entity("InvisibleComma") -> 16#02063;
entity("InvisibleTimes") -> 16#02062;
entity("Iogon") -> 16#0012E;
entity("Iopf") -> 16#1D540;
entity("Iota") -> 16#00399;
entity("Iscr") -> 16#02110;
entity("Itilde") -> 16#00128;
entity("Iukcy") -> 16#00406;
entity("Iuml") -> 16#000CF;
entity("Jcirc") -> 16#00134;
entity("Jcy") -> 16#00419;
entity("Jfr") -> 16#1D50D;
entity("Jopf") -> 16#1D541;
entity("Jscr") -> 16#1D4A5;
entity("Jsercy") -> 16#00408;
entity("Jukcy") -> 16#00404;
entity("KHcy") -> 16#00425;
entity("KJcy") -> 16#0040C;
entity("Kappa") -> 16#0039A;
entity("Kcedil") -> 16#00136;
entity("Kcy") -> 16#0041A;
entity("Kfr") -> 16#1D50E;
entity("Kopf") -> 16#1D542;
entity("Kscr") -> 16#1D4A6;
entity("LJcy") -> 16#00409;
entity("LT") -> 16#0003C;
entity("Lacute") -> 16#00139;
entity("Lambda") -> 16#0039B;
entity("Lang") -> 16#027EA;
entity("Laplacetrf") -> 16#02112;
entity("Larr") -> 16#0219E;
entity("Lcaron") -> 16#0013D;
entity("Lcedil") -> 16#0013B;
entity("Lcy") -> 16#0041B;
entity("LeftAngleBracket") -> 16#027E8;
entity("LeftArrow") -> 16#02190;
entity("LeftArrowBar") -> 16#021E4;
entity("LeftArrowRightArrow") -> 16#021C6;
entity("LeftCeiling") -> 16#02308;
entity("LeftDoubleBracket") -> 16#027E6;
entity("LeftDownTeeVector") -> 16#02961;
entity("LeftDownVector") -> 16#021C3;
entity("LeftDownVectorBar") -> 16#02959;
entity("LeftFloor") -> 16#0230A;
entity("LeftRightArrow") -> 16#02194;
entity("LeftRightVector") -> 16#0294E;
entity("LeftTee") -> 16#022A3;
entity("LeftTeeArrow") -> 16#021A4;
entity("LeftTeeVector") -> 16#0295A;
entity("LeftTriangle") -> 16#022B2;
entity("LeftTriangleBar") -> 16#029CF;
entity("LeftTriangleEqual") -> 16#022B4;
entity("LeftUpDownVector") -> 16#02951;
entity("LeftUpTeeVector") -> 16#02960;
entity("LeftUpVector") -> 16#021BF;
entity("LeftUpVectorBar") -> 16#02958;
entity("LeftVector") -> 16#021BC;
entity("LeftVectorBar") -> 16#02952;
entity("Leftarrow") -> 16#021D0;
entity("Leftrightarrow") -> 16#021D4;
entity("LessEqualGreater") -> 16#022DA;
entity("LessFullEqual") -> 16#02266;
entity("LessGreater") -> 16#02276;
entity("LessLess") -> 16#02AA1;
entity("LessSlantEqual") -> 16#02A7D;
entity("LessTilde") -> 16#02272;
entity("Lfr") -> 16#1D50F;
entity("Ll") -> 16#022D8;
entity("Lleftarrow") -> 16#021DA;
entity("Lmidot") -> 16#0013F;
entity("LongLeftArrow") -> 16#027F5;
entity("LongLeftRightArrow") -> 16#027F7;
entity("LongRightArrow") -> 16#027F6;
entity("Longleftarrow") -> 16#027F8;
entity("Longleftrightarrow") -> 16#027FA;
entity("Longrightarrow") -> 16#027F9;
entity("Lopf") -> 16#1D543;
entity("LowerLeftArrow") -> 16#02199;
entity("LowerRightArrow") -> 16#02198;
entity("Lscr") -> 16#02112;
entity("Lsh") -> 16#021B0;
entity("Lstrok") -> 16#00141;
entity("Lt") -> 16#0226A;
entity("Map") -> 16#02905;
entity("Mcy") -> 16#0041C;
entity("MediumSpace") -> 16#0205F;
entity("Mellintrf") -> 16#02133;
entity("Mfr") -> 16#1D510;
entity("MinusPlus") -> 16#02213;
entity("Mopf") -> 16#1D544;
entity("Mscr") -> 16#02133;
entity("Mu") -> 16#0039C;
entity("NJcy") -> 16#0040A;
entity("Nacute") -> 16#00143;
entity("Ncaron") -> 16#00147;
entity("Ncedil") -> 16#00145;
entity("Ncy") -> 16#0041D;
entity("NegativeMediumSpace") -> 16#0200B;
entity("NegativeThickSpace") -> 16#0200B;
entity("NegativeThinSpace") -> 16#0200B;
entity("NegativeVeryThinSpace") -> 16#0200B;
entity("NestedGreaterGreater") -> 16#0226B;
entity("NestedLessLess") -> 16#0226A;
entity("NewLine") -> 16#0000A;
entity("Nfr") -> 16#1D511;
entity("NoBreak") -> 16#02060;
entity("NonBreakingSpace") -> 16#000A0;
entity("Nopf") -> 16#02115;
entity("Not") -> 16#02AEC;
entity("NotCongruent") -> 16#02262;
entity("NotCupCap") -> 16#0226D;
entity("NotDoubleVerticalBar") -> 16#02226;
entity("NotElement") -> 16#02209;
entity("NotEqual") -> 16#02260;
entity("NotEqualTilde") -> [16#02242, 16#00338];
entity("NotExists") -> 16#02204;
entity("NotGreater") -> 16#0226F;
entity("NotGreaterEqual") -> 16#02271;
entity("NotGreaterFullEqual") -> [16#02267, 16#00338];
entity("NotGreaterGreater") -> [16#0226B, 16#00338];
entity("NotGreaterLess") -> 16#02279;
entity("NotGreaterSlantEqual") -> [16#02A7E, 16#00338];
entity("NotGreaterTilde") -> 16#02275;
entity("NotHumpDownHump") -> [16#0224E, 16#00338];
entity("NotHumpEqual") -> [16#0224F, 16#00338];
entity("NotLeftTriangle") -> 16#022EA;
entity("NotLeftTriangleBar") -> [16#029CF, 16#00338];
entity("NotLeftTriangleEqual") -> 16#022EC;
entity("NotLess") -> 16#0226E;
entity("NotLessEqual") -> 16#02270;
entity("NotLessGreater") -> 16#02278;
entity("NotLessLess") -> [16#0226A, 16#00338];
entity("NotLessSlantEqual") -> [16#02A7D, 16#00338];
entity("NotLessTilde") -> 16#02274;
entity("NotNestedGreaterGreater") -> [16#02AA2, 16#00338];
entity("NotNestedLessLess") -> [16#02AA1, 16#00338];
entity("NotPrecedes") -> 16#02280;
entity("NotPrecedesEqual") -> [16#02AAF, 16#00338];
entity("NotPrecedesSlantEqual") -> 16#022E0;
entity("NotReverseElement") -> 16#0220C;
entity("NotRightTriangle") -> 16#022EB;
entity("NotRightTriangleBar") -> [16#029D0, 16#00338];
entity("NotRightTriangleEqual") -> 16#022ED;
entity("NotSquareSubset") -> [16#0228F, 16#00338];
entity("NotSquareSubsetEqual") -> 16#022E2;
entity("NotSquareSuperset") -> [16#02290, 16#00338];
entity("NotSquareSupersetEqual") -> 16#022E3;
entity("NotSubset") -> [16#02282, 16#020D2];
entity("NotSubsetEqual") -> 16#02288;
entity("NotSucceeds") -> 16#02281;
entity("NotSucceedsEqual") -> [16#02AB0, 16#00338];
entity("NotSucceedsSlantEqual") -> 16#022E1;
entity("NotSucceedsTilde") -> [16#0227F, 16#00338];
entity("NotSuperset") -> [16#02283, 16#020D2];
entity("NotSupersetEqual") -> 16#02289;
entity("NotTilde") -> 16#02241;
entity("NotTildeEqual") -> 16#02244;
entity("NotTildeFullEqual") -> 16#02247;
entity("NotTildeTilde") -> 16#02249;
entity("NotVerticalBar") -> 16#02224;
entity("Nscr") -> 16#1D4A9;
entity("Ntilde") -> 16#000D1;
entity("Nu") -> 16#0039D;
entity("OElig") -> 16#00152;
entity("Oacute") -> 16#000D3;
entity("Ocirc") -> 16#000D4;
entity("Ocy") -> 16#0041E;
entity("Odblac") -> 16#00150;
entity("Ofr") -> 16#1D512;
entity("Ograve") -> 16#000D2;
entity("Omacr") -> 16#0014C;
entity("Omega") -> 16#003A9;
entity("Omicron") -> 16#0039F;
entity("Oopf") -> 16#1D546;
entity("OpenCurlyDoubleQuote") -> 16#0201C;
entity("OpenCurlyQuote") -> 16#02018;
entity("Or") -> 16#02A54;
entity("Oscr") -> 16#1D4AA;
entity("Oslash") -> 16#000D8;
entity("Otilde") -> 16#000D5;
entity("Otimes") -> 16#02A37;
entity("Ouml") -> 16#000D6;
entity("OverBar") -> 16#0203E;
entity("OverBrace") -> 16#023DE;
entity("OverBracket") -> 16#023B4;
entity("OverParenthesis") -> 16#023DC;
entity("PartialD") -> 16#02202;
entity("Pcy") -> 16#0041F;
entity("Pfr") -> 16#1D513;
entity("Phi") -> 16#003A6;
entity("Pi") -> 16#003A0;
entity("PlusMinus") -> 16#000B1;
entity("Poincareplane") -> 16#0210C;
entity("Popf") -> 16#02119;
entity("Pr") -> 16#02ABB;
entity("Precedes") -> 16#0227A;
entity("PrecedesEqual") -> 16#02AAF;
entity("PrecedesSlantEqual") -> 16#0227C;
entity("PrecedesTilde") -> 16#0227E;
entity("Prime") -> 16#02033;
entity("Product") -> 16#0220F;
entity("Proportion") -> 16#02237;
entity("Proportional") -> 16#0221D;
entity("Pscr") -> 16#1D4AB;
entity("Psi") -> 16#003A8;
entity("QUOT") -> 16#00022;
entity("Qfr") -> 16#1D514;
entity("Qopf") -> 16#0211A;
entity("Qscr") -> 16#1D4AC;
entity("RBarr") -> 16#02910;
entity("REG") -> 16#000AE;
entity("Racute") -> 16#00154;
entity("Rang") -> 16#027EB;
entity("Rarr") -> 16#021A0;
entity("Rarrtl") -> 16#02916;
entity("Rcaron") -> 16#00158;
entity("Rcedil") -> 16#00156;
entity("Rcy") -> 16#00420;
entity("Re") -> 16#0211C;
entity("ReverseElement") -> 16#0220B;
entity("ReverseEquilibrium") -> 16#021CB;
entity("ReverseUpEquilibrium") -> 16#0296F;
entity("Rfr") -> 16#0211C;
entity("Rho") -> 16#003A1;
entity("RightAngleBracket") -> 16#027E9;
entity("RightArrow") -> 16#02192;
entity("RightArrowBar") -> 16#021E5;
entity("RightArrowLeftArrow") -> 16#021C4;
entity("RightCeiling") -> 16#02309;
entity("RightDoubleBracket") -> 16#027E7;
entity("RightDownTeeVector") -> 16#0295D;
entity("RightDownVector") -> 16#021C2;
entity("RightDownVectorBar") -> 16#02955;
entity("RightFloor") -> 16#0230B;
entity("RightTee") -> 16#022A2;
entity("RightTeeArrow") -> 16#021A6;
entity("RightTeeVector") -> 16#0295B;
entity("RightTriangle") -> 16#022B3;
entity("RightTriangleBar") -> 16#029D0;
entity("RightTriangleEqual") -> 16#022B5;
entity("RightUpDownVector") -> 16#0294F;
entity("RightUpTeeVector") -> 16#0295C;
entity("RightUpVector") -> 16#021BE;
entity("RightUpVectorBar") -> 16#02954;
entity("RightVector") -> 16#021C0;
entity("RightVectorBar") -> 16#02953;
entity("Rightarrow") -> 16#021D2;
entity("Ropf") -> 16#0211D;
entity("RoundImplies") -> 16#02970;
entity("Rrightarrow") -> 16#021DB;
entity("Rscr") -> 16#0211B;
entity("Rsh") -> 16#021B1;
entity("RuleDelayed") -> 16#029F4;
entity("SHCHcy") -> 16#00429;
entity("SHcy") -> 16#00428;
entity("SOFTcy") -> 16#0042C;
entity("Sacute") -> 16#0015A;
entity("Sc") -> 16#02ABC;
entity("Scaron") -> 16#00160;
entity("Scedil") -> 16#0015E;
entity("Scirc") -> 16#0015C;
entity("Scy") -> 16#00421;
entity("Sfr") -> 16#1D516;
entity("ShortDownArrow") -> 16#02193;
entity("ShortLeftArrow") -> 16#02190;
entity("ShortRightArrow") -> 16#02192;
entity("ShortUpArrow") -> 16#02191;
entity("Sigma") -> 16#003A3;
entity("SmallCircle") -> 16#02218;
entity("Sopf") -> 16#1D54A;
entity("Sqrt") -> 16#0221A;
entity("Square") -> 16#025A1;
entity("SquareIntersection") -> 16#02293;
entity("SquareSubset") -> 16#0228F;
entity("SquareSubsetEqual") -> 16#02291;
entity("SquareSuperset") -> 16#02290;
entity("SquareSupersetEqual") -> 16#02292;
entity("SquareUnion") -> 16#02294;
entity("Sscr") -> 16#1D4AE;
entity("Star") -> 16#022C6;
entity("Sub") -> 16#022D0;
entity("Subset") -> 16#022D0;
entity("SubsetEqual") -> 16#02286;
entity("Succeeds") -> 16#0227B;
entity("SucceedsEqual") -> 16#02AB0;
entity("SucceedsSlantEqual") -> 16#0227D;
entity("SucceedsTilde") -> 16#0227F;
entity("SuchThat") -> 16#0220B;
entity("Sum") -> 16#02211;
entity("Sup") -> 16#022D1;
entity("Superset") -> 16#02283;
entity("SupersetEqual") -> 16#02287;
entity("Supset") -> 16#022D1;
entity("THORN") -> 16#000DE;
entity("TRADE") -> 16#02122;
entity("TSHcy") -> 16#0040B;
entity("TScy") -> 16#00426;
entity("Tab") -> 16#00009;
entity("Tau") -> 16#003A4;
entity("Tcaron") -> 16#00164;
entity("Tcedil") -> 16#00162;
entity("Tcy") -> 16#00422;
entity("Tfr") -> 16#1D517;
entity("Therefore") -> 16#02234;
entity("Theta") -> 16#00398;
entity("ThickSpace") -> [16#0205F, 16#0200A];
entity("ThinSpace") -> 16#02009;
entity("Tilde") -> 16#0223C;
entity("TildeEqual") -> 16#02243;
entity("TildeFullEqual") -> 16#02245;
entity("TildeTilde") -> 16#02248;
entity("Topf") -> 16#1D54B;
entity("TripleDot") -> 16#020DB;
entity("Tscr") -> 16#1D4AF;
entity("Tstrok") -> 16#00166;
entity("Uacute") -> 16#000DA;
entity("Uarr") -> 16#0219F;
entity("Uarrocir") -> 16#02949;
entity("Ubrcy") -> 16#0040E;
entity("Ubreve") -> 16#0016C;
entity("Ucirc") -> 16#000DB;
entity("Ucy") -> 16#00423;
entity("Udblac") -> 16#00170;
entity("Ufr") -> 16#1D518;
entity("Ugrave") -> 16#000D9;
entity("Umacr") -> 16#0016A;
entity("UnderBar") -> 16#0005F;
entity("UnderBrace") -> 16#023DF;
entity("UnderBracket") -> 16#023B5;
entity("UnderParenthesis") -> 16#023DD;
entity("Union") -> 16#022C3;
entity("UnionPlus") -> 16#0228E;
entity("Uogon") -> 16#00172;
entity("Uopf") -> 16#1D54C;
entity("UpArrow") -> 16#02191;
entity("UpArrowBar") -> 16#02912;
entity("UpArrowDownArrow") -> 16#021C5;
entity("UpDownArrow") -> 16#02195;
entity("UpEquilibrium") -> 16#0296E;
entity("UpTee") -> 16#022A5;
entity("UpTeeArrow") -> 16#021A5;
entity("Uparrow") -> 16#021D1;
entity("Updownarrow") -> 16#021D5;
entity("UpperLeftArrow") -> 16#02196;
entity("UpperRightArrow") -> 16#02197;
entity("Upsi") -> 16#003D2;
entity("Upsilon") -> 16#003A5;
entity("Uring") -> 16#0016E;
entity("Uscr") -> 16#1D4B0;
entity("Utilde") -> 16#00168;
entity("Uuml") -> 16#000DC;
entity("VDash") -> 16#022AB;
entity("Vbar") -> 16#02AEB;
entity("Vcy") -> 16#00412;
entity("Vdash") -> 16#022A9;
entity("Vdashl") -> 16#02AE6;
entity("Vee") -> 16#022C1;
entity("Verbar") -> 16#02016;
entity("Vert") -> 16#02016;
entity("VerticalBar") -> 16#02223;
entity("VerticalLine") -> 16#0007C;
entity("VerticalSeparator") -> 16#02758;
entity("VerticalTilde") -> 16#02240;
entity("VeryThinSpace") -> 16#0200A;
entity("Vfr") -> 16#1D519;
entity("Vopf") -> 16#1D54D;
entity("Vscr") -> 16#1D4B1;
entity("Vvdash") -> 16#022AA;
entity("Wcirc") -> 16#00174;
entity("Wedge") -> 16#022C0;
entity("Wfr") -> 16#1D51A;
entity("Wopf") -> 16#1D54E;
entity("Wscr") -> 16#1D4B2;
entity("Xfr") -> 16#1D51B;
entity("Xi") -> 16#0039E;
entity("Xopf") -> 16#1D54F;
entity("Xscr") -> 16#1D4B3;
entity("YAcy") -> 16#0042F;
entity("YIcy") -> 16#00407;
entity("YUcy") -> 16#0042E;
entity("Yacute") -> 16#000DD;
entity("Ycirc") -> 16#00176;
entity("Ycy") -> 16#0042B;
entity("Yfr") -> 16#1D51C;
entity("Yopf") -> 16#1D550;
entity("Yscr") -> 16#1D4B4;
entity("Yuml") -> 16#00178;
entity("ZHcy") -> 16#00416;
entity("Zacute") -> 16#00179;
entity("Zcaron") -> 16#0017D;
entity("Zcy") -> 16#00417;
entity("Zdot") -> 16#0017B;
entity("ZeroWidthSpace") -> 16#0200B;
entity("Zeta") -> 16#00396;
entity("Zfr") -> 16#02128;
entity("Zopf") -> 16#02124;
entity("Zscr") -> 16#1D4B5;
entity("aacute") -> 16#000E1;
entity("abreve") -> 16#00103;
entity("ac") -> 16#0223E;
entity("acE") -> [16#0223E, 16#00333];
entity("acd") -> 16#0223F;
entity("acirc") -> 16#000E2;
entity("acute") -> 16#000B4;
entity("acy") -> 16#00430;
entity("aelig") -> 16#000E6;
entity("af") -> 16#02061;
entity("afr") -> 16#1D51E;
entity("agrave") -> 16#000E0;
entity("alefsym") -> 16#02135;
entity("aleph") -> 16#02135;
entity("alpha") -> 16#003B1;
entity("amacr") -> 16#00101;
entity("amalg") -> 16#02A3F;
entity("amp") -> 16#00026;
entity("and") -> 16#02227;
entity("andand") -> 16#02A55;
entity("andd") -> 16#02A5C;
entity("andslope") -> 16#02A58;
entity("andv") -> 16#02A5A;
entity("ang") -> 16#02220;
entity("ange") -> 16#029A4;
entity("angle") -> 16#02220;
entity("angmsd") -> 16#02221;
entity("angmsdaa") -> 16#029A8;
entity("angmsdab") -> 16#029A9;
entity("angmsdac") -> 16#029AA;
entity("angmsdad") -> 16#029AB;
entity("angmsdae") -> 16#029AC;
entity("angmsdaf") -> 16#029AD;
entity("angmsdag") -> 16#029AE;
entity("angmsdah") -> 16#029AF;
entity("angrt") -> 16#0221F;
entity("angrtvb") -> 16#022BE;
entity("angrtvbd") -> 16#0299D;
entity("angsph") -> 16#02222;
entity("angst") -> 16#000C5;
entity("angzarr") -> 16#0237C;
entity("aogon") -> 16#00105;
entity("aopf") -> 16#1D552;
entity("ap") -> 16#02248;
entity("apE") -> 16#02A70;
entity("apacir") -> 16#02A6F;
entity("ape") -> 16#0224A;
entity("apid") -> 16#0224B;
entity("apos") -> 16#00027;
entity("approx") -> 16#02248;
entity("approxeq") -> 16#0224A;
entity("aring") -> 16#000E5;
entity("ascr") -> 16#1D4B6;
entity("ast") -> 16#0002A;
entity("asymp") -> 16#02248;
entity("asympeq") -> 16#0224D;
entity("atilde") -> 16#000E3;
entity("auml") -> 16#000E4;
entity("awconint") -> 16#02233;
entity("awint") -> 16#02A11;
entity("bNot") -> 16#02AED;
entity("backcong") -> 16#0224C;
entity("backepsilon") -> 16#003F6;
entity("backprime") -> 16#02035;
entity("backsim") -> 16#0223D;
entity("backsimeq") -> 16#022CD;
entity("barvee") -> 16#022BD;
entity("barwed") -> 16#02305;
entity("barwedge") -> 16#02305;
entity("bbrk") -> 16#023B5;
entity("bbrktbrk") -> 16#023B6;
entity("bcong") -> 16#0224C;
entity("bcy") -> 16#00431;
entity("bdquo") -> 16#0201E;
entity("becaus") -> 16#02235;
entity("because") -> 16#02235;
entity("bemptyv") -> 16#029B0;
entity("bepsi") -> 16#003F6;
entity("bernou") -> 16#0212C;
entity("beta") -> 16#003B2;
entity("beth") -> 16#02136;
entity("between") -> 16#0226C;
entity("bfr") -> 16#1D51F;
entity("bigcap") -> 16#022C2;
entity("bigcirc") -> 16#025EF;
entity("bigcup") -> 16#022C3;
entity("bigodot") -> 16#02A00;
entity("bigoplus") -> 16#02A01;
entity("bigotimes") -> 16#02A02;
entity("bigsqcup") -> 16#02A06;
entity("bigstar") -> 16#02605;
entity("bigtriangledown") -> 16#025BD;
entity("bigtriangleup") -> 16#025B3;
entity("biguplus") -> 16#02A04;
entity("bigvee") -> 16#022C1;
entity("bigwedge") -> 16#022C0;
entity("bkarow") -> 16#0290D;
entity("blacklozenge") -> 16#029EB;
entity("blacksquare") -> 16#025AA;
entity("blacktriangle") -> 16#025B4;
entity("blacktriangledown") -> 16#025BE;
entity("blacktriangleleft") -> 16#025C2;
entity("blacktriangleright") -> 16#025B8;
entity("blank") -> 16#02423;
entity("blk12") -> 16#02592;
entity("blk14") -> 16#02591;
entity("blk34") -> 16#02593;
entity("block") -> 16#02588;
entity("bne") -> [16#0003D, 16#020E5];
entity("bnequiv") -> [16#02261, 16#020E5];
entity("bnot") -> 16#02310;
entity("bopf") -> 16#1D553;
entity("bot") -> 16#022A5;
entity("bottom") -> 16#022A5;
entity("bowtie") -> 16#022C8;
entity("boxDL") -> 16#02557;
entity("boxDR") -> 16#02554;
entity("boxDl") -> 16#02556;
entity("boxDr") -> 16#02553;
entity("boxH") -> 16#02550;
entity("boxHD") -> 16#02566;
entity("boxHU") -> 16#02569;
entity("boxHd") -> 16#02564;
entity("boxHu") -> 16#02567;
entity("boxUL") -> 16#0255D;
entity("boxUR") -> 16#0255A;
entity("boxUl") -> 16#0255C;
entity("boxUr") -> 16#02559;
entity("boxV") -> 16#02551;
entity("boxVH") -> 16#0256C;
entity("boxVL") -> 16#02563;
entity("boxVR") -> 16#02560;
entity("boxVh") -> 16#0256B;
entity("boxVl") -> 16#02562;
entity("boxVr") -> 16#0255F;
entity("boxbox") -> 16#029C9;
entity("boxdL") -> 16#02555;
entity("boxdR") -> 16#02552;
entity("boxdl") -> 16#02510;
entity("boxdr") -> 16#0250C;
entity("boxh") -> 16#02500;
entity("boxhD") -> 16#02565;
entity("boxhU") -> 16#02568;
entity("boxhd") -> 16#0252C;
entity("boxhu") -> 16#02534;
entity("boxminus") -> 16#0229F;
entity("boxplus") -> 16#0229E;
entity("boxtimes") -> 16#022A0;
entity("boxuL") -> 16#0255B;
entity("boxuR") -> 16#02558;
entity("boxul") -> 16#02518;
entity("boxur") -> 16#02514;
entity("boxv") -> 16#02502;
entity("boxvH") -> 16#0256A;
entity("boxvL") -> 16#02561;
entity("boxvR") -> 16#0255E;
entity("boxvh") -> 16#0253C;
entity("boxvl") -> 16#02524;
entity("boxvr") -> 16#0251C;
entity("bprime") -> 16#02035;
entity("breve") -> 16#002D8;
entity("brvbar") -> 16#000A6;
entity("bscr") -> 16#1D4B7;
entity("bsemi") -> 16#0204F;
entity("bsim") -> 16#0223D;
entity("bsime") -> 16#022CD;
entity("bsol") -> 16#0005C;
entity("bsolb") -> 16#029C5;
entity("bsolhsub") -> 16#027C8;
entity("bull") -> 16#02022;
entity("bullet") -> 16#02022;
entity("bump") -> 16#0224E;
entity("bumpE") -> 16#02AAE;
entity("bumpe") -> 16#0224F;
entity("bumpeq") -> 16#0224F;
entity("cacute") -> 16#00107;
entity("cap") -> 16#02229;
entity("capand") -> 16#02A44;
entity("capbrcup") -> 16#02A49;
entity("capcap") -> 16#02A4B;
entity("capcup") -> 16#02A47;
entity("capdot") -> 16#02A40;
entity("caps") -> [16#02229, 16#0FE00];
entity("caret") -> 16#02041;
entity("caron") -> 16#002C7;
entity("ccaps") -> 16#02A4D;
entity("ccaron") -> 16#0010D;
entity("ccedil") -> 16#000E7;
entity("ccirc") -> 16#00109;
entity("ccups") -> 16#02A4C;
entity("ccupssm") -> 16#02A50;
entity("cdot") -> 16#0010B;
entity("cedil") -> 16#000B8;
entity("cemptyv") -> 16#029B2;
entity("cent") -> 16#000A2;
entity("centerdot") -> 16#000B7;
entity("cfr") -> 16#1D520;
entity("chcy") -> 16#00447;
entity("check") -> 16#02713;
entity("checkmark") -> 16#02713;
entity("chi") -> 16#003C7;
entity("cir") -> 16#025CB;
entity("cirE") -> 16#029C3;
entity("circ") -> 16#002C6;
entity("circeq") -> 16#02257;
entity("circlearrowleft") -> 16#021BA;
entity("circlearrowright") -> 16#021BB;
entity("circledR") -> 16#000AE;
entity("circledS") -> 16#024C8;
entity("circledast") -> 16#0229B;
entity("circledcirc") -> 16#0229A;
entity("circleddash") -> 16#0229D;
entity("cire") -> 16#02257;
entity("cirfnint") -> 16#02A10;
entity("cirmid") -> 16#02AEF;
entity("cirscir") -> 16#029C2;
entity("clubs") -> 16#02663;
entity("clubsuit") -> 16#02663;
entity("colon") -> 16#0003A;
entity("colone") -> 16#02254;
entity("coloneq") -> 16#02254;
entity("comma") -> 16#0002C;
entity("commat") -> 16#00040;
entity("comp") -> 16#02201;
entity("compfn") -> 16#02218;
entity("complement") -> 16#02201;
entity("complexes") -> 16#02102;
entity("cong") -> 16#02245;
entity("congdot") -> 16#02A6D;
entity("conint") -> 16#0222E;
entity("copf") -> 16#1D554;
entity("coprod") -> 16#02210;
entity("copy") -> 16#000A9;
entity("copysr") -> 16#02117;
entity("crarr") -> 16#021B5;
entity("cross") -> 16#02717;
entity("cscr") -> 16#1D4B8;
entity("csub") -> 16#02ACF;
entity("csube") -> 16#02AD1;
entity("csup") -> 16#02AD0;
entity("csupe") -> 16#02AD2;
entity("ctdot") -> 16#022EF;
entity("cudarrl") -> 16#02938;
entity("cudarrr") -> 16#02935;
entity("cuepr") -> 16#022DE;
entity("cuesc") -> 16#022DF;
entity("cularr") -> 16#021B6;
entity("cularrp") -> 16#0293D;
entity("cup") -> 16#0222A;
entity("cupbrcap") -> 16#02A48;
entity("cupcap") -> 16#02A46;
entity("cupcup") -> 16#02A4A;
entity("cupdot") -> 16#0228D;
entity("cupor") -> 16#02A45;
entity("cups") -> [16#0222A, 16#0FE00];
entity("curarr") -> 16#021B7;
entity("curarrm") -> 16#0293C;
entity("curlyeqprec") -> 16#022DE;
entity("curlyeqsucc") -> 16#022DF;
entity("curlyvee") -> 16#022CE;
entity("curlywedge") -> 16#022CF;
entity("curren") -> 16#000A4;
entity("curvearrowleft") -> 16#021B6;
entity("curvearrowright") -> 16#021B7;
entity("cuvee") -> 16#022CE;
entity("cuwed") -> 16#022CF;
entity("cwconint") -> 16#02232;
entity("cwint") -> 16#02231;
entity("cylcty") -> 16#0232D;
entity("dArr") -> 16#021D3;
entity("dHar") -> 16#02965;
entity("dagger") -> 16#02020;
entity("daleth") -> 16#02138;
entity("darr") -> 16#02193;
entity("dash") -> 16#02010;
entity("dashv") -> 16#022A3;
entity("dbkarow") -> 16#0290F;
entity("dblac") -> 16#002DD;
entity("dcaron") -> 16#0010F;
entity("dcy") -> 16#00434;
entity("dd") -> 16#02146;
entity("ddagger") -> 16#02021;
entity("ddarr") -> 16#021CA;
entity("ddotseq") -> 16#02A77;
entity("deg") -> 16#000B0;
entity("delta") -> 16#003B4;
entity("demptyv") -> 16#029B1;
entity("dfisht") -> 16#0297F;
entity("dfr") -> 16#1D521;
entity("dharl") -> 16#021C3;
entity("dharr") -> 16#021C2;
entity("diam") -> 16#022C4;
entity("diamond") -> 16#022C4;
entity("diamondsuit") -> 16#02666;
entity("diams") -> 16#02666;
entity("die") -> 16#000A8;
entity("digamma") -> 16#003DD;
entity("disin") -> 16#022F2;
entity("div") -> 16#000F7;
entity("divide") -> 16#000F7;
entity("divideontimes") -> 16#022C7;
entity("divonx") -> 16#022C7;
entity("djcy") -> 16#00452;
entity("dlcorn") -> 16#0231E;
entity("dlcrop") -> 16#0230D;
entity("dollar") -> 16#00024;
entity("dopf") -> 16#1D555;
entity("dot") -> 16#002D9;
entity("doteq") -> 16#02250;
entity("doteqdot") -> 16#02251;
entity("dotminus") -> 16#02238;
entity("dotplus") -> 16#02214;
entity("dotsquare") -> 16#022A1;
entity("doublebarwedge") -> 16#02306;
entity("downarrow") -> 16#02193;
entity("downdownarrows") -> 16#021CA;
entity("downharpoonleft") -> 16#021C3;
entity("downharpoonright") -> 16#021C2;
entity("drbkarow") -> 16#02910;
entity("drcorn") -> 16#0231F;
entity("drcrop") -> 16#0230C;
entity("dscr") -> 16#1D4B9;
entity("dscy") -> 16#00455;
entity("dsol") -> 16#029F6;
entity("dstrok") -> 16#00111;
entity("dtdot") -> 16#022F1;
entity("dtri") -> 16#025BF;
entity("dtrif") -> 16#025BE;
entity("duarr") -> 16#021F5;
entity("duhar") -> 16#0296F;
entity("dwangle") -> 16#029A6;
entity("dzcy") -> 16#0045F;
entity("dzigrarr") -> 16#027FF;
entity("eDDot") -> 16#02A77;
entity("eDot") -> 16#02251;
entity("eacute") -> 16#000E9;
entity("easter") -> 16#02A6E;
entity("ecaron") -> 16#0011B;
entity("ecir") -> 16#02256;
entity("ecirc") -> 16#000EA;
entity("ecolon") -> 16#02255;
entity("ecy") -> 16#0044D;
entity("edot") -> 16#00117;
entity("ee") -> 16#02147;
entity("efDot") -> 16#02252;
entity("efr") -> 16#1D522;
entity("eg") -> 16#02A9A;
entity("egrave") -> 16#000E8;
entity("egs") -> 16#02A96;
entity("egsdot") -> 16#02A98;
entity("el") -> 16#02A99;
entity("elinters") -> 16#023E7;
entity("ell") -> 16#02113;
entity("els") -> 16#02A95;
entity("elsdot") -> 16#02A97;
entity("emacr") -> 16#00113;
entity("empty") -> 16#02205;
entity("emptyset") -> 16#02205;
entity("emptyv") -> 16#02205;
entity("emsp") -> 16#02003;
entity("emsp13") -> 16#02004;
entity("emsp14") -> 16#02005;
entity("eng") -> 16#0014B;
entity("ensp") -> 16#02002;
entity("eogon") -> 16#00119;
entity("eopf") -> 16#1D556;
entity("epar") -> 16#022D5;
entity("eparsl") -> 16#029E3;
entity("eplus") -> 16#02A71;
entity("epsi") -> 16#003B5;
entity("epsilon") -> 16#003B5;
entity("epsiv") -> 16#003F5;
entity("eqcirc") -> 16#02256;
entity("eqcolon") -> 16#02255;
entity("eqsim") -> 16#02242;
entity("eqslantgtr") -> 16#02A96;
entity("eqslantless") -> 16#02A95;
entity("equals") -> 16#0003D;
entity("equest") -> 16#0225F;
entity("equiv") -> 16#02261;
entity("equivDD") -> 16#02A78;
entity("eqvparsl") -> 16#029E5;
entity("erDot") -> 16#02253;
entity("erarr") -> 16#02971;
entity("escr") -> 16#0212F;
entity("esdot") -> 16#02250;
entity("esim") -> 16#02242;
entity("eta") -> 16#003B7;
entity("eth") -> 16#000F0;
entity("euml") -> 16#000EB;
entity("euro") -> 16#020AC;
entity("excl") -> 16#00021;
entity("exist") -> 16#02203;
entity("expectation") -> 16#02130;
entity("exponentiale") -> 16#02147;
entity("fallingdotseq") -> 16#02252;
entity("fcy") -> 16#00444;
entity("female") -> 16#02640;
entity("ffilig") -> 16#0FB03;
entity("fflig") -> 16#0FB00;
entity("ffllig") -> 16#0FB04;
entity("ffr") -> 16#1D523;
entity("filig") -> 16#0FB01;
entity("fjlig") -> [16#00066, 16#0006A];
entity("flat") -> 16#0266D;
entity("fllig") -> 16#0FB02;
entity("fltns") -> 16#025B1;
entity("fnof") -> 16#00192;
entity("fopf") -> 16#1D557;
entity("forall") -> 16#02200;
entity("fork") -> 16#022D4;
entity("forkv") -> 16#02AD9;
entity("fpartint") -> 16#02A0D;
entity("frac12") -> 16#000BD;
entity("frac13") -> 16#02153;
entity("frac14") -> 16#000BC;
entity("frac15") -> 16#02155;
entity("frac16") -> 16#02159;
entity("frac18") -> 16#0215B;
entity("frac23") -> 16#02154;
entity("frac25") -> 16#02156;
entity("frac34") -> 16#000BE;
entity("frac35") -> 16#02157;
entity("frac38") -> 16#0215C;
entity("frac45") -> 16#02158;
entity("frac56") -> 16#0215A;
entity("frac58") -> 16#0215D;
entity("frac78") -> 16#0215E;
entity("frasl") -> 16#02044;
entity("frown") -> 16#02322;
entity("fscr") -> 16#1D4BB;
entity("gE") -> 16#02267;
entity("gEl") -> 16#02A8C;
entity("gacute") -> 16#001F5;
entity("gamma") -> 16#003B3;
entity("gammad") -> 16#003DD;
entity("gap") -> 16#02A86;
entity("gbreve") -> 16#0011F;
entity("gcirc") -> 16#0011D;
entity("gcy") -> 16#00433;
entity("gdot") -> 16#00121;
entity("ge") -> 16#02265;
entity("gel") -> 16#022DB;
entity("geq") -> 16#02265;
entity("geqq") -> 16#02267;
entity("geqslant") -> 16#02A7E;
entity("ges") -> 16#02A7E;
entity("gescc") -> 16#02AA9;
entity("gesdot") -> 16#02A80;
entity("gesdoto") -> 16#02A82;
entity("gesdotol") -> 16#02A84;
entity("gesl") -> [16#022DB, 16#0FE00];
entity("gesles") -> 16#02A94;
entity("gfr") -> 16#1D524;
entity("gg") -> 16#0226B;
entity("ggg") -> 16#022D9;
entity("gimel") -> 16#02137;
entity("gjcy") -> 16#00453;
entity("gl") -> 16#02277;
entity("glE") -> 16#02A92;
entity("gla") -> 16#02AA5;
entity("glj") -> 16#02AA4;
entity("gnE") -> 16#02269;
entity("gnap") -> 16#02A8A;
entity("gnapprox") -> 16#02A8A;
entity("gne") -> 16#02A88;
entity("gneq") -> 16#02A88;
entity("gneqq") -> 16#02269;
entity("gnsim") -> 16#022E7;
entity("gopf") -> 16#1D558;
entity("grave") -> 16#00060;
entity("gscr") -> 16#0210A;
entity("gsim") -> 16#02273;
entity("gsime") -> 16#02A8E;
entity("gsiml") -> 16#02A90;
entity("gt") -> 16#0003E;
entity("gtcc") -> 16#02AA7;
entity("gtcir") -> 16#02A7A;
entity("gtdot") -> 16#022D7;
entity("gtlPar") -> 16#02995;
entity("gtquest") -> 16#02A7C;
entity("gtrapprox") -> 16#02A86;
entity("gtrarr") -> 16#02978;
entity("gtrdot") -> 16#022D7;
entity("gtreqless") -> 16#022DB;
entity("gtreqqless") -> 16#02A8C;
entity("gtrless") -> 16#02277;
entity("gtrsim") -> 16#02273;
entity("gvertneqq") -> [16#02269, 16#0FE00];
entity("gvnE") -> [16#02269, 16#0FE00];
entity("hArr") -> 16#021D4;
entity("hairsp") -> 16#0200A;
entity("half") -> 16#000BD;
entity("hamilt") -> 16#0210B;
entity("hardcy") -> 16#0044A;
entity("harr") -> 16#02194;
entity("harrcir") -> 16#02948;
entity("harrw") -> 16#021AD;
entity("hbar") -> 16#0210F;
entity("hcirc") -> 16#00125;
entity("hearts") -> 16#02665;
entity("heartsuit") -> 16#02665;
entity("hellip") -> 16#02026;
entity("hercon") -> 16#022B9;
entity("hfr") -> 16#1D525;
entity("hksearow") -> 16#02925;
entity("hkswarow") -> 16#02926;
entity("hoarr") -> 16#021FF;
entity("homtht") -> 16#0223B;
entity("hookleftarrow") -> 16#021A9;
entity("hookrightarrow") -> 16#021AA;
entity("hopf") -> 16#1D559;
entity("horbar") -> 16#02015;
entity("hscr") -> 16#1D4BD;
entity("hslash") -> 16#0210F;
entity("hstrok") -> 16#00127;
entity("hybull") -> 16#02043;
entity("hyphen") -> 16#02010;
entity("iacute") -> 16#000ED;
entity("ic") -> 16#02063;
entity("icirc") -> 16#000EE;
entity("icy") -> 16#00438;
entity("iecy") -> 16#00435;
entity("iexcl") -> 16#000A1;
entity("iff") -> 16#021D4;
entity("ifr") -> 16#1D526;
entity("igrave") -> 16#000EC;
entity("ii") -> 16#02148;
entity("iiiint") -> 16#02A0C;
entity("iiint") -> 16#0222D;
entity("iinfin") -> 16#029DC;
entity("iiota") -> 16#02129;
entity("ijlig") -> 16#00133;
entity("imacr") -> 16#0012B;
entity("image") -> 16#02111;
entity("imagline") -> 16#02110;
entity("imagpart") -> 16#02111;
entity("imath") -> 16#00131;
entity("imof") -> 16#022B7;
entity("imped") -> 16#001B5;
entity("in") -> 16#02208;
entity("incare") -> 16#02105;
entity("infin") -> 16#0221E;
entity("infintie") -> 16#029DD;
entity("inodot") -> 16#00131;
entity("int") -> 16#0222B;
entity("intcal") -> 16#022BA;
entity("integers") -> 16#02124;
entity("intercal") -> 16#022BA;
entity("intlarhk") -> 16#02A17;
entity("intprod") -> 16#02A3C;
entity("iocy") -> 16#00451;
entity("iogon") -> 16#0012F;
entity("iopf") -> 16#1D55A;
entity("iota") -> 16#003B9;
entity("iprod") -> 16#02A3C;
entity("iquest") -> 16#000BF;
entity("iscr") -> 16#1D4BE;
entity("isin") -> 16#02208;
entity("isinE") -> 16#022F9;
entity("isindot") -> 16#022F5;
entity("isins") -> 16#022F4;
entity("isinsv") -> 16#022F3;
entity("isinv") -> 16#02208;
entity("it") -> 16#02062;
entity("itilde") -> 16#00129;
entity("iukcy") -> 16#00456;
entity("iuml") -> 16#000EF;
entity("jcirc") -> 16#00135;
entity("jcy") -> 16#00439;
entity("jfr") -> 16#1D527;
entity("jmath") -> 16#00237;
entity("jopf") -> 16#1D55B;
entity("jscr") -> 16#1D4BF;
entity("jsercy") -> 16#00458;
entity("jukcy") -> 16#00454;
entity("kappa") -> 16#003BA;
entity("kappav") -> 16#003F0;
entity("kcedil") -> 16#00137;
entity("kcy") -> 16#0043A;
entity("kfr") -> 16#1D528;
entity("kgreen") -> 16#00138;
entity("khcy") -> 16#00445;
entity("kjcy") -> 16#0045C;
entity("kopf") -> 16#1D55C;
entity("kscr") -> 16#1D4C0;
entity("lAarr") -> 16#021DA;
entity("lArr") -> 16#021D0;
entity("lAtail") -> 16#0291B;
entity("lBarr") -> 16#0290E;
entity("lE") -> 16#02266;
entity("lEg") -> 16#02A8B;
entity("lHar") -> 16#02962;
entity("lacute") -> 16#0013A;
entity("laemptyv") -> 16#029B4;
entity("lagran") -> 16#02112;
entity("lambda") -> 16#003BB;
entity("lang") -> 16#027E8;
entity("langd") -> 16#02991;
entity("langle") -> 16#027E8;
entity("lap") -> 16#02A85;
entity("laquo") -> 16#000AB;
entity("larr") -> 16#02190;
entity("larrb") -> 16#021E4;
entity("larrbfs") -> 16#0291F;
entity("larrfs") -> 16#0291D;
entity("larrhk") -> 16#021A9;
entity("larrlp") -> 16#021AB;
entity("larrpl") -> 16#02939;
entity("larrsim") -> 16#02973;
entity("larrtl") -> 16#021A2;
entity("lat") -> 16#02AAB;
entity("latail") -> 16#02919;
entity("late") -> 16#02AAD;
entity("lates") -> [16#02AAD, 16#0FE00];
entity("lbarr") -> 16#0290C;
entity("lbbrk") -> 16#02772;
entity("lbrace") -> 16#0007B;
entity("lbrack") -> 16#0005B;
entity("lbrke") -> 16#0298B;
entity("lbrksld") -> 16#0298F;
entity("lbrkslu") -> 16#0298D;
entity("lcaron") -> 16#0013E;
entity("lcedil") -> 16#0013C;
entity("lceil") -> 16#02308;
entity("lcub") -> 16#0007B;
entity("lcy") -> 16#0043B;
entity("ldca") -> 16#02936;
entity("ldquo") -> 16#0201C;
entity("ldquor") -> 16#0201E;
entity("ldrdhar") -> 16#02967;
entity("ldrushar") -> 16#0294B;
entity("ldsh") -> 16#021B2;
entity("le") -> 16#02264;
entity("leftarrow") -> 16#02190;
entity("leftarrowtail") -> 16#021A2;
entity("leftharpoondown") -> 16#021BD;
entity("leftharpoonup") -> 16#021BC;
entity("leftleftarrows") -> 16#021C7;
entity("leftrightarrow") -> 16#02194;
entity("leftrightarrows") -> 16#021C6;
entity("leftrightharpoons") -> 16#021CB;
entity("leftrightsquigarrow") -> 16#021AD;
entity("leftthreetimes") -> 16#022CB;
entity("leg") -> 16#022DA;
entity("leq") -> 16#02264;
entity("leqq") -> 16#02266;
entity("leqslant") -> 16#02A7D;
entity("les") -> 16#02A7D;
entity("lescc") -> 16#02AA8;
entity("lesdot") -> 16#02A7F;
entity("lesdoto") -> 16#02A81;
entity("lesdotor") -> 16#02A83;
entity("lesg") -> [16#022DA, 16#0FE00];
entity("lesges") -> 16#02A93;
entity("lessapprox") -> 16#02A85;
entity("lessdot") -> 16#022D6;
entity("lesseqgtr") -> 16#022DA;
entity("lesseqqgtr") -> 16#02A8B;
entity("lessgtr") -> 16#02276;
entity("lesssim") -> 16#02272;
entity("lfisht") -> 16#0297C;
entity("lfloor") -> 16#0230A;
entity("lfr") -> 16#1D529;
entity("lg") -> 16#02276;
entity("lgE") -> 16#02A91;
entity("lhard") -> 16#021BD;
entity("lharu") -> 16#021BC;
entity("lharul") -> 16#0296A;
entity("lhblk") -> 16#02584;
entity("ljcy") -> 16#00459;
entity("ll") -> 16#0226A;
entity("llarr") -> 16#021C7;
entity("llcorner") -> 16#0231E;
entity("llhard") -> 16#0296B;
entity("lltri") -> 16#025FA;
entity("lmidot") -> 16#00140;
entity("lmoust") -> 16#023B0;
entity("lmoustache") -> 16#023B0;
entity("lnE") -> 16#02268;
entity("lnap") -> 16#02A89;
entity("lnapprox") -> 16#02A89;
entity("lne") -> 16#02A87;
entity("lneq") -> 16#02A87;
entity("lneqq") -> 16#02268;
entity("lnsim") -> 16#022E6;
entity("loang") -> 16#027EC;
entity("loarr") -> 16#021FD;
entity("lobrk") -> 16#027E6;
entity("longleftarrow") -> 16#027F5;
entity("longleftrightarrow") -> 16#027F7;
entity("longmapsto") -> 16#027FC;
entity("longrightarrow") -> 16#027F6;
entity("looparrowleft") -> 16#021AB;
entity("looparrowright") -> 16#021AC;
entity("lopar") -> 16#02985;
entity("lopf") -> 16#1D55D;
entity("loplus") -> 16#02A2D;
entity("lotimes") -> 16#02A34;
entity("lowast") -> 16#02217;
entity("lowbar") -> 16#0005F;
entity("loz") -> 16#025CA;
entity("lozenge") -> 16#025CA;
entity("lozf") -> 16#029EB;
entity("lpar") -> 16#00028;
entity("lparlt") -> 16#02993;
entity("lrarr") -> 16#021C6;
entity("lrcorner") -> 16#0231F;
entity("lrhar") -> 16#021CB;
entity("lrhard") -> 16#0296D;
entity("lrm") -> 16#0200E;
entity("lrtri") -> 16#022BF;
entity("lsaquo") -> 16#02039;
entity("lscr") -> 16#1D4C1;
entity("lsh") -> 16#021B0;
entity("lsim") -> 16#02272;
entity("lsime") -> 16#02A8D;
entity("lsimg") -> 16#02A8F;
entity("lsqb") -> 16#0005B;
entity("lsquo") -> 16#02018;
entity("lsquor") -> 16#0201A;
entity("lstrok") -> 16#00142;
entity("lt") -> 16#0003C;
entity("ltcc") -> 16#02AA6;
entity("ltcir") -> 16#02A79;
entity("ltdot") -> 16#022D6;
entity("lthree") -> 16#022CB;
entity("ltimes") -> 16#022C9;
entity("ltlarr") -> 16#02976;
entity("ltquest") -> 16#02A7B;
entity("ltrPar") -> 16#02996;
entity("ltri") -> 16#025C3;
entity("ltrie") -> 16#022B4;
entity("ltrif") -> 16#025C2;
entity("lurdshar") -> 16#0294A;
entity("luruhar") -> 16#02966;
entity("lvertneqq") -> [16#02268, 16#0FE00];
entity("lvnE") -> [16#02268, 16#0FE00];
entity("mDDot") -> 16#0223A;
entity("macr") -> 16#000AF;
entity("male") -> 16#02642;
entity("malt") -> 16#02720;
entity("maltese") -> 16#02720;
entity("map") -> 16#021A6;
entity("mapsto") -> 16#021A6;
entity("mapstodown") -> 16#021A7;
entity("mapstoleft") -> 16#021A4;
entity("mapstoup") -> 16#021A5;
entity("marker") -> 16#025AE;
entity("mcomma") -> 16#02A29;
entity("mcy") -> 16#0043C;
entity("mdash") -> 16#02014;
entity("measuredangle") -> 16#02221;
entity("mfr") -> 16#1D52A;
entity("mho") -> 16#02127;
entity("micro") -> 16#000B5;
entity("mid") -> 16#02223;
entity("midast") -> 16#0002A;
entity("midcir") -> 16#02AF0;
entity("middot") -> 16#000B7;
entity("minus") -> 16#02212;
entity("minusb") -> 16#0229F;
entity("minusd") -> 16#02238;
entity("minusdu") -> 16#02A2A;
entity("mlcp") -> 16#02ADB;
entity("mldr") -> 16#02026;
entity("mnplus") -> 16#02213;
entity("models") -> 16#022A7;
entity("mopf") -> 16#1D55E;
entity("mp") -> 16#02213;
entity("mscr") -> 16#1D4C2;
entity("mstpos") -> 16#0223E;
entity("mu") -> 16#003BC;
entity("multimap") -> 16#022B8;
entity("mumap") -> 16#022B8;
entity("nGg") -> [16#022D9, 16#00338];
entity("nGt") -> [16#0226B, 16#020D2];
entity("nGtv") -> [16#0226B, 16#00338];
entity("nLeftarrow") -> 16#021CD;
entity("nLeftrightarrow") -> 16#021CE;
entity("nLl") -> [16#022D8, 16#00338];
entity("nLt") -> [16#0226A, 16#020D2];
entity("nLtv") -> [16#0226A, 16#00338];
entity("nRightarrow") -> 16#021CF;
entity("nVDash") -> 16#022AF;
entity("nVdash") -> 16#022AE;
entity("nabla") -> 16#02207;
entity("nacute") -> 16#00144;
entity("nang") -> [16#02220, 16#020D2];
entity("nap") -> 16#02249;
entity("napE") -> [16#02A70, 16#00338];
entity("napid") -> [16#0224B, 16#00338];
entity("napos") -> 16#00149;
entity("napprox") -> 16#02249;
entity("natur") -> 16#0266E;
entity("natural") -> 16#0266E;
entity("naturals") -> 16#02115;
entity("nbsp") -> 16#000A0;
entity("nbump") -> [16#0224E, 16#00338];
entity("nbumpe") -> [16#0224F, 16#00338];
entity("ncap") -> 16#02A43;
entity("ncaron") -> 16#00148;
entity("ncedil") -> 16#00146;
entity("ncong") -> 16#02247;
entity("ncongdot") -> [16#02A6D, 16#00338];
entity("ncup") -> 16#02A42;
entity("ncy") -> 16#0043D;
entity("ndash") -> 16#02013;
entity("ne") -> 16#02260;
entity("neArr") -> 16#021D7;
entity("nearhk") -> 16#02924;
entity("nearr") -> 16#02197;
entity("nearrow") -> 16#02197;
entity("nedot") -> [16#02250, 16#00338];
entity("nequiv") -> 16#02262;
entity("nesear") -> 16#02928;
entity("nesim") -> [16#02242, 16#00338];
entity("nexist") -> 16#02204;
entity("nexists") -> 16#02204;
entity("nfr") -> 16#1D52B;
entity("ngE") -> [16#02267, 16#00338];
entity("nge") -> 16#02271;
entity("ngeq") -> 16#02271;
entity("ngeqq") -> [16#02267, 16#00338];
entity("ngeqslant") -> [16#02A7E, 16#00338];
entity("nges") -> [16#02A7E, 16#00338];
entity("ngsim") -> 16#02275;
entity("ngt") -> 16#0226F;
entity("ngtr") -> 16#0226F;
entity("nhArr") -> 16#021CE;
entity("nharr") -> 16#021AE;
entity("nhpar") -> 16#02AF2;
entity("ni") -> 16#0220B;
entity("nis") -> 16#022FC;
entity("nisd") -> 16#022FA;
entity("niv") -> 16#0220B;
entity("njcy") -> 16#0045A;
entity("nlArr") -> 16#021CD;
entity("nlE") -> [16#02266, 16#00338];
entity("nlarr") -> 16#0219A;
entity("nldr") -> 16#02025;
entity("nle") -> 16#02270;
entity("nleftarrow") -> 16#0219A;
entity("nleftrightarrow") -> 16#021AE;
entity("nleq") -> 16#02270;
entity("nleqq") -> [16#02266, 16#00338];
entity("nleqslant") -> [16#02A7D, 16#00338];
entity("nles") -> [16#02A7D, 16#00338];
entity("nless") -> 16#0226E;
entity("nlsim") -> 16#02274;
entity("nlt") -> 16#0226E;
entity("nltri") -> 16#022EA;
entity("nltrie") -> 16#022EC;
entity("nmid") -> 16#02224;
entity("nopf") -> 16#1D55F;
entity("not") -> 16#000AC;
entity("notin") -> 16#02209;
entity("notinE") -> [16#022F9, 16#00338];
entity("notindot") -> [16#022F5, 16#00338];
entity("notinva") -> 16#02209;
entity("notinvb") -> 16#022F7;
entity("notinvc") -> 16#022F6;
entity("notni") -> 16#0220C;
entity("notniva") -> 16#0220C;
entity("notnivb") -> 16#022FE;
entity("notnivc") -> 16#022FD;
entity("npar") -> 16#02226;
entity("nparallel") -> 16#02226;
entity("nparsl") -> [16#02AFD, 16#020E5];
entity("npart") -> [16#02202, 16#00338];
entity("npolint") -> 16#02A14;
entity("npr") -> 16#02280;
entity("nprcue") -> 16#022E0;
entity("npre") -> [16#02AAF, 16#00338];
entity("nprec") -> 16#02280;
entity("npreceq") -> [16#02AAF, 16#00338];
entity("nrArr") -> 16#021CF;
entity("nrarr") -> 16#0219B;
entity("nrarrc") -> [16#02933, 16#00338];
entity("nrarrw") -> [16#0219D, 16#00338];
entity("nrightarrow") -> 16#0219B;
entity("nrtri") -> 16#022EB;
entity("nrtrie") -> 16#022ED;
entity("nsc") -> 16#02281;
entity("nsccue") -> 16#022E1;
entity("nsce") -> [16#02AB0, 16#00338];
entity("nscr") -> 16#1D4C3;
entity("nshortmid") -> 16#02224;
entity("nshortparallel") -> 16#02226;
entity("nsim") -> 16#02241;
entity("nsime") -> 16#02244;
entity("nsimeq") -> 16#02244;
entity("nsmid") -> 16#02224;
entity("nspar") -> 16#02226;
entity("nsqsube") -> 16#022E2;
entity("nsqsupe") -> 16#022E3;
entity("nsub") -> 16#02284;
entity("nsubE") -> [16#02AC5, 16#00338];
entity("nsube") -> 16#02288;
entity("nsubset") -> [16#02282, 16#020D2];
entity("nsubseteq") -> 16#02288;
entity("nsubseteqq") -> [16#02AC5, 16#00338];
entity("nsucc") -> 16#02281;
entity("nsucceq") -> [16#02AB0, 16#00338];
entity("nsup") -> 16#02285;
entity("nsupE") -> [16#02AC6, 16#00338];
entity("nsupe") -> 16#02289;
entity("nsupset") -> [16#02283, 16#020D2];
entity("nsupseteq") -> 16#02289;
entity("nsupseteqq") -> [16#02AC6, 16#00338];
entity("ntgl") -> 16#02279;
entity("ntilde") -> 16#000F1;
entity("ntlg") -> 16#02278;
entity("ntriangleleft") -> 16#022EA;
entity("ntrianglelefteq") -> 16#022EC;
entity("ntriangleright") -> 16#022EB;
entity("ntrianglerighteq") -> 16#022ED;
entity("nu") -> 16#003BD;
entity("num") -> 16#00023;
entity("numero") -> 16#02116;
entity("numsp") -> 16#02007;
entity("nvDash") -> 16#022AD;
entity("nvHarr") -> 16#02904;
entity("nvap") -> [16#0224D, 16#020D2];
entity("nvdash") -> 16#022AC;
entity("nvge") -> [16#02265, 16#020D2];
entity("nvgt") -> [16#0003E, 16#020D2];
entity("nvinfin") -> 16#029DE;
entity("nvlArr") -> 16#02902;
entity("nvle") -> [16#02264, 16#020D2];
entity("nvlt") -> [16#0003C, 16#020D2];
entity("nvltrie") -> [16#022B4, 16#020D2];
entity("nvrArr") -> 16#02903;
entity("nvrtrie") -> [16#022B5, 16#020D2];
entity("nvsim") -> [16#0223C, 16#020D2];
entity("nwArr") -> 16#021D6;
entity("nwarhk") -> 16#02923;
entity("nwarr") -> 16#02196;
entity("nwarrow") -> 16#02196;
entity("nwnear") -> 16#02927;
entity("oS") -> 16#024C8;
entity("oacute") -> 16#000F3;
entity("oast") -> 16#0229B;
entity("ocir") -> 16#0229A;
entity("ocirc") -> 16#000F4;
entity("ocy") -> 16#0043E;
entity("odash") -> 16#0229D;
entity("odblac") -> 16#00151;
entity("odiv") -> 16#02A38;
entity("odot") -> 16#02299;
entity("odsold") -> 16#029BC;
entity("oelig") -> 16#00153;
entity("ofcir") -> 16#029BF;
entity("ofr") -> 16#1D52C;
entity("ogon") -> 16#002DB;
entity("ograve") -> 16#000F2;
entity("ogt") -> 16#029C1;
entity("ohbar") -> 16#029B5;
entity("ohm") -> 16#003A9;
entity("oint") -> 16#0222E;
entity("olarr") -> 16#021BA;
entity("olcir") -> 16#029BE;
entity("olcross") -> 16#029BB;
entity("oline") -> 16#0203E;
entity("olt") -> 16#029C0;
entity("omacr") -> 16#0014D;
entity("omega") -> 16#003C9;
entity("omicron") -> 16#003BF;
entity("omid") -> 16#029B6;
entity("ominus") -> 16#02296;
entity("oopf") -> 16#1D560;
entity("opar") -> 16#029B7;
entity("operp") -> 16#029B9;
entity("oplus") -> 16#02295;
entity("or") -> 16#02228;
entity("orarr") -> 16#021BB;
entity("ord") -> 16#02A5D;
entity("order") -> 16#02134;
entity("orderof") -> 16#02134;
entity("ordf") -> 16#000AA;
entity("ordm") -> 16#000BA;
entity("origof") -> 16#022B6;
entity("oror") -> 16#02A56;
entity("orslope") -> 16#02A57;
entity("orv") -> 16#02A5B;
entity("oscr") -> 16#02134;
entity("oslash") -> 16#000F8;
entity("osol") -> 16#02298;
entity("otilde") -> 16#000F5;
entity("otimes") -> 16#02297;
entity("otimesas") -> 16#02A36;
entity("ouml") -> 16#000F6;
entity("ovbar") -> 16#0233D;
entity("par") -> 16#02225;
entity("para") -> 16#000B6;
entity("parallel") -> 16#02225;
entity("parsim") -> 16#02AF3;
entity("parsl") -> 16#02AFD;
entity("part") -> 16#02202;
entity("pcy") -> 16#0043F;
entity("percnt") -> 16#00025;
entity("period") -> 16#0002E;
entity("permil") -> 16#02030;
entity("perp") -> 16#022A5;
entity("pertenk") -> 16#02031;
entity("pfr") -> 16#1D52D;
entity("phi") -> 16#003C6;
entity("phiv") -> 16#003D5;
entity("phmmat") -> 16#02133;
entity("phone") -> 16#0260E;
entity("pi") -> 16#003C0;
entity("pitchfork") -> 16#022D4;
entity("piv") -> 16#003D6;
entity("planck") -> 16#0210F;
entity("planckh") -> 16#0210E;
entity("plankv") -> 16#0210F;
entity("plus") -> 16#0002B;
entity("plusacir") -> 16#02A23;
entity("plusb") -> 16#0229E;
entity("pluscir") -> 16#02A22;
entity("plusdo") -> 16#02214;
entity("plusdu") -> 16#02A25;
entity("pluse") -> 16#02A72;
entity("plusmn") -> 16#000B1;
entity("plussim") -> 16#02A26;
entity("plustwo") -> 16#02A27;
entity("pm") -> 16#000B1;
entity("pointint") -> 16#02A15;
entity("popf") -> 16#1D561;
entity("pound") -> 16#000A3;
entity("pr") -> 16#0227A;
entity("prE") -> 16#02AB3;
entity("prap") -> 16#02AB7;
entity("prcue") -> 16#0227C;
entity("pre") -> 16#02AAF;
entity("prec") -> 16#0227A;
entity("precapprox") -> 16#02AB7;
entity("preccurlyeq") -> 16#0227C;
entity("preceq") -> 16#02AAF;
entity("precnapprox") -> 16#02AB9;
entity("precneqq") -> 16#02AB5;
entity("precnsim") -> 16#022E8;
entity("precsim") -> 16#0227E;
entity("prime") -> 16#02032;
entity("primes") -> 16#02119;
entity("prnE") -> 16#02AB5;
entity("prnap") -> 16#02AB9;
entity("prnsim") -> 16#022E8;
entity("prod") -> 16#0220F;
entity("profalar") -> 16#0232E;
entity("profline") -> 16#02312;
entity("profsurf") -> 16#02313;
entity("prop") -> 16#0221D;
entity("propto") -> 16#0221D;
entity("prsim") -> 16#0227E;
entity("prurel") -> 16#022B0;
entity("pscr") -> 16#1D4C5;
entity("psi") -> 16#003C8;
entity("puncsp") -> 16#02008;
entity("qfr") -> 16#1D52E;
entity("qint") -> 16#02A0C;
entity("qopf") -> 16#1D562;
entity("qprime") -> 16#02057;
entity("qscr") -> 16#1D4C6;
entity("quaternions") -> 16#0210D;
entity("quatint") -> 16#02A16;
entity("quest") -> 16#0003F;
entity("questeq") -> 16#0225F;
entity("quot") -> 16#00022;
entity("rAarr") -> 16#021DB;
entity("rArr") -> 16#021D2;
entity("rAtail") -> 16#0291C;
entity("rBarr") -> 16#0290F;
entity("rHar") -> 16#02964;
entity("race") -> [16#0223D, 16#00331];
entity("racute") -> 16#00155;
entity("radic") -> 16#0221A;
entity("raemptyv") -> 16#029B3;
entity("rang") -> 16#027E9;
entity("rangd") -> 16#02992;
entity("range") -> 16#029A5;
entity("rangle") -> 16#027E9;
entity("raquo") -> 16#000BB;
entity("rarr") -> 16#02192;
entity("rarrap") -> 16#02975;
entity("rarrb") -> 16#021E5;
entity("rarrbfs") -> 16#02920;
entity("rarrc") -> 16#02933;
entity("rarrfs") -> 16#0291E;
entity("rarrhk") -> 16#021AA;
entity("rarrlp") -> 16#021AC;
entity("rarrpl") -> 16#02945;
entity("rarrsim") -> 16#02974;
entity("rarrtl") -> 16#021A3;
entity("rarrw") -> 16#0219D;
entity("ratail") -> 16#0291A;
entity("ratio") -> 16#02236;
entity("rationals") -> 16#0211A;
entity("rbarr") -> 16#0290D;
entity("rbbrk") -> 16#02773;
entity("rbrace") -> 16#0007D;
entity("rbrack") -> 16#0005D;
entity("rbrke") -> 16#0298C;
entity("rbrksld") -> 16#0298E;
entity("rbrkslu") -> 16#02990;
entity("rcaron") -> 16#00159;
entity("rcedil") -> 16#00157;
entity("rceil") -> 16#02309;
entity("rcub") -> 16#0007D;
entity("rcy") -> 16#00440;
entity("rdca") -> 16#02937;
entity("rdldhar") -> 16#02969;
entity("rdquo") -> 16#0201D;
entity("rdquor") -> 16#0201D;
entity("rdsh") -> 16#021B3;
entity("real") -> 16#0211C;
entity("realine") -> 16#0211B;
entity("realpart") -> 16#0211C;
entity("reals") -> 16#0211D;
entity("rect") -> 16#025AD;
entity("reg") -> 16#000AE;
entity("rfisht") -> 16#0297D;
entity("rfloor") -> 16#0230B;
entity("rfr") -> 16#1D52F;
entity("rhard") -> 16#021C1;
entity("rharu") -> 16#021C0;
entity("rharul") -> 16#0296C;
entity("rho") -> 16#003C1;
entity("rhov") -> 16#003F1;
entity("rightarrow") -> 16#02192;
entity("rightarrowtail") -> 16#021A3;
entity("rightharpoondown") -> 16#021C1;
entity("rightharpoonup") -> 16#021C0;
entity("rightleftarrows") -> 16#021C4;
entity("rightleftharpoons") -> 16#021CC;
entity("rightrightarrows") -> 16#021C9;
entity("rightsquigarrow") -> 16#0219D;
entity("rightthreetimes") -> 16#022CC;
entity("ring") -> 16#002DA;
entity("risingdotseq") -> 16#02253;
entity("rlarr") -> 16#021C4;
entity("rlhar") -> 16#021CC;
entity("rlm") -> 16#0200F;
entity("rmoust") -> 16#023B1;
entity("rmoustache") -> 16#023B1;
entity("rnmid") -> 16#02AEE;
entity("roang") -> 16#027ED;
entity("roarr") -> 16#021FE;
entity("robrk") -> 16#027E7;
entity("ropar") -> 16#02986;
entity("ropf") -> 16#1D563;
entity("roplus") -> 16#02A2E;
entity("rotimes") -> 16#02A35;
entity("rpar") -> 16#00029;
entity("rpargt") -> 16#02994;
entity("rppolint") -> 16#02A12;
entity("rrarr") -> 16#021C9;
entity("rsaquo") -> 16#0203A;
entity("rscr") -> 16#1D4C7;
entity("rsh") -> 16#021B1;
entity("rsqb") -> 16#0005D;
entity("rsquo") -> 16#02019;
entity("rsquor") -> 16#02019;
entity("rthree") -> 16#022CC;
entity("rtimes") -> 16#022CA;
entity("rtri") -> 16#025B9;
entity("rtrie") -> 16#022B5;
entity("rtrif") -> 16#025B8;
entity("rtriltri") -> 16#029CE;
entity("ruluhar") -> 16#02968;
entity("rx") -> 16#0211E;
entity("sacute") -> 16#0015B;
entity("sbquo") -> 16#0201A;
entity("sc") -> 16#0227B;
entity("scE") -> 16#02AB4;
entity("scap") -> 16#02AB8;
entity("scaron") -> 16#00161;
entity("sccue") -> 16#0227D;
entity("sce") -> 16#02AB0;
entity("scedil") -> 16#0015F;
entity("scirc") -> 16#0015D;
entity("scnE") -> 16#02AB6;
entity("scnap") -> 16#02ABA;
entity("scnsim") -> 16#022E9;
entity("scpolint") -> 16#02A13;
entity("scsim") -> 16#0227F;
entity("scy") -> 16#00441;
entity("sdot") -> 16#022C5;
entity("sdotb") -> 16#022A1;
entity("sdote") -> 16#02A66;
entity("seArr") -> 16#021D8;
entity("searhk") -> 16#02925;
entity("searr") -> 16#02198;
entity("searrow") -> 16#02198;
entity("sect") -> 16#000A7;
entity("semi") -> 16#0003B;
entity("seswar") -> 16#02929;
entity("setminus") -> 16#02216;
entity("setmn") -> 16#02216;
entity("sext") -> 16#02736;
entity("sfr") -> 16#1D530;
entity("sfrown") -> 16#02322;
entity("sharp") -> 16#0266F;
entity("shchcy") -> 16#00449;
entity("shcy") -> 16#00448;
entity("shortmid") -> 16#02223;
entity("shortparallel") -> 16#02225;
entity("shy") -> 16#000AD;
entity("sigma") -> 16#003C3;
entity("sigmaf") -> 16#003C2;
entity("sigmav") -> 16#003C2;
entity("sim") -> 16#0223C;
entity("simdot") -> 16#02A6A;
entity("sime") -> 16#02243;
entity("simeq") -> 16#02243;
entity("simg") -> 16#02A9E;
entity("simgE") -> 16#02AA0;
entity("siml") -> 16#02A9D;
entity("simlE") -> 16#02A9F;
entity("simne") -> 16#02246;
entity("simplus") -> 16#02A24;
entity("simrarr") -> 16#02972;
entity("slarr") -> 16#02190;
entity("smallsetminus") -> 16#02216;
entity("smashp") -> 16#02A33;
entity("smeparsl") -> 16#029E4;
entity("smid") -> 16#02223;
entity("smile") -> 16#02323;
entity("smt") -> 16#02AAA;
entity("smte") -> 16#02AAC;
entity("smtes") -> [16#02AAC, 16#0FE00];
entity("softcy") -> 16#0044C;
entity("sol") -> 16#0002F;
entity("solb") -> 16#029C4;
entity("solbar") -> 16#0233F;
entity("sopf") -> 16#1D564;
entity("spades") -> 16#02660;
entity("spadesuit") -> 16#02660;
entity("spar") -> 16#02225;
entity("sqcap") -> 16#02293;
entity("sqcaps") -> [16#02293, 16#0FE00];
entity("sqcup") -> 16#02294;
entity("sqcups") -> [16#02294, 16#0FE00];
entity("sqsub") -> 16#0228F;
entity("sqsube") -> 16#02291;
entity("sqsubset") -> 16#0228F;
entity("sqsubseteq") -> 16#02291;
entity("sqsup") -> 16#02290;
entity("sqsupe") -> 16#02292;
entity("sqsupset") -> 16#02290;
entity("sqsupseteq") -> 16#02292;
entity("squ") -> 16#025A1;
entity("square") -> 16#025A1;
entity("squarf") -> 16#025AA;
entity("squf") -> 16#025AA;
entity("srarr") -> 16#02192;
entity("sscr") -> 16#1D4C8;
entity("ssetmn") -> 16#02216;
entity("ssmile") -> 16#02323;
entity("sstarf") -> 16#022C6;
entity("star") -> 16#02606;
entity("starf") -> 16#02605;
entity("straightepsilon") -> 16#003F5;
entity("straightphi") -> 16#003D5;
entity("strns") -> 16#000AF;
entity("sub") -> 16#02282;
entity("subE") -> 16#02AC5;
entity("subdot") -> 16#02ABD;
entity("sube") -> 16#02286;
entity("subedot") -> 16#02AC3;
entity("submult") -> 16#02AC1;
entity("subnE") -> 16#02ACB;
entity("subne") -> 16#0228A;
entity("subplus") -> 16#02ABF;
entity("subrarr") -> 16#02979;
entity("subset") -> 16#02282;
entity("subseteq") -> 16#02286;
entity("subseteqq") -> 16#02AC5;
entity("subsetneq") -> 16#0228A;
entity("subsetneqq") -> 16#02ACB;
entity("subsim") -> 16#02AC7;
entity("subsub") -> 16#02AD5;
entity("subsup") -> 16#02AD3;
entity("succ") -> 16#0227B;
entity("succapprox") -> 16#02AB8;
entity("succcurlyeq") -> 16#0227D;
entity("succeq") -> 16#02AB0;
entity("succnapprox") -> 16#02ABA;
entity("succneqq") -> 16#02AB6;
entity("succnsim") -> 16#022E9;
entity("succsim") -> 16#0227F;
entity("sum") -> 16#02211;
entity("sung") -> 16#0266A;
entity("sup") -> 16#02283;
entity("sup1") -> 16#000B9;
entity("sup2") -> 16#000B2;
entity("sup3") -> 16#000B3;
entity("supE") -> 16#02AC6;
entity("supdot") -> 16#02ABE;
entity("supdsub") -> 16#02AD8;
entity("supe") -> 16#02287;
entity("supedot") -> 16#02AC4;
entity("suphsol") -> 16#027C9;
entity("suphsub") -> 16#02AD7;
entity("suplarr") -> 16#0297B;
entity("supmult") -> 16#02AC2;
entity("supnE") -> 16#02ACC;
entity("supne") -> 16#0228B;
entity("supplus") -> 16#02AC0;
entity("supset") -> 16#02283;
entity("supseteq") -> 16#02287;
entity("supseteqq") -> 16#02AC6;
entity("supsetneq") -> 16#0228B;
entity("supsetneqq") -> 16#02ACC;
entity("supsim") -> 16#02AC8;
entity("supsub") -> 16#02AD4;
entity("supsup") -> 16#02AD6;
entity("swArr") -> 16#021D9;
entity("swarhk") -> 16#02926;
entity("swarr") -> 16#02199;
entity("swarrow") -> 16#02199;
entity("swnwar") -> 16#0292A;
entity("szlig") -> 16#000DF;
entity("target") -> 16#02316;
entity("tau") -> 16#003C4;
entity("tbrk") -> 16#023B4;
entity("tcaron") -> 16#00165;
entity("tcedil") -> 16#00163;
entity("tcy") -> 16#00442;
entity("tdot") -> 16#020DB;
entity("telrec") -> 16#02315;
entity("tfr") -> 16#1D531;
entity("there4") -> 16#02234;
entity("therefore") -> 16#02234;
entity("theta") -> 16#003B8;
entity("thetasym") -> 16#003D1;
entity("thetav") -> 16#003D1;
entity("thickapprox") -> 16#02248;
entity("thicksim") -> 16#0223C;
entity("thinsp") -> 16#02009;
entity("thkap") -> 16#02248;
entity("thksim") -> 16#0223C;
entity("thorn") -> 16#000FE;
entity("tilde") -> 16#002DC;
entity("times") -> 16#000D7;
entity("timesb") -> 16#022A0;
entity("timesbar") -> 16#02A31;
entity("timesd") -> 16#02A30;
entity("tint") -> 16#0222D;
entity("toea") -> 16#02928;
entity("top") -> 16#022A4;
entity("topbot") -> 16#02336;
entity("topcir") -> 16#02AF1;
entity("topf") -> 16#1D565;
entity("topfork") -> 16#02ADA;
entity("tosa") -> 16#02929;
entity("tprime") -> 16#02034;
entity("trade") -> 16#02122;
entity("triangle") -> 16#025B5;
entity("triangledown") -> 16#025BF;
entity("triangleleft") -> 16#025C3;
entity("trianglelefteq") -> 16#022B4;
entity("triangleq") -> 16#0225C;
entity("triangleright") -> 16#025B9;
entity("trianglerighteq") -> 16#022B5;
entity("tridot") -> 16#025EC;
entity("trie") -> 16#0225C;
entity("triminus") -> 16#02A3A;
entity("triplus") -> 16#02A39;
entity("trisb") -> 16#029CD;
entity("tritime") -> 16#02A3B;
entity("trpezium") -> 16#023E2;
entity("tscr") -> 16#1D4C9;
entity("tscy") -> 16#00446;
entity("tshcy") -> 16#0045B;
entity("tstrok") -> 16#00167;
entity("twixt") -> 16#0226C;
entity("twoheadleftarrow") -> 16#0219E;
entity("twoheadrightarrow") -> 16#021A0;
entity("uArr") -> 16#021D1;
entity("uHar") -> 16#02963;
entity("uacute") -> 16#000FA;
entity("uarr") -> 16#02191;
entity("ubrcy") -> 16#0045E;
entity("ubreve") -> 16#0016D;
entity("ucirc") -> 16#000FB;
entity("ucy") -> 16#00443;
entity("udarr") -> 16#021C5;
entity("udblac") -> 16#00171;
entity("udhar") -> 16#0296E;
entity("ufisht") -> 16#0297E;
entity("ufr") -> 16#1D532;
entity("ugrave") -> 16#000F9;
entity("uharl") -> 16#021BF;
entity("uharr") -> 16#021BE;
entity("uhblk") -> 16#02580;
entity("ulcorn") -> 16#0231C;
entity("ulcorner") -> 16#0231C;
entity("ulcrop") -> 16#0230F;
entity("ultri") -> 16#025F8;
entity("umacr") -> 16#0016B;
entity("uml") -> 16#000A8;
entity("uogon") -> 16#00173;
entity("uopf") -> 16#1D566;
entity("uparrow") -> 16#02191;
entity("updownarrow") -> 16#02195;
entity("upharpoonleft") -> 16#021BF;
entity("upharpoonright") -> 16#021BE;
entity("uplus") -> 16#0228E;
entity("upsi") -> 16#003C5;
entity("upsih") -> 16#003D2;
entity("upsilon") -> 16#003C5;
entity("upuparrows") -> 16#021C8;
entity("urcorn") -> 16#0231D;
entity("urcorner") -> 16#0231D;
entity("urcrop") -> 16#0230E;
entity("uring") -> 16#0016F;
entity("urtri") -> 16#025F9;
entity("uscr") -> 16#1D4CA;
entity("utdot") -> 16#022F0;
entity("utilde") -> 16#00169;
entity("utri") -> 16#025B5;
entity("utrif") -> 16#025B4;
entity("uuarr") -> 16#021C8;
entity("uuml") -> 16#000FC;
entity("uwangle") -> 16#029A7;
entity("vArr") -> 16#021D5;
entity("vBar") -> 16#02AE8;
entity("vBarv") -> 16#02AE9;
entity("vDash") -> 16#022A8;
entity("vangrt") -> 16#0299C;
entity("varepsilon") -> 16#003F5;
entity("varkappa") -> 16#003F0;
entity("varnothing") -> 16#02205;
entity("varphi") -> 16#003D5;
entity("varpi") -> 16#003D6;
entity("varpropto") -> 16#0221D;
entity("varr") -> 16#02195;
entity("varrho") -> 16#003F1;
entity("varsigma") -> 16#003C2;
entity("varsubsetneq") -> [16#0228A, 16#0FE00];
entity("varsubsetneqq") -> [16#02ACB, 16#0FE00];
entity("varsupsetneq") -> [16#0228B, 16#0FE00];
entity("varsupsetneqq") -> [16#02ACC, 16#0FE00];
entity("vartheta") -> 16#003D1;
entity("vartriangleleft") -> 16#022B2;
entity("vartriangleright") -> 16#022B3;
entity("vcy") -> 16#00432;
entity("vdash") -> 16#022A2;
entity("vee") -> 16#02228;
entity("veebar") -> 16#022BB;
entity("veeeq") -> 16#0225A;
entity("vellip") -> 16#022EE;
entity("verbar") -> 16#0007C;
entity("vert") -> 16#0007C;
entity("vfr") -> 16#1D533;
entity("vltri") -> 16#022B2;
entity("vnsub") -> [16#02282, 16#020D2];
entity("vnsup") -> [16#02283, 16#020D2];
entity("vopf") -> 16#1D567;
entity("vprop") -> 16#0221D;
entity("vrtri") -> 16#022B3;
entity("vscr") -> 16#1D4CB;
entity("vsubnE") -> [16#02ACB, 16#0FE00];
entity("vsubne") -> [16#0228A, 16#0FE00];
entity("vsupnE") -> [16#02ACC, 16#0FE00];
entity("vsupne") -> [16#0228B, 16#0FE00];
entity("vzigzag") -> 16#0299A;
entity("wcirc") -> 16#00175;
entity("wedbar") -> 16#02A5F;
entity("wedge") -> 16#02227;
entity("wedgeq") -> 16#02259;
entity("weierp") -> 16#02118;
entity("wfr") -> 16#1D534;
entity("wopf") -> 16#1D568;
entity("wp") -> 16#02118;
entity("wr") -> 16#02240;
entity("wreath") -> 16#02240;
entity("wscr") -> 16#1D4CC;
entity("xcap") -> 16#022C2;
entity("xcirc") -> 16#025EF;
entity("xcup") -> 16#022C3;
entity("xdtri") -> 16#025BD;
entity("xfr") -> 16#1D535;
entity("xhArr") -> 16#027FA;
entity("xharr") -> 16#027F7;
entity("xi") -> 16#003BE;
entity("xlArr") -> 16#027F8;
entity("xlarr") -> 16#027F5;
entity("xmap") -> 16#027FC;
entity("xnis") -> 16#022FB;
entity("xodot") -> 16#02A00;
entity("xopf") -> 16#1D569;
entity("xoplus") -> 16#02A01;
entity("xotime") -> 16#02A02;
entity("xrArr") -> 16#027F9;
entity("xrarr") -> 16#027F6;
entity("xscr") -> 16#1D4CD;
entity("xsqcup") -> 16#02A06;
entity("xuplus") -> 16#02A04;
entity("xutri") -> 16#025B3;
entity("xvee") -> 16#022C1;
entity("xwedge") -> 16#022C0;
entity("yacute") -> 16#000FD;
entity("yacy") -> 16#0044F;
entity("ycirc") -> 16#00177;
entity("ycy") -> 16#0044B;
entity("yen") -> 16#000A5;
entity("yfr") -> 16#1D536;
entity("yicy") -> 16#00457;
entity("yopf") -> 16#1D56A;
entity("yscr") -> 16#1D4CE;
entity("yucy") -> 16#0044E;
entity("yuml") -> 16#000FF;
entity("zacute") -> 16#0017A;
entity("zcaron") -> 16#0017E;
entity("zcy") -> 16#00437;
entity("zdot") -> 16#0017C;
entity("zeetrf") -> 16#02128;
entity("zeta") -> 16#003B6;
entity("zfr") -> 16#1D537;
entity("zhcy") -> 16#00436;
entity("zigrarr") -> 16#021DD;
entity("zopf") -> 16#1D56B;
entity("zscr") -> 16#1D4CF;
entity("zwj") -> 16#0200D;
entity("zwnj") -> 16#0200C;
entity(_) -> undefined.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
exhaustive_entity_test() ->
T = dgiot_cover:clause_lookup_table(?MODULE, entity),
[?assertEqual(V, entity(K)) || {K, V} <- T].
charref_test() ->
1234 = charref("#1234"),
255 = charref("#xfF"),
255 = charref(<<"#XFf">>),
38 = charref("amp"),
38 = charref(<<"amp">>),
undefined = charref("not_an_entity"),
undefined = charref("#not_an_entity"),
undefined = charref("#xnot_an_entity"),
ok.
-endif.
|
7b969f62ee5aa527b0e479702e2059a465f60558bc4c03a404ff52ebc515f7bd | tweag/ormolu | warning-single-line-out.hs | {-# DEPRECATED test, foo "This is a deprecation" #-}
{-# WARNING test "This is a warning" #-}
test :: IO ()
test = pure ()
bar = 3
{-# DEPRECATED bar "Bar is deprecated" #-}
# DEPRECATED baz " is also deprecated " #
baz = 5
data Number = Number Dobule
{-# DEPRECATED Number "Use Scientific instead." #-}
| null | https://raw.githubusercontent.com/tweag/ormolu/34bdf62429768f24b70d0f8ba7730fc4d8ae73ba/data/examples/declaration/warning/warning-single-line-out.hs | haskell | # DEPRECATED test, foo "This is a deprecation" #
# WARNING test "This is a warning" #
# DEPRECATED bar "Bar is deprecated" #
# DEPRECATED Number "Use Scientific instead." # | test :: IO ()
test = pure ()
bar = 3
# DEPRECATED baz " is also deprecated " #
baz = 5
data Number = Number Dobule
|
7e3621ab15740ed28f55ee99b55323b084fa9fd2c545d988669099e297837785 | iloveponies/sudoku | project.clj | (defproject sudoku "1.0.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.10.0"]
[iloveponies.tests/sudoku "0.2.0-SNAPSHOT"]]
:profiles {:dev {:plugins [[lein-midje "3.2.1"]]}})
| null | https://raw.githubusercontent.com/iloveponies/sudoku/e0ea7bb1af2b47c6fef3ca9e014089e235b8fe54/project.clj | clojure | (defproject sudoku "1.0.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.10.0"]
[iloveponies.tests/sudoku "0.2.0-SNAPSHOT"]]
:profiles {:dev {:plugins [[lein-midje "3.2.1"]]}})
| |
fc4ee1901403c055271a0cc9e5c23501b1a9a200278eb738d5a4f14bfb0b2fdf | graninas/Functional-Design-and-Architecture | Runtime.hs | module Andromeda.Hardware.Impl.Runtime where
import qualified Andromeda.Hardware.Common as T
import qualified Andromeda.Hardware.Domain as T
import qualified Andromeda.Hardware.Impl.Service as SImpl
import qualified Andromeda.Hardware.Impl.Device.Types as TImpl
import qualified Data.Map as Map
import Data.IORef (IORef, newIORef)
type DeviceImpl = (TImpl.ControllerImpl, TImpl.Device)
type Devices = Map.Map T.Controller DeviceImpl
data HardwareRuntime = HardwareRuntime
{ _devicesRef :: IORef Devices
, _hardwareServiceRef :: IORef SImpl.HardwareService
}
createHardwareRuntime :: SImpl.HardwareService -> IO HardwareRuntime
createHardwareRuntime hService = do
devicesRef <- newIORef Map.empty
hServiceRef <- newIORef hService
pure $ HardwareRuntime devicesRef hServiceRef
| null | https://raw.githubusercontent.com/graninas/Functional-Design-and-Architecture/b6a78f80a2a2e0b913bcab1d2279fc137a90db4c/Second-Edition-Manning-Publications/BookSamples/CH08/Section8p1/src/Andromeda/Hardware/Impl/Runtime.hs | haskell | module Andromeda.Hardware.Impl.Runtime where
import qualified Andromeda.Hardware.Common as T
import qualified Andromeda.Hardware.Domain as T
import qualified Andromeda.Hardware.Impl.Service as SImpl
import qualified Andromeda.Hardware.Impl.Device.Types as TImpl
import qualified Data.Map as Map
import Data.IORef (IORef, newIORef)
type DeviceImpl = (TImpl.ControllerImpl, TImpl.Device)
type Devices = Map.Map T.Controller DeviceImpl
data HardwareRuntime = HardwareRuntime
{ _devicesRef :: IORef Devices
, _hardwareServiceRef :: IORef SImpl.HardwareService
}
createHardwareRuntime :: SImpl.HardwareService -> IO HardwareRuntime
createHardwareRuntime hService = do
devicesRef <- newIORef Map.empty
hServiceRef <- newIORef hService
pure $ HardwareRuntime devicesRef hServiceRef
| |
f1a635c60a14cb191914d9e8674124c10b14d3922308b08efdf5acb1b95a73a1 | repl-electric/.sonic-pi | knit.sps | #key: knit
#point_line:0
#point_index:6
# --
(knit ,1).tick()
| null | https://raw.githubusercontent.com/repl-electric/.sonic-pi/a00c733f0a5fa1fa0aa65bf06fe7ab71654d2da9/snippets/knit.sps | scheme | #key: knit
#point_line:0
#point_index:6
# --
(knit ,1).tick()
| |
920c1b978ba5218cd280067bcfac2938cfc39d37cad8d95aa25f6d6cb20bd5a7 | franzinc/clim2 | db-stream.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Package : CLIM - INTERNALS ; Base : 10 ; Lowercase : Yes -*-
;; See the file LICENSE for the full license governing this code.
;;
(in-package :clim-internals)
" Copyright ( c ) 1990 by International Lisp Associates . All rights reserved .
Portions copyright ( c ) 1991 , 1992 Franz , Inc. All rights reserved .
Portions copyright ( c ) 1989 , 1990 Symbolics , Inc. All rights reserved . "
CLIM stream sheets and panes
;;--- How to keep PANE-BACKGROUND/FOREGROUND in sync with the medium?
;;--- I'm not convinced that including WINDOW-STREAM here is right...
(defclass clim-stream-sheet
(window-stream ;includes output recording
sheet-permanently-enabled-mixin
sheet-mute-input-mixin
sheet-multiple-child-mixin
space-requirement-mixin
space-requirement-cache-mixin
permanent-medium-sheet-output-mixin
basic-pane)
((input-editor-stream :initform nil :accessor stream-input-editor-stream))
(:default-initargs
:medium t
:transformation +identity-transformation+))
(defmethod stream-input-editor-stream ((stream sheet)) nil)
(defmethod (setf stream-input-editor-stream) (value (stream sheet))
value)
(defmethod stream-input-editor-stream ((stream standard-encapsulating-stream))
(stream-input-editor-stream (encapsulating-stream-stream stream)))
(defmethod (setf stream-input-editor-stream) (value (stream standard-encapsulating-stream))
(setf (stream-input-editor-stream (encapsulating-stream-stream stream)) value))
(defun maybe-redraw-input-editor-stream (stream region)
(let ((input-editor-stream (stream-input-editor-stream stream)))
(when input-editor-stream
(multiple-value-bind (x-pos y-pos)
(input-buffer-input-position->cursor-position input-editor-stream 0)
(when (region-contains-position-p (or region +everywhere+) x-pos y-pos)
(with-end-of-page-action (input-editor-stream :allow)
(redraw-input-buffer input-editor-stream)))))))
(defmethod handle-repaint :after ((sheet clim-stream-sheet) region)
(maybe-redraw-input-editor-stream sheet region))
;;--- Do we still need this?
(defmethod pane-stream ((pane clim-stream-sheet))
pane)
(defmethod note-sheet-region-changed :after ((pane clim-stream-sheet) &key &allow-other-keys)
(setf (stream-default-text-margin pane)
(bounding-rectangle-width (window-viewport pane))))
(defmethod viewport-region-changed ((pane t) viewport)
(declare (ignore viewport)))
(defmethod viewport-region-changed ((pane clim-stream-sheet) viewport)
(let ((region (sheet-region pane)))
;; It should be safe to modify the sheet's region
(setf (slot-value region 'left) 0
(slot-value region 'top) 0
(slot-value region 'right) (max (bounding-rectangle-width pane)
(bounding-rectangle-width viewport))
(slot-value region 'bottom) (max (bounding-rectangle-height pane)
(bounding-rectangle-height viewport)))
(note-sheet-region-changed pane))
(setf (stream-default-text-margin pane)
(bounding-rectangle-width (sheet-region viewport))))
(defmethod update-region ((sheet clim-stream-sheet) nleft ntop nright nbottom &key no-repaint)
(declare (ignore no-repaint))
(with-bounding-rectangle* (left top right bottom) sheet
(when (or (< nleft left)
(< ntop top)
(> nright right)
(> nbottom bottom))
;; It should be safe to modify the sheet's region
(let ((region (sheet-region sheet)))
(setf (slot-value region 'left) (min nleft left)
(slot-value region 'top) (min ntop top)
(slot-value region 'right) (max nright right)
(slot-value region 'bottom) (max nbottom bottom))
(note-sheet-region-changed sheet)))))
(defmethod invoke-with-drawing-options ((sheet clim-stream-sheet) continuation
&rest options)
(declare (dynamic-extent options))
;; Changed arglist to make dynamic-extent declaration effective. JPM Jan 98.
(let* ((ink (second (member :ink options)))
(medium (sheet-medium sheet))
(ink-changing (and ink (not (eq (medium-ink medium) ink)))))
(when ink-changing
;; Close the current output record if the drawing ink is changing
(stream-close-text-output-record sheet))
(multiple-value-prog1
(apply #'invoke-with-drawing-options medium continuation options)
(when ink-changing
;; If it changed on the way in, it's changing back on the way out
;; This might create more text output records that it should, but
;; better to be safe than sorry
(stream-close-text-output-record sheet)))))
(defmethod default-space-requirements ((pane clim-stream-sheet)
&key (min-width 1)
(width 100)
(max-width +fill+)
(min-height 1)
(height 100)
(max-height +fill+))
;; It seems to me that if (method resize-sheet (basic-sheet)) is going to
generate an error for min - width or min - height of zero , then compose - space
had better not suggest zeros by default . Got ta have at least one pixel . JPM
(values width min-width max-width height min-height max-height))
(defclass clim-stream-pane (clim-stream-sheet)
((incremental-redisplay-p
:initarg :incremental-redisplay :initform nil)
(display-function
:reader pane-display-function
:initarg :display-function :initform nil)
(display-time
:reader pane-display-time
:initarg :display-time :initform :command-loop
:type (member nil :command-loop :no-clear t))))
(defmethod (setf pane-needs-redisplay) (value (pane clim-stream-pane))
(with-slots (display-time) pane
(setf display-time value)))
(defmethod pane-needs-redisplay ((pane clim-stream-pane))
(declare (values needs-redisplay clear))
(with-slots (display-time) pane
(ecase display-time
((t)
(setq display-time nil)
(values t t))
((nil)
(values nil nil))
(:command-loop
(values t t))
(:no-clear
(values t nil)))))
(defmethod pane-needs-redisplay ((pane basic-pane))
(values nil nil))
(defmethod (setf pane-needs-redisplay) (value (pane basic-pane))
value)
;;--- Although the unit options are mostly applicable here I guess
;;--- other classes might want to use it also.
;;--- Perhaps we add a COMPOSE-SPACE method on pane which does this.
;;--- Perhaps the class hierarchy needs a big sort out.
#+Genera (zwei:defindentation (do-with-space-req-components 0 3 1 3 2 3 3 1))
(defmacro with-space-requirement ((sr &rest vars) &body body)
;; A handy macro that makes it slightly easy to manipulate space requirements
(unless vars
(setq vars '(sr-width sr-min-width sr-max-width sr-height sr-min-height sr-max-height)))
`(multiple-value-bind ,vars
(space-requirement-components ,sr)
(macrolet ((do-with-space-req-components (operator var vars &body body)
`(,operator
,@(mapcar #'(lambda (a-var)
`(symbol-macrolet ((,var ,a-var))
,@body))
vars)))
(make-sr ()
`,'(make-space-requirement
,@(mapcan #'list '(:width :min-width :max-width
:height :min-height :max-height)
vars))))
,@body)))
(defmethod compose-space ((pane clim-stream-pane) &key width height)
(compute-space-for-clim-stream-pane pane (call-next-method) width height))
(defun compute-space-for-clim-stream-pane (pane sr width height)
(labels
((process-compute-space-requirements ()
(with-space-requirement (sr)
(when (do-with-space-req-components or
sr-component
(sr-width sr-min-width sr-max-width
sr-height sr-min-height sr-max-height)
(eq sr-component :compute))
(multiple-value-bind (width height)
(let ((record
(let ((history (stream-output-history pane)))
(if (and history
(> (output-record-count history :fastp t) 0))
history
(let ((*sizing-application-frame* t))
(with-output-to-output-record (pane)
(funcall
(if (slot-value pane 'incremental-redisplay-p)
#'invoke-pane-redisplay-function
#'invoke-pane-display-function)
(pane-frame pane) pane
;;--- Are all pane display functions prepared to
;;--- ignore these arguments? I think not...
:max-width width
:max-height height)))))))
(with-bounding-rectangle* (left top right bottom) record
(values (- right (min 0 left))
(- bottom (min 0 top)))))
(when (zerop width) (setq width 100))
(when (zerop height) (setq height 100))
(flet ((process-computes (size preferred min max)
(values
(if (eq preferred :compute)
(let ((size size))
(when (numberp min) (maxf size min))
(when (numberp max) (minf size max))
size)
preferred)
(if (eq min :compute) size min)
(if (eq max :compute) size max))))
(multiple-value-setq (sr-width sr-min-width sr-max-width)
(process-computes width sr-width sr-min-width sr-max-width))
(multiple-value-setq (sr-height sr-min-height sr-max-height)
(process-computes height sr-height sr-min-height sr-max-height)))
#+(or ignore aclpc acl86win32)
(do-with-space-req-components progn
sr-component (sr-width sr-min-width sr-max-width)
(when (eq sr-component :compute)
(setq sr-component width)))
#+(or ignore aclpc acl86win32)
(do-with-space-req-components progn
sr-component (sr-height sr-min-height sr-max-height)
(when (eq sr-component :compute)
(setq sr-component height))))
(setq sr (make-sr)))))
(process-unit-space-requirements ()
(with-space-requirement (sr)
(let ((changed nil))
(do-with-space-req-components progn
sr-component
(sr-width sr-min-width sr-max-width
sr-height sr-min-height sr-max-height)
(when (unit-space-requirement-p sr-component)
(setq sr-component (process-unit-space-requirement pane sr-component)
changed t)))
(when changed
(setq sr (make-sr))))))
(process-relative-space-requirements ()
(with-space-requirement (sr)
(unless (and (numberp sr-width)
(numberp sr-height)
(do-with-space-req-components and
sr-component
(sr-min-width sr-max-width
sr-min-height sr-max-height)
(or (numberp sr-component)
(relative-space-requirement-p sr-component))))
(error "Illegal space requirement ~S" sr))
(let ((changed nil))
(when (relative-space-requirement-p sr-min-width)
(setq sr-min-width (- sr-width (process-unit-space-requirement
pane (car sr-min-width)))
changed t))
(when (relative-space-requirement-p sr-max-width)
(setq sr-max-width (+ sr-width (process-unit-space-requirement
pane (car sr-max-width)))
changed t))
(when (relative-space-requirement-p sr-min-height)
(setq sr-min-height (- sr-height (process-unit-space-requirement
pane (car sr-min-height)))
changed t))
(when (relative-space-requirement-p sr-max-height)
(setq sr-max-height (+ sr-height (process-unit-space-requirement
pane (car sr-max-height)))
changed t))
(when changed
(setq sr (make-sr)))))))
(declare (dynamic-extent #'process-compute-space-requirements
#'process-unit-space-requirements
#'process-relative-space-requirements))
(process-unit-space-requirements)
(process-compute-space-requirements)
(process-relative-space-requirements)
sr))
(defun relative-space-requirement-p (sr)
(and (consp sr)
(= (length sr) 2)
(or (numberp (second sr))
(unit-space-requirement-p (second sr)))))
(defun unit-space-requirement-p (sr)
(and (consp sr)
(= (length sr) 2)
(member (second sr) '(:line :character :mm :point :pixel))))
(defun process-unit-space-requirement (pane sr)
(destructuring-bind (number unit) sr
(let ((graft (or (graft pane)
(find-graft)))) ;--- is this right?
(ecase unit
(:pixel number)
(:mm (* number (/ (graft-pixel-width graft)
(graft-mm-width graft))))
(:point (* number (graft-pixels-per-point graft)))
(:character (* number (stream-string-width pane "M")))
(:line (+ (* number (stream-line-height pane))
(* (1- number) (stream-vertical-spacing pane))))))))
#+++ignore ;obsolete now that the default coordinate origin is :NW
(defmethod note-sheet-grafted :after ((pane clim-stream-pane))
(let ((xform (sheet-transformation pane)))
(setq xform (make-scaling-transformation 1 -1))
(setf (sheet-transformation pane) xform)))
;; This is a soon-to-be-obsolete method, but we need it for now when the
CLIM - STREAM - PANE is a child of the old - style viewport . It should n't
;; get called under the new viewport scheme.
#+++ignore ;obsolete now that the default coordinate origin is :NW
(defmethod allocate-space :after ((pane clim-stream-pane) width height)
(declare (ignore width height))
(ecase (graft-origin (graft pane))
(:nw)
(:sw
(let ((xform (sheet-transformation pane)))
(setq xform (make-scaling-transformation 1 -1))
;; Stream panes always have to have a parent to manage the
;; viewport clipping, etc.
(setq xform (compose-transformations
xform
(make-translation-transformation
0 (1- (bounding-rectangle-height (sheet-parent pane))))))
(setf (sheet-transformation pane) xform)))))
(defmethod pane-stream ((pane clim-stream-pane))
(unless (port pane)
(error "Can't call ~S on ~S until it's been grafted!"
'pane-stream pane))
pane)
;; This assumes that the stream-pane is always inside a viewport, which
;; actually defines its visible size. The stream pane's size is supposed
;; to represent the size of the contents, but may be stretched to fill the
;; available viewport space.
;; well this method breaks accepting-values :own-window t by ignoring
explicit : so I 'm going to ignore it and see what else
;; breaks instead. Then perhaps we can make a fix which satisfies both
constraints . ( cim 2/13/95 )
#+ignore
(defmethod change-space-requirements :around
((pane clim-stream-pane) &rest keys &key width height &allow-other-keys)
(declare (dynamic-extent keys))
;; Assume always called with width and height
(multiple-value-bind (history-width history-height)
(if (stream-output-history pane)
(bounding-rectangle-size (stream-output-history pane))
(values width height))
;; Don't ever shrink down smaller than our contents.
(if (and (numberp width)
(numberp height))
(apply #'call-next-method pane :width (max width history-width)
:height (max height history-height) keys)
(call-next-method))))
(defclass interactor-pane (clim-stream-pane) ())
(defclass application-pane (clim-stream-pane) ())
(defclass accept-values-pane (clim-stream-pane)
()
(:default-initargs :default-view +gadget-dialog-view+))
(eval-when (compile)
;; defined later in the compilation...
(declaim (special *default-menu-text-style*))
)
(defclass pointer-documentation-pane (clim-stream-pane) ()
(:default-initargs
:text-style *default-menu-text-style*))
(defclass title-pane (clim-stream-pane)
((display-string :initform nil :initarg :display-string)))
(defclass command-menu-pane (clim-stream-pane) ())
(defun make-clim-stream-pane-1 (framem frame
&rest options
&key (type 'clim-stream-pane)
label
(label-alignment #+Genera :bottom #-Genera :top)
(scroll-bars :vertical)
(borders t)
(display-after-commands nil dac-p)
background
name
&allow-other-keys)
(with-look-and-feel-realization (framem frame)
(setq options (remove-keywords options '(:type :scroll-bars :borders
:label :label-alignment
:display-after-commands)))
(when dac-p
(setf (getf options :display-time)
(cond ((eq display-after-commands t) :command-loop)
((eq display-after-commands :no-clear) :no-clear)
(t nil))))
(let* ((stream (apply #'make-pane type options))
(pane stream))
(when scroll-bars
(let ((scroller-pane-options
(if (consp scroll-bars)
`(:scroll-bars ,@scroll-bars)
`(:scroll-bars ,scroll-bars))))
(setq pane (apply #'make-pane 'scroller-pane
:contents pane
:name name
:background background
scroller-pane-options))))
(when label
(let ((label (if (stringp label)
(make-pane 'label-pane
:label label
:max-width +fill+
:background background)
(apply #'make-pane 'label-pane
:label (first label)
:max-width +fill+
:background background
(rest label)))))
(setq pane (make-pane 'vbox-pane
:contents
(ecase label-alignment
(:bottom (list pane label))
(:top (list label pane)))
:background background))))
(when borders
(setq pane
(make-pane 'outlined-pane
:name name
:thickness 1
:contents (make-pane 'spacing-pane
:name name
:thickness 1
:contents pane
:background background)
:background background)))
(values pane stream))))
(defmacro make-clim-interactor-pane (&rest options)
`(make-clim-stream-pane :type 'interactor-pane ,@options))
(defmacro make-clim-application-pane (&rest options)
`(make-clim-stream-pane :type 'application-pane ,@options))
;;; "Window protocol"
(defun-inline window-stream-p (x)
(typep x 'clim-stream-sheet))
(defmethod window-clear ((stream clim-stream-sheet))
(let ((medium (sheet-medium stream)))
(letf-globally (((medium-transformation medium) +identity-transformation+))
(clear-output-history stream)
(window-erase-viewport stream)
(when (extended-output-stream-p stream) ;can we assume this?
;; This is important since if the viewport position is at some
;; negative position then things get really confused since the
cursor might be visible at ( 0,0 ) and the extent is big
;; enough but...
;;--- This does a lot of uncessary expensive bitblting, but
;;--- how do we avoid it, since a lot of what it does we need
;;--- to do to reset the viewport
(scroll-extent stream 0 0)
(stream-set-cursor-position stream 0 0)
(setf (stream-baseline stream) (coordinate 0)
(stream-current-line-height stream) (coordinate 0)))
;; Flush the old mouse position relative to this window
;; so that we don't get bogus highlighted presentations
when menus first pop up .
#+++ignore ;--- what is this trying to do?
(let ((pointer (stream-primary-pointer stream)))
(when pointer
(setf (pointer-sheet pointer) nil)))
;; We need to do a FORCE-OUTPUT in case it is a long time before
;; anything gets drawn on the same stream.
(force-output stream))))
(defmethod window-refresh ((stream clim-stream-sheet))
(window-erase-viewport stream))
(defmethod window-refresh :after ((stream clim-stream-sheet))
(frame-replay *application-frame* stream)
(let ((text-record (stream-text-output-record stream)))
(when text-record (replay text-record stream)))
(let ((presentation (highlighted-presentation stream nil)))
(when presentation
(highlight-presentation
presentation (presentation-type presentation) stream :highlight))))
(defmethod window-refresh :around ((stream clim-stream-sheet))
(with-viewport-position-saved (stream)
(call-next-method)))
(defmethod window-erase-viewport ((stream clim-stream-sheet))
(let ((medium (sheet-medium stream)))
(multiple-value-call #'medium-clear-area
medium (bounding-rectangle* (window-viewport stream)))))
(defmethod window-expose ((stream clim-stream-sheet))
(setf (window-visibility stream) t))
;;--- Is there any way to do this?
(defmethod (setf window-label) (label (stream clim-stream-sheet))
(declare (ignore label))
nil)
(defmethod (setf window-visibility) (visibility (stream clim-stream-sheet))
(let ((frame (pane-frame stream)))
(if frame
(if visibility
(enable-frame frame)
(disable-frame frame))
(setf (sheet-enabled-p stream) visibility))))
(defmethod window-visibility ((stream clim-stream-sheet))
: Is the Unix code more correct ? ? ?
#+(or aclpc acl86win32)
(mirror-visible-p (port stream) stream)
#-(or aclpc acl86win32)
(let ((frame (pane-frame stream)))
(and (if frame
(eq (frame-state frame) :enabled)
(sheet-enabled-p stream))
(mirror-visible-p (port stream) stream)))
)
(defmethod window-viewport ((stream clim-stream-sheet))
;;;---why doesn't this return a viewport?? (cim 10/12/94)
(or (pane-viewport-region stream)
;; Not a scrolling pane, so the sheet's region is the viewport
(sheet-region stream)))
(defmethod window-viewport-position ((stream clim-stream-sheet))
(bounding-rectangle-position (window-viewport stream)))
(defmethod window-set-viewport-position ((stream clim-stream-sheet) x y)
(when (pane-viewport stream)
(scroll-extent stream x y)))
(defgeneric* (setf window-viewport-position) (x y stream))
(defmethod* (setf window-viewport-position) (x y (stream clim-stream-sheet))
(window-set-viewport-position stream x y))
(defmethod window-inside-edges ((stream clim-stream-sheet))
(bounding-rectangle* (sheet-region (or (pane-viewport stream) stream))))
(defun window-inside-left (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore top right bottom))
left))
(defun window-inside-top (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore left right bottom))
top))
(defun window-inside-right (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore left top bottom))
right))
(defun window-inside-bottom (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore left top right))
bottom))
(defmethod window-inside-size ((stream clim-stream-sheet))
(bounding-rectangle-size (window-viewport stream)))
(defmethod window-set-inside-size ((stream clim-stream-sheet) width height)
(change-space-requirements stream :width width :height height :resize-frame t))
(defmethod window-inside-width ((stream clim-stream-sheet))
(bounding-rectangle-width (window-viewport stream)))
(defmethod window-inside-height ((stream clim-stream-sheet))
(bounding-rectangle-height (window-viewport stream)))
(defmethod window-margins ((stream clim-stream-sheet))
(values (coordinate 0) (coordinate 0)
(coordinate 0) (coordinate 0)))
;; Compatibility alias: a window's parent is its sheet parent.
(defun-inline window-parent (window)
  (sheet-parent window))
;; Compatibility alias: a window's children are its sheet children.
(defun-inline window-children (window)
  (sheet-children window))
;; Compatibility alias: the root of a window is its graft.
(defun window-root (window)
  (graft window))
;; Compatibility alias: the enclosing top-level sheet of a window.
(defun-inline window-top-level-window (window)
  (sheet-top-level-sheet window))
(defmethod window-stack-on-bottom ((stream clim-stream-sheet))
  ;; Lower this stream's whole top-level window in the stacking order.
  (bury-sheet (window-top-level-window stream)))
(defmethod window-stack-on-top ((stream clim-stream-sheet))
  ;; Raise this stream's whole top-level window in the stacking order.
  (raise-sheet (window-top-level-window stream)))
(defun beep (&optional (stream *standard-output*))
  ;; Ring the bell on STREAM's medium.  Encapsulating streams delegate
  ;; to the stream they wrap.  Any other kind of stream is silently
  ;; ignored (the TYPECASE has no T clause), so BEEP is a no-op on
  ;; plain Lisp streams.
  (typecase stream
    (sheet
     (medium-beep (sheet-medium stream)))
    (encapsulating-stream
     (beep (encapsulating-stream-stream stream)))))
;; If you close window in a frame, just exit from the frame.
;; Otherwise, destroy directly mirrored sheets, or just disable the sheet.
;;--- This functionality is dubious
(defmethod close ((sheet clim-stream-sheet) &key abort)
  (declare (ignore abort))                ; ABORT is accepted for stream
                                          ; compatibility but unused here
  (let ((frame (pane-frame sheet)))
    (if frame
        (frame-exit frame)
        ;; Frameless sheet: tear down the window-system mirror when this
        ;; sheet owns one directly, otherwise just disable (hide) it.
        (if (sheet-direct-mirror sheet)
            (destroy-mirror (port sheet) sheet)
            (setf (sheet-enabled-p sheet) nil)))))
;; This is called by SCROLL-EXTENT.  It shifts a region of the "host screen"
;; that's visible to some other visible location.  It does NOT do any cleaning
;; up after itself.  It does not side-effect the output history of the window.
;; It calls COPY-AREA, whose contract is to do the above, the whole above, and
;; nothing but the above.
(defmethod window-shift-visible-region ((window clim-stream-sheet)
                                        old-left old-top old-right old-bottom
                                        new-left new-top new-right new-bottom)
  (declare (type coordinate new-left new-top new-right new-bottom))
  (declare (ignore old-right old-bottom new-right new-bottom))
  ;; DELTA-X/DELTA-Y measure how far the old viewport origin lies from the
  ;; new one; their signs determine which corner the copy starts from.
  (let ((delta-x (- old-left new-left))
        (delta-y (- old-top new-top)))
    (multiple-value-bind (stream-width stream-height)
        (bounding-rectangle-size (pane-viewport-region window))
      (declare (type coordinate stream-width stream-height))
      (let (from-x from-y)
        (cond ((and (>= delta-x 0)
                    (>= delta-y 0))
               ;; shifting down and to the right
               (setq from-x 0
                     from-y 0))
              ((and (>= delta-x 0)
                    (<= delta-y 0))
               ;; shifting up and to the right
               (setq from-x 0
                     from-y (- delta-y)))
              ((>= delta-y 0)
               ;; shifting down and to the left
               (setq from-x (- delta-x)
                     from-y 0))
              (t
               ;; shifting up and to the left
               (setq from-x (- delta-x)
                     from-y (- delta-y))))
        ;; The copied rectangle is the overlap of old and new viewports;
        ;; coordinates are converted to the medium's space before copying.
        (let ((width (- stream-width (abs delta-x)))
              (height (- stream-height (abs delta-y)))
              (transform (sheet-transformation window)))
          (multiple-value-call #'copy-area
            window
            (untransform-position transform from-x from-y)
            (untransform-distance transform width height)
            (untransform-position transform (+ from-x delta-x) (+ from-y delta-y))))))))
;;--- Why do we need this?
;; Fallback for non-CLIM-stream sheets: instead of bit-copying, clear the
;; overlapping area and ask the sheet to repaint it.
(defmethod window-shift-visible-region ((window t)
                                        old-left old-top old-right old-bottom
                                        new-left new-top new-right new-bottom)
  (multiple-value-bind (valid-p left top right bottom)
      (ltrb-overlaps-ltrb-p old-left old-top old-right old-bottom
                            new-left new-top new-right new-bottom)
    (when valid-p
      (with-sheet-medium (medium window)
        (medium-clear-area medium left top right bottom)
        (repaint-sheet window (make-bounding-rectangle left top right bottom))))))
;; Genera compatibility: answer the dynamic-window :INSIDE-SIZE message.
#+Genera
(defgeneric stream-compatible-inside-size (window)
  (:selector :inside-size))
#+Genera
(defmethod stream-compatible-inside-size ((window clim-stream-sheet))
  (bounding-rectangle-size (window-viewport window)))
;; Genera compatibility: answer the :VISIBLE-CURSORPOS-LIMITS message,
;; in pixels or in character cells depending on UNIT.
#+Genera
(defgeneric stream-compatible-visible-cursorpos-limits (window &optional unit)
  (:selector :visible-cursorpos-limits))
#+Genera
(defmethod stream-compatible-visible-cursorpos-limits
           ((window clim-stream-sheet) &optional (unit ':pixel))
  (with-bounding-rectangle* (left top right bottom) (window-viewport window)
    (ecase unit
      (:pixel (values left top right bottom))
      ;; Character units use the width of #\M as the cell width.
      (:character (let ((char-width (stream-character-width window #\M))
                        (line-height (stream-line-height window)))
                    (values (floor left char-width) (floor top line-height)
                            (floor right char-width) (floor bottom line-height)))))))
;; Genera compatibility: answer the :SIZE-IN-CHARACTERS message, using
;; the width of #\M as the character-cell width.
#+Genera
(defgeneric stream-compatible-size-in-characters (window)
  (:selector :size-in-characters))
#+Genera
(defmethod stream-compatible-size-in-characters ((window clim-stream-sheet))
  (with-bounding-rectangle* (left top right bottom) (window-viewport window)
    (let ((char-width (stream-character-width window #\M))
          (line-height (stream-line-height window)))
      (values (floor (- right left) char-width)
              (floor (- bottom top) line-height)))))
| null | https://raw.githubusercontent.com/franzinc/clim2/e8d03da80e1f000be40c37d088e283d95365bfdd/clim/db-stream.lisp | lisp | Syntax : ANSI - Common - Lisp ; Package : CLIM - INTERNALS ; Base : 10 ; Lowercase : Yes -*-
See the file LICENSE for the full license governing this code.
--- How to keep PANE-BACKGROUND/FOREGROUND in sync with the medium?
--- I'm not convinced that including WINDOW-STREAM here is right...
includes output recording
--- Do we still need this?
It should be safe to modify the sheet's region
It should be safe to modify the sheet's region
Changed arglist to make dynamic-extent declaration effective. JPM Jan 98.
Close the current output record if the drawing ink is changing
If it changed on the way in, it's changing back on the way out
This might create more text output records that it should, but
better to be safe than sorry
It seems to me that if (method resize-sheet (basic-sheet)) is going to
--- Although the unit options are mostly applicable here I guess
--- other classes might want to use it also.
--- Perhaps we add a COMPOSE-SPACE method on pane which does this.
--- Perhaps the class hierarchy needs a big sort out.
A handy macro that makes it slightly easy to manipulate space requirements
--- Are all pane display functions prepared to
--- ignore these arguments? I think not...
--- is this right?
obsolete now that the default coordinate origin is :NW
This is a soon-to-be-obsolete method, but we need it for now when the
get called under the new viewport scheme.
obsolete now that the default coordinate origin is :NW
Stream panes always have to have a parent to manage the
viewport clipping, etc.
This assumes that the stream-pane is always inside a viewport, which
actually defines its visible size. The stream pane's size is supposed
to represent the size of the contents, but may be stretched to fill the
available viewport space.
well this method breaks accepting-values :own-window t by ignoring
breaks instead. Then perhaps we can make a fix which satisfies both
Assume always called with width and height
Don't ever shrink down smaller than our contents.
defined later in the compilation...
"Window protocol"
can we assume this?
This is important since if the viewport position is at some
negative position then things get really confused since the
enough but...
--- This does a lot of uncessary expensive bitblting, but
--- how do we avoid it, since a lot of what it does we need
--- to do to reset the viewport
Flush the old mouse position relative to this window
so that we don't get bogus highlighted presentations
--- what is this trying to do?
We need to do a FORCE-OUTPUT in case it is a long time before
anything gets drawn on the same stream.
--- Is there any way to do this?
---why doesn't this return a viewport?? (cim 10/12/94)
Not a scrolling pane, so the sheet's region is the viewport
If you close window in a frame, just exit from the frame.
Otherwise, destroy directly mirrored sheets, or just disable the sheet.
--- This functionality is dubious
This is called by SCROLL-EXTENT. It shifts a region of the "host screen"
that's visible to some other visible location. It does NOT do any cleaning
up after itself. It does not side-effect the output history of the window.
It calls COPY-AREA, whose contract is to do the above, the whole above, and
nothing but the above.
shifting down and to the right
shifting up and to the right
shifting down and to the left
shifting up and to the left
--- Why do we need this? |
(in-package :clim-internals)
" Copyright ( c ) 1990 by International Lisp Associates . All rights reserved .
Portions copyright ( c ) 1991 , 1992 Franz , Inc. All rights reserved .
Portions copyright ( c ) 1989 , 1990 Symbolics , Inc. All rights reserved . "
CLIM stream sheets and panes
(defclass clim-stream-sheet
sheet-permanently-enabled-mixin
sheet-mute-input-mixin
sheet-multiple-child-mixin
space-requirement-mixin
space-requirement-cache-mixin
permanent-medium-sheet-output-mixin
basic-pane)
((input-editor-stream :initform nil :accessor stream-input-editor-stream))
(:default-initargs
:medium t
:transformation +identity-transformation+))
(defmethod stream-input-editor-stream ((stream sheet)) nil)
(defmethod (setf stream-input-editor-stream) (value (stream sheet))
value)
(defmethod stream-input-editor-stream ((stream standard-encapsulating-stream))
(stream-input-editor-stream (encapsulating-stream-stream stream)))
(defmethod (setf stream-input-editor-stream) (value (stream standard-encapsulating-stream))
(setf (stream-input-editor-stream (encapsulating-stream-stream stream)) value))
(defun maybe-redraw-input-editor-stream (stream region)
(let ((input-editor-stream (stream-input-editor-stream stream)))
(when input-editor-stream
(multiple-value-bind (x-pos y-pos)
(input-buffer-input-position->cursor-position input-editor-stream 0)
(when (region-contains-position-p (or region +everywhere+) x-pos y-pos)
(with-end-of-page-action (input-editor-stream :allow)
(redraw-input-buffer input-editor-stream)))))))
(defmethod handle-repaint :after ((sheet clim-stream-sheet) region)
(maybe-redraw-input-editor-stream sheet region))
(defmethod pane-stream ((pane clim-stream-sheet))
pane)
(defmethod note-sheet-region-changed :after ((pane clim-stream-sheet) &key &allow-other-keys)
(setf (stream-default-text-margin pane)
(bounding-rectangle-width (window-viewport pane))))
(defmethod viewport-region-changed ((pane t) viewport)
(declare (ignore viewport)))
(defmethod viewport-region-changed ((pane clim-stream-sheet) viewport)
(let ((region (sheet-region pane)))
(setf (slot-value region 'left) 0
(slot-value region 'top) 0
(slot-value region 'right) (max (bounding-rectangle-width pane)
(bounding-rectangle-width viewport))
(slot-value region 'bottom) (max (bounding-rectangle-height pane)
(bounding-rectangle-height viewport)))
(note-sheet-region-changed pane))
(setf (stream-default-text-margin pane)
(bounding-rectangle-width (sheet-region viewport))))
(defmethod update-region ((sheet clim-stream-sheet) nleft ntop nright nbottom &key no-repaint)
(declare (ignore no-repaint))
(with-bounding-rectangle* (left top right bottom) sheet
(when (or (< nleft left)
(< ntop top)
(> nright right)
(> nbottom bottom))
(let ((region (sheet-region sheet)))
(setf (slot-value region 'left) (min nleft left)
(slot-value region 'top) (min ntop top)
(slot-value region 'right) (max nright right)
(slot-value region 'bottom) (max nbottom bottom))
(note-sheet-region-changed sheet)))))
(defmethod invoke-with-drawing-options ((sheet clim-stream-sheet) continuation
&rest options)
(declare (dynamic-extent options))
(let* ((ink (second (member :ink options)))
(medium (sheet-medium sheet))
(ink-changing (and ink (not (eq (medium-ink medium) ink)))))
(when ink-changing
(stream-close-text-output-record sheet))
(multiple-value-prog1
(apply #'invoke-with-drawing-options medium continuation options)
(when ink-changing
(stream-close-text-output-record sheet)))))
(defmethod default-space-requirements ((pane clim-stream-sheet)
&key (min-width 1)
(width 100)
(max-width +fill+)
(min-height 1)
(height 100)
(max-height +fill+))
generate an error for min - width or min - height of zero , then compose - space
had better not suggest zeros by default . Got ta have at least one pixel . JPM
(values width min-width max-width height min-height max-height))
(defclass clim-stream-pane (clim-stream-sheet)
((incremental-redisplay-p
:initarg :incremental-redisplay :initform nil)
(display-function
:reader pane-display-function
:initarg :display-function :initform nil)
(display-time
:reader pane-display-time
:initarg :display-time :initform :command-loop
:type (member nil :command-loop :no-clear t))))
(defmethod (setf pane-needs-redisplay) (value (pane clim-stream-pane))
(with-slots (display-time) pane
(setf display-time value)))
(defmethod pane-needs-redisplay ((pane clim-stream-pane))
(declare (values needs-redisplay clear))
(with-slots (display-time) pane
(ecase display-time
((t)
(setq display-time nil)
(values t t))
((nil)
(values nil nil))
(:command-loop
(values t t))
(:no-clear
(values t nil)))))
(defmethod pane-needs-redisplay ((pane basic-pane))
(values nil nil))
(defmethod (setf pane-needs-redisplay) (value (pane basic-pane))
value)
#+Genera (zwei:defindentation (do-with-space-req-components 0 3 1 3 2 3 3 1))
(defmacro with-space-requirement ((sr &rest vars) &body body)
(unless vars
(setq vars '(sr-width sr-min-width sr-max-width sr-height sr-min-height sr-max-height)))
`(multiple-value-bind ,vars
(space-requirement-components ,sr)
(macrolet ((do-with-space-req-components (operator var vars &body body)
`(,operator
,@(mapcar #'(lambda (a-var)
`(symbol-macrolet ((,var ,a-var))
,@body))
vars)))
(make-sr ()
`,'(make-space-requirement
,@(mapcan #'list '(:width :min-width :max-width
:height :min-height :max-height)
vars))))
,@body)))
(defmethod compose-space ((pane clim-stream-pane) &key width height)
(compute-space-for-clim-stream-pane pane (call-next-method) width height))
(defun compute-space-for-clim-stream-pane (pane sr width height)
(labels
((process-compute-space-requirements ()
(with-space-requirement (sr)
(when (do-with-space-req-components or
sr-component
(sr-width sr-min-width sr-max-width
sr-height sr-min-height sr-max-height)
(eq sr-component :compute))
(multiple-value-bind (width height)
(let ((record
(let ((history (stream-output-history pane)))
(if (and history
(> (output-record-count history :fastp t) 0))
history
(let ((*sizing-application-frame* t))
(with-output-to-output-record (pane)
(funcall
(if (slot-value pane 'incremental-redisplay-p)
#'invoke-pane-redisplay-function
#'invoke-pane-display-function)
(pane-frame pane) pane
:max-width width
:max-height height)))))))
(with-bounding-rectangle* (left top right bottom) record
(values (- right (min 0 left))
(- bottom (min 0 top)))))
(when (zerop width) (setq width 100))
(when (zerop height) (setq height 100))
(flet ((process-computes (size preferred min max)
(values
(if (eq preferred :compute)
(let ((size size))
(when (numberp min) (maxf size min))
(when (numberp max) (minf size max))
size)
preferred)
(if (eq min :compute) size min)
(if (eq max :compute) size max))))
(multiple-value-setq (sr-width sr-min-width sr-max-width)
(process-computes width sr-width sr-min-width sr-max-width))
(multiple-value-setq (sr-height sr-min-height sr-max-height)
(process-computes height sr-height sr-min-height sr-max-height)))
#+(or ignore aclpc acl86win32)
(do-with-space-req-components progn
sr-component (sr-width sr-min-width sr-max-width)
(when (eq sr-component :compute)
(setq sr-component width)))
#+(or ignore aclpc acl86win32)
(do-with-space-req-components progn
sr-component (sr-height sr-min-height sr-max-height)
(when (eq sr-component :compute)
(setq sr-component height))))
(setq sr (make-sr)))))
(process-unit-space-requirements ()
(with-space-requirement (sr)
(let ((changed nil))
(do-with-space-req-components progn
sr-component
(sr-width sr-min-width sr-max-width
sr-height sr-min-height sr-max-height)
(when (unit-space-requirement-p sr-component)
(setq sr-component (process-unit-space-requirement pane sr-component)
changed t)))
(when changed
(setq sr (make-sr))))))
(process-relative-space-requirements ()
(with-space-requirement (sr)
(unless (and (numberp sr-width)
(numberp sr-height)
(do-with-space-req-components and
sr-component
(sr-min-width sr-max-width
sr-min-height sr-max-height)
(or (numberp sr-component)
(relative-space-requirement-p sr-component))))
(error "Illegal space requirement ~S" sr))
(let ((changed nil))
(when (relative-space-requirement-p sr-min-width)
(setq sr-min-width (- sr-width (process-unit-space-requirement
pane (car sr-min-width)))
changed t))
(when (relative-space-requirement-p sr-max-width)
(setq sr-max-width (+ sr-width (process-unit-space-requirement
pane (car sr-max-width)))
changed t))
(when (relative-space-requirement-p sr-min-height)
(setq sr-min-height (- sr-height (process-unit-space-requirement
pane (car sr-min-height)))
changed t))
(when (relative-space-requirement-p sr-max-height)
(setq sr-max-height (+ sr-height (process-unit-space-requirement
pane (car sr-max-height)))
changed t))
(when changed
(setq sr (make-sr)))))))
(declare (dynamic-extent #'process-compute-space-requirements
#'process-unit-space-requirements
#'process-relative-space-requirements))
(process-unit-space-requirements)
(process-compute-space-requirements)
(process-relative-space-requirements)
sr))
(defun relative-space-requirement-p (sr)
(and (consp sr)
(= (length sr) 2)
(or (numberp (second sr))
(unit-space-requirement-p (second sr)))))
(defun unit-space-requirement-p (sr)
(and (consp sr)
(= (length sr) 2)
(member (second sr) '(:line :character :mm :point :pixel))))
(defun process-unit-space-requirement (pane sr)
(destructuring-bind (number unit) sr
(let ((graft (or (graft pane)
(ecase unit
(:pixel number)
(:mm (* number (/ (graft-pixel-width graft)
(graft-mm-width graft))))
(:point (* number (graft-pixels-per-point graft)))
(:character (* number (stream-string-width pane "M")))
(:line (+ (* number (stream-line-height pane))
(* (1- number) (stream-vertical-spacing pane))))))))
(defmethod note-sheet-grafted :after ((pane clim-stream-pane))
(let ((xform (sheet-transformation pane)))
(setq xform (make-scaling-transformation 1 -1))
(setf (sheet-transformation pane) xform)))
CLIM - STREAM - PANE is a child of the old - style viewport . It should n't
(defmethod allocate-space :after ((pane clim-stream-pane) width height)
(declare (ignore width height))
(ecase (graft-origin (graft pane))
(:nw)
(:sw
(let ((xform (sheet-transformation pane)))
(setq xform (make-scaling-transformation 1 -1))
(setq xform (compose-transformations
xform
(make-translation-transformation
0 (1- (bounding-rectangle-height (sheet-parent pane))))))
(setf (sheet-transformation pane) xform)))))
(defmethod pane-stream ((pane clim-stream-pane))
(unless (port pane)
(error "Can't call ~S on ~S until it's been grafted!"
'pane-stream pane))
pane)
explicit : so I 'm going to ignore it and see what else
constraints . ( cim 2/13/95 )
#+ignore
(defmethod change-space-requirements :around
((pane clim-stream-pane) &rest keys &key width height &allow-other-keys)
(declare (dynamic-extent keys))
(multiple-value-bind (history-width history-height)
(if (stream-output-history pane)
(bounding-rectangle-size (stream-output-history pane))
(values width height))
(if (and (numberp width)
(numberp height))
(apply #'call-next-method pane :width (max width history-width)
:height (max height history-height) keys)
(call-next-method))))
(defclass interactor-pane (clim-stream-pane) ())
(defclass application-pane (clim-stream-pane) ())
(defclass accept-values-pane (clim-stream-pane)
()
(:default-initargs :default-view +gadget-dialog-view+))
(eval-when (compile)
(declaim (special *default-menu-text-style*))
)
(defclass pointer-documentation-pane (clim-stream-pane) ()
(:default-initargs
:text-style *default-menu-text-style*))
(defclass title-pane (clim-stream-pane)
((display-string :initform nil :initarg :display-string)))
(defclass command-menu-pane (clim-stream-pane) ())
(defun make-clim-stream-pane-1 (framem frame
&rest options
&key (type 'clim-stream-pane)
label
(label-alignment #+Genera :bottom #-Genera :top)
(scroll-bars :vertical)
(borders t)
(display-after-commands nil dac-p)
background
name
&allow-other-keys)
(with-look-and-feel-realization (framem frame)
(setq options (remove-keywords options '(:type :scroll-bars :borders
:label :label-alignment
:display-after-commands)))
(when dac-p
(setf (getf options :display-time)
(cond ((eq display-after-commands t) :command-loop)
((eq display-after-commands :no-clear) :no-clear)
(t nil))))
(let* ((stream (apply #'make-pane type options))
(pane stream))
(when scroll-bars
(let ((scroller-pane-options
(if (consp scroll-bars)
`(:scroll-bars ,@scroll-bars)
`(:scroll-bars ,scroll-bars))))
(setq pane (apply #'make-pane 'scroller-pane
:contents pane
:name name
:background background
scroller-pane-options))))
(when label
(let ((label (if (stringp label)
(make-pane 'label-pane
:label label
:max-width +fill+
:background background)
(apply #'make-pane 'label-pane
:label (first label)
:max-width +fill+
:background background
(rest label)))))
(setq pane (make-pane 'vbox-pane
:contents
(ecase label-alignment
(:bottom (list pane label))
(:top (list label pane)))
:background background))))
(when borders
(setq pane
(make-pane 'outlined-pane
:name name
:thickness 1
:contents (make-pane 'spacing-pane
:name name
:thickness 1
:contents pane
:background background)
:background background)))
(values pane stream))))
(defmacro make-clim-interactor-pane (&rest options)
`(make-clim-stream-pane :type 'interactor-pane ,@options))
(defmacro make-clim-application-pane (&rest options)
`(make-clim-stream-pane :type 'application-pane ,@options))
(defun-inline window-stream-p (x)
(typep x 'clim-stream-sheet))
(defmethod window-clear ((stream clim-stream-sheet))
(let ((medium (sheet-medium stream)))
(letf-globally (((medium-transformation medium) +identity-transformation+))
(clear-output-history stream)
(window-erase-viewport stream)
cursor might be visible at ( 0,0 ) and the extent is big
(scroll-extent stream 0 0)
(stream-set-cursor-position stream 0 0)
(setf (stream-baseline stream) (coordinate 0)
(stream-current-line-height stream) (coordinate 0)))
when menus first pop up .
(let ((pointer (stream-primary-pointer stream)))
(when pointer
(setf (pointer-sheet pointer) nil)))
(force-output stream))))
(defmethod window-refresh ((stream clim-stream-sheet))
(window-erase-viewport stream))
(defmethod window-refresh :after ((stream clim-stream-sheet))
(frame-replay *application-frame* stream)
(let ((text-record (stream-text-output-record stream)))
(when text-record (replay text-record stream)))
(let ((presentation (highlighted-presentation stream nil)))
(when presentation
(highlight-presentation
presentation (presentation-type presentation) stream :highlight))))
(defmethod window-refresh :around ((stream clim-stream-sheet))
(with-viewport-position-saved (stream)
(call-next-method)))
(defmethod window-erase-viewport ((stream clim-stream-sheet))
(let ((medium (sheet-medium stream)))
(multiple-value-call #'medium-clear-area
medium (bounding-rectangle* (window-viewport stream)))))
(defmethod window-expose ((stream clim-stream-sheet))
(setf (window-visibility stream) t))
(defmethod (setf window-label) (label (stream clim-stream-sheet))
(declare (ignore label))
nil)
(defmethod (setf window-visibility) (visibility (stream clim-stream-sheet))
(let ((frame (pane-frame stream)))
(if frame
(if visibility
(enable-frame frame)
(disable-frame frame))
(setf (sheet-enabled-p stream) visibility))))
(defmethod window-visibility ((stream clim-stream-sheet))
: Is the Unix code more correct ? ? ?
#+(or aclpc acl86win32)
(mirror-visible-p (port stream) stream)
#-(or aclpc acl86win32)
(let ((frame (pane-frame stream)))
(and (if frame
(eq (frame-state frame) :enabled)
(sheet-enabled-p stream))
(mirror-visible-p (port stream) stream)))
)
(defmethod window-viewport ((stream clim-stream-sheet))
(or (pane-viewport-region stream)
(sheet-region stream)))
(defmethod window-viewport-position ((stream clim-stream-sheet))
(bounding-rectangle-position (window-viewport stream)))
(defmethod window-set-viewport-position ((stream clim-stream-sheet) x y)
(when (pane-viewport stream)
(scroll-extent stream x y)))
(defgeneric* (setf window-viewport-position) (x y stream))
(defmethod* (setf window-viewport-position) (x y (stream clim-stream-sheet))
(window-set-viewport-position stream x y))
(defmethod window-inside-edges ((stream clim-stream-sheet))
(bounding-rectangle* (sheet-region (or (pane-viewport stream) stream))))
(defun window-inside-left (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore top right bottom))
left))
(defun window-inside-top (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore left right bottom))
top))
(defun window-inside-right (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore left top bottom))
right))
(defun window-inside-bottom (stream)
(multiple-value-bind (left top right bottom)
(window-inside-edges stream)
(declare (ignore left top right))
bottom))
(defmethod window-inside-size ((stream clim-stream-sheet))
(bounding-rectangle-size (window-viewport stream)))
(defmethod window-set-inside-size ((stream clim-stream-sheet) width height)
(change-space-requirements stream :width width :height height :resize-frame t))
(defmethod window-inside-width ((stream clim-stream-sheet))
(bounding-rectangle-width (window-viewport stream)))
(defmethod window-inside-height ((stream clim-stream-sheet))
(bounding-rectangle-height (window-viewport stream)))
(defmethod window-margins ((stream clim-stream-sheet))
(values (coordinate 0) (coordinate 0)
(coordinate 0) (coordinate 0)))
(defun-inline window-parent (window)
(sheet-parent window))
(defun-inline window-children (window)
(sheet-children window))
(defun window-root (window)
(graft window))
(defun-inline window-top-level-window (window)
(sheet-top-level-sheet window))
(defmethod window-stack-on-bottom ((stream clim-stream-sheet))
(bury-sheet (window-top-level-window stream)))
(defmethod window-stack-on-top ((stream clim-stream-sheet))
(raise-sheet (window-top-level-window stream)))
(defun beep (&optional (stream *standard-output*))
(typecase stream
(sheet
(medium-beep (sheet-medium stream)))
(encapsulating-stream
(beep (encapsulating-stream-stream stream)))))
(defmethod close ((sheet clim-stream-sheet) &key abort)
(declare (ignore abort))
(let ((frame (pane-frame sheet)))
(if frame
(frame-exit frame)
(if (sheet-direct-mirror sheet)
(destroy-mirror (port sheet) sheet)
(setf (sheet-enabled-p sheet) nil)))))
(defmethod window-shift-visible-region ((window clim-stream-sheet)
old-left old-top old-right old-bottom
new-left new-top new-right new-bottom)
(declare (type coordinate new-left new-top new-right new-bottom))
(declare (ignore old-right old-bottom new-right new-bottom))
(let ((delta-x (- old-left new-left))
(delta-y (- old-top new-top)))
(multiple-value-bind (stream-width stream-height)
(bounding-rectangle-size (pane-viewport-region window))
(declare (type coordinate stream-width stream-height))
(let (from-x from-y)
(cond ((and (>= delta-x 0)
(>= delta-y 0))
(setq from-x 0
from-y 0))
((and (>= delta-x 0)
(<= delta-y 0))
(setq from-x 0
from-y (- delta-y)))
((>= delta-y 0)
(setq from-x (- delta-x)
from-y 0))
(t
(setq from-x (- delta-x)
from-y (- delta-y))))
(let ((width (- stream-width (abs delta-x)))
(height (- stream-height (abs delta-y)))
(transform (sheet-transformation window)))
(multiple-value-call #'copy-area
window
(untransform-position transform from-x from-y)
(untransform-distance transform width height)
(untransform-position transform (+ from-x delta-x) (+ from-y delta-y))))))))
(defmethod window-shift-visible-region ((window t)
old-left old-top old-right old-bottom
new-left new-top new-right new-bottom)
(multiple-value-bind (valid-p left top right bottom)
(ltrb-overlaps-ltrb-p old-left old-top old-right old-bottom
new-left new-top new-right new-bottom)
(when valid-p
(with-sheet-medium (medium window)
(medium-clear-area medium left top right bottom)
(repaint-sheet window (make-bounding-rectangle left top right bottom))))))
#+Genera
(defgeneric stream-compatible-inside-size (window)
(:selector :inside-size))
#+Genera
(defmethod stream-compatible-inside-size ((window clim-stream-sheet))
(bounding-rectangle-size (window-viewport window)))
#+Genera
(defgeneric stream-compatible-visible-cursorpos-limits (window &optional unit)
(:selector :visible-cursorpos-limits))
#+Genera
(defmethod stream-compatible-visible-cursorpos-limits
((window clim-stream-sheet) &optional (unit ':pixel))
(with-bounding-rectangle* (left top right bottom) (window-viewport window)
(ecase unit
(:pixel (values left top right bottom))
(:character (let ((char-width (stream-character-width window #\M))
(line-height (stream-line-height window)))
(values (floor left char-width) (floor top line-height)
(floor right char-width) (floor bottom line-height)))))))
#+Genera
(defgeneric stream-compatible-size-in-characters (window)
(:selector :size-in-characters))
#+Genera
(defmethod stream-compatible-size-in-characters ((window clim-stream-sheet))
(with-bounding-rectangle* (left top right bottom) (window-viewport window)
(let ((char-width (stream-character-width window #\M))
(line-height (stream-line-height window)))
(values (floor (- right left) char-width)
(floor (- bottom top) line-height)))))
|
58e0cbac5961de48300fd383e78b0f93cac947d4a5f34dd55fa9a27814fdd729 | Feuerlabs/yang | yang_scan_nif.erl | %%%---- BEGIN COPYRIGHT -------------------------------------------------------
%%%
Copyright ( C ) 2007 - 2012 , Rogvall Invest AB , < >
%%%
%%% This software is licensed as described in the file COPYRIGHT, which
%%% you should have received as part of this distribution. The terms
%%% are also available at .
%%%
%%% You may opt to use, copy, modify, merge, publish, distribute and/or sell
copies of the Software , and permit persons to whom the Software is
%%% furnished to do so, under the terms of the COPYRIGHT file.
%%%
This software is distributed on an " AS IS " basis , WITHOUT WARRANTY OF ANY
%%% KIND, either express or implied.
%%%
%%%---- END COPYRIGHT ---------------------------------------------------------
@author < >
%%% @doc
scanner
%%% @end
Created : 4 Jan 2012 by < >
-module(yang_scan_nif).
-on_load(init/0).
-export([open/1, open/2, string/1, next/1, push_back/2, close/1]).
-export([all/1, all/2]).
-export([file/1, file/2]).
%% nif exports
-export([new/0, next_token/1, next_token/2]).
-import(lists, [reverse/1]).
-type line() :: pos_integer().
-type token() :: {string,line(),string()} |
{word,line(),string()} |
{'{',line()} |
{'}',line()} |
{';',line()}.
-record(yang_scan,
{
tokens = [], %% token buffer
scanner, %% nif scanner state
stream %% stream data
}).
%% @private
%% on_load callback: load the scanner NIF from yang's priv directory.
%% Returns ok | {error, Reason} as required by erlang:load_nif/2.
%% (The previous version printed a "Loading: ..." line on every module
%% load; that debug noise is removed.)
init() ->
    Nif = filename:join(code:priv_dir(yang), "yang_drv"),
    erlang:load_nif(Nif, 0).
%% @private NIF stub: create a new scanner state.  Replaced by the
%% native implementation once the NIF library is loaded.
new() ->
    erlang:error(nif_not_loaded).
%% @private NIF stub: pull the next token from the scanner's buffered
%% input.  Replaced by the native implementation on load.
next_token(_Scanner) ->
    erlang:error(nif_not_loaded).
%% @private NIF stub: feed Binary into the scanner and pull the next
%% token.  Replaced by the native implementation on load.
next_token(_Scanner,_Binary) ->
    erlang:error(nif_not_loaded).
%% @doc Tokenize the YANG file File with default options.
file(File) ->
    file(File, []).
%% @doc Tokenize the YANG file File.  Returns {ok, Tokens} or
%% {error, Reason}.  The scanner is closed via try/after even if
%% tokenization raises, so the underlying file descriptor cannot leak
%% (the previous version skipped close/1 on exceptions).
file(File, Opts) ->
    case open(File, Opts) of
        {ok, Scan} ->
            try
                all(Scan)
            after
                close(Scan)
            end;
        Error ->
            Error
    end.
%% @doc Open File for scanning with default options.
open(File) ->
    open(File, []).
%% @doc Open File for scanning.  Recognized option:
%%   {chunk_size, N} - bytes read from the file per scanner refill
%%                     (default 1024).
%% All options are also passed through to yang_scan_erl:open_file/2.
open(File,Opts) ->
    ChunkSize = proplists:get_value(chunk_size, Opts, 1024),
    case yang_scan_erl:open_file(File, Opts) of
        {ok,Fd} ->
            Scanner = new(),
            {ok, #yang_scan { scanner = Scanner, stream={Fd,ChunkSize}}};
        Error ->
            Error
    end.
%% @doc Build a scanner over an in-memory binary or string (list)
%% instead of a file.  Returns {ok, Scan}.
string(Binary) when is_binary(Binary) ->
    Scanner = new(),
    Token = next_token(Scanner, Binary),
    %% FIXME: check for more!
    {ok, #yang_scan { scanner=Scanner, tokens=[Token] }};
string(List) when is_list(List) ->
    %% Normalize to a binary and reuse the binary clause rather than
    %% duplicating its logic (the two clauses had drifted into copies).
    string(list_to_binary(List)).
%% @doc Close the scanner's underlying file, if any.  A scanner built
%% by string/1 has no stream and closing it is a no-op.
close(#yang_scan { stream=undefined }) ->
    ok;
close(#yang_scan { stream={Fd,_} }) ->
    %% cleanup scanner ? this is now delayed until garbage collection
    file:close(Fd).
%% @doc Collect every remaining token from Scan as {ok, Tokens},
%% in input order, or {error, Reason}.
all(Scan) ->
    all(Scan, []).
%% Tail-recursive worker for all/1: prepend tokens onto Acc and reverse
%% once at end-of-input.
all(Scan, Acc) ->
    case next(Scan) of
        eof ->
            {ok, reverse(Acc)};
        {error, _} = Error ->
            Error;
        {Token, NextScan} ->
            all(NextScan, [Token | Acc])
    end.
%% @doc
%% Unread a token from the token strem.  The token is placed at the
%% front of the scanner's buffer, so the next call to next/1 returns
%% it again.
%% @end
-spec push_back(Token::token(), Scan::#yang_scan{}) ->
                       #yang_scan{}.
push_back(Token, Scan = #yang_scan { tokens=Ts}) ->
    Scan#yang_scan { tokens=[Token|Ts] }.
%% @doc
%% Read next token, while handle string + string cases
%% @end
-spec next(Scan::#yang_scan{}) ->
{token(), #yang_scan{}} |
eof |
{error, term()}.
next(Scan) ->
case load_token(Scan) of
{Token={string,_,_},Scan1} ->
next_(Scan1, Token);
Other ->
Other
end.
next_(Scan, StringToken) ->
case load_token(Scan) of
{PlusToken={word,_,<<"+">>},Scan1} ->
next_(Scan1, StringToken, PlusToken);
Error = {error,_} ->
Error;
{Token,Scan1} ->
{StringToken,Scan1#yang_scan { tokens=[Token] }};
eof ->
{StringToken,Scan#yang_scan { tokens=[eof] }}
end.
next_(Scan, StringToken={string,Ln,Str1}, PlusToken) ->
case load_token(Scan) of
{{string,_,Str2},Scan1} ->
next_(Scan1, {string,Ln,<<Str1/binary,Str2/binary>>});
Error = {error,_} ->
Error;
{Token,Scan1} ->
{StringToken,Scan1#yang_scan { tokens=[PlusToken,Token] }};
eof ->
{StringToken,Scan#yang_scan { tokens=[PlusToken,eof] }}
end.
load_token(Y=#yang_scan { tokens=[T|Ts]}) ->
{T, Y#yang_scan { tokens=Ts }};
load_token(Y=#yang_scan { stream=S,tokens=[],scanner=Scanner}) ->
case next_token(Scanner) of
more ->
case load_more(Scanner, S) of
eof -> eof;
Error = {error,_} ->Error;
Token -> {Token, Y}
end;
Token ->
{Token,Y}
end.
load_more(Scanner, S) ->
case read(S) of
{ok,Binary} ->
case next_token(Scanner, Binary) of
more ->
load_more(Scanner, S);
Token ->
Token
end;
Reason ->
Reason
end.
read(undefined) ->
eof;
read({Fd,Size}) ->
case file:read(Fd, Size) of
{ok,Bin} -> {ok,Bin};
Error -> Error
end.
| null | https://raw.githubusercontent.com/Feuerlabs/yang/92330c742cdd2a8e7ce07f99b34c0fe761806e82/src/yang_scan_nif.erl | erlang | ---- BEGIN COPYRIGHT -------------------------------------------------------
This software is licensed as described in the file COPYRIGHT, which
you should have received as part of this distribution. The terms
are also available at .
You may opt to use, copy, modify, merge, publish, distribute and/or sell
furnished to do so, under the terms of the COPYRIGHT file.
KIND, either express or implied.
---- END COPYRIGHT ---------------------------------------------------------
@doc
@end
nif exports
token buffer
nif scanner state
stream data
FIXME: check for more!
FIXME: check for more!
cleanup scanner ? this is now delayed until garbage collection
@doc
Unread a token from the token strem.
@end
@doc
Read next token, while handle string + string cases
@end | Copyright ( C ) 2007 - 2012 , Rogvall Invest AB , < >
copies of the Software , and permit persons to whom the Software is
This software is distributed on an " AS IS " basis , WITHOUT WARRANTY OF ANY
@author < >
scanner
Created : 4 Jan 2012 by < >
-module(yang_scan_nif).
-on_load(init/0).
-export([open/1, open/2, string/1, next/1, push_back/2, close/1]).
-export([all/1, all/2]).
-export([file/1, file/2]).
-export([new/0, next_token/1, next_token/2]).
-import(lists, [reverse/1]).
-type line() :: pos_integer().
-type token() :: {string,line(),string()} |
{word,line(),string()} |
{'{',line()} |
{'}',line()} |
{';',line()}.
-record(yang_scan,
{
}).
init() ->
Nif = filename:join(code:priv_dir(yang),"yang_drv"),
io:format("Loading: ~s\n", [Nif]),
erlang:load_nif(Nif, 0).
new() ->
erlang:error(nif_not_loaded).
next_token(_Scanner) ->
erlang:error(nif_not_loaded).
next_token(_Scanner,_Binary) ->
erlang:error(nif_not_loaded).
file(File) ->
file(File, []).
file(File, Opts) ->
case open(File, Opts) of
{ok,Scan} ->
Res = all(Scan),
close(Scan),
Res;
Error ->
Error
end.
open(File) ->
open(File, []).
open(File,Opts) ->
ChunkSize = proplists:get_value(chunk_size, Opts, 1024),
case yang_scan_erl:open_file(File, Opts) of
{ok,Fd} ->
Scanner = new(),
{ok, #yang_scan { scanner = Scanner, stream={Fd,ChunkSize}}};
Error ->
Error
end.
string(Binary) when is_binary(Binary) ->
Scanner = new(),
Token = next_token(Scanner, Binary),
{ok, #yang_scan { scanner=Scanner, tokens=[Token] }};
string(List) when is_list(List) ->
Scanner = new(),
Token = next_token(Scanner, list_to_binary(List)),
{ok, #yang_scan { scanner=Scanner, tokens=[Token] }}.
close(#yang_scan { stream=undefined }) ->
ok;
close(#yang_scan { stream={Fd,_} }) ->
file:close(Fd).
all(Scan) ->
all(Scan, []).
all(Scan, Acc) ->
case next(Scan) of
Error = {error,_} ->
Error;
eof ->
{ok,reverse(Acc)};
{Token,Scan1} ->
all(Scan1,[Token|Acc])
end.
-spec push_back(Token::token(), Scan::#yang_scan{}) ->
#yang_scan{}.
push_back(Token, Scan = #yang_scan { tokens=Ts}) ->
Scan#yang_scan { tokens=[Token|Ts] }.
-spec next(Scan::#yang_scan{}) ->
{token(), #yang_scan{}} |
eof |
{error, term()}.
next(Scan) ->
case load_token(Scan) of
{Token={string,_,_},Scan1} ->
next_(Scan1, Token);
Other ->
Other
end.
next_(Scan, StringToken) ->
case load_token(Scan) of
{PlusToken={word,_,<<"+">>},Scan1} ->
next_(Scan1, StringToken, PlusToken);
Error = {error,_} ->
Error;
{Token,Scan1} ->
{StringToken,Scan1#yang_scan { tokens=[Token] }};
eof ->
{StringToken,Scan#yang_scan { tokens=[eof] }}
end.
next_(Scan, StringToken={string,Ln,Str1}, PlusToken) ->
case load_token(Scan) of
{{string,_,Str2},Scan1} ->
next_(Scan1, {string,Ln,<<Str1/binary,Str2/binary>>});
Error = {error,_} ->
Error;
{Token,Scan1} ->
{StringToken,Scan1#yang_scan { tokens=[PlusToken,Token] }};
eof ->
{StringToken,Scan#yang_scan { tokens=[PlusToken,eof] }}
end.
load_token(Y=#yang_scan { tokens=[T|Ts]}) ->
{T, Y#yang_scan { tokens=Ts }};
load_token(Y=#yang_scan { stream=S,tokens=[],scanner=Scanner}) ->
case next_token(Scanner) of
more ->
case load_more(Scanner, S) of
eof -> eof;
Error = {error,_} ->Error;
Token -> {Token, Y}
end;
Token ->
{Token,Y}
end.
load_more(Scanner, S) ->
case read(S) of
{ok,Binary} ->
case next_token(Scanner, Binary) of
more ->
load_more(Scanner, S);
Token ->
Token
end;
Reason ->
Reason
end.
read(undefined) ->
eof;
read({Fd,Size}) ->
case file:read(Fd, Size) of
{ok,Bin} -> {ok,Bin};
Error -> Error
end.
|
e144df0c4cb6e67b0611871208a95502e287e5a60b0b8d748a262eae91d916ea | kaol/heist-tutorial | Examples.hs | module Tutorial.Examples
( examplesMap
, launchExample
, example1
, example2
, example3
, example4
) where
{-
If you are looking for the example sources, look for the numbered
Example files.
-}
import Tutorial.Example1
import Tutorial.Example2
import Tutorial.Example3
import Tutorial.Example4
import Site
import Snap.Snaplet
import Snap.Http.Server.Config
import Data.Map.Syntax
import Data.Map.Lazy
import Application
allExamples :: [Example]
allExamples =
[ example1Data
, example2Data
, example3Data
, example4Data
]
launchExample :: Example -> IO ()
launchExample ex = serveSnaplet defaultConfig (app ex)
examplesMap :: Map String Example
examplesMap = either (const empty) id $ runMap $
mapM (\ex -> (examplePath ex) ## ex) allExamples
example1 :: IO ()
example1 = launchExample example1Data
example2 :: IO ()
example2 = launchExample example2Data
example3 :: IO ()
example3 = launchExample example3Data
example4 :: IO ()
example4 = launchExample example4Data
| null | https://raw.githubusercontent.com/kaol/heist-tutorial/c85d4a7165dd39ab6e0c674c5e382a42dbd9fca8/src/Tutorial/Examples.hs | haskell |
If you are looking for the example sources, look for the numbered
Example files.
| module Tutorial.Examples
( examplesMap
, launchExample
, example1
, example2
, example3
, example4
) where
import Tutorial.Example1
import Tutorial.Example2
import Tutorial.Example3
import Tutorial.Example4
import Site
import Snap.Snaplet
import Snap.Http.Server.Config
import Data.Map.Syntax
import Data.Map.Lazy
import Application
allExamples :: [Example]
allExamples =
[ example1Data
, example2Data
, example3Data
, example4Data
]
launchExample :: Example -> IO ()
launchExample ex = serveSnaplet defaultConfig (app ex)
examplesMap :: Map String Example
examplesMap = either (const empty) id $ runMap $
mapM (\ex -> (examplePath ex) ## ex) allExamples
example1 :: IO ()
example1 = launchExample example1Data
example2 :: IO ()
example2 = launchExample example2Data
example3 :: IO ()
example3 = launchExample example3Data
example4 :: IO ()
example4 = launchExample example4Data
|
06ec138de4bbceda656a1a942b357964a2188354422caa9429b6eabbadcef3e7 | AccelerateHS/accelerate | RealFloat.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
-- |
-- Module : Data.Array.Accelerate.Classes.RealFloat
Copyright : [ 2016 .. 2020 ] The Accelerate Team
-- License : BSD3
--
Maintainer : < >
-- Stability : experimental
Portability : non - portable ( GHC extensions )
--
module Data.Array.Accelerate.Classes.RealFloat (
RealFloat(..),
) where
import Data.Array.Accelerate.Error
import Data.Array.Accelerate.Language ( cond, while )
import Data.Array.Accelerate.Pattern
import Data.Array.Accelerate.Smart
import Data.Array.Accelerate.Type
import Data.Array.Accelerate.Data.Bits
import Data.Array.Accelerate.Classes.Eq
import Data.Array.Accelerate.Classes.Floating
import Data.Array.Accelerate.Classes.FromIntegral
import Data.Array.Accelerate.Classes.Num
import Data.Array.Accelerate.Classes.Ord
import Data.Array.Accelerate.Classes.RealFrac
import Data.Text.Lazy.Builder
import Formatting
import Text.Printf
import Prelude ( (.), ($), String, error, undefined, unlines, otherwise )
import qualified Prelude as P
-- | Efficient, machine-independent access to the components of a floating-point
-- number
--
class (RealFrac a, Floating a) => RealFloat a where
| The radix of the representation ( often 2 ) ( constant )
Integer
default floatRadix :: P.RealFloat a => Exp a -> Exp Int64
floatRadix _ = P.fromInteger (P.floatRadix (undefined::a))
-- | The number of digits of 'floatRadix' in the significand (constant)
floatDigits :: Exp a -> Exp Int
default floatDigits :: P.RealFloat a => Exp a -> Exp Int
floatDigits _ = constant (P.floatDigits (undefined::a))
-- | The lowest and highest values the exponent may assume (constant)
floatRange :: Exp a -> (Exp Int, Exp Int)
default floatRange :: P.RealFloat a => Exp a -> (Exp Int, Exp Int)
floatRange _ = let (m,n) = P.floatRange (undefined::a)
in (constant m, constant n)
-- | Return the significand and an appropriately scaled exponent. If
@(m , n ) = ' decodeFloat ' x@ then @x = m*b^^n@ , where @b@ is the
floating - point radix ( ' floatRadix ' ) . Furthermore , either @m@ and @n@ are
both zero , or @b^(d-1 ) < = ' abs ' m < b^d@ , where = ' floatDigits ' x@.
Integer
-- | Inverse of 'decodeFloat'
Integer
default encodeFloat :: (FromIntegral Int a, FromIntegral Int64 a) => Exp Int64 -> Exp Int -> Exp a
encodeFloat x e = fromIntegral x * (fromIntegral (floatRadix (undefined :: Exp a)) ** fromIntegral e)
| Corresponds to the second component of ' decodeFloat '
exponent :: Exp a -> Exp Int
exponent x = let (m,n) = decodeFloat x
in cond (m == 0)
0
(n + floatDigits x)
| Corresponds to the first component of ' decodeFloat '
significand :: Exp a -> Exp a
significand x = let (m,_) = decodeFloat x
in encodeFloat m (negate (floatDigits x))
-- | Multiply a floating point number by an integer power of the radix
scaleFloat :: Exp Int -> Exp a -> Exp a
scaleFloat k x =
cond (k == 0 || isFix) x
$ encodeFloat m (n + clamp b)
where
isFix = x == 0 || isNaN x || isInfinite x
(m,n) = decodeFloat x
(l,h) = floatRange x
d = floatDigits x
b = h - l + 4*d
-- n+k may overflow, which would lead to incorrect results, hence we clamp
-- the scaling parameter. If (n+k) would be larger than h, (n + clamp b k)
-- must be too, similar for smaller than (l-d).
clamp bd = max (-bd) (min bd k)
| ' True ' if the argument is an IEEE \"not - a - number\ " ( NaN ) value
isNaN :: Exp a -> Exp Bool
-- | 'True' if the argument is an IEEE infinity or negative-infinity
isInfinite :: Exp a -> Exp Bool
-- | 'True' if the argument is too small to be represented in normalized
-- format
isDenormalized :: Exp a -> Exp Bool
| ' True ' if the argument is an IEEE negative zero
isNegativeZero :: Exp a -> Exp Bool
-- | 'True' if the argument is an IEEE floating point number
isIEEE :: Exp a -> Exp Bool
default isIEEE :: P.RealFloat a => Exp a -> Exp Bool
isIEEE _ = constant (P.isIEEE (undefined::a))
| A version of arctangent taking two real floating - point arguments .
For real floating @x@ and @y@ , @'atan2 ' y computes the angle ( from the
-- positive x-axis) of the vector from the origin to the point @(x,y)@.
@'atan2 ' y returns a value in the range [ @-pi@ , @pi@ ] .
atan2 :: Exp a -> Exp a -> Exp a
instance RealFloat Half where
atan2 = mkAtan2
isNaN = mkIsNaN
isInfinite = mkIsInfinite
isDenormalized = ieee754 "isDenormalized" (ieee754_f16_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f16_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f16_decode (mkBitcast x)
in (fromIntegral m, n))
instance RealFloat Float where
atan2 = mkAtan2
isNaN = mkIsNaN
isInfinite = mkIsInfinite
isDenormalized = ieee754 "isDenormalized" (ieee754_f32_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f32_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f32_decode (mkBitcast x)
in (fromIntegral m, n))
instance RealFloat Double where
atan2 = mkAtan2
isNaN = mkIsNaN
isInfinite = mkIsInfinite
isDenormalized = ieee754 "isDenormalized" (ieee754_f64_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f64_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f64_decode (mkBitcast x)
in (m, n))
instance RealFloat CFloat where
atan2 = mkAtan2
isNaN = mkIsNaN . mkBitcast @Float
isInfinite = mkIsInfinite . mkBitcast @Float
isDenormalized = ieee754 "isDenormalized" (ieee754_f32_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f32_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f32_decode (mkBitcast x)
in (fromIntegral m, n))
encodeFloat x e = mkBitcast (encodeFloat @Float x e)
instance RealFloat CDouble where
atan2 = mkAtan2
isNaN = mkIsNaN . mkBitcast @Double
isInfinite = mkIsInfinite . mkBitcast @Double
isDenormalized = ieee754 "isDenormalized" (ieee754_f64_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f64_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f64_decode (mkBitcast x)
in (m, n))
encodeFloat x e = mkBitcast (encodeFloat @Double x e)
-- To satisfy superclass constraints
--
instance RealFloat a => P.RealFloat (Exp a) where
floatRadix = preludeError "floatRadix"
floatDigits = preludeError "floatDigits"
floatRange = preludeError "floatRange"
decodeFloat = preludeError "decodeFloat"
encodeFloat = preludeError "encodeFloat"
isNaN = preludeError "isNaN"
isInfinite = preludeError "isInfinite"
isDenormalized = preludeError "isDenormalized"
isNegativeZero = preludeError "isNegativeZero"
isIEEE = preludeError "isIEEE"
preludeError :: String -> a
preludeError x
= error
$ unlines [ printf "Prelude.%s applied to EDSL types: use Data.Array.Accelerate.%s instead" x x
, ""
, "These Prelude.RealFloat instances are present only to fulfil superclass"
, "constraints for subsequent classes in the standard Haskell numeric hierarchy."
]
ieee754 :: forall a b. HasCallStack => P.RealFloat a => Builder -> (Exp a -> b) -> Exp a -> b
ieee754 name f x
| P.isIEEE (undefined::a) = f x
| otherwise = internalError (builder % ": Not implemented for non-IEEE floating point") name
From : ghc / libraries / base / cbits / primFloat.c
-- ------------------------------------------
-- An IEEE754 number is denormalised iff:
* exponent is zero
-- * mantissa is non-zero.
-- * (don't care about setting of sign bit.)
--
ieee754_f64_is_denormalized :: Exp Word64 -> Exp Bool
ieee754_f64_is_denormalized x =
ieee754_f64_mantissa x == 0 &&
ieee754_f64_exponent x /= 0
ieee754_f32_is_denormalized :: Exp Word32 -> Exp Bool
ieee754_f32_is_denormalized x =
ieee754_f32_mantissa x == 0 &&
ieee754_f32_exponent x /= 0
ieee754_f16_is_denormalized :: Exp Word16 -> Exp Bool
ieee754_f16_is_denormalized x =
ieee754_f16_mantissa x == 0 &&
ieee754_f16_exponent x /= 0
-- Negative zero if only the sign bit is set
--
ieee754_f64_is_negative_zero :: Exp Word64 -> Exp Bool
ieee754_f64_is_negative_zero x =
ieee754_f64_negative x &&
ieee754_f64_exponent x == 0 &&
ieee754_f64_mantissa x == 0
ieee754_f32_is_negative_zero :: Exp Word32 -> Exp Bool
ieee754_f32_is_negative_zero x =
ieee754_f32_negative x &&
ieee754_f32_exponent x == 0 &&
ieee754_f32_mantissa x == 0
ieee754_f16_is_negative_zero :: Exp Word16 -> Exp Bool
ieee754_f16_is_negative_zero x =
ieee754_f16_negative x &&
ieee754_f16_exponent x == 0 &&
ieee754_f16_mantissa x == 0
-- Assume the host processor stores integers and floating point numbers in the
-- same endianness (true for modern processors).
--
-- To recap, here's the representation of a double precision
-- IEEE floating point number:
--
sign 63 sign bit ( 0==positive , 1==negative )
exponent 62 - 52 exponent ( biased by 1023 )
fraction 51 - 0 fraction ( bits to right of binary point )
--
ieee754_f64_mantissa :: Exp Word64 -> Exp Word64
ieee754_f64_mantissa x = x .&. 0xFFFFFFFFFFFFF
ieee754_f64_exponent :: Exp Word64 -> Exp Word16
ieee754_f64_exponent x = fromIntegral (x `unsafeShiftR` 52) .&. 0x7FF
ieee754_f64_negative :: Exp Word64 -> Exp Bool
ieee754_f64_negative x = testBit x 63
Representation of single precision IEEE floating point number :
--
sign 31 sign bit ( 0==positive , 1==negative )
exponent 30 - 23 exponent ( biased by 127 )
fraction 22 - 0 fraction ( bits to right of binary point )
--
ieee754_f32_mantissa :: Exp Word32 -> Exp Word32
ieee754_f32_mantissa x = x .&. 0x7FFFFF
ieee754_f32_exponent :: Exp Word32 -> Exp Word8
ieee754_f32_exponent x = fromIntegral (x `unsafeShiftR` 23)
ieee754_f32_negative :: Exp Word32 -> Exp Bool
ieee754_f32_negative x = testBit x 31
Representation of half precision IEEE floating point number :
--
sign 15 sign bit ( 0==positive , 1==negative )
exponent 14 - 10 exponent ( biased by 15 )
fraction 9 - 0 fraction ( bits to right of binary point )
--
ieee754_f16_mantissa :: Exp Word16 -> Exp Word16
ieee754_f16_mantissa x = x .&. 0x3FF
ieee754_f16_exponent :: Exp Word16 -> Exp Word8
ieee754_f16_exponent x = fromIntegral (x `unsafeShiftR` 10) .&. 0x1F
ieee754_f16_negative :: Exp Word16 -> Exp Bool
ieee754_f16_negative x = testBit x 15
-- reverse engineered following the below
ieee754_f16_decode :: Exp Word16 -> Exp (Int16, Int)
ieee754_f16_decode i =
let
_HHIGHBIT = 0x0400
_HMSBIT = 0x8000
_HMINEXP = ((_HALF_MIN_EXP) - (_HALF_MANT_DIG) - 1)
_HALF_MANT_DIG = floatDigits (undefined::Exp Half)
(_HALF_MIN_EXP, _HALF_MAX_EXP) = floatRange (undefined::Exp Half)
high1 = fromIntegral i
high2 = high1 .&. (_HHIGHBIT - 1)
exp1 = ((fromIntegral high1 `unsafeShiftR` 10) .&. 0x1F) + _HMINEXP
exp2 = exp1 + 1
T2 high3 exp3
= cond (exp1 /= _HMINEXP)
-- don't add hidden bit to denorms
(T2 (high2 .|. _HHIGHBIT) exp1)
-- a denorm, normalise the mantissa
(while (\(T2 h _) -> (h .&. _HHIGHBIT) /= 0 )
(\(T2 h e) -> T2 (h `unsafeShiftL` 1) (e-1))
(T2 high2 exp2))
high4 = cond (fromIntegral i < (0 :: Exp Int16)) (-high3) high3
in
cond (high1 .&. complement _HMSBIT == 0)
(T2 0 0)
(T2 high4 exp3)
From : ghc / rts / StgPrimFloat.c
-- ----------------------------
ieee754_f32_decode :: Exp Word32 -> Exp (Int32, Int)
ieee754_f32_decode i =
let
_FHIGHBIT = 0x00800000
_FMSBIT = 0x80000000
_FMINEXP = ((_FLT_MIN_EXP) - (_FLT_MANT_DIG) - 1)
_FLT_MANT_DIG = floatDigits (undefined::Exp Float)
(_FLT_MIN_EXP, _FLT_MAX_EXP) = floatRange (undefined::Exp Float)
high1 = fromIntegral i
high2 = high1 .&. (_FHIGHBIT - 1)
exp1 = ((fromIntegral high1 `unsafeShiftR` 23) .&. 0xFF) + _FMINEXP
exp2 = exp1 + 1
T2 high3 exp3
= cond (exp1 /= _FMINEXP)
-- don't add hidden bit to denorms
(T2 (high2 .|. _FHIGHBIT) exp1)
-- a denorm, normalise the mantissa
(while (\(T2 h _) -> (h .&. _FHIGHBIT) /= 0 )
(\(T2 h e) -> T2 (h `unsafeShiftL` 1) (e-1))
(T2 high2 exp2))
high4 = cond (fromIntegral i < (0 :: Exp Int32)) (-high3) high3
in
cond (high1 .&. complement _FMSBIT == 0)
(T2 0 0)
(T2 high4 exp3)
ieee754_f64_decode :: Exp Word64 -> Exp (Int64, Int)
ieee754_f64_decode i =
let T4 s h l e = ieee754_f64_decode2 i
in T2 (fromIntegral s * (fromIntegral h `unsafeShiftL` 32 .|. fromIntegral l)) e
ieee754_f64_decode2 :: Exp Word64 -> Exp (Int, Word32, Word32, Int)
ieee754_f64_decode2 i =
let
_DHIGHBIT = 0x00100000
_DMSBIT = 0x80000000
_DMINEXP = ((_DBL_MIN_EXP) - (_DBL_MANT_DIG) - 1)
_DBL_MANT_DIG = floatDigits (undefined::Exp Double)
(_DBL_MIN_EXP, _DBL_MAX_EXP) = floatRange (undefined::Exp Double)
low = fromIntegral i
high = fromIntegral (i `unsafeShiftR` 32)
iexp = (fromIntegral ((high `unsafeShiftR` 20) .&. 0x7FF) + _DMINEXP)
sign = cond (fromIntegral i < (0 :: Exp Int64)) (-1) 1
high2 = high .&. (_DHIGHBIT - 1)
iexp2 = iexp + 1
T3 hi lo ie
= cond (iexp2 /= _DMINEXP)
-- don't add hidden bit to denorms
(T3 (high2 .|. _DHIGHBIT) low iexp)
-- a denorm, nermalise the mantissa
(while (\(T3 h _ _) -> (h .&. _DHIGHBIT) /= 0)
(\(T3 h l e) ->
let h1 = h `unsafeShiftL` 1
h2 = cond ((l .&. _DMSBIT) /= 0) (h1+1) h1
in T3 h2 (l `unsafeShiftL` 1) (e-1))
(T3 high2 low iexp2))
in
cond (low == 0 && (high .&. (complement _DMSBIT)) == 0)
(T4 1 0 0 0)
(T4 sign hi lo ie)
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate/5d32f1710e39f8aa7596f6beffebd2e8369cc36d/src/Data/Array/Accelerate/Classes/RealFloat.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DefaultSignatures #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedStrings #
|
Module : Data.Array.Accelerate.Classes.RealFloat
License : BSD3
Stability : experimental
| Efficient, machine-independent access to the components of a floating-point
number
| The number of digits of 'floatRadix' in the significand (constant)
| The lowest and highest values the exponent may assume (constant)
| Return the significand and an appropriately scaled exponent. If
| Inverse of 'decodeFloat'
| Multiply a floating point number by an integer power of the radix
n+k may overflow, which would lead to incorrect results, hence we clamp
the scaling parameter. If (n+k) would be larger than h, (n + clamp b k)
must be too, similar for smaller than (l-d).
| 'True' if the argument is an IEEE infinity or negative-infinity
| 'True' if the argument is too small to be represented in normalized
format
| 'True' if the argument is an IEEE floating point number
positive x-axis) of the vector from the origin to the point @(x,y)@.
To satisfy superclass constraints
------------------------------------------
An IEEE754 number is denormalised iff:
* mantissa is non-zero.
* (don't care about setting of sign bit.)
Negative zero if only the sign bit is set
Assume the host processor stores integers and floating point numbers in the
same endianness (true for modern processors).
To recap, here's the representation of a double precision
IEEE floating point number:
reverse engineered following the below
don't add hidden bit to denorms
a denorm, normalise the mantissa
----------------------------
don't add hidden bit to denorms
a denorm, normalise the mantissa
don't add hidden bit to denorms
a denorm, nermalise the mantissa | # LANGUAGE FlexibleInstances #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
Copyright : [ 2016 .. 2020 ] The Accelerate Team
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Data.Array.Accelerate.Classes.RealFloat (
RealFloat(..),
) where
import Data.Array.Accelerate.Error
import Data.Array.Accelerate.Language ( cond, while )
import Data.Array.Accelerate.Pattern
import Data.Array.Accelerate.Smart
import Data.Array.Accelerate.Type
import Data.Array.Accelerate.Data.Bits
import Data.Array.Accelerate.Classes.Eq
import Data.Array.Accelerate.Classes.Floating
import Data.Array.Accelerate.Classes.FromIntegral
import Data.Array.Accelerate.Classes.Num
import Data.Array.Accelerate.Classes.Ord
import Data.Array.Accelerate.Classes.RealFrac
import Data.Text.Lazy.Builder
import Formatting
import Text.Printf
import Prelude ( (.), ($), String, error, undefined, unlines, otherwise )
import qualified Prelude as P
class (RealFrac a, Floating a) => RealFloat a where
| The radix of the representation ( often 2 ) ( constant )
Integer
default floatRadix :: P.RealFloat a => Exp a -> Exp Int64
floatRadix _ = P.fromInteger (P.floatRadix (undefined::a))
floatDigits :: Exp a -> Exp Int
default floatDigits :: P.RealFloat a => Exp a -> Exp Int
floatDigits _ = constant (P.floatDigits (undefined::a))
floatRange :: Exp a -> (Exp Int, Exp Int)
default floatRange :: P.RealFloat a => Exp a -> (Exp Int, Exp Int)
floatRange _ = let (m,n) = P.floatRange (undefined::a)
in (constant m, constant n)
@(m , n ) = ' decodeFloat ' x@ then @x = m*b^^n@ , where @b@ is the
floating - point radix ( ' floatRadix ' ) . Furthermore , either @m@ and @n@ are
both zero , or @b^(d-1 ) < = ' abs ' m < b^d@ , where = ' floatDigits ' x@.
Integer
Integer
default encodeFloat :: (FromIntegral Int a, FromIntegral Int64 a) => Exp Int64 -> Exp Int -> Exp a
encodeFloat x e = fromIntegral x * (fromIntegral (floatRadix (undefined :: Exp a)) ** fromIntegral e)
| Corresponds to the second component of ' decodeFloat '
exponent :: Exp a -> Exp Int
exponent x = let (m,n) = decodeFloat x
in cond (m == 0)
0
(n + floatDigits x)
| Corresponds to the first component of ' decodeFloat '
significand :: Exp a -> Exp a
significand x = let (m,_) = decodeFloat x
in encodeFloat m (negate (floatDigits x))
scaleFloat :: Exp Int -> Exp a -> Exp a
scaleFloat k x =
cond (k == 0 || isFix) x
$ encodeFloat m (n + clamp b)
where
isFix = x == 0 || isNaN x || isInfinite x
(m,n) = decodeFloat x
(l,h) = floatRange x
d = floatDigits x
b = h - l + 4*d
clamp bd = max (-bd) (min bd k)
| ' True ' if the argument is an IEEE \"not - a - number\ " ( NaN ) value
isNaN :: Exp a -> Exp Bool
isInfinite :: Exp a -> Exp Bool
isDenormalized :: Exp a -> Exp Bool
| ' True ' if the argument is an IEEE negative zero
isNegativeZero :: Exp a -> Exp Bool
isIEEE :: Exp a -> Exp Bool
default isIEEE :: P.RealFloat a => Exp a -> Exp Bool
isIEEE _ = constant (P.isIEEE (undefined::a))
| A version of arctangent taking two real floating - point arguments .
For real floating @x@ and @y@ , @'atan2 ' y computes the angle ( from the
@'atan2 ' y returns a value in the range [ @-pi@ , @pi@ ] .
atan2 :: Exp a -> Exp a -> Exp a
instance RealFloat Half where
atan2 = mkAtan2
isNaN = mkIsNaN
isInfinite = mkIsInfinite
isDenormalized = ieee754 "isDenormalized" (ieee754_f16_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f16_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f16_decode (mkBitcast x)
in (fromIntegral m, n))
instance RealFloat Float where
atan2 = mkAtan2
isNaN = mkIsNaN
isInfinite = mkIsInfinite
isDenormalized = ieee754 "isDenormalized" (ieee754_f32_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f32_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f32_decode (mkBitcast x)
in (fromIntegral m, n))
instance RealFloat Double where
atan2 = mkAtan2
isNaN = mkIsNaN
isInfinite = mkIsInfinite
isDenormalized = ieee754 "isDenormalized" (ieee754_f64_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f64_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f64_decode (mkBitcast x)
in (m, n))
instance RealFloat CFloat where
atan2 = mkAtan2
isNaN = mkIsNaN . mkBitcast @Float
isInfinite = mkIsInfinite . mkBitcast @Float
isDenormalized = ieee754 "isDenormalized" (ieee754_f32_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f32_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f32_decode (mkBitcast x)
in (fromIntegral m, n))
encodeFloat x e = mkBitcast (encodeFloat @Float x e)
instance RealFloat CDouble where
atan2 = mkAtan2
isNaN = mkIsNaN . mkBitcast @Double
isInfinite = mkIsInfinite . mkBitcast @Double
isDenormalized = ieee754 "isDenormalized" (ieee754_f64_is_denormalized . mkBitcast)
isNegativeZero = ieee754 "isNegativeZero" (ieee754_f64_is_negative_zero . mkBitcast)
decodeFloat = ieee754 "decodeFloat" (\x -> let T2 m n = ieee754_f64_decode (mkBitcast x)
in (m, n))
encodeFloat x e = mkBitcast (encodeFloat @Double x e)
instance RealFloat a => P.RealFloat (Exp a) where
floatRadix = preludeError "floatRadix"
floatDigits = preludeError "floatDigits"
floatRange = preludeError "floatRange"
decodeFloat = preludeError "decodeFloat"
encodeFloat = preludeError "encodeFloat"
isNaN = preludeError "isNaN"
isInfinite = preludeError "isInfinite"
isDenormalized = preludeError "isDenormalized"
isNegativeZero = preludeError "isNegativeZero"
isIEEE = preludeError "isIEEE"
preludeError :: String -> a
preludeError x
= error
$ unlines [ printf "Prelude.%s applied to EDSL types: use Data.Array.Accelerate.%s instead" x x
, ""
, "These Prelude.RealFloat instances are present only to fulfil superclass"
, "constraints for subsequent classes in the standard Haskell numeric hierarchy."
]
ieee754 :: forall a b. HasCallStack => P.RealFloat a => Builder -> (Exp a -> b) -> Exp a -> b
ieee754 name f x
| P.isIEEE (undefined::a) = f x
| otherwise = internalError (builder % ": Not implemented for non-IEEE floating point") name
From : ghc / libraries / base / cbits / primFloat.c
* exponent is zero
ieee754_f64_is_denormalized :: Exp Word64 -> Exp Bool
ieee754_f64_is_denormalized x =
ieee754_f64_mantissa x == 0 &&
ieee754_f64_exponent x /= 0
ieee754_f32_is_denormalized :: Exp Word32 -> Exp Bool
ieee754_f32_is_denormalized x =
ieee754_f32_mantissa x == 0 &&
ieee754_f32_exponent x /= 0
ieee754_f16_is_denormalized :: Exp Word16 -> Exp Bool
ieee754_f16_is_denormalized x =
ieee754_f16_mantissa x == 0 &&
ieee754_f16_exponent x /= 0
ieee754_f64_is_negative_zero :: Exp Word64 -> Exp Bool
ieee754_f64_is_negative_zero x =
ieee754_f64_negative x &&
ieee754_f64_exponent x == 0 &&
ieee754_f64_mantissa x == 0
ieee754_f32_is_negative_zero :: Exp Word32 -> Exp Bool
ieee754_f32_is_negative_zero x =
ieee754_f32_negative x &&
ieee754_f32_exponent x == 0 &&
ieee754_f32_mantissa x == 0
ieee754_f16_is_negative_zero :: Exp Word16 -> Exp Bool
ieee754_f16_is_negative_zero x =
ieee754_f16_negative x &&
ieee754_f16_exponent x == 0 &&
ieee754_f16_mantissa x == 0
sign 63 sign bit ( 0==positive , 1==negative )
exponent 62 - 52 exponent ( biased by 1023 )
fraction 51 - 0 fraction ( bits to right of binary point )
ieee754_f64_mantissa :: Exp Word64 -> Exp Word64
ieee754_f64_mantissa x = x .&. 0xFFFFFFFFFFFFF
ieee754_f64_exponent :: Exp Word64 -> Exp Word16
ieee754_f64_exponent x = fromIntegral (x `unsafeShiftR` 52) .&. 0x7FF
ieee754_f64_negative :: Exp Word64 -> Exp Bool
ieee754_f64_negative x = testBit x 63
Representation of single precision IEEE floating point number :
sign 31 sign bit ( 0==positive , 1==negative )
exponent 30 - 23 exponent ( biased by 127 )
fraction 22 - 0 fraction ( bits to right of binary point )
ieee754_f32_mantissa :: Exp Word32 -> Exp Word32
ieee754_f32_mantissa x = x .&. 0x7FFFFF
ieee754_f32_exponent :: Exp Word32 -> Exp Word8
ieee754_f32_exponent x = fromIntegral (x `unsafeShiftR` 23)
ieee754_f32_negative :: Exp Word32 -> Exp Bool
ieee754_f32_negative x = testBit x 31
Representation of half precision IEEE floating point number :
sign 15 sign bit ( 0==positive , 1==negative )
exponent 14 - 10 exponent ( biased by 15 )
fraction 9 - 0 fraction ( bits to right of binary point )
ieee754_f16_mantissa :: Exp Word16 -> Exp Word16
ieee754_f16_mantissa x = x .&. 0x3FF
ieee754_f16_exponent :: Exp Word16 -> Exp Word8
ieee754_f16_exponent x = fromIntegral (x `unsafeShiftR` 10) .&. 0x1F
ieee754_f16_negative :: Exp Word16 -> Exp Bool
ieee754_f16_negative x = testBit x 15
ieee754_f16_decode :: Exp Word16 -> Exp (Int16, Int)
ieee754_f16_decode i =
let
_HHIGHBIT = 0x0400
_HMSBIT = 0x8000
_HMINEXP = ((_HALF_MIN_EXP) - (_HALF_MANT_DIG) - 1)
_HALF_MANT_DIG = floatDigits (undefined::Exp Half)
(_HALF_MIN_EXP, _HALF_MAX_EXP) = floatRange (undefined::Exp Half)
high1 = fromIntegral i
high2 = high1 .&. (_HHIGHBIT - 1)
exp1 = ((fromIntegral high1 `unsafeShiftR` 10) .&. 0x1F) + _HMINEXP
exp2 = exp1 + 1
T2 high3 exp3
= cond (exp1 /= _HMINEXP)
(T2 (high2 .|. _HHIGHBIT) exp1)
(while (\(T2 h _) -> (h .&. _HHIGHBIT) /= 0 )
(\(T2 h e) -> T2 (h `unsafeShiftL` 1) (e-1))
(T2 high2 exp2))
high4 = cond (fromIntegral i < (0 :: Exp Int16)) (-high3) high3
in
cond (high1 .&. complement _HMSBIT == 0)
(T2 0 0)
(T2 high4 exp3)
From : ghc / rts / StgPrimFloat.c
ieee754_f32_decode :: Exp Word32 -> Exp (Int32, Int)
ieee754_f32_decode i =
let
_FHIGHBIT = 0x00800000
_FMSBIT = 0x80000000
_FMINEXP = ((_FLT_MIN_EXP) - (_FLT_MANT_DIG) - 1)
_FLT_MANT_DIG = floatDigits (undefined::Exp Float)
(_FLT_MIN_EXP, _FLT_MAX_EXP) = floatRange (undefined::Exp Float)
high1 = fromIntegral i
high2 = high1 .&. (_FHIGHBIT - 1)
exp1 = ((fromIntegral high1 `unsafeShiftR` 23) .&. 0xFF) + _FMINEXP
exp2 = exp1 + 1
T2 high3 exp3
= cond (exp1 /= _FMINEXP)
(T2 (high2 .|. _FHIGHBIT) exp1)
(while (\(T2 h _) -> (h .&. _FHIGHBIT) /= 0 )
(\(T2 h e) -> T2 (h `unsafeShiftL` 1) (e-1))
(T2 high2 exp2))
high4 = cond (fromIntegral i < (0 :: Exp Int32)) (-high3) high3
in
cond (high1 .&. complement _FMSBIT == 0)
(T2 0 0)
(T2 high4 exp3)
ieee754_f64_decode :: Exp Word64 -> Exp (Int64, Int)
ieee754_f64_decode i =
let T4 s h l e = ieee754_f64_decode2 i
in T2 (fromIntegral s * (fromIntegral h `unsafeShiftL` 32 .|. fromIntegral l)) e
ieee754_f64_decode2 :: Exp Word64 -> Exp (Int, Word32, Word32, Int)
ieee754_f64_decode2 i =
let
_DHIGHBIT = 0x00100000
_DMSBIT = 0x80000000
_DMINEXP = ((_DBL_MIN_EXP) - (_DBL_MANT_DIG) - 1)
_DBL_MANT_DIG = floatDigits (undefined::Exp Double)
(_DBL_MIN_EXP, _DBL_MAX_EXP) = floatRange (undefined::Exp Double)
low = fromIntegral i
high = fromIntegral (i `unsafeShiftR` 32)
iexp = (fromIntegral ((high `unsafeShiftR` 20) .&. 0x7FF) + _DMINEXP)
sign = cond (fromIntegral i < (0 :: Exp Int64)) (-1) 1
high2 = high .&. (_DHIGHBIT - 1)
iexp2 = iexp + 1
T3 hi lo ie
= cond (iexp2 /= _DMINEXP)
(T3 (high2 .|. _DHIGHBIT) low iexp)
(while (\(T3 h _ _) -> (h .&. _DHIGHBIT) /= 0)
(\(T3 h l e) ->
let h1 = h `unsafeShiftL` 1
h2 = cond ((l .&. _DMSBIT) /= 0) (h1+1) h1
in T3 h2 (l `unsafeShiftL` 1) (e-1))
(T3 high2 low iexp2))
in
cond (low == 0 && (high .&. (complement _DMSBIT)) == 0)
(T4 1 0 0 0)
(T4 sign hi lo ie)
|
c88871ace1680e36814a6f03d63034f03117c64dba0cce1c40a2f7d7cb971407 | informatimago/lisp | utility.lisp | -*- mode : lisp;coding : utf-8 -*-
;;;;**************************************************************************
FILE : utility.lisp
;;;;LANGUAGE: Common-Lisp
;;;;SYSTEM: Common-Lisp
USER - INTERFACE :
;;;;DESCRIPTION
;;;;
;;;; This file defines a few utilities.
;;;;
< PJB > < >
MODIFICATIONS
2012 - 01 - 15 < PJB > Extracted from ' virtual-fs.lisp ' .
;;;;LEGAL
;;;; GPL
;;;;
Copyright 2012 - 2016
;;;;
;;;; This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version
2 of the License , or ( at your option ) any later version .
;;;;
;;;; This program is distributed in the hope that it will be
;;;; useful, but WITHOUT ANY WARRANTY; without even the implied
;;;; warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
;;;; PURPOSE. See the GNU General Public License for more details.
;;;;
You should have received a copy of the GNU General Public
;;;; License along with this program; if not, write to the Free
Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
;;;;**************************************************************************
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COM.INFORMATIMAGO.COMMON-LISP.VIRTUAL-FILE-SYSTEM")
(defun proper-list-p (object)
(labels ((proper (current slow)
(cond ((null current) t)
((atom current) nil)
((null (cdr current)) t)
((atom (cdr current)) nil)
((eq current slow) nil)
(t (proper (cddr current) (cdr slow))))))
(proper object (cons nil object))))
(defun test-proper-list-p ()
(assert
(every
(function identity)
(mapcar (lambda (test) (eq (first test) (proper-list-p (second test))))
'((nil x)
(t ())
(t (a))
(t (a b))
(t (a b c))
(t (a b c d))
(nil (a . x))
(nil (a b . x))
(nil (a b c . x))
(nil (a b c d . x))
(nil #1=(a . #1#))
(nil #2=(a b . #2#))
(nil #3=(a b c . #3#))
(nil #4=(a b c d . #4#))
(nil (1 . #1#))
(nil (1 2 . #1#))
(nil (1 2 3 . #1#))
(nil (1 2 3 4 . #1#))
(nil (1 . #2#))
(nil (1 2 . #2#))
(nil (1 2 3 . #2#))
(nil (1 2 3 4 . #2#))
(nil (1 . #3#))
(nil (1 2 . #3#))
(nil (1 2 3 . #3#))
(nil (1 2 3 4 . #3#))
(nil (1 . #4#))
(nil (1 2 . #4#))
(nil (1 2 3 . #4#))
(nil (1 2 3 4 . #4#)))))))
(defun unsplit-string (string-list &optional (separator " "))
"
DO: The inverse than split-string.
If no separator is provided then a simple space is used.
SEPARATOR: (OR NULL STRINGP CHARACTERP)
"
(check-type separator (or string character symbol) "a string designator.")
(if string-list
(cl:with-output-to-string (cl:*standard-output*)
(cl:princ (pop string-list))
(dolist (item string-list)
(cl:princ separator) (cl:princ item)))
""))
(defun assert-type (datum expected-type)
"
DO: Signal a TYPE-ERROR if DATUM is not of the EXPECTED-TYPE.
NOTICE: CHECK-TYPE signals a PROGRAM-ERROR.
"
(or (typep datum expected-type)
(error (make-condition 'type-error
:datum datum :expected-type expected-type))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; regular expressions
;;;
(defun re-compile (re &key extended)
#+clisp
(regexp:regexp-compile re :extended extended)
#+(and (not clisp) cl-ppcre)
(cl-ppcre:create-scanner re :extended-mode extended)
#-(or clisp cl-ppcre)
(error "Please implement RE-COMPILE"))
(defun re-exec (re string &key (start 0) (end nil))
#+clisp
(mapcar (lambda (match)
(list (regexp:match-start match)
(regexp:match-end match)
match))
(multiple-value-list (regexp:regexp-exec re string :start start :end (or end (length string)))))
#+(and (not clisp) cl-ppcre)
(multiple-value-bind (start end starts ends)
(cl-ppcre:scan re string :start start :end (or end (length string)))
(and start end
(values-list (cons (list start end)
(map 'list (lambda (s e)
(if (or s e)
(list s e)
nil))
starts ends)))))
#-(or clisp cl-ppcre)
(error "Please implement RE-EXEC"))
(defun re-match-string (string match)
#+clisp
(regexp:match-string string (third match))
#+(and (not clisp) cl-ppcre)
(subseq string (first match) (second match))
#-(or clisp cl-ppcre)
(error "Please implement RE-MATCH-STRING"))
(defun re-match (regexp string)
(re-exec (re-compile regexp :extended t) string))
(defun re-quote (re &key extended)
(assert extended (extended) "re-quote is not implemented yet for non-extended regexps.")
(cl:with-output-to-string (out)
(loop
:for ch :across re
:do (cond
((alphanumericp ch) (princ ch out))
(t (princ "\\" out) (princ ch out))))))
;;;; THE END ;;;;
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/future/vfs/utility.lisp | lisp | coding : utf-8 -*-
**************************************************************************
LANGUAGE: Common-Lisp
SYSTEM: Common-Lisp
DESCRIPTION
This file defines a few utilities.
LEGAL
GPL
This program is free software; you can redistribute it and/or
either version
This program is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
License along with this program; if not, write to the Free
**************************************************************************
regular expressions
THE END ;;;; | FILE : utility.lisp
USER - INTERFACE :
< PJB > < >
MODIFICATIONS
2012 - 01 - 15 < PJB > Extracted from ' virtual-fs.lisp ' .
Copyright 2012 - 2016
modify it under the terms of the GNU General Public License
2 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU General Public
Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COM.INFORMATIMAGO.COMMON-LISP.VIRTUAL-FILE-SYSTEM")
(defun proper-list-p (object)
(labels ((proper (current slow)
(cond ((null current) t)
((atom current) nil)
((null (cdr current)) t)
((atom (cdr current)) nil)
((eq current slow) nil)
(t (proper (cddr current) (cdr slow))))))
(proper object (cons nil object))))
(defun test-proper-list-p ()
(assert
(every
(function identity)
(mapcar (lambda (test) (eq (first test) (proper-list-p (second test))))
'((nil x)
(t ())
(t (a))
(t (a b))
(t (a b c))
(t (a b c d))
(nil (a . x))
(nil (a b . x))
(nil (a b c . x))
(nil (a b c d . x))
(nil #1=(a . #1#))
(nil #2=(a b . #2#))
(nil #3=(a b c . #3#))
(nil #4=(a b c d . #4#))
(nil (1 . #1#))
(nil (1 2 . #1#))
(nil (1 2 3 . #1#))
(nil (1 2 3 4 . #1#))
(nil (1 . #2#))
(nil (1 2 . #2#))
(nil (1 2 3 . #2#))
(nil (1 2 3 4 . #2#))
(nil (1 . #3#))
(nil (1 2 . #3#))
(nil (1 2 3 . #3#))
(nil (1 2 3 4 . #3#))
(nil (1 . #4#))
(nil (1 2 . #4#))
(nil (1 2 3 . #4#))
(nil (1 2 3 4 . #4#)))))))
(defun unsplit-string (string-list &optional (separator " "))
"
DO: The inverse than split-string.
If no separator is provided then a simple space is used.
SEPARATOR: (OR NULL STRINGP CHARACTERP)
"
(check-type separator (or string character symbol) "a string designator.")
(if string-list
(cl:with-output-to-string (cl:*standard-output*)
(cl:princ (pop string-list))
(dolist (item string-list)
(cl:princ separator) (cl:princ item)))
""))
(defun assert-type (datum expected-type)
"
DO: Signal a TYPE-ERROR if DATUM is not of the EXPECTED-TYPE.
NOTICE: CHECK-TYPE signals a PROGRAM-ERROR.
"
(or (typep datum expected-type)
(error (make-condition 'type-error
:datum datum :expected-type expected-type))))
(defun re-compile (re &key extended)
#+clisp
(regexp:regexp-compile re :extended extended)
#+(and (not clisp) cl-ppcre)
(cl-ppcre:create-scanner re :extended-mode extended)
#-(or clisp cl-ppcre)
(error "Please implement RE-COMPILE"))
(defun re-exec (re string &key (start 0) (end nil))
#+clisp
(mapcar (lambda (match)
(list (regexp:match-start match)
(regexp:match-end match)
match))
(multiple-value-list (regexp:regexp-exec re string :start start :end (or end (length string)))))
#+(and (not clisp) cl-ppcre)
(multiple-value-bind (start end starts ends)
(cl-ppcre:scan re string :start start :end (or end (length string)))
(and start end
(values-list (cons (list start end)
(map 'list (lambda (s e)
(if (or s e)
(list s e)
nil))
starts ends)))))
#-(or clisp cl-ppcre)
(error "Please implement RE-EXEC"))
(defun re-match-string (string match)
#+clisp
(regexp:match-string string (third match))
#+(and (not clisp) cl-ppcre)
(subseq string (first match) (second match))
#-(or clisp cl-ppcre)
(error "Please implement RE-MATCH-STRING"))
(defun re-match (regexp string)
(re-exec (re-compile regexp :extended t) string))
(defun re-quote (re &key extended)
(assert extended (extended) "re-quote is not implemented yet for non-extended regexps.")
(cl:with-output-to-string (out)
(loop
:for ch :across re
:do (cond
((alphanumericp ch) (princ ch out))
(t (princ "\\" out) (princ ch out))))))
|
51dfec5d6fa2ab04248b60fc0709925b232ae3d49fe40f6acd70ddd5f1a5bcd8 | roelvandijk/numerals | TestData.hs | |
[ @ISO639 - 1@ ] -
[ @ISO639 - 2@ ] -
[ @ISO639 - 3@ ] cod
[ @Native name@ ]
[ @English name@ ]
[@ISO639-1@] -
[@ISO639-2@] -
[@ISO639-3@] cod
[@Native name@] Kokáma
[@English name@] Cocama
-}
module Text.Numeral.Language.COD.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
{-
Sources:
-to-count-in-cocama/en/cod/
-}
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (1, "huepe")
, (2, "mucuica")
, (3, "mutsapɨrɨca")
, (4, "iruaca")
, (5, "pichca")
, (6, "socta")
, (7, "cansi")
, (8, "pusa")
, (9, "iscun")
, (10, "chunga")
, (11, "chunga huepe")
, (12, "chunga mucuica")
, (13, "chunga mutsapɨrɨca")
, (14, "chunga iruaca")
, (15, "chunga pichca")
, (16, "chunga socta")
, (17, "chunga cansi")
, (18, "chunga pusa")
, (19, "chunga iscun")
, (20, "mucuica chunga")
, (21, "mucuica chunga huepe")
, (22, "mucuica chunga mucuica")
, (23, "mucuica chunga mutsapɨrɨca")
, (24, "mucuica chunga iruaca")
, (25, "mucuica chunga pichca")
, (26, "mucuica chunga socta")
, (27, "mucuica chunga cansi")
, (28, "mucuica chunga pusa")
, (29, "mucuica chunga iscun")
, (30, "mutsapɨrɨca chunga")
, (31, "mutsapɨrɨca chunga huepe")
, (32, "mutsapɨrɨca chunga mucuica")
, (33, "mutsapɨrɨca chunga mutsapɨrɨca")
, (34, "mutsapɨrɨca chunga iruaca")
, (35, "mutsapɨrɨca chunga pichca")
, (36, "mutsapɨrɨca chunga socta")
, (37, "mutsapɨrɨca chunga cansi")
, (38, "mutsapɨrɨca chunga pusa")
, (39, "mutsapɨrɨca chunga iscun")
, (40, "iruaca chunga")
, (41, "iruaca chunga huepe")
, (42, "iruaca chunga mucuica")
, (43, "iruaca chunga mutsapɨrɨca")
, (44, "iruaca chunga iruaca")
, (45, "iruaca chunga pichca")
, (46, "iruaca chunga socta")
, (47, "iruaca chunga cansi")
, (48, "iruaca chunga pusa")
, (49, "iruaca chunga iscun")
, (50, "pichca chunga")
, (51, "pichca chunga huepe")
, (52, "pichca chunga mucuica")
, (53, "pichca chunga mutsapɨrɨca")
, (54, "pichca chunga iruaca")
, (55, "pichca chunga pichca")
, (56, "pichca chunga socta")
, (57, "pichca chunga cansi")
, (58, "pichca chunga pusa")
, (59, "pichca chunga iscun")
, (60, "socta chunga")
, (61, "socta chunga huepe")
, (62, "socta chunga mucuica")
, (63, "socta chunga mutsapɨrɨca")
, (64, "socta chunga iruaca")
, (65, "socta chunga pichca")
, (66, "socta chunga socta")
, (67, "socta chunga cansi")
, (68, "socta chunga pusa")
, (69, "socta chunga iscun")
, (70, "cansi chunga")
, (71, "cansi chunga huepe")
, (72, "cansi chunga mucuica")
, (73, "cansi chunga mutsapɨrɨca")
, (74, "cansi chunga iruaca")
, (75, "cansi chunga pichca")
, (76, "cansi chunga socta")
, (77, "cansi chunga cansi")
, (78, "cansi chunga pusa")
, (79, "cansi chunga iscun")
, (80, "pusa chunga")
, (81, "pusa chunga huepe")
, (82, "pusa chunga mucuica")
, (83, "pusa chunga mutsapɨrɨca")
, (84, "pusa chunga iruaca")
, (85, "pusa chunga pichca")
, (86, "pusa chunga socta")
, (87, "pusa chunga cansi")
, (88, "pusa chunga pusa")
, (89, "pusa chunga iscun")
, (90, "iscun chunga")
, (91, "iscun chunga huepe")
, (92, "iscun chunga mucuica")
, (93, "iscun chunga mutsapɨrɨca")
, (94, "iscun chunga iruaca")
, (95, "iscun chunga pichca")
, (96, "iscun chunga socta")
, (97, "iscun chunga cansi")
, (98, "iscun chunga pusa")
, (99, "iscun chunga iscun")
, (100, "pacha")
, (101, "pacha huepe")
, (102, "pacha mucuica")
, (103, "pacha mutsapɨrɨca")
, (104, "pacha iruaca")
, (105, "pacha pichca")
, (106, "pacha socta")
, (107, "pacha cansi")
, (108, "pacha pusa")
, (109, "pacha iscun")
, (110, "pacha chunga")
, (123, "pacha mucuica chunga mutsapɨrɨca")
, (200, "mucuica pacha")
, (300, "mutsapɨrɨca pacha")
, (321, "mutsapɨrɨca pacha mucuica chunga huepe")
, (400, "iruaca pacha")
, (500, "pichca pacha")
, (600, "socta pacha")
, (700, "cansi pacha")
, (800, "pusa pacha")
, (900, "iscun pacha")
, (909, "iscun pacha iscun")
, (990, "iscun pacha iscun chunga")
, (999, "iscun pacha iscun chunga iscun")
, (1000, "huaranga")
, (1001, "huaranga huepe")
, (1008, "huaranga pusa")
, (1234, "huaranga mucuica pacha mutsapɨrɨca chunga iruaca")
, (2000, "mucuica huaranga")
, (3000, "mutsapɨrɨca huaranga")
, (4000, "iruaca huaranga")
, (4321, "iruaca huaranga mutsapɨrɨca pacha mucuica chunga huepe")
, (5000, "pichca huaranga")
, (6000, "socta huaranga")
, (7000, "cansi huaranga")
, (8000, "pusa huaranga")
, (9000, "iscun huaranga")
]
)
]
| null | https://raw.githubusercontent.com/roelvandijk/numerals/b1e4121e0824ac0646a3230bd311818e159ec127/src-test/Text/Numeral/Language/COD/TestData.hs | haskell | ------------------------------------------------------------------------------
Imports
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Test data
------------------------------------------------------------------------------
Sources:
-to-count-in-cocama/en/cod/
| |
[ @ISO639 - 1@ ] -
[ @ISO639 - 2@ ] -
[ @ISO639 - 3@ ] cod
[ @Native name@ ]
[ @English name@ ]
[@ISO639-1@] -
[@ISO639-2@] -
[@ISO639-3@] cod
[@Native name@] Kokáma
[@English name@] Cocama
-}
module Text.Numeral.Language.COD.TestData (cardinals) where
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (1, "huepe")
, (2, "mucuica")
, (3, "mutsapɨrɨca")
, (4, "iruaca")
, (5, "pichca")
, (6, "socta")
, (7, "cansi")
, (8, "pusa")
, (9, "iscun")
, (10, "chunga")
, (11, "chunga huepe")
, (12, "chunga mucuica")
, (13, "chunga mutsapɨrɨca")
, (14, "chunga iruaca")
, (15, "chunga pichca")
, (16, "chunga socta")
, (17, "chunga cansi")
, (18, "chunga pusa")
, (19, "chunga iscun")
, (20, "mucuica chunga")
, (21, "mucuica chunga huepe")
, (22, "mucuica chunga mucuica")
, (23, "mucuica chunga mutsapɨrɨca")
, (24, "mucuica chunga iruaca")
, (25, "mucuica chunga pichca")
, (26, "mucuica chunga socta")
, (27, "mucuica chunga cansi")
, (28, "mucuica chunga pusa")
, (29, "mucuica chunga iscun")
, (30, "mutsapɨrɨca chunga")
, (31, "mutsapɨrɨca chunga huepe")
, (32, "mutsapɨrɨca chunga mucuica")
, (33, "mutsapɨrɨca chunga mutsapɨrɨca")
, (34, "mutsapɨrɨca chunga iruaca")
, (35, "mutsapɨrɨca chunga pichca")
, (36, "mutsapɨrɨca chunga socta")
, (37, "mutsapɨrɨca chunga cansi")
, (38, "mutsapɨrɨca chunga pusa")
, (39, "mutsapɨrɨca chunga iscun")
, (40, "iruaca chunga")
, (41, "iruaca chunga huepe")
, (42, "iruaca chunga mucuica")
, (43, "iruaca chunga mutsapɨrɨca")
, (44, "iruaca chunga iruaca")
, (45, "iruaca chunga pichca")
, (46, "iruaca chunga socta")
, (47, "iruaca chunga cansi")
, (48, "iruaca chunga pusa")
, (49, "iruaca chunga iscun")
, (50, "pichca chunga")
, (51, "pichca chunga huepe")
, (52, "pichca chunga mucuica")
, (53, "pichca chunga mutsapɨrɨca")
, (54, "pichca chunga iruaca")
, (55, "pichca chunga pichca")
, (56, "pichca chunga socta")
, (57, "pichca chunga cansi")
, (58, "pichca chunga pusa")
, (59, "pichca chunga iscun")
, (60, "socta chunga")
, (61, "socta chunga huepe")
, (62, "socta chunga mucuica")
, (63, "socta chunga mutsapɨrɨca")
, (64, "socta chunga iruaca")
, (65, "socta chunga pichca")
, (66, "socta chunga socta")
, (67, "socta chunga cansi")
, (68, "socta chunga pusa")
, (69, "socta chunga iscun")
, (70, "cansi chunga")
, (71, "cansi chunga huepe")
, (72, "cansi chunga mucuica")
, (73, "cansi chunga mutsapɨrɨca")
, (74, "cansi chunga iruaca")
, (75, "cansi chunga pichca")
, (76, "cansi chunga socta")
, (77, "cansi chunga cansi")
, (78, "cansi chunga pusa")
, (79, "cansi chunga iscun")
, (80, "pusa chunga")
, (81, "pusa chunga huepe")
, (82, "pusa chunga mucuica")
, (83, "pusa chunga mutsapɨrɨca")
, (84, "pusa chunga iruaca")
, (85, "pusa chunga pichca")
, (86, "pusa chunga socta")
, (87, "pusa chunga cansi")
, (88, "pusa chunga pusa")
, (89, "pusa chunga iscun")
, (90, "iscun chunga")
, (91, "iscun chunga huepe")
, (92, "iscun chunga mucuica")
, (93, "iscun chunga mutsapɨrɨca")
, (94, "iscun chunga iruaca")
, (95, "iscun chunga pichca")
, (96, "iscun chunga socta")
, (97, "iscun chunga cansi")
, (98, "iscun chunga pusa")
, (99, "iscun chunga iscun")
, (100, "pacha")
, (101, "pacha huepe")
, (102, "pacha mucuica")
, (103, "pacha mutsapɨrɨca")
, (104, "pacha iruaca")
, (105, "pacha pichca")
, (106, "pacha socta")
, (107, "pacha cansi")
, (108, "pacha pusa")
, (109, "pacha iscun")
, (110, "pacha chunga")
, (123, "pacha mucuica chunga mutsapɨrɨca")
, (200, "mucuica pacha")
, (300, "mutsapɨrɨca pacha")
, (321, "mutsapɨrɨca pacha mucuica chunga huepe")
, (400, "iruaca pacha")
, (500, "pichca pacha")
, (600, "socta pacha")
, (700, "cansi pacha")
, (800, "pusa pacha")
, (900, "iscun pacha")
, (909, "iscun pacha iscun")
, (990, "iscun pacha iscun chunga")
, (999, "iscun pacha iscun chunga iscun")
, (1000, "huaranga")
, (1001, "huaranga huepe")
, (1008, "huaranga pusa")
, (1234, "huaranga mucuica pacha mutsapɨrɨca chunga iruaca")
, (2000, "mucuica huaranga")
, (3000, "mutsapɨrɨca huaranga")
, (4000, "iruaca huaranga")
, (4321, "iruaca huaranga mutsapɨrɨca pacha mucuica chunga huepe")
, (5000, "pichca huaranga")
, (6000, "socta huaranga")
, (7000, "cansi huaranga")
, (8000, "pusa huaranga")
, (9000, "iscun huaranga")
]
)
]
|
df32bc55c5d650861b0704887eb0b4c1b7b3d6d025f62bfe563a00e94a288c94 | BranchTaken/Hemlock | test_min_max.ml | open! Basis.Rudiments
open! Basis
open Zint
let test () =
let rec test_pairs = function
| [] -> ()
| (x, y) :: pairs' -> begin
File.Fmt.stdout
|> Fmt.fmt "min,max "
|> fmt ~alt:true ~radix:Radix.Hex x
|> Fmt.fmt " "
|> fmt ~alt:true ~radix:Radix.Hex y
|> Fmt.fmt " -> "
|> fmt ~alt:true ~radix:Radix.Hex (min x y)
|> Fmt.fmt ", "
|> fmt ~alt:true ~radix:Radix.Hex (max x y)
|> Fmt.fmt "\n"
|> ignore;
test_pairs pairs'
end
in
let pairs = [
(of_string "0", of_string "0");
(of_string "0", of_string "1");
(of_string "1", of_string "0");
(of_string "1", of_string "1");
(of_string "0", of_string
"0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
] in
test_pairs pairs
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/zint/test_min_max.ml | ocaml | open! Basis.Rudiments
open! Basis
open Zint
let test () =
let rec test_pairs = function
| [] -> ()
| (x, y) :: pairs' -> begin
File.Fmt.stdout
|> Fmt.fmt "min,max "
|> fmt ~alt:true ~radix:Radix.Hex x
|> Fmt.fmt " "
|> fmt ~alt:true ~radix:Radix.Hex y
|> Fmt.fmt " -> "
|> fmt ~alt:true ~radix:Radix.Hex (min x y)
|> Fmt.fmt ", "
|> fmt ~alt:true ~radix:Radix.Hex (max x y)
|> Fmt.fmt "\n"
|> ignore;
test_pairs pairs'
end
in
let pairs = [
(of_string "0", of_string "0");
(of_string "0", of_string "1");
(of_string "1", of_string "0");
(of_string "1", of_string "1");
(of_string "0", of_string
"0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
] in
test_pairs pairs
let _ = test ()
| |
9eb17a68b2aaee548f3b05fc7f02280a58236ec7c6a2bca8d6ac573fef3c6906 | rollacaster/hiccup-d3 | pack.cljs | (ns tech.thomas-sojka.hiccup-d3.charts.pack
(:require ["d3" :as d3]
[tech.thomas-sojka.hiccup-d3.utils :refer [fetch-json]])
(:require-macros [tech.thomas-sojka.hiccup-d3.macros :as m]))
(def plain
(m/build-chart
"plain"
(fn [data]
(let [size 300
color (d3/scaleOrdinal d3/schemeCategory10)
margin 7
root ((-> (d3/pack)
(.size (into-array [(- size margin) (- size margin)])))
(-> (d3/hierarchy data)
(.sum (fn [d] (.-value d)))
(.sort (fn [a b] (- (.-value b) (.-value a))))))]
[:svg {:viewBox (str 0 " " 0 " " size " " size)}
[:filter {:id "dropshadow" :filterUnits "userSpaceOnUse"}
[:feGaussianBlur {:in "SourceAlpha" :stdDeviation "3"}]
[:feOffset {:dx (/ margin 2) :dy (/ margin 2)}]
[:feMerge
[:feMergeNode]
[:feMergeNode {:in "SourceGraphic"}]]]
(map
(fn [node]
[:circle {:key ^js (.-data.name node)
:cx (.-x node) :cy (.-y node) :r (.-r node)
:fill (color (.-height node))
:filter "url(#dropshadow)"}])
(.descendants root))]))))
(def pack
{:title "Pack"
:load (fn [] (-> (fetch-json "data/flare-2.json")))
:charts [plain]})
| null | https://raw.githubusercontent.com/rollacaster/hiccup-d3/1c124d293889793638fa65f1ab6c82b79df52b85/src/main/tech/thomas_sojka/hiccup_d3/charts/pack.cljs | clojure | (ns tech.thomas-sojka.hiccup-d3.charts.pack
(:require ["d3" :as d3]
[tech.thomas-sojka.hiccup-d3.utils :refer [fetch-json]])
(:require-macros [tech.thomas-sojka.hiccup-d3.macros :as m]))
(def plain
(m/build-chart
"plain"
(fn [data]
(let [size 300
color (d3/scaleOrdinal d3/schemeCategory10)
margin 7
root ((-> (d3/pack)
(.size (into-array [(- size margin) (- size margin)])))
(-> (d3/hierarchy data)
(.sum (fn [d] (.-value d)))
(.sort (fn [a b] (- (.-value b) (.-value a))))))]
[:svg {:viewBox (str 0 " " 0 " " size " " size)}
[:filter {:id "dropshadow" :filterUnits "userSpaceOnUse"}
[:feGaussianBlur {:in "SourceAlpha" :stdDeviation "3"}]
[:feOffset {:dx (/ margin 2) :dy (/ margin 2)}]
[:feMerge
[:feMergeNode]
[:feMergeNode {:in "SourceGraphic"}]]]
(map
(fn [node]
[:circle {:key ^js (.-data.name node)
:cx (.-x node) :cy (.-y node) :r (.-r node)
:fill (color (.-height node))
:filter "url(#dropshadow)"}])
(.descendants root))]))))
(def pack
{:title "Pack"
:load (fn [] (-> (fetch-json "data/flare-2.json")))
:charts [plain]})
| |
ffdf94ac61d657ec8117cdeed2d4db0d1138a5a0e433803908851a2ce885a987 | informatimago/lisp | rdp-lisp-boilerplate.lisp | -*- mode : lisp;coding : utf-8 -*-
;;;;**************************************************************************
FILE : rdp-lisp-boilerplate.lisp
;;;;LANGUAGE: Common-Lisp
;;;;SYSTEM: Common-Lisp
USER - INTERFACE :
;;;;DESCRIPTION
;;;;
;;;; The lisp parser boilerplate.
;;;;
< PJB > < >
MODIFICATIONS
2012 - 05 - 06 < PJB > Extracted from rdp.lisp .
;;;;LEGAL
AGPL3
;;;;
Copyright 2012 - 2016
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
;;;;
You should have received a copy of the GNU Affero General Public License
;;;; along with this program. If not, see </>.
;;;;**************************************************************************
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COM.INFORMATIMAGO.RDP")
(declaim (declaration stepper))
(defvar *non-terminal-stack* '()
"For error reporting.")
(define-condition parser-error (error)
((file :initarg :file :initform nil :reader parser-error-file)
(line :initarg :line :initform 1 :reader parser-error-line)
(column :initarg :column :initform 0 :reader parser-error-column)
(grammar :initarg :grammar :initform nil :reader parser-error-grammar)
(scanner :initarg :scanner :initform nil :reader parser-error-scanner)
(non-terminal-stack :initarg :non-terminal-stack :initform '() :reader parser-error-non-terminal-stack)
(format-control :initarg :format-control :initform "" :reader parser-error-format-control)
(format-arguments :initarg :format-arguments :initform '() :reader parser-error-format-arguments))
(:report print-parser-error))
(defmethod print-parser-error ((err parser-error) stream)
(declare (stepper disable))
(let ((*print-circle* nil)
(*print-pretty* nil))
(format stream
"~&~@[~A:~]~D:~D: ~?~%"
(let ((source (scanner-source (parser-error-scanner err))))
(typecase source
((or string file-stream) (or (ignore-errors (pathname source))
(parser-error-file err)))
(t (parser-error-file err))))
(parser-error-line err)
(parser-error-column err)
(parser-error-format-control err)
(parser-error-format-arguments err))
err))
(define-condition parser-end-of-source-not-reached (parser-error)
()
(:default-initargs
:format-control "Parsing finished before end-of-source."))
(define-condition unexpected-token-error (scanner-error)
((expected-tokens :initarg :expected-tokens
:initform '()
:reader unexpected-token-error-expected-tokens)
(non-terminal-stack :initarg :non-terminal-stack
:initform '()
:reader unexpected-token-error-non-terminal-stack))
(:report print-scanner-error))
(defmethod print-scanner-error ((err unexpected-token-error) stream)
(declare (stepper disable))
(when (next-method-p) (call-next-method))
(let ((*print-circle* nil)
(*print-pretty* nil))
(format stream "~&Expected token: ~S~%Non-terminal stack: ~S~%"
(unexpected-token-error-expected-tokens err)
(unexpected-token-error-non-terminal-stack err)))
err)
(defclass rdp-scanner (buffered-scanner)
()
(:default-initargs :line 0))
(defmethod scanner-current-token ((scanner rdp-scanner))
(token-kind (call-next-method)))
(defmethod scanner-end-of-line-p ((scanner rdp-scanner))
(or (null (scanner-buffer scanner))
column is 1 - based :
(< (length (scanner-buffer scanner))
(scanner-column scanner))))
(defmethod scanner-end-of-source-p ((scanner rdp-scanner))
(and (scanner-end-of-line-p scanner)
(let ((ps (slot-value scanner 'stream)))
(not (ungetchar ps (getchar ps))))))
(defmethod advance-line ((scanner rdp-scanner))
"RETURN: The new current token = old next token"
(cond
((scanner-end-of-source-p scanner)
#|End of File -- don't move.|#
(scanner-current-token scanner))
((setf (scanner-buffer scanner) (readline (slot-value scanner 'stream)))
;; We must skip the empty lines.
(incf (scanner-line scanner))
(setf (scanner-column scanner) 1
(scanner-current-text scanner) ""
(scanner-current-token scanner) nil)
;; (loop :do (incf (scanner-line scanner))
: while ( and ( zerop ( length ( scanner - buffer scanner ) ) )
( setf ( scanner - buffer scanner ) ( readline ( slot - value scanner ' stream ) ) ) ) )
;; got a line -- advance a token.
(scan-next-token scanner))
(t
Just got EOF
(setf (scanner-current-text scanner) "<END OF FILE>"
(scanner-current-token scanner) '|<END OF FILE>|))))
(defmethod accept ((scanner rdp-scanner) token)
(unless (word-equal token (scanner-current-token scanner))
(error-unexpected-token scanner token nil))
(prog1 (list (token-kind (scanner-current-token scanner))
(scanner-current-text scanner)
(scanner-column scanner))
(scan-next-token scanner)))
;;;; THE END ;;;;
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/rdp/rdp-lisp-boilerplate.lisp | lisp | coding : utf-8 -*-
**************************************************************************
LANGUAGE: Common-Lisp
SYSTEM: Common-Lisp
DESCRIPTION
The lisp parser boilerplate.
LEGAL
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
**************************************************************************
End of File -- don't move.
We must skip the empty lines.
(loop :do (incf (scanner-line scanner))
got a line -- advance a token.
THE END ;;;; | FILE : rdp-lisp-boilerplate.lisp
USER - INTERFACE :
< PJB > < >
MODIFICATIONS
2012 - 05 - 06 < PJB > Extracted from rdp.lisp .
AGPL3
Copyright 2012 - 2016
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COM.INFORMATIMAGO.RDP")
(declaim (declaration stepper))
(defvar *non-terminal-stack* '()
"For error reporting.")
(define-condition parser-error (error)
((file :initarg :file :initform nil :reader parser-error-file)
(line :initarg :line :initform 1 :reader parser-error-line)
(column :initarg :column :initform 0 :reader parser-error-column)
(grammar :initarg :grammar :initform nil :reader parser-error-grammar)
(scanner :initarg :scanner :initform nil :reader parser-error-scanner)
(non-terminal-stack :initarg :non-terminal-stack :initform '() :reader parser-error-non-terminal-stack)
(format-control :initarg :format-control :initform "" :reader parser-error-format-control)
(format-arguments :initarg :format-arguments :initform '() :reader parser-error-format-arguments))
(:report print-parser-error))
(defmethod print-parser-error ((err parser-error) stream)
(declare (stepper disable))
(let ((*print-circle* nil)
(*print-pretty* nil))
(format stream
"~&~@[~A:~]~D:~D: ~?~%"
(let ((source (scanner-source (parser-error-scanner err))))
(typecase source
((or string file-stream) (or (ignore-errors (pathname source))
(parser-error-file err)))
(t (parser-error-file err))))
(parser-error-line err)
(parser-error-column err)
(parser-error-format-control err)
(parser-error-format-arguments err))
err))
(define-condition parser-end-of-source-not-reached (parser-error)
()
(:default-initargs
:format-control "Parsing finished before end-of-source."))
(define-condition unexpected-token-error (scanner-error)
((expected-tokens :initarg :expected-tokens
:initform '()
:reader unexpected-token-error-expected-tokens)
(non-terminal-stack :initarg :non-terminal-stack
:initform '()
:reader unexpected-token-error-non-terminal-stack))
(:report print-scanner-error))
(defmethod print-scanner-error ((err unexpected-token-error) stream)
(declare (stepper disable))
(when (next-method-p) (call-next-method))
(let ((*print-circle* nil)
(*print-pretty* nil))
(format stream "~&Expected token: ~S~%Non-terminal stack: ~S~%"
(unexpected-token-error-expected-tokens err)
(unexpected-token-error-non-terminal-stack err)))
err)
(defclass rdp-scanner (buffered-scanner)
()
(:default-initargs :line 0))
(defmethod scanner-current-token ((scanner rdp-scanner))
(token-kind (call-next-method)))
(defmethod scanner-end-of-line-p ((scanner rdp-scanner))
(or (null (scanner-buffer scanner))
column is 1 - based :
(< (length (scanner-buffer scanner))
(scanner-column scanner))))
(defmethod scanner-end-of-source-p ((scanner rdp-scanner))
(and (scanner-end-of-line-p scanner)
(let ((ps (slot-value scanner 'stream)))
(not (ungetchar ps (getchar ps))))))
(defmethod advance-line ((scanner rdp-scanner))
"RETURN: The new current token = old next token"
(cond
((scanner-end-of-source-p scanner)
(scanner-current-token scanner))
((setf (scanner-buffer scanner) (readline (slot-value scanner 'stream)))
(incf (scanner-line scanner))
(setf (scanner-column scanner) 1
(scanner-current-text scanner) ""
(scanner-current-token scanner) nil)
: while ( and ( zerop ( length ( scanner - buffer scanner ) ) )
( setf ( scanner - buffer scanner ) ( readline ( slot - value scanner ' stream ) ) ) ) )
(scan-next-token scanner))
(t
Just got EOF
(setf (scanner-current-text scanner) "<END OF FILE>"
(scanner-current-token scanner) '|<END OF FILE>|))))
(defmethod accept ((scanner rdp-scanner) token)
(unless (word-equal token (scanner-current-token scanner))
(error-unexpected-token scanner token nil))
(prog1 (list (token-kind (scanner-current-token scanner))
(scanner-current-text scanner)
(scanner-column scanner))
(scan-next-token scanner)))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.