_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
b4b6cf33188e5df83bb7c7487a7f616c511d127b647ef397d615eda7d96a6940 | franklindyer/cs357-ta-materials | control_flow.rkt | #lang racket
(define x '(begin (display "x evaluated\n") #f))
(define y '(begin (display "y evaluated\n") #f))
(define z '(begin (display "z evaluated\n") #t))
(define btm '(eval btm))
'(and x y)
'(and (eval x) (eval y))
'(and (eval z) (eval x))
'(and (eval z) (eval z) (eval x) (eval z))
(define (comp-list-1 l1 l2)
(cond ((empty? l1) (empty? l2))
((empty? l2) #f)
(else (and (eqv? (eval (car l1)) (eval (car l2)))
(comp-list-1 (cdr l1) (cdr l2))))))
'(comp-list-1 '(1 2 3) '(1 2 3))
'(comp-list-1 '(1 2 3) '(1 2 3 4))
'(comp-list-1 '(x y z) '(y x z))
'(comp-list-1 '((eval x) (eval y) (eval z)) '((eval y) (eval x) (eval z)))
'(comp-list-1 '((eval x) (eval btm)) '((eval y) (eval btm)))
'(comp-list-1 '((eval x) (eval btm)) '((eval z) (eval btm)))
(define (comp-list-2 l1 l2)
(cond ((empty? l1) (empty? l2))
((empty? l2) #f)
(else (and (comp-list-2 (cdr l1) (cdr l2))
(eqv? (eval (car l1)) (eval (car l2)))))))
| null | https://raw.githubusercontent.com/franklindyer/cs357-ta-materials/087cdc29e13d7f0796d70f2bc9e08a20f626ac92/scheme/misc/control_flow.rkt | racket | #lang racket
(define x '(begin (display "x evaluated\n") #f))
(define y '(begin (display "y evaluated\n") #f))
(define z '(begin (display "z evaluated\n") #t))
(define btm '(eval btm))
'(and x y)
'(and (eval x) (eval y))
'(and (eval z) (eval x))
'(and (eval z) (eval z) (eval x) (eval z))
(define (comp-list-1 l1 l2)
(cond ((empty? l1) (empty? l2))
((empty? l2) #f)
(else (and (eqv? (eval (car l1)) (eval (car l2)))
(comp-list-1 (cdr l1) (cdr l2))))))
'(comp-list-1 '(1 2 3) '(1 2 3))
'(comp-list-1 '(1 2 3) '(1 2 3 4))
'(comp-list-1 '(x y z) '(y x z))
'(comp-list-1 '((eval x) (eval y) (eval z)) '((eval y) (eval x) (eval z)))
'(comp-list-1 '((eval x) (eval btm)) '((eval y) (eval btm)))
'(comp-list-1 '((eval x) (eval btm)) '((eval z) (eval btm)))
(define (comp-list-2 l1 l2)
(cond ((empty? l1) (empty? l2))
((empty? l2) #f)
(else (and (comp-list-2 (cdr l1) (cdr l2))
(eqv? (eval (car l1)) (eval (car l2)))))))
| |
944cb3b6acf06d8476af6e5d73a3581db4ee05da8f6bbd8668ed8145839ec9df | project-oak/hafnium-verification | exp.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Expressions *)
[@@@warning "+9"]
module T = struct
module T0 = struct
type op1 =
(* conversion *)
| Signed of {bits: int}
| Unsigned of {bits: int}
| Convert of {src: Typ.t}
(* array/struct operations *)
| Splat
| Select of int
[@@deriving compare, equal, hash, sexp]
type op2 =
(* comparison *)
| Eq
| Dq
| Gt
| Ge
| Lt
| Le
| Ugt
| Uge
| Ult
| Ule
| Ord
| Uno
(* arithmetic, numeric and pointer *)
| Add
| Sub
| Mul
| Div
| Rem
| Udiv
| Urem
(* boolean / bitwise *)
| And
| Or
| Xor
| Shl
| Lshr
| Ashr
(* array/struct operations *)
| Update of int
[@@deriving compare, equal, hash, sexp]
type op3 = (* if-then-else *)
| Conditional
[@@deriving compare, equal, hash, sexp]
type opN =
(* array/struct constants *)
| Record
| Struct_rec (** NOTE: may be cyclic *)
[@@deriving compare, equal, hash, sexp]
type t = {desc: desc; term: Term.t}
and desc =
| Reg of {name: string; typ: Typ.t}
| Nondet of {msg: string; typ: Typ.t}
| Label of {parent: string; name: string}
| Integer of {data: Z.t; typ: Typ.t}
| Float of {data: string; typ: Typ.t}
| Ap1 of op1 * Typ.t * t
| Ap2 of op2 * Typ.t * t * t
| Ap3 of op3 * Typ.t * t * t * t
| ApN of opN * Typ.t * t vector
[@@deriving compare, equal, hash, sexp]
end
include T0
include Comparator.Make (T0)
end
include T
let term e = e.term
let fix (f : (t -> 'a as 'f) -> 'f) (bot : 'f) (e : t) : 'a =
let rec fix_f seen e =
match e.desc with
| ApN (Struct_rec, _, _) ->
if List.mem ~equal:( == ) seen e then f bot e
else f (fix_f (e :: seen)) e
| _ -> f (fix_f seen) e
in
let rec fix_f_seen_nil e =
match e.desc with
| ApN (Struct_rec, _, _) -> f (fix_f [e]) e
| _ -> f fix_f_seen_nil e
in
fix_f_seen_nil e
let fix_flip (f : ('z -> t -> 'a as 'f) -> 'f) (bot : 'f) (z : 'z) (e : t) =
fix (fun f' e z -> f (fun z e -> f' e z) z e) (fun e z -> bot z e) e z
let pp_op2 fs op =
let pf fmt = Format.fprintf fs fmt in
match op with
| Eq -> pf "="
| Dq -> pf "@<1>≠"
| Gt -> pf ">"
| Ge -> pf "@<1>≥"
| Lt -> pf "<"
| Le -> pf "@<1>≤"
| Ugt -> pf "u>"
| Uge -> pf "@<2>u≥"
| Ult -> pf "u<"
| Ule -> pf "@<2>u≤"
| Ord -> pf "ord"
| Uno -> pf "uno"
| Add -> pf "+"
| Sub -> pf "-"
| Mul -> pf "@<1>×"
| Div -> pf "/"
| Udiv -> pf "udiv"
| Rem -> pf "rem"
| Urem -> pf "urem"
| And -> pf "&&"
| Or -> pf "||"
| Xor -> pf "xor"
| Shl -> pf "shl"
| Lshr -> pf "lshr"
| Ashr -> pf "ashr"
| Update idx -> pf "[_|%i→_]" idx
let rec pp fs exp =
let pp_ pp fs exp =
let pf fmt =
Format.pp_open_box fs 2 ;
Format.kfprintf (fun fs -> Format.pp_close_box fs ()) fs fmt
in
match exp.desc with
| Reg {name} -> (
match Var.of_term exp.term with
| Some v when Var.global v -> pf "%@%s" name
| _ -> pf "%%%s" name )
| Nondet {msg} -> pf "nondet \"%s\"" msg
| Label {name} -> pf "%s" name
| Integer {data; typ= Pointer _} when Z.equal Z.zero data -> pf "null"
| Integer {data} -> Trace.pp_styled `Magenta "%a" fs Z.pp data
| Float {data} -> pf "%s" data
| Ap1 (Signed {bits}, dst, arg) ->
pf "((%a)(s%i)@ %a)" Typ.pp dst bits pp arg
| Ap1 (Unsigned {bits}, dst, arg) ->
pf "((%a)(u%i)@ %a)" Typ.pp dst bits pp arg
| Ap1 (Convert {src}, dst, arg) ->
pf "((%a)(%a)@ %a)" Typ.pp dst Typ.pp src pp arg
| Ap1 (Splat, _, byt) -> pf "%a^" pp byt
| Ap1 (Select idx, _, rcd) -> pf "%a[%i]" pp rcd idx
| Ap2 (Update idx, _, rcd, elt) ->
pf "[%a@ @[| %i → %a@]]" pp rcd idx pp elt
| Ap2 (Xor, Integer {bits= 1}, {desc= Integer {data}}, x)
when Z.is_true data ->
pf "¬%a" pp x
| Ap2 (Xor, Integer {bits= 1}, x, {desc= Integer {data}})
when Z.is_true data ->
pf "¬%a" pp x
| Ap2 (op, _, x, y) -> pf "(%a@ %a %a)" pp x pp_op2 op pp y
| Ap3 (Conditional, _, cnd, thn, els) ->
pf "(%a@ ? %a@ : %a)" pp cnd pp thn pp els
| ApN (Record, _, elts) -> pf "{%a}" pp_record elts
| ApN (Struct_rec, _, elts) -> pf "{|%a|}" (Vector.pp ",@ " pp) elts
in
fix_flip pp_ (fun _ _ -> ()) fs exp
[@@warning "-9"]
and pp_record fs elts =
[%Trace.fprintf
fs "%a"
(fun fs elts ->
match
String.init (Vector.length elts) ~f:(fun i ->
match (Vector.get elts i).desc with
| Integer {data} -> Char.of_int_exn (Z.to_int data)
| _ -> raise (Invalid_argument "not a string") )
with
| s -> Format.fprintf fs "@[<h>%s@]" (String.escaped s)
| exception _ ->
Format.fprintf fs "@[<h>%a@]" (Vector.pp ",@ " pp) elts )
elts]
[@@warning "-9"]
(** Invariant *)
let valid_idx idx elts = 0 <= idx && idx < Vector.length elts
let rec invariant exp =
Invariant.invariant [%here] exp [%sexp_of: t]
@@ fun () ->
match exp.desc with
| Reg {typ} | Nondet {typ} -> assert (Typ.is_sized typ)
| Integer {data; typ} -> (
match typ with
| Integer {bits} ->
data in −(2^(bits − 1 ) ) to 2^(bits − 1 ) − 1
let n = Z.shift_left Z.one (bits - 1) in
assert (Z.(Compare.(neg n <= data && data < n)))
| Pointer _ -> assert (Z.equal Z.zero data)
| _ -> assert false )
| Float {typ} -> (
match typ with Float _ -> assert true | _ -> assert false )
| Label _ -> assert true
| Ap1 (Signed {bits}, dst, arg) -> (
match (dst, typ_of arg) with
| Integer {bits= dst_bits}, Typ.Integer _ -> assert (bits <= dst_bits)
| _ -> assert false )
| Ap1 (Unsigned {bits}, dst, arg) -> (
match (dst, typ_of arg) with
| Integer {bits= dst_bits}, Typ.Integer _ -> assert (bits < dst_bits)
| _ -> assert false )
| Ap1 (Convert {src= Integer _}, Integer _, _) -> assert false
| Ap1 (Convert {src}, dst, arg) ->
assert (Typ.convertible src dst) ;
assert (Typ.castable src (typ_of arg)) ;
assert (not (Typ.equal src dst) (* avoid redundant representations *))
| Ap1 (Select idx, typ, rcd) -> (
assert (Typ.castable typ (typ_of rcd)) ;
match typ with
| Array _ -> assert true
| Tuple {elts} | Struct {elts} -> assert (valid_idx idx elts)
| _ -> assert false )
| Ap1 (Splat, typ, byt) ->
assert (Typ.convertible Typ.byt (typ_of byt)) ;
assert (Typ.is_sized typ)
| Ap2 (Update idx, typ, rcd, elt) -> (
assert (Typ.castable typ (typ_of rcd)) ;
match typ with
| Tuple {elts} | Struct {elts} ->
assert (valid_idx idx elts) ;
assert (Typ.castable (Vector.get elts idx) (typ_of elt))
| Array {elt= typ_elt} -> assert (Typ.castable typ_elt (typ_of elt))
| _ -> assert false )
| Ap2 (op, typ, x, y) -> (
match (op, typ) with
| (Eq | Dq | Gt | Ge | Lt | Le), (Integer _ | Float _ | Pointer _)
|(Ugt | Uge | Ult | Ule), (Integer _ | Pointer _)
|(Ord | Uno), Float _
|(Add | Sub), (Integer _ | Float _ | Pointer _)
|(Mul | Div | Rem), (Integer _ | Float _)
|(Udiv | Urem | And | Or | Xor | Shl | Lshr | Ashr), Integer _ ->
let typ_x = typ_of x and typ_y = typ_of y in
assert (Typ.castable typ typ_x) ;
assert (Typ.castable typ_x typ_y)
| _ -> assert false )
| Ap3 (Conditional, typ, cnd, thn, els) ->
assert (Typ.is_sized typ) ;
assert (Typ.castable Typ.bool (typ_of cnd)) ;
assert (Typ.castable typ (typ_of thn)) ;
assert (Typ.castable typ (typ_of els))
| ApN ((Record | Struct_rec), typ, args) -> (
match typ with
| Array {elt} ->
assert (
Vector.for_all args ~f:(fun arg -> Typ.castable elt (typ_of arg))
)
| Tuple {elts} | Struct {elts} ->
assert (Vector.length elts = Vector.length args) ;
assert (
Vector.for_all2_exn elts args ~f:(fun typ arg ->
Typ.castable typ (typ_of arg) ) )
| _ -> assert false )
[@@warning "-9"]
(** Type query *)
and typ_of exp =
match exp.desc with
| Reg {typ} | Nondet {typ} | Integer {typ} | Float {typ} -> typ
| Label _ -> Typ.ptr
| Ap1 ((Signed _ | Unsigned _ | Convert _ | Splat), dst, _) -> dst
| Ap1 (Select idx, typ, _) -> (
match typ with
| Array {elt} -> elt
| Tuple {elts} | Struct {elts} -> Vector.get elts idx
| _ -> violates invariant exp )
| Ap2
( (Eq | Dq | Gt | Ge | Lt | Le | Ugt | Uge | Ult | Ule | Ord | Uno)
, _
, _
, _ ) ->
Typ.bool
| Ap2
( ( Add | Sub | Mul | Div | Rem | Udiv | Urem | And | Or | Xor | Shl
| Lshr | Ashr | Update _ )
, typ
, _
, _ )
|Ap3 (Conditional, typ, _, _, _)
|ApN ((Record | Struct_rec), typ, _) ->
typ
[@@warning "-9"]
let typ = typ_of
let pp_exp = pp
(** Registers are the expressions constructed by [Reg] *)
module Reg = struct
include T
let pp = pp
let var r =
match Var.of_term r.term with Some v -> v | _ -> violates invariant r
module Set = struct
include (
Set :
module type of Set with type ('elt, 'cmp) t := ('elt, 'cmp) Set.t )
type t = Set.M(T).t [@@deriving compare, equal, sexp]
let pp = Set.pp pp_exp
let empty = Set.empty (module T)
let of_list = Set.of_list (module T)
let union_list = Set.union_list (module T)
let vars = Set.fold ~init:Var.Set.empty ~f:(fun s r -> add s (var r))
end
module Map = struct
include (
Map :
module type of Map
with type ('key, 'value, 'cmp) t := ('key, 'value, 'cmp) Map.t )
type 'v t = 'v Map.M(T).t [@@deriving compare, equal, sexp]
let empty = Map.empty (module T)
end
let demangle =
let open Ctypes in
let cxa_demangle =
(* char *__cxa_demangle(const char *, char *, size_t *, int * ) *)
Foreign.foreign "__cxa_demangle"
( string @-> ptr char @-> ptr size_t @-> ptr int
@-> returning string_opt )
in
let null_ptr_char = from_voidp char null in
let null_ptr_size_t = from_voidp size_t null in
let status = allocate int 0 in
fun mangled ->
let demangled =
cxa_demangle mangled null_ptr_char null_ptr_size_t status
in
if !@status = 0 then demangled else None
let pp_demangled fs e =
match e.desc with
| Reg {name} -> (
match demangle name with
| Some demangled when not (String.equal name demangled) ->
Format.fprintf fs "“%s”" demangled
| _ -> () )
| _ -> ()
[@@warning "-9"]
let invariant x =
Invariant.invariant [%here] x [%sexp_of: t]
@@ fun () ->
match x.desc with Reg _ -> invariant x | _ -> assert false
let name r =
match r.desc with Reg x -> x.name | _ -> violates invariant r
let typ r = match r.desc with Reg x -> x.typ | _ -> violates invariant r
let of_exp e =
match e.desc with Reg _ -> Some (e |> check invariant) | _ -> None
let program ?global typ name =
{desc= Reg {name; typ}; term= Term.var (Var.program ?global name)}
|> check invariant
end
(** Construct *)
(* registers *)
let reg x = x
(* constants *)
let nondet typ msg =
{desc= Nondet {msg; typ}; term= Term.nondet msg} |> check invariant
let label ~parent ~name =
{desc= Label {parent; name}; term= Term.label ~parent ~name}
|> check invariant
let integer typ data =
{desc= Integer {data; typ}; term= Term.integer data} |> check invariant
let null = integer Typ.ptr Z.zero
let bool b = integer Typ.bool (Z.of_bool b)
let true_ = bool true
let false_ = bool false
let float typ data =
{desc= Float {data; typ}; term= Term.float data} |> check invariant
(* type conversions *)
let signed bits x ~to_:typ =
{desc= Ap1 (Signed {bits}, typ, x); term= Term.signed bits x.term}
|> check invariant
let unsigned bits x ~to_:typ =
{desc= Ap1 (Unsigned {bits}, typ, x); term= Term.unsigned bits x.term}
|> check invariant
let convert src ~to_:dst exp =
{ desc= Ap1 (Convert {src}, dst, exp)
; term= Term.convert src ~to_:dst exp.term }
|> check invariant
(* comparisons *)
let binary op mk ?typ x y =
let typ = match typ with Some typ -> typ | None -> typ_of x in
{desc= Ap2 (op, typ, x, y); term= mk x.term y.term} |> check invariant
let ubinary op mk ?typ x y =
let typ = match typ with Some typ -> typ | None -> typ_of x in
let umk x y =
let unsigned = Term.unsigned (Typ.bit_size_of typ) in
mk (unsigned x) (unsigned y)
in
binary op umk ~typ x y
let eq = binary Eq Term.eq
let dq = binary Dq Term.dq
let gt = binary Gt (fun x y -> Term.lt y x)
let ge = binary Ge (fun x y -> Term.le y x)
let lt = binary Lt Term.lt
let le = binary Le Term.le
let ugt = ubinary Ugt (fun x y -> Term.lt y x)
let uge = ubinary Uge (fun x y -> Term.le y x)
let ult = ubinary Ult Term.lt
let ule = ubinary Ule Term.le
let ord = binary Ord Term.ord
let uno = binary Uno Term.uno
(* arithmetic *)
let add = binary Add Term.add
let sub = binary Sub Term.sub
let mul = binary Mul Term.mul
let div = binary Div Term.div
let rem = binary Rem Term.rem
let udiv = ubinary Udiv Term.div
let urem = ubinary Urem Term.rem
(* boolean / bitwise *)
let and_ = binary And Term.and_
let or_ = binary Or Term.or_
(* bitwise *)
let xor = binary Xor Term.xor
let shl = binary Shl Term.shl
let lshr = binary Lshr Term.lshr
let ashr = binary Ashr Term.ashr
(* if-then-else *)
let conditional ?typ ~cnd ~thn ~els =
let typ = match typ with Some typ -> typ | None -> typ_of thn in
{ desc= Ap3 (Conditional, typ, cnd, thn, els)
; term= Term.conditional ~cnd:cnd.term ~thn:thn.term ~els:els.term }
|> check invariant
(* memory *)
let splat typ byt =
{desc= Ap1 (Splat, typ, byt); term= Term.splat byt.term}
|> check invariant
(* records (struct / array values) *)
let record typ elts =
{ desc= ApN (Record, typ, elts)
; term= Term.record (Vector.map ~f:(fun elt -> elt.term) elts) }
|> check invariant
let select typ rcd idx =
{desc= Ap1 (Select idx, typ, rcd); term= Term.select ~rcd:rcd.term ~idx}
|> check invariant
let update typ ~rcd idx ~elt =
{ desc= Ap2 (Update idx, typ, rcd, elt)
; term= Term.update ~rcd:rcd.term ~idx ~elt:elt.term }
|> check invariant
let struct_rec key =
let memo_id = Hashtbl.create key in
let rec_app = (Staged.unstage (Term.rec_app key)) Term.Record in
Staged.stage
@@ fun ~id typ elt_thks ->
match Hashtbl.find memo_id id with
| None ->
(* Add placeholder to prevent computing [elts] in calls to
[struct_rec] from [elt_thks] for recursive occurrences of [id]. *)
let elta = Array.create ~len:(Vector.length elt_thks) null in
let elts = Vector.of_array elta in
Hashtbl.set memo_id ~key:id ~data:elts ;
let term =
rec_app ~id (Vector.map ~f:(fun elt -> lazy elt.term) elts)
in
Vector.iteri elt_thks ~f:(fun i (lazy elt) -> elta.(i) <- elt) ;
{desc= ApN (Struct_rec, typ, elts); term} |> check invariant
| Some elts ->
(* Do not check invariant as invariant will be checked above after the
thunks are forced, before which invariant-checking may spuriously
fail. Note that it is important that the value constructed here
shares the array in the memo table, so that the update after
forcing the recursive thunks also updates this value. *)
{desc= ApN (Struct_rec, typ, elts); term= rec_app ~id Vector.empty}
let size_of exp = integer Typ.siz (Z.of_int (Typ.size_of (typ exp)))
(** Traverse *)
let fold_exps e ~init ~f =
let fold_exps_ fold_exps_ e z =
let z =
match e.desc with
| Ap1 (_, _, x) -> fold_exps_ x z
| Ap2 (_, _, x, y) -> fold_exps_ y (fold_exps_ x z)
| Ap3 (_, _, w, x, y) -> fold_exps_ w (fold_exps_ y (fold_exps_ x z))
| ApN (_, _, xs) ->
Vector.fold xs ~init:z ~f:(fun z elt -> fold_exps_ elt z)
| _ -> z
in
f z e
in
fix fold_exps_ (fun _ z -> z) e init
let fold_regs e ~init ~f =
fold_exps e ~init ~f:(fun z x ->
match x.desc with Reg _ -> f z (x :> Reg.t) | _ -> z )
(** Query *)
let is_true e =
match e.desc with
| Integer {data; typ= Integer {bits= 1; _}} -> Z.is_true data
| _ -> false
let is_false e =
match e.desc with
| Integer {data; typ= Integer {bits= 1; _}} -> Z.is_false data
| _ -> false
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/sledge/src/llair/exp.ml | ocaml | * Expressions
conversion
array/struct operations
comparison
arithmetic, numeric and pointer
boolean / bitwise
array/struct operations
if-then-else
array/struct constants
* NOTE: may be cyclic
* Invariant
avoid redundant representations
* Type query
* Registers are the expressions constructed by [Reg]
char *__cxa_demangle(const char *, char *, size_t *, int * )
* Construct
registers
constants
type conversions
comparisons
arithmetic
boolean / bitwise
bitwise
if-then-else
memory
records (struct / array values)
Add placeholder to prevent computing [elts] in calls to
[struct_rec] from [elt_thks] for recursive occurrences of [id].
Do not check invariant as invariant will be checked above after the
thunks are forced, before which invariant-checking may spuriously
fail. Note that it is important that the value constructed here
shares the array in the memo table, so that the update after
forcing the recursive thunks also updates this value.
* Traverse
* Query |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
[@@@warning "+9"]
module T = struct
module T0 = struct
type op1 =
| Signed of {bits: int}
| Unsigned of {bits: int}
| Convert of {src: Typ.t}
| Splat
| Select of int
[@@deriving compare, equal, hash, sexp]
type op2 =
| Eq
| Dq
| Gt
| Ge
| Lt
| Le
| Ugt
| Uge
| Ult
| Ule
| Ord
| Uno
| Add
| Sub
| Mul
| Div
| Rem
| Udiv
| Urem
| And
| Or
| Xor
| Shl
| Lshr
| Ashr
| Update of int
[@@deriving compare, equal, hash, sexp]
| Conditional
[@@deriving compare, equal, hash, sexp]
type opN =
| Record
[@@deriving compare, equal, hash, sexp]
type t = {desc: desc; term: Term.t}
and desc =
| Reg of {name: string; typ: Typ.t}
| Nondet of {msg: string; typ: Typ.t}
| Label of {parent: string; name: string}
| Integer of {data: Z.t; typ: Typ.t}
| Float of {data: string; typ: Typ.t}
| Ap1 of op1 * Typ.t * t
| Ap2 of op2 * Typ.t * t * t
| Ap3 of op3 * Typ.t * t * t * t
| ApN of opN * Typ.t * t vector
[@@deriving compare, equal, hash, sexp]
end
include T0
include Comparator.Make (T0)
end
include T
let term e = e.term
let fix (f : (t -> 'a as 'f) -> 'f) (bot : 'f) (e : t) : 'a =
let rec fix_f seen e =
match e.desc with
| ApN (Struct_rec, _, _) ->
if List.mem ~equal:( == ) seen e then f bot e
else f (fix_f (e :: seen)) e
| _ -> f (fix_f seen) e
in
let rec fix_f_seen_nil e =
match e.desc with
| ApN (Struct_rec, _, _) -> f (fix_f [e]) e
| _ -> f fix_f_seen_nil e
in
fix_f_seen_nil e
let fix_flip (f : ('z -> t -> 'a as 'f) -> 'f) (bot : 'f) (z : 'z) (e : t) =
fix (fun f' e z -> f (fun z e -> f' e z) z e) (fun e z -> bot z e) e z
let pp_op2 fs op =
let pf fmt = Format.fprintf fs fmt in
match op with
| Eq -> pf "="
| Dq -> pf "@<1>≠"
| Gt -> pf ">"
| Ge -> pf "@<1>≥"
| Lt -> pf "<"
| Le -> pf "@<1>≤"
| Ugt -> pf "u>"
| Uge -> pf "@<2>u≥"
| Ult -> pf "u<"
| Ule -> pf "@<2>u≤"
| Ord -> pf "ord"
| Uno -> pf "uno"
| Add -> pf "+"
| Sub -> pf "-"
| Mul -> pf "@<1>×"
| Div -> pf "/"
| Udiv -> pf "udiv"
| Rem -> pf "rem"
| Urem -> pf "urem"
| And -> pf "&&"
| Or -> pf "||"
| Xor -> pf "xor"
| Shl -> pf "shl"
| Lshr -> pf "lshr"
| Ashr -> pf "ashr"
| Update idx -> pf "[_|%i→_]" idx
let rec pp fs exp =
let pp_ pp fs exp =
let pf fmt =
Format.pp_open_box fs 2 ;
Format.kfprintf (fun fs -> Format.pp_close_box fs ()) fs fmt
in
match exp.desc with
| Reg {name} -> (
match Var.of_term exp.term with
| Some v when Var.global v -> pf "%@%s" name
| _ -> pf "%%%s" name )
| Nondet {msg} -> pf "nondet \"%s\"" msg
| Label {name} -> pf "%s" name
| Integer {data; typ= Pointer _} when Z.equal Z.zero data -> pf "null"
| Integer {data} -> Trace.pp_styled `Magenta "%a" fs Z.pp data
| Float {data} -> pf "%s" data
| Ap1 (Signed {bits}, dst, arg) ->
pf "((%a)(s%i)@ %a)" Typ.pp dst bits pp arg
| Ap1 (Unsigned {bits}, dst, arg) ->
pf "((%a)(u%i)@ %a)" Typ.pp dst bits pp arg
| Ap1 (Convert {src}, dst, arg) ->
pf "((%a)(%a)@ %a)" Typ.pp dst Typ.pp src pp arg
| Ap1 (Splat, _, byt) -> pf "%a^" pp byt
| Ap1 (Select idx, _, rcd) -> pf "%a[%i]" pp rcd idx
| Ap2 (Update idx, _, rcd, elt) ->
pf "[%a@ @[| %i → %a@]]" pp rcd idx pp elt
| Ap2 (Xor, Integer {bits= 1}, {desc= Integer {data}}, x)
when Z.is_true data ->
pf "¬%a" pp x
| Ap2 (Xor, Integer {bits= 1}, x, {desc= Integer {data}})
when Z.is_true data ->
pf "¬%a" pp x
| Ap2 (op, _, x, y) -> pf "(%a@ %a %a)" pp x pp_op2 op pp y
| Ap3 (Conditional, _, cnd, thn, els) ->
pf "(%a@ ? %a@ : %a)" pp cnd pp thn pp els
| ApN (Record, _, elts) -> pf "{%a}" pp_record elts
| ApN (Struct_rec, _, elts) -> pf "{|%a|}" (Vector.pp ",@ " pp) elts
in
fix_flip pp_ (fun _ _ -> ()) fs exp
[@@warning "-9"]
and pp_record fs elts =
[%Trace.fprintf
fs "%a"
(fun fs elts ->
match
String.init (Vector.length elts) ~f:(fun i ->
match (Vector.get elts i).desc with
| Integer {data} -> Char.of_int_exn (Z.to_int data)
| _ -> raise (Invalid_argument "not a string") )
with
| s -> Format.fprintf fs "@[<h>%s@]" (String.escaped s)
| exception _ ->
Format.fprintf fs "@[<h>%a@]" (Vector.pp ",@ " pp) elts )
elts]
[@@warning "-9"]
let valid_idx idx elts = 0 <= idx && idx < Vector.length elts
let rec invariant exp =
Invariant.invariant [%here] exp [%sexp_of: t]
@@ fun () ->
match exp.desc with
| Reg {typ} | Nondet {typ} -> assert (Typ.is_sized typ)
| Integer {data; typ} -> (
match typ with
| Integer {bits} ->
data in −(2^(bits − 1 ) ) to 2^(bits − 1 ) − 1
let n = Z.shift_left Z.one (bits - 1) in
assert (Z.(Compare.(neg n <= data && data < n)))
| Pointer _ -> assert (Z.equal Z.zero data)
| _ -> assert false )
| Float {typ} -> (
match typ with Float _ -> assert true | _ -> assert false )
| Label _ -> assert true
| Ap1 (Signed {bits}, dst, arg) -> (
match (dst, typ_of arg) with
| Integer {bits= dst_bits}, Typ.Integer _ -> assert (bits <= dst_bits)
| _ -> assert false )
| Ap1 (Unsigned {bits}, dst, arg) -> (
match (dst, typ_of arg) with
| Integer {bits= dst_bits}, Typ.Integer _ -> assert (bits < dst_bits)
| _ -> assert false )
| Ap1 (Convert {src= Integer _}, Integer _, _) -> assert false
| Ap1 (Convert {src}, dst, arg) ->
assert (Typ.convertible src dst) ;
assert (Typ.castable src (typ_of arg)) ;
| Ap1 (Select idx, typ, rcd) -> (
assert (Typ.castable typ (typ_of rcd)) ;
match typ with
| Array _ -> assert true
| Tuple {elts} | Struct {elts} -> assert (valid_idx idx elts)
| _ -> assert false )
| Ap1 (Splat, typ, byt) ->
assert (Typ.convertible Typ.byt (typ_of byt)) ;
assert (Typ.is_sized typ)
| Ap2 (Update idx, typ, rcd, elt) -> (
assert (Typ.castable typ (typ_of rcd)) ;
match typ with
| Tuple {elts} | Struct {elts} ->
assert (valid_idx idx elts) ;
assert (Typ.castable (Vector.get elts idx) (typ_of elt))
| Array {elt= typ_elt} -> assert (Typ.castable typ_elt (typ_of elt))
| _ -> assert false )
| Ap2 (op, typ, x, y) -> (
match (op, typ) with
| (Eq | Dq | Gt | Ge | Lt | Le), (Integer _ | Float _ | Pointer _)
|(Ugt | Uge | Ult | Ule), (Integer _ | Pointer _)
|(Ord | Uno), Float _
|(Add | Sub), (Integer _ | Float _ | Pointer _)
|(Mul | Div | Rem), (Integer _ | Float _)
|(Udiv | Urem | And | Or | Xor | Shl | Lshr | Ashr), Integer _ ->
let typ_x = typ_of x and typ_y = typ_of y in
assert (Typ.castable typ typ_x) ;
assert (Typ.castable typ_x typ_y)
| _ -> assert false )
| Ap3 (Conditional, typ, cnd, thn, els) ->
assert (Typ.is_sized typ) ;
assert (Typ.castable Typ.bool (typ_of cnd)) ;
assert (Typ.castable typ (typ_of thn)) ;
assert (Typ.castable typ (typ_of els))
| ApN ((Record | Struct_rec), typ, args) -> (
match typ with
| Array {elt} ->
assert (
Vector.for_all args ~f:(fun arg -> Typ.castable elt (typ_of arg))
)
| Tuple {elts} | Struct {elts} ->
assert (Vector.length elts = Vector.length args) ;
assert (
Vector.for_all2_exn elts args ~f:(fun typ arg ->
Typ.castable typ (typ_of arg) ) )
| _ -> assert false )
[@@warning "-9"]
and typ_of exp =
match exp.desc with
| Reg {typ} | Nondet {typ} | Integer {typ} | Float {typ} -> typ
| Label _ -> Typ.ptr
| Ap1 ((Signed _ | Unsigned _ | Convert _ | Splat), dst, _) -> dst
| Ap1 (Select idx, typ, _) -> (
match typ with
| Array {elt} -> elt
| Tuple {elts} | Struct {elts} -> Vector.get elts idx
| _ -> violates invariant exp )
| Ap2
( (Eq | Dq | Gt | Ge | Lt | Le | Ugt | Uge | Ult | Ule | Ord | Uno)
, _
, _
, _ ) ->
Typ.bool
| Ap2
( ( Add | Sub | Mul | Div | Rem | Udiv | Urem | And | Or | Xor | Shl
| Lshr | Ashr | Update _ )
, typ
, _
, _ )
|Ap3 (Conditional, typ, _, _, _)
|ApN ((Record | Struct_rec), typ, _) ->
typ
[@@warning "-9"]
let typ = typ_of
let pp_exp = pp
module Reg = struct
include T
let pp = pp
let var r =
match Var.of_term r.term with Some v -> v | _ -> violates invariant r
module Set = struct
include (
Set :
module type of Set with type ('elt, 'cmp) t := ('elt, 'cmp) Set.t )
type t = Set.M(T).t [@@deriving compare, equal, sexp]
let pp = Set.pp pp_exp
let empty = Set.empty (module T)
let of_list = Set.of_list (module T)
let union_list = Set.union_list (module T)
let vars = Set.fold ~init:Var.Set.empty ~f:(fun s r -> add s (var r))
end
module Map = struct
include (
Map :
module type of Map
with type ('key, 'value, 'cmp) t := ('key, 'value, 'cmp) Map.t )
type 'v t = 'v Map.M(T).t [@@deriving compare, equal, sexp]
let empty = Map.empty (module T)
end
let demangle =
let open Ctypes in
let cxa_demangle =
Foreign.foreign "__cxa_demangle"
( string @-> ptr char @-> ptr size_t @-> ptr int
@-> returning string_opt )
in
let null_ptr_char = from_voidp char null in
let null_ptr_size_t = from_voidp size_t null in
let status = allocate int 0 in
fun mangled ->
let demangled =
cxa_demangle mangled null_ptr_char null_ptr_size_t status
in
if !@status = 0 then demangled else None
let pp_demangled fs e =
match e.desc with
| Reg {name} -> (
match demangle name with
| Some demangled when not (String.equal name demangled) ->
Format.fprintf fs "“%s”" demangled
| _ -> () )
| _ -> ()
[@@warning "-9"]
let invariant x =
Invariant.invariant [%here] x [%sexp_of: t]
@@ fun () ->
match x.desc with Reg _ -> invariant x | _ -> assert false
let name r =
match r.desc with Reg x -> x.name | _ -> violates invariant r
let typ r = match r.desc with Reg x -> x.typ | _ -> violates invariant r
let of_exp e =
match e.desc with Reg _ -> Some (e |> check invariant) | _ -> None
let program ?global typ name =
{desc= Reg {name; typ}; term= Term.var (Var.program ?global name)}
|> check invariant
end
let reg x = x
let nondet typ msg =
{desc= Nondet {msg; typ}; term= Term.nondet msg} |> check invariant
let label ~parent ~name =
{desc= Label {parent; name}; term= Term.label ~parent ~name}
|> check invariant
let integer typ data =
{desc= Integer {data; typ}; term= Term.integer data} |> check invariant
let null = integer Typ.ptr Z.zero
let bool b = integer Typ.bool (Z.of_bool b)
let true_ = bool true
let false_ = bool false
let float typ data =
{desc= Float {data; typ}; term= Term.float data} |> check invariant
let signed bits x ~to_:typ =
{desc= Ap1 (Signed {bits}, typ, x); term= Term.signed bits x.term}
|> check invariant
let unsigned bits x ~to_:typ =
{desc= Ap1 (Unsigned {bits}, typ, x); term= Term.unsigned bits x.term}
|> check invariant
let convert src ~to_:dst exp =
{ desc= Ap1 (Convert {src}, dst, exp)
; term= Term.convert src ~to_:dst exp.term }
|> check invariant
let binary op mk ?typ x y =
let typ = match typ with Some typ -> typ | None -> typ_of x in
{desc= Ap2 (op, typ, x, y); term= mk x.term y.term} |> check invariant
let ubinary op mk ?typ x y =
let typ = match typ with Some typ -> typ | None -> typ_of x in
let umk x y =
let unsigned = Term.unsigned (Typ.bit_size_of typ) in
mk (unsigned x) (unsigned y)
in
binary op umk ~typ x y
let eq = binary Eq Term.eq
let dq = binary Dq Term.dq
let gt = binary Gt (fun x y -> Term.lt y x)
let ge = binary Ge (fun x y -> Term.le y x)
let lt = binary Lt Term.lt
let le = binary Le Term.le
let ugt = ubinary Ugt (fun x y -> Term.lt y x)
let uge = ubinary Uge (fun x y -> Term.le y x)
let ult = ubinary Ult Term.lt
let ule = ubinary Ule Term.le
let ord = binary Ord Term.ord
let uno = binary Uno Term.uno
let add = binary Add Term.add
let sub = binary Sub Term.sub
let mul = binary Mul Term.mul
let div = binary Div Term.div
let rem = binary Rem Term.rem
let udiv = ubinary Udiv Term.div
let urem = ubinary Urem Term.rem
let and_ = binary And Term.and_
let or_ = binary Or Term.or_
let xor = binary Xor Term.xor
let shl = binary Shl Term.shl
let lshr = binary Lshr Term.lshr
let ashr = binary Ashr Term.ashr
let conditional ?typ ~cnd ~thn ~els =
let typ = match typ with Some typ -> typ | None -> typ_of thn in
{ desc= Ap3 (Conditional, typ, cnd, thn, els)
; term= Term.conditional ~cnd:cnd.term ~thn:thn.term ~els:els.term }
|> check invariant
let splat typ byt =
{desc= Ap1 (Splat, typ, byt); term= Term.splat byt.term}
|> check invariant
let record typ elts =
{ desc= ApN (Record, typ, elts)
; term= Term.record (Vector.map ~f:(fun elt -> elt.term) elts) }
|> check invariant
let select typ rcd idx =
{desc= Ap1 (Select idx, typ, rcd); term= Term.select ~rcd:rcd.term ~idx}
|> check invariant
let update typ ~rcd idx ~elt =
{ desc= Ap2 (Update idx, typ, rcd, elt)
; term= Term.update ~rcd:rcd.term ~idx ~elt:elt.term }
|> check invariant
let struct_rec key =
let memo_id = Hashtbl.create key in
let rec_app = (Staged.unstage (Term.rec_app key)) Term.Record in
Staged.stage
@@ fun ~id typ elt_thks ->
match Hashtbl.find memo_id id with
| None ->
let elta = Array.create ~len:(Vector.length elt_thks) null in
let elts = Vector.of_array elta in
Hashtbl.set memo_id ~key:id ~data:elts ;
let term =
rec_app ~id (Vector.map ~f:(fun elt -> lazy elt.term) elts)
in
Vector.iteri elt_thks ~f:(fun i (lazy elt) -> elta.(i) <- elt) ;
{desc= ApN (Struct_rec, typ, elts); term} |> check invariant
| Some elts ->
{desc= ApN (Struct_rec, typ, elts); term= rec_app ~id Vector.empty}
let size_of exp = integer Typ.siz (Z.of_int (Typ.size_of (typ exp)))
let fold_exps e ~init ~f =
let fold_exps_ fold_exps_ e z =
let z =
match e.desc with
| Ap1 (_, _, x) -> fold_exps_ x z
| Ap2 (_, _, x, y) -> fold_exps_ y (fold_exps_ x z)
| Ap3 (_, _, w, x, y) -> fold_exps_ w (fold_exps_ y (fold_exps_ x z))
| ApN (_, _, xs) ->
Vector.fold xs ~init:z ~f:(fun z elt -> fold_exps_ elt z)
| _ -> z
in
f z e
in
fix fold_exps_ (fun _ z -> z) e init
let fold_regs e ~init ~f =
fold_exps e ~init ~f:(fun z x ->
match x.desc with Reg _ -> f z (x :> Reg.t) | _ -> z )
let is_true e =
match e.desc with
| Integer {data; typ= Integer {bits= 1; _}} -> Z.is_true data
| _ -> false
let is_false e =
match e.desc with
| Integer {data; typ= Integer {bits= 1; _}} -> Z.is_false data
| _ -> false
|
1cfbde1f566d998b2c1ef1a4e8da4de971a5dfb680859ec9adca45d453093884 | mransan/raft | raft_protocol.ml | module Types = Raft_types
module Follower = Raft_helper.Follower
module Candidate = Raft_helper.Candidate
module Leader = Raft_helper.Leader
module Configuration = Raft_helper.Configuration
module Log = Raft_log
module Timeout_event = Raft_helper.Timeout_event
module Helper = Raft_helper
let make_result ?(msgs_to_send = []) ?leader_change ?(deleted_logs = [])
?(committed_logs = []) ?(added_logs = []) state = {
Types.state;
messages_to_send = msgs_to_send;
leader_change;
committed_logs;
added_logs;
deleted_logs;
}
module Log_entry_util = struct
let make_append_entries prev_log_index state =
let to_send, prev_log_term =
let since = prev_log_index in
let max = state.Types.configuration.Types.max_nb_logs_per_message in
Log.log_entries_since ~since ~max state.Types.log
in
let request = Types.({
leader_term = state.current_term;
leader_id = state.server_id;
prev_log_index;
prev_log_term;
log_entries = to_send;
leader_commit = state.commit_index;
}) in
request
let compute_append_entries ?(force = false) state followers now =
let rec aux followers msgs_to_send = function
| [] -> (List.rev followers, msgs_to_send)
| follower::tl ->
let {
Types.follower_id;
heartbeat_deadline;
outstanding_request;
next_index;_
} = follower in
let shoud_send_request =
if force || now >= heartbeat_deadline
then true
(* The heartbeat deadline is past due, the [Leader] must
* sent an [Append_entries] request. *)
else
if outstanding_request
then false
(* In case of an outstanding request there is no point
* in sending a new request to that server.
* Even if the outstanding request was lost and it could be
* beneficial to send a new request, this would happen
* at the next heartbeat. We assume it's more likely that
* the server is down and therefore there is no need to keep
* on sending the same request over and over. *)
else
let prev_index = next_index - 1 in
let last_log_index = Log.last_log_index state.Types.log in
if prev_index = last_log_index
then false
(* The receipient has the most recent log entry and the
* heartbeat deadline has not expired, no need to send a
* new heartbeat. *)
else true
(* The recipient is missing recent log entries *)
in
if shoud_send_request
then
let request = make_append_entries (next_index - 1) state in
let follower =
let outstanding_request = true in
let heartbeat_deadline =
now +. state.Types.configuration.Types.hearbeat_timeout
in
{follower with
Types.outstanding_request;
Types.heartbeat_deadline;
}
in
let followers = follower::followers in
let msgs_to_send =
let msg = (Types.Append_entries_request request, follower_id) in
msg::msgs_to_send
in
aux followers msgs_to_send tl
else
aux (follower::followers) msgs_to_send tl
in
aux [] [] followers
* { 2 Request Vote }
let make_request_vote_request state =
let index, term = Log.last_log_index_and_term state.Types.log in
Types.({
candidate_term = state.current_term;
candidate_id = state.server_id;
candidate_last_log_index = index;
candidate_last_log_term = term;
})
let handle_request_vote_request state request now =
let {
Types.candidate_id;
candidate_term;
candidate_last_log_index;_} = request in
let make_response state vote_granted =
Types.({
voter_term = state.current_term;
voter_id = state.server_id ;
vote_granted;
})
in
if candidate_term < state.Types.current_term
then
(* This request is coming from a candidate lagging behind ...
* no vote for him. *)
(state, make_response state false)
else
let state =
Enforce invariant that if this server is lagging behind
* it must convert to a follower and update to that term .
* it must convert to a follower and update to that term. *)
if candidate_term > state.Types.current_term
then Follower.become ~term:candidate_term ~now state
else state
in
let last_log_index = Log.last_log_index state.Types.log in
if candidate_last_log_index < last_log_index
then
Enforce the safety constraint by denying vote if this server
* last log is more recent than the candidate one .
* last log is more recent than the candidate one.*)
(state, make_response state false)
else
let role = state.Types.role in
match role with
| Types.Follower {Types.voted_for = None; _} ->
This server has never voted before , candidate is getting the vote
*
* In accordance to the ` Rules for Servers ` , the follower must
* reset its election deadline when granting its vote .
*
* In accordance to the `Rules for Servers`, the follower must
* reset its election deadline when granting its vote. *)
let {
Types.configuration = {
Types.election_timeout; _
}; _} = state in
let state = {state with
Types.role = Types.Follower {
Types.voted_for = Some candidate_id;
Types.current_leader = None;
Types.election_deadline = now +. election_timeout;
}
} in
(state, make_response state true)
| Types.Follower {Types.voted_for = Some id; _} when id = candidate_id ->
(* This server has already voted for that candidate... reminding him *)
(state, make_response state true)
| _ ->
(* Server has previously voted for another candidate or
* itself *)
(state, make_response state false)
let handle_request_vote_response state response now =
let {Types.current_term; role; configuration; _ } = state in
let {Types.voter_term; vote_granted; _ } = response in
if voter_term > current_term
then
Enforce invariant that if this server is lagging behind
* it must convert to a follower and update to the latest term .
* it must convert to a follower and update to the latest term. *)
let state = Follower.become ~term:voter_term ~now state in
(state, [])
else
match role, vote_granted with
| Types.Candidate ({Types.vote_count; _ } as candidate_state) , true ->
if Configuration.is_majority configuration (vote_count + 1)
then
(* By reaching a majority of votes, the candidate is now
* the new leader *)
let state = Leader.become ~now state in
(* As a new leader, the server must send Append entries request
* to the other servers to both establish its leadership and
* start synching its log with the others. *)
begin match state.Types.role with
| Types.Leader followers ->
let followers, msgs_to_send =
let force = true in
Log_entry_util.compute_append_entries ~force state followers now
in
let state = Types.{state with role = Leader followers} in
(state, msgs_to_send)
| _ -> assert(false)
end
else
(* Candidate has a new vote but not yet reached the majority *)
let new_state = Types.{state with
role = Candidate (Candidate.increment_vote_count candidate_state);
} in
(new_state, [] (* no message to send *))
| Types.Candidate _ , false
(* The vote was denied, the election keeps on going until
* its deadline. *)
| Types.Follower _ , _
| Types.Leader _ , _ -> (state, [])
If the server is either or Leader , it means that
* it has changed role in between the time it sent the
* [ RequestVote ] request and this response .
* This response can therefore safely be ignored and the server
* keeps its current state . Types .
* it has changed role in between the time it sent the
* [RequestVote] request and this response.
* This response can therefore safely be ignored and the server
* keeps its current state.Types. *)
* { 2 Append Entries }
let update_state leader_commit receiver_last_log_index log state =
if leader_commit > state.Types.commit_index
then
let commit_index = min leader_commit receiver_last_log_index in
{state with Types.log; commit_index}
else
{state with Types.log}
let handle_append_entries_request state request now =
let {Types.leader_term; leader_id; _} = request in
let make_response state result =
Types.({
receiver_term = state.current_term;
receiver_id = state.server_id;
result;
})
in
if leader_term < state.Types.current_term
then
(* This request is coming from a leader lagging behind... *)
(state, make_response state Types.Term_failure, Log.empty_diff)
else
(* This request is coming from a legetimate leader,
* let's ensure that this server is a follower. *)
let state =
let current_leader = leader_id and term = leader_term in
Follower.become ~current_leader ~term ~now state
in
Next step is to handle the log entries from the leader .
let {
Types.prev_log_index;
prev_log_term;
log_entries;
leader_commit; _ } = request in
let (
receiver_last_log_index,
receiver_last_log_term
) = Log.last_log_index_and_term state.Types.log in
let commit_index = state.Types.commit_index in
if prev_log_index < commit_index
then
(* The only reason that can happen is if the messages
* are delivered out of order. (Which is completely possible).
* No need to update this follower. *)
let success = Types.Success receiver_last_log_index in
(state, make_response state success, Log.empty_diff)
else
if leader_term = receiver_last_log_term
then
match compare prev_log_index receiver_last_log_index with
| 0 ->
(* Leader info about the receiver last log index is matching
* perfectly, we can append the logs. *)
let log, log_diff =
Log.add_log_entries ~log_entries state.Types.log
in
let receiver_last_log_index = Log.last_log_index log in
let state =
update_state leader_commit receiver_last_log_index log state
in
let success = Types.Success receiver_last_log_index in
(state, make_response state success, log_diff)
| x when x > 0 ->
(* This should really never happen since:
* - No logs belonging to the Leader term can be removed
* - The leader is supposed to keep track of the latest log from
* the receiver within the same term.
*)
let failure = Types.Log_failure receiver_last_log_index in
(state, make_response state failure, Log.empty_diff)
x when x < 0
(* This case is possible when messages are received out of order by
* the Follower
*
* Note that even if the prev_log_index is earlier, it's important
* that no log entry is removed from the log if they come from the
* current leader.
*
* The current leader might have sent a commit message back to a
* client believing that the log entry is replicated on this server.
* If we remove the log entry we violate the assumption. *)
let success = Types.Success receiver_last_log_index in
(state, make_response state success, Log.empty_diff)
else (* leader_term > receiver_last_log_term *)
if prev_log_index > receiver_last_log_index
then
(* This is likely the case after a new election, the Leader has
* more log entries in its log and assumes that all followers have
* the same number of log entries. *)
let failure = Types.Log_failure receiver_last_log_index in
(state, make_response state failure, Log.empty_diff)
else
(* Because it is a new Leader, this follower can safely remove all
* the logs from previous terms which were not committed. *)
match Log.remove_log_since ~prev_log_index
~prev_log_term state.Types.log with
| exception Not_found ->
let failure = Types.Log_failure commit_index in
(state, make_response state failure, Log.empty_diff)
(* This is the case where there is a mismatch between the [Leader]
* and this server and the log entry identified with
* (prev_log_index, prev_log_term)
* could not be found.
*
* In such a case, the safest log entry to synchronize upon is the
* commit_index
* of the follower. *)
| log, log_diff ->
let log, log_diff' = Log.add_log_entries ~log_entries log in
let log_diff = Log.merge_diff log_diff log_diff' in
let receiver_last_log_index = Log.last_log_index log in
let state =
update_state leader_commit receiver_last_log_index log state
in
let success = Types.Success receiver_last_log_index in
(state, make_response state success, log_diff)
let handle_append_entries_response state response now =
let {
Types.receiver_term;
receiver_id = follower_id ;
result} = response in
if receiver_term > state.Types.current_term
then
Enforce invariant that if the server is lagging behind
* it must convert to a follower and update to that term .
* it must convert to a follower and update to that term. *)
(Follower.become ~term:receiver_term ~now state , [])
else
match state.Types.role with
| Types.Follower _
| Types.Candidate _ -> (state, [])
| Types.Leader followers ->
let followers =
Leader.record_response_received ~follower_id followers
in
begin match result with
| Types.Success follower_last_log_index ->
(* Log entries were successfully inserted by the receiver...
*
* let's update our leader state about that receiver *)
let configuration = state.Types.configuration in
let followers , nb_of_replications =
Leader.update_follower_last_log_index
~follower_id ~index:follower_last_log_index followers
in
let state =
(* Check if the received log entry from has reached
* a majority of server.
* Note that we need to add `+1` simply to count this
* server (ie leader) which does not update its next/match
* *)
if Configuration.is_majority configuration (nb_of_replications + 1) &&
follower_last_log_index > state.Types.commit_index
then {state with Types.commit_index = follower_last_log_index;}
else state
in
let followers, msgs_to_send =
Log_entry_util.compute_append_entries state followers now
in
let state = Types.({state with role = Leader followers}) in
(state, msgs_to_send)
| Types.Log_failure follower_last_log_index ->
(* The receiver log is not matching this server current belief.
* If a leader this server should decrement the next
* log index and retry the append. *)
let leader_state =
Leader.decrement_next_index
~follower_last_log_index ~follower_id state followers
in
let leader_state, msgs_to_send =
Log_entry_util.compute_append_entries state leader_state now
in
let state = Types.({state with role = Leader leader_state}) in
(state, msgs_to_send)
| Types.Term_failure ->
(state, [])
(* The receiver could have detected that this server term was not the
* latest one and sent the [Term_failure] response.
*
* This could typically happen in a network partition:
*
* Old-leader---------X-------New Leader
* \ /
* ---------Follower---------
*
* In the diagram above this server is the Old leader.
*)
end (* match result *)
let init ?log ?commit_index ?current_term ~configuration ~now ~server_id () =
Follower.make ?log ?commit_index ?current_term
~configuration ~now ~server_id ()
let handle_message state message now =
let state', msgs_to_send, log_diff =
match message with
| Types.Request_vote_request ({Types.candidate_id; _ } as r) ->
let state, response = handle_request_vote_request state r now in
let msgs =
(Types.Request_vote_response response, candidate_id)::[]
in
(state, msgs, Log.empty_diff)
| Types.Append_entries_request ({Types.leader_id; _ } as r) ->
let state, response, log_diff =
handle_append_entries_request state r now
in
let msgs =
(Types.Append_entries_response response, leader_id) :: []
in
(state, msgs, log_diff)
| Types.Request_vote_response r ->
let state, msgs = handle_request_vote_response state r now in
(state, msgs, Log.empty_diff)
| Types.Append_entries_response r ->
let state, msgs = handle_append_entries_response state r now in
(state, msgs, Log.empty_diff)
in
let leader_change = Helper.Diff.leader_change state state' in
let committed_logs = Helper.Diff.committed_logs state state' in
let {Log.added_logs; deleted_logs} = log_diff in
make_result ~msgs_to_send ?leader_change ~added_logs
~deleted_logs ~committed_logs state'
Iterates over all the other server ids . ( ie the ones different
* from the state i d ) .
* from the state id). *)
let fold_over_servers f e0 state =
let {
Types.server_id;
configuration = {Types.nb_of_server; _}; _
} = state in
let rec aux acc = function
| -1 -> acc
| id ->
let next = id - 1 in
if id = server_id
then aux acc next
else aux (f acc id) next
in
aux e0 (nb_of_server - 1)
let handle_new_election_timeout state now =
let state' = Candidate.become ~now state in
let msgs_to_send =
fold_over_servers (fun acc server_id ->
let request = make_request_vote_request state' in
(Types.Request_vote_request request, server_id) :: acc
) [] state'
in
let leader_change = Helper.Diff.leader_change state state' in
let committed_logs = Helper.Diff.committed_logs state state' in
make_result ~msgs_to_send ?leader_change ~committed_logs state'
let handle_heartbeat_timeout state now =
match state.Types.role with
| Types.Leader leader_state ->
let leader_state, msgs_to_send =
Log_entry_util.compute_append_entries state leader_state now
in
let state = Types.({state with role = Leader leader_state}) in
make_result ~msgs_to_send state
| _ ->
make_result state
type new_log_response =
| Appended of Types.result
| Forward_to_leader of int
| Delay
let handle_add_log_entries state datas now =
match state.Types.role with
| Types.Follower {Types.current_leader = None ; _ }
| Types.Candidate _ ->
Delay
(* Server in the middle of an election with no [Leader]
* agreed upon yet *)
| Types.Follower {Types.current_leader = Some leader_id; _ } ->
Forward_to_leader leader_id
The [ Leader ] should be the one centralizing all the
* new log entries .
* new log entries. *)
| Types.Leader leader_state ->
let log, log_diff =
Log.add_log_datas state.Types.current_term datas state.Types.log
in
let state' = Types.({state with log }) in
let leader_state, msgs_to_send =
Log_entry_util.compute_append_entries state' leader_state now
in
let state' = Types.({state' with role = Leader leader_state }) in
let {Log.added_logs; deleted_logs} = log_diff in
Appended (make_result ~msgs_to_send ~added_logs ~deleted_logs state')
let next_timeout_event state now = Timeout_event.next ~now state
let committed_entries_since ~since {Types.commit_index; log; _} =
let max = commit_index - since in
fst @@ Log.log_entries_since ~since ~max:(Log.Number max)log
| null | https://raw.githubusercontent.com/mransan/raft/292f99475183d67e960b3a199ed4fc01b1f183e2/src/raft_protocol.ml | ocaml | The heartbeat deadline is past due, the [Leader] must
* sent an [Append_entries] request.
In case of an outstanding request there is no point
* in sending a new request to that server.
* Even if the outstanding request was lost and it could be
* beneficial to send a new request, this would happen
* at the next heartbeat. We assume it's more likely that
* the server is down and therefore there is no need to keep
* on sending the same request over and over.
The receipient has the most recent log entry and the
* heartbeat deadline has not expired, no need to send a
* new heartbeat.
The recipient is missing recent log entries
This request is coming from a candidate lagging behind ...
* no vote for him.
This server has already voted for that candidate... reminding him
Server has previously voted for another candidate or
* itself
By reaching a majority of votes, the candidate is now
* the new leader
As a new leader, the server must send Append entries request
* to the other servers to both establish its leadership and
* start synching its log with the others.
Candidate has a new vote but not yet reached the majority
no message to send
The vote was denied, the election keeps on going until
* its deadline.
This request is coming from a leader lagging behind...
This request is coming from a legetimate leader,
* let's ensure that this server is a follower.
The only reason that can happen is if the messages
* are delivered out of order. (Which is completely possible).
* No need to update this follower.
Leader info about the receiver last log index is matching
* perfectly, we can append the logs.
This should really never happen since:
* - No logs belonging to the Leader term can be removed
* - The leader is supposed to keep track of the latest log from
* the receiver within the same term.
This case is possible when messages are received out of order by
* the Follower
*
* Note that even if the prev_log_index is earlier, it's important
* that no log entry is removed from the log if they come from the
* current leader.
*
* The current leader might have sent a commit message back to a
* client believing that the log entry is replicated on this server.
* If we remove the log entry we violate the assumption.
leader_term > receiver_last_log_term
This is likely the case after a new election, the Leader has
* more log entries in its log and assumes that all followers have
* the same number of log entries.
Because it is a new Leader, this follower can safely remove all
* the logs from previous terms which were not committed.
This is the case where there is a mismatch between the [Leader]
* and this server and the log entry identified with
* (prev_log_index, prev_log_term)
* could not be found.
*
* In such a case, the safest log entry to synchronize upon is the
* commit_index
* of the follower.
Log entries were successfully inserted by the receiver...
*
* let's update our leader state about that receiver
Check if the received log entry from has reached
* a majority of server.
* Note that we need to add `+1` simply to count this
* server (ie leader) which does not update its next/match
*
The receiver log is not matching this server current belief.
* If a leader this server should decrement the next
* log index and retry the append.
The receiver could have detected that this server term was not the
* latest one and sent the [Term_failure] response.
*
* This could typically happen in a network partition:
*
* Old-leader---------X-------New Leader
* \ /
* ---------Follower---------
*
* In the diagram above this server is the Old leader.
match result
Server in the middle of an election with no [Leader]
* agreed upon yet | module Types = Raft_types
module Follower = Raft_helper.Follower
module Candidate = Raft_helper.Candidate
module Leader = Raft_helper.Leader
module Configuration = Raft_helper.Configuration
module Log = Raft_log
module Timeout_event = Raft_helper.Timeout_event
module Helper = Raft_helper
let make_result ?(msgs_to_send = []) ?leader_change ?(deleted_logs = [])
?(committed_logs = []) ?(added_logs = []) state = {
Types.state;
messages_to_send = msgs_to_send;
leader_change;
committed_logs;
added_logs;
deleted_logs;
}
module Log_entry_util = struct
let make_append_entries prev_log_index state =
let to_send, prev_log_term =
let since = prev_log_index in
let max = state.Types.configuration.Types.max_nb_logs_per_message in
Log.log_entries_since ~since ~max state.Types.log
in
let request = Types.({
leader_term = state.current_term;
leader_id = state.server_id;
prev_log_index;
prev_log_term;
log_entries = to_send;
leader_commit = state.commit_index;
}) in
request
let compute_append_entries ?(force = false) state followers now =
let rec aux followers msgs_to_send = function
| [] -> (List.rev followers, msgs_to_send)
| follower::tl ->
let {
Types.follower_id;
heartbeat_deadline;
outstanding_request;
next_index;_
} = follower in
let shoud_send_request =
if force || now >= heartbeat_deadline
then true
else
if outstanding_request
then false
else
let prev_index = next_index - 1 in
let last_log_index = Log.last_log_index state.Types.log in
if prev_index = last_log_index
then false
else true
in
if shoud_send_request
then
let request = make_append_entries (next_index - 1) state in
let follower =
let outstanding_request = true in
let heartbeat_deadline =
now +. state.Types.configuration.Types.hearbeat_timeout
in
{follower with
Types.outstanding_request;
Types.heartbeat_deadline;
}
in
let followers = follower::followers in
let msgs_to_send =
let msg = (Types.Append_entries_request request, follower_id) in
msg::msgs_to_send
in
aux followers msgs_to_send tl
else
aux (follower::followers) msgs_to_send tl
in
aux [] [] followers
* { 2 Request Vote }
let make_request_vote_request state =
let index, term = Log.last_log_index_and_term state.Types.log in
Types.({
candidate_term = state.current_term;
candidate_id = state.server_id;
candidate_last_log_index = index;
candidate_last_log_term = term;
})
let handle_request_vote_request state request now =
let {
Types.candidate_id;
candidate_term;
candidate_last_log_index;_} = request in
let make_response state vote_granted =
Types.({
voter_term = state.current_term;
voter_id = state.server_id ;
vote_granted;
})
in
if candidate_term < state.Types.current_term
then
(state, make_response state false)
else
let state =
Enforce invariant that if this server is lagging behind
* it must convert to a follower and update to that term .
* it must convert to a follower and update to that term. *)
if candidate_term > state.Types.current_term
then Follower.become ~term:candidate_term ~now state
else state
in
let last_log_index = Log.last_log_index state.Types.log in
if candidate_last_log_index < last_log_index
then
Enforce the safety constraint by denying vote if this server
* last log is more recent than the candidate one .
* last log is more recent than the candidate one.*)
(state, make_response state false)
else
let role = state.Types.role in
match role with
| Types.Follower {Types.voted_for = None; _} ->
This server has never voted before , candidate is getting the vote
*
* In accordance to the ` Rules for Servers ` , the follower must
* reset its election deadline when granting its vote .
*
* In accordance to the `Rules for Servers`, the follower must
* reset its election deadline when granting its vote. *)
let {
Types.configuration = {
Types.election_timeout; _
}; _} = state in
let state = {state with
Types.role = Types.Follower {
Types.voted_for = Some candidate_id;
Types.current_leader = None;
Types.election_deadline = now +. election_timeout;
}
} in
(state, make_response state true)
| Types.Follower {Types.voted_for = Some id; _} when id = candidate_id ->
(state, make_response state true)
| _ ->
(state, make_response state false)
let handle_request_vote_response state response now =
let {Types.current_term; role; configuration; _ } = state in
let {Types.voter_term; vote_granted; _ } = response in
if voter_term > current_term
then
Enforce invariant that if this server is lagging behind
* it must convert to a follower and update to the latest term .
* it must convert to a follower and update to the latest term. *)
let state = Follower.become ~term:voter_term ~now state in
(state, [])
else
match role, vote_granted with
| Types.Candidate ({Types.vote_count; _ } as candidate_state) , true ->
if Configuration.is_majority configuration (vote_count + 1)
then
let state = Leader.become ~now state in
begin match state.Types.role with
| Types.Leader followers ->
let followers, msgs_to_send =
let force = true in
Log_entry_util.compute_append_entries ~force state followers now
in
let state = Types.{state with role = Leader followers} in
(state, msgs_to_send)
| _ -> assert(false)
end
else
let new_state = Types.{state with
role = Candidate (Candidate.increment_vote_count candidate_state);
} in
| Types.Candidate _ , false
| Types.Follower _ , _
| Types.Leader _ , _ -> (state, [])
If the server is either or Leader , it means that
* it has changed role in between the time it sent the
* [ RequestVote ] request and this response .
* This response can therefore safely be ignored and the server
* keeps its current state . Types .
* it has changed role in between the time it sent the
* [RequestVote] request and this response.
* This response can therefore safely be ignored and the server
* keeps its current state.Types. *)
* { 2 Append Entries }
let update_state leader_commit receiver_last_log_index log state =
if leader_commit > state.Types.commit_index
then
let commit_index = min leader_commit receiver_last_log_index in
{state with Types.log; commit_index}
else
{state with Types.log}
let handle_append_entries_request state request now =
let {Types.leader_term; leader_id; _} = request in
let make_response state result =
Types.({
receiver_term = state.current_term;
receiver_id = state.server_id;
result;
})
in
if leader_term < state.Types.current_term
then
(state, make_response state Types.Term_failure, Log.empty_diff)
else
let state =
let current_leader = leader_id and term = leader_term in
Follower.become ~current_leader ~term ~now state
in
Next step is to handle the log entries from the leader .
let {
Types.prev_log_index;
prev_log_term;
log_entries;
leader_commit; _ } = request in
let (
receiver_last_log_index,
receiver_last_log_term
) = Log.last_log_index_and_term state.Types.log in
let commit_index = state.Types.commit_index in
if prev_log_index < commit_index
then
let success = Types.Success receiver_last_log_index in
(state, make_response state success, Log.empty_diff)
else
if leader_term = receiver_last_log_term
then
match compare prev_log_index receiver_last_log_index with
| 0 ->
let log, log_diff =
Log.add_log_entries ~log_entries state.Types.log
in
let receiver_last_log_index = Log.last_log_index log in
let state =
update_state leader_commit receiver_last_log_index log state
in
let success = Types.Success receiver_last_log_index in
(state, make_response state success, log_diff)
| x when x > 0 ->
let failure = Types.Log_failure receiver_last_log_index in
(state, make_response state failure, Log.empty_diff)
x when x < 0
let success = Types.Success receiver_last_log_index in
(state, make_response state success, Log.empty_diff)
if prev_log_index > receiver_last_log_index
then
let failure = Types.Log_failure receiver_last_log_index in
(state, make_response state failure, Log.empty_diff)
else
match Log.remove_log_since ~prev_log_index
~prev_log_term state.Types.log with
| exception Not_found ->
let failure = Types.Log_failure commit_index in
(state, make_response state failure, Log.empty_diff)
| log, log_diff ->
let log, log_diff' = Log.add_log_entries ~log_entries log in
let log_diff = Log.merge_diff log_diff log_diff' in
let receiver_last_log_index = Log.last_log_index log in
let state =
update_state leader_commit receiver_last_log_index log state
in
let success = Types.Success receiver_last_log_index in
(state, make_response state success, log_diff)
let handle_append_entries_response state response now =
let {
Types.receiver_term;
receiver_id = follower_id ;
result} = response in
if receiver_term > state.Types.current_term
then
Enforce invariant that if the server is lagging behind
* it must convert to a follower and update to that term .
* it must convert to a follower and update to that term. *)
(Follower.become ~term:receiver_term ~now state , [])
else
match state.Types.role with
| Types.Follower _
| Types.Candidate _ -> (state, [])
| Types.Leader followers ->
let followers =
Leader.record_response_received ~follower_id followers
in
begin match result with
| Types.Success follower_last_log_index ->
let configuration = state.Types.configuration in
let followers , nb_of_replications =
Leader.update_follower_last_log_index
~follower_id ~index:follower_last_log_index followers
in
let state =
if Configuration.is_majority configuration (nb_of_replications + 1) &&
follower_last_log_index > state.Types.commit_index
then {state with Types.commit_index = follower_last_log_index;}
else state
in
let followers, msgs_to_send =
Log_entry_util.compute_append_entries state followers now
in
let state = Types.({state with role = Leader followers}) in
(state, msgs_to_send)
| Types.Log_failure follower_last_log_index ->
let leader_state =
Leader.decrement_next_index
~follower_last_log_index ~follower_id state followers
in
let leader_state, msgs_to_send =
Log_entry_util.compute_append_entries state leader_state now
in
let state = Types.({state with role = Leader leader_state}) in
(state, msgs_to_send)
| Types.Term_failure ->
(state, [])
let init ?log ?commit_index ?current_term ~configuration ~now ~server_id () =
Follower.make ?log ?commit_index ?current_term
~configuration ~now ~server_id ()
let handle_message state message now =
let state', msgs_to_send, log_diff =
match message with
| Types.Request_vote_request ({Types.candidate_id; _ } as r) ->
let state, response = handle_request_vote_request state r now in
let msgs =
(Types.Request_vote_response response, candidate_id)::[]
in
(state, msgs, Log.empty_diff)
| Types.Append_entries_request ({Types.leader_id; _ } as r) ->
let state, response, log_diff =
handle_append_entries_request state r now
in
let msgs =
(Types.Append_entries_response response, leader_id) :: []
in
(state, msgs, log_diff)
| Types.Request_vote_response r ->
let state, msgs = handle_request_vote_response state r now in
(state, msgs, Log.empty_diff)
| Types.Append_entries_response r ->
let state, msgs = handle_append_entries_response state r now in
(state, msgs, Log.empty_diff)
in
let leader_change = Helper.Diff.leader_change state state' in
let committed_logs = Helper.Diff.committed_logs state state' in
let {Log.added_logs; deleted_logs} = log_diff in
make_result ~msgs_to_send ?leader_change ~added_logs
~deleted_logs ~committed_logs state'
Iterates over all the other server ids . ( ie the ones different
* from the state i d ) .
* from the state id). *)
let fold_over_servers f e0 state =
let {
Types.server_id;
configuration = {Types.nb_of_server; _}; _
} = state in
let rec aux acc = function
| -1 -> acc
| id ->
let next = id - 1 in
if id = server_id
then aux acc next
else aux (f acc id) next
in
aux e0 (nb_of_server - 1)
let handle_new_election_timeout state now =
let state' = Candidate.become ~now state in
let msgs_to_send =
fold_over_servers (fun acc server_id ->
let request = make_request_vote_request state' in
(Types.Request_vote_request request, server_id) :: acc
) [] state'
in
let leader_change = Helper.Diff.leader_change state state' in
let committed_logs = Helper.Diff.committed_logs state state' in
make_result ~msgs_to_send ?leader_change ~committed_logs state'
let handle_heartbeat_timeout state now =
match state.Types.role with
| Types.Leader leader_state ->
let leader_state, msgs_to_send =
Log_entry_util.compute_append_entries state leader_state now
in
let state = Types.({state with role = Leader leader_state}) in
make_result ~msgs_to_send state
| _ ->
make_result state
type new_log_response =
| Appended of Types.result
| Forward_to_leader of int
| Delay
let handle_add_log_entries state datas now =
match state.Types.role with
| Types.Follower {Types.current_leader = None ; _ }
| Types.Candidate _ ->
Delay
| Types.Follower {Types.current_leader = Some leader_id; _ } ->
Forward_to_leader leader_id
The [ Leader ] should be the one centralizing all the
* new log entries .
* new log entries. *)
| Types.Leader leader_state ->
let log, log_diff =
Log.add_log_datas state.Types.current_term datas state.Types.log
in
let state' = Types.({state with log }) in
let leader_state, msgs_to_send =
Log_entry_util.compute_append_entries state' leader_state now
in
let state' = Types.({state' with role = Leader leader_state }) in
let {Log.added_logs; deleted_logs} = log_diff in
Appended (make_result ~msgs_to_send ~added_logs ~deleted_logs state')
let next_timeout_event state now = Timeout_event.next ~now state
let committed_entries_since ~since {Types.commit_index; log; _} =
let max = commit_index - since in
fst @@ Log.log_entries_since ~since ~max:(Log.Number max)log
|
39bd99952436c7fe11b544363da8154156bb961ae675382a1e779ad4663774ac | mariari/Misc-Lisp-Scripts | cache-fstar-source.lisp |
;; (eval-when (:compile-toplevel :load-toplevel :execute)
;; (ql:quickload "inferior-shell")
;; (asdf:load-system :uiop))
(defpackage #:scripts.cache-fstar
(:use #:uiop #:inferior-shell)
(:use #:common-lisp)
(:export :generate-cache))
(in-package :scripts.cache-fstar)
;; This does not work for ulib sadly, it has many finicky parameters see here
;;
;; for some reason if you qualify the entire file instead of assuming current file, it
;; may fail with errors (looking at you FStar.Seq.Base.fst)
;; , this script does not account for that. Please fix those ones manually,
;; or add custom lisp logic to solve that certain case!!!!!!
(defun generate-cache (starting-file &key (r-limit 5))
(labels ((rec (current-file tried-list)
(let* ((tried (nth-value 1
(run `(fstar.exe ,current-file
--cache_checked_modules
--record_hints
--use_hints
--z3rlimit ,r-limit)
:show t
:error-output :lines)))
maybe - not - exist can be either of these two if something is wrong
;; not exist ==> (file.checked does not exist)
;; stale check ==> (digest mismatch for file)
(maybe-not-exist (last (split-string (car (last tried)))
4)))
(println tried)
(cond ((equal (cdr maybe-not-exist) '("does" "not" "exist"))
(let ((new-file
;; removed the checked off the file, as it doesn't exist
(string-trim ".checked" (car maybe-not-exist))))
(rec new-file (cons current-file tried-list))))
((equal (butlast maybe-not-exist) '("(digest" "mismatch" "for"))
(let ((new-file
(string-trim ")" (car (last maybe-not-exist)))))
(rec new-file (cons current-file tried-list))))
;; put some logic here popping off dat list
((null tried-list) nil)
(t (rec (car tried-list) (cdr tried-list)))))))
(rec starting-file '())))
| null | https://raw.githubusercontent.com/mariari/Misc-Lisp-Scripts/acecadc75fcbe15e6b97e084d179aacdbbde06a8/scripts/cache-fstar-source.lisp | lisp | (eval-when (:compile-toplevel :load-toplevel :execute)
(ql:quickload "inferior-shell")
(asdf:load-system :uiop))
This does not work for ulib sadly, it has many finicky parameters see here
for some reason if you qualify the entire file instead of assuming current file, it
may fail with errors (looking at you FStar.Seq.Base.fst)
, this script does not account for that. Please fix those ones manually,
or add custom lisp logic to solve that certain case!!!!!!
not exist ==> (file.checked does not exist)
stale check ==> (digest mismatch for file)
removed the checked off the file, as it doesn't exist
put some logic here popping off dat list |
(defpackage #:scripts.cache-fstar
(:use #:uiop #:inferior-shell)
(:use #:common-lisp)
(:export :generate-cache))
(in-package :scripts.cache-fstar)
(defun generate-cache (starting-file &key (r-limit 5))
(labels ((rec (current-file tried-list)
(let* ((tried (nth-value 1
(run `(fstar.exe ,current-file
--cache_checked_modules
--record_hints
--use_hints
--z3rlimit ,r-limit)
:show t
:error-output :lines)))
maybe - not - exist can be either of these two if something is wrong
(maybe-not-exist (last (split-string (car (last tried)))
4)))
(println tried)
(cond ((equal (cdr maybe-not-exist) '("does" "not" "exist"))
(let ((new-file
(string-trim ".checked" (car maybe-not-exist))))
(rec new-file (cons current-file tried-list))))
((equal (butlast maybe-not-exist) '("(digest" "mismatch" "for"))
(let ((new-file
(string-trim ")" (car (last maybe-not-exist)))))
(rec new-file (cons current-file tried-list))))
((null tried-list) nil)
(t (rec (car tried-list) (cdr tried-list)))))))
(rec starting-file '())))
|
7794c3c9afec9f558b226d39116b939ed45137ee36412e3b7997b857e30d49af | ucsd-progsys/dsolve | bsearch.ml | val arraysize: ('a).{n:nat} 'a array(n) -> int(n)
fun bs_aux key vec l u =
if u < l then NONE
else
let
val m = l + (u-l) / 2
val x = sub (vec, m)
in
if x < key then bs_aux key vec (m+1) u
else if x > key then bs_aux key vec l (m-1)
else SOME (m)
end
withtype int -> {n:nat} int array(n) ->
{i:int,j:int | 0 <= i <= j+1 <= n} <j+1-i> =>
int(i) -> int(j) -> int option
fun bsearch (key, vec) = bs_aux key vec 0 (arraysize vec - 1)
withtype {n:nat} int * int array(n) -> int option
| null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/tests/POPL2008/xiog/DMLex/bsearch.ml | ocaml | val arraysize: ('a).{n:nat} 'a array(n) -> int(n)
fun bs_aux key vec l u =
if u < l then NONE
else
let
val m = l + (u-l) / 2
val x = sub (vec, m)
in
if x < key then bs_aux key vec (m+1) u
else if x > key then bs_aux key vec l (m-1)
else SOME (m)
end
withtype int -> {n:nat} int array(n) ->
{i:int,j:int | 0 <= i <= j+1 <= n} <j+1-i> =>
int(i) -> int(j) -> int option
fun bsearch (key, vec) = bs_aux key vec 0 (arraysize vec - 1)
withtype {n:nat} int * int array(n) -> int option
| |
010fe6d33e0bff180f70b7d0a4bd0ae60f47013333f6f0072a8b1652435bef26 | hypernumbers/hypernumbers | starling_sup.erl | -module(starling_sup).
-behaviour(supervisor).
-export([start_link/1,
init/1]).
-define(SERVER, ?MODULE).
%% Starts the supervisor.
start_link(Args) ->
supervisor:start_link(starling_sup, Args).
%% Supervisor callback. Returns restart strategy, maximum restart frequency,
%% and child specs.
init([ExtProg, PoolSize, Group]) ->
ChildSpecs = get_childspecs(PoolSize, ExtProg, Group, []),
{ok, {{one_for_one, 3, 10},
ChildSpecs}}.
get_childspecs(0, _ExtProg, _Group, Acc) -> Acc;
get_childspecs(N, ExtProg, Group, Acc) ->
ID = list_to_atom("starling_server_" ++ integer_to_list(N)),
Child = {ID,{starling_server, start_link,
[ExtProg, ID, Group]},
permanent, 10, worker, [starling_server]},
NewAcc = [Child | Acc],
get_childspecs(N - 1, ExtProg, Group, NewAcc).
| null | https://raw.githubusercontent.com/hypernumbers/hypernumbers/281319f60c0ac60fb009ee6d1e4826f4f2d51c4e/lib/starling/src/starling_sup.erl | erlang | Starts the supervisor.
Supervisor callback. Returns restart strategy, maximum restart frequency,
and child specs. | -module(starling_sup).
-behaviour(supervisor).
-export([start_link/1,
init/1]).
-define(SERVER, ?MODULE).
start_link(Args) ->
supervisor:start_link(starling_sup, Args).
init([ExtProg, PoolSize, Group]) ->
ChildSpecs = get_childspecs(PoolSize, ExtProg, Group, []),
{ok, {{one_for_one, 3, 10},
ChildSpecs}}.
get_childspecs(0, _ExtProg, _Group, Acc) -> Acc;
get_childspecs(N, ExtProg, Group, Acc) ->
ID = list_to_atom("starling_server_" ++ integer_to_list(N)),
Child = {ID,{starling_server, start_link,
[ExtProg, ID, Group]},
permanent, 10, worker, [starling_server]},
NewAcc = [Child | Acc],
get_childspecs(N - 1, ExtProg, Group, NewAcc).
|
90d31fcf3a67af1ce83a49ee47b649b9f5c2d121e2ce4a867f01f32dd9c44dcb | yesodweb/yesod | Redirect.hs | # LANGUAGE QuasiQuotes , TemplateHaskell , TypeFamilies , MultiParamTypeClasses , OverloadedStrings #
module YesodCoreTest.Redirect
( specs
, Widget
, resourcesY
) where
import YesodCoreTest.YesodTest
import Yesod.Core.Handler (redirectWith, setEtag, setWeakEtag)
import qualified Network.HTTP.Types as H
data Y = Y
mkYesod "Y" [parseRoutes|
/ RootR GET POST
/r301 R301 GET
/r303 R303 GET
/r307 R307 GET
/rregular RRegular GET
/etag EtagR GET
/weak-etag WeakEtagR GET
|]
instance Yesod Y where approot = ApprootStatic ""
app :: Session () -> IO ()
app = yesod Y
getRootR :: Handler ()
getRootR = return ()
postRootR :: Handler ()
postRootR = return ()
getR301, getR303, getR307, getRRegular, getEtagR, getWeakEtagR :: Handler ()
getR301 = redirectWith H.status301 RootR
getR303 = redirectWith H.status303 RootR
getR307 = redirectWith H.status307 RootR
getRRegular = redirect RootR
getEtagR = setEtag "hello world"
getWeakEtagR = setWeakEtag "hello world"
specs :: Spec
specs = describe "Redirect" $ do
it "no redirect" $ app $ do
res <- request defaultRequest { pathInfo = [], requestMethod = "POST" }
assertStatus 200 res
assertBodyContains "" res
it "301 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r301"] }
assertStatus 301 res
assertBodyContains "" res
it "303 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r303"] }
assertStatus 303 res
assertBodyContains "" res
it "307 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r307"] }
assertStatus 307 res
assertBodyContains "" res
it "303 redirect for regular, HTTP 1.1" $ app $ do
res <- request defaultRequest {
pathInfo = ["rregular"],
httpVersion = H.http11
}
assertStatus 303 res
assertBodyContains "" res
it "302 redirect for regular, HTTP 1.0" $ app $ do
res <- request defaultRequest {
pathInfo = ["rregular"]
, httpVersion = H.http10
}
assertStatus 302 res
assertBodyContains "" res
describe "etag" $ do
it "no if-none-match" $ app $ do
res <- request defaultRequest { pathInfo = ["etag"] }
assertStatus 200 res
assertHeader "etag" "\"hello world\"" res
-- Note: this violates the RFC around ETag format, but is being left as is
-- out of concerns that it might break existing users with misbehaving clients.
it "single, unquoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "hello world")]
}
assertStatus 304 res
it "different if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "hello world!")]
}
assertStatus 200 res
assertHeader "etag" "\"hello world\"" res
it "single, quoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"hello world\"")]
}
assertStatus 304 res
it "multiple quoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"foo\", \"hello world\"")]
}
assertStatus 304 res
it "ignore weak when provided normal etag" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"foo\", W/\"hello world\"")]
}
assertStatus 200 res
it "weak etag" $ app $ do
res <- request defaultRequest
{ pathInfo = ["weak-etag"]
, requestHeaders = [("if-none-match", "\"foo\", W/\"hello world\"")]
}
assertStatus 304 res
it "different if-none-match for weak etag" $ app $ do
res <- request defaultRequest
{ pathInfo = ["weak-etag"]
, requestHeaders = [("if-none-match", "W/\"foo\"")]
}
assertStatus 200 res
it "ignore strong when expecting weak" $ app $ do
res <- request defaultRequest
{ pathInfo = ["weak-etag"]
, requestHeaders = [("if-none-match", "\"hello world\", W/\"foo\"")]
}
assertStatus 200 res
| null | https://raw.githubusercontent.com/yesodweb/yesod/c59993ff287b880abbf768f1e3f56ae9b19df51e/yesod-core/test/YesodCoreTest/Redirect.hs | haskell | Note: this violates the RFC around ETag format, but is being left as is
out of concerns that it might break existing users with misbehaving clients. | # LANGUAGE QuasiQuotes , TemplateHaskell , TypeFamilies , MultiParamTypeClasses , OverloadedStrings #
module YesodCoreTest.Redirect
( specs
, Widget
, resourcesY
) where
import YesodCoreTest.YesodTest
import Yesod.Core.Handler (redirectWith, setEtag, setWeakEtag)
import qualified Network.HTTP.Types as H
data Y = Y
mkYesod "Y" [parseRoutes|
/ RootR GET POST
/r301 R301 GET
/r303 R303 GET
/r307 R307 GET
/rregular RRegular GET
/etag EtagR GET
/weak-etag WeakEtagR GET
|]
instance Yesod Y where approot = ApprootStatic ""
app :: Session () -> IO ()
app = yesod Y
getRootR :: Handler ()
getRootR = return ()
postRootR :: Handler ()
postRootR = return ()
getR301, getR303, getR307, getRRegular, getEtagR, getWeakEtagR :: Handler ()
getR301 = redirectWith H.status301 RootR
getR303 = redirectWith H.status303 RootR
getR307 = redirectWith H.status307 RootR
getRRegular = redirect RootR
getEtagR = setEtag "hello world"
getWeakEtagR = setWeakEtag "hello world"
specs :: Spec
specs = describe "Redirect" $ do
it "no redirect" $ app $ do
res <- request defaultRequest { pathInfo = [], requestMethod = "POST" }
assertStatus 200 res
assertBodyContains "" res
it "301 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r301"] }
assertStatus 301 res
assertBodyContains "" res
it "303 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r303"] }
assertStatus 303 res
assertBodyContains "" res
it "307 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r307"] }
assertStatus 307 res
assertBodyContains "" res
it "303 redirect for regular, HTTP 1.1" $ app $ do
res <- request defaultRequest {
pathInfo = ["rregular"],
httpVersion = H.http11
}
assertStatus 303 res
assertBodyContains "" res
it "302 redirect for regular, HTTP 1.0" $ app $ do
res <- request defaultRequest {
pathInfo = ["rregular"]
, httpVersion = H.http10
}
assertStatus 302 res
assertBodyContains "" res
describe "etag" $ do
it "no if-none-match" $ app $ do
res <- request defaultRequest { pathInfo = ["etag"] }
assertStatus 200 res
assertHeader "etag" "\"hello world\"" res
it "single, unquoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "hello world")]
}
assertStatus 304 res
it "different if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "hello world!")]
}
assertStatus 200 res
assertHeader "etag" "\"hello world\"" res
it "single, quoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"hello world\"")]
}
assertStatus 304 res
it "multiple quoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"foo\", \"hello world\"")]
}
assertStatus 304 res
it "ignore weak when provided normal etag" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"foo\", W/\"hello world\"")]
}
assertStatus 200 res
it "weak etag" $ app $ do
res <- request defaultRequest
{ pathInfo = ["weak-etag"]
, requestHeaders = [("if-none-match", "\"foo\", W/\"hello world\"")]
}
assertStatus 304 res
it "different if-none-match for weak etag" $ app $ do
res <- request defaultRequest
{ pathInfo = ["weak-etag"]
, requestHeaders = [("if-none-match", "W/\"foo\"")]
}
assertStatus 200 res
it "ignore strong when expecting weak" $ app $ do
res <- request defaultRequest
{ pathInfo = ["weak-etag"]
, requestHeaders = [("if-none-match", "\"hello world\", W/\"foo\"")]
}
assertStatus 200 res
|
92d3008b700e6fb3efa9129c6671269194fc298dfefe33cd840b987b87273bc6 | clojure/core.typed | contract_utils_test.clj | (ns clojure.core.typed.test.contract-utils-test
(:refer-clojure :exclude [boolean?])
(:require [clojure.core.typed.test.test-utils :refer :all]
[clojure.test :refer :all]
[clojure.core.typed.contract-utils :as con :refer :all]))
(deftest hmap-c-test
(is ((hmap-c?)
{}))
(is (not ((hmap-c?)
nil)))
(is ((hmap-c? :k symbol?)
{:k 'a}))
(is (not ((hmap-c? :k symbol?)
{})))
(is ((hmap-c? (optional :k) symbol?)
{:k 'a}))
(is (not ((hmap-c? (optional :k) symbol?)
{:k :a})))
(is ((hmap-c? (optional :k) symbol?)
{})))
| null | https://raw.githubusercontent.com/clojure/core.typed/f5b7d00bbb29d09000d7fef7cca5b40416c9fa91/typed/checker.jvm/test/clojure/core/typed/test/contract_utils_test.clj | clojure | (ns clojure.core.typed.test.contract-utils-test
(:refer-clojure :exclude [boolean?])
(:require [clojure.core.typed.test.test-utils :refer :all]
[clojure.test :refer :all]
[clojure.core.typed.contract-utils :as con :refer :all]))
(deftest hmap-c-test
(is ((hmap-c?)
{}))
(is (not ((hmap-c?)
nil)))
(is ((hmap-c? :k symbol?)
{:k 'a}))
(is (not ((hmap-c? :k symbol?)
{})))
(is ((hmap-c? (optional :k) symbol?)
{:k 'a}))
(is (not ((hmap-c? (optional :k) symbol?)
{:k :a})))
(is ((hmap-c? (optional :k) symbol?)
{})))
| |
1944d937d4d1fd605af8fc7b34ab1e5d5b796a16b0a41d0af79a3a213907932b | yakaz/yamerl | yamerl.erl | %-
Copyright ( c ) 2012 - 2014 Yakaz
Copyright ( c ) 2016 - 2022 < >
% All rights reserved.
%
% Redistribution and use in source and binary forms, with or without
% modification, are permitted provided that the following conditions
% are met:
1 . Redistributions of source code must retain the above copyright
% notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
% notice, this list of conditions and the following disclaimer in the
% documentation and/or other materials provided with the distribution.
%
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ` ` AS IS '' AND
% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
% ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
% OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
% OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
% SUCH DAMAGE.
@author < >
2012 - 2014 Yakaz ,
2016 - 2022 < >
%%
%% @doc Wrappers for common uses of {@link yamerl_constr}.
-module(yamerl).
-include("yamerl_nodes.hrl").
-include("yamerl_constr.hrl").
%% Public API.
-export([
decode/1,
decode/2,
decode_file/1,
decode_file/2
]).
%% -------------------------------------------------------------------
Public API : YAML to Erlang .
%% -------------------------------------------------------------------
%% All those functions are only wrapper above yamerl_constr common
%% functions. The purpose is just to avoid some typing.
%% @equiv yamerl_constr:string(String)
-spec decode(String) ->
Result | no_return() when
String :: unicode_data(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode(String) ->
yamerl_constr:string(String).
@equiv yamerl_constr : string(String , Options )
-spec decode(String, Options) ->
Result | no_return() when
String :: unicode_data(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode(String, Options) ->
yamerl_constr:string(String, Options).
%% @equiv yamerl_constr:file(Filename)
-spec decode_file(Filename) ->
Result | no_return() when
Filename :: string(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode_file(Filename) ->
yamerl_constr:file(Filename).
%% @equiv yamerl_constr:file(Filename, Options)
-spec decode_file(Filename, Options) ->
Result | no_return() when
Filename :: string(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode_file(Filename, Options) ->
yamerl_constr:file(Filename, Options).
| null | https://raw.githubusercontent.com/yakaz/yamerl/bf9d8b743bfc9775f2ddad9fb8d18ba5dc29d3e1/src/yamerl.erl | erlang | -
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
@doc Wrappers for common uses of {@link yamerl_constr}.
Public API.
-------------------------------------------------------------------
-------------------------------------------------------------------
All those functions are only wrapper above yamerl_constr common
functions. The purpose is just to avoid some typing.
@equiv yamerl_constr:string(String)
@equiv yamerl_constr:file(Filename)
@equiv yamerl_constr:file(Filename, Options) | Copyright ( c ) 2012 - 2014 Yakaz
Copyright ( c ) 2016 - 2022 < >
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ` ` AS IS '' AND
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
@author < >
2012 - 2014 Yakaz ,
2016 - 2022 < >
-module(yamerl).
-include("yamerl_nodes.hrl").
-include("yamerl_constr.hrl").
-export([
decode/1,
decode/2,
decode_file/1,
decode_file/2
]).
Public API : YAML to Erlang .
-spec decode(String) ->
Result | no_return() when
String :: unicode_data(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode(String) ->
yamerl_constr:string(String).
@equiv yamerl_constr : string(String , Options )
-spec decode(String, Options) ->
Result | no_return() when
String :: unicode_data(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode(String, Options) ->
yamerl_constr:string(String, Options).
-spec decode_file(Filename) ->
Result | no_return() when
Filename :: string(),
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode_file(Filename) ->
yamerl_constr:file(Filename).
-spec decode_file(Filename, Options) ->
Result | no_return() when
Filename :: string(),
Options :: [ yamerl_parser:yamerl_parser_option()
| yamerl_constr_option()
| proplists:property()],
Result :: [yamerl_doc()]
| [yamerl_simple_doc()]
| term().
decode_file(Filename, Options) ->
yamerl_constr:file(Filename, Options).
|
1d2c9b61b0fabbafc238df0da2f4d422cda9670d197f3edaa4aa9d7f8b86f730 | craigfe/compact | hashset.ml | — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright ( c ) 2020–2021 < >
Distributed under the MIT license . See terms at the end of this file .
— — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright (c) 2020–2021 Craig Ferguson <>
Distributed under the MIT license. See terms at the end of this file.
————————————————————————————————————————————————————————————————————————————*)
include Hashed_container.No_decoder
type nonrec 'a t = ('a, unit, 'a, 'a) t
let add t k = replace t k k
module type Key = sig
type t
val equal : t -> t -> bool
val compare : t -> t -> int
val hash : t -> int
val hash_size : int
end
let vtable_of_key :
type k.
(module Key with type t = k)
-> (k, unit, k, unit, k) Hashed_container.vtable =
fun (module Key) ->
{ key_hash = Key.hash
; key_hash_size = Key.hash_size
; key_equal = Key.equal
; entry_key = (fun k -> k)
; entry_value = (fun _ -> ())
; entry_compare = Stdlib.compare (* XXX: polymorphic comparison *)
; packed_key = (fun () k -> k)
; packed_entry = (fun () k -> k)
; packed_of_entry = (fun () k -> k)
}
let create_generic (type a) ~(entry_size : (a, _) Hashed_container.Entry_size.t)
~initial_capacity (key : (module Key with type t = a)) : a t =
Hashed_container.create ~vtable:(vtable_of_key key) ~entry_size
~initial_capacity ()
let create (type a) ~initial_capacity (module Key : Key with type t = a) : a t =
create_generic ~entry_size:Value1 ~initial_capacity (module Key)
module Immediate = struct
include Hashed_container.No_decoder
type nonrec 'a t = ('a, unit, 'a, 'a) t
let add t k = replace t k k
let entry_size = Hashed_container.Entry_size.Immediate
let create (type a) ~initial_capacity (module Key : Key with type t = a) : a t
=
create_generic ~entry_size ~initial_capacity (module Key)
end
module Int = struct
include Immediate
type nonrec t = int t
module Key = struct
include Stdlib.Int
let hash = Stdlib.Hashtbl.hash
let hash_size = 30
end
let create ~initial_capacity () : t =
create_generic ~entry_size:Immediate.entry_size ~initial_capacity
(module Key)
end
module Immediate64 = struct
include Hashed_container.No_decoder
type nonrec 'a t = ('a, unit, 'a, 'a) t
let add t k = replace t k k
type _ boxed_entry_size =
| E : ('a, _) Hashed_container.Entry_size.t -> 'a boxed_entry_size
[@@unboxed]
let entry_size = if Sys.word_size = 64 then E Immediate else E Value1
let create (type a) ~initial_capacity (module Key : Key with type t = a) : a t
=
let (E entry_size) = entry_size in
create_generic ~entry_size ~initial_capacity (module Key)
end
module Fixed_size_string = Hashset_fixed_size_string
module Internal = struct
type nonrec 'a t = 'a t
let repr = Type_equality.Refl
end
— — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright ( c ) 2020–2021 < >
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
— — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright (c) 2020–2021 Craig Ferguson <>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
————————————————————————————————————————————————————————————————————————————*)
| null | https://raw.githubusercontent.com/craigfe/compact/daa1b516c917585b80e2fbace74690766a9ac907/src/hashset.ml | ocaml | XXX: polymorphic comparison | — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright ( c ) 2020–2021 < >
Distributed under the MIT license . See terms at the end of this file .
— — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright (c) 2020–2021 Craig Ferguson <>
Distributed under the MIT license. See terms at the end of this file.
————————————————————————————————————————————————————————————————————————————*)
include Hashed_container.No_decoder
type nonrec 'a t = ('a, unit, 'a, 'a) t
let add t k = replace t k k
module type Key = sig
type t
val equal : t -> t -> bool
val compare : t -> t -> int
val hash : t -> int
val hash_size : int
end
let vtable_of_key :
type k.
(module Key with type t = k)
-> (k, unit, k, unit, k) Hashed_container.vtable =
fun (module Key) ->
{ key_hash = Key.hash
; key_hash_size = Key.hash_size
; key_equal = Key.equal
; entry_key = (fun k -> k)
; entry_value = (fun _ -> ())
; packed_key = (fun () k -> k)
; packed_entry = (fun () k -> k)
; packed_of_entry = (fun () k -> k)
}
let create_generic (type a) ~(entry_size : (a, _) Hashed_container.Entry_size.t)
~initial_capacity (key : (module Key with type t = a)) : a t =
Hashed_container.create ~vtable:(vtable_of_key key) ~entry_size
~initial_capacity ()
let create (type a) ~initial_capacity (module Key : Key with type t = a) : a t =
create_generic ~entry_size:Value1 ~initial_capacity (module Key)
module Immediate = struct
include Hashed_container.No_decoder
type nonrec 'a t = ('a, unit, 'a, 'a) t
let add t k = replace t k k
let entry_size = Hashed_container.Entry_size.Immediate
let create (type a) ~initial_capacity (module Key : Key with type t = a) : a t
=
create_generic ~entry_size ~initial_capacity (module Key)
end
module Int = struct
include Immediate
type nonrec t = int t
module Key = struct
include Stdlib.Int
let hash = Stdlib.Hashtbl.hash
let hash_size = 30
end
let create ~initial_capacity () : t =
create_generic ~entry_size:Immediate.entry_size ~initial_capacity
(module Key)
end
module Immediate64 = struct
include Hashed_container.No_decoder
type nonrec 'a t = ('a, unit, 'a, 'a) t
let add t k = replace t k k
type _ boxed_entry_size =
| E : ('a, _) Hashed_container.Entry_size.t -> 'a boxed_entry_size
[@@unboxed]
let entry_size = if Sys.word_size = 64 then E Immediate else E Value1
let create (type a) ~initial_capacity (module Key : Key with type t = a) : a t
=
let (E entry_size) = entry_size in
create_generic ~entry_size ~initial_capacity (module Key)
end
module Fixed_size_string = Hashset_fixed_size_string
module Internal = struct
type nonrec 'a t = 'a t
let repr = Type_equality.Refl
end
— — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright ( c ) 2020–2021 < >
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE .
— — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — — —
Copyright (c) 2020–2021 Craig Ferguson <>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
————————————————————————————————————————————————————————————————————————————*)
|
934750efb0aa388c642374350226e09b0e1d0f8fcb4e284dda4acaf02578b25f | alexandergunnarson/quantum | format.cljc | (ns
^{:doc "An alias of the clj-time.format namespace."
:attribution "alexandergunnarson"}
quantum.core.time.format
#_(:require [quantum.core.ns :as ns #?@(:clj [:refer [alias-ns]])]))
#_(:clj (alias-ns 'clj-time.format))
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src/quantum/core/time/format.cljc | clojure | (ns
^{:doc "An alias of the clj-time.format namespace."
:attribution "alexandergunnarson"}
quantum.core.time.format
#_(:require [quantum.core.ns :as ns #?@(:clj [:refer [alias-ns]])]))
#_(:clj (alias-ns 'clj-time.format))
| |
e83464706525c0aebe3e8840ead484e97a458fa5c63253baac0fc166113a2c99 | primetype/inspector | Main.hs | # LANGUAGE TypeApplications #
# LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
# OPTIONS_GHC -fno - warn - orphans #
module Main (main) where
import Inspector
import qualified Inspector.TestVector.Types as Type
import qualified Inspector.TestVector.Value as Value
import Foundation
import Foundation.Check (Arbitrary(..))
import Data.ByteArray (Bytes)
import Crypto.Hash
import Crypto.KDF.PBKDF2 (fastPBKDF2_SHA1, Parameters (..))
type GoldenSHA1 = "hash" :> "SHA1" :> Payload "payload" String :> Payload "hash" (Digest SHA1)
type GoldenSHA256 = "hash" :> "SHA256" :> Payload "payload" String :> Payload "hash" (Digest SHA256)
type GoldenPBKDF2 = "kdf" :> "PBKDF2" :> "SHA1"
:> Payload "parameters" Parameters :> Payload "password" String :> Payload "salt" String :> Payload "hash" Bytes
main :: IO ()
main = defaultTest $ do
group $ do
summary "Secure Hash Algorithm"
golden (Proxy @GoldenSHA1) hash
golden (Proxy @GoldenSHA256) hash
group $ do
summary "Password-Based Key Derivation"
golden (Proxy @GoldenPBKDF2) $ \params pwd salt ->
fastPBKDF2_SHA1 params pwd salt
instance Arbitrary Parameters where
arbitrary = undefined
instance Inspectable Parameters where
documentation _ = "PBKDF2 Parameters."
exportType _ = Type.Object $ Type.ObjectDef
[ ("iter", Type.Signed64)
, ("len", Type.Signed64)
]
builder (Parameters iter len) = Value.Object $ Value.ObjectDef
[ ("iter", builder iter)
, ("len", builder len)
]
parser = withStructure "Parameters" $ \obj -> do
iter <- parser =<< field obj "iter"
len <- parser =<< field obj "len"
pure $ Parameters iter len
| null | https://raw.githubusercontent.com/primetype/inspector/bd2ee67c757729d2a725282b27d3b98458f3fe2e/example/Main.hs | haskell | # LANGUAGE TypeApplications #
# LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
# OPTIONS_GHC -fno - warn - orphans #
module Main (main) where
import Inspector
import qualified Inspector.TestVector.Types as Type
import qualified Inspector.TestVector.Value as Value
import Foundation
import Foundation.Check (Arbitrary(..))
import Data.ByteArray (Bytes)
import Crypto.Hash
import Crypto.KDF.PBKDF2 (fastPBKDF2_SHA1, Parameters (..))
type GoldenSHA1 = "hash" :> "SHA1" :> Payload "payload" String :> Payload "hash" (Digest SHA1)
type GoldenSHA256 = "hash" :> "SHA256" :> Payload "payload" String :> Payload "hash" (Digest SHA256)
type GoldenPBKDF2 = "kdf" :> "PBKDF2" :> "SHA1"
:> Payload "parameters" Parameters :> Payload "password" String :> Payload "salt" String :> Payload "hash" Bytes
main :: IO ()
main = defaultTest $ do
group $ do
summary "Secure Hash Algorithm"
golden (Proxy @GoldenSHA1) hash
golden (Proxy @GoldenSHA256) hash
group $ do
summary "Password-Based Key Derivation"
golden (Proxy @GoldenPBKDF2) $ \params pwd salt ->
fastPBKDF2_SHA1 params pwd salt
instance Arbitrary Parameters where
arbitrary = undefined
instance Inspectable Parameters where
documentation _ = "PBKDF2 Parameters."
exportType _ = Type.Object $ Type.ObjectDef
[ ("iter", Type.Signed64)
, ("len", Type.Signed64)
]
builder (Parameters iter len) = Value.Object $ Value.ObjectDef
[ ("iter", builder iter)
, ("len", builder len)
]
parser = withStructure "Parameters" $ \obj -> do
iter <- parser =<< field obj "iter"
len <- parser =<< field obj "len"
pure $ Parameters iter len
| |
3a4971e5dd65560ed24034e0ed0699890456899a89941bd8a8f24cf21409f258 | hasktorch/hasktorch | Main.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE DataKinds #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE PolyKinds #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE UndecidableSuperClasses #
# LANGUAGE NoStarIsType #
module Main where
import Control.Monad
( foldM,
when,
)
import Control.Monad.Cont (ContT (runContT))
import GHC.Generics
import GHC.TypeLits
import Pipes
import Pipes (ListT (enumerate))
import qualified Pipes.Prelude as P
import Torch.Data.Pipeline
import Torch.Data.StreamedPipeline
import Torch.Typed hiding (Device)
import Prelude hiding (tanh)
--------------------------------------------------------------------------------
Multi - Layer Perceptron ( MLP )
--------------------------------------------------------------------------------
data
MLPSpec
(inputFeatures :: Nat)
(outputFeatures :: Nat)
(hiddenFeatures :: Nat)
(dtype :: DType)
(device :: (DeviceType, Nat))
= MLPSpec
deriving (Eq, Show)
data
MLP
(inputFeatures :: Nat)
(outputFeatures :: Nat)
(hiddenFeatures :: Nat)
(dtype :: DType)
(device :: (DeviceType, Nat)) = MLP
{ layer0 :: Linear inputFeatures hiddenFeatures dtype device,
layer1 :: Linear hiddenFeatures hiddenFeatures dtype device,
layer2 :: Linear hiddenFeatures outputFeatures dtype device
}
deriving (Show, Generic, Parameterized)
instance
(StandardFloatingPointDTypeValidation device dtype) =>
HasForward
(MLP inputFeatures outputFeatures hiddenFeatures dtype device)
(Tensor device dtype '[batchSize, inputFeatures])
(Tensor device dtype '[batchSize, outputFeatures])
where
forward MLP {..} = forward layer2 . tanh . forward layer1 . tanh . forward layer0
forwardStoch = (pure .) . forward
instance
( KnownDevice device,
KnownDType dtype,
All KnownNat '[inputFeatures, outputFeatures, hiddenFeatures],
RandDTypeIsValid device dtype
) =>
Randomizable
(MLPSpec inputFeatures outputFeatures hiddenFeatures dtype device)
(MLP inputFeatures outputFeatures hiddenFeatures dtype device)
where
sample MLPSpec =
MLP <$> sample LinearSpec <*> sample LinearSpec <*> sample LinearSpec
xor ::
forall batchSize dtype device.
KnownDevice device =>
Tensor device dtype '[batchSize, 2] ->
Tensor device dtype '[batchSize]
xor t = (1 - (1 - a) * (1 - b)) * (1 - (a * b))
where
a = select @1 @0 t
b = select @1 @1 t
newtype Xor device batchSize = Xor {iters :: Int}
instance
( KnownNat batchSize,
KnownDevice device,
RandDTypeIsValid device 'Float,
ComparisonDTypeIsValid device 'Float
) =>
Datastream IO () (Xor device batchSize) (Tensor device 'Float '[batchSize, 2])
where
streamSamples Xor {..} _ = Select $ P.replicateM iters randBool
where
randBool =
toDType @'Float @'Bool
. gt (toDevice @device (0.5 :: CPUTensor 'Float '[]))
<$> rand @'[batchSize, 2] @'Float @device
type Device = '( 'CUDA, 0)
train ::
forall device batchSize model optim.
(model ~ MLP 2 1 4 'Float device, _) =>
LearningRate device 'Float ->
(model, optim) ->
ListT IO (Tensor device 'Float '[batchSize, 2]) ->
IO (model, optim)
train learningRate (model, optim) = P.foldM step begin done . enumerateData
where
step (model, optim) (input, i) = do
let actualOutput = squeezeAll . ((sigmoid .) . forward) model $ input
expectedOutput = xor input
loss = mseLoss @ReduceMean actualOutput expectedOutput
when (i `mod` 2500 == 0) (print loss)
runStep model optim loss learningRate
begin = pure (model, optim)
done = pure
main :: IO ()
main = do
let numIters = 100000
learningRate = 0.1
initModel <- sample (MLPSpec :: MLPSpec 2 1 4 'Float Device)
let initOptim = mkAdam 0 0.9 0.999 (flattenParameters initModel)
dataset = Xor @Device @256 numIters
dataSource = streamFrom' datastreamOpts dataset [()]
(trained, _) <- runContT dataSource $ train learningRate (initModel, initOptim)
print trained
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/4e846fdcd89df5c7c6991cb9d6142007a6bb0a58/examples/static-xor-mlp/Main.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE GADTs #
# LANGUAGE RankNTypes #
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE PolyKinds #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE UndecidableSuperClasses #
# LANGUAGE NoStarIsType #
module Main where
import Control.Monad
( foldM,
when,
)
import Control.Monad.Cont (ContT (runContT))
import GHC.Generics
import GHC.TypeLits
import Pipes
import Pipes (ListT (enumerate))
import qualified Pipes.Prelude as P
import Torch.Data.Pipeline
import Torch.Data.StreamedPipeline
import Torch.Typed hiding (Device)
import Prelude hiding (tanh)
Multi - Layer Perceptron ( MLP )
data
MLPSpec
(inputFeatures :: Nat)
(outputFeatures :: Nat)
(hiddenFeatures :: Nat)
(dtype :: DType)
(device :: (DeviceType, Nat))
= MLPSpec
deriving (Eq, Show)
data
MLP
(inputFeatures :: Nat)
(outputFeatures :: Nat)
(hiddenFeatures :: Nat)
(dtype :: DType)
(device :: (DeviceType, Nat)) = MLP
{ layer0 :: Linear inputFeatures hiddenFeatures dtype device,
layer1 :: Linear hiddenFeatures hiddenFeatures dtype device,
layer2 :: Linear hiddenFeatures outputFeatures dtype device
}
deriving (Show, Generic, Parameterized)
instance
(StandardFloatingPointDTypeValidation device dtype) =>
HasForward
(MLP inputFeatures outputFeatures hiddenFeatures dtype device)
(Tensor device dtype '[batchSize, inputFeatures])
(Tensor device dtype '[batchSize, outputFeatures])
where
forward MLP {..} = forward layer2 . tanh . forward layer1 . tanh . forward layer0
forwardStoch = (pure .) . forward
instance
( KnownDevice device,
KnownDType dtype,
All KnownNat '[inputFeatures, outputFeatures, hiddenFeatures],
RandDTypeIsValid device dtype
) =>
Randomizable
(MLPSpec inputFeatures outputFeatures hiddenFeatures dtype device)
(MLP inputFeatures outputFeatures hiddenFeatures dtype device)
where
sample MLPSpec =
MLP <$> sample LinearSpec <*> sample LinearSpec <*> sample LinearSpec
xor ::
forall batchSize dtype device.
KnownDevice device =>
Tensor device dtype '[batchSize, 2] ->
Tensor device dtype '[batchSize]
xor t = (1 - (1 - a) * (1 - b)) * (1 - (a * b))
where
a = select @1 @0 t
b = select @1 @1 t
newtype Xor device batchSize = Xor {iters :: Int}
instance
( KnownNat batchSize,
KnownDevice device,
RandDTypeIsValid device 'Float,
ComparisonDTypeIsValid device 'Float
) =>
Datastream IO () (Xor device batchSize) (Tensor device 'Float '[batchSize, 2])
where
streamSamples Xor {..} _ = Select $ P.replicateM iters randBool
where
randBool =
toDType @'Float @'Bool
. gt (toDevice @device (0.5 :: CPUTensor 'Float '[]))
<$> rand @'[batchSize, 2] @'Float @device
type Device = '( 'CUDA, 0)
train ::
forall device batchSize model optim.
(model ~ MLP 2 1 4 'Float device, _) =>
LearningRate device 'Float ->
(model, optim) ->
ListT IO (Tensor device 'Float '[batchSize, 2]) ->
IO (model, optim)
train learningRate (model, optim) = P.foldM step begin done . enumerateData
where
step (model, optim) (input, i) = do
let actualOutput = squeezeAll . ((sigmoid .) . forward) model $ input
expectedOutput = xor input
loss = mseLoss @ReduceMean actualOutput expectedOutput
when (i `mod` 2500 == 0) (print loss)
runStep model optim loss learningRate
begin = pure (model, optim)
done = pure
main :: IO ()
main = do
let numIters = 100000
learningRate = 0.1
initModel <- sample (MLPSpec :: MLPSpec 2 1 4 'Float Device)
let initOptim = mkAdam 0 0.9 0.999 (flattenParameters initModel)
dataset = Xor @Device @256 numIters
dataSource = streamFrom' datastreamOpts dataset [()]
(trained, _) <- runContT dataSource $ train learningRate (initModel, initOptim)
print trained
|
6c41df4b5248a391f2563e9984d80d61492d54256ec6bf443267f04c94960fd1 | rescript-lang/rescript-compiler | flow_ast_utils.mli |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type 'loc binding = 'loc * string
type 'loc ident = 'loc * string
type 'loc source = 'loc * string
val fold_bindings_of_pattern :
('a -> ('m, 't) Flow_ast.Identifier.t -> 'a) -> 'a -> ('m, 't) Flow_ast.Pattern.t -> 'a
val fold_bindings_of_variable_declarations :
(bool -> 'a -> ('m, 't) Flow_ast.Identifier.t -> 'a) ->
'a ->
('m, 't) Flow_ast.Statement.VariableDeclaration.Declarator.t list ->
'a
val partition_directives :
(Loc.t, Loc.t) Flow_ast.Statement.t list ->
(Loc.t, Loc.t) Flow_ast.Statement.t list * (Loc.t, Loc.t) Flow_ast.Statement.t list
val hoist_function_declarations :
('a, 'b) Flow_ast.Statement.t list -> ('a, 'b) Flow_ast.Statement.t list
val is_call_to_invariant : ('a, 'b) Flow_ast.Expression.t -> bool
val is_call_to_is_array : ('a, 'b) Flow_ast.Expression.t -> bool
val is_call_to_object_dot_freeze : ('a, 'b) Flow_ast.Expression.t -> bool
val is_call_to_object_static_method : ('a, 'b) Flow_ast.Expression.t -> bool
val negate_number_literal : float * string -> float * string
val loc_of_expression : ('a, 'a) Flow_ast.Expression.t -> 'a
val loc_of_statement : ('a, 'a) Flow_ast.Statement.t -> 'a
val loc_of_pattern : ('a, 'a) Flow_ast.Pattern.t -> 'a
val loc_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a
val name_of_ident : ('loc, 'a) Flow_ast.Identifier.t -> string
val source_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a source
val ident_of_source :
?comments:('a, unit) Flow_ast.Syntax.t -> 'a source -> ('a, 'a) Flow_ast.Identifier.t
val mk_comments :
?leading:'loc Flow_ast.Comment.t list ->
?trailing:'loc Flow_ast.Comment.t list ->
'a ->
('loc, 'a) Flow_ast.Syntax.t
val mk_comments_opt :
?leading:'loc Flow_ast.Comment.t list ->
?trailing:'loc Flow_ast.Comment.t list ->
unit ->
('loc, unit) Flow_ast.Syntax.t option
val mk_comments_with_internal_opt :
?leading:'loc Flow_ast.Comment.t list ->
?trailing:'loc Flow_ast.Comment.t list ->
internal:'loc Flow_ast.Comment.t list ->
unit ->
('loc, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option
val merge_comments :
inner:('M, unit) Flow_ast.Syntax.t option ->
outer:('M, unit) Flow_ast.Syntax.t option ->
('M, unit) Flow_ast.Syntax.t option
val merge_comments_with_internal :
inner:('M, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option ->
outer:('M, 'a) Flow_ast.Syntax.t option ->
('M, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option
val split_comments :
('loc, unit) Flow_ast.Syntax.t option ->
('loc, unit) Flow_ast.Syntax.t option * ('loc, unit) Flow_ast.Syntax.t option
module ExpressionSort : sig
type t =
| Array
| ArrowFunction
| Assignment
| Binary
| Call
| Class
| Comprehension
| Conditional
| Function
| Generator
| Identifier
| Import
| JSXElement
| JSXFragment
| Literal
| Logical
| Member
| MetaProperty
| New
| Object
| OptionalCall
| OptionalMember
| Sequence
| Super
| TaggedTemplate
| TemplateLiteral
| This
| TypeCast
| Unary
| Update
| Yield
val to_string : t -> string
end
val string_of_assignment_operator : Flow_ast.Expression.Assignment.operator -> string
val string_of_binary_operator : Flow_ast.Expression.Binary.operator -> string
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/0f3c02b13cb8a9c5e2586541622f4a0f5f561216/jscomp/js_parser/flow_ast_utils.mli | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type 'loc binding = 'loc * string
type 'loc ident = 'loc * string
type 'loc source = 'loc * string
val fold_bindings_of_pattern :
('a -> ('m, 't) Flow_ast.Identifier.t -> 'a) -> 'a -> ('m, 't) Flow_ast.Pattern.t -> 'a
val fold_bindings_of_variable_declarations :
(bool -> 'a -> ('m, 't) Flow_ast.Identifier.t -> 'a) ->
'a ->
('m, 't) Flow_ast.Statement.VariableDeclaration.Declarator.t list ->
'a
val partition_directives :
(Loc.t, Loc.t) Flow_ast.Statement.t list ->
(Loc.t, Loc.t) Flow_ast.Statement.t list * (Loc.t, Loc.t) Flow_ast.Statement.t list
val hoist_function_declarations :
('a, 'b) Flow_ast.Statement.t list -> ('a, 'b) Flow_ast.Statement.t list
val is_call_to_invariant : ('a, 'b) Flow_ast.Expression.t -> bool
val is_call_to_is_array : ('a, 'b) Flow_ast.Expression.t -> bool
val is_call_to_object_dot_freeze : ('a, 'b) Flow_ast.Expression.t -> bool
val is_call_to_object_static_method : ('a, 'b) Flow_ast.Expression.t -> bool
val negate_number_literal : float * string -> float * string
val loc_of_expression : ('a, 'a) Flow_ast.Expression.t -> 'a
val loc_of_statement : ('a, 'a) Flow_ast.Statement.t -> 'a
val loc_of_pattern : ('a, 'a) Flow_ast.Pattern.t -> 'a
val loc_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a
val name_of_ident : ('loc, 'a) Flow_ast.Identifier.t -> string
val source_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a source
val ident_of_source :
?comments:('a, unit) Flow_ast.Syntax.t -> 'a source -> ('a, 'a) Flow_ast.Identifier.t
val mk_comments :
?leading:'loc Flow_ast.Comment.t list ->
?trailing:'loc Flow_ast.Comment.t list ->
'a ->
('loc, 'a) Flow_ast.Syntax.t
val mk_comments_opt :
?leading:'loc Flow_ast.Comment.t list ->
?trailing:'loc Flow_ast.Comment.t list ->
unit ->
('loc, unit) Flow_ast.Syntax.t option
val mk_comments_with_internal_opt :
?leading:'loc Flow_ast.Comment.t list ->
?trailing:'loc Flow_ast.Comment.t list ->
internal:'loc Flow_ast.Comment.t list ->
unit ->
('loc, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option
val merge_comments :
inner:('M, unit) Flow_ast.Syntax.t option ->
outer:('M, unit) Flow_ast.Syntax.t option ->
('M, unit) Flow_ast.Syntax.t option
val merge_comments_with_internal :
inner:('M, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option ->
outer:('M, 'a) Flow_ast.Syntax.t option ->
('M, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option
val split_comments :
('loc, unit) Flow_ast.Syntax.t option ->
('loc, unit) Flow_ast.Syntax.t option * ('loc, unit) Flow_ast.Syntax.t option
module ExpressionSort : sig
type t =
| Array
| ArrowFunction
| Assignment
| Binary
| Call
| Class
| Comprehension
| Conditional
| Function
| Generator
| Identifier
| Import
| JSXElement
| JSXFragment
| Literal
| Logical
| Member
| MetaProperty
| New
| Object
| OptionalCall
| OptionalMember
| Sequence
| Super
| TaggedTemplate
| TemplateLiteral
| This
| TypeCast
| Unary
| Update
| Yield
val to_string : t -> string
end
val string_of_assignment_operator : Flow_ast.Expression.Assignment.operator -> string
val string_of_binary_operator : Flow_ast.Expression.Binary.operator -> string
| |
56342440755a34f4b0fc13373aeea28dd547f0cae737a0dd8803d843e4999051 | ucsd-progsys/liquidhaskell | Build.hs | {-# LANGUAGE OverloadedStrings #-}
module Test.Build where
import qualified Shelly as Sh
import Shelly (Sh)
import Test.Groups
import Test.Options (Options(..))
import System.Exit (exitSuccess, exitFailure, exitWith)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.Process.Typed
import System.Environment
import Data.Foldable (for_)
-- | Wrapper around runProcess that just returns the exit code.
runCommand :: Text -> [Text] -> IO ExitCode
runCommand cmd args = runProcess (proc (T.unpack cmd) (T.unpack <$> args))
-- | Build using cabal, selecting the project file from the
-- `LIQUID_CABAL_PROJECT_FILE` environment variable if possible, otherwise using
-- the default.
cabalRun :: Options
-> [Text] -- ^ Test groups to build
-> IO ExitCode
cabalRun opts names = do
projectFile <- lookupEnv "LIQUID_CABAL_PROJECT_FILE"
runCommand "cabal" $
[ "build" ]
<> (case projectFile of Nothing -> []; Just projectFile' -> [ "--project-file", T.pack projectFile' ])
<> (if measureTimings opts then ["--flags=measure-timings", "-j1"] else ["--keep-going"])
<> names
-- | Runs stack on the given test groups
stackRun :: Options -> [Text] -> IO ExitCode
stackRun opts names =
runCommand "stack" $
[ "build", "--flag", "tests:stack" ]
<> concat [ ["--flag=tests:measure-timings", "-j1"] | measureTimings opts ]
-- Enables that particular executable in the cabal file
<> testFlags
<> [ "--" ]
<> testNames
where
testNames = fmap ("tests:" <>) names
testFlags = concatMap (("--flag" :) . pure) testNames
-- | Ensure prog is on the PATH
ensurePathContains :: Text -> Sh ()
ensurePathContains prog =
Sh.unlessM (Sh.test_px $ T.unpack prog) $ do
Sh.errorExit $ "Cannot find " <> prog <> " on the path."
-- | Make sure cabal is available
cabalTestEnv :: Sh ()
cabalTestEnv = ensurePathContains "cabal"
-- | Make sure stack is available
stackTestEnv :: Sh ()
stackTestEnv = ensurePathContains "stack"
-- | Main program; reused between cabal and stack drivers
program :: Sh () -> (Options -> [Text] -> IO ExitCode) -> Options ->IO ()
program testEnv runner opts
| showAll opts = do
for_ allTestGroupNames T.putStrLn
exitSuccess
| otherwise = do
Sh.shelly testEnv
let goodGroups = all (`elem` allTestGroupNames) (testGroups opts)
if not goodGroups
then do
T.putStrLn "You selected a bad test group name. Run with --help to see available options."
exitFailure
else do
let selectedTestGroups = if null (testGroups opts) then allTestGroupNames else testGroups opts
T.putStrLn "Running integration tests!"
runner opts selectedTestGroups >>= exitWith
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/a2958c5c60ba82270259434fd1e44547dc45febb/tests/harness/Test/Build.hs | haskell | # LANGUAGE OverloadedStrings #
| Wrapper around runProcess that just returns the exit code.
| Build using cabal, selecting the project file from the
`LIQUID_CABAL_PROJECT_FILE` environment variable if possible, otherwise using
the default.
^ Test groups to build
| Runs stack on the given test groups
Enables that particular executable in the cabal file
| Ensure prog is on the PATH
| Make sure cabal is available
| Make sure stack is available
| Main program; reused between cabal and stack drivers |
module Test.Build where
import qualified Shelly as Sh
import Shelly (Sh)
import Test.Groups
import Test.Options (Options(..))
import System.Exit (exitSuccess, exitFailure, exitWith)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.Process.Typed
import System.Environment
import Data.Foldable (for_)
runCommand :: Text -> [Text] -> IO ExitCode
runCommand cmd args = runProcess (proc (T.unpack cmd) (T.unpack <$> args))
cabalRun :: Options
-> IO ExitCode
cabalRun opts names = do
projectFile <- lookupEnv "LIQUID_CABAL_PROJECT_FILE"
runCommand "cabal" $
[ "build" ]
<> (case projectFile of Nothing -> []; Just projectFile' -> [ "--project-file", T.pack projectFile' ])
<> (if measureTimings opts then ["--flags=measure-timings", "-j1"] else ["--keep-going"])
<> names
stackRun :: Options -> [Text] -> IO ExitCode
stackRun opts names =
runCommand "stack" $
[ "build", "--flag", "tests:stack" ]
<> concat [ ["--flag=tests:measure-timings", "-j1"] | measureTimings opts ]
<> testFlags
<> [ "--" ]
<> testNames
where
testNames = fmap ("tests:" <>) names
testFlags = concatMap (("--flag" :) . pure) testNames
ensurePathContains :: Text -> Sh ()
ensurePathContains prog =
Sh.unlessM (Sh.test_px $ T.unpack prog) $ do
Sh.errorExit $ "Cannot find " <> prog <> " on the path."
cabalTestEnv :: Sh ()
cabalTestEnv = ensurePathContains "cabal"
stackTestEnv :: Sh ()
stackTestEnv = ensurePathContains "stack"
program :: Sh () -> (Options -> [Text] -> IO ExitCode) -> Options ->IO ()
program testEnv runner opts
| showAll opts = do
for_ allTestGroupNames T.putStrLn
exitSuccess
| otherwise = do
Sh.shelly testEnv
let goodGroups = all (`elem` allTestGroupNames) (testGroups opts)
if not goodGroups
then do
T.putStrLn "You selected a bad test group name. Run with --help to see available options."
exitFailure
else do
let selectedTestGroups = if null (testGroups opts) then allTestGroupNames else testGroups opts
T.putStrLn "Running integration tests!"
runner opts selectedTestGroups >>= exitWith
|
3717b2a3c037d2beb15cec61d444a4e03d07aa9e620bfd97b49b9c6863795014 | jacius/lispbuilder | functions.lisp | ;;;;; Converted from the "Functions" Processing example at:
;;;;; ""
;;;;; (C)2006 Luke J Crook
(in-package #:sdl-gfx-examples)
(defun draw-target (xloc yloc size num)
(let ((grayvalues (sdl:cast-to-int (/ 255 num)))
(steps (sdl:cast-to-int (/ size num))))
(dotimes (i num)
(sdl:with-color (col (sdl:color :r (* i grayvalues) :g (* i grayvalues) :b (* i grayvalues)))
(sdl-gfx:draw-filled-ellipse (sdl:point :x xloc :y yloc) (- size (* i steps)) (- size (* i steps))
:surface sdl:*default-display*)))))
(defun functions ()
(let ((width 200) (height 200))
(sdl:with-init ()
(sdl:window width height :title-caption "Functions, from Processing.org")
(setf (sdl:frame-rate) 5)
(sdl:clear-display (sdl:color :r 51 :g 51 :b 51))
(draw-target 68 34 100 10)
(draw-target 152 16 50 3)
(draw-target 100 144 40 5)
(sdl:update-display)
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))))))
| null | https://raw.githubusercontent.com/jacius/lispbuilder/e693651b95f6818e3cab70f0074af9f9511584c3/lispbuilder-sdl-gfx/examples/functions.lisp | lisp | Converted from the "Functions" Processing example at:
""
(C)2006 Luke J Crook |
(in-package #:sdl-gfx-examples)
(defun draw-target (xloc yloc size num)
(let ((grayvalues (sdl:cast-to-int (/ 255 num)))
(steps (sdl:cast-to-int (/ size num))))
(dotimes (i num)
(sdl:with-color (col (sdl:color :r (* i grayvalues) :g (* i grayvalues) :b (* i grayvalues)))
(sdl-gfx:draw-filled-ellipse (sdl:point :x xloc :y yloc) (- size (* i steps)) (- size (* i steps))
:surface sdl:*default-display*)))))
(defun functions ()
(let ((width 200) (height 200))
(sdl:with-init ()
(sdl:window width height :title-caption "Functions, from Processing.org")
(setf (sdl:frame-rate) 5)
(sdl:clear-display (sdl:color :r 51 :g 51 :b 51))
(draw-target 68 34 100 10)
(draw-target 152 16 50 3)
(draw-target 100 144 40 5)
(sdl:update-display)
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))))))
|
9917add03c220ccc6e0a14ef8eddf6407601ba9dc8a785587d9f72b5bf951dfa | runtimeverification/haskell-backend | Sorts.hs | |
Module : . Rewrite . SMT.Representation . Sorts
Description : Builds an SMT representation for sorts .
Copyright : ( c ) Runtime Verification , 2019 - 2021
License : BSD-3 - Clause
Maintainer :
Module : Kore.Rewrite.SMT.Representation.Sorts
Description : Builds an SMT representation for sorts.
Copyright : (c) Runtime Verification, 2019-2021
License : BSD-3-Clause
Maintainer :
-}
module Kore.Rewrite.SMT.Representation.Sorts (
buildRepresentations,
sortSmtFromSortArgs,
emptySortArgsToSmt,
applyToArgs,
) where
import Control.Monad (
zipWithM,
)
import Data.Map.Strict (
Map,
)
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.Text qualified as Text
import Kore.Attribute.Hook (
Hook (Hook),
)
import Kore.Attribute.Hook qualified as Hook
import Kore.Attribute.Smtlib (
applySExpr,
)
import Kore.Attribute.Smtlib.Smtlib (
Smtlib (Smtlib),
)
import Kore.Attribute.Smtlib.Smtlib qualified as Smtlib
import Kore.Attribute.Sort qualified as Attribute (
Sort,
)
import Kore.Attribute.Sort qualified as Attribute.Sort
import Kore.Attribute.Sort.Constructors qualified as Attribute (
Constructors,
)
import Kore.Attribute.Sort.Constructors qualified as Attribute.Constructors (
Constructor (Constructor),
ConstructorLike (ConstructorLikeConstructor),
Constructors (getConstructors),
)
import Kore.Attribute.Sort.Constructors qualified as Constructors.DoNotUse
import Kore.Builtin.Bool qualified as Bool
import Kore.Builtin.Int qualified as Int
import Kore.IndexedModule.IndexedModule (
VerifiedModule,
recursiveIndexedModuleSortDescriptions,
)
import Kore.Internal.TermLike
import Kore.Rewrite.SMT.AST qualified as AST
import Kore.Sort qualified as Kore
import Kore.Syntax.Sentence (
SentenceSort (SentenceSort),
)
import Kore.Syntax.Sentence qualified as SentenceSort (
SentenceSort (..),
)
import Kore.Unparser (
unparseToString,
)
import Kore.Verified qualified as Verified
import Prelude.Kore
import SMT qualified (
Constructor (Constructor),
ConstructorArgument (ConstructorArgument),
DataTypeDeclaration (DataTypeDeclaration),
SExpr (Atom, List),
SortDeclaration (SortDeclaration),
showSExpr,
tBool,
tInt,
)
import SMT qualified as SMT.Constructor (
Constructor (..),
)
import SMT qualified as SMT.ConstructorArgument (
ConstructorArgument (..),
)
import SMT qualified as SMT.DataTypeDeclaration (
DataTypeDeclaration (..),
)
import SMT qualified as SMT.SortDeclaration (
SortDeclaration (..),
)
translateSort ::
Map.Map Id AST.SmtSort ->
Sort ->
Maybe SMT.SExpr
translateSort
sorts
(SortActualSort SortActual{sortActualName, sortActualSorts}) =
do
AST.Sort{sortData} <- Map.lookup sortActualName sorts
sortSmtFromSortArgs sortData sorts sortActualSorts
translateSort _ _ = Nothing
| Builds smt representations for sorts in the given module .
May ignore sorts that we do n't handle yet ( e.g. parameterized sorts ) .
All references to other sorts and symbols are left unresolved .
May ignore sorts that we don't handle yet (e.g. parameterized sorts).
All references to other sorts and symbols are left unresolved.
-}
buildRepresentations ::
forall symbolAttribute.
VerifiedModule symbolAttribute ->
Map.Map Id Attribute.Constructors ->
AST.UnresolvedDeclarations
buildRepresentations indexedModule sortConstructors =
builtinAndSmtLibDeclarations
`AST.mergePreferFirst` listToDeclarations
(sortsWithConstructors builtinAndSmtLibSorts simpleSortIDs)
`AST.mergePreferFirst` listToDeclarations simpleSortDeclarations
where
listToDeclarations ::
[(Id, AST.UnresolvedSort)] -> AST.UnresolvedDeclarations
listToDeclarations list =
AST.Declarations
{ sorts = Map.fromList list
, symbols = Map.empty
}
builtinAndSmtLibDeclarations :: AST.UnresolvedDeclarations
builtinAndSmtLibDeclarations =
listToDeclarations builtinSortDeclarations
`AST.mergePreferFirst` listToDeclarations smtlibSortDeclarations
builtinAndSmtLibSorts :: Set.Set Id
builtinAndSmtLibSorts = Map.keysSet sorts
where
AST.Declarations{sorts} = builtinAndSmtLibDeclarations
sortsWithConstructors ::
Set.Set Id ->
[Id] ->
[(Id, AST.UnresolvedSort)]
sortsWithConstructors blacklist whitelist =
mapMaybe
(sortWithConstructor sortConstructors)
(filter (`Set.notMember` blacklist) whitelist)
builtinSortDeclarations :: [(Id, AST.UnresolvedSort)]
builtinSortDeclarations =
extractDefinitionsFromSentences builtinSortDeclaration
smtlibSortDeclarations :: [(Id, AST.UnresolvedSort)]
smtlibSortDeclarations =
extractDefinitionsFromSentences smtlibSortDeclaration
simpleSortIDs :: [Id]
simpleSortIDs = map fst simpleSortDeclarations
simpleSortDeclarations :: [(Id, AST.UnresolvedSort)]
simpleSortDeclarations =
extractDefinitionsFromSentences simpleSortDeclaration
extractDefinitionsFromSentences ::
( ( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
) ->
[(Id, AST.UnresolvedSort)]
extractDefinitionsFromSentences definitionExtractor =
mapMaybe
definitionExtractor
(Map.elems $ recursiveIndexedModuleSortDescriptions indexedModule)
builtinSortDeclaration ::
( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
builtinSortDeclaration
(attributes, SentenceSort{sentenceSortName}) =
do
smtRepresentation <- case getHook of
Just name
| name == Int.sort -> return SMT.tInt
| name == Bool.sort -> return SMT.tBool
_ -> Nothing
return
( sentenceSortName
, AST.Sort
{ sortData = AST.EmptySortArgsToSmt smtRepresentation
, sortDeclaration =
AST.SortDeclaredIndirectly
(AST.AlreadyEncoded smtRepresentation)
}
)
where
Hook{getHook} = Attribute.Sort.hook attributes
smtlibSortDeclaration ::
( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
smtlibSortDeclaration
(attributes, SentenceSort{sentenceSortName}) =
do
smtRepresentation@(SMT.List (SMT.Atom smtName : sortArgs)) <- getSmtlib
return
( sentenceSortName
, AST.Sort
{ sortData = AST.ApplyToArgs smtRepresentation
, sortDeclaration =
AST.SortDeclarationSort
SMT.SortDeclaration
{ name = AST.AlreadyEncoded $ SMT.Atom smtName
, arity = length sortArgs
}
}
)
where
Smtlib{getSmtlib} = Attribute.Sort.smtlib attributes
applyToArgs ::
SMT.SExpr ->
Map.Map Id AST.SmtSort ->
[Sort] ->
Maybe SMT.SExpr
applyToArgs sExpr definitions children = do
children' <- mapM (translateSort definitions) children
return $ applySExpr sExpr children'
simpleSortDeclaration ::
( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
simpleSortDeclaration
( _attribute
, SentenceSort
{ sentenceSortName
, sentenceSortParameters = []
}
) =
Just
( sentenceSortName
, AST.Sort
{ sortData =
AST.EmptySortArgsToSmt (AST.encode encodedName)
, sortDeclaration =
AST.SortDeclarationSort
SMT.SortDeclaration
{ name = encodedName
, arity = 0
}
}
)
where
encodedName = AST.encodable sentenceSortName
simpleSortDeclaration _ = Nothing
sortSmtFromSortArgs :: AST.SortSExprSpec -> Map Kore.Id AST.SmtSort -> [Kore.Sort] -> Maybe SMT.SExpr
sortSmtFromSortArgs (AST.EmptySortArgsToSmt smtRepresentation) = emptySortArgsToSmt smtRepresentation
sortSmtFromSortArgs (AST.ApplyToArgs smtRepresentation) = applyToArgs smtRepresentation
sortSmtFromSortArgs (AST.ConstSExpr smtRepresentation) = const $ const $ Just smtRepresentation
emptySortArgsToSmt ::
SMT.SExpr ->
Map.Map Id AST.SmtSort ->
[Sort] ->
Maybe SMT.SExpr
emptySortArgsToSmt representation _ [] = Just representation
emptySortArgsToSmt representation _ args =
(error . unlines)
[ "Sorts with arguments not supported yet."
, "representation=" ++ SMT.showSExpr representation
, "args = " ++ show (fmap unparseToString args)
]
sortWithConstructor ::
Map.Map Id Attribute.Constructors ->
Id ->
Maybe (Id, AST.UnresolvedSort)
sortWithConstructor sortConstructors sortId = do
-- Maybe
constructors <- Map.lookup sortId sortConstructors
constructorLikeList <- Attribute.Constructors.getConstructors constructors
constructorList <- traverse constructorFromLike constructorLikeList
finalConstructors <- traverse buildConstructor (toList constructorList)
return
( sortId
, AST.Sort
{ sortData =
AST.EmptySortArgsToSmt (AST.encode encodedName)
, sortDeclaration =
AST.SortDeclarationDataType
SMT.DataTypeDeclaration
{ name = encodedName
, typeArguments = []
, constructors = finalConstructors
}
}
)
where
encodedName = AST.encodable sortId
constructorFromLike
(Attribute.Constructors.ConstructorLikeConstructor c) =
Just c
constructorFromLike _ = Nothing
buildConstructor ::
Attribute.Constructors.Constructor ->
Maybe AST.UnresolvedConstructor
buildConstructor
Attribute.Constructors.Constructor
{ name = Symbol{symbolConstructor, symbolParams = []}
, sorts
} =
do
-- Maybe monad
args <- zipWithM (buildConstructorArgument encodedName) [1 ..] sorts
return
SMT.Constructor
{ name = AST.SymbolReference symbolConstructor
, arguments = args
}
where
encodedName = getId symbolConstructor
buildConstructor _ = Nothing
buildConstructorArgument ::
Text.Text ->
Integer ->
Sort ->
Maybe AST.UnresolvedConstructorArgument
buildConstructorArgument
constructorName
index
sort@(SortActualSort SortActual{sortActualSorts = []}) =
Just
SMT.ConstructorArgument
{ name =
AST.Encodable $
SMT.Atom $
constructorName <> (Text.pack . show) index
, argType = AST.SortReference sort
}
buildConstructorArgument _ _ _ = Nothing
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/7c5bb857080b60e57ac1d72d88ffe63faf15a718/kore/src/Kore/Rewrite/SMT/Representation/Sorts.hs | haskell | Maybe
Maybe monad | |
Module : . Rewrite . SMT.Representation . Sorts
Description : Builds an SMT representation for sorts .
Copyright : ( c ) Runtime Verification , 2019 - 2021
License : BSD-3 - Clause
Maintainer :
Module : Kore.Rewrite.SMT.Representation.Sorts
Description : Builds an SMT representation for sorts.
Copyright : (c) Runtime Verification, 2019-2021
License : BSD-3-Clause
Maintainer :
-}
module Kore.Rewrite.SMT.Representation.Sorts (
buildRepresentations,
sortSmtFromSortArgs,
emptySortArgsToSmt,
applyToArgs,
) where
import Control.Monad (
zipWithM,
)
import Data.Map.Strict (
Map,
)
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.Text qualified as Text
import Kore.Attribute.Hook (
Hook (Hook),
)
import Kore.Attribute.Hook qualified as Hook
import Kore.Attribute.Smtlib (
applySExpr,
)
import Kore.Attribute.Smtlib.Smtlib (
Smtlib (Smtlib),
)
import Kore.Attribute.Smtlib.Smtlib qualified as Smtlib
import Kore.Attribute.Sort qualified as Attribute (
Sort,
)
import Kore.Attribute.Sort qualified as Attribute.Sort
import Kore.Attribute.Sort.Constructors qualified as Attribute (
Constructors,
)
import Kore.Attribute.Sort.Constructors qualified as Attribute.Constructors (
Constructor (Constructor),
ConstructorLike (ConstructorLikeConstructor),
Constructors (getConstructors),
)
import Kore.Attribute.Sort.Constructors qualified as Constructors.DoNotUse
import Kore.Builtin.Bool qualified as Bool
import Kore.Builtin.Int qualified as Int
import Kore.IndexedModule.IndexedModule (
VerifiedModule,
recursiveIndexedModuleSortDescriptions,
)
import Kore.Internal.TermLike
import Kore.Rewrite.SMT.AST qualified as AST
import Kore.Sort qualified as Kore
import Kore.Syntax.Sentence (
SentenceSort (SentenceSort),
)
import Kore.Syntax.Sentence qualified as SentenceSort (
SentenceSort (..),
)
import Kore.Unparser (
unparseToString,
)
import Kore.Verified qualified as Verified
import Prelude.Kore
import SMT qualified (
Constructor (Constructor),
ConstructorArgument (ConstructorArgument),
DataTypeDeclaration (DataTypeDeclaration),
SExpr (Atom, List),
SortDeclaration (SortDeclaration),
showSExpr,
tBool,
tInt,
)
import SMT qualified as SMT.Constructor (
Constructor (..),
)
import SMT qualified as SMT.ConstructorArgument (
ConstructorArgument (..),
)
import SMT qualified as SMT.DataTypeDeclaration (
DataTypeDeclaration (..),
)
import SMT qualified as SMT.SortDeclaration (
SortDeclaration (..),
)
translateSort ::
Map.Map Id AST.SmtSort ->
Sort ->
Maybe SMT.SExpr
translateSort
sorts
(SortActualSort SortActual{sortActualName, sortActualSorts}) =
do
AST.Sort{sortData} <- Map.lookup sortActualName sorts
sortSmtFromSortArgs sortData sorts sortActualSorts
translateSort _ _ = Nothing
| Builds smt representations for sorts in the given module .
May ignore sorts that we do n't handle yet ( e.g. parameterized sorts ) .
All references to other sorts and symbols are left unresolved .
May ignore sorts that we don't handle yet (e.g. parameterized sorts).
All references to other sorts and symbols are left unresolved.
-}
buildRepresentations ::
forall symbolAttribute.
VerifiedModule symbolAttribute ->
Map.Map Id Attribute.Constructors ->
AST.UnresolvedDeclarations
buildRepresentations indexedModule sortConstructors =
builtinAndSmtLibDeclarations
`AST.mergePreferFirst` listToDeclarations
(sortsWithConstructors builtinAndSmtLibSorts simpleSortIDs)
`AST.mergePreferFirst` listToDeclarations simpleSortDeclarations
where
listToDeclarations ::
[(Id, AST.UnresolvedSort)] -> AST.UnresolvedDeclarations
listToDeclarations list =
AST.Declarations
{ sorts = Map.fromList list
, symbols = Map.empty
}
builtinAndSmtLibDeclarations :: AST.UnresolvedDeclarations
builtinAndSmtLibDeclarations =
listToDeclarations builtinSortDeclarations
`AST.mergePreferFirst` listToDeclarations smtlibSortDeclarations
builtinAndSmtLibSorts :: Set.Set Id
builtinAndSmtLibSorts = Map.keysSet sorts
where
AST.Declarations{sorts} = builtinAndSmtLibDeclarations
sortsWithConstructors ::
Set.Set Id ->
[Id] ->
[(Id, AST.UnresolvedSort)]
sortsWithConstructors blacklist whitelist =
mapMaybe
(sortWithConstructor sortConstructors)
(filter (`Set.notMember` blacklist) whitelist)
builtinSortDeclarations :: [(Id, AST.UnresolvedSort)]
builtinSortDeclarations =
extractDefinitionsFromSentences builtinSortDeclaration
smtlibSortDeclarations :: [(Id, AST.UnresolvedSort)]
smtlibSortDeclarations =
extractDefinitionsFromSentences smtlibSortDeclaration
simpleSortIDs :: [Id]
simpleSortIDs = map fst simpleSortDeclarations
simpleSortDeclarations :: [(Id, AST.UnresolvedSort)]
simpleSortDeclarations =
extractDefinitionsFromSentences simpleSortDeclaration
extractDefinitionsFromSentences ::
( ( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
) ->
[(Id, AST.UnresolvedSort)]
extractDefinitionsFromSentences definitionExtractor =
mapMaybe
definitionExtractor
(Map.elems $ recursiveIndexedModuleSortDescriptions indexedModule)
builtinSortDeclaration ::
( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
builtinSortDeclaration
(attributes, SentenceSort{sentenceSortName}) =
do
smtRepresentation <- case getHook of
Just name
| name == Int.sort -> return SMT.tInt
| name == Bool.sort -> return SMT.tBool
_ -> Nothing
return
( sentenceSortName
, AST.Sort
{ sortData = AST.EmptySortArgsToSmt smtRepresentation
, sortDeclaration =
AST.SortDeclaredIndirectly
(AST.AlreadyEncoded smtRepresentation)
}
)
where
Hook{getHook} = Attribute.Sort.hook attributes
smtlibSortDeclaration ::
( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
smtlibSortDeclaration
(attributes, SentenceSort{sentenceSortName}) =
do
smtRepresentation@(SMT.List (SMT.Atom smtName : sortArgs)) <- getSmtlib
return
( sentenceSortName
, AST.Sort
{ sortData = AST.ApplyToArgs smtRepresentation
, sortDeclaration =
AST.SortDeclarationSort
SMT.SortDeclaration
{ name = AST.AlreadyEncoded $ SMT.Atom smtName
, arity = length sortArgs
}
}
)
where
Smtlib{getSmtlib} = Attribute.Sort.smtlib attributes
applyToArgs ::
SMT.SExpr ->
Map.Map Id AST.SmtSort ->
[Sort] ->
Maybe SMT.SExpr
applyToArgs sExpr definitions children = do
children' <- mapM (translateSort definitions) children
return $ applySExpr sExpr children'
simpleSortDeclaration ::
( Attribute.Sort
, Verified.SentenceSort
) ->
Maybe (Id, AST.UnresolvedSort)
simpleSortDeclaration
( _attribute
, SentenceSort
{ sentenceSortName
, sentenceSortParameters = []
}
) =
Just
( sentenceSortName
, AST.Sort
{ sortData =
AST.EmptySortArgsToSmt (AST.encode encodedName)
, sortDeclaration =
AST.SortDeclarationSort
SMT.SortDeclaration
{ name = encodedName
, arity = 0
}
}
)
where
encodedName = AST.encodable sentenceSortName
simpleSortDeclaration _ = Nothing
sortSmtFromSortArgs :: AST.SortSExprSpec -> Map Kore.Id AST.SmtSort -> [Kore.Sort] -> Maybe SMT.SExpr
sortSmtFromSortArgs (AST.EmptySortArgsToSmt smtRepresentation) = emptySortArgsToSmt smtRepresentation
sortSmtFromSortArgs (AST.ApplyToArgs smtRepresentation) = applyToArgs smtRepresentation
sortSmtFromSortArgs (AST.ConstSExpr smtRepresentation) = const $ const $ Just smtRepresentation
emptySortArgsToSmt ::
SMT.SExpr ->
Map.Map Id AST.SmtSort ->
[Sort] ->
Maybe SMT.SExpr
emptySortArgsToSmt representation _ [] = Just representation
emptySortArgsToSmt representation _ args =
(error . unlines)
[ "Sorts with arguments not supported yet."
, "representation=" ++ SMT.showSExpr representation
, "args = " ++ show (fmap unparseToString args)
]
sortWithConstructor ::
Map.Map Id Attribute.Constructors ->
Id ->
Maybe (Id, AST.UnresolvedSort)
sortWithConstructor sortConstructors sortId = do
constructors <- Map.lookup sortId sortConstructors
constructorLikeList <- Attribute.Constructors.getConstructors constructors
constructorList <- traverse constructorFromLike constructorLikeList
finalConstructors <- traverse buildConstructor (toList constructorList)
return
( sortId
, AST.Sort
{ sortData =
AST.EmptySortArgsToSmt (AST.encode encodedName)
, sortDeclaration =
AST.SortDeclarationDataType
SMT.DataTypeDeclaration
{ name = encodedName
, typeArguments = []
, constructors = finalConstructors
}
}
)
where
encodedName = AST.encodable sortId
constructorFromLike
(Attribute.Constructors.ConstructorLikeConstructor c) =
Just c
constructorFromLike _ = Nothing
buildConstructor ::
Attribute.Constructors.Constructor ->
Maybe AST.UnresolvedConstructor
buildConstructor
Attribute.Constructors.Constructor
{ name = Symbol{symbolConstructor, symbolParams = []}
, sorts
} =
do
args <- zipWithM (buildConstructorArgument encodedName) [1 ..] sorts
return
SMT.Constructor
{ name = AST.SymbolReference symbolConstructor
, arguments = args
}
where
encodedName = getId symbolConstructor
buildConstructor _ = Nothing
buildConstructorArgument ::
Text.Text ->
Integer ->
Sort ->
Maybe AST.UnresolvedConstructorArgument
buildConstructorArgument
constructorName
index
sort@(SortActualSort SortActual{sortActualSorts = []}) =
Just
SMT.ConstructorArgument
{ name =
AST.Encodable $
SMT.Atom $
constructorName <> (Text.pack . show) index
, argType = AST.SortReference sort
}
buildConstructorArgument _ _ _ = Nothing
|
cb7cfff7986594e1ff3b4c076ecf0f91c621f3676b49b0d3a5a143ba073d128d | yyna/polylith-example | core.clj | (ns greenlabs.rest-api.core
(:gen-class)
(:require [greenlabs.rest-api.handler :as handler]
[muuntaja.core :as m]
[reitit.coercion.spec]
[reitit.ring :as ring]
[reitit.ring.coercion :as rrc]
[reitit.ring.middleware.muuntaja :as muuntaja]
[reitit.ring.middleware.parameters :as parameters]
[ring.adapter.jetty9 :refer [run-jetty]]))
(def middlewares [parameters/parameters-middleware
rrc/coerce-request-middleware
muuntaja/format-middleware
muuntaja/format-response-middleware
rrc/coerce-response-middleware])
(def app
(ring/ring-handler
(ring/router
[["/user" {}
["/register" {:post {:form {:phone-number string?
:password string?}
:responses {200 {:body {:success boolean?}}
409 {:body {:success boolean?
:message string?}}}
:handler handler/user-register}}]
["/login" {:post {:form {:phone-number string?
:password string?}
:responses {200 {:body {:success boolean?}}
401 {:body {:success boolean?
:message string?}}}
:handler handler/user-login}}]]
["/send-verification-code" {:post {:form {:phone-number string?}
:responses {200 {:body {:success boolean?}}}
:handler handler/send-verification-code}}]
["/weather" {:get {:parameters {:query {:date string?}}
:responses {200 {:body {:success boolean?}}
404 {:body {:success boolean?
:message string?}}}
:handler handler/weather}}]]
{:data {:coercion reitit.coercion.spec/coercion
:muuntaja m/instance
:middleware middlewares}})))
(defn -main []
(run-jetty #'app {:port 3000
:join? false})
(println "server running in port 3000"))
(comment
(-main))
| null | https://raw.githubusercontent.com/yyna/polylith-example/ef775e02269d2fbee8a599622e6d5d7b1b2dc73d/bases/rest-api/src/greenlabs/rest_api/core.clj | clojure | (ns greenlabs.rest-api.core
(:gen-class)
(:require [greenlabs.rest-api.handler :as handler]
[muuntaja.core :as m]
[reitit.coercion.spec]
[reitit.ring :as ring]
[reitit.ring.coercion :as rrc]
[reitit.ring.middleware.muuntaja :as muuntaja]
[reitit.ring.middleware.parameters :as parameters]
[ring.adapter.jetty9 :refer [run-jetty]]))
(def middlewares [parameters/parameters-middleware
rrc/coerce-request-middleware
muuntaja/format-middleware
muuntaja/format-response-middleware
rrc/coerce-response-middleware])
(def app
(ring/ring-handler
(ring/router
[["/user" {}
["/register" {:post {:form {:phone-number string?
:password string?}
:responses {200 {:body {:success boolean?}}
409 {:body {:success boolean?
:message string?}}}
:handler handler/user-register}}]
["/login" {:post {:form {:phone-number string?
:password string?}
:responses {200 {:body {:success boolean?}}
401 {:body {:success boolean?
:message string?}}}
:handler handler/user-login}}]]
["/send-verification-code" {:post {:form {:phone-number string?}
:responses {200 {:body {:success boolean?}}}
:handler handler/send-verification-code}}]
["/weather" {:get {:parameters {:query {:date string?}}
:responses {200 {:body {:success boolean?}}
404 {:body {:success boolean?
:message string?}}}
:handler handler/weather}}]]
{:data {:coercion reitit.coercion.spec/coercion
:muuntaja m/instance
:middleware middlewares}})))
(defn -main []
(run-jetty #'app {:port 3000
:join? false})
(println "server running in port 3000"))
(comment
(-main))
| |
7f3156a45e65114b2bb8d1a9fea004527d573b6452dadf704c47494f4428d6b1 | ndpar/erlang | frequency_reliable.erl | %%
F.Cesarini & S.Thomson , Erlang Programming , p.150 .
%% Reliable Client/Server
%%
%% Based on frequency2.erl
%%
Test 1 : Kill the client
%%
1 > frequency_reliable : start ( ) .
%% ok
2 > frequency_reliable : allocate ( ) .
%% {ok,10}
3 > frequency_reliable : allocate ( ) .
%% {ok,11}
4 > exit(self ( ) , kill ) .
%% ** exception exit: killed
%% 5> frequency_reliable:allocate().
%% {ok,10}
6 > frequency_reliable : allocate ( ) .
%% {ok,11}
%%
Test 2 : Kill the server
%%
1 > frequency_reliable : start ( ) .
%% ok
2 > frequency_reliable : allocate ( ) .
%% {ok,10}
3 > self ( ) .
< 0.31.0 >
4 > exit(whereis(frequency_reliable ) , kill ) .
%% true
5 > self ( ) .
< 0.37.0 >
%%
-module(frequency_reliable).
-export([start/0, stop/0]).
-export([allocate/0, deallocate/1]).
-export([init/0]).
% Start function to create and initialize the server
start() ->
register(frequency_reliable, spawn(frequency_reliable, init, [])), ok.
init() ->
process_flag(trap_exit, true),
Frequencies = {get_frequencies(), []},
loop(Frequencies).
get_frequencies() -> [10,11,12,13,14,15].
loop(Frequencies) ->
receive
{request, Pid, allocate} ->
{NewFrequencies, Reply} = allocate(Frequencies, Pid),
reply(Pid, Reply),
loop(NewFrequencies);
{request, Pid, {deallocate, Freq}} ->
{NewFrequencies, Reply} = deallocate(Frequencies, Freq, Pid),
reply(Pid, Reply),
loop(NewFrequencies);
{request, Pid, stop} ->
{_, Allocated} = Frequencies,
case Allocated of
[] ->
reply(Pid, ok);
_ ->
reply(Pid, {error, frequencies_in_use}),
loop(Frequencies)
end;
{'EXIT', Pid, _Reason} ->
NewFrequencies = exited(Frequencies, Pid),
loop(NewFrequencies)
end.
exited({Free, Allocated}, Pid) ->
case lists:keysearch(Pid, 2, Allocated) of
{value, {Freq, Pid}} ->
NewAllocated = lists:keydelete(Freq, 1, Allocated),
exited({[Freq|Free], NewAllocated}, Pid);
false ->
{Free, Allocated}
end.
allocate({[], Allocated}, _Pid) ->
{{[], Allocated}, {error, no_frequency}};
allocate({[Freq|Free], Allocated}, Pid) ->
ClientFrequencies = my_lists:keyfilter(Pid, 2, Allocated),
case length(ClientFrequencies) of
3 -> {{[Freq|Free], Allocated}, {error, exceed_limit}};
_ ->
link(Pid),
{{Free, [{Freq,Pid}|Allocated]}, {ok, Freq}}
end.
deallocate({Free, Allocated}, Freq, Pid) ->
case lists:member({Freq, Pid}, Allocated) of
true ->
unlink(Pid),
NewAllocated = lists:keydelete(Freq, 1, Allocated),
{{[Freq|Free], NewAllocated}, ok};
_ ->
{{Free, Allocated}, {error, foreign_frequency}}
end.
reply(Pid, Reply) ->
Pid ! {reply, Reply}.
% Client functions
stop() -> call(stop).
allocate() -> call(allocate).
deallocate(Freq) -> call({deallocate, Freq}).
call(Message) ->
frequency_reliable ! {request, self(), Message},
receive {reply, Reply} -> Reply end.
| null | https://raw.githubusercontent.com/ndpar/erlang/e215841a1d370e0fc5eb6b9ff40ea7ae78fc8763/src/frequency_reliable.erl | erlang |
Reliable Client/Server
Based on frequency2.erl
ok
{ok,10}
{ok,11}
** exception exit: killed
5> frequency_reliable:allocate().
{ok,10}
{ok,11}
ok
{ok,10}
true
Start function to create and initialize the server
Client functions | F.Cesarini & S.Thomson , Erlang Programming , p.150 .
Test 1 : Kill the client
1 > frequency_reliable : start ( ) .
2 > frequency_reliable : allocate ( ) .
3 > frequency_reliable : allocate ( ) .
4 > exit(self ( ) , kill ) .
6 > frequency_reliable : allocate ( ) .
Test 2 : Kill the server
1 > frequency_reliable : start ( ) .
2 > frequency_reliable : allocate ( ) .
3 > self ( ) .
< 0.31.0 >
4 > exit(whereis(frequency_reliable ) , kill ) .
5 > self ( ) .
< 0.37.0 >
-module(frequency_reliable).
-export([start/0, stop/0]).
-export([allocate/0, deallocate/1]).
-export([init/0]).
start() ->
register(frequency_reliable, spawn(frequency_reliable, init, [])), ok.
init() ->
process_flag(trap_exit, true),
Frequencies = {get_frequencies(), []},
loop(Frequencies).
get_frequencies() -> [10,11,12,13,14,15].
loop(Frequencies) ->
receive
{request, Pid, allocate} ->
{NewFrequencies, Reply} = allocate(Frequencies, Pid),
reply(Pid, Reply),
loop(NewFrequencies);
{request, Pid, {deallocate, Freq}} ->
{NewFrequencies, Reply} = deallocate(Frequencies, Freq, Pid),
reply(Pid, Reply),
loop(NewFrequencies);
{request, Pid, stop} ->
{_, Allocated} = Frequencies,
case Allocated of
[] ->
reply(Pid, ok);
_ ->
reply(Pid, {error, frequencies_in_use}),
loop(Frequencies)
end;
{'EXIT', Pid, _Reason} ->
NewFrequencies = exited(Frequencies, Pid),
loop(NewFrequencies)
end.
exited({Free, Allocated}, Pid) ->
case lists:keysearch(Pid, 2, Allocated) of
{value, {Freq, Pid}} ->
NewAllocated = lists:keydelete(Freq, 1, Allocated),
exited({[Freq|Free], NewAllocated}, Pid);
false ->
{Free, Allocated}
end.
allocate({[], Allocated}, _Pid) ->
{{[], Allocated}, {error, no_frequency}};
allocate({[Freq|Free], Allocated}, Pid) ->
ClientFrequencies = my_lists:keyfilter(Pid, 2, Allocated),
case length(ClientFrequencies) of
3 -> {{[Freq|Free], Allocated}, {error, exceed_limit}};
_ ->
link(Pid),
{{Free, [{Freq,Pid}|Allocated]}, {ok, Freq}}
end.
deallocate({Free, Allocated}, Freq, Pid) ->
case lists:member({Freq, Pid}, Allocated) of
true ->
unlink(Pid),
NewAllocated = lists:keydelete(Freq, 1, Allocated),
{{[Freq|Free], NewAllocated}, ok};
_ ->
{{Free, Allocated}, {error, foreign_frequency}}
end.
reply(Pid, Reply) ->
Pid ! {reply, Reply}.
stop() -> call(stop).
allocate() -> call(allocate).
deallocate(Freq) -> call({deallocate, Freq}).
call(Message) ->
frequency_reliable ! {request, self(), Message},
receive {reply, Reply} -> Reply end.
|
e9f8754e1d362494c716be0a331b55315f1144c40fda5e5ebd6cfa2d270983cc | earl-ducaine/cl-garnet | pixmap-lab.lisp | -*- Mode : LISP ; Syntax : Common - Lisp ; Package : DEMO - ANIMATOR ; Base : 10 -*-
;;
;; Package for the pixmap laboratory demo.  Uses KR (Garnet's
;; knowledge-representation/object system) alongside COMMON-LISP.
;; DO-GO / DO-STOP are exported to follow the Garnet demo convention,
;; though this chunk does not define them -- TODO confirm they are
;; defined elsewhere in the file.
(defpackage :pixmap-lab
  (:use :common-lisp :kr)
  (:export do-go do-stop))
(in-package :pixmap-lab)
;; Scratch aggregate slot (never assigned in this chunk).
(defparameter agg nil)
;; The demo's top-level Garnet window, set by RUN-DRAW-TRIANGLE-ON-WINDOW.
(defparameter *top-win* nil)
;; Cached CLX display object (never assigned in this chunk).
(defparameter *xlib-display* nil)
;; Saved stream so output can be restored/redirected during experiments.
(defparameter *pixmap-lab-std-out* *standard-output*)
;; NOTE(review): hard-coded absolute path to a developer machine;
;; this LOAD will fail everywhere else -- consider removing or making
;; it relative to the system definition.
(load "/home/rett/dev/garnet/cl-garnet/macro-patch.lisp")
;;;;(load "src/gem/anti-alias-graphics.lisp")
;; (xlib::describe-trace (get-the-xlib-display *top-window*))
(defun get-the-xlib-display (window)
  "Return the low-level CLX (xlib) display object behind the Garnet
window WINDOW, obtained through the gem backend."
  (gem::the-display window))
(defun display-trace-history ()
  "Dump CLX's protocol trace history for the demo window's display.
Fix: the original referenced *TOP-WINDOW*, which is never defined;
the window parameter defined above (and used by every other function
here) is *TOP-WIN*."
  (xlib::display-trace-history (get-the-xlib-display *top-win*)))
(defun run-draw-triangle-on-window ()
  "Destroy any existing demo window, create a fresh 400x410 one, store
it in *TOP-WIN*, and draw the demo polygon into it."
  (when *top-win*
    (opal:destroy *top-win*))
  (let ((win (create-window 400 410)))
    (setf *top-win* win)
    (draw-triangle-on-window win)))
;; Vertices of the demo polygon, each an (X Y) pair in window pixel
;; coordinates.  NOTE(review): "triagle" is a typo for "triangle",
;; and there are four points, so this is actually a quadrilateral;
;; renaming would break DRAW-TRIANGLE-ON-WINDOW's reference, so the
;; name is left as is.
(defparameter *triagle-coordinates*
  '((40 30)
    (10 40)
    (46 60)
    (50 45)))
(defun interactive ()
  "REPL scratch routine for a development session: load the system,
reload this file, enable tracing of CLX image output, and run the
demo.  Not meant to be called programmatically."
  (ql:quickload :xoanon.gui.garnet)
  (load "/home/rett/dev/garnet/cl-garnet/pixmap-lab.lisp")
  ;; NOTE(review): IN-PACKAGE inside a function only rebinds *PACKAGE*
  ;; at run time; it does not affect how this already-read code was
  ;; compiled.
  (in-package :pixmap-lab)
  (trace xlib::get-put-items)
  ;; :break t is an SBCL trace option -- drops into the debugger on call.
  (trace xlib::put-raw-image :break t)
  (run-draw-triangle-on-window)
  ;; NOTE(review): SB-EXT:EXIT terminates the Lisp image, so the final
  ;; call below is unreachable.
  (sb-ext:exit)
  (run-draw-triangle-on-window))
(defun generate-polygon-sides (points)
  "Convert POINTS, a list of (X Y) pairs, into the list of sides of
the closed polygon they describe.  Each side is a flat four-element
list (X1 Y1 X2 Y2).  The first side returned is the closing side
(last point back to first point), followed by the sides between
consecutive points in order.

Robustness fix: the original called (REDUCE #'fn '()) on an empty
POINTS list, which invokes the two-argument lambda with zero
arguments and signals an error; an empty list now simply yields NIL.
A single point yields one degenerate self-to-self side, as before."
  (if (null points)
      '()
      (cons
       ;; Closing side: last point joined back to the first.
       (append (car (last points)) (first points))
       ;; Sides between consecutive points, in input order.
       (loop for (a b) on points
             while b
             collect (append a b)))))
(defun draw-triangle-on-window (win)
  "Render the demo polygon (*TRIAGLE-COORDINATES*) onto the Garnet
window WIN, sized to the window's current dimensions.
The two literal vectors are color triples -- presumably RGB
(foreground #(150 200 255), background #(30 10 0)); TODO confirm
against GEM::VECTOR-CREATE-POLYGON-ON-SURFACE."
  (let ((h (gv win :height))
        (w (gv win :width)))
    (gem::transfer-surface-window
     win
     (gem::vector-create-polygon-on-surface
      h w
      #(150 200 255)
      #(30 10 0)
      (generate-polygon-sides *triagle-coordinates*)))))
(defun create-window (height width)
  "Create, update, and return a double-buffered Garnet window of
HEIGHT x WIDTH pixels at screen position (500, 100).
Note the argument order: HEIGHT first, then WIDTH.
Fix: two commented-out lines in the body had lost their comment
markers (leaving malformed, unbalanced forms that broke READ); they
are restored as comments below."
  (let ((top-win (create-instance nil opal::window
                   (:left 500)
                   (:top 100)
                   (:double-buffered-p t)
                   (:width width)
                   (:height height)
                   (:title "GARNET Animator Demo")
                   (:icon-title "Animator"))))
    ;; (let ((agg (create-instance NIL opal:aggregate)))
    ;;   (s-value top-win :aggregate agg)
    (opal:update top-win)
    top-win))
;; Enter the Garnet interactors main event loop.  Blocks the calling
;; thread until the loop exits.
(defun run-event-loop ()
  (inter:main-event-loop))
;; Accumulates copies of every outgoing X-protocol byte run, most
;; recent first.  Filled by the patched XLIB::BUFFER-WRITE below.
(defparameter xlib::*trace-log-list* '())

(defun xlib::write-to-trace-log (vector start end)
  "Prepend a copy of VECTOR's elements in [START, END) onto
XLIB::*TRACE-LOG-LIST*.
Fix: SUBSEQ already returns a freshly allocated sequence, so the
original's wrapping COPY-SEQ allocated a second, redundant copy; it
has been removed."
  (push (subseq vector start end) xlib::*trace-log-list*))
(deftype buffer-bytes ()
  "A simple (non-adjustable, no fill pointer) one-dimensional array
specialized to octets -- the element type CLX uses for protocol
buffers."
  '(simple-array (unsigned-byte 8) (*)))
;; Scratch example of an adjustable character vector with a fill
;; pointer: capacity 5, active length 3.  The first 3 elements are
;; unspecified characters until assigned.
(defparameter my-array
  (make-array 5
	      :element-type 'character
	      :adjustable t
	      :fill-pointer 3))
;; (make-array 6 :element-type 'sequence
;;             :fill-pointer 3)
(defun starts-with-p (str1 str2)
  "Determine whether `str1` starts with `str2`.
True iff STR2 is a prefix of STR1; the empty string is a prefix of
everything.
Improvement: the original used (SEARCH STR2 STR1), which scans all of
STR1 looking for STR2 anywhere before checking the position; a prefix
test only needs to compare the first (LENGTH STR2) characters."
  (let ((prefix-length (length str2)))
    (and (<= prefix-length (length str1))
         (string= str2 str1 :end2 prefix-length))))
;; (print (starts-with-p "foobar" "foo")) ; T
;; (print (starts-with-p "foobar" "bar")) ; NIL
(defun ends-with-p (str1 str2)
  "Determine whether `str1` ends with `str2`.
True iff STR2 is a suffix of STR1; the empty string is a suffix of
everything.  Uses MISMATCH from the end: a NIL result means the
strings are identical, and a result of 0 means all of STR2 matched."
  (let ((pos (mismatch str2 str1 :from-end t)))
    (if pos
        (zerop pos)
        t)))
(defun get-symbols-defined-in-package (package)
  "Return a list of symbols accessible in PACKAGE whose home package
is PACKAGE itself -- i.e. symbols actually defined there rather than
inherited.  PACKAGE may be any package designator.
Fix/generalization: the original iterated PACKAGE but filtered by the
hard-coded package name \"xlib\", ignoring its own parameter; it now
filters against PACKAGE.  Behavior is unchanged for the existing
call sites, which all pass :XLIB."
  (let ((pkg (find-package package))
        (result '()))
    (do-symbols (symbol pkg)
      ;; Keep only symbols whose home package is PKG itself.
      (when (eq (symbol-package symbol) pkg)
        (push symbol result)))
    result))
(defun get-x-message-types-symbols ()
  "Return the XLIB symbols naming X protocol message-type constants:
names shaped +X-...+ , excluding the +X-RENDER...+ extension
constants.  Result order matches the original (reverse of the scan
order over the XLIB package)."
  (flet ((x-message-name-p (name)
           (and (starts-with-p name "+X-")
                ;; exclude x-render symbols
                (not (starts-with-p name "+X-RENDER"))
                (ends-with-p name "+"))))
    (nreverse
     (loop for symbol in (get-symbols-defined-in-package :xlib)
           when (x-message-name-p (symbol-name symbol))
             collect symbol))))
(defun get-x-message-types-map ()
  "Build and return a hash table mapping X message code (the constant's
value) -> message name string, from the +X-...+ constants in the XLIB
package.  Entries whose value is not an atom, or whose code was
already seen, are skipped and reported with warnings printed to
standard output (both diagnostic lines are printed whenever either
condition fails, matching the original behavior)."
  (let ((hash-table (make-hash-table)))
    (dolist (symbol (get-x-message-types-symbols) hash-table)
      (let ((value (symbol-value symbol)))
        (if (and (atom value)
                 (not (gethash value hash-table)))
            (setf (gethash value hash-table) (symbol-name symbol))
            (progn
              (format t "Warning: (atom (symbol-value symbol)): ~S(~S)~%"
                      symbol (atom value))
              (format t "Warning: (not (gethash (symbol-value symbol) hash-table)): ~S(~S)~%"
                      symbol (not (gethash value hash-table)))))))))
;; Load-time snapshot of all symbols whose home package is XLIB.
(defparameter xlib-symbols (get-symbols-defined-in-package :xlib))
;; Load-time map: X message code -> message-type name string.
(defparameter x-message-types-map (get-x-message-types-map))
;; (defconstant +x-polyfillarc+ 71)
;; NOTE(review): unimplemented stub -- returns NIL and ignores
;; MESSAGE-BYTES.  "dissassemble" is a misspelling of "disassemble",
;; but renaming would change the public interface, so it stays.
(defun dissassemble-x-message (message-bytes)
  )
;; (defconstant +x-polyfillarc+ 71)
(defun xlib::buffer-write (vector buffer start end)
  "Write out VECTOR from START to END into BUFFER.
Internal function, MUST BE CALLED FROM WITHIN WITH-BUFFER.
This redefinition of CLX's internal writer additionally mirrors every
outgoing byte run into the trace log (see XLIB::WRITE-TO-TRACE-LOG).
Fixes: (1) the docstring line had lost its comment/string markers,
leaving bare symbols in the body; (2) the condition name is qualified
as XLIB::CLOSED-DISPLAY -- unqualified, it would intern a fresh
PIXMAP-LAB symbol that names no condition class."
  (declare (type xlib::buffer buffer)
           (type xlib::array-index start end))
  (when (xlib::buffer-dead buffer)
    (xlib::x-error 'xlib::closed-display :display buffer))
  ;; Capture the bytes before handing them to the real writer.
  (xlib::write-to-trace-log vector start end)
  (xlib::wrap-buf-output (buffer)
    (funcall (xlib::buffer-write-function buffer)
             vector buffer start end))
  nil)
;; (defun do-stop ()
;; ;;(opal:destroy top-win)
;; )
;; Drawing on the screen using anti eliasing algorithms with
;; transparentcy has the following steps:
;; 1. requires that we get a snapshot of the current window
;; (get-image <drawable>)
;; (as a zimage), convert the zimage to an array, draw on the array,
;; then replay the window with the new value.
;;(gem::the-display win)
| null | https://raw.githubusercontent.com/earl-ducaine/cl-garnet/f0095848513ba69c370ed1dc51ee01f0bb4dd108/bone-yard/pixmap-lab.lisp | lisp | Syntax : Common - Lisp ; Package : DEMO - ANIMATOR ; Base : 10 -*-
(load "src/gem/anti-alias-graphics.lisp")
(xlib::describe-trace (get-the-xlib-display *top-window*))
closing side.
(print (starts-with-p "foobar" "foo")) ; T
NIL
exclude x-render symbols
(defconstant +x-polyfillarc+ 71)
Internal function, MUST BE CALLED FROM WITHIN WITH-BUFFER
(defun do-stop ()
;;(opal:destroy top-win)
)
Drawing on the screen using anti eliasing algorithms with
transparentcy has the following steps:
(get-image <drawable>)
then replay the window with the new value.
(gem::the-display win) |
(defpackage :pixmap-lab
(:use :common-lisp :kr)
(:export do-go do-stop))
(in-package :pixmap-lab)
(defparameter agg nil)
(defparameter *top-win* nil)
(defparameter *xlib-display* nil)
(defparameter *pixmap-lab-std-out* *standard-output*)
(load "/home/rett/dev/garnet/cl-garnet/macro-patch.lisp")
(defun get-the-xlib-display (garnet-window)
(gem::the-display garnet-window))
(defun display-trace-history ()
(xlib::display-trace-history (get-the-xlib-display *top-window*)))
(defun run-draw-triangle-on-window ()
(when *top-win*
(opal:destroy *top-win*))
(setf *top-win* (create-window 400 410))
(draw-triangle-on-window *top-win*))
(defparameter *triagle-coordinates*
'((40 30)
(10 40)
(46 60)
(50 45)))
(defun interactive ()
(ql:quickload :xoanon.gui.garnet)
(load "/home/rett/dev/garnet/cl-garnet/pixmap-lab.lisp")
(in-package :pixmap-lab)
(trace xlib::get-put-items)
(trace xlib::put-raw-image :break t)
(run-draw-triangle-on-window)
(sb-ext:exit)
(run-draw-triangle-on-window))
(defun generate-polygon-sides (points)
  "Turn a list of (x y) vertices into a list of sides (x1 y1 x2 y2).
The closing side (last vertex back to the first) comes first, followed
by the sides between consecutive vertices, in input order."
  (let ((closing (append (car (last points)) (first points)))
        (consecutive (loop for (p1 p2) on points
                           while p2
                           collect (append p1 p2))))
    (cons closing consecutive)))
;; Render the demo polygon into WIN: rasterize it on an off-screen
;; surface sized to the window, then transfer the surface to the window.
;; The two literal vectors are color triples -- presumably foreground
;; and background RGB; TODO confirm against gem's polygon API.
(defun draw-triangle-on-window (win)
  (let* ((height (gv win :height))
         (width (gv win :width))
         (cl-vector-image
          (gem::vector-create-polygon-on-surface
           height width
           #(150 200 255)
           #(30 10 0)
           (generate-polygon-sides *triagle-coordinates*))))
    (gem::transfer-surface-window win cl-vector-image)))
;; Create, update and return a double-buffered Garnet window of the
;; given HEIGHT and WIDTH.  Note the argument order: height first.
;; Fix: the two aggregate-setup lines below had lost their comment
;; markers (leaving unreadable tokens such as "s - value"); they are
;; restored here as the commented-out code they originally were.
(defun create-window (height width)
  (let ((top-win (create-instance nil opal::window
                   (:left 500)
                   (:top 100)
                   (:double-buffered-p t)
                   (:width width)
                   (:height height)
                   (:title "GARNET Animator Demo")
                   (:icon-title "Animator"))))
    ;; (let ((agg (create-instance nil opal:aggregate)))
    ;;   (s-value top-win :aggregate agg))
    (opal:update top-win)
    top-win))
;; Enter the Garnet interactor main event loop (blocks until it exits).
(defun run-event-loop ()
  (inter:main-event-loop))
;; Log of raw byte chunks written to the X server, newest chunk first.
;; Interned into the XLIB package so the patched buffer-write can use it.
(defparameter xlib::*trace-log-list* '())
;; Capture bytes START..END of VECTOR into the trace log.  copy-seq
;; detaches the chunk from the live output buffer before it is reused.
(defun xlib::write-to-trace-log (vector start end)
  (push (copy-seq (subseq vector start end)) xlib::*trace-log-list*))
;; A raw byte buffer, the element layout used by CLX output buffers.
(deftype buffer-bytes () `(simple-array (unsigned-byte 8) (*)))
;; Scratch array kept for REPL experimentation: adjustable, 5 characters
;; of storage with an initial fill pointer of 3 (contents uninitialized).
(defparameter my-array
  (make-array 5
              :element-type 'character
              :adjustable t
              :fill-pointer 3))
;; (make-array 6 :element-type 'sequence
;;             :fill-pointer 3)
(defun starts-with-p (str1 str2)
  "Determine whether `str1` starts with `str2`."
  (let ((pos (search str2 str1)))
    (and pos (zerop pos))))
(defun ends-with-p (str1 str2)
  "Determine whether `str1` ends with `str2`."
  (let ((pos (mismatch str2 str1 :from-end t)))
    (or (null pos) (zerop pos))))
;; Collect the symbols accessible in PACKAGE whose home package is XLIB.
;; NOTE(review): the filter is hard-wired to the name "XLIB" regardless
;; of the PACKAGE argument, and (symbol-package symbol) can be NIL for
;; uninterned symbols (making package-name error) -- confirm callers
;; only ever pass :xlib, as the ones in this file do.
(defun get-symbols-defined-in-package (package)
  (let ((xlib-symbols '()))
    (do-symbols (symbol package)
      (if (string-equal (symbol-name :xlib)
                        (package-name (symbol-package symbol)))
          (push symbol xlib-symbols)))
    xlib-symbols))
(defun get-x-message-types-symbols ()
  "Collect the xlib constants named like +X-...+ (excluding the
+X-RENDER... family).  The result order is the reverse of the order
produced by GET-SYMBOLS-DEFINED-IN-PACKAGE, matching the original
cons-accumulating fold."
  (let ((acc '()))
    (dolist (sym (get-symbols-defined-in-package :xlib) acc)
      (let ((name (symbol-name sym)))
        (when (and (starts-with-p name "+X-")
                   (not (starts-with-p name "+X-RENDER"))
                   (ends-with-p name "+"))
          (push sym acc))))))
;; Build a hash table mapping X protocol opcode value -> constant name
;; from the +X-...+ constants gathered above.
;; NOTE(review): a symbol whose value is not an atom, or whose value is
;; already in the table, is skipped; the else-branch then prints BOTH
;; warning lines even though only one condition failed -- confirm this
;; diagnostic noise is intentional.
(defun get-x-message-types-map ()
  (let ((hash-table (make-hash-table)))
    (dolist (symbol (get-x-message-types-symbols))
      (if (and (atom (symbol-value symbol))
               (not (gethash (symbol-value symbol) hash-table)))
          (setf (gethash (symbol-value symbol) hash-table)
                (symbol-name symbol))
          (progn
            (format t "Warning: (atom (symbol-value symbol)): ~S(~S)~%"
                    symbol (atom (symbol-value symbol)))
            (format t "Warning: (not (gethash (symbol-value symbol) hash-table)): ~S(~S)~%"
                    symbol (not (gethash (symbol-value symbol) hash-table))))))
    hash-table))
;; Snapshots computed at load time, mainly for REPL inspection.
(defparameter xlib-symbols (get-symbols-defined-in-package :xlib))
(defparameter x-message-types-map (get-x-message-types-map))
( defconstant + x - polyfillarc+ 71 ) )
;; Stub: intended to decode a raw X protocol message; currently ignores
;; its argument and returns NIL.  (The misspelled name is kept as-is.)
(defun dissassemble-x-message (message-bytes)
  )
;; Redefinition of CLX's internal xlib::buffer-write: identical to the
;; stock version except that every outgoing byte range is first captured
;; via xlib::write-to-trace-log for debugging.
;; Fix: the original docline "Write out VECTOR ..." had lost its comment
;; marker, leaving bare symbols in the body; restored as a comment.
(defun xlib::buffer-write (vector buffer start end)
  ;; Write out VECTOR from START to END into BUFFER
  (declare (type xlib::buffer buffer)
           (type xlib::array-index start end))
  (when (xlib::buffer-dead buffer)
    (xlib::x-error 'closed-display :display buffer))
  (xlib::write-to-trace-log vector start end)
  (xlib::wrap-buf-output (buffer)
    (funcall (xlib::buffer-write-function buffer)
             vector buffer start end))
  nil)
1 . requires that we get a snapshot of the current window .
( as a zimage ) , convert the zimage to an array , draw on the array ,
|
867f033a1a9730477648fa14f00b885c465318576a2bf578a9b455a7d7f1ee84 | jeapostrophe/racket-langserver | error-codes.rkt | #lang racket/base
;; LSP error codes (integers sent in JSON-RPC error responses).
;; Defined by JSON RPC
(define PARSE-ERROR -32700)            ; invalid JSON was received
(define INVALID-REQUEST -32600)        ; not a valid Request object
(define METHOD-NOT-FOUND -32601)
(define INVALID-PARAMS -32602)
(define INTERNAL-ERROR -32603)
(define SERVER-ERROR-START -32099)     ; reserved server-error range ...
(define SERVER-ERROR-END -32000)       ; ... down to here
(define SERVER-NOT-INITIALIZED -32002)
(define UNKNOWN-ERROR-CODE -32001)
;; Defined by LSP protocol
(define REQUEST-CANCELLED -32800)
(provide (all-defined-out))
| null | https://raw.githubusercontent.com/jeapostrophe/racket-langserver/1a675e5bac122a4269934cb100e892e00997f304/error-codes.rkt | racket | Defined by JSON RPC | #lang racket/base
(define PARSE-ERROR -32700)
(define INVALID-REQUEST -32600)
(define METHOD-NOT-FOUND -32601)
(define INVALID-PARAMS -32602)
(define INTERNAL-ERROR -32603)
(define SERVER-ERROR-START -32099)
(define SERVER-ERROR-END -32000)
(define SERVER-NOT-INITIALIZED -32002)
(define UNKNOWN-ERROR-CODE -32001)
Defined by LSP protocol
(define REQUEST-CANCELLED -32800)
(provide (all-defined-out))
|
42078a8a3fefa34233822577dfe64987ee47abe409396ab64bad60beb2527a6a | rems-project/lem | path.mli | (**************************************************************************)
(* Lem *)
(* *)
, University of Cambridge
, INRIA Paris - Rocquencourt
, University of Cambridge
, University of Cambridge
, University of Cambridge ( while working on Lem )
, University of Cambridge
, University of Kent
, University of Cambridge
, University of Edinburgh
Shaked Flur , University of Cambridge
, University of Cambridge
, University of Cambridge
(* *)
The Lem sources are copyright 2010 - 2018
by the authors above and Institut National de Recherche en
Informatique et en Automatique ( INRIA ) .
(* *)
All files except / pmap.{ml , mli } and ocaml - libpset.{ml , mli }
(* are distributed under the license below. The former are distributed *)
(* under the LGPLv2, as in the LICENSE file. *)
(* *)
(* *)
(* Redistribution and use in source and binary forms, with or without *)
(* modification, are permitted provided that the following conditions *)
(* are met: *)
1 . Redistributions of source code must retain the above copyright
(* notice, this list of conditions and the following disclaimer. *)
2 . Redistributions in binary form must reproduce the above copyright
(* notice, this list of conditions and the following disclaimer in the *)
(* documentation and/or other materials provided with the distribution. *)
3 . The names of the authors may not be used to endorse or promote
(* products derived from this software without specific prior written *)
(* permission. *)
(* *)
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ` ` AS IS '' AND ANY EXPRESS
(* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED *)
(* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE *)
ARE DISCLAIMED . IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
(* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE *)
(* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS *)
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER
(* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR *)
(* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN *)
(* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *)
(**************************************************************************)
(** internal canonical long identifiers *)
(* t is the type of globally unique identifier paths to definitions *)
type t
(** Total ordering on paths, suitable for use as a map/set key. *)
val compare : t -> t -> int
(** Pretty-print a path on a formatter (for debugging output). *)
val pp : Format.formatter -> t -> unit
(** Build a path from an identifier. *)
val from_id : Ident.t -> t
(*
val get_name : t -> Name.t
*)
(** [mk_path ns n] builds the path with module prefix [ns] and final
    name [n]. *)
val mk_path : Name.t list -> Name.t -> t
(** [mk_path_list names] splits names into [ns @ [n]] and calls
    [mk_path ns n]. It fails, if [names] is empty. *)
val mk_path_list : Name.t list -> t
(** [get_module_path p] returns the module path of path [p]. If
    [p] is a path of an identifier [m0. ... .mn.f], then [get_module_path]
    returns the module path [m0. ... .mn]. If the path does not have
    a module prefix, i.e. if it is a single name [f], [None] is returned. *)
val get_module_path : t -> t option
(** Predefined path of the built-in [nat] type. *)
val natpath : t
(** Predefined path of the built-in [list] type. *)
val listpath : t
(* TODO The vector type does not seem to be supported in the prover backends. Remove? *)
val vectorpath : t
(** Predefined path of the built-in [bool] type. *)
val boolpath : t
(** Predefined path of the built-in [bit] type. *)
val bitpath : t
(** Predefined path of the built-in [set] type. *)
val setpath : t
(** Predefined path of the built-in [string] type. *)
val stringpath : t
(** Predefined path of the built-in [unit] type. *)
val unitpath : t
(** Predefined path of the built-in [char] type. *)
val charpath : t
(** Predefined path of the built-in numeral type. *)
val numeralpath : t
(** [get_name p] returns the final (unqualified) name of [p]; compare
    [to_name_list], whose second component is this name. *)
val get_name : t -> Name.t
(** [get_toplevel_name p] gets the outmost name of a path. This is important
    when checking prefixes. For example, the result for path [module.submodule.name] is
    [module] and for [name] it is [name]. *)
val get_toplevel_name: t -> Name.t
(** [check_prefix n p] tests whether [n] is the outermost prefix of [p]. *)
val check_prefix : Name.t -> t -> bool
(** Convert a path to an identifier, attaching the given whitespace. *)
val to_ident : Ast.lex_skips -> t -> Ident.t
val to_name : t -> Name.t
(** [to_name_list p] splits [p] into its module prefix and final name. *)
val to_name_list : t -> Name.t list * Name.t
val to_string : t -> string
(*
val to_rope : (Ast.lex_skips -> Ulib.Text.t) -> Ulib.Text.t -> t -> Ulib.Text.t
val get_lskip: t -> Ast.lex_skips
val add_pre_lskip : Ast.lex_skips -> t -> t
val drop_parens : t -> t
val is_lower : t -> bool
val is_upper : t -> bool
val to_lower : t -> t
val to_upper : t -> t
val prefix_is_lower : t -> bool
val prefix_is_upper : t -> bool
val prefix_to_lower : t -> t
val prefix_to_upper : t -> t
*)
| null | https://raw.githubusercontent.com/rems-project/lem/a839114e468119d9ac0868d7dc53eae7f3cc3a6c/src/path.mli | ocaml | ************************************************************************
Lem
are distributed under the license below. The former are distributed
under the LGPLv2, as in the LICENSE file.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
products derived from this software without specific prior written
permission.
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
************************************************************************
* internal canonical long identifiers
t is the type of globally unique identifier paths to definitions
* [mk_path_list names] splits names into [ns @ [n]] and calls
[mk_path ns n]. It fails, if [names] is empty.
TODO The vector type does not seem to be supported in the prover backends. Remove?
* [get_toplevel_name p] gets the outmost name of a path. This is important
when checking prefixes. For example, the result for path [module.submodule.name] is
[module] and for [name] it is [name]. | , University of Cambridge
, INRIA Paris - Rocquencourt
, University of Cambridge
, University of Cambridge
, University of Cambridge ( while working on Lem )
, University of Cambridge
, University of Kent
, University of Cambridge
, University of Edinburgh
Shaked Flur , University of Cambridge
, University of Cambridge
, University of Cambridge
The Lem sources are copyright 2010 - 2018
by the authors above and Institut National de Recherche en
Informatique et en Automatique ( INRIA ) .
All files except / pmap.{ml , mli } and ocaml - libpset.{ml , mli }
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . The names of the authors may not be used to endorse or promote
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ` ` AS IS '' AND ANY EXPRESS
ARE DISCLAIMED . IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER
type t
val compare : t -> t -> int
val pp : Format.formatter -> t -> unit
val from_id : Ident.t -> t
val : t - > Name.t
val get_name : t -> Name.t
*)
val mk_path : Name.t list -> Name.t -> t
val mk_path_list : Name.t list -> t
* ] returns the module path of path [ p ] . If
if is a path of an identifier [ m0 . ... . mn . f ] , then [ get_module ]
returns the module path [ m0 . ... . mn ] . If the path does not have
a module prefix , i.e. if it is a single name [ f ] , [ None ] is returned .
if is a path of an identifier [m0. ... . mn . f], then [get_module]
returns the module path [m0. ... . mn]. If the path does not have
a module prefix, i.e. if it is a single name [f], [None] is returned. *)
val get_module_path : t -> t option
val natpath : t
val listpath : t
val vectorpath : t
val boolpath : t
val bitpath : t
val setpath : t
val stringpath : t
val unitpath : t
val charpath : t
val numeralpath : t
val get_name : t -> Name.t
val get_toplevel_name: t -> Name.t
val check_prefix : Name.t -> t -> bool
val to_ident : Ast.lex_skips -> t -> Ident.t
val to_name : t -> Name.t
val to_name_list : t -> Name.t list * Name.t
val to_string : t -> string
val to_rope : ( Ast.lex_skips - > Ulib . Text.t ) - > Ulib . Text.t - > t - > Ulib . Text.t
val get_lskip : t - > Ast.lex_skips
val add_pre_lskip : Ast.lex_skips - > t - > t
val drop_parens : t - > t
val is_lower : t - > bool
val is_upper : t - > bool
val to_lower : t - > t
val to_upper : t - > t
val prefix_is_lower : t - > bool
val prefix_is_upper : t - > bool
val prefix_to_lower : t - > t
val prefix_to_upper : t - > t
val to_rope : (Ast.lex_skips -> Ulib.Text.t) -> Ulib.Text.t -> t -> Ulib.Text.t
val get_lskip: t -> Ast.lex_skips
val add_pre_lskip : Ast.lex_skips -> t -> t
val drop_parens : t -> t
val is_lower : t -> bool
val is_upper : t -> bool
val to_lower : t -> t
val to_upper : t -> t
val prefix_is_lower : t -> bool
val prefix_is_upper : t -> bool
val prefix_to_lower : t -> t
val prefix_to_upper : t -> t
*)
|
455c33a190971c12f34b4584f6d3002e0e4b3a8e9f9b6860daf2462c2d813dc3 | volhovm/orgstat | Script.hs | -- | Script output type. We launch the executable asked after
-- injecting the environment variables related to the report.
module OrgStat.Outputs.Script
( processScriptOutput
) where
import Universum
import Control.Lens (views)
import qualified Data.Map.Strict as M
import System.Environment (lookupEnv, setEnv, unsetEnv)
import System.Process (callCommand)
import OrgStat.Ast
import OrgStat.Config (confReports, crName)
import OrgStat.Helpers (resolveReport)
import OrgStat.Outputs.Types (ScriptParams(..))
import OrgStat.Util (timeF)
import OrgStat.WorkMonad (WorkM, wcConfig)
-- | Processes script output.
--
-- Resolves the requested reports (all configured reports when none are
-- given), exports per-report statistics as environment variables
-- (@name@, @nameMean@, @nameMedian@, @namePomodoro@,
-- @nameDurationsList@), runs the user script under 'spInterpreter',
-- then restores or removes the variables it set.
-- Fix: the two disabled logWarning lines below had lost their @--@
-- markers (leaving unparsable @< >@ tokens); restored as comments.
processScriptOutput :: ScriptParams -> WorkM ()
processScriptOutput ScriptParams{..} = do
  -- Considering all the reports if none are specified.
  reportsToConsider <- case spReports of
    [] -> views wcConfig (map crName . confReports)
    xs -> pure xs
  allReports <- mapM (\r -> (r,) <$> resolveReport r) reportsToConsider
  -- Set env variables
  prevVars <- forM allReports $ \(toString -> reportName,org) -> do
      let duration = timeF $ orgTotalDuration $ filterHasClock org
      let mean = timeF $ orgMeanDuration $ filterHasClock org
      let median = timeF $ orgMedianDuration $ filterHasClock org
      let pomodoro = orgPomodoroNum $ filterHasClock org
      let toMinutes x = round x `div` 60
      -- logWarning $ "1: " <> show org
      -- logWarning $ "2: " <> show (filterHasClock org)
      -- logWarning $ "3: " <> show (orgDurations $ filterHasClock org)
      let durationsPyth :: [Int] = map toMinutes $ orgDurations $ filterHasClock org
      (prevVar :: Maybe String) <- liftIO $ lookupEnv reportName
      liftIO $ setEnv reportName (toString duration)
      liftIO $ setEnv (reportName <> "Mean") (toString mean)
      liftIO $ setEnv (reportName <> "Median") (toString median)
      liftIO $ setEnv (reportName <> "Pomodoro") (show pomodoro)
      liftIO $ setEnv (reportName <> "DurationsList") (show durationsPyth)
      pure $ (reportName,) <$> prevVar
  let prevVarsMap :: Map String String
      prevVarsMap = M.fromList $ catMaybes prevVars
  -- Execute script
  let cmdArgument = either id (\t -> "-c \"" <> toString t <> "\"") spScript
  liftIO $ callCommand $
    spInterpreter <> " " <> cmdArgument
    --"/bin/env sh " <> cmdArgument
  -- Restore the old variables, clean new.
  forM_ (map fst allReports) $ \(toString -> reportName) -> do
    liftIO $ case M.lookup reportName prevVarsMap of
      Nothing -> unsetEnv reportName
      Just prevValue -> setEnv reportName prevValue
| null | https://raw.githubusercontent.com/volhovm/orgstat/92d55971be73d82f2e94435488654d7a14a12c0f/src/OrgStat/Outputs/Script.hs | haskell | | Script output type. We launch the executable asked after
injecting the environment variables related to the report.
| Processes script output.
Considering all the reports if none are specified.
Set env variables
logWarning $ "1: " <> show org
Execute script
"/bin/env sh " <> cmdArgument
Restore the old variables, clean new. |
module OrgStat.Outputs.Script
( processScriptOutput
) where
import Universum
import Control.Lens (views)
import qualified Data.Map.Strict as M
import System.Environment (lookupEnv, setEnv, unsetEnv)
import System.Process (callCommand)
import OrgStat.Ast
import OrgStat.Config (confReports, crName)
import OrgStat.Helpers (resolveReport)
import OrgStat.Outputs.Types (ScriptParams(..))
import OrgStat.Util (timeF)
import OrgStat.WorkMonad (WorkM, wcConfig)
processScriptOutput :: ScriptParams -> WorkM ()
processScriptOutput ScriptParams{..} = do
reportsToConsider <- case spReports of
[] -> views wcConfig (map crName . confReports)
xs -> pure xs
allReports <- mapM (\r -> (r,) <$> resolveReport r) reportsToConsider
prevVars <- forM allReports $ \(toString -> reportName,org) -> do
let duration = timeF $ orgTotalDuration $ filterHasClock org
let mean = timeF $ orgMeanDuration $ filterHasClock org
let median = timeF $ orgMedianDuration $ filterHasClock org
let pomodoro = orgPomodoroNum $ filterHasClock org
let toMinutes x = round x `div` 60
logWarning $ " 2 : " < > show ( filterHasClock org )
logWarning $ " 3 : " < > show ( orgDurations $ filterHasClock org )
let durationsPyth :: [Int] = map toMinutes $ orgDurations $ filterHasClock org
(prevVar :: Maybe String) <- liftIO $ lookupEnv reportName
liftIO $ setEnv reportName (toString duration)
liftIO $ setEnv (reportName <> "Mean") (toString mean)
liftIO $ setEnv (reportName <> "Median") (toString median)
liftIO $ setEnv (reportName <> "Pomodoro") (show pomodoro)
liftIO $ setEnv (reportName <> "DurationsList") (show durationsPyth)
pure $ (reportName,) <$> prevVar
let prevVarsMap :: Map String String
prevVarsMap = M.fromList $ catMaybes prevVars
let cmdArgument = either id (\t -> "-c \"" <> toString t <> "\"") spScript
liftIO $ callCommand $
spInterpreter <> " " <> cmdArgument
forM_ (map fst allReports) $ \(toString -> reportName) -> do
liftIO $ case M.lookup reportName prevVarsMap of
Nothing -> unsetEnv reportName
Just prevValue -> setEnv reportName prevValue
where
|
4f72bab31581c8273481afebefa49c2ff6733495a46d12692f45775ab956ce73 | mattsta/er | er_pool.erl | -module(er_pool).
-behaviour(gen_server).
%% gen_server callbacks
-export([init/1,
handle_call/3, handle_cast/2,
handle_info/2, terminate/2, code_change/3]).
%% api callbacks
-export([start_link/0, start_link/1, start_link/3, start_link/4]).
-export([start_link_nameless/2, start_link_nameless/3, start_link_nameless/4]).
%% Pool state: ip/port identify the redis endpoint; available holds idle
%% er_redis connection pids, reserved holds pids handed out for stateful
%% commands (multi/watch/subscribe/...); error_strategy says what to do
%% when a (re)connect fails.
-record(state, {ip :: string(),
                port :: pos_integer(),
                available :: [pid()],
                reserved :: [pid()],
                error_strategy :: {retry, pos_integer()} | % retry count
                                  {wait, pos_integer()} | % retry every-N ms
                                  crash
               }).
%%====================================================================
%% api callbacks
%%====================================================================
% With names
%% Start a locally-registered pool.  Each shorter arity fills in one more
%% default: name ?MODULE, host "127.0.0.1", port 6379, 25 sockets,
%% crash-on-connect-error strategy.
start_link() ->
  start_link(?MODULE).
start_link(GenServerName) when is_atom(GenServerName) ->
  start_link(GenServerName, "127.0.0.1", 6379).
start_link(GenServerName, IP, Port) when is_atom(GenServerName) ->
  start_link(GenServerName, IP, Port, 25).
start_link(GenServerName, IP, Port, SocketCount) when is_atom(GenServerName) ->
  start_link(GenServerName, IP, Port, SocketCount, crash).
%% NOTE(review): start_link/5 is the clause that actually calls
%% gen_server:start_link, yet it is absent from the -export list above.
start_link(GenServerName, IP, Port, SocketCount, Strategy)
  when is_atom(GenServerName) ->
  gen_server:start_link({local, GenServerName}, ?MODULE,
                        [IP, Port, SocketCount, Strategy], []).
% Without names
%% Same defaults as the named variants, but the gen_server is not
%% registered; callers must keep the returned pid.
start_link_nameless(IP, Port) ->
  start_link_nameless(IP, Port, 25).
start_link_nameless(IP, Port, SocketCount) ->
  start_link_nameless(IP, Port, SocketCount, crash).
start_link_nameless(IP, Port, SocketCount, Strategy) ->
  gen_server:start_link(?MODULE, [IP, Port, SocketCount, Strategy], []).
%%====================================================================
%% gen_server callbacks
%%====================================================================
%%--------------------------------------------------------------------
%% Function: init(Args) -> {ok, State} |
%%                         {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% Description: Initiates the server
%%--------------------------------------------------------------------
%% Trap exits so dead er_redis connections arrive as {'EXIT',...} info
%% messages, then open the initial SocketCount connections.  A connect
%% failure during startup aborts init with {stop, Error}.
init([IP, Port, SocketCount, Strategy]) when is_list(IP), is_integer(Port) ->
  process_flag(trap_exit, true),
  PreState = #state{ip = IP, port = Port, error_strategy = Strategy},
  try initial_connect(SocketCount, PreState) of
    State -> {ok, State}
  catch
    throw:Error -> {stop, Error}
  end.
%%--------------------------------------------------------------------
%% Function: handle_call(Request, From, State) -> {reply, Reply, State} |
%%                                                {reply, Reply, State, Timeout} |
%%                                                {noreply, State} |
%%                                                {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% Description: Handling call messages
%%--------------------------------------------------------------------
% These commands persist state on one connection. We can't add
% their connection back to the general pool.
%% Fix: the first line above and the "Transactional returns" line below
%% had lost their leading '%' markers (leaving bare tokens that do not
%% compile); restored as comments.  Code is otherwise unchanged.
handle_call({cmd, Parts}, From,
            #state{available = [H|T], reserved = R} = State) when
    hd(Parts) =:= <<"multi">> orelse
    hd(Parts) =:= <<"watch">> orelse
    hd(Parts) =:= <<"subscribe">> orelse
    hd(Parts) =:= <<"psubscribe">> orelse
    hd(Parts) =:= <<"monitor">> ->
  %% Reply from a helper process so the pool stays responsive; the reply
  %% carries H so the caller can keep talking to that reserved socket.
  spawn(fun() ->
          gen_server:reply(From, {H, gen_server:call(H, {cmd, Parts}, infinity)})
        end),
  Caller = self(),
  %% Ask ourselves to open a replacement connection for the pool.
  spawn(fun() -> Caller ! add_connection end),
  {noreply, State#state{available = T, reserved = [H | R]}};
% Blocking list ops *do* block, but don't need to return their er_redis pid
% Transactional returns should self-clean-up
handle_call({cmd, Parts}, From,
            #state{available = [H|T], reserved = R} = State) when
    hd(Parts) =:= <<"exec">> orelse
    hd(Parts) =:= <<"discard">> orelse
    hd(Parts) =:= <<"blpop">> orelse
    hd(Parts) =:= <<"brpoplpush">> orelse
    hd(Parts) =:= <<"brpop">> ->
  Caller = self(),
  %% NOTE(review): exec/discard are sent on a fresh connection taken
  %% from the available pool, not on the connection that ran
  %% multi/watch -- verify against how callers use the pid returned by
  %% the clause above.
  spawn(fun() ->
          gen_server:reply(From, gen_server:call(H, {cmd, Parts}, infinity)),
          Caller ! {done_processing_reserved, H}
        end),
  spawn(fun() -> Caller ! add_connection end),
  {noreply, State#state{available = T, reserved = [H | R]}};
%% Plain commands: run on the head connection and rotate it to the back.
handle_call({cmd, Parts}, From,
            #state{available = [H|T]} = State) ->
  spawn(fun() ->
          gen_server:reply(From, gen_server:call(H, {cmd, Parts}, infinity))
        end),
  {noreply, State#state{available = T ++ [H]}}.
%%--------------------------------------------------------------------
%% Function: handle_cast(Msg, State) -> {noreply, State} |
%%                                      {noreply, State, Timeout} |
%% {stop, Reason, State}
%% Description: Handling cast messages
%%--------------------------------------------------------------------
%% This pool uses no casts; ignore anything received.
handle_cast(_Msg, State) ->
  {noreply, State}.
%%--------------------------------------------------------------------
%% Function: handle_info(Info, State) -> {noreply, State} |
%%                                       {noreply, State, Timeout} |
%% {stop, Reason, State}
%% Description: Handling all non call/cast messages
%%--------------------------------------------------------------------
% A blocking or reserved operation finished. Add the reserved server
% back into the available pool.
handle_info({done_processing_reserved, Pid},
            #state{available=Available, reserved=Reserved} = State) ->
  RemovedOld = Reserved -- [Pid],
  NewAvail = [Pid | Available],
  {noreply, State#state{available=NewAvail, reserved=RemovedOld}};
% Request (sent from handle_call) to open one replacement connection
% for a socket that was just reserved.
% {wait, N} and {retry, N} are not perfect right now.
handle_info(add_connection, #state{available=Available} = State) ->
  try connect(State) of
    Connected -> {noreply, State#state{available=[Connected | Available]}}
  catch
    throw:Error -> run_error_strategy(Error, State)
  end;
% An er_redis died because of a connection error. Do something.
% {wait, N} and {retry, N} are not perfect right now.
handle_info({'EXIT', _Pid, {er_connect_failed, _, _, _}} = Error, State) ->
  run_error_strategy(Error, State);
% An er_redis died because of some other error. Remove it from list of
% servers (whichever of the two pools it was in) and replace it with a
% freshly opened connection.
handle_info({'EXIT', Pid, _Reason},
            #state{available=Available, reserved=Reserved} = State) ->
  try connect(State) of
    Connected ->
      case lists:member(Pid, Available) of
        true -> RemovedOld = Available -- [Pid],
                NewAvail = [Connected | RemovedOld],
                {noreply, State#state{available = NewAvail}};
        false -> RemovedOld = Reserved -- [Pid],
                 NewAvail = [Connected | Available],
                 {noreply, State#state{available = NewAvail,
                                       reserved = RemovedOld}}
      end
  catch
    throw:Error -> run_error_strategy(Error, State)
  end;
%% Polite shutdown request.
handle_info(shutdown, State) ->
  {stop, normal, State};
%% Anything else is unexpected: log it and carry on.
handle_info(Info, State) ->
  error_logger:error_msg("Other info: ~p with state ~p~n", [Info, State]),
  {noreply, State}.
%%--------------------------------------------------------------------
%% Function: terminate(Reason, State) -> void()
%% Description: This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
%%--------------------------------------------------------------------
%% Shut down every pooled connection, idle and reserved alike.
terminate(_Reason, #state{available=Available, reserved=Reserved}) ->
  [exit(P, normal) || P <- Available],
  [exit(P, normal) || P <- Reserved].
%%--------------------------------------------------------------------
%% Func: code_change(OldVsn, State, Extra) -> {ok, NewState}
%% Description: Convert process state when code is changed
%%--------------------------------------------------------------------
%% No state conversion is needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
  {ok, State}.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
%% Open SockCount connections and install them as the available pool.
%% Called from init/1; propagates the throw from connect/1 on failure.
initial_connect(SockCount, State) ->
  ErServers = [connect(State) || _ <- lists:seq(1, SockCount)],
  State#state{available = ErServers, reserved = []}.
%% Open one er_redis connection to the pool's endpoint and return its
%% pid; throws {er_pool, connect, Other} on any failure.
connect(#state{ip = IP, port = Port}) ->
  case er_redis:connect(IP, Port) of
    {ok, Server} -> Server;
    Other -> throw({er_pool, connect, Other})
  end.
%% React to a failed (re)connect according to error_strategy.
%% NOTE(review): {wait, N} sleeps inside the gen_server -- blocking all
%% callers for N ms -- and then continues WITHOUT re-adding the lost
%% connection; {retry, N} only counts failures down until it stops the
%% server.  Both match the "not perfect right now" comments above.
run_error_strategy(ErError, #state{error_strategy = Strategy} = State) ->
  case Strategy of
    {wait, N} -> timer:sleep(N),
                 {noreply, State};
    {retry, N} -> case N > 0 of
                    true -> {noreply, State#state{error_strategy={retry, N-1}}};
                    false -> {stop, max_retries_reached, State}
                  end;
    _ -> {stop, {er_error, ErError}, State}
  end.
| null | https://raw.githubusercontent.com/mattsta/er/7ac6dccf4952ddf32d921b6548026980a3db70c7/src/er_pool.erl | erlang | gen_server callbacks
api callbacks
retry count
retry every-N ms
====================================================================
api callbacks
====================================================================
With names
Without names
====================================================================
gen_server callbacks
====================================================================
--------------------------------------------------------------------
Function: init(Args) -> {ok, State} |
ignore |
{stop, Reason}
Description: Initiates the server
--------------------------------------------------------------------
--------------------------------------------------------------------
% handle_call(Request , From , State ) - > { reply , Reply , State } |
{stop, Reason, Reply, State} |
{stop, Reason, State}
Description: Handling call messages
--------------------------------------------------------------------
their connection back to the general pool.
Blocking list ops *do* block, but don't need to return their er_redis pid
--------------------------------------------------------------------
{stop, Reason, State}
Description: Handling cast messages
--------------------------------------------------------------------
--------------------------------------------------------------------
{stop, Reason, State}
Description: Handling all non call/cast messages
--------------------------------------------------------------------
A blocking or reserved operation finished. Add the reserved server
back into the available pool.
An er_redis died because of a connection error. Do something.
{wait, N} and {retry, N} are not perfect right now.
An er_redis died because of a connection error. Do something.
{wait, N} and {retry, N} are not perfect right now.
An er_redis died because of some other error. Remove it from list of servers.
--------------------------------------------------------------------
Function: terminate(Reason, State) -> void()
Description: This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any necessary
cleaning up. When it returns, the gen_server terminates with Reason.
The return value is ignored.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Convert process state when code is changed
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | -module(er_pool).
-behaviour(gen_server).
-export([init/1,
handle_call/3, handle_cast/2,
handle_info/2, terminate/2, code_change/3]).
-export([start_link/0, start_link/1, start_link/3, start_link/4]).
-export([start_link_nameless/2, start_link_nameless/3, start_link_nameless/4]).
-record(state, {ip :: string(),
port :: pos_integer(),
available :: [pid()],
reserved :: [pid()],
crash
}).
start_link() ->
start_link(?MODULE).
start_link(GenServerName) when is_atom(GenServerName) ->
start_link(GenServerName, "127.0.0.1", 6379).
start_link(GenServerName, IP, Port) when is_atom(GenServerName) ->
start_link(GenServerName, IP, Port, 25).
start_link(GenServerName, IP, Port, SocketCount) when is_atom(GenServerName) ->
start_link(GenServerName, IP, Port, SocketCount, crash).
start_link(GenServerName, IP, Port, SocketCount, Strategy)
when is_atom(GenServerName) ->
gen_server:start_link({local, GenServerName}, ?MODULE,
[IP, Port, SocketCount, Strategy], []).
start_link_nameless(IP, Port) ->
start_link_nameless(IP, Port, 25).
start_link_nameless(IP, Port, SocketCount) ->
start_link_nameless(IP, Port, SocketCount, crash).
start_link_nameless(IP, Port, SocketCount, Strategy) ->
gen_server:start_link(?MODULE, [IP, Port, SocketCount, Strategy], []).
{ ok , State , Timeout } |
init([IP, Port, SocketCount, Strategy]) when is_list(IP), is_integer(Port) ->
process_flag(trap_exit, true),
PreState = #state{ip = IP, port = Port, error_strategy = Strategy},
try initial_connect(SocketCount, PreState) of
State -> {ok, State}
catch
throw:Error -> {stop, Error}
end.
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
These commands persisit state on one connection . We ca n't add
%% Commands that persist state on their connection (multi/watch/subscribe/
%% psubscribe/monitor): hand socket H to the caller exclusively.  The reply
%% is {Pid, Result} so the caller can route follow-up commands to the same
%% connection; H moves from `available' to `reserved', and we message
%% ourselves (add_connection) to open a replacement socket.
%% NOTE(review): no clause matches an empty `available' list, so a call on
%% an exhausted pool crashes the server -- confirm this is intended.
handle_call({cmd, Parts}, From,
            #state{available = [H|T], reserved = R} = State) when
      hd(Parts) =:= <<"multi">> orelse
      hd(Parts) =:= <<"watch">> orelse
      hd(Parts) =:= <<"subscribe">> orelse
      hd(Parts) =:= <<"psubscribe">> orelse
      hd(Parts) =:= <<"monitor">> ->
    %% Reply from a throwaway process so the pool never blocks on redis.
    spawn(fun() ->
            gen_server:reply(From, {H, gen_server:call(H, {cmd, Parts}, infinity)})
          end),
    Caller = self(),
    spawn(fun() -> Caller ! add_connection end),
    {noreply, State#state{available = T, reserved = [H | R]}};
%% Transactional returns should self-clean-up.
%% Commands that end (exec/discard) or block (blpop/brpop/brpoplpush) an
%% interaction: run off-process, and once the command completes notify
%% ourselves ({done_processing_reserved, H}) so H is moved back from
%% `reserved' to `available'.
handle_call({cmd, Parts}, From,
            #state{available = [H|T], reserved = R} = State) when
      hd(Parts) =:= <<"exec">> orelse
      hd(Parts) =:= <<"discard">> orelse
      hd(Parts) =:= <<"blpop">> orelse
      hd(Parts) =:= <<"brpoplpush">> orelse
      hd(Parts) =:= <<"brpop">> ->
    Caller = self(),
    spawn(fun() ->
            gen_server:reply(From, gen_server:call(H, {cmd, Parts}, infinity)),
            Caller ! {done_processing_reserved, H}
          end),
    spawn(fun() -> Caller ! add_connection end),
    {noreply, State#state{available = T, reserved = [H | R]}};
%% All other commands: run on the next free socket and rotate it to the back
%% of the available list (round-robin).
handle_call({cmd, Parts}, From,
            #state{available = [H|T]} = State) ->
    spawn(fun() ->
            gen_server:reply(From, gen_server:call(H, {cmd, Parts}, infinity))
          end),
    {noreply, State#state{available = T ++ [H]}}.
%% handle_cast(Msg, State) -> {noreply, State} | {noreply, State, Timeout} |
%%                            {stop, Reason, State}.
%% No casts are used by this module; ignore anything received.
handle_cast(_Msg, State) ->
    {noreply, State}.
%% handle_info(Info, State) -> {noreply, State} | {noreply, State, Timeout} |
%%                             {stop, Reason, State}.
%% A reserved interaction finished: return its socket to the available list.
handle_info({done_processing_reserved, Pid},
            #state{available=Available, reserved=Reserved} = State) ->
    RemovedOld = Reserved -- [Pid],
    NewAvail = [Pid | Available],
    {noreply, State#state{available=NewAvail, reserved=RemovedOld}};
%% Open one more connection (requested whenever a socket is reserved).
%% NOTE(review): the reserved socket is also returned via
%% done_processing_reserved, so each reservation appears to grow the pool by
%% one socket -- confirm whether that is intended.
handle_info(add_connection, #state{available=Available} = State) ->
    try connect(State) of
        Connected -> {noreply, State#state{available=[Connected | Available]}}
    catch
        throw:Error -> run_error_strategy(Error, State)
    end;
%% A connection process died because it could not (re)connect: defer to the
%% configured error strategy.
handle_info({'EXIT', _Pid, {er_connect_failed, _, _, _}} = Error, State) ->
    run_error_strategy(Error, State);
%% Any other linked-socket death: replace the dead pid with a fresh socket
%% in whichever list (available or reserved) it was in.
handle_info({'EXIT', Pid, _Reason},
            #state{available=Available, reserved=Reserved} = State) ->
    try connect(State) of
        Connected ->
            case lists:member(Pid, Available) of
                true -> RemovedOld = Available -- [Pid],
                        NewAvail = [Connected | RemovedOld],
                        {noreply, State#state{available = NewAvail}};
                false -> RemovedOld = Reserved -- [Pid],
                         NewAvail = [Connected | Available],
                         {noreply, State#state{available = NewAvail,
                                               reserved = RemovedOld}}
            end
    catch
        throw:Error -> run_error_strategy(Error, State)
    end;
handle_info(shutdown, State) ->
    {stop, normal, State};
%% Log and ignore anything unexpected.
handle_info(Info, State) ->
    error_logger:error_msg("Other info: ~p with state ~p~n", [Info, State]),
    {noreply, State}.
%% gen_server terminate callback: ask every pooled connection, free or
%% reserved, to shut down.  gen_server ignores the return value.
terminate(_Reason, #state{available=Available, reserved=Reserved}) ->
    Close = fun(Pid) -> exit(Pid, normal) end,
    lists:foreach(Close, Available),
    lists:foreach(Close, Reserved).
%% code_change(OldVsn, State, Extra) -> {ok, NewState}
%% No state conversion needed between code versions.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Internal functions
%% Open SockCount connections up front; throws {er_pool, connect, Reason}
%% (via connect/1) if any of them fails.
initial_connect(SockCount, State) ->
    Servers = lists:map(fun(_) -> connect(State) end, lists:seq(1, SockCount)),
    State#state{available = Servers, reserved = []}.
%% Open a single er_redis connection to the pool's host/port; returns the
%% connection pid or throws {er_pool, connect, Reason}.
connect(#state{ip = IP, port = Port}) ->
    case er_redis:connect(IP, Port) of
        {ok, Server} -> Server;
        Other -> throw({er_pool, connect, Other})
    end.
%% Apply the configured error strategy after a connect failure:
%%   {wait, Millis} - sleep, then keep running with the current state;
%%   {retry, N}     - keep running while retries remain, else stop;
%%   anything else  - stop immediately, carrying the error.
run_error_strategy(_ErError, #state{error_strategy = {wait, Millis}} = State) ->
    timer:sleep(Millis),
    {noreply, State};
run_error_strategy(_ErError, #state{error_strategy = {retry, N}} = State) when N > 0 ->
    {noreply, State#state{error_strategy = {retry, N - 1}}};
run_error_strategy(_ErError, #state{error_strategy = {retry, _N}} = State) ->
    {stop, max_retries_reached, State};
run_error_strategy(ErError, State) ->
    {stop, {er_error, ErError}, State}.
|
37067d142612bcedf6012c2ae595047f2ba10fecbf601a57af2a3fe0f75d0e42 | janestreet/core_unix | time_functor.ml | *
Outside of Core Time appears to be a single module with a number of submodules :
- Time
- Span
- Ofday
- Zone
The reality under the covers is n't as simple for a three reasons :
- We want as much Time functionality available to Core as possible , and Core modules
should n't rely on Unix functions . Some functions in Time require Unix , which creates
one split .
- We want some functionality to be functorized so that code can be shared
between Time and Time_ns .
- Time has internal circular dependencies . For instance , Ofday.now relies on
Time.now , but Time also wants to expose Time.to_date_ofday , which relies on Ofday .
We use a stack of modules to break the cycle .
This leads to the following modules within Core :
Core . Span - the core type of span
Core . Ofday - the core type of ofday , which is really a constrained span
Core . Date - the core type of date
Core . Zone - the base functor for creating a Zone type
Core . Time_float0 - contains the base Time.t type and lays out the basic
relationship between Time , Span , Ofday , and Zone
Core . Time_float - ties Time , Span , Ofday , Zone , and Date together and provides
the higher level functions for them that do n't rely on Unix
Core . Time - re - exposes Time_float
Core . Zone_cache - implements a caching layer between the Unix filesystem and Zones
Core . Core_date - adds the Unix dependent functions to Date
Core . Core_time - adds the Unix dependent functions to Time
Core - renames the Core_{base } modules to { base } for ease of access in
modules outside of Core
Outside of Core Time appears to be a single module with a number of submodules:
- Time
- Span
- Ofday
- Zone
The reality under the covers isn't as simple for a three reasons:
- We want as much Time functionality available to Core as possible, and Core modules
shouldn't rely on Unix functions. Some functions in Time require Unix, which creates
one split.
- We want some functionality to be functorized so that code can be shared
between Time and Time_ns.
- Time has internal circular dependencies. For instance, Ofday.now relies on
Time.now, but Time also wants to expose Time.to_date_ofday, which relies on Ofday.
We use a stack of modules to break the cycle.
This leads to the following modules within Core:
Core.Span - the core type of span
Core.Ofday - the core type of ofday, which is really a constrained span
Core.Date - the core type of date
Core.Zone - the base functor for creating a Zone type
Core.Time_float0 - contains the base Time.t type and lays out the basic
relationship between Time, Span, Ofday, and Zone
Core.Time_float - ties Time, Span, Ofday, Zone, and Date together and provides
the higher level functions for them that don't rely on Unix
Core.Time - re-exposes Time_float
Core.Zone_cache - implements a caching layer between the Unix filesystem and Zones
Core.Core_date - adds the Unix dependent functions to Date
Core.Core_time - adds the Unix dependent functions to Time
Core - renames the Core_{base} modules to {base} for ease of access in
modules outside of Core
*)
open! Core
open! Import
open! Int.Replace_polymorphic_compare
include Time_functor_intf
module Make
(Time0 : Time_float.S_kernel_without_zone)
(Time : Time_float.S_kernel with module Time := Time0) =
struct
  (* [Span] is Core's [Time.Span] plus a [Command.Arg_type] for parsing spans
     on the command line. *)
  module Span = struct
    include Time.Span

    (* Accepts whatever [Span.of_string] accepts. *)
    let arg_type = Core.Command.Arg_type.create of_string
  end
  (* [Zone] extends Core's [Time.Zone] with the Unix-dependent operations
     from [Timezone] (e.g. zones loaded from the filesystem) and a
     command-line argument parser. *)
  module Zone = struct
    include Time.Zone
    include (Timezone : Timezone.Extend_zone with type t := t)

    let arg_type = Core.Command.Arg_type.create of_string
  end
  (* [Ofday] extends Core's [Time.Ofday] with [now] (which needs [Time.now],
     hence lives here rather than in Core) and the [Zoned] submodule pairing
     an ofday with the zone it should be interpreted in. *)
  module Ofday = struct
    include Time.Ofday

    let arg_type = Core.Command.Arg_type.create of_string

    (* Current wall-clock time-of-day in [zone]. *)
    let now ~zone = Time.to_ofday ~zone (Time.now ())

    module Zoned = struct
      (* An ofday together with the zone in which it should be read. *)
      type t =
        { ofday : Time.Ofday.t
        ; zone : Zone.t
        }
      [@@deriving bin_io, fields, compare, equal, hash]

      (* Sexps are rendered as a pair rather than a record. *)
      type sexp_repr = Time.Ofday.t * Zone.t [@@deriving sexp]

      let sexp_of_t t = [%sexp_of: sexp_repr] (t.ofday, t.zone)

      let t_of_sexp sexp =
        let ofday, zone = [%of_sexp: sexp_repr] sexp in
        { ofday; zone }
      ;;

      (* Absolute time at which [t]'s ofday occurs on [date], in [t]'s zone. *)
      let to_time t date = Time.of_date_ofday ~zone:(zone t) date (ofday t)
      let create ofday zone = { ofday; zone }
      let create_local ofday = create ofday (Lazy.force Zone.local)

      (* Parses "<ofday> <zone>", e.g. "09:30 America/New_York". *)
      let of_string string : t =
        match String.split string ~on:' ' with
        | [ ofday; zone ] ->
          { ofday = Time.Ofday.of_string ofday; zone = Zone.of_string zone }
        | _ -> failwithf "Ofday.Zoned.of_string %s" string ()
      ;;

      let to_string (t : t) : string =
        String.concat [ Time.Ofday.to_string t.ofday; " "; Zone.to_string t.zone ]
      ;;

      let to_string_trimmed (t : t) : string =
        String.concat [ Time.Ofday.to_string_trimmed t.ofday; " "; Zone.to_string t.zone ]
      ;;

      let arg_type = Core.Command.Arg_type.create of_string

      (* Same type with structurally derived comparison (field by field),
         i.e. not chronological. *)
      module With_nonchronological_compare = struct
        type nonrec t = t [@@deriving bin_io, compare, equal, sexp, hash]
      end

      include Pretty_printer.Register (struct
          type nonrec t = t

          let to_string = to_string
          let module_name = "Time_float_unix.Ofday.Zoned"
        end)
    end
  end
  (* Re-export everything from [Time] except the submodules wrapped above. *)
  include (
    Time :
      module type of Time
      with module Zone := Time.Zone
      and module Ofday := Time.Ofday
      and module Span := Time.Span)
  (* Build a time from a Unix [tm] struct, interpreting its fields in [zone]. *)
  let of_tm tm ~zone =
    (* Explicitly ignoring isdst, wday, yday (they are redundant with the other fields
       and the [zone] argument) *)
    let { Unix.tm_year
        ; tm_mon
        ; tm_mday
        ; tm_hour
        ; tm_min
        ; tm_sec
        ; tm_isdst = _
        ; tm_wday = _
        ; tm_yday = _
        }
      =
      tm
    in
    (* [tm_year] counts years since 1900 and [tm_mon] is 0-based. *)
    let date =
      Date.create_exn ~y:(tm_year + 1900) ~m:(Month.of_int_exn (tm_mon + 1)) ~d:tm_mday
    in
    let ofday = Ofday.create ~hr:tm_hour ~min:tm_min ~sec:tm_sec () in
    of_date_ofday ~zone date ofday
  ;;
let of_date_ofday_zoned date ofday_zoned = Ofday.Zoned.to_time ofday_zoned date
let to_date_ofday_zoned t ~zone =
let date, ofday = to_date_ofday t ~zone in
date, Ofday.Zoned.create ofday zone
;;
let to_ofday_zoned t ~zone =
let ofday = to_ofday t ~zone in
Ofday.Zoned.create ofday zone
;;
  (* Parse the fix-protocol wire format "YYYYMMDD-HH:MM:SS.sss", interpreted
     in UTC or the local zone according to [utc].  Any failure is re-raised
     as [Invalid_argument]. *)
  let of_string_fix_proto utc str =
    try
      (* Expected shape: 8 date chars, a dash, then up to 12 ofday chars. *)
      let expect_length = 21 in
      (* = 8 + 1 + 12 *)
      let expect_dash = 8 in
      if Char.( <> ) str.[expect_dash] '-'
      then failwithf "no dash in position %d" expect_dash ();
      let zone =
        match utc with
        | `Utc -> Zone.utc
        | `Local -> Lazy.force Zone.local
      in
      if Int.( > ) (String.length str) expect_length then failwithf "input too long" ();
      of_date_ofday
        ~zone
        (Date.of_string_iso8601_basic str ~pos:0)
        (Ofday.of_string_iso8601_extended str ~pos:(expect_dash + 1))
    with
    | exn -> invalid_argf "Time.of_string_fix_proto %s: %s" str (Exn.to_string exn) ()
  ;;
  (* Inverse of [of_string_fix_proto]: renders "YYYYMMDD-HH:MM:SS.sss" with
     millisecond precision, in UTC or local time per [utc]. *)
  let to_string_fix_proto utc t =
    let zone =
      match utc with
      | `Utc -> Zone.utc
      | `Local -> Lazy.force Zone.local
    in
    let date, sec = to_date_ofday t ~zone in
    Date.to_string_iso8601_basic date ^ "-" ^ Ofday.to_millisecond_string sec
  ;;
  (* strftime-style rendering of [t] as seen in [zone].  The shift into
     [zone] is done here and the result fed to [Unix.gmtime], so
     NOTE(review): zone-name format directives (e.g. %Z) will reflect UTC,
     not [zone] -- confirm with callers. *)
  let format t s ~zone =
    let epoch_time =
      Zone.date_and_ofday_of_absolute_time zone t
      |> Date_and_ofday.to_synthetic_span_since_epoch
      |> Span.to_sec
    in
    Unix.strftime (Unix.gmtime epoch_time) s
  ;;
  (* strptime-style parsing; the broken-down time is interpreted in [zone]. *)
  let parse ?allow_trailing_input s ~fmt ~zone =
    Unix.strptime ?allow_trailing_input ~fmt s |> of_tm ~zone
  ;;
  (* Sleep for [span] (capped at 100 days); returns [`Ok] if the full time
     elapsed, or [`Remaining s] if the sleep was interrupted (e.g. by a
     signal) with [s] still to go. *)
  let pause_for span =
    let time_remaining =
      (* If too large a float is passed in (Span.max_value for instance) then
         nanosleep will return immediately, leading to an infinite and expensive
         select loop.  This is handled by pausing for no longer than 100 days. *)
      let span = Span.min span (Span.scale Span.day 100.) in
      Unix.nanosleep (Span.to_sec span)
    in
    if Float.( > ) time_remaining 0.0
    then `Remaining (Span.of_sec time_remaining)
    else `Ok
  ;;
  (** Pause and don't allow events to interrupt. *)
  let rec pause span =
    match pause_for span with
    | `Remaining span -> pause span
    | `Ok -> ()
  ;;

  (** Pause but allow events to interrupt. *)
  let interruptible_pause = pause_for

  (* Sleep forever, one day at a time. *)
  let rec pause_forever () =
    pause (Span.of_day 1.0);
    pause_forever ()
  ;;
let to_string t = to_string_abs t ~zone:(Lazy.force Zone.local)
let ensure_colon_in_offset offset =
if Char.( = ) offset.[1] ':' || Char.( = ) offset.[2] ':'
then offset
else (
let offset_length = String.length offset in
if Int.( < ) offset_length 3 || Int.( > ) offset_length 4
then failwithf "invalid offset %s" offset ()
else
String.concat
[ String.slice offset 0 (offset_length - 2)
; ":"
; String.slice offset (offset_length - 2) offset_length
])
;;
  (* Raised by [of_string_gen ~if_no_timezone:`Fail] when the input string
     carries no explicit timezone. *)
  exception Time_string_not_absolute of string [@@deriving sexp]

  (* Parse a time string; [if_no_timezone] chooses what to do when the
     string has no zone suffix: fail, assume the local zone, or use a
     supplied zone. *)
  let of_string_gen ~if_no_timezone s =
    let default_zone () =
      match if_no_timezone with
      | `Fail -> raise (Time_string_not_absolute s)
      | `Local -> Lazy.force Zone.local
      | `Use_this_one zone -> zone
    in
    of_string_gen ~default_zone ~find_zone:Zone.find_exn s
  ;;

  (* Requires an explicit timezone in the input. *)
  let of_string_abs s = of_string_gen ~if_no_timezone:`Fail s

  (* Falls back to the local timezone when the input has none. *)
  let of_string s = of_string_gen ~if_no_timezone:`Local s

  let arg_type = Core.Command.Arg_type.create of_string_abs

  include Pretty_printer.Register (struct
      type nonrec t = t

      let to_string = to_string
      let module_name = "Time_float_unix"
    end)

  (* Zone used when reading/writing sexps of [t]; mutable, process-global. *)
  let sexp_zone = ref Zone.local
  let get_sexp_zone () = Lazy.force !sexp_zone
  let set_sexp_zone zone = sexp_zone := lazy zone
  (* Accepts three sexp shapes: (date ofday zone), (date ofday-with-zone),
     or a single atom.  Parse errors are re-raised as [Of_sexp_error]. *)
  let t_of_sexp_gen ~if_no_timezone sexp =
    try
      match sexp with
      | Sexp.List [ Sexp.Atom date; Sexp.Atom ofday; Sexp.Atom tz ] ->
        of_date_ofday
          ~zone:(Zone.find_exn tz)
          (Date.of_string date)
          (Ofday.of_string ofday)
      (* This is actually where the output of [sexp_of_t] is handled, since
         that's e.g. (2015-07-06 09:09:44.787988+01:00). *)
      | Sexp.List [ Sexp.Atom date; Sexp.Atom ofday_and_possibly_zone ] ->
        of_string_gen ~if_no_timezone (date ^ " " ^ ofday_and_possibly_zone)
      | Sexp.Atom datetime -> of_string_gen ~if_no_timezone datetime
      | _ -> of_sexp_error "Time.t_of_sexp" sexp
    with
    | Of_sexp_error _ as e -> raise e
    | e -> of_sexp_error (sprintf "Time.t_of_sexp: %s" (Exn.to_string e)) sexp
  ;;
  (* Reads sexps in the zone configured by [set_sexp_zone]. *)
  let t_of_sexp sexp =
    t_of_sexp_gen sexp ~if_no_timezone:(`Use_this_one (Lazy.force !sexp_zone))
  ;;

  (* Grammar covering the three shapes accepted by [t_of_sexp_gen]. *)
  let t_sexp_grammar : t Sexplib.Sexp_grammar.t =
    { untyped =
        Union
          [ String
          ; List (Cons (String, Cons (String, Empty)))
          ; List (Cons (String, Cons (String, Cons (String, Empty))))
          ]
    }
  ;;

  (* Like [t_of_sexp] but requires an explicit timezone in the sexp. *)
  let t_of_sexp_abs sexp = t_of_sexp_gen sexp ~if_no_timezone:`Fail

  (* Writes e.g. (2015-07-06 09:09:44.787988+01:00) relative to [zone]. *)
  let sexp_of_t_abs t ~zone =
    Sexp.List (List.map (Time.to_string_abs_parts ~zone t) ~f:(fun s -> Sexp.Atom s))
  ;;

  let sexp_of_t t = sexp_of_t_abs ~zone:(Lazy.force !sexp_zone) t
  (* Signature of the comparable Map/Set bundle built by [make_comparable]. *)
  module type C =
    Comparable.Map_and_set_binable
    with type t := t
    and type comparator_witness := comparator_witness

  (* Build Map/Set modules over [t], optionally overriding the sexp
     conversions (used below to keep accepting the legacy raw-float
     format). *)
  let make_comparable ?(sexp_of_t = sexp_of_t) ?(t_of_sexp = t_of_sexp) () : (module C) =
    (module struct
      module C = struct
        type nonrec t = t [@@deriving bin_io]
        type nonrec comparator_witness = comparator_witness

        let comparator = comparator
        let sexp_of_t = sexp_of_t
        let t_of_sexp = t_of_sexp
      end

      include C
      module Map = Map.Make_binable_using_comparator (C)
      module Set = Set.Make_binable_using_comparator (C)
    end)
  ;;
  (* In 108.06a and earlier, times in sexps of Maps and Sets were raw floats.  From
     108.07 through 109.13, the output format remained raw as before, but both the raw
     and pretty format were accepted as input.  From 109.14 on, the output format was
     changed from raw to pretty, while continuing to accept both formats.  Once we
     believe most programs are beyond 109.14, we will switch the input format to no
     longer accept raw. *)
  (* Accept both the legacy raw-float sexp format and the pretty format when
     reading times inside Maps/Sets (see comment above). *)
  include
    (val make_comparable () ~t_of_sexp:(fun sexp ->
           match
             Option.try_with (fun () ->
               of_span_since_epoch (Span.of_sec (Float.t_of_sexp sexp)))
           with
           | Some t -> t
           | None -> t_of_sexp sexp))
  (* The raw-float (seconds-since-epoch) set element sexp still parses. *)
  let%test _ =
    Set.equal
      (Set.of_list [ epoch ])
      (Set.t_of_sexp
         (Sexp.List [ Float.sexp_of_t (Span.to_sec (to_span_since_epoch epoch)) ]))
  ;;
  include Hashable.Make_binable (struct
      type nonrec t = t [@@deriving bin_io, compare, hash, sexp]
    end)

  (* Internal helpers re-exported for unit tests only. *)
  module Exposed_for_tests = struct
    let ensure_colon_in_offset = ensure_colon_in_offset
  end
end
| null | https://raw.githubusercontent.com/janestreet/core_unix/abfad608bb4ab04d16478a081cc284a88c3b3184/time_float_unix/src/time_functor.ml | ocaml | Explicitly ignoring isdst, wday, yday (they are redundant with the other fields
and the [zone] argument)
= 8 + 1 + 12
* Pause and don't allow events to interrupt.
* Pause but allow events to interrupt. | *
Outside of Core Time appears to be a single module with a number of submodules :
- Time
- Span
- Ofday
- Zone
The reality under the covers is n't as simple for a three reasons :
- We want as much Time functionality available to Core as possible , and Core modules
should n't rely on Unix functions . Some functions in Time require Unix , which creates
one split .
- We want some functionality to be functorized so that code can be shared
between Time and Time_ns .
- Time has internal circular dependencies . For instance , Ofday.now relies on
Time.now , but Time also wants to expose Time.to_date_ofday , which relies on Ofday .
We use a stack of modules to break the cycle .
This leads to the following modules within Core :
Core . Span - the core type of span
Core . Ofday - the core type of ofday , which is really a constrained span
Core . Date - the core type of date
Core . Zone - the base functor for creating a Zone type
Core . Time_float0 - contains the base Time.t type and lays out the basic
relationship between Time , Span , Ofday , and Zone
Core . Time_float - ties Time , Span , Ofday , Zone , and Date together and provides
the higher level functions for them that do n't rely on Unix
Core . Time - re - exposes Time_float
Core . Zone_cache - implements a caching layer between the Unix filesystem and Zones
Core . Core_date - adds the Unix dependent functions to Date
Core . Core_time - adds the Unix dependent functions to Time
Core - renames the Core_{base } modules to { base } for ease of access in
modules outside of Core
Outside of Core Time appears to be a single module with a number of submodules:
- Time
- Span
- Ofday
- Zone
The reality under the covers isn't as simple for a three reasons:
- We want as much Time functionality available to Core as possible, and Core modules
shouldn't rely on Unix functions. Some functions in Time require Unix, which creates
one split.
- We want some functionality to be functorized so that code can be shared
between Time and Time_ns.
- Time has internal circular dependencies. For instance, Ofday.now relies on
Time.now, but Time also wants to expose Time.to_date_ofday, which relies on Ofday.
We use a stack of modules to break the cycle.
This leads to the following modules within Core:
Core.Span - the core type of span
Core.Ofday - the core type of ofday, which is really a constrained span
Core.Date - the core type of date
Core.Zone - the base functor for creating a Zone type
Core.Time_float0 - contains the base Time.t type and lays out the basic
relationship between Time, Span, Ofday, and Zone
Core.Time_float - ties Time, Span, Ofday, Zone, and Date together and provides
the higher level functions for them that don't rely on Unix
Core.Time - re-exposes Time_float
Core.Zone_cache - implements a caching layer between the Unix filesystem and Zones
Core.Core_date - adds the Unix dependent functions to Date
Core.Core_time - adds the Unix dependent functions to Time
Core - renames the Core_{base} modules to {base} for ease of access in
modules outside of Core
*)
open! Core
open! Import
open! Int.Replace_polymorphic_compare
include Time_functor_intf
module Make
(Time0 : Time_float.S_kernel_without_zone)
(Time : Time_float.S_kernel with module Time := Time0) =
struct
module Span = struct
include Time.Span
let arg_type = Core.Command.Arg_type.create of_string
end
module Zone = struct
include Time.Zone
include (Timezone : Timezone.Extend_zone with type t := t)
let arg_type = Core.Command.Arg_type.create of_string
end
module Ofday = struct
include Time.Ofday
let arg_type = Core.Command.Arg_type.create of_string
let now ~zone = Time.to_ofday ~zone (Time.now ())
module Zoned = struct
type t =
{ ofday : Time.Ofday.t
; zone : Zone.t
}
[@@deriving bin_io, fields, compare, equal, hash]
type sexp_repr = Time.Ofday.t * Zone.t [@@deriving sexp]
let sexp_of_t t = [%sexp_of: sexp_repr] (t.ofday, t.zone)
let t_of_sexp sexp =
let ofday, zone = [%of_sexp: sexp_repr] sexp in
{ ofday; zone }
;;
let to_time t date = Time.of_date_ofday ~zone:(zone t) date (ofday t)
let create ofday zone = { ofday; zone }
let create_local ofday = create ofday (Lazy.force Zone.local)
let of_string string : t =
match String.split string ~on:' ' with
| [ ofday; zone ] ->
{ ofday = Time.Ofday.of_string ofday; zone = Zone.of_string zone }
| _ -> failwithf "Ofday.Zoned.of_string %s" string ()
;;
let to_string (t : t) : string =
String.concat [ Time.Ofday.to_string t.ofday; " "; Zone.to_string t.zone ]
;;
let to_string_trimmed (t : t) : string =
String.concat [ Time.Ofday.to_string_trimmed t.ofday; " "; Zone.to_string t.zone ]
;;
let arg_type = Core.Command.Arg_type.create of_string
module With_nonchronological_compare = struct
type nonrec t = t [@@deriving bin_io, compare, equal, sexp, hash]
end
include Pretty_printer.Register (struct
type nonrec t = t
let to_string = to_string
let module_name = "Time_float_unix.Ofday.Zoned"
end)
end
end
include (
Time :
module type of Time
with module Zone := Time.Zone
and module Ofday := Time.Ofday
and module Span := Time.Span)
let of_tm tm ~zone =
let { Unix.tm_year
; tm_mon
; tm_mday
; tm_hour
; tm_min
; tm_sec
; tm_isdst = _
; tm_wday = _
; tm_yday = _
}
=
tm
in
let date =
Date.create_exn ~y:(tm_year + 1900) ~m:(Month.of_int_exn (tm_mon + 1)) ~d:tm_mday
in
let ofday = Ofday.create ~hr:tm_hour ~min:tm_min ~sec:tm_sec () in
of_date_ofday ~zone date ofday
;;
let of_date_ofday_zoned date ofday_zoned = Ofday.Zoned.to_time ofday_zoned date
let to_date_ofday_zoned t ~zone =
let date, ofday = to_date_ofday t ~zone in
date, Ofday.Zoned.create ofday zone
;;
let to_ofday_zoned t ~zone =
let ofday = to_ofday t ~zone in
Ofday.Zoned.create ofday zone
;;
let of_string_fix_proto utc str =
try
let expect_length = 21 in
let expect_dash = 8 in
if Char.( <> ) str.[expect_dash] '-'
then failwithf "no dash in position %d" expect_dash ();
let zone =
match utc with
| `Utc -> Zone.utc
| `Local -> Lazy.force Zone.local
in
if Int.( > ) (String.length str) expect_length then failwithf "input too long" ();
of_date_ofday
~zone
(Date.of_string_iso8601_basic str ~pos:0)
(Ofday.of_string_iso8601_extended str ~pos:(expect_dash + 1))
with
| exn -> invalid_argf "Time.of_string_fix_proto %s: %s" str (Exn.to_string exn) ()
;;
let to_string_fix_proto utc t =
let zone =
match utc with
| `Utc -> Zone.utc
| `Local -> Lazy.force Zone.local
in
let date, sec = to_date_ofday t ~zone in
Date.to_string_iso8601_basic date ^ "-" ^ Ofday.to_millisecond_string sec
;;
let format t s ~zone =
let epoch_time =
Zone.date_and_ofday_of_absolute_time zone t
|> Date_and_ofday.to_synthetic_span_since_epoch
|> Span.to_sec
in
Unix.strftime (Unix.gmtime epoch_time) s
;;
let parse ?allow_trailing_input s ~fmt ~zone =
Unix.strptime ?allow_trailing_input ~fmt s |> of_tm ~zone
;;
let pause_for span =
let time_remaining =
If too large a float is passed in ( Span.max_value for instance ) then
will return immediately , leading to an infinite and expensive
select loop . This is handled by pausing for no longer than 100 days .
nanosleep will return immediately, leading to an infinite and expensive
select loop. This is handled by pausing for no longer than 100 days.
*)
let span = Span.min span (Span.scale Span.day 100.) in
Unix.nanosleep (Span.to_sec span)
in
if Float.( > ) time_remaining 0.0
then `Remaining (Span.of_sec time_remaining)
else `Ok
;;
let rec pause span =
match pause_for span with
| `Remaining span -> pause span
| `Ok -> ()
;;
let interruptible_pause = pause_for
let rec pause_forever () =
pause (Span.of_day 1.0);
pause_forever ()
;;
let to_string t = to_string_abs t ~zone:(Lazy.force Zone.local)
let ensure_colon_in_offset offset =
if Char.( = ) offset.[1] ':' || Char.( = ) offset.[2] ':'
then offset
else (
let offset_length = String.length offset in
if Int.( < ) offset_length 3 || Int.( > ) offset_length 4
then failwithf "invalid offset %s" offset ()
else
String.concat
[ String.slice offset 0 (offset_length - 2)
; ":"
; String.slice offset (offset_length - 2) offset_length
])
;;
exception Time_string_not_absolute of string [@@deriving sexp]
let of_string_gen ~if_no_timezone s =
let default_zone () =
match if_no_timezone with
| `Fail -> raise (Time_string_not_absolute s)
| `Local -> Lazy.force Zone.local
| `Use_this_one zone -> zone
in
of_string_gen ~default_zone ~find_zone:Zone.find_exn s
;;
let of_string_abs s = of_string_gen ~if_no_timezone:`Fail s
let of_string s = of_string_gen ~if_no_timezone:`Local s
let arg_type = Core.Command.Arg_type.create of_string_abs
include Pretty_printer.Register (struct
type nonrec t = t
let to_string = to_string
let module_name = "Time_float_unix"
end)
let sexp_zone = ref Zone.local
let get_sexp_zone () = Lazy.force !sexp_zone
let set_sexp_zone zone = sexp_zone := lazy zone
let t_of_sexp_gen ~if_no_timezone sexp =
try
match sexp with
| Sexp.List [ Sexp.Atom date; Sexp.Atom ofday; Sexp.Atom tz ] ->
of_date_ofday
~zone:(Zone.find_exn tz)
(Date.of_string date)
(Ofday.of_string ofday)
This is actually where the output of [ sexp_of_t ] is handled , since that 's e.g.
( 2015 - 07 - 06 09:09:44.787988 + 01:00 ) .
(2015-07-06 09:09:44.787988+01:00). *)
| Sexp.List [ Sexp.Atom date; Sexp.Atom ofday_and_possibly_zone ] ->
of_string_gen ~if_no_timezone (date ^ " " ^ ofday_and_possibly_zone)
| Sexp.Atom datetime -> of_string_gen ~if_no_timezone datetime
| _ -> of_sexp_error "Time.t_of_sexp" sexp
with
| Of_sexp_error _ as e -> raise e
| e -> of_sexp_error (sprintf "Time.t_of_sexp: %s" (Exn.to_string e)) sexp
;;
let t_of_sexp sexp =
t_of_sexp_gen sexp ~if_no_timezone:(`Use_this_one (Lazy.force !sexp_zone))
;;
let t_sexp_grammar : t Sexplib.Sexp_grammar.t =
{ untyped =
Union
[ String
; List (Cons (String, Cons (String, Empty)))
; List (Cons (String, Cons (String, Cons (String, Empty))))
]
}
;;
let t_of_sexp_abs sexp = t_of_sexp_gen sexp ~if_no_timezone:`Fail
let sexp_of_t_abs t ~zone =
Sexp.List (List.map (Time.to_string_abs_parts ~zone t) ~f:(fun s -> Sexp.Atom s))
;;
let sexp_of_t t = sexp_of_t_abs ~zone:(Lazy.force !sexp_zone) t
module type C =
Comparable.Map_and_set_binable
with type t := t
and type comparator_witness := comparator_witness
let make_comparable ?(sexp_of_t = sexp_of_t) ?(t_of_sexp = t_of_sexp) () : (module C) =
(module struct
module C = struct
type nonrec t = t [@@deriving bin_io]
type nonrec comparator_witness = comparator_witness
let comparator = comparator
let sexp_of_t = sexp_of_t
let t_of_sexp = t_of_sexp
end
include C
module Map = Map.Make_binable_using_comparator (C)
module Set = Set.Make_binable_using_comparator (C)
end)
;;
In 108.06a and earlier , times in sexps of Maps and Sets were raw floats . From
108.07 through 109.13 , the output format remained raw as before , but both the raw
and pretty format were accepted as input . From 109.14 on , the output format was
changed from raw to pretty , while continuing to accept both formats . Once we
believe most programs are beyond 109.14 , we will switch the input format to no
longer accept raw .
108.07 through 109.13, the output format remained raw as before, but both the raw
and pretty format were accepted as input. From 109.14 on, the output format was
changed from raw to pretty, while continuing to accept both formats. Once we
believe most programs are beyond 109.14, we will switch the input format to no
longer accept raw. *)
include
(val make_comparable () ~t_of_sexp:(fun sexp ->
match
Option.try_with (fun () ->
of_span_since_epoch (Span.of_sec (Float.t_of_sexp sexp)))
with
| Some t -> t
| None -> t_of_sexp sexp))
let%test _ =
Set.equal
(Set.of_list [ epoch ])
(Set.t_of_sexp
(Sexp.List [ Float.sexp_of_t (Span.to_sec (to_span_since_epoch epoch)) ]))
;;
include Hashable.Make_binable (struct
type nonrec t = t [@@deriving bin_io, compare, hash, sexp]
end)
module Exposed_for_tests = struct
let ensure_colon_in_offset = ensure_colon_in_offset
end
end
|
2f1b60f6ff426959d78e306be9f2e40b3d6e6113c5dac2184e959f84e358be4d | sgbj/MaximaSharp | test_readbase_lisp.lisp | (defun $test_readbase_lisp () '((mlist) 1 2 3 4 10 20 30 40))
| null | https://raw.githubusercontent.com/sgbj/MaximaSharp/75067d7e045b9ed50883b5eb09803b4c8f391059/Test/bin/Debug/Maxima-5.30.0/share/maxima/5.30.0/tests/test_readbase_lisp.lisp | lisp | (defun $test_readbase_lisp () '((mlist) 1 2 3 4 10 20 30 40))
| |
16161e673bc927dfcd8563d2f004d95203d79440b9fef7585b56b7a64432bc80 | ekmett/ekmett.github.com | Lift.hs | # OPTIONS_GHC -cpp - undecidable - instances #
-------------------------------------------------------------------------------------------
-- |
-- Module : Control.Functor.Combinators.Lift
Copyright : 2008
-- License : BSD
--
Maintainer : < >
-- Stability : experimental
-- Portability : non-portable (functional-dependencies)
--
-- transform a pair of functors with a bifunctor deriving a new functor.
-- this subsumes functor product and functor coproduct
-------------------------------------------------------------------------------------------
module Control.Functor.Combinators.Lift
( Lift(Lift,runLift)
, (:*:), runProductF
, (:+:), runCoproductF
, Ap, runAp, mkAp
) where
import Control.Applicative
import Control.Category.Hask
import Control.Functor
import Control.Functor.Contra
import Control.Functor.Exponential
import Control.Functor.Full
import Control.Functor.HigherOrder
import Control.Monad.Identity
import Control.Functor.Pointed
import Control.Arrow ((&&&),(|||))
-- * Bifunctor functor transformer
type - level LiftA2
newtype Lift p f g a = Lift { runLift :: p (f a) (g a) }
type Ap p = Lift p Identity
runAp :: Bifunctor p Hask Hask Hask => Ap p f a -> p a (f a)
runAp = first runIdentity . runLift
mkAp :: Bifunctor p Hask Hask Hask => p a (f a) -> Ap p f a
mkAp = Lift . first Identity
instance (Bifunctor p Hask Hask Hask, Functor f ,Functor g) => Functor (Lift p f g) where
fmap f = Lift . bimap (fmap f) (fmap f) . runLift
instance (Bifunctor p Hask Hask Hask, ContraFunctor f ,ContraFunctor g) => ContraFunctor (Lift p f g) where
contramap f = Lift . bimap (contramap f) (contramap f) . runLift
instance (Bifunctor p Hask Hask Hask, ExpFunctor f ,ExpFunctor g) => ExpFunctor (Lift p f g) where
xmap f g = Lift . bimap (xmap f g) (xmap f g) . runLift
instance (Bifunctor p Hask Hask Hask) => HFunctor (Ap p) where
ffmap f = Lift . bimap (fmap f) (fmap f) . runLift
hfmap f = Lift . second f . runLift
type (f :*: g) = Lift (,) f g
runProductF :: (f :*: g) a -> (f a, g a)
runProductF = runLift
instance (Pointed f, Pointed g) => Pointed (f :*: g) where
point = Lift . (point &&& point)
instance (Applicative f, Applicative g) => Applicative (f :*: g) where
pure b = Lift (pure b, pure b)
Lift (f,g) <*> Lift (a,b) = Lift (f <*> a, g <*> b)
instance (Faithful f, Faithful g) => Faithful (f :*: g)
type (f :+: g) = Lift Either f g
runCoproductF :: (f :+: g) a -> Either (f a) (g a)
runCoproductF = runLift
instance (Copointed f, Copointed g) => Copointed (f :+: g) where
extract = (extract ||| extract) . runLift
| null | https://raw.githubusercontent.com/ekmett/ekmett.github.com/8d3abab5b66db631e148e1d046d18909bece5893/haskell/category-extras/_darcs/pristine/src/Control/Functor/Combinators/Lift.hs | haskell | -----------------------------------------------------------------------------------------
|
Module : Control.Functor.Combinators.Lift
License : BSD
Stability : experimental
Portability : non-portable (functional-dependencies)
transform a pair of functors with a bifunctor deriving a new functor.
this subsumes functor product and functor coproduct
-----------------------------------------------------------------------------------------
* Bifunctor functor transformer | # OPTIONS_GHC -cpp - undecidable - instances #
Copyright : 2008
Maintainer : < >
module Control.Functor.Combinators.Lift
( Lift(Lift,runLift)
, (:*:), runProductF
, (:+:), runCoproductF
, Ap, runAp, mkAp
) where
import Control.Applicative
import Control.Category.Hask
import Control.Functor
import Control.Functor.Contra
import Control.Functor.Exponential
import Control.Functor.Full
import Control.Functor.HigherOrder
import Control.Monad.Identity
import Control.Functor.Pointed
import Control.Arrow ((&&&),(|||))
type - level LiftA2
newtype Lift p f g a = Lift { runLift :: p (f a) (g a) }
type Ap p = Lift p Identity
runAp :: Bifunctor p Hask Hask Hask => Ap p f a -> p a (f a)
runAp = first runIdentity . runLift
mkAp :: Bifunctor p Hask Hask Hask => p a (f a) -> Ap p f a
mkAp = Lift . first Identity
instance (Bifunctor p Hask Hask Hask, Functor f ,Functor g) => Functor (Lift p f g) where
fmap f = Lift . bimap (fmap f) (fmap f) . runLift
instance (Bifunctor p Hask Hask Hask, ContraFunctor f ,ContraFunctor g) => ContraFunctor (Lift p f g) where
contramap f = Lift . bimap (contramap f) (contramap f) . runLift
instance (Bifunctor p Hask Hask Hask, ExpFunctor f ,ExpFunctor g) => ExpFunctor (Lift p f g) where
xmap f g = Lift . bimap (xmap f g) (xmap f g) . runLift
instance (Bifunctor p Hask Hask Hask) => HFunctor (Ap p) where
ffmap f = Lift . bimap (fmap f) (fmap f) . runLift
hfmap f = Lift . second f . runLift
type (f :*: g) = Lift (,) f g
runProductF :: (f :*: g) a -> (f a, g a)
runProductF = runLift
instance (Pointed f, Pointed g) => Pointed (f :*: g) where
point = Lift . (point &&& point)
instance (Applicative f, Applicative g) => Applicative (f :*: g) where
pure b = Lift (pure b, pure b)
Lift (f,g) <*> Lift (a,b) = Lift (f <*> a, g <*> b)
instance (Faithful f, Faithful g) => Faithful (f :*: g)
type (f :+: g) = Lift Either f g
runCoproductF :: (f :+: g) a -> Either (f a) (g a)
runCoproductF = runLift
instance (Copointed f, Copointed g) => Copointed (f :+: g) where
extract = (extract ||| extract) . runLift
|
3f73fd8c68360fdae24f350578a4a5479fdc19569a9f8709a25759f920a0fd5a | nasa/Common-Metadata-Repository | echo10.clj | (ns cmr.ingest.services.granule-bulk-update.utils.echo10
"Contains functions for updating ECHO10 granule xml metadata."
(:require
[clojure.data.xml :as xml]
[clojure.zip :as zip]
[cmr.common.xml :as cx]))
(def ^:private echo10-main-schema-elements
  "The main ECHO10 Granule xml schema element tags, in schema order (from
   :GranuleUR through :AssociatedBrowseImageUrls). get-rest-echo10-elements
   slices this list to find which tags must follow a given element."
  [:GranuleUR :InsertTime :LastUpdate :DeleteTime :Collection :RestrictionFlag :RestrictionComment :DataGranule
   :PGEVersionClass :Temporal :Spatial :OrbitCalculatedSpatialDomains :MeasuredParameters :Platforms :Campaigns
   :AdditionalAttributes :InputGranules :TwoDCoordinateSystem :Price :OnlineAccessURLs :OnlineResources :Orderable
   :DataFormat :Visible :CloudCover :MetadataStandardName :MetadataStandardVersion :AssociatedBrowseImages :AssociatedBrowseImageUrls])
(defn- get-rest-echo10-elements
  "Return the schema element tags that follow `element` in
   echo10-main-schema-elements, in schema order. Returns an empty seq when
   `element` is not in the list. (The previous hand-rolled loop recurred
   forever on an exhausted list when the element was absent.)"
  [element]
  (rest (drop-while #(not= % element) echo10-main-schema-elements)))
(defn links->online-resources
  "Builds an OnlineResource XML element for each of the given links,
   emitting URL/Description/Type/MimeType children only when present."
  [links]
  (for [{:keys [URL Type Description MimeType]} links]
    (xml/element :OnlineResource {}
                 (when URL (xml/element :URL {} URL))
                 (when Description (xml/element :Description {} Description))
                 (when Type (xml/element :Type {} Type))
                 (when MimeType (xml/element :MimeType {} MimeType)))))
(defn xml-elem->online-resource
  "Parses an OnlineResource XML element into a map with :url, :description,
   :type and :mime-type keys. Missing children yield nil values."
  [elem]
  {:url         (cx/string-at-path elem [:URL])
   :description (cx/string-at-path elem [:Description])
   :type        (cx/string-at-path elem [:Type])
   :mime-type   (cx/string-at-path elem [:MimeType])})
(defn update-online-resources
  "Returns a new OnlineResources XML element built from `online-resources`,
   setting `value-field` to the value found in `value-map` under each
   resource's `locator-field` key. Resources without a match are kept as-is."
  [online-resources locator-field value-field value-map]
  (letfn [(updated [resource]
            (if-let [replacement (get value-map (get resource locator-field))]
              (assoc resource value-field replacement)
              resource))]
    (xml/element
     :OnlineResources {}
     (for [{:keys [url description type mime-type]}
           (map (comp updated xml-elem->online-resource) online-resources)]
       (xml/element :OnlineResource {}
                    (xml/element :URL {} url)
                    (when description (xml/element :Description {} description))
                    (xml/element :Type {} type)
                    (when mime-type (xml/element :MimeType {} mime-type)))))))
(defn links->online-access-urls
  "Builds an OnlineAccessUrl XML element for each of the given links,
   emitting URL/URLDescription/MimeType children only when present."
  [links]
  (for [{:keys [URL Description MimeType]} links]
    (xml/element :OnlineAccessUrl {}
                 (when URL (xml/element :URL {} URL))
                 (when Description (xml/element :URLDescription {} Description))
                 (when MimeType (xml/element :MimeType {} MimeType)))))
(defn- xml-elem->online-access-url
  "Parses an OnlineAccessURL XML element into a map with :url,
   :url-description and :mime-type keys. Missing children yield nil values."
  [elem]
  {:url             (cx/string-at-path elem [:URL])
   :url-description (cx/string-at-path elem [:URLDescription])
   :mime-type       (cx/string-at-path elem [:MimeType])})
(defn update-online-access-urls
  "Returns a new OnlineAccessURLs XML element built from `online-access-urls`,
   setting `value-field` to the value found in `value-map` under each
   entry's `locator-field` key. Entries without a match are kept as-is."
  [online-access-urls locator-field value-field value-map]
  (letfn [(updated [access-url]
            (if-let [replacement (get value-map (get access-url locator-field))]
              (assoc access-url value-field replacement)
              access-url))]
    (xml/element
     :OnlineAccessURLs {}
     (for [{:keys [url url-description mime-type]}
           (map (comp updated xml-elem->online-access-url) online-access-urls)]
       (xml/element :OnlineAccessURL {}
                    (xml/element :URL {} url)
                    (when url-description (xml/element :URLDescription {} url-description))
                    (when mime-type (xml/element :MimeType {} mime-type)))))))
(defn xml-elem->provider-browse
  "Parses a ProviderBrowseUrl XML element into a map with :url, :file-size,
   :description and :mime-type keys. Missing children yield nil values."
  [elem]
  {:url         (cx/string-at-path elem [:URL])
   :file-size   (cx/long-at-path elem [:FileSize])
   :description (cx/string-at-path elem [:Description])
   :mime-type   (cx/string-at-path elem [:MimeType])})
(defn links->provider-browse-urls
  "Builds a ProviderBrowseUrl XML element for each of the given links,
   emitting URL/FileSize/Description/MimeType children only when present."
  [links]
  (for [{:keys [URL Size Description MimeType]} links]
    (xml/element :ProviderBrowseUrl {}
                 (when URL (xml/element :URL {} URL))
                 (when Size (xml/element :FileSize {} Size))
                 (when Description (xml/element :Description {} Description))
                 (when MimeType (xml/element :MimeType {} MimeType)))))
(defn update-browse-image-urls
  "Returns a new AssociatedBrowseImageUrls XML element built from `urls`,
   setting `value-field` to the value found in `value-map` under each
   entry's `locator-field` key. Entries without a match are kept as-is."
  [urls locator-field value-field value-map]
  (letfn [(updated [browse-url]
            (if-let [replacement (get value-map (get browse-url locator-field))]
              (assoc browse-url value-field replacement)
              browse-url))]
    (xml/element
     :AssociatedBrowseImageUrls {}
     (for [{:keys [url file-size description mime-type]}
           (map (comp updated xml-elem->provider-browse) urls)]
       (xml/element :ProviderBrowseUrl {}
                    (xml/element :URL {} url)
                    (when file-size (xml/element :FileSize {} file-size))
                    (when description (xml/element :Description {} description))
                    (when mime-type (xml/element :MimeType {} mime-type)))))))
(defn replace-in-tree
  "Take a parsed granule xml, replace the first top-level child node whose
   tag matches element-tag with the provided replacement.
   Returns the root node of the updated xml tree (via zip/root)."
  [tree element-tag replacement]
  (let [zipper (zip/xml-zip tree)
        start-loc (zip/down zipper)]
    (loop [loc start-loc done false]
      (if done
        (zip/root loc)
        (if-let [right-loc (zip/right loc)]
          (cond
            ;; found the target element, swap in the replacement node
            (= element-tag (-> right-loc zip/node :tag))
            (recur (zip/replace right-loc replacement) true)
            ;; no action needs to be taken, move to the next sibling
            :else
            (recur right-loc false))
          ;; ran out of siblings without a match - return the tree unchanged
          (recur loc true))))))
(defn add-in-tree
  "Take a parsed granule xml, add the passed in items to the node at the passed in element.
   If the element exists, place the items at the end of the element's children. If the
   element does not exist, create it in schema order: just before the first existing
   element that must follow it, or at the end of the document.
   Returns the root node of the updated xml tree (via zip/root)."
  [tree element items]
  (let [zipper (zip/xml-zip tree)
        start-loc (-> zipper zip/down)
        rest-of-echo10-elements (seq (get-rest-echo10-elements element))]
    (loop [loc start-loc done false]
      (if done
        (zip/root loc)
        (if-let [right-loc (zip/right loc)]
          (cond
            ;; at the passed in element, append the new items to its children
            (= element (-> right-loc zip/node :tag))
            (recur (zip/append-child right-loc items) true)
            ;; at an element that must come after the passed in element in the
            ;; schema - create the new element just before it
            (some #{(-> right-loc zip/node :tag)} rest-of-echo10-elements)
            (recur (zip/insert-left right-loc (xml/element element {} items)) true)
            ;; no action needs to be taken, move to the next sibling
            :else
            (recur right-loc false))
          ;; at the end of the document, create the element as the last child
          (recur (zip/insert-right loc (xml/element element {} items)) true))))))
(defn- compare-to-remove-url
  "Returns `child` unless its parsed :url matches the :URL of one of
   `urls-to-remove`, in which case returns nil. A nil child yields nil."
  [child urls-to-remove]
  (when child
    (let [child-url (:url (xml-elem->online-resource child))]
      (when-not (some #(= child-url (:URL %)) urls-to-remove)
        child))))
(defn remove-from-tree
  "Take a parsed granule xml, remove the entries matching urls-to-remove from the
   node named by (first node-path-vector). If that node is left with no children
   it is removed entirely. Returns the root node of the updated xml tree."
  [tree node-path-vector urls-to-remove]
  (let [zipper (zip/xml-zip tree)
        element (first node-path-vector)]
    (loop [loc (-> zipper zip/down) done false]
      (if done
        (zip/root loc)
        (if-let [right-loc (zip/right loc)]
          (cond
            ;; when the passed in element is found, keep only the child URL
            ;; elements that do not match any of the urls to remove
            (= element (-> right-loc zip/node :tag))
            (let [children (remove nil?
                                   (map #(compare-to-remove-url % urls-to-remove)
                                        (zip/children right-loc)))]
              ;; if children remain then replace the node's children.
              ;; otherwise remove the node, as it is no longer needed.
              (if (seq children)
                (let [new-node (zip/make-node right-loc (zip/node right-loc) children)]
                  (recur (zip/replace right-loc new-node) true))
                (recur (zip/remove right-loc) true)))
            ;; no action needs to be taken, move to the next sibling
            :else
            (recur right-loc false))
          ;; at the end of the document - we are done.
          (recur loc true))))))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/39625dbee824a8d27644e60921e893fbb9282a2c/ingest-app/src/cmr/ingest/services/granule_bulk_update/utils/echo10.clj | clojure | at an OnlineResources element, replace the node with updated value
no action needs to be taken, move to the next node
at the passed in element, append to the node with the updated values
at an element after the passed in element add to the left
no action needs to be taken, move to the next node
at the end of the file, add to the right
when the passed in element is found, check each of the the urls to remove
against each child element. This builds a list of children URL elements to add back to this
node where the urls to remove don't exist, essentially removing them.
if children exist then replace the nodes children.
otherwise remove the node, as it is no longer needed.
no action needs to be taken, move to the next node
at the end of the file - we are done. | (ns cmr.ingest.services.granule-bulk-update.utils.echo10
"Contains functions for updating ECHO10 granule xml metadata."
(:require
[clojure.data.xml :as xml]
[clojure.zip :as zip]
[cmr.common.xml :as cx]))
(def ^:private echo10-main-schema-elements
"Defines the element tags that come after OnlineAccessURLs in ECHO10 Granule xml schema"
[:GranuleUR :InsertTime :LastUpdate :DeleteTime :Collection :RestrictionFlag :RestrictionComment :DataGranule
:PGEVersionClass :Temporal :Spatial :OrbitCalculatedSpatialDomains :MeasuredParameters :Platforms :Campaigns
:AdditionalAttributes :InputGranules :TwoDCoordinateSystem :Price :OnlineAccessURLs :OnlineResources :Orderable
:DataFormat :Visible :CloudCover :MetadataStandardName :MetadataStandardVersion :AssociatedBrowseImages :AssociatedBrowseImageUrls])
(defn- get-rest-echo10-elements
"Go through the list of echo 10 elements and return all of the elements after the
the passed in element."
[element]
(loop [elem (first echo10-main-schema-elements)
left-over-list (rest echo10-main-schema-elements)]
(cond
(= elem element) left-over-list
:else (recur (first left-over-list) (rest left-over-list)))))
(defn links->online-resources
"Creates online resource URL XML elements from the passed in links."
[links]
(for [link links]
(let [url (:URL link)
type (:Type link)
description (:Description link)
mime-type (:MimeType link)]
(xml/element :OnlineResource {}
(when url (xml/element :URL {} url))
(when description (xml/element :Description {} description))
(when type (xml/element :Type {} type))
(when mime-type (xml/element :MimeType {} mime-type))))))
(defn xml-elem->online-resource
"Parses and returns XML element for OnlineResource."
[elem]
(let [url (cx/string-at-path elem [:URL])
description (cx/string-at-path elem [:Description])
resource-type (cx/string-at-path elem [:Type])
mime-type (cx/string-at-path elem [:MimeType])]
{:url url
:description description
:type resource-type
:mime-type mime-type}))
(defn update-online-resources
"Returns an OnlineResources node in zipper representation
where UPDATE-FIELD is updated where the LOCATOR-FIELD has a matching
key in the VALUE-MAP."
[online-resources locator-field value-field value-map]
(let [edn-resources (map xml-elem->online-resource online-resources)
resources (map #(merge %
(when-let [replacement (get value-map (get % locator-field))]
(hash-map value-field replacement)))
edn-resources)]
(xml/element
:OnlineResources {}
(for [r resources]
(let [{:keys [url description type mime-type]} r]
(xml/element :OnlineResource {}
(xml/element :URL {} url)
(when description (xml/element :Description {} description))
(xml/element :Type {} type)
(when mime-type (xml/element :MimeType {} mime-type))))))))
(defn links->online-access-urls
"Creates online access URL XML elements from the passed in links."
[links]
(for [link links]
(let [url (:URL link)
description (:Description link)
mime-type (:MimeType link)]
(xml/element :OnlineAccessUrl {}
(when url (xml/element :URL {} url))
(when description (xml/element :URLDescription {} description))
(when mime-type (xml/element :MimeType {} mime-type))))))
(defn- xml-elem->online-access-url
"Parses and returns XML element for OnlineAccessURL"
[elem]
(let [url (cx/string-at-path elem [:URL])
description (cx/string-at-path elem [:URLDescription])
mime-type (cx/string-at-path elem [:MimeType])]
{:url url
:url-description description
:mime-type mime-type}))
(defn update-online-access-urls
"Returns an OnlineAccessURLs node in zipper representation
where UPDATE-FIELD is updated where the LOCATOR-FIELD has a matching
key in the VALUE-MAP."
[online-access-urls locator-field value-field value-map]
(let [edn-access-urls (map xml-elem->online-access-url online-access-urls)
access-urls (map #(merge %
(when-let [replacement (get value-map (get % locator-field))]
(hash-map value-field replacement)))
edn-access-urls)]
(xml/element
:OnlineAccessURLs {}
(for [r access-urls]
(let [{:keys [url url-description mime-type]} r]
(xml/element :OnlineAccessURL {}
(xml/element :URL {} url)
(when url-description (xml/element :URLDescription {} url-description))
(when mime-type (xml/element :MimeType {} mime-type))))))))
(defn xml-elem->provider-browse
"Parses and returns XML element for ProviderBrowseUrl"
[elem]
(let [url (cx/string-at-path elem [:URL])
file-size (cx/long-at-path elem [:FileSize])
description (cx/string-at-path elem [:Description])
mime-type (cx/string-at-path elem [:MimeType])]
{:url url
:file-size file-size
:description description
:mime-type mime-type}))
(defn links->provider-browse-urls
"Creates provider browse URL XML elements from the passed in links."
[links]
(for [link links]
(let [url (:URL link)
file-size (:Size link)
description (:Description link)
mime-type (:MimeType link)]
(xml/element :ProviderBrowseUrl {}
(when url (xml/element :URL {} url))
(when file-size (xml/element :FileSize {} file-size))
(when description (xml/element :Description {} description))
(when mime-type (xml/element :MimeType {} mime-type))))))
(defn update-browse-image-urls
"Returns an AssociatedBrowseImageUrls node in zipper representation
where UPDATE-FIELD is updated where the LOCATOR-FIELD has a matching
key in the VALUE-MAP."
[urls locator-field value-field value-map]
(let [edn-urls (map xml-elem->provider-browse urls)
new-urls (map #(merge %
(when-let [replacement (get value-map (get % locator-field))]
(hash-map value-field replacement)))
edn-urls)]
(xml/element
:AssociatedBrowseImageUrls {}
(for [r new-urls]
(let [{:keys [url file-size description mime-type]} r]
(xml/element :ProviderBrowseUrl {}
(xml/element :URL {} url)
(when file-size (xml/element :FileSize {} file-size))
(when description (xml/element :Description {} description))
(when mime-type (xml/element :MimeType {} mime-type))))))))
(defn replace-in-tree
"Take a parsed granule xml, replace the given node with the provided replacement
Returns the zipper representation of the updated xml."
[tree element-tag replacement]
(let [zipper (zip/xml-zip tree)
start-loc (zip/down zipper)]
(loop [loc start-loc done false]
(if done
(zip/root loc)
(if-let [right-loc (zip/right loc)]
(cond
(= element-tag (-> right-loc zip/node :tag))
(recur (zip/replace right-loc replacement) true)
:else
(recur right-loc false))
(recur loc true))))))
(defn add-in-tree
"Take a parsed granule xml, add the passed in items to the node at the passed in element.
If the element exists, place the items at the end of the element's children. If the element
does not exist, place the items into the correct spot, using the main list at the top.
Returns the zipper representation of the updated xml."
[tree element items]
(let [zipper (zip/xml-zip tree)
start-loc (-> zipper zip/down)
rest-of-echo10-elements (seq (get-rest-echo10-elements element))]
(loop [loc start-loc done false]
(if done
(zip/root loc)
(if-let [right-loc (zip/right loc)]
(cond
(= element (-> right-loc zip/node :tag))
(recur (zip/append-child right-loc items) true)
(some #{(-> right-loc zip/node :tag)} rest-of-echo10-elements)
(recur (zip/insert-left right-loc (xml/element element {} items)) true)
:else
(recur right-loc false))
(recur (zip/insert-right loc (xml/element element {} items)) true))))))
(defn- compare-to-remove-url
"This function goes through the list of URLs to remove and compares each one
to the passed in xml represented child. If a match is found nil is returned,
otherwise the child is returned."
[child urls-to-remove]
(when child
(let [x (xml-elem->online-resource child)]
(loop [items urls-to-remove match? false]
(cond
(= true match?)
nil
(nil? (seq items))
child
:else
(let [item (first items)]
(if (= (:url x) (:URL item))
(recur (rest items) true)
(recur (rest items) false))))))))
(defn remove-from-tree
"Take a parsed granule xml, remove the passed in items from the node at the passed in element.
Returns the zipper representation of the updated xml."
[tree node-path-vector urls-to-remove]
(let [zipper (zip/xml-zip tree)
element (first node-path-vector)]
(loop [loc (-> zipper zip/down) done false]
(if done
(zip/root loc)
(if-let [right-loc (zip/right loc)]
(cond
(= element (-> right-loc zip/node :tag))
(let [children (remove nil?
(map #(compare-to-remove-url % urls-to-remove)
(zip/children right-loc)))]
(if (seq children)
(let [new-node (zip/make-node right-loc (zip/node right-loc) children)]
(recur (zip/replace right-loc new-node) true))
(recur (zip/remove right-loc) true)))
:else
(recur right-loc false))
(recur loc true))))))
|
4513b26d8727e1459bdc019c34ccae5aa4b743fd45f5a11b9da79c528bc8cb7a | janestreet/core_bench | exception_tests.ml | open Core
open Core_bench
exception Noarg
exception Arg1 of int

(* Always returns 10; the dynamic [Random.bool] test keeps the compiler from
   constant-folding the value into the benchmark bodies. *)
let get () = if Random.bool () then 10 else 10
(* Baseline: try/with whose body never raises, wildcard handler; measures
   the cost of merely installing an exception handler. *)
let trywith =
  Bench.Test.create
    ~name:"trywith"
    (let x = get () in
     let y = get () in
     fun () ->
       ignore
         (try x with
          | _ -> y))
;;
(* Raise a nullary exception, catch with a wildcard handler. *)
let trywithraise0 =
  Bench.Test.create
    ~name:"trywith-raise0"
    (let x = get () in
     let y = get () in
     fun () ->
       ignore
         (try raise Noarg with
          | _ -> x + y))
;;
(* Raise a nullary exception, catch by matching the specific constructor. *)
let trywithraise0match =
  Bench.Test.create
    ~name:"trywith-raise0-match"
    (let x = get () in
     let y = get () in
     fun () ->
       ignore
         (try raise Noarg with
          | Noarg -> x + y))
;;
(* Raise an exception carrying one argument, catch with a wildcard; the
   allocation of [Arg1 x] is part of what is measured. *)
let trywithraise1 =
  Bench.Test.create
    ~name:"trywith-raise1"
    (let x = get () in
     let y = get () in
     fun () ->
       ignore
         (try raise (Arg1 x) with
          | _ -> y))
;;
(* Raise an exception carrying one argument, catch by matching the
   constructor and destructuring its payload. *)
let trywithraise1match =
  Bench.Test.create
    ~name:"trywith-raise1-match"
    (let x = get () in
     let y = get () in
     fun () ->
       ignore
         (try raise (Arg1 x) with
          | Arg1 x -> x + y))
;;
(* Like trywith, but the (never-entered) handler matches a specific
   one-argument constructor instead of a wildcard. *)
let trywith1 =
  Bench.Test.create
    ~name:"trywith1"
    (let x = get () in
     let y = get () in
     fun () ->
       ignore
         (try x with
          | Arg1 x -> x + y))
;;
(* Wraps [f] in [depth] levels of non-tail recursion, so a raise inside [f]
   must unwind a stack of the given depth; [f] is invoked exactly once. *)
let recur depth f () =
  let rec loop = function
    | 0 -> f ()
    | n -> loop (n - 1) + 1
  in
  ignore (loop depth)

(* Stack depths exercised by the indexed benchmarks below. *)
let depths = [ 0; 10; 100; 1000; 10000 ]
(* trywith (handler installed, nothing raised) measured under each stack
   depth in [depths]. *)
let recur_trywith =
  Bench.Test.create_indexed
    ~name:"recur_trywith"
    ~args:depths
    (let x = get () in
     let y = get () in
     fun depth ->
       Staged.stage
         (recur depth (fun () ->
            try x with
            | _ -> y)))
;;
(* Nullary raise with wildcard handler, under each stack depth. *)
let recur_trywithraise0 =
  Bench.Test.create_indexed
    ~name:"recur_trywith-raise0"
    ~args:depths
    (let x = get () in
     let y = get () in
     fun depth ->
       Staged.stage
         (recur depth (fun () ->
            try raise Noarg with
            | _ -> x + y)))
;;
(* Nullary raise with constructor-matching handler, under each stack depth. *)
let recur_trywithraise0match =
  Bench.Test.create_indexed
    ~name:"recur_trywith-raise0-match"
    ~args:depths
    (let x = get () in
     let y = get () in
     fun depth ->
       Staged.stage
         (recur depth (fun () ->
            try raise Noarg with
            | Noarg -> x + y)))
;;
(* One-argument raise with wildcard handler, under each stack depth. *)
let recur_trywithraise1 =
  Bench.Test.create_indexed
    ~name:"recur_trywith-raise1"
    ~args:depths
    (let x = get () in
     let y = get () in
     fun depth ->
       Staged.stage
         (recur depth (fun () ->
            try raise (Arg1 x) with
            | _ -> y)))
;;
(* One-argument raise with destructuring handler, under each stack depth. *)
let recur_trywithraise1match =
  Bench.Test.create_indexed
    ~name:"recur_trywith-raise1-match"
    ~args:depths
    (let x = get () in
     let y = get () in
     fun depth ->
       Staged.stage
         (recur depth (fun () ->
            try raise (Arg1 x) with
            | Arg1 x -> x + y)))
;;
(* Indexed variant of trywith1: body never raises, handler matches [Arg1]
   only. The reported name used to be the typo "recurs_trywith1"; it is
   corrected here to match the binding and its sibling benchmarks. *)
let recur_trywith1 =
  Bench.Test.create_indexed
    ~name:"recur_trywith1"
    ~args:depths
    (let x = get () in
     let y = get () in
     fun depth ->
       Staged.stage
         (recur depth (fun () ->
            try x with
            | Arg1 x -> x + y)))
;;
(* All benchmarks: the six flat try/with variants followed by their
   stack-depth-indexed counterparts. *)
let tests =
  [ trywith
  ; trywithraise0
  ; trywithraise0match
  ; trywithraise1
  ; trywithraise1match
  ; trywith1
  ; recur_trywith
  ; recur_trywithraise0
  ; recur_trywithraise0match
  ; recur_trywithraise1
  ; recur_trywithraise1match
  ; recur_trywith1
  ]
;;
let command = Bench.make_command tests
| null | https://raw.githubusercontent.com/janestreet/core_bench/f319e14b458131d825cbba51ed25a8168ce5404e/test/exception_tests.ml | ocaml | open Core
open Core_bench
exception Noarg
exception Arg1 of int
let get () = if Random.bool () then 10 else 10
let trywith =
Bench.Test.create
~name:"trywith"
(let x = get () in
let y = get () in
fun () ->
ignore
(try x with
| _ -> y))
;;
let trywithraise0 =
Bench.Test.create
~name:"trywith-raise0"
(let x = get () in
let y = get () in
fun () ->
ignore
(try raise Noarg with
| _ -> x + y))
;;
let trywithraise0match =
Bench.Test.create
~name:"trywith-raise0-match"
(let x = get () in
let y = get () in
fun () ->
ignore
(try raise Noarg with
| Noarg -> x + y))
;;
let trywithraise1 =
Bench.Test.create
~name:"trywith-raise1"
(let x = get () in
let y = get () in
fun () ->
ignore
(try raise (Arg1 x) with
| _ -> y))
;;
let trywithraise1match =
Bench.Test.create
~name:"trywith-raise1-match"
(let x = get () in
let y = get () in
fun () ->
ignore
(try raise (Arg1 x) with
| Arg1 x -> x + y))
;;
let trywith1 =
Bench.Test.create
~name:"trywith1"
(let x = get () in
let y = get () in
fun () ->
ignore
(try x with
| Arg1 x -> x + y))
;;
let recur d f () =
let rec loop n = if n = 0 then f () else loop (n - 1) + 1 in
ignore (loop d)
;;
let depths = [ 0; 10; 100; 1000; 10000 ]
let recur_trywith =
Bench.Test.create_indexed
~name:"recur_trywith"
~args:depths
(let x = get () in
let y = get () in
fun depth ->
Staged.stage
(recur depth (fun () ->
try x with
| _ -> y)))
;;
let recur_trywithraise0 =
Bench.Test.create_indexed
~name:"recur_trywith-raise0"
~args:depths
(let x = get () in
let y = get () in
fun depth ->
Staged.stage
(recur depth (fun () ->
try raise Noarg with
| _ -> x + y)))
;;
let recur_trywithraise0match =
Bench.Test.create_indexed
~name:"recur_trywith-raise0-match"
~args:depths
(let x = get () in
let y = get () in
fun depth ->
Staged.stage
(recur depth (fun () ->
try raise Noarg with
| Noarg -> x + y)))
;;
let recur_trywithraise1 =
Bench.Test.create_indexed
~name:"recur_trywith-raise1"
~args:depths
(let x = get () in
let y = get () in
fun depth ->
Staged.stage
(recur depth (fun () ->
try raise (Arg1 x) with
| _ -> y)))
;;
let recur_trywithraise1match =
Bench.Test.create_indexed
~name:"recur_trywith-raise1-match"
~args:depths
(let x = get () in
let y = get () in
fun depth ->
Staged.stage
(recur depth (fun () ->
try raise (Arg1 x) with
| Arg1 x -> x + y)))
;;
let recur_trywith1 =
Bench.Test.create_indexed
~name:"recurs_trywith1"
~args:depths
(let x = get () in
let y = get () in
fun depth ->
Staged.stage
(recur depth (fun () ->
try x with
| Arg1 x -> x + y)))
;;
let tests =
[ trywith
; trywithraise0
; trywithraise0match
; trywithraise1
; trywithraise1match
; trywith1
; recur_trywith
; recur_trywithraise0
; recur_trywithraise0match
; recur_trywithraise1
; recur_trywithraise1match
; recur_trywith1
]
;;
let command = Bench.make_command tests
| |
c326a8844a3d1aa3707624a79b0d12af2d0764a8960fdb03be315a0e2e21316a | huangz1990/SICP-answers | 38-fold-left.scm | 38-fold-left.scm
(define (fold-left op initial sequence)
  ;; Left fold by direct tail recursion: the accumulator is threaded through
  ;; successive applications of op, consuming the sequence front to back.
  (if (null? sequence)
      initial
      (fold-left op (op initial (car sequence)) (cdr sequence))))
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp2/code/38-fold-left.scm | scheme | 38-fold-left.scm
(define (fold-left op initial sequence)
(define (iter result rest)
(if (null? rest)
result
(iter (op result (car rest))
(cdr rest))))
(iter initial sequence))
| |
56b4c2db1c84a3d41de06ef1c23039170707812cb523dbde569358a99acb1ded | dalong0514/ITstudy | 0304Koch.lisp | ------------------------== { } = = ----------------------- ; ;
;; ;;
The Koch Snowflake , devised by Swedish mathematician ; ;
in 1904 , is one of the earliest and perhaps most familiar fractal ; ;
curves . It is created by arranging three individual to ; ;
;; form an equilateral triangle. ;;
;; ;;
The Koch Curve itself is created by first dividing a straight line ; ;
into three equal segments and then constructing an equilateral ; ;
;; triangle whose base is the middle segment of the line, before ;;
;; finally removing the base of the triangle: ;;
;; ;;
1 . _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ; ;
;; ;;
2 . _ _ _ _ _ _ |______| _ _ _ _ _ _ ; ;
;; ;;
3 . /\ ; ;
;; / \ ;;
;; / \ ;;
;; ______/______\______ ;;
;; ;;
4 . /\ ; ;
;; / \ ;;
;; / \ ;;
;; ______/ \______ ;;
;; ;;
This procedure is then repeated on each of the four resulting line ; ;
segments - the infinite limit of this process is the Koch Curve . ; ;
;; ;;
As a consequence of its fractal nature , the Koch Snowflake has some ; ;
;; very intriguing & surprising geometrical properties. ;;
;; ;;
;; If the initial equilateral triangle has side length 's', then the ;;
initial area enclosed by the Koch Snowflake at the 0th iteration ; ;
;; is: ;;
A[0 ] = s/2·s·cos(pi/6 ) = ( s^2·sqrt(3))/4 ; ;
;; ;;
The perimeter of the Koch Snowflake at the 0th iteration is hence : ; ;
;; ;;
;; P[0] = 3·s ;;
;; ;;
Three new triangles are added at the first iteration , and , for ; ;
every iteration after , 4 times as many triangles are added to the ; ;
;; Snowflake, and so at the nth stage there are 3·4^(n-1) triangles. ;;
;; ;;
With every iteration , the side length of each triangle is a third ; ;
;; the length of the previous side, meaning that the area of the ;;
triangles reduces by a factor of 9 at each iteration . ; ;
;; ;;
Therefore , at the nth stage , the Koch Snowflake has an Area of : ; ;
;; ;;
A[n ] = A[n-1 ] + ( 3·4^(n-1)/9^n)·A[0 ] ( n > = 2 ) ; ;
;; A[1] = 4/3·A[0] ;;
A[0 ] = ( s^2·sqrt(3))/4 ; ;
;; ;;
;; In the limit as n goes to infinity, this iteration formula ;;
;; converges to: ;;
;; ;;
;; A[n] = 8/5·A[0] = 2/5·s^2·sqrt(3) ;;
;; ;;
;; ...which is clearly finite. ;;
;; ;;
However , with every iteration there are four times as many line ; ;
segments , with each line segment a third as long as the previous ; ;
;; iteration. ;;
;; ;;
;; Hence, at the nth stage, the perimeter is: ;;
;; ;;
P[n ] = ( 4/3)^n·P[0 ] = 3·(4/3)^n·s ; ;
;; ;;
Since 4/3 > 1 , this goes to infinity as n goes to infinity . ; ;
;; ;;
Therefore , we have the surprising conclusion that the ; ;
;; Snowflake is an infinitely long curve bounding a finite area. ;;
;; ;;
;;----------------------------------------------------------------------;;
Author : , Copyright © 2012 - www.lee-mac.com ; ;
;;----------------------------------------------------------------------;;
Version 1.0 - 24 - 12 - 2012 ; ;
;; ;;
First release . ; ;
;;----------------------------------------------------------------------;;
;; Interactive command: draws a Koch Snowflake as an LWPolyline, refining it
;; one iteration per user confirmation and reporting the vertex count,
;; perimeter length and enclosed area at each stage.
(defun c:koch ( / 3p a0 an d1 en l1 l2 no p1 p2 p3 p4 r1 r2 )
    ;; Prompt for the circumscribing circle; seed the three vertices of the
    ;; initial equilateral triangle at 90, 210 and 330 degrees.
    (setq p1 (cond ((getpoint "\nSpecify Center <0,0,0>: ")) ('(0.0 0.0 0.0)))
          r1 (cond ((getdist p1 "\nSpecify Radius <1.0>: ")) (1.0))
          l1 (list
                 (cons 10 (polar p1 (/ (* 3.0 pi) 6.0) r1))
                 (cons 10 (polar p1 (/ (* 7.0 pi) 6.0) r1))
                 (cons 10 (polar p1 (/ (* 11.0 pi) 6.0) r1))
             )
          en (koch:poly l1)
          r2 (* r1 (sqrt 3.0))            ; side length of the initial triangle
          3p (/ pi 3.0)                   ; 60 degrees, the bump angle
          a0 (/ (* r2 r2 (sqrt 3.0)) 4.0) ; area of the initial triangle
          no 0                            ; iteration counter
    )
    ;; Loop condition: print the closed-form stats for the current iteration
    ;; (vertices 3*4^n, length 3*(4/3)^n*s, area recurrence) and ask whether
    ;; to continue; the loop body performs one refinement.
    (while
        (progn
            (princ
                (strcat
                    "\nIteration: " (itoa no)
                    "\tVertices: " (itoa (* 3 (expt 4 no)))
                    "\tLength: " (rtos (* 3 (expt (/ 4.0 3.0) no) r2) 2)
                    "\tArea: "
                    (cond
                        ( (= 0 no)
                          (rtos a0 2)
                        )
                        ( (= 1 no)
                          (rtos (setq an (/ (* 4.0 a0) 3.0)) 2)
                        )
                        ( (rtos (setq an (+ an (/ (* 3 a0 (expt 4 (1- no))) (expt 9 no)))) 2))
                    )
                )
            )
            (initget "Yes No")
            (/= "No" (getkword "\nContinue? [Yes/No] <Yes>: "))
        )
        ;; Close the vertex ring so every edge (including last->first) is
        ;; subdivided, then replace each edge with the four Koch segments.
        (setq l1 (cons (last l1) l1))
        (while
            (setq p1 (cdar l1)
                  l1 (cdr l1)
                  p2 (cdar l1)
            )
            (setq a1 (angle p1 p2)
                  d1 (/ (distance p1 p2) 3.0)
                  p3 (polar p1 a1 d1)
                  p4 (cons 10 (polar p2 a1 (- d1)))
                  l2 (vl-list* (cons 10 p2) p4 (cons 10 (polar p3 (- a1 3p) d1)) (cons 10 p3) l2)
            )
        )
        ;; Redraw: delete the previous polyline and create the refined one.
        (entdel en)
        (setq l1 (reverse l2)
              l2 nil
              en (koch:poly l1)
              no (1+ no)
        )
    )
    (princ)
)
;; Creates a closed LWPolyline entity from the supplied list of vertex
;; DXF pairs (10 . point) and returns the new entity name.
(defun koch:poly ( l )
    (entmakex
        (vl-list*
           '(000 . "LWPOLYLINE")
           '(100 . "AcDbEntity")
           '(100 . "AcDbPolyline")
            (cons 90 (length l))
           '(70 . 1)
            l
        )
    )
)
;;----------------------------------------------------------------------;;
;; Load-time banner: prints version/author info and the usage hint to the
;; command line; the trailing (princ) suppresses the return value echo.
(princ
    (strcat
        "\n:: Koch.lsp | Version 1.0 | © Lee Mac "
        (menucmd "m=$(edtime,$(getvar,DATE),YYYY)")
        " www.lee-mac.com ::"
        "\n:: Type \"koch\" to Invoke ::"
    )
)
(princ)
;;----------------------------------------------------------------------;;
;; End of File ;;
;;----------------------------------------------------------------------;; | null | https://raw.githubusercontent.com/dalong0514/ITstudy/8a7f1708d11856a78016795268da67b6a7521115/004%E7%BC%96%E7%A8%8B%E8%AF%AD%E8%A8%80/07AutoLisp/04LeeMac-Library/0304Koch.lisp | lisp | ;
;;
;
;
;
form an equilateral triangle. ;;
;;
;
;
triangle whose base is the middle segment of the line, before ;;
finally removing the base of the triangle: ;;
;;
;
;;
;
;;
;
/ \ ;;
/ \ ;;
______/______\______ ;;
;;
;
/ \ ;;
/ \ ;;
______/ \______ ;;
;;
;
;
;;
;
very intriguing & surprising geometrical properties. ;;
;;
If the initial equilateral triangle has side length 's', then the ;;
;
is: ;;
;
;;
;
;;
P[0] = 3·s ;;
;;
;
;
Snowflake, and so at the nth stage there are 3·4^(n-1) triangles. ;;
;;
;
the length of the previous side, meaning that the area of the ;;
;
;;
;
;;
;
A[1] = 4/3·A[0] ;;
;
;;
In the limit as n goes to infinity, this iteration formula ;;
converges to: ;;
;;
A[n] = 8/5·A[0] = 2/5·s^2·sqrt(3) ;;
;;
...which is clearly finite. ;;
;;
;
;
iteration. ;;
;;
Hence, at the nth stage, the perimeter is: ;;
;;
;
;;
;
;;
;
Snowflake is an infinitely long curve bounding a finite area. ;;
;;
----------------------------------------------------------------------;;
;
----------------------------------------------------------------------;;
;
;;
;
----------------------------------------------------------------------;;
----------------------------------------------------------------------;;
----------------------------------------------------------------------;;
End of File ;;
----------------------------------------------------------------------;; |
(defun c:koch ( / 3p a0 an d1 en l1 l2 no p1 p2 p3 p4 r1 r2 )
(setq p1 (cond ((getpoint "\nSpecify Center <0,0,0>: ")) ('(0.0 0.0 0.0)))
r1 (cond ((getdist p1 "\nSpecify Radius <1.0>: ")) (1.0))
l1 (list
(cons 10 (polar p1 (/ (* 3.0 pi) 6.0) r1))
(cons 10 (polar p1 (/ (* 7.0 pi) 6.0) r1))
(cons 10 (polar p1 (/ (* 11.0 pi) 6.0) r1))
)
en (koch:poly l1)
r2 (* r1 (sqrt 3.0))
3p (/ pi 3.0)
a0 (/ (* r2 r2 (sqrt 3.0)) 4.0)
no 0
)
(while
(progn
(princ
(strcat
"\nIteration: " (itoa no)
"\tVertices: " (itoa (* 3 (expt 4 no)))
"\tLength: " (rtos (* 3 (expt (/ 4.0 3.0) no) r2) 2)
"\tArea: "
(cond
( (= 0 no)
(rtos a0 2)
)
( (= 1 no)
(rtos (setq an (/ (* 4.0 a0) 3.0)) 2)
)
( (rtos (setq an (+ an (/ (* 3 a0 (expt 4 (1- no))) (expt 9 no)))) 2))
)
)
)
(initget "Yes No")
(/= "No" (getkword "\nContinue? [Yes/No] <Yes>: "))
)
(setq l1 (cons (last l1) l1))
(while
(setq p1 (cdar l1)
l1 (cdr l1)
p2 (cdar l1)
)
(setq a1 (angle p1 p2)
d1 (/ (distance p1 p2) 3.0)
p3 (polar p1 a1 d1)
p4 (cons 10 (polar p2 a1 (- d1)))
l2 (vl-list* (cons 10 p2) p4 (cons 10 (polar p3 (- a1 3p) d1)) (cons 10 p3) l2)
)
)
(entdel en)
(setq l1 (reverse l2)
l2 nil
en (koch:poly l1)
no (1+ no)
)
)
(princ)
)
(defun koch:poly ( l )
(entmakex
(append
(list
'(000 . "LWPOLYLINE")
'(100 . "AcDbEntity")
'(100 . "AcDbPolyline")
(cons 90 (length l))
'(70 . 1)
)
l
)
)
)
(princ
(strcat
"\n:: Koch.lsp | Version 1.0 | © Lee Mac "
(menucmd "m=$(edtime,$(getvar,DATE),YYYY)")
" www.lee-mac.com ::"
"\n:: Type \"koch\" to Invoke ::"
)
)
(princ)
|
cb784a7e0bf0ee51032307c56fad7f31c690c48b4b3da8b1cb26adbf8923f0fe | pontarius/pontarius-xmpp | DataForms.hs | # LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TupleSections #
{-# LANGUAGE OverloadedStrings #-}
| XEP 0004 : Data Forms ( -0004.html )
module Network.Xmpp.Xep.DataForms where
import qualified Data.Text as Text
import Data.XML.Pickle
import qualified Data.XML.Types as XML
-- | XML namespace of XEP-0004 data forms.
dataFormNs :: Text.Text
dataFormNs = "jabber:x:data"

-- | Qualify a local element name with the data-forms namespace
-- (no prefix).
dataFormName :: Text.Text -> XML.Name
dataFormName n = XML.Name n (Just dataFormNs) Nothing
-- | The four values of a data form's @type@ attribute (XEP-0004 §3).
data FormType = FormF | SubmitF | CancelF | ResultF

-- | Render the attribute text exactly as it appears on the wire.
instance Show FormType where
    show ft = case ft of
        FormF   -> "form"
        SubmitF -> "submit"
        CancelF -> "cancel"
        ResultF -> "result"

-- | Parse the attribute text; anything unrecognised yields no parse.
instance Read FormType where
    readsPrec _ s = case s of
        "form"   -> [(FormF  , "")]
        "submit" -> [(SubmitF, "")]
        "cancel" -> [(CancelF, "")]
        "result" -> [(ResultF, "")]
        _        -> []
-- | A single @<option/>@ child of a list field.
data Option = Option { label :: Maybe Text.Text    -- ^ optional human-readable caption
                     , options :: [Text.Text]      -- ^ texts of the nested @<value/>@ elements
                     } deriving Show

-- | One @<field/>@ element of a form.
data Field = Field { fieldVar :: Maybe Text.Text      -- ^ @var@ attribute (field identifier)
                   , fieldLabel :: Maybe Text.Text    -- ^ @label@ attribute
                   , fieldType :: Maybe FieldType     -- ^ @type@ attribute; 'Nothing' when unspecified
                   , fieldDesc :: Maybe Text.Text     -- ^ nested @<desc/>@ text
                   , fieldRequired :: Bool            -- ^ presence of a @<required/>@ child
                   , fieldValues :: [Text.Text]       -- ^ nested @<value/>@ texts
                   , fieldOptions :: [Option]         -- ^ nested @<option/>@ elements
                   } deriving Show

-- | A complete @jabber:x:data@ form (the @<x/>@ element).
data Form = Form { formType :: FormType           -- ^ @type@ attribute
                 , title :: Maybe Text.Text       -- ^ optional @<title/>@
                 , instructions :: [Text.Text]    -- ^ zero or more @<instructions/>@ texts
                 , fields :: [Field]              -- ^ the form's fields
                 , reported :: Maybe [Field]      -- ^ column headers for reported results
                 , items :: [[Field]]             -- ^ result rows, one field list per @<item/>@
                 } deriving Show
-- | Field types defined by XEP-0004 §3.3.
data FieldType = Boolean
               | Fixed
               | Hidden
               | JidMulti
               | JidSingle
               | ListMulti
               | ListSingle
               | TextMulti
               | TextPrivate
               | TextSingle

-- | Render the wire representation of the @type@ attribute.
instance Show FieldType where
    show ft = case ft of
        Boolean     -> "boolean"
        Fixed       -> "fixed"
        Hidden      -> "hidden"
        JidMulti    -> "jid-multi"
        JidSingle   -> "jid-single"
        ListMulti   -> "list-multi"
        ListSingle  -> "list-single"
        TextMulti   -> "text-multi"
        TextPrivate -> "text-private"
        TextSingle  -> "text-single"

-- | Parse the wire representation; unknown strings yield no parse.
instance Read FieldType where
    readsPrec _ s =
        case lookup s table of
            Just ft -> [(ft, "")]
            Nothing -> []
      where
        table =
            [ ("boolean"     , Boolean)
            , ("fixed"       , Fixed)
            , ("hidden"      , Hidden)
            , ("jid-multi"   , JidMulti)
            , ("jid-single"  , JidSingle)
            , ("list-multi"  , ListMulti)
            , ("list-single" , ListSingle)
            , ("text-multi"  , TextMulti)
            , ("text-private", TextPrivate)
            , ("text-single" , TextSingle)
            ]
-- | Pickler for the whole @<x xmlns='jabber:x:data'/>@ element.
-- The wrapper strips/reintroduces the unit tags that 'xpElems' attaches
-- to the repeated @<instructions/>@ and @<item/>@ children, so 'Form'
-- stores plain lists.
xpForm :: PU [XML.Node] Form
xpForm = xpWrap (\(tp , (ttl, ins, flds, rpd, its)) ->
                  Form tp ttl (map snd ins) flds rpd (map snd its))
                (\(Form tp ttl ins flds rpd its) ->
                  (tp ,
                   (ttl, map ((),) ins
                   , flds, rpd, map ((),) its)))
        $
        xpElem (dataFormName "x")
          (xpAttr "type" xpPrim)
          (xp5Tuple
            (xpOption $ xpElemNodes (dataFormName "title") (xpContent xpId))
            (xpElems (dataFormName "instructions") xpUnit (xpContent xpId))
            xpFields
            (xpOption $ xpElemNodes (dataFormName "reported") xpFields)
            (xpElems (dataFormName "item") xpUnit xpFields))
-- | Pickler for a list of @<field/>@ elements.
-- NOTE(review): the pickled attribute tuple is (var, type, label) while
-- the 'Field' record stores (var, label, type); the wrapper swaps the
-- last two — easy to misread when editing.
xpFields :: PU [XML.Node] [Field]
xpFields = xpWrap (map $ \((var, tp, lbl),(desc, req, vals, opts))
                       -> Field var lbl tp desc req vals opts)
                  (map $ \(Field var lbl tp desc req vals opts)
                       -> ((var, tp, lbl),(desc, req, vals, opts))) $
           xpElems (dataFormName "field")
             (xp3Tuple
               (xpAttrImplied "var" xpId )
               (xpAttrImplied "type" xpPrim )
               (xpAttrImplied "label" xpId )
             )
             (xp4Tuple
               (xpOption $ xpElemText (dataFormName "desc"))
               (xpElemExists (dataFormName "required"))
               xpValues
               xpOptions )
-- | Pickler for zero or more @<value/>@ children, as plain texts.
xpValues :: PU [XML.Node] [Text.Text]
xpValues = xpWrap strip tag inner
  where
    strip = map snd        -- drop the unit tags added by xpElems
    tag   = map ((),)      -- restore them when pickling
    inner = xpElems (dataFormName "value") xpUnit (xpContent xpId)

-- | Pickler for zero or more @<option/>@ children.
xpOptions :: PU [XML.Node] [Option]
xpOptions = xpWrap toOpts fromOpts inner
  where
    toOpts   = map (\(l, os) -> Option l os)
    fromOpts = map (\(Option l os) -> (l, os))
    inner    = xpElems (dataFormName "option")
                       (xpAttrImplied "label" xpId)
                       xpValues
| null | https://raw.githubusercontent.com/pontarius/pontarius-xmpp/08e4a24e6408adb6320b4e73f7be691de060b583/source/Network/Xmpp/Xep/DataForms.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TupleSections #
| XEP 0004 : Data Forms ( -0004.html )
module Network.Xmpp.Xep.DataForms where
import qualified Data.Text as Text
import Data.XML.Pickle
import qualified Data.XML.Types as XML
dataFormNs :: Text.Text
dataFormNs = "jabber:x:data"
dataFormName :: Text.Text -> XML.Name
dataFormName n = XML.Name n (Just dataFormNs) Nothing
data FormType = FormF | SubmitF | CancelF | ResultF
instance Show FormType where
show FormF = "form"
show SubmitF = "submit"
show CancelF = "cancel"
show ResultF = "result"
instance Read FormType where
readsPrec _ "form" = [(FormF , "")]
readsPrec _ "submit" = [(SubmitF, "")]
readsPrec _ "cancel" = [(CancelF, "")]
readsPrec _ "result" = [(ResultF, "")]
readsPrec _ _ = []
data Option = Option { label :: Maybe Text.Text
, options :: [Text.Text]
} deriving Show
data Field = Field { fieldVar :: Maybe Text.Text
, fieldLabel :: Maybe Text.Text
, fieldType :: Maybe FieldType
, fieldDesc :: Maybe Text.Text
, fieldRequired :: Bool
, fieldValues :: [Text.Text]
, fieldOptions :: [Option]
} deriving Show
data Form = Form { formType :: FormType
, title :: Maybe Text.Text
, instructions :: [Text.Text]
, fields :: [Field]
, reported :: Maybe [Field]
, items :: [[Field]]
} deriving Show
data FieldType = Boolean
| Fixed
| Hidden
| JidMulti
| JidSingle
| ListMulti
| ListSingle
| TextMulti
| TextPrivate
| TextSingle
instance Show FieldType where
show Boolean = "boolean"
show Fixed = "fixed"
show Hidden = "hidden"
show JidMulti = "jid-multi"
show JidSingle = "jid-single"
show ListMulti = "list-multi"
show ListSingle = "list-single"
show TextMulti = "text-multi"
show TextPrivate = "text-private"
show TextSingle = "text-single"
instance Read FieldType where
readsPrec _ "boolean" = [(Boolean ,"")]
readsPrec _ "fixed" = [(Fixed ,"")]
readsPrec _ "hidden" = [(Hidden ,"")]
readsPrec _ "jid-multi" = [(JidMulti ,"")]
readsPrec _ "jid-single" = [(JidSingle ,"")]
readsPrec _ "list-multi" = [(ListMulti ,"")]
readsPrec _ "list-single" = [(ListSingle ,"")]
readsPrec _ "text-multi" = [(TextMulti ,"")]
readsPrec _ "text-private" = [(TextPrivate ,"")]
readsPrec _ "text-single" = [(TextSingle ,"")]
readsPrec _ _ = []
xpForm :: PU [XML.Node] Form
xpForm = xpWrap (\(tp , (ttl, ins, flds, rpd, its)) ->
Form tp ttl (map snd ins) flds rpd (map snd its))
(\(Form tp ttl ins flds rpd its) ->
(tp ,
(ttl, map ((),) ins
, flds, rpd, map ((),) its)))
$
xpElem (dataFormName "x")
(xpAttr "type" xpPrim)
(xp5Tuple
(xpOption $ xpElemNodes (dataFormName "title") (xpContent xpId))
(xpElems (dataFormName "instructions") xpUnit (xpContent xpId))
xpFields
(xpOption $ xpElemNodes (dataFormName "reported") xpFields)
(xpElems (dataFormName "item") xpUnit xpFields))
xpFields :: PU [XML.Node] [Field]
xpFields = xpWrap (map $ \((var, tp, lbl),(desc, req, vals, opts))
-> Field var lbl tp desc req vals opts)
(map $ \(Field var lbl tp desc req vals opts)
-> ((var, tp, lbl),(desc, req, vals, opts))) $
xpElems (dataFormName "field")
(xp3Tuple
(xpAttrImplied "var" xpId )
(xpAttrImplied "type" xpPrim )
(xpAttrImplied "label" xpId )
)
(xp4Tuple
(xpOption $ xpElemText (dataFormName "desc"))
(xpElemExists (dataFormName "required"))
xpValues
xpOptions )
xpValues :: PU [XML.Node] [Text.Text]
xpValues = xpWrap (map snd) (map ((),))
(xpElems (dataFormName "value") xpUnit (xpContent xpId))
xpOptions :: PU [XML.Node] [Option]
xpOptions = xpWrap
(map $ \(l, os) -> Option l os)
(map $ \(Option l os) -> (l, os)) $
xpElems (dataFormName "option")
(xpAttrImplied "label" xpId)
xpValues
|
5c1e32b838a0ed130be2d3f347d08ce5482c49a2c2a3eccc3cb4e1647cb3a718 | BradWBeer/cl-pango | library.lisp | (in-package #:cl-pango)
;; Pango core library.  Each platform clause lists the versioned soname
;; first, then an unversioned fallback.
(cffi:define-foreign-library :libpango
  (cffi-features:darwin (:or "libpango-1.0.dylib" "libpango.dylib"))
  (cffi-features:unix (:or "libpango-1.0.so" "libpango-1.0.so.0"))
  (cffi-features:windows "libpango.dll"))

;; Load immediately so later foreign-function definitions can resolve
;; their symbols.
(cffi:load-foreign-library :libpango)

;; Pango/Cairo rendering bridge; same fallback strategy as above.
(cffi:define-foreign-library :libpangocairo
  (cffi-features:darwin (:or "libpangocairo-1.0.dylib" "libpangocairo.dylib"))
  (cffi-features:unix (:or "libpangocairo-1.0.so" "libpangocairo-1.0.so.0"))
  (cffi-features:windows "libpangocairo.dll"))

(cffi:load-foreign-library :libpangocairo)
| null | https://raw.githubusercontent.com/BradWBeer/cl-pango/ee4904d19ce22d00eb2fe17a4fe42e5df8ac8701/library.lisp | lisp | (in-package #:cl-pango)
(cffi:define-foreign-library :libpango
(cffi-features:darwin (:or "libpango-1.0.dylib" "libpango.dylib"))
(cffi-features:unix (:or "libpango-1.0.so" "libpango-1.0.so.0"))
(cffi-features:windows "libpango.dll"))
(cffi:load-foreign-library :libpango)
(cffi:define-foreign-library :libpangocairo
(cffi-features:darwin (:or "libpangocairo-1.0.dylib" "libpangocairo.dylib"))
(cffi-features:unix (:or "libpangocairo-1.0.so" "libpangocairo-1.0.so.0"))
(cffi-features:windows "libpangocairo.dll"))
(cffi:load-foreign-library :libpangocairo)
| |
6088157f34fcef5d20414a67b2c30da322ce7e46d548390cfd6d36a1d31fda44 | rubenbarroso/EOPL | 2-4.scm | (let ((time-stamp "Time-stamp: <2001-05-09 19:28:56 dfried>"))
(eopl:printf "2-4.scm ~a~%" (substring time-stamp 13 29)))
;; Build a FIFO queue as a vector of four operations closed over two
;; shared stacks: q-in collects newly enqueued items; q-out holds items
;; ready to leave.  When q-out runs dry, q-in is reversed into it, giving
;; amortised O(1) enqueue/dequeue.
(define (create-queue)
  (let ((q-in '())
        (q-out '()))
    (define (reset-queue)
      (set! q-in '())
      (set! q-out '()))
    (define (empty-queue?)
      (and (null? q-in) (null? q-out)))
    (define (enqueue x)
      (set! q-in (cons x q-in)))
    (define (dequeue)
      (if (empty-queue?)
          (eopl:error 'dequeue "Not on an empty queue")
          (begin
            ;; Refill the outgoing stack from the incoming one if needed.
            (if (null? q-out)
                (begin
                  (set! q-out (reverse q-in))
                  (set! q-in '())))
            (let ((ans (car q-out)))
              (set! q-out (cdr q-out))
              ans))))
    (vector reset-queue empty-queue? enqueue dequeue)))
;; Selectors for the four operations packed by create-queue.
(define (queue-get-reset-operation q) (vector-ref q 0))
(define (queue-get-empty?-operation q) (vector-ref q 1))
(define (queue-get-enqueue-operation q) (vector-ref q 2))
(define (queue-get-dequeue-operation q) (vector-ref q 3))
| null | https://raw.githubusercontent.com/rubenbarroso/EOPL/f9b3c03c2fcbaddf64694ee3243d54be95bfe31d/src/interps/2-4.scm | scheme | (let ((time-stamp "Time-stamp: <2001-05-09 19:28:56 dfried>"))
(eopl:printf "2-4.scm ~a~%" (substring time-stamp 13 29)))
(define create-queue
(lambda ()
(let ((q-in '())
(q-out '()))
(letrec
((reset-queue
(lambda ()
(set! q-in '())
(set! q-out '())))
(empty-queue?
(lambda ()
(and (null? q-in)
(null? q-out))))
(enqueue
(lambda (x)
(set! q-in (cons x q-in))))
(dequeue
(lambda ()
(if (empty-queue?)
(eopl:error 'dequeue
"Not on an empty queue")
(begin
(if (null? q-out)
(begin
(set! q-out (reverse q-in))
(set! q-in '())))
(let ((ans (car q-out)))
(set! q-out (cdr q-out))
ans))))))
(vector reset-queue empty-queue? enqueue dequeue)))))
(define queue-get-reset-operation
(lambda (q) (vector-ref q 0)))
(define queue-get-empty?-operation
(lambda (q) (vector-ref q 1)))
(define queue-get-enqueue-operation
(lambda (q) (vector-ref q 2)))
(define queue-get-dequeue-operation
(lambda (q) (vector-ref q 3)))
| |
622f1947ddaf19b533e9c503b373d8c80683876688e90e466704f99828af6adb | yesodweb/wai | WaiAppEmbeddedTest.hs | # LANGUAGE TemplateHaskell , OverloadedStrings #
module WaiAppEmbeddedTest (embSpec) where
import Codec.Compression.GZip (compress)
import EmbeddedTestEntries
import Network.Wai
import Network.Wai.Application.Static (staticApp)
import Network.Wai.Test
import Test.Hspec
import WaiAppStatic.Storage.Embedded
import WaiAppStatic.Types
defRequest :: Request
defRequest = defaultRequest
embSpec :: Spec
embSpec = do
let embedSettings settings = flip runSession (staticApp settings)
let embed = embedSettings $(mkSettings mkEntries)
describe "embedded, compressed entry" $ do
it "served correctly" $ embed $ do
req <- request (setRawPathInfo defRequest "e1.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertHeader "Content-Encoding" "gzip" req
assertHeader "ETag" "Etag 1" req
assertNoHeader "Last-Modified req" req
assertBody (compress $ body 1000 'A') req
it "304 when valid if-none-match sent" $ embed $ do
req <- request (setRawPathInfo defRequest "e1.txt")
{ requestHeaders = [("If-None-Match", "Etag 1")] }
assertStatus 304 req
it "ssIndices works" $ do
let testSettings = $(mkSettings mkEntries){
ssIndices = [unsafeToPiece "index.html"]
}
embedSettings testSettings $ do
req <- request defRequest
assertStatus 200 req
assertBody "index file" req
it "ssIndices works with trailing slashes" $ do
let testSettings = $(mkSettings mkEntries){
ssIndices = [unsafeToPiece "index.html"]
}
embedSettings testSettings $ do
req <- request (setRawPathInfo defRequest "/foo/")
assertStatus 200 req
assertBody "index file in subdir" req
describe "embedded, uncompressed entry" $ do
it "too short" $ embed $ do
req <- request (setRawPathInfo defRequest "e2.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertNoHeader "Content-Encoding" req
assertHeader "ETag" "Etag 2" req
assertBody "ABC" req
it "wrong mime" $ embed $ do
req <- request (setRawPathInfo defRequest "somedir/e3.txt")
assertStatus 200 req
assertHeader "Content-Type" "xxx" req
assertNoHeader "Content-Encoding" req
assertHeader "ETag" "Etag 3" req
assertBody (body 1000 'A') req
describe "reloadable entry" $
it "served correctly" $ embed $ do
req <- request (setRawPathInfo defRequest "e4.css")
assertStatus 200 req
assertHeader "Content-Type" "text/css" req
assertNoHeader "Content-Encoding" req
assertHeader "ETag" "Etag 4" req
assertBody (body 2000 'Q') req
describe "entries without etags" $ do
it "embedded entry" $ embed $ do
req <- request (setRawPathInfo defRequest "e5.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertHeader "Content-Encoding" "gzip" req
assertNoHeader "ETag" req
assertBody (compress $ body 1000 'Z') req
it "reload entry" $ embed $ do
req <- request (setRawPathInfo defRequest "e6.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertNoHeader "Content-Encoding" req
assertNoHeader "ETag" req
assertBody (body 1000 'W') req
| null | https://raw.githubusercontent.com/yesodweb/wai/f59e577f865d017b4726826ac5586bb916cf315b/wai-app-static/test/WaiAppEmbeddedTest.hs | haskell | # LANGUAGE TemplateHaskell , OverloadedStrings #
module WaiAppEmbeddedTest (embSpec) where
import Codec.Compression.GZip (compress)
import EmbeddedTestEntries
import Network.Wai
import Network.Wai.Application.Static (staticApp)
import Network.Wai.Test
import Test.Hspec
import WaiAppStatic.Storage.Embedded
import WaiAppStatic.Types
defRequest :: Request
defRequest = defaultRequest
embSpec :: Spec
embSpec = do
let embedSettings settings = flip runSession (staticApp settings)
let embed = embedSettings $(mkSettings mkEntries)
describe "embedded, compressed entry" $ do
it "served correctly" $ embed $ do
req <- request (setRawPathInfo defRequest "e1.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertHeader "Content-Encoding" "gzip" req
assertHeader "ETag" "Etag 1" req
assertNoHeader "Last-Modified req" req
assertBody (compress $ body 1000 'A') req
it "304 when valid if-none-match sent" $ embed $ do
req <- request (setRawPathInfo defRequest "e1.txt")
{ requestHeaders = [("If-None-Match", "Etag 1")] }
assertStatus 304 req
it "ssIndices works" $ do
let testSettings = $(mkSettings mkEntries){
ssIndices = [unsafeToPiece "index.html"]
}
embedSettings testSettings $ do
req <- request defRequest
assertStatus 200 req
assertBody "index file" req
it "ssIndices works with trailing slashes" $ do
let testSettings = $(mkSettings mkEntries){
ssIndices = [unsafeToPiece "index.html"]
}
embedSettings testSettings $ do
req <- request (setRawPathInfo defRequest "/foo/")
assertStatus 200 req
assertBody "index file in subdir" req
describe "embedded, uncompressed entry" $ do
it "too short" $ embed $ do
req <- request (setRawPathInfo defRequest "e2.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertNoHeader "Content-Encoding" req
assertHeader "ETag" "Etag 2" req
assertBody "ABC" req
it "wrong mime" $ embed $ do
req <- request (setRawPathInfo defRequest "somedir/e3.txt")
assertStatus 200 req
assertHeader "Content-Type" "xxx" req
assertNoHeader "Content-Encoding" req
assertHeader "ETag" "Etag 3" req
assertBody (body 1000 'A') req
describe "reloadable entry" $
it "served correctly" $ embed $ do
req <- request (setRawPathInfo defRequest "e4.css")
assertStatus 200 req
assertHeader "Content-Type" "text/css" req
assertNoHeader "Content-Encoding" req
assertHeader "ETag" "Etag 4" req
assertBody (body 2000 'Q') req
describe "entries without etags" $ do
it "embedded entry" $ embed $ do
req <- request (setRawPathInfo defRequest "e5.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertHeader "Content-Encoding" "gzip" req
assertNoHeader "ETag" req
assertBody (compress $ body 1000 'Z') req
it "reload entry" $ embed $ do
req <- request (setRawPathInfo defRequest "e6.txt")
assertStatus 200 req
assertHeader "Content-Type" "text/plain" req
assertNoHeader "Content-Encoding" req
assertNoHeader "ETag" req
assertBody (body 1000 'W') req
| |
320c62122616a50c636c2b4b7299125db18fe3e63c3a6c5fa52a8d193ff4f69c | ghc/nofib | Encode.hs |
- Encode Mk 2 , using a prefix table for the codes
-
- , Systems Research , British Telecom Laboratories 1992
- Encode Mk 2, using a prefix table for the codes
-
- Paul Sanders, Systems Research, British Telecom Laboratories 1992
-}
module Encode (encode) where
import Defaults
import PTTrees
-- for convenience we make the code table type explicit
type CodeTable = PrefixTree Char Int
-- encode sets up the arguments for the real function: code numbering
-- starts at first_code (from Defaults) and the table is preloaded with
-- every single byte via initial_table.
encode :: String -> [Int]
encode input = encode' input first_code initial_table
- encode ' loops through the input string assembling the codes produced
- by code_string . The first character is treated specially in that it
- is not added to the table ; its code is simply its ascii value .
- encode' loops through the input string assembling the codes produced
- by code_string. The first character is treated specially in that it
- is not added to the table; its code is simply its ascii value.
-}
-- Base case: no more input, no more codes.
encode' [] _ _
  = []
-- Emit one code per step; v is the next code value to assign to a fresh
-- table entry (it increases by one per emitted code).
-- NOTE(review): the case expression forces each step's result tuple
-- before the next list cell is built; keep it (rather than a lazy
-- where-binding) to preserve the benchmark's strictness behaviour.
encode' input v t
  = case (code_string input 0 v t) of { (input', n, t') ->
    n : encode' input' (v + 1) t'
    }
- code_string parses enough of the input string to produce one code and
- returns the remaining input , the code and a new code table .
-
- The first character is taken and its place found in the code table . The
- extension code table found for this character is then used as the lookup
- table for the next character .
-
- If a character is not found in the current table then output the code
- of the character associated with the current table and add the current
- character to the current table and assign it the next new code value .
- code_string parses enough of the input string to produce one code and
- returns the remaining input, the code and a new code table.
-
- The first character is taken and its place found in the code table. The
- extension code table found for this character is then used as the lookup
- table for the next character.
-
- If a character is not found in the current table then output the code
- of the character associated with the current table and add the current
- character to the current table and assign it the next new code value.
-}
-- Walk the prefix tree: c < k / c > k descend the left/right sibling
-- with the same input, while a match descends into the extension table
-- t with the rest of the input, remembering v as the code to emit if
-- the match ends there.
code_string input@(c : input2) old_code next_code (PT p@(PTE k v t) l r)
  | c < k = (f1 r1 p r)
  | c > k = (f2 r2 p l)
  | otherwise = (f3 r3 k v l r)
  where
    r1 = code_string input old_code next_code l
    r2 = code_string input old_code next_code r
    r3 = code_string input2 v next_code t
    -- Rebuild the tree node around the recursive result:
    f1 (input_l,nl,l2) p r = (input_l,nl,PT p l2 r)
    f2 (input_r,nr,r2) p l = (input_r,nr,PT p l r2)
    -- NOTE(review): the patterns here deliberately shadow the outer
    -- input2/k/v bindings — easy to misread, but intentional.
    f3 (input2,n,t2) k v l r = (input2, n, PT (PTE k v t2) l r)
-- Miss: emit the code accumulated so far and extend the table with c,
-- unless the 12-bit code space (4096 codes) is exhausted.
code_string input@(c : input_file2) old_code next_code PTNil
  | next_code >= 4096 = (input, old_code, PTNil)
  | otherwise = (input, old_code, PT (PTE c next_code PTNil) PTNil PTNil)
-- End of input: flush the pending code.
code_string [] old_code next_code code_table
  = ([], old_code, PTNil)
- We want the inital table to be balanced , but this is expensive to compute
- as a rebalance is needed evert two inserts ( yuk ! ) . So we do the ordinary
- infix - order binary tree insert but give the keys in such an order as to
- give a balanced tree .
-
- ( I would have defined the tree by hand but the constant was too big
- for )
- We want the inital table to be balanced, but this is expensive to compute
- as a rebalance is needed evert two inserts (yuk!). So we do the ordinary
- infix-order binary tree insert but give the keys in such an order as to
- give a balanced tree.
-
- (I would have defined the tree by hand but the constant was too big
- for hc-0.41)
-}
-- Build the 256-entry starting table.  balanced_list feeds the byte
-- values in an order chosen so that plain (non-rebalancing) insertion
-- still produces a balanced tree — see the comment above.
initial_table :: CodeTable
initial_table = foldr tab_insert PTNil balanced_list

-- Insert byte value n keyed by its character (insert comes from PTTrees).
tab_insert n = insert (toEnum n) n
balanced_list
= [128,64,32,16,8,4,2,1,0,3,6,5,7,12,10,9,11,14,13,15,24,20,18,17,19,22,
21,23,28,26,25,27,30,29,31,48,40,36,34,33,35,38,37,39,44,42,41,43,46,
45,47,56,52,50,49,51,54,53,55,60,58,57,59,62,61,63,96,80,72,68,66,65]
++ bal_list2 ++ bal_list3 ++ bal_list4 ++ bal_list5
bal_list2
= [67,70,69,71,76,74,73,75,78,77,79,88,84,82,81,83,86,85,87,92,90,89,91,
94,93,95,112,104,100,98,97,99,102,101,103,108,106,105,107,110,109,111,
120,116,114,113,115,118,117,119,124,122,121,123,126,125,127,192,160]
bal_list3
= [144,136,132,130,129,131,134,133,135,140,138,137,139,142,141,143,152,
148,146,145,147,150,149,151,156,154,153,155,158,157,159,176,168,164,
162,161,163,166,165,167,172,170,169,171,174,173,175,184,180,178,177]
bal_list4
= [179,182,181,183,188,186,185,187,190,189,191,224,208,200,196,194,193,
195,198,197,199,204,202,201,203,206,205,207,216,212,210,209,211,214,
213,215,220,218,217,219,222,221,223,240,232,228,226,225,227,230,229,
231,236,234,233,235,238,237,239,248,244,242,241,243,246,245,247,252]
bal_list5
= [250,249,251,254,253,255]
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/real/compress/Encode.hs | haskell | for convenience we make the code table type explicit
encode sets up the arguments for the real function. |
- Encode Mk 2 , using a prefix table for the codes
-
- , Systems Research , British Telecom Laboratories 1992
- Encode Mk 2, using a prefix table for the codes
-
- Paul Sanders, Systems Research, British Telecom Laboratories 1992
-}
module Encode (encode) where
import Defaults
import PTTrees
type CodeTable = PrefixTree Char Int
encode :: String -> [Int]
encode input = encode' input first_code initial_table
- encode ' loops through the input string assembling the codes produced
- by code_string . The first character is treated specially in that it
- is not added to the table ; its code is simply its ascii value .
- encode' loops through the input string assembling the codes produced
- by code_string. The first character is treated specially in that it
- is not added to the table; its code is simply its ascii value.
-}
encode' [] _ _
= []
encode' input v t
= case (code_string input 0 v t) of { (input', n, t') ->
n : encode' input' (v + 1) t'
}
- code_string parses enough of the input string to produce one code and
- returns the remaining input , the code and a new code table .
-
- The first character is taken and its place found in the code table . The
- extension code table found for this character is then used as the lookup
- table for the next character .
-
- If a character is not found in the current table then output the code
- of the character associated with the current table and add the current
- character to the current table and assign it the next new code value .
- code_string parses enough of the input string to produce one code and
- returns the remaining input, the code and a new code table.
-
- The first character is taken and its place found in the code table. The
- extension code table found for this character is then used as the lookup
- table for the next character.
-
- If a character is not found in the current table then output the code
- of the character associated with the current table and add the current
- character to the current table and assign it the next new code value.
-}
code_string input@(c : input2) old_code next_code (PT p@(PTE k v t) l r)
| c < k = (f1 r1 p r)
| c > k = (f2 r2 p l)
| otherwise = (f3 r3 k v l r)
where
r1 = code_string input old_code next_code l
r2 = code_string input old_code next_code r
r3 = code_string input2 v next_code t
f1 (input_l,nl,l2) p r = (input_l,nl,PT p l2 r)
f2 (input_r,nr,r2) p l = (input_r,nr,PT p l r2)
f3 (input2,n,t2) k v l r = (input2, n, PT (PTE k v t2) l r)
code_string input@(c : input_file2) old_code next_code PTNil
| next_code >= 4096 = (input, old_code, PTNil)
| otherwise = (input, old_code, PT (PTE c next_code PTNil) PTNil PTNil)
code_string [] old_code next_code code_table
= ([], old_code, PTNil)
- We want the inital table to be balanced , but this is expensive to compute
- as a rebalance is needed evert two inserts ( yuk ! ) . So we do the ordinary
- infix - order binary tree insert but give the keys in such an order as to
- give a balanced tree .
-
- ( I would have defined the tree by hand but the constant was too big
- for )
- We want the inital table to be balanced, but this is expensive to compute
- as a rebalance is needed evert two inserts (yuk!). So we do the ordinary
- infix-order binary tree insert but give the keys in such an order as to
- give a balanced tree.
-
- (I would have defined the tree by hand but the constant was too big
- for hc-0.41)
-}
initial_table :: CodeTable
initial_table = foldr tab_insert PTNil balanced_list
tab_insert n = insert (toEnum n) n
balanced_list
= [128,64,32,16,8,4,2,1,0,3,6,5,7,12,10,9,11,14,13,15,24,20,18,17,19,22,
21,23,28,26,25,27,30,29,31,48,40,36,34,33,35,38,37,39,44,42,41,43,46,
45,47,56,52,50,49,51,54,53,55,60,58,57,59,62,61,63,96,80,72,68,66,65]
++ bal_list2 ++ bal_list3 ++ bal_list4 ++ bal_list5
bal_list2
= [67,70,69,71,76,74,73,75,78,77,79,88,84,82,81,83,86,85,87,92,90,89,91,
94,93,95,112,104,100,98,97,99,102,101,103,108,106,105,107,110,109,111,
120,116,114,113,115,118,117,119,124,122,121,123,126,125,127,192,160]
bal_list3
= [144,136,132,130,129,131,134,133,135,140,138,137,139,142,141,143,152,
148,146,145,147,150,149,151,156,154,153,155,158,157,159,176,168,164,
162,161,163,166,165,167,172,170,169,171,174,173,175,184,180,178,177]
bal_list4
= [179,182,181,183,188,186,185,187,190,189,191,224,208,200,196,194,193,
195,198,197,199,204,202,201,203,206,205,207,216,212,210,209,211,214,
213,215,220,218,217,219,222,221,223,240,232,228,226,225,227,230,229,
231,236,234,233,235,238,237,239,248,244,242,241,243,246,245,247,252]
bal_list5
= [250,249,251,254,253,255]
|
3eae744eb3a9d6bde17a793383f54a2d0491d3a05f319fac5fc052701bc64b78 | mflatt/shrubbery-rhombus-0 | delta-text.rkt | #lang racket/base
(require racket/class)
(provide make-delta-text)
;; delta-text%: wraps another text-like object (`next`) and presents it
;; as if whitespace had been inserted (positive `delta`) or removed
;; (negative `delta`) at `at-pos`.  Positions below `pre` pass through
;; unchanged; positions at/after `post` are shifted by `delta`; the span
;; [pre, post) reads as a single white-space token.
(define delta-text%
  (class object%
    (init-field next    ; the text that this is a delta from
                at-pos  ; the start of a line where whitespace is inserted or deleted
                delta)  ; amount to insert or (when negative) delete
    (super-new)
    ;; No effect before this position:
    (define pre at-pos)
    ;; Simple token shifting after this position (new coordinates):
    (define post (let-values ([(s e) (send next get-token-range pre)])
                   (unless (= pre s) (error "bad delta construction"))
                   (+ e delta)))
    ;; The range from `pre` to `post` is a whitespace token,
    ;; either newly extended or newly truncated.
    (define/public (get-text s e)
      (cond
        [(e . <= . pre) (send next get-text s e)]
        [(s . >= . post)
         (send next get-text (- s delta) (- e delta))]
        [(s . < . pre)
         ;; straddles the left edge: split at `pre` and recur
         (string-append
          (get-text s pre)
          (get-text pre e))]
        [(e . > . post)
         ;; straddles the right edge: split at `post` and recur
         (string-append
          (get-text s post)
          (get-text post e))]
        [else
         ;; bounds are completely in whitespace region.
         ;; Fix: make-string takes the length first and the fill char
         ;; second; the arguments were swapped, so this branch raised a
         ;; contract error at runtime instead of returning spaces.
         (make-string (- e s) #\space)]))
    ;; Classification: delegate outside the synthetic span, report
    ;; 'white-space inside it.
    (define/public (classify-position* pos)
      (cond
        [(pos . < . pre)
         (send next classify-position* pos)]
        [(pos . >= . post)
         (send next classify-position* (- pos delta))]
        [else 'white-space]))
    ;; Collapse attribute-hash results down to their 'type field.
    (define/public (classify-position pos)
      (define type (classify-position* pos))
      (if (hash? type) (hash-ref type 'type 'unknown) type))
    (define/public (get-token-range pos)
      (cond
        [(pos . < . pre)
         (send next get-token-range pos)]
        [(pos . >= . post)
         (define-values (s e) (send next get-token-range (- pos delta)))
         (values (+ s delta) (+ e delta))]
        [else (values pre post)]))
    ;; Map an outer (shifted) position back into `next`'s coordinates.
    (define/private (shift-in r)
      (if (r . < . pre)
          r
          (max pre (- r delta))))
    ;; Map one of `next`'s positions out to shifted coordinates;
    ;; biased to the end of inserted whitespace.  Preserves #f.
    (define/private (shift-out r)
      (and r
           (cond
             [(r . <= . pre) r]
             [else (max pre (+ r delta))])))
    (define/public (last-position)
      (shift-out (send next last-position)))
    (define/public (position-paragraph pos [eol? #f])
      (send next position-paragraph (shift-in pos) eol?))
    (define/public (paragraph-start-position para)
      (shift-out (send next paragraph-start-position para)))
    (define/public (paragraph-end-position para)
      (shift-out (send next paragraph-end-position para)))
    (define/public (backward-match pos cutoff)
      (shift-out (send next backward-match (shift-in pos) (shift-in cutoff))))
    (define/public (forward-match pos cutoff)
      (shift-out (send next forward-match (shift-in pos) (shift-in cutoff))))))
;; Construct a delta-text% over text `t` with the given insertion point
;; and (possibly negative) whitespace delta.
(define (make-delta-text t at-pos delta)
  (new delta-text%
       [delta delta]
       [at-pos at-pos]
       [next t]))
| null | https://raw.githubusercontent.com/mflatt/shrubbery-rhombus-0/3a27b257a49d2248a379d06dc15cee7c8959459a/shrubbery/private/delta-text.rkt | racket | the text that this is a delta from
the start of a line where whitespace is inserted or deleted
amount to insert or (when negative) delete
No effect before this position:
Simple token shifting after this position (new coordinates):
The range from `pre` to `post` is a whitespace token,
either newly extended to newly truncated
bounds are completely in whitespace region:
biased to the end of inserted whitespace | #lang racket/base
(require racket/class)
(provide make-delta-text)
(define delta-text%
(class object%
(super-new)
(define pre at-pos)
(define post (let-values ([(s e) (send next get-token-range pre)])
(unless (= pre s) (error "bad delta construction"))
(+ e delta)))
(define/public (get-text s e)
(cond
[(e . <= . pre) (send next get-text s e)]
[(s . >= . post)
(send next get-text (- s delta) (- e delta))]
[(s . < . pre)
(string-append
(get-text s pre)
(get-text pre e))]
[(e . > . post)
(string-append
(get-text s post)
(get-text post e))]
[else
(make-string #\space (- e s))]))
(define/public (classify-position* pos)
(cond
[(pos . < . pre)
(send next classify-position* pos)]
[(pos . >= . post)
(send next classify-position* (- pos delta))]
[else 'white-space]))
(define/public (classify-position pos)
(define type (classify-position* pos))
(if (hash? type) (hash-ref type 'type 'unknown) type))
(define/public (get-token-range pos)
(cond
[(pos . < . pre)
(send next get-token-range pos)]
[(pos . >= . post)
(define-values (s e) (send next get-token-range (- pos delta)))
(values (+ s delta) (+ e delta))]
[else (values pre post)]))
(define/private (shift-in r)
(if (r . < . pre)
r
(max pre (- r delta))))
(define/private (shift-out r)
(and r
(cond
[(r . <= . pre) r]
[else (max pre (+ r delta))])))
(define/public (last-position)
(shift-out (send next last-position)))
(define/public (position-paragraph pos [eol? #f])
(send next position-paragraph (shift-in pos) eol?))
(define/public (paragraph-start-position para)
(shift-out (send next paragraph-start-position para)))
(define/public (paragraph-end-position para)
(shift-out (send next paragraph-end-position para)))
(define/public (backward-match pos cutoff)
(shift-out (send next backward-match (shift-in pos) (shift-in cutoff))))
(define/public (forward-match pos cutoff)
(shift-out (send next forward-match (shift-in pos) (shift-in cutoff))))))
(define (make-delta-text t at-pos delta)
(new delta-text%
[next t]
[at-pos at-pos]
[delta delta]))
|
543ba85e623bcb003e87558f0e37f09fff76ad8cbb281ebf7dee71b6dce4bddb | yetanalytics/flint | query.cljc | (ns com.yetanalytics.flint.format.query
(:require [clojure.string :as cstr]
[com.yetanalytics.flint.format :as f]
[com.yetanalytics.flint.format.axiom]
[com.yetanalytics.flint.format.prologue]
[com.yetanalytics.flint.format.triple]
[com.yetanalytics.flint.format.modifier]
[com.yetanalytics.flint.format.select]
[com.yetanalytics.flint.format.where]
[com.yetanalytics.flint.format.values]))
;; CONSTRUCT clause: brace-wrap the joined triples when any are present,
;; otherwise emit the bare keyword (short form).
(defmethod f/format-ast-node :construct [{:keys [pretty?]} [_ construct]]
  (if (seq construct)
    (str "CONSTRUCT "
         (f/wrap-in-braces (f/join-clauses construct pretty?) pretty?))
    "CONSTRUCT"))
(defmethod f/format-ast-node :describe/vars-or-iris [_ [_ var-or-iris]]
(cstr/join " " var-or-iris))
(defmethod f/format-ast-node :describe [_ [_ describe]]
(str "DESCRIBE " describe))
(defmethod f/format-ast-node :ask [_ _]
"ASK")
(defmethod f/format-ast-node :from [_ [_ iri]]
(str "FROM " iri))
(defmethod f/format-ast-node :from-named [{:keys [pretty?]} [_ iri-coll]]
(-> (map (fn [iri] (str "FROM NAMED " iri)) iri-coll)
(f/join-clauses pretty?)))
(defmethod f/format-ast-node :query/select [{:keys [pretty?]} [_ select-query]]
(f/join-clauses select-query pretty?))
(defmethod f/format-ast-node :query/construct [{:keys [pretty?]} [_ construct-query]]
(f/join-clauses construct-query pretty?))
(defmethod f/format-ast-node :query/describe [{:keys [pretty?]} [_ describe-query]]
(f/join-clauses describe-query pretty?))
(defmethod f/format-ast-node :query/ask [{:keys [pretty?]} [_ ask-query]]
(f/join-clauses ask-query pretty?))
| null | https://raw.githubusercontent.com/yetanalytics/flint/85a5435ce9e04dd7e16697783dffd05a6dc240cb/src/main/com/yetanalytics/flint/format/query.cljc | clojure | (ns com.yetanalytics.flint.format.query
(:require [clojure.string :as cstr]
[com.yetanalytics.flint.format :as f]
[com.yetanalytics.flint.format.axiom]
[com.yetanalytics.flint.format.prologue]
[com.yetanalytics.flint.format.triple]
[com.yetanalytics.flint.format.modifier]
[com.yetanalytics.flint.format.select]
[com.yetanalytics.flint.format.where]
[com.yetanalytics.flint.format.values]))
(defmethod f/format-ast-node :construct [{:keys [pretty?]} [_ construct]]
(if (not-empty construct)
(str "CONSTRUCT " (-> construct
(f/join-clauses pretty?)
(f/wrap-in-braces pretty?)))
"CONSTRUCT"))
(defmethod f/format-ast-node :describe/vars-or-iris [_ [_ var-or-iris]]
(cstr/join " " var-or-iris))
(defmethod f/format-ast-node :describe [_ [_ describe]]
(str "DESCRIBE " describe))
(defmethod f/format-ast-node :ask [_ _]
"ASK")
(defmethod f/format-ast-node :from [_ [_ iri]]
(str "FROM " iri))
(defmethod f/format-ast-node :from-named [{:keys [pretty?]} [_ iri-coll]]
(-> (map (fn [iri] (str "FROM NAMED " iri)) iri-coll)
(f/join-clauses pretty?)))
(defmethod f/format-ast-node :query/select [{:keys [pretty?]} [_ select-query]]
(f/join-clauses select-query pretty?))
(defmethod f/format-ast-node :query/construct [{:keys [pretty?]} [_ construct-query]]
(f/join-clauses construct-query pretty?))
(defmethod f/format-ast-node :query/describe [{:keys [pretty?]} [_ describe-query]]
(f/join-clauses describe-query pretty?))
(defmethod f/format-ast-node :query/ask [{:keys [pretty?]} [_ ask-query]]
(f/join-clauses ask-query pretty?))
| |
725d151ce6b11aeca4eda9235f12847347e2a3a7b65e362ccedebffd81915dae | mhuebert/re-db | dev.clj | (ns re-db.dev
(:require [nextjournal.clerk :as clerk]
[nextjournal.clerk.config :as config]
[shadow.cljs.devtools.api :as shadow]))
(defn start
{:shadow/requires-server true}
[]
;; local experimenting
(try (compile 're-db.scratch.Suspension) (catch Exception e nil))
(shadow/watch :clerk)
(swap! config/!resource->url merge {"/js/viewer.js" ":8008/clerk/clerk.js"})
(clerk/serve! {:browse? true
:watch-paths ["src/notebooks/re_db/notebooks"]
:show-filter-fn #(re-find #"notebooks/[^/]+\.clj\w?" %)})
(eval '(do (in-ns 'nextjournal.clerk.analyzer)
(defn hash-codeblock [->hash {:as codeblock :keys [hash form id deps vars]}]
(let [->hash' (if (and (not (ifn? ->hash)) (seq deps))
(binding [*out* *err*]
(println "->hash must be `ifn?`" {:->hash ->hash :codeblock codeblock})
identity)
->hash)
hashed-deps (into #{} (map ->hash') deps)]
(sha1-base58 (binding [*print-length* nil]
(pr-str (set/union (conj hashed-deps id)
vars)))))))))
(comment
(start)
(clerk/clear-cache!)
(do (in-ns 'nextjournal.clerk.analyzer)
(defn hash-codeblock [->hash {:as codeblock :keys [hash form id deps vars]}]
(let [->hash' (if (and (not (ifn? ->hash)) (seq deps))
(binding [*out* *err*]
(println "->hash must be `ifn?`" {:->hash ->hash :codeblock codeblock})
identity)
->hash)
hashed-deps (into #{} (map ->hash') deps)]
(sha1-base58 (binding [*print-length* nil]
(pr-str (set/union (conj hashed-deps id)
vars)))))))) | null | https://raw.githubusercontent.com/mhuebert/re-db/54ed1c2c4c47b4344710ff881c5a9ef1f2b5faed/src/notebooks/re_db/dev.clj | clojure | local experimenting | (ns re-db.dev
(:require [nextjournal.clerk :as clerk]
[nextjournal.clerk.config :as config]
[shadow.cljs.devtools.api :as shadow]))
(defn start
{:shadow/requires-server true}
[]
(try (compile 're-db.scratch.Suspension) (catch Exception e nil))
(shadow/watch :clerk)
(swap! config/!resource->url merge {"/js/viewer.js" ":8008/clerk/clerk.js"})
(clerk/serve! {:browse? true
:watch-paths ["src/notebooks/re_db/notebooks"]
:show-filter-fn #(re-find #"notebooks/[^/]+\.clj\w?" %)})
(eval '(do (in-ns 'nextjournal.clerk.analyzer)
(defn hash-codeblock [->hash {:as codeblock :keys [hash form id deps vars]}]
(let [->hash' (if (and (not (ifn? ->hash)) (seq deps))
(binding [*out* *err*]
(println "->hash must be `ifn?`" {:->hash ->hash :codeblock codeblock})
identity)
->hash)
hashed-deps (into #{} (map ->hash') deps)]
(sha1-base58 (binding [*print-length* nil]
(pr-str (set/union (conj hashed-deps id)
vars)))))))))
(comment
(start)
(clerk/clear-cache!)
(do (in-ns 'nextjournal.clerk.analyzer)
(defn hash-codeblock [->hash {:as codeblock :keys [hash form id deps vars]}]
(let [->hash' (if (and (not (ifn? ->hash)) (seq deps))
(binding [*out* *err*]
(println "->hash must be `ifn?`" {:->hash ->hash :codeblock codeblock})
identity)
->hash)
hashed-deps (into #{} (map ->hash') deps)]
(sha1-base58 (binding [*print-length* nil]
(pr-str (set/union (conj hashed-deps id)
vars)))))))) |
1a4e0801277ff6d8bb03d5c18ae89df8340b9ff1797c10a7320c2d1bb206141e | eburlingame/arinc-parser | field_defs.clj | (ns arinc424.field-defs
(:require [arinc424.fields.route-type :refer :all]
[arinc424.fields.latlong :refer :all]
[arinc424.fields.navaid-class :refer :all]
[arinc424.helpers :refer :all]))
Types -plane.com/update/data/424-15s.pdf ( Ch . 5 , pg . 66 )
; TODO: Add spec defns here
; Field structure:
; FIELDs looks like:
;
; {
; :len 20
: match " [ A - Z]{1 , 4 } "
; :examples ["example1" "example2" ...]
; :value-fn (fn (value) result)
; }
; or
; {
; :len 10
; :examples ["example1" "example2" ...]
; :values {
' key1 :
' key2 : value2
; ...
; }
; }
; if the :values struct is specified, then the field will match only keys in the :values map
; if only the match is specified, then the string will be returned when it is parsed
; Example field elements
;
; FIELDs can look like:
;
; FIELD
;
; or
; {
; :len 20
; :examples ["example1" "example2" ...]
; :sections {
; 'section-code FIELD
; 'another-section-code FIELD
; }
; }
; or
; {
; :len 20
; :examples ["example1" "example2" ...]
; :sections {
; ['section-code 'subsection-code] FIELD
; ['another-section-code 'another-subsection-code] FIELD
; }
; }
; You can either put the length in each FIELD element, or at the top-level map
(def field-defs
{
Record type ( 5.2 )
:record-type
{
:len 1
:values
{
"S" :standard
"T" :tailored
}
:examples ["S" "T"]
}
:customer-area-code
{
:len 3
:values
{
"USA" :usa
"EEU" :eeu
"EUR" :eur
"MES" :mes
"PAC" :pac
"CAN" :can
"LAM" :lam
"SPA" :spa
"SAM" :sam
"AFR" :afr
}
:examples ["EEU" "CAN" "AFR"]
}
5.6 pg 69
:airport-icao-ident
{
:len 4
:matches #"[A-Z0-9|\s]{4}"
:examples ["KJFK" "DMIA" "9Y9" "CYUL" "EDDF" "53Y" "CA14"]
}
:route-type
{
:len 1
:sections {
Page 70
[:enroute :airway-and-route] {:values enroute-airway-route-type-values}
Page 70
[:enroute :preferred-route] {:values enroute-preferred-route-type-values}
Page 71
[:airport :sid] {:values airport-heliport-sid-route-type-values}
[:heliport :sid] {:values airport-heliport-sid-route-type-values}
Page 71
[:airport :star] {:values airport-heliport-star-route-type-values}
[:heliport :star] {:values airport-heliport-star-route-type-values}
Page 71
[:heliport :approach-procedure] {
:matches #"(A|B|E|F|G|I|J|K|L|M|N|P|R|T|U|V|W|X|Y|Z)(D|N|T|P|S| )(C|S|H| )"
:examples ["ANC" "LSH" "V S" "VDS" "LDS"]
:value-fn airport-heliport-approach-route-type-values}
}
}
Page 71
:route-ident
{
:sections {
[:enroute :airway-and-route] {
:len 5
:matches #"[A-Z0-9|\s]{5}"
:examples ["V216" "C1150", "J380" "UA16" "UB414"]
}
[:enroute :preferred-route] {
:len 10
:matches #"[A-Z0-9|\s]{10}"
:examples ["N111B" "TOS13" "S14WK" "CYYLCYYC" "TCOLAR" "KZTLKSAV" "NDICANRY"]
}
}
}
Page 74
:icao-code
{
:len 2
:matches #"[A-Z0-9|\s]{2}"
:example ["K1" "K7" "PA" "MM" "EG" "UT"]
}
Page 74
:continuation-record-num
{
:len 1
:matches #"[A-Z0-9]"
:value-fn zero-through-z-value
}
:vor-ndb-freq
{
:len 5
:sections
{
[:navaid :vhf-navaid]
{
:matches #"[0-9]{5}"
:value-fn #(insert-decimal % 2)
}
[:navaid :ndb-navaid]
{
:matches #"[0-9]{5}"
:value-fn #(insert-decimal % 1)
}
}
}
:navaid-class
{
:len 5
:matches #"[A-Z|\s]{5}"
:value-fn navaid-class-value
}
5.36 pg . 85
:latitude
{
:len 9
:examples ["N39513881"]
:matches #"(N|S)(\d{2})(\d{2})(\d{4})"
:value-fn latitude-value
}
5.37 pg . 85
:longitude
{
:len 10
:examples ["W104450794"]
:matches #"(E|W)(\d{3})(\d{2})(\d{4})"
:value-fn longitude-value
}
5.31 pg . 83
:vor-ndb-ident
{
:len 4
:examples ["DEN " "6YA " "PPI " "TIKX"]
:matches #"[A-Z0-9|\s]{4}"
}
5.38 pg . 86
:dme-ident
{
:len 4
:examples ["MCR " "DEN " "IDVR" "DN " " "]
:matches #".{4}"
}
5.66 pg . 91
:station-declination
{
:len 5
:examples [""]
:matches #"(E|W|T|G)(\d{4})"
:value-fn station-declination-value
}
5.40 pg . 86
:dme-elevation
{
:len 5
:examples ["00530", "-0140"]
:matches #"(-|\d\d{4})"
:value-fn parse-int
}
5.149 pg . 112
:figure-of-merit
{
:len 1
:examples ["0" "3"]
:values {"0" :terminal-use
"1" :low-altitude-use
"2" :high-altitude-use
"3" :extended-high-altitude-use
"9" :out-of-service}
}
5.90 pg . 98
:ils-dme-bias
{
:len 2
:examples ["13" "91"]
:matches #"(\d\d)| "
:value-fn #(if (= " " %) nil (insert-decimal % 1))
}
5.150 pg . 112
:frequency-protection-distance
{
:len 3
:examples ["030" "150" "600"]
:matches #"\d\d\d| "
:value-fn parse-int
}
5.197 pg . 95
:datum-code
{
:len 3
:matches #"..."
:examples ["AGD" "NAS" "WGA"]
}
5.71 pg . 3.27
:vor-name
{
:len 30
:matches #".{30}"
}
5.31 pg . 83
:file-record-num
{
:len 5
:matches #"\d{5}"
:value-fn parse-int
}
5.32 pg . 83
:cycle-data
{
:len 4
:matches #"\d\d\d\d"
:value-fn (fn [val] (match-regex val
[#"(\d\d)(\d\d)" #(hash-map :year (parse-int %1)
:cycle (parse-int %2))]))
}
}) | null | https://raw.githubusercontent.com/eburlingame/arinc-parser/1bef86924aef21888c27301bf51af90262ec4c52/src/arinc424/field_defs.clj | clojure | TODO: Add spec defns here
Field structure:
FIELDs looks like:
{
:len 20
:examples ["example1" "example2" ...]
:value-fn (fn (value) result)
}
or
{
:len 10
:examples ["example1" "example2" ...]
:values {
...
}
}
if the :values struct is specified, then the field will match only keys in the :values map
if only the match is specified, then the string will be returned when it is parsed
Example field elements
FIELDs can look like:
FIELD
or
{
:len 20
:examples ["example1" "example2" ...]
:sections {
'section-code FIELD
'another-section-code FIELD
}
}
or
{
:len 20
:examples ["example1" "example2" ...]
:sections {
['section-code 'subsection-code] FIELD
['another-section-code 'another-subsection-code] FIELD
}
}
You can either put the length in each FIELD element, or at the top-level map | (ns arinc424.field-defs
(:require [arinc424.fields.route-type :refer :all]
[arinc424.fields.latlong :refer :all]
[arinc424.fields.navaid-class :refer :all]
[arinc424.helpers :refer :all]))
Types -plane.com/update/data/424-15s.pdf ( Ch . 5 , pg . 66 )
: match " [ A - Z]{1 , 4 } "
' key1 :
' key2 : value2
(def field-defs
{
Record type ( 5.2 )
:record-type
{
:len 1
:values
{
"S" :standard
"T" :tailored
}
:examples ["S" "T"]
}
:customer-area-code
{
:len 3
:values
{
"USA" :usa
"EEU" :eeu
"EUR" :eur
"MES" :mes
"PAC" :pac
"CAN" :can
"LAM" :lam
"SPA" :spa
"SAM" :sam
"AFR" :afr
}
:examples ["EEU" "CAN" "AFR"]
}
5.6 pg 69
:airport-icao-ident
{
:len 4
:matches #"[A-Z0-9|\s]{4}"
:examples ["KJFK" "DMIA" "9Y9" "CYUL" "EDDF" "53Y" "CA14"]
}
:route-type
{
:len 1
:sections {
Page 70
[:enroute :airway-and-route] {:values enroute-airway-route-type-values}
Page 70
[:enroute :preferred-route] {:values enroute-preferred-route-type-values}
Page 71
[:airport :sid] {:values airport-heliport-sid-route-type-values}
[:heliport :sid] {:values airport-heliport-sid-route-type-values}
Page 71
[:airport :star] {:values airport-heliport-star-route-type-values}
[:heliport :star] {:values airport-heliport-star-route-type-values}
Page 71
[:heliport :approach-procedure] {
:matches #"(A|B|E|F|G|I|J|K|L|M|N|P|R|T|U|V|W|X|Y|Z)(D|N|T|P|S| )(C|S|H| )"
:examples ["ANC" "LSH" "V S" "VDS" "LDS"]
:value-fn airport-heliport-approach-route-type-values}
}
}
Page 71
:route-ident
{
:sections {
[:enroute :airway-and-route] {
:len 5
:matches #"[A-Z0-9|\s]{5}"
:examples ["V216" "C1150", "J380" "UA16" "UB414"]
}
[:enroute :preferred-route] {
:len 10
:matches #"[A-Z0-9|\s]{10}"
:examples ["N111B" "TOS13" "S14WK" "CYYLCYYC" "TCOLAR" "KZTLKSAV" "NDICANRY"]
}
}
}
Page 74
:icao-code
{
:len 2
:matches #"[A-Z0-9|\s]{2}"
:example ["K1" "K7" "PA" "MM" "EG" "UT"]
}
Page 74
:continuation-record-num
{
:len 1
:matches #"[A-Z0-9]"
:value-fn zero-through-z-value
}
:vor-ndb-freq
{
:len 5
:sections
{
[:navaid :vhf-navaid]
{
:matches #"[0-9]{5}"
:value-fn #(insert-decimal % 2)
}
[:navaid :ndb-navaid]
{
:matches #"[0-9]{5}"
:value-fn #(insert-decimal % 1)
}
}
}
:navaid-class
{
:len 5
:matches #"[A-Z|\s]{5}"
:value-fn navaid-class-value
}
5.36 pg . 85
:latitude
{
:len 9
:examples ["N39513881"]
:matches #"(N|S)(\d{2})(\d{2})(\d{4})"
:value-fn latitude-value
}
5.37 pg . 85
:longitude
{
:len 10
:examples ["W104450794"]
:matches #"(E|W)(\d{3})(\d{2})(\d{4})"
:value-fn longitude-value
}
5.31 pg . 83
:vor-ndb-ident
{
:len 4
:examples ["DEN " "6YA " "PPI " "TIKX"]
:matches #"[A-Z0-9|\s]{4}"
}
5.38 pg . 86
:dme-ident
{
:len 4
:examples ["MCR " "DEN " "IDVR" "DN " " "]
:matches #".{4}"
}
5.66 pg . 91
:station-declination
{
:len 5
:examples [""]
:matches #"(E|W|T|G)(\d{4})"
:value-fn station-declination-value
}
5.40 pg . 86
:dme-elevation
{
:len 5
:examples ["00530", "-0140"]
:matches #"(-|\d\d{4})"
:value-fn parse-int
}
5.149 pg . 112
:figure-of-merit
{
:len 1
:examples ["0" "3"]
:values {"0" :terminal-use
"1" :low-altitude-use
"2" :high-altitude-use
"3" :extended-high-altitude-use
"9" :out-of-service}
}
5.90 pg . 98
:ils-dme-bias
{
:len 2
:examples ["13" "91"]
:matches #"(\d\d)| "
:value-fn #(if (= " " %) nil (insert-decimal % 1))
}
5.150 pg . 112
:frequency-protection-distance
{
:len 3
:examples ["030" "150" "600"]
:matches #"\d\d\d| "
:value-fn parse-int
}
5.197 pg . 95
:datum-code
{
:len 3
:matches #"..."
:examples ["AGD" "NAS" "WGA"]
}
5.71 pg . 3.27
:vor-name
{
:len 30
:matches #".{30}"
}
5.31 pg . 83
:file-record-num
{
:len 5
:matches #"\d{5}"
:value-fn parse-int
}
5.32 pg . 83
:cycle-data
{
:len 4
:matches #"\d\d\d\d"
:value-fn (fn [val] (match-regex val
[#"(\d\d)(\d\d)" #(hash-map :year (parse-int %1)
:cycle (parse-int %2))]))
}
}) |
7834c9668f586361f53d764db99fbb57d41936886457c09b9559d478e6b271bd | mnieper/unsyntax | runtime-exports.scm | Copyright © ( 2020 ) .
;; This file is part of unsyntax.
;; Permission is hereby granted, free of charge, to any person
;; obtaining a copy of this software and associated documentation files
( the " Software " ) , to deal in the Software without restriction ,
;; including without limitation the rights to use, copy, modify, merge,
publish , distribute , sublicense , and/or sell copies of the Software ,
and to permit persons to whom the Software is furnished to do so ,
;; subject to the following conditions:
;; The above copyright notice and this permission notice (including the
;; next paragraph) shall be included in all copies or substantial
portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN
;; ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
;; CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
;; SOFTWARE.
(export
Core syntax
begin
case-lambda
define
define-record-type
define-values
delay
delay-force
lambda
if
let
let-values
let*
letrec
parameterize
set!
quote
;; Base library
*
+
-
/
<
<=
=
=>
>
>=
abs
append
apply
assoc
assq
assv
binary-port?
boolean=?
boolean?
bytevector
bytevector-append
bytevector-copy
bytevector-copy!
bytevector-length
bytevector-u8-ref
bytevector-u8-set!
bytevector?
caar
cadr
call-with-current-continuation
call-with-port
call-with-values
call/cc
car
cdar
cddr
cdr
ceiling
char->integer
char-ready?
char<=?
char<?
char=?
char>=?
char>?
char?
close-input-port
close-output-port
close-port
complex?
cons
current-error-port
current-input-port
current-output-port
denominator
dynamic-wind
eof-object
eof-object?
eq?
equal?
eqv?
error
error-object-irritants
error-object-message
error-object?
even?
exact
exact-integer-sqrt
exact-integer?
exact?
expt
;;features
file-error?
floor
floor-quotient
floor-remainder
floor/
flush-output-port
for-each
gcd
get-output-bytevector
get-output-string
inexact
inexact?
input-port-open?
input-port?
integer->char
integer?
lcm
length
list
list->string
list->vector
list-copy
list-ref
list-set!
list-tail
list?
make-bytevector
make-list
make-parameter
make-string
make-vector
map
max
member
memq
memv
min
modulo
negative?
newline
not
null?
number->string
number?
numerator
odd?
open-input-bytevector
open-input-string
open-output-bytevector
open-output-string
output-port-open?
output-port?
pair?
peek-char
peek-u8
port?
positive?
procedure?
quotient
raise
raise-continuable
rational?
rationalize
read-bytevector
read-bytevector!
read-char
read-error?
read-line
read-string
read-u8
real?
remainder
reverse
round
set-car!
set-cdr!
square
string
string->list
string->number
string->symbol
string->utf8
string->vector
string-append
string-copy
string-copy!
string-fill!
string-for-each
string-length
string-map
string-ref
string-set!
string<=?
string<?
string=?
string>=?
string>?
string?
substring
symbol->string
symbol=?
symbol?
textual-port?
truncate
truncate-quotient
truncate-remainder
truncate/
u8-ready?
utf8->string
values
vector
vector->list
vector->string
vector-append
vector-copy
vector-copy!
vector-fill!
vector-for-each
vector-length
vector-map
vector-ref
vector-set!
vector?
with-exception-handler
write-bytevector
write-char
write-string
write-u8
zero?
CxR Library
caaaar
caaadr
caaar
caadar
caaddr
caadr
cadaar
cadadr
cadar
caddar
cadddr
caddr
cdaaar
cdaadr
cdaar
cdadar
cdaddr
cdadr
cddaar
cddadr
cddar
cdddar
cddddr
cdddr
Inexact Library
acos
asin
atan
cos
exp
finite?
infinite?
log
nan?
sin
sqrt
tan
Lazy Library
force
make-promise
promise?
Process - Context Library
host-command-line
emergency-exit
exit
get-environment-variable
get-environment-variables
Time Library
current-jiffy
current-second
jiffies-per-second
Write Library
display
write
SRFI 1
cons*
every
;; Errors
raise-syntax-error
;; Feature identifiers
host-features
features
;; Host evaluator
host-eval
host-environment
>=
-
append
equal?
car
cdr
cons
fold-right
null?
list
list->vector
reverse
vector
set-global!
;; Syntax
syntax->datum
syntax-null?
syntax-pair?
syntax-car
syntax-cdr
syntax-length+
syntax->list
syntax-split-at
syntax-vector?
syntax-vector->list
;; Identifiers
identifier?
free-identifier=?
;; Store
set-keyword!
set-property!
Meta definitions
arguments->vector
meta-unbox
meta-set-box!
SRFI 111
set-box!
unbox
SRFI 125
make-hash-table
hash-table
hash-table-unfold
alist->hash-table
hash-table?
hash-table-contains?
hash-table-empty?
hash-table=?
hash-table-mutable?
hash-table-ref
hash-table-ref/default
hash-table-set!
hash-table-delete!
hash-table-intern!
hash-table-update!
hash-table-update!/default
hash-table-pop!
hash-table-clear!
hash-table-size
hash-table-keys
hash-table-values
hash-table-entries
hash-table-find
hash-table-count
hash-table-map
hash-table-for-each
hash-table-map!
hash-table-map->list
hash-table-fold
hash-table-prune!
hash-table-copy
hash-table-empty-copy
hash-table->alist
hash-table-union!
hash-table-intersection!
hash-table-difference!
hash-table-xor!
SRFI 128
comparator? comparator-ordered? comparator-hashable?
make-comparator
make-pair-comparator make-list-comparator make-vector-comparator
make-eq-comparator make-eqv-comparator make-equal-comparator
boolean-hash char-hash char-ci-hash
string-hash string-ci-hash symbol-hash number-hash
make-default-comparator default-hash comparator-register-default!
comparator-type-test-predicate comparator-equality-predicate
comparator-ordering-predicate comparator-hash-function
comparator-test-type comparator-check-type comparator-hash
hash-bound hash-salt
=? <? >? <=? >=?
comparator-if<=>
comparator-max comparator-min
comparator-max-in-list comparator-min-in-list
default-comparator boolean-comparator real-comparator
char-comparator char-ci-comparator
string-comparator string-ci-comparator
list-comparator vector-comparator
eq-comparator eqv-comparator equal-comparator
SRFI 27
random-integer
random-real
default-random-source
make-random-source
random-source?
random-source-state-ref
random-source-state-set!
random-source-randomize!
random-source-pseudo-randomize!
random-source-make-integers
random-source-make-reals
)
| null | https://raw.githubusercontent.com/mnieper/unsyntax/cd12891805a93229255ff0f2c46cf0e2b5316c7c/src/unsyntax/stdlibs/runtime-exports.scm | scheme | This file is part of unsyntax.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
including without limitation the rights to use, copy, modify, merge,
subject to the following conditions:
The above copyright notice and this permission notice (including the
next paragraph) shall be included in all copies or substantial
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Base library
features
Errors
Feature identifiers
Host evaluator
Syntax
Identifiers
Store | Copyright © ( 2020 ) .
( the " Software " ) , to deal in the Software without restriction ,
publish , distribute , sublicense , and/or sell copies of the Software ,
and to permit persons to whom the Software is furnished to do so ,
portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN
(export
Core syntax
begin
case-lambda
define
define-record-type
define-values
delay
delay-force
lambda
if
let
let-values
let*
letrec
parameterize
set!
quote
*
+
-
/
<
<=
=
=>
>
>=
abs
append
apply
assoc
assq
assv
binary-port?
boolean=?
boolean?
bytevector
bytevector-append
bytevector-copy
bytevector-copy!
bytevector-length
bytevector-u8-ref
bytevector-u8-set!
bytevector?
caar
cadr
call-with-current-continuation
call-with-port
call-with-values
call/cc
car
cdar
cddr
cdr
ceiling
char->integer
char-ready?
char<=?
char<?
char=?
char>=?
char>?
char?
close-input-port
close-output-port
close-port
complex?
cons
current-error-port
current-input-port
current-output-port
denominator
dynamic-wind
eof-object
eof-object?
eq?
equal?
eqv?
error
error-object-irritants
error-object-message
error-object?
even?
exact
exact-integer-sqrt
exact-integer?
exact?
expt
file-error?
floor
floor-quotient
floor-remainder
floor/
flush-output-port
for-each
gcd
get-output-bytevector
get-output-string
inexact
inexact?
input-port-open?
input-port?
integer->char
integer?
lcm
length
list
list->string
list->vector
list-copy
list-ref
list-set!
list-tail
list?
make-bytevector
make-list
make-parameter
make-string
make-vector
map
max
member
memq
memv
min
modulo
negative?
newline
not
null?
number->string
number?
numerator
odd?
open-input-bytevector
open-input-string
open-output-bytevector
open-output-string
output-port-open?
output-port?
pair?
peek-char
peek-u8
port?
positive?
procedure?
quotient
raise
raise-continuable
rational?
rationalize
read-bytevector
read-bytevector!
read-char
read-error?
read-line
read-string
read-u8
real?
remainder
reverse
round
set-car!
set-cdr!
square
string
string->list
string->number
string->symbol
string->utf8
string->vector
string-append
string-copy
string-copy!
string-fill!
string-for-each
string-length
string-map
string-ref
string-set!
string<=?
string<?
string=?
string>=?
string>?
string?
substring
symbol->string
symbol=?
symbol?
textual-port?
truncate
truncate-quotient
truncate-remainder
truncate/
u8-ready?
utf8->string
values
vector
vector->list
vector->string
vector-append
vector-copy
vector-copy!
vector-fill!
vector-for-each
vector-length
vector-map
vector-ref
vector-set!
vector?
with-exception-handler
write-bytevector
write-char
write-string
write-u8
zero?
CxR Library
caaaar
caaadr
caaar
caadar
caaddr
caadr
cadaar
cadadr
cadar
caddar
cadddr
caddr
cdaaar
cdaadr
cdaar
cdadar
cdaddr
cdadr
cddaar
cddadr
cddar
cdddar
cddddr
cdddr
Inexact Library
acos
asin
atan
cos
exp
finite?
infinite?
log
nan?
sin
sqrt
tan
Lazy Library
force
make-promise
promise?
Process - Context Library
host-command-line
emergency-exit
exit
get-environment-variable
get-environment-variables
Time Library
current-jiffy
current-second
jiffies-per-second
Write Library
display
write
SRFI 1
cons*
every
raise-syntax-error
host-features
features
host-eval
host-environment
>=
-
append
equal?
car
cdr
cons
fold-right
null?
list
list->vector
reverse
vector
set-global!
syntax->datum
syntax-null?
syntax-pair?
syntax-car
syntax-cdr
syntax-length+
syntax->list
syntax-split-at
syntax-vector?
syntax-vector->list
identifier?
free-identifier=?
set-keyword!
set-property!
Meta definitions
arguments->vector
meta-unbox
meta-set-box!
SRFI 111
set-box!
unbox
SRFI 125
make-hash-table
hash-table
hash-table-unfold
alist->hash-table
hash-table?
hash-table-contains?
hash-table-empty?
hash-table=?
hash-table-mutable?
hash-table-ref
hash-table-ref/default
hash-table-set!
hash-table-delete!
hash-table-intern!
hash-table-update!
hash-table-update!/default
hash-table-pop!
hash-table-clear!
hash-table-size
hash-table-keys
hash-table-values
hash-table-entries
hash-table-find
hash-table-count
hash-table-map
hash-table-for-each
hash-table-map!
hash-table-map->list
hash-table-fold
hash-table-prune!
hash-table-copy
hash-table-empty-copy
hash-table->alist
hash-table-union!
hash-table-intersection!
hash-table-difference!
hash-table-xor!
SRFI 128
comparator? comparator-ordered? comparator-hashable?
make-comparator
make-pair-comparator make-list-comparator make-vector-comparator
make-eq-comparator make-eqv-comparator make-equal-comparator
boolean-hash char-hash char-ci-hash
string-hash string-ci-hash symbol-hash number-hash
make-default-comparator default-hash comparator-register-default!
comparator-type-test-predicate comparator-equality-predicate
comparator-ordering-predicate comparator-hash-function
comparator-test-type comparator-check-type comparator-hash
hash-bound hash-salt
=? <? >? <=? >=?
comparator-if<=>
comparator-max comparator-min
comparator-max-in-list comparator-min-in-list
default-comparator boolean-comparator real-comparator
char-comparator char-ci-comparator
string-comparator string-ci-comparator
list-comparator vector-comparator
eq-comparator eqv-comparator equal-comparator
SRFI 27
random-integer
random-real
default-random-source
make-random-source
random-source?
random-source-state-ref
random-source-state-set!
random-source-randomize!
random-source-pseudo-randomize!
random-source-make-integers
random-source-make-reals
)
|
c7957609822ecaf64db31d0f96aad12a2b0e9938ecad1af36a852e2cbfa97eed | FieryCod/holy-lambda-ring-adapter | routes.clj | (ns example.routes
(:require
[reitit.coercion.malli :as coercion-malli]
[reitit.ring.middleware.muuntaja :as muuntaja]
[reitit.ring.coercion :as coercion]
[reitit.ring.middleware.exception :as exception]
[muuntaja.core :as m]
[ring.util.response :as response]
[reitit.ring.middleware.multipart :as multipart]
[reitit.ring.middleware.parameters :as parameters]
[reitit.swagger :as swagger]
[ring.util.io :as ring-io]
[com.stuartsierra.component :as component]
[reitit.swagger-ui :as swagger-ui]
[reitit.ring :as ring]))
(defn- ring-handler
[_dependencies]
(ring/ring-handler
(ring/router
[["/resources/*" {:no-doc true
:get {:handler (ring/create-resource-handler)}}]
["/welcome-screen" {:no-doc true
:get {:handler (fn [_req]
{:body "<html><iframe width=\"100%\" height=\"100%\" src=\"/resources/index.html\"></iframe></html>"
:status 200
:headers {"content-type" "text/html"}})}}]
["/api/v1" {:swagger {:info {:title "Application routes"
:description "Lorem ipsum"}}}
["/seq" {:get {:handler (fn [_req]
{:body '("hello" "world")
:status 200})}}]
["/byte-array-hello" {:get {:handler (fn [_req]
{:body (ring-io/string-input-stream "Hello world" "utf-8")
:status 200})}}]
["/hello" {:description "Says Hello!"
:get {:handler (fn [_req]
(response/response {:hello "Hello world"}))}}]
["/say-hello" {:description "Now it's your turn to say hello"
:post {:parameters {:body [:map
[:hello string?]]}
:handler (fn [{:keys [parameters]}]
(response/response {:hello (:body parameters)}))}}]]
["" {:no-doc true}
["/swagger.json" {:get {:swagger {:info {:title "Ring API"
:description "Ring API running on AWS Lambda"
:version "1.0.0"}}
:handler (swagger/create-swagger-handler)}}]
["/api-docs/*" {:get (swagger-ui/create-swagger-ui-handler)}]]]
{:data {:coercion coercion-malli/coercion
:muuntaja m/instance
:middleware [;; query-params & form-params
parameters/parameters-middleware
;; content-negotiation
muuntaja/format-negotiate-middleware
;; encoding response body
muuntaja/format-response-middleware
;; exception handling
exception/exception-middleware
;; decoding request body
muuntaja/format-request-middleware
;; coercing response bodys
coercion/coerce-response-middleware
;; coercing request parameters
coercion/coerce-request-middleware
;; multipart
multipart/multipart-middleware
]}})
(ring/create-default-handler
{:not-found (constantly {:status 404
:body "Not found route!"})})))
(defrecord ^:private RingHandlerComponent [dependencies]
component/Lifecycle
(start [this]
(assoc this :ring-handler (ring-handler dependencies)))
(stop [this]
(dissoc this :ring-handler)))
(defn ->ring-handler-component
[opts]
(map->RingHandlerComponent opts))
| null | https://raw.githubusercontent.com/FieryCod/holy-lambda-ring-adapter/bb78262e0694a343c2d03ac9f13c0ad0980e77cc/examples/native/src/example/routes.clj | clojure | query-params & form-params
content-negotiation
encoding response body
exception handling
decoding request body
coercing response bodys
coercing request parameters
multipart | (ns example.routes
(:require
[reitit.coercion.malli :as coercion-malli]
[reitit.ring.middleware.muuntaja :as muuntaja]
[reitit.ring.coercion :as coercion]
[reitit.ring.middleware.exception :as exception]
[muuntaja.core :as m]
[ring.util.response :as response]
[reitit.ring.middleware.multipart :as multipart]
[reitit.ring.middleware.parameters :as parameters]
[reitit.swagger :as swagger]
[ring.util.io :as ring-io]
[com.stuartsierra.component :as component]
[reitit.swagger-ui :as swagger-ui]
[reitit.ring :as ring]))
(defn- ring-handler
[_dependencies]
(ring/ring-handler
(ring/router
[["/resources/*" {:no-doc true
:get {:handler (ring/create-resource-handler)}}]
["/welcome-screen" {:no-doc true
:get {:handler (fn [_req]
{:body "<html><iframe width=\"100%\" height=\"100%\" src=\"/resources/index.html\"></iframe></html>"
:status 200
:headers {"content-type" "text/html"}})}}]
["/api/v1" {:swagger {:info {:title "Application routes"
:description "Lorem ipsum"}}}
["/seq" {:get {:handler (fn [_req]
{:body '("hello" "world")
:status 200})}}]
["/byte-array-hello" {:get {:handler (fn [_req]
{:body (ring-io/string-input-stream "Hello world" "utf-8")
:status 200})}}]
["/hello" {:description "Says Hello!"
:get {:handler (fn [_req]
(response/response {:hello "Hello world"}))}}]
["/say-hello" {:description "Now it's your turn to say hello"
:post {:parameters {:body [:map
[:hello string?]]}
:handler (fn [{:keys [parameters]}]
(response/response {:hello (:body parameters)}))}}]]
["" {:no-doc true}
["/swagger.json" {:get {:swagger {:info {:title "Ring API"
:description "Ring API running on AWS Lambda"
:version "1.0.0"}}
:handler (swagger/create-swagger-handler)}}]
["/api-docs/*" {:get (swagger-ui/create-swagger-ui-handler)}]]]
{:data {:coercion coercion-malli/coercion
:muuntaja m/instance
parameters/parameters-middleware
muuntaja/format-negotiate-middleware
muuntaja/format-response-middleware
exception/exception-middleware
muuntaja/format-request-middleware
coercion/coerce-response-middleware
coercion/coerce-request-middleware
multipart/multipart-middleware
]}})
(ring/create-default-handler
{:not-found (constantly {:status 404
:body "Not found route!"})})))
(defrecord ^:private RingHandlerComponent [dependencies]
component/Lifecycle
(start [this]
(assoc this :ring-handler (ring-handler dependencies)))
(stop [this]
(dissoc this :ring-handler)))
(defn ->ring-handler-component
[opts]
(map->RingHandlerComponent opts))
|
7dacbe98f368700b4bb1be39bf2f2f20c43311fcba27f64e09c2bf0e8ceaf273 | dQuadrant/kuber | Core.hs | # LANGUAGE ScopedTypeVariables #
# LANGUAGE LambdaCase #
module Kuber.Server.Core where
import qualified Data.Text as T
import qualified Data.Aeson as A
import Data.Text.Lazy.Encoding as TL
import qualified Data.Text.Lazy as TL
import Cardano.Api
import Control.Exception (throw, try)
import qualified Data.Set as Set
import System.Exit (die)
import Cardano.Kuber.Api
import Cardano.Kuber.Util
import System.Environment (getEnv)
import System.FilePath (joinPath)
import Cardano.Ledger.Alonzo.Scripts (ExUnits(ExUnits))
import Data.Text.Conversions (Base16(Base16), convertText)
import Cardano.Api.Shelley (TxBody(ShelleyTxBody), fromShelleyTxIn)
import Cardano.Ledger.Shelley.API (TxBody(_inputs))
import qualified Cardano.Ledger.TxIn as Ledger
import qualified Cardano.Ledger.Core as Ledger
import Cardano.Ledger.Alonzo.TxBody (inputs')
import qualified Data.Map as Map
import Data.Text (Text)
import Cardano.Kuber.Data.Models
import qualified Data.ByteString.Char8 as BS8
import Data.Functor ((<&>))
import Cardano.Kuber.Data.Parsers (parseTxIn)
import qualified Debug.Trace as Debug
import Data.Word (Word64)
import qualified Data.Aeson.Key as A
getKeyHash :: AddressModal -> IO KeyHashResponse
getKeyHash aie = do
case addressInEraToPaymentKeyHash (unAddressModal aie) of
Nothing -> throw $ FrameworkError ParserError "Couldn't derive key-hash from address "
Just ha -> pure $ KeyHashResponse $ BS8.unpack $ serialiseToRawBytesHex ha
data QueryTipResponse = QueryTipResponse{
blk:: String
, qtrSlotNo :: Word64
}
instance ToJSON QueryTipResponse where
toJSON (QueryTipResponse blk slot) = A.object [
A.fromString "slot" A..= slot,
A.fromString "block" A..= blk
]
queryTip ::ChainInfo x => x -> IO QueryTipResponse
queryTip ctx = do
chainPoint<-doQuery (QueryChainPoint CardanoMode)
systemStart<-doQuery QuerySystemStart
tip <- doQuery (QueryChainPoint CardanoMode)
case chainPoint of
ChainPointAtGenesis -> pure $ QueryTipResponse "genesis" 0
ChainPoint sn ha -> pure $ QueryTipResponse (toHexString $ serialiseToRawBytes ha) (unSlotNo sn)
where
doQuery q= do
a <-queryNodeLocalState conn Nothing q
case a of
Left af -> throw $ FrameworkError NodeQueryError (show af)
Right e -> pure e
conn= getConnectInfo ctx
getBalance :: ChainInfo x => x -> String -> IO BalanceResponse
getBalance ctx addrStr = do
case parseTxIn (T.pack addrStr) of
Just txin -> do
eUtxos <- queryTxins (getConnectInfo ctx) (Set.singleton txin)
case eUtxos of
Left fe -> throw fe
Right utxos -> do
putStrLn $ addrStr ++ " : " ++ show utxos
pure $ BalanceResponse utxos
Nothing -> do
addr <- case deserialiseAddress AsAddressAny $ T.pack addrStr of
Nothing -> case
deserialiseFromBech32 (AsSigningKey AsPaymentKey) $ T.pack addrStr of
Left bde -> throw $ FrameworkError ParserError "Invalid address"
Right any -> pure $ toAddressAny $ skeyToAddr any (getNetworkId ctx)
Just aany -> pure aany
eUtxos <- queryUtxos (getConnectInfo ctx) $ Set.singleton addr
case eUtxos of
Left fe -> throw fe
Right utxos -> pure $ BalanceResponse utxos
submitTx' :: ChainInfo x => x -> SubmitTxModal -> IO TxResponse
submitTx' ctx (SubmitTxModal tx mWitness) = do
let tx' = case mWitness of
Nothing -> tx
Just kw -> makeSignedTransaction (kw : getTxWitnesses tx) txbody
txbody = getTxBody tx
status <- submitTx (getConnectInfo ctx) tx'
case status of
Left fe -> throw fe
Right x1 -> pure $ TxResponse tx'
txBuilder :: DetailedChainInfo -> Maybe Bool -> TxBuilder -> IO TxResponse
txBuilder dcinfo submitM txBuilder = do
putStrLn $ BS8.unpack $ prettyPrintJSON txBuilder
txE <- txBuilderToTxIO dcinfo txBuilder
case txE of
Left fe -> throw fe
Right tx -> case submitM of
Just True -> do
txE <- submitTx (getConnectInfo dcinfo) tx
case txE of
Left fe -> throw fe
Right _ -> pure $ TxResponse tx
_ -> pure $ TxResponse tx
evaluateExecutionUnits' :: DetailedChainInfo -> String -> IO [Either String ExecutionUnits]
evaluateExecutionUnits' dcinfo txStr = do
case convertText txStr of
Nothing -> fail "Tx string is not hex encoded"
Just (Base16 bs) -> case deserialiseFromCBOR (AsTx AsBabbageEra ) bs of
Left e -> fail $ "Tx string: Invalid CBOR format : "++ show e
Right tx -> evaluateExecutionUnits dcinfo tx
| null | https://raw.githubusercontent.com/dQuadrant/kuber/ead85f86ee3b38f9533ff731e09fa17bd693335d/server/src/Kuber/Server/Core.hs | haskell | # LANGUAGE ScopedTypeVariables #
# LANGUAGE LambdaCase #
module Kuber.Server.Core where
import qualified Data.Text as T
import qualified Data.Aeson as A
import Data.Text.Lazy.Encoding as TL
import qualified Data.Text.Lazy as TL
import Cardano.Api
import Control.Exception (throw, try)
import qualified Data.Set as Set
import System.Exit (die)
import Cardano.Kuber.Api
import Cardano.Kuber.Util
import System.Environment (getEnv)
import System.FilePath (joinPath)
import Cardano.Ledger.Alonzo.Scripts (ExUnits(ExUnits))
import Data.Text.Conversions (Base16(Base16), convertText)
import Cardano.Api.Shelley (TxBody(ShelleyTxBody), fromShelleyTxIn)
import Cardano.Ledger.Shelley.API (TxBody(_inputs))
import qualified Cardano.Ledger.TxIn as Ledger
import qualified Cardano.Ledger.Core as Ledger
import Cardano.Ledger.Alonzo.TxBody (inputs')
import qualified Data.Map as Map
import Data.Text (Text)
import Cardano.Kuber.Data.Models
import qualified Data.ByteString.Char8 as BS8
import Data.Functor ((<&>))
import Cardano.Kuber.Data.Parsers (parseTxIn)
import qualified Debug.Trace as Debug
import Data.Word (Word64)
import qualified Data.Aeson.Key as A
getKeyHash :: AddressModal -> IO KeyHashResponse
getKeyHash aie = do
case addressInEraToPaymentKeyHash (unAddressModal aie) of
Nothing -> throw $ FrameworkError ParserError "Couldn't derive key-hash from address "
Just ha -> pure $ KeyHashResponse $ BS8.unpack $ serialiseToRawBytesHex ha
data QueryTipResponse = QueryTipResponse{
blk:: String
, qtrSlotNo :: Word64
}
instance ToJSON QueryTipResponse where
toJSON (QueryTipResponse blk slot) = A.object [
A.fromString "slot" A..= slot,
A.fromString "block" A..= blk
]
queryTip ::ChainInfo x => x -> IO QueryTipResponse
queryTip ctx = do
chainPoint<-doQuery (QueryChainPoint CardanoMode)
systemStart<-doQuery QuerySystemStart
tip <- doQuery (QueryChainPoint CardanoMode)
case chainPoint of
ChainPointAtGenesis -> pure $ QueryTipResponse "genesis" 0
ChainPoint sn ha -> pure $ QueryTipResponse (toHexString $ serialiseToRawBytes ha) (unSlotNo sn)
where
doQuery q= do
a <-queryNodeLocalState conn Nothing q
case a of
Left af -> throw $ FrameworkError NodeQueryError (show af)
Right e -> pure e
conn= getConnectInfo ctx
getBalance :: ChainInfo x => x -> String -> IO BalanceResponse
getBalance ctx addrStr = do
case parseTxIn (T.pack addrStr) of
Just txin -> do
eUtxos <- queryTxins (getConnectInfo ctx) (Set.singleton txin)
case eUtxos of
Left fe -> throw fe
Right utxos -> do
putStrLn $ addrStr ++ " : " ++ show utxos
pure $ BalanceResponse utxos
Nothing -> do
addr <- case deserialiseAddress AsAddressAny $ T.pack addrStr of
Nothing -> case
deserialiseFromBech32 (AsSigningKey AsPaymentKey) $ T.pack addrStr of
Left bde -> throw $ FrameworkError ParserError "Invalid address"
Right any -> pure $ toAddressAny $ skeyToAddr any (getNetworkId ctx)
Just aany -> pure aany
eUtxos <- queryUtxos (getConnectInfo ctx) $ Set.singleton addr
case eUtxos of
Left fe -> throw fe
Right utxos -> pure $ BalanceResponse utxos
submitTx' :: ChainInfo x => x -> SubmitTxModal -> IO TxResponse
submitTx' ctx (SubmitTxModal tx mWitness) = do
let tx' = case mWitness of
Nothing -> tx
Just kw -> makeSignedTransaction (kw : getTxWitnesses tx) txbody
txbody = getTxBody tx
status <- submitTx (getConnectInfo ctx) tx'
case status of
Left fe -> throw fe
Right x1 -> pure $ TxResponse tx'
txBuilder :: DetailedChainInfo -> Maybe Bool -> TxBuilder -> IO TxResponse
txBuilder dcinfo submitM txBuilder = do
putStrLn $ BS8.unpack $ prettyPrintJSON txBuilder
txE <- txBuilderToTxIO dcinfo txBuilder
case txE of
Left fe -> throw fe
Right tx -> case submitM of
Just True -> do
txE <- submitTx (getConnectInfo dcinfo) tx
case txE of
Left fe -> throw fe
Right _ -> pure $ TxResponse tx
_ -> pure $ TxResponse tx
evaluateExecutionUnits' :: DetailedChainInfo -> String -> IO [Either String ExecutionUnits]
evaluateExecutionUnits' dcinfo txStr = do
case convertText txStr of
Nothing -> fail "Tx string is not hex encoded"
Just (Base16 bs) -> case deserialiseFromCBOR (AsTx AsBabbageEra ) bs of
Left e -> fail $ "Tx string: Invalid CBOR format : "++ show e
Right tx -> evaluateExecutionUnits dcinfo tx
| |
e28bad5355cd72487f3966afab86a2badb936a842f3283818718551c9cefb15f | luc-tielen/llvm-codegen | ModuleBuilder.hs | # LANGUAGE TypeFamilies , MultiParamTypeClasses , UndecidableInstances #
module LLVM.Codegen.ModuleBuilder
( ModuleBuilderT
, ModuleBuilder
, runModuleBuilderT
, runModuleBuilder
, MonadModuleBuilder
, Module(..)
, Definition(..)
, ParameterName(..)
, FunctionAttribute(..)
, function
, global
, globalUtf8StringPtr
, extern
, typedef
, opaqueTypedef
, getTypedefs
, lookupType
, withFunctionAttributes
) where
import GHC.Stack
import Control.Monad.State.Lazy (StateT(..), MonadState, State, execStateT, modify, gets)
import qualified Control.Monad.State.Strict as StrictState
import qualified Control.Monad.State.Lazy as LazyState
import qualified Control.Monad.RWS.Lazy as LazyRWS
import qualified Control.Monad.RWS.Strict as StrictRWS
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.Except
import Control.Monad.Morph
import Data.DList (DList)
import Data.Map (Map)
import Data.String
import qualified Data.DList as DList
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString as BS
import qualified Data.List as L
import Data.Functor.Identity
import LLVM.Codegen.IRBuilder.Monad
import LLVM.Codegen.Operand
import LLVM.Codegen.Type
import LLVM.Codegen.Flag
import LLVM.Codegen.NameSupply
import LLVM.Codegen.IR
import LLVM.Pretty
newtype Module
= Module [Definition]
instance Pretty Module where
pretty (Module defs) =
vsep $ L.intersperse mempty $ map pretty defs
data ParameterName
= ParameterName !T.Text
| NoParameterName
deriving Show
instance IsString ParameterName where
fromString = ParameterName . fromString
data FunctionAttribute
= WasmExportName !T.Text
| AlwaysInline
-- Add more as needed..
deriving Show
data Global
= GlobalVariable !Name !Type !Constant
| Function !Name !Type ![(Type, ParameterName)] ![FunctionAttribute] ![BasicBlock]
deriving Show
data Typedef
= Opaque
| Clear !Type
deriving Show
data Definition
= GlobalDefinition !Global
| TypeDefinition !Name !Typedef
deriving Show
instance Pretty Definition where
pretty = \case
GlobalDefinition g ->
pretty g
TypeDefinition name typeDef ->
let prettyTy = case typeDef of
Opaque -> "opaque"
Clear ty -> pretty ty
in "%" <> pretty name <+> "=" <+> "type" <+> prettyTy
instance Pretty Global where
pretty = \case
GlobalVariable name ty constant ->
"@" <> pretty name <+> "=" <+> "global" <+> pretty ty <+> pretty constant
Function name retTy args attrs body
| null body ->
"declare external ccc" <+> pretty retTy <+> fnName <> toTuple (map (pretty . fst) args) <> prettyAttrs
| otherwise ->
"define external ccc" <+> pretty retTy <+> fnName <> toTuple (zipWith prettyArg [0..] args) <> prettyAttrs <+>
"{" <> hardline <>
prettyBody body <> hardline <>
"}"
where
fnName = "@" <> pretty name
prettyArg :: Int -> (Type, ParameterName) -> Doc ann
prettyArg i (argTy, nm) =
case nm of
NoParameterName ->
pretty argTy <+> pretty (LocalRef argTy $ Name $ T.pack $ show i)
ParameterName paramName ->
pretty argTy <+> pretty (LocalRef argTy $ Name paramName)
prettyBody blocks =
vsep $ map pretty blocks
prettyAttrs =
if null attrs
then mempty
else mempty <+> hsep (map pretty attrs)
toTuple argDocs =
parens $ argDocs `sepBy` ", "
sepBy docs separator =
mconcat $ L.intersperse separator docs
instance Pretty FunctionAttribute where
pretty = \case
AlwaysInline ->
"alwaysinline"
WasmExportName name ->
dquotes "wasm-export-name" <> "=" <> dquotes (pretty name)
data ModuleBuilderState
= ModuleBuilderState
{ definitions :: !(DList Definition)
, types :: !(Map Name Type)
, defaultFunctionAttributes :: ![FunctionAttribute]
}
newtype ModuleBuilderT m a
= ModuleBuilderT { unModuleBuilderT :: StateT ModuleBuilderState m a }
deriving ( Functor, Applicative, Monad, MonadFix, MonadIO
, MonadError e
)
via StateT ModuleBuilderState m
type ModuleBuilder = ModuleBuilderT Identity
instance MonadTrans ModuleBuilderT where
lift = ModuleBuilderT . lift
# INLINEABLE lift #
instance MonadReader r m => MonadReader r (ModuleBuilderT m) where
ask = lift ask
# INLINEABLE ask #
local = mapModuleBuilderT . local
# INLINEABLE local #
mapModuleBuilderT :: (Functor m, Monad n) => (m a -> n a) -> ModuleBuilderT m a -> ModuleBuilderT n a
mapModuleBuilderT f (ModuleBuilderT inner) =
ModuleBuilderT $ do
s <- LazyState.get
LazyState.mapStateT (g s) inner
where
g s = fmap (,s) . f . fmap fst
# INLINEABLE mapModuleBuilderT #
instance MonadState s m => MonadState s (ModuleBuilderT m) where
state = lift . LazyState.state
# INLINEABLE state #
instance MFunctor ModuleBuilderT where
hoist nat = ModuleBuilderT . hoist nat . unModuleBuilderT
# INLINEABLE hoist #
class Monad m => MonadModuleBuilder m where
liftModuleBuilderState :: State ModuleBuilderState a -> m a
default liftModuleBuilderState
:: (MonadTrans t, MonadModuleBuilder m1, m ~ t m1)
=> State ModuleBuilderState a
-> m a
liftModuleBuilderState = lift . liftModuleBuilderState
{-# INLINEABLE liftModuleBuilderState #-}
instance Monad m => MonadModuleBuilder (ModuleBuilderT m) where
liftModuleBuilderState (StateT s) =
ModuleBuilderT $ StateT $ pure . runIdentity . s
{-# INLINEABLE liftModuleBuilderState #-}
instance MonadModuleBuilder m => MonadModuleBuilder (IRBuilderT m)
instance MonadModuleBuilder m => MonadModuleBuilder (StrictState.StateT s m)
instance MonadModuleBuilder m => MonadModuleBuilder (LazyState.StateT s m)
instance (MonadModuleBuilder m, Monoid w) => MonadModuleBuilder (StrictRWS.RWST r w s m)
instance (MonadModuleBuilder m, Monoid w) => MonadModuleBuilder (LazyRWS.RWST r w s m)
instance MonadModuleBuilder m => MonadModuleBuilder (ReaderT r m)
instance (MonadModuleBuilder m, Monoid w) => MonadModuleBuilder (WriterT w m)
instance MonadModuleBuilder m => MonadModuleBuilder (ExceptT e m)
runModuleBuilderT :: Monad m => ModuleBuilderT m a -> m Module
runModuleBuilderT (ModuleBuilderT m) =
Module . DList.toList . definitions <$> execStateT m beginState
where
beginState = ModuleBuilderState mempty mempty []
# INLINEABLE runModuleBuilderT #
withFunctionAttributes
:: MonadModuleBuilder m
=> ([FunctionAttribute] -> [FunctionAttribute])
-> m a -> m a
withFunctionAttributes f m = do
fnAttrs <- liftModuleBuilderState (gets defaultFunctionAttributes)
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = f fnAttrs }
result <- m
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = fnAttrs }
pure result
# INLINEABLE withFunctionAttributes #
resetFunctionAttributes :: MonadModuleBuilder m => m ()
resetFunctionAttributes =
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = mempty }
# INLINEABLE resetFunctionAttributes #
getDefaultFunctionAttributes :: MonadModuleBuilder m => m [FunctionAttribute]
getDefaultFunctionAttributes =
liftModuleBuilderState $ gets defaultFunctionAttributes
# INLINEABLE getDefaultFunctionAttributes #
runModuleBuilder :: ModuleBuilder a -> Module
runModuleBuilder = runIdentity . runModuleBuilderT
# INLINEABLE runModuleBuilder #
function :: (HasCallStack, MonadModuleBuilder m)
=> Name -> [(Type, ParameterName)] -> Type -> ([Operand] -> IRBuilderT m a) -> m Operand
function name args retTy fnBody = do
fnAttrs <- getDefaultFunctionAttributes
(names, instrs) <- runIRBuilderT $ do
(names, operands) <- unzip <$> traverse (uncurry mkOperand) args
resetFunctionAttributes -- This is done to avoid functions emitted in the body that not automatically copy the same attributes
_ <- fnBody operands
pure names
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = fnAttrs }
let args' = zipWith (\argName (ty, _) -> (ty, ParameterName $ unName argName)) names args
emitDefinition $ GlobalDefinition $ Function name retTy args' fnAttrs instrs
pure $ ConstantOperand $ GlobalRef (ptr (FunctionType retTy $ map fst args)) name
# INLINEABLE function #
emitDefinition :: MonadModuleBuilder m => Definition -> m ()
emitDefinition def =
liftModuleBuilderState $ modify $ \s -> s { definitions = DList.snoc (definitions s) def }
# INLINEABLE emitDefinition #
getTypedefs :: MonadModuleBuilder m => m (Map Name Type)
getTypedefs =
liftModuleBuilderState $ gets types
# #
lookupType :: MonadModuleBuilder m => Name -> m (Maybe Type)
lookupType name =
liftModuleBuilderState $ gets (Map.lookup name . types)
# INLINEABLE lookupType #
addType :: MonadModuleBuilder m => Name -> Type -> m ()
addType name ty =
liftModuleBuilderState $ modify $ \s -> s { types = Map.insert name ty (types s) }
{-# INLINEABLE addType #-}
global :: MonadModuleBuilder m => Name -> Type -> Constant -> m Operand
global name ty constant = do
emitDefinition $ GlobalDefinition $ GlobalVariable name ty constant
pure $ ConstantOperand $ GlobalRef (ptr ty) name
# INLINEABLE global #
globalUtf8StringPtr :: (HasCallStack, MonadNameSupply m, MonadModuleBuilder m, MonadIRBuilder m)
=> T.Text -> Name -> m Operand
globalUtf8StringPtr txt name = do
let utf8Bytes = BS.snoc (TE.encodeUtf8 txt) 0 -- 0-terminated UTF8 string
llvmValues = map (Int 8 . toInteger) $ BS.unpack utf8Bytes
arrayValue = Array i8 llvmValues
constant = ConstantOperand arrayValue
ty = typeOf constant
-- This definition will end up before the function this is used in
addr <- global name ty arrayValue
let instr = GetElementPtr On addr [ ConstantOperand $ Int 32 0
, ConstantOperand $ Int 32 0
]
emitInstr (ptr i8) instr
# INLINEABLE globalUtf8StringPtr #
-- NOTE: typedefs are only allowed for structs, even though clang also allows it
for primitive types . This is done to avoid weird inconsistencies with the LLVM JIT
-- (where this is not allowed).
typedef :: MonadModuleBuilder m => Name -> Flag Packed -> [Type] -> m Type
typedef name packed tys = do
let ty = StructureType packed tys
emitDefinition $ TypeDefinition name (Clear ty)
addType name ty
pure $ NamedTypeReference name
# INLINEABLE typedef #
opaqueTypedef :: MonadModuleBuilder m => Name -> m Type
opaqueTypedef name = do
emitDefinition $ TypeDefinition name Opaque
pure $ NamedTypeReference name
# INLINEABLE opaqueTypedef #
extern :: MonadModuleBuilder m => Name -> [Type] -> Type -> m Operand
extern name argTys retTy = do
let args = [(argTy, ParameterName "") | argTy <- argTys]
fnAttrs <- getDefaultFunctionAttributes
emitDefinition $ GlobalDefinition $ Function name retTy args fnAttrs []
let fnTy = ptr $ FunctionType retTy argTys
pure $ ConstantOperand $ GlobalRef fnTy name
# INLINEABLE extern #
-- NOTE: Only used internally, this creates an unassigned operand
mkOperand :: Monad m => Type -> ParameterName -> IRBuilderT m (Name, Operand)
mkOperand ty paramName = do
name <- case paramName of
NoParameterName -> fresh
ParameterName name -> fresh `named` Name name
pure (name, LocalRef ty name)
{-# INLINEABLE mkOperand #-}
| null | https://raw.githubusercontent.com/luc-tielen/llvm-codegen/84df715cb92c23a512a4a44ca92f592f676d3610/lib/LLVM/Codegen/ModuleBuilder.hs | haskell | Add more as needed..
# INLINEABLE liftModuleBuilderState #
# INLINEABLE liftModuleBuilderState #
This is done to avoid functions emitted in the body that not automatically copy the same attributes
# INLINEABLE addType #
0-terminated UTF8 string
This definition will end up before the function this is used in
NOTE: typedefs are only allowed for structs, even though clang also allows it
(where this is not allowed).
NOTE: Only used internally, this creates an unassigned operand
# INLINEABLE mkOperand # | # LANGUAGE TypeFamilies , MultiParamTypeClasses , UndecidableInstances #
module LLVM.Codegen.ModuleBuilder
( ModuleBuilderT
, ModuleBuilder
, runModuleBuilderT
, runModuleBuilder
, MonadModuleBuilder
, Module(..)
, Definition(..)
, ParameterName(..)
, FunctionAttribute(..)
, function
, global
, globalUtf8StringPtr
, extern
, typedef
, opaqueTypedef
, getTypedefs
, lookupType
, withFunctionAttributes
) where
import GHC.Stack
import Control.Monad.State.Lazy (StateT(..), MonadState, State, execStateT, modify, gets)
import qualified Control.Monad.State.Strict as StrictState
import qualified Control.Monad.State.Lazy as LazyState
import qualified Control.Monad.RWS.Lazy as LazyRWS
import qualified Control.Monad.RWS.Strict as StrictRWS
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.Except
import Control.Monad.Morph
import Data.DList (DList)
import Data.Map (Map)
import Data.String
import qualified Data.DList as DList
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString as BS
import qualified Data.List as L
import Data.Functor.Identity
import LLVM.Codegen.IRBuilder.Monad
import LLVM.Codegen.Operand
import LLVM.Codegen.Type
import LLVM.Codegen.Flag
import LLVM.Codegen.NameSupply
import LLVM.Codegen.IR
import LLVM.Pretty
newtype Module
= Module [Definition]
instance Pretty Module where
pretty (Module defs) =
vsep $ L.intersperse mempty $ map pretty defs
data ParameterName
= ParameterName !T.Text
| NoParameterName
deriving Show
instance IsString ParameterName where
fromString = ParameterName . fromString
data FunctionAttribute
= WasmExportName !T.Text
| AlwaysInline
deriving Show
data Global
= GlobalVariable !Name !Type !Constant
| Function !Name !Type ![(Type, ParameterName)] ![FunctionAttribute] ![BasicBlock]
deriving Show
data Typedef
= Opaque
| Clear !Type
deriving Show
data Definition
= GlobalDefinition !Global
| TypeDefinition !Name !Typedef
deriving Show
instance Pretty Definition where
pretty = \case
GlobalDefinition g ->
pretty g
TypeDefinition name typeDef ->
let prettyTy = case typeDef of
Opaque -> "opaque"
Clear ty -> pretty ty
in "%" <> pretty name <+> "=" <+> "type" <+> prettyTy
instance Pretty Global where
pretty = \case
GlobalVariable name ty constant ->
"@" <> pretty name <+> "=" <+> "global" <+> pretty ty <+> pretty constant
Function name retTy args attrs body
| null body ->
"declare external ccc" <+> pretty retTy <+> fnName <> toTuple (map (pretty . fst) args) <> prettyAttrs
| otherwise ->
"define external ccc" <+> pretty retTy <+> fnName <> toTuple (zipWith prettyArg [0..] args) <> prettyAttrs <+>
"{" <> hardline <>
prettyBody body <> hardline <>
"}"
where
fnName = "@" <> pretty name
prettyArg :: Int -> (Type, ParameterName) -> Doc ann
prettyArg i (argTy, nm) =
case nm of
NoParameterName ->
pretty argTy <+> pretty (LocalRef argTy $ Name $ T.pack $ show i)
ParameterName paramName ->
pretty argTy <+> pretty (LocalRef argTy $ Name paramName)
prettyBody blocks =
vsep $ map pretty blocks
prettyAttrs =
if null attrs
then mempty
else mempty <+> hsep (map pretty attrs)
toTuple argDocs =
parens $ argDocs `sepBy` ", "
sepBy docs separator =
mconcat $ L.intersperse separator docs
instance Pretty FunctionAttribute where
pretty = \case
AlwaysInline ->
"alwaysinline"
WasmExportName name ->
dquotes "wasm-export-name" <> "=" <> dquotes (pretty name)
data ModuleBuilderState
= ModuleBuilderState
{ definitions :: !(DList Definition)
, types :: !(Map Name Type)
, defaultFunctionAttributes :: ![FunctionAttribute]
}
newtype ModuleBuilderT m a
= ModuleBuilderT { unModuleBuilderT :: StateT ModuleBuilderState m a }
deriving ( Functor, Applicative, Monad, MonadFix, MonadIO
, MonadError e
)
via StateT ModuleBuilderState m
type ModuleBuilder = ModuleBuilderT Identity
instance MonadTrans ModuleBuilderT where
lift = ModuleBuilderT . lift
# INLINEABLE lift #
instance MonadReader r m => MonadReader r (ModuleBuilderT m) where
ask = lift ask
# INLINEABLE ask #
local = mapModuleBuilderT . local
# INLINEABLE local #
mapModuleBuilderT :: (Functor m, Monad n) => (m a -> n a) -> ModuleBuilderT m a -> ModuleBuilderT n a
mapModuleBuilderT f (ModuleBuilderT inner) =
ModuleBuilderT $ do
s <- LazyState.get
LazyState.mapStateT (g s) inner
where
g s = fmap (,s) . f . fmap fst
# INLINEABLE mapModuleBuilderT #
instance MonadState s m => MonadState s (ModuleBuilderT m) where
state = lift . LazyState.state
# INLINEABLE state #
instance MFunctor ModuleBuilderT where
hoist nat = ModuleBuilderT . hoist nat . unModuleBuilderT
# INLINEABLE hoist #
class Monad m => MonadModuleBuilder m where
liftModuleBuilderState :: State ModuleBuilderState a -> m a
default liftModuleBuilderState
:: (MonadTrans t, MonadModuleBuilder m1, m ~ t m1)
=> State ModuleBuilderState a
-> m a
liftModuleBuilderState = lift . liftModuleBuilderState
instance Monad m => MonadModuleBuilder (ModuleBuilderT m) where
liftModuleBuilderState (StateT s) =
ModuleBuilderT $ StateT $ pure . runIdentity . s
instance MonadModuleBuilder m => MonadModuleBuilder (IRBuilderT m)
instance MonadModuleBuilder m => MonadModuleBuilder (StrictState.StateT s m)
instance MonadModuleBuilder m => MonadModuleBuilder (LazyState.StateT s m)
instance (MonadModuleBuilder m, Monoid w) => MonadModuleBuilder (StrictRWS.RWST r w s m)
instance (MonadModuleBuilder m, Monoid w) => MonadModuleBuilder (LazyRWS.RWST r w s m)
instance MonadModuleBuilder m => MonadModuleBuilder (ReaderT r m)
instance (MonadModuleBuilder m, Monoid w) => MonadModuleBuilder (WriterT w m)
instance MonadModuleBuilder m => MonadModuleBuilder (ExceptT e m)
runModuleBuilderT :: Monad m => ModuleBuilderT m a -> m Module
runModuleBuilderT (ModuleBuilderT m) =
Module . DList.toList . definitions <$> execStateT m beginState
where
beginState = ModuleBuilderState mempty mempty []
# INLINEABLE runModuleBuilderT #
withFunctionAttributes
:: MonadModuleBuilder m
=> ([FunctionAttribute] -> [FunctionAttribute])
-> m a -> m a
withFunctionAttributes f m = do
fnAttrs <- liftModuleBuilderState (gets defaultFunctionAttributes)
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = f fnAttrs }
result <- m
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = fnAttrs }
pure result
# INLINEABLE withFunctionAttributes #
resetFunctionAttributes :: MonadModuleBuilder m => m ()
resetFunctionAttributes =
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = mempty }
# INLINEABLE resetFunctionAttributes #
getDefaultFunctionAttributes :: MonadModuleBuilder m => m [FunctionAttribute]
getDefaultFunctionAttributes =
liftModuleBuilderState $ gets defaultFunctionAttributes
# INLINEABLE getDefaultFunctionAttributes #
runModuleBuilder :: ModuleBuilder a -> Module
runModuleBuilder = runIdentity . runModuleBuilderT
# INLINEABLE runModuleBuilder #
function :: (HasCallStack, MonadModuleBuilder m)
=> Name -> [(Type, ParameterName)] -> Type -> ([Operand] -> IRBuilderT m a) -> m Operand
function name args retTy fnBody = do
fnAttrs <- getDefaultFunctionAttributes
(names, instrs) <- runIRBuilderT $ do
(names, operands) <- unzip <$> traverse (uncurry mkOperand) args
_ <- fnBody operands
pure names
liftModuleBuilderState $
modify $ \s -> s { defaultFunctionAttributes = fnAttrs }
let args' = zipWith (\argName (ty, _) -> (ty, ParameterName $ unName argName)) names args
emitDefinition $ GlobalDefinition $ Function name retTy args' fnAttrs instrs
pure $ ConstantOperand $ GlobalRef (ptr (FunctionType retTy $ map fst args)) name
# INLINEABLE function #
emitDefinition :: MonadModuleBuilder m => Definition -> m ()
emitDefinition def =
liftModuleBuilderState $ modify $ \s -> s { definitions = DList.snoc (definitions s) def }
# INLINEABLE emitDefinition #
getTypedefs :: MonadModuleBuilder m => m (Map Name Type)
getTypedefs =
liftModuleBuilderState $ gets types
# #
lookupType :: MonadModuleBuilder m => Name -> m (Maybe Type)
lookupType name =
liftModuleBuilderState $ gets (Map.lookup name . types)
# INLINEABLE lookupType #
addType :: MonadModuleBuilder m => Name -> Type -> m ()
addType name ty =
liftModuleBuilderState $ modify $ \s -> s { types = Map.insert name ty (types s) }
global :: MonadModuleBuilder m => Name -> Type -> Constant -> m Operand
global name ty constant = do
emitDefinition $ GlobalDefinition $ GlobalVariable name ty constant
pure $ ConstantOperand $ GlobalRef (ptr ty) name
# INLINEABLE global #
globalUtf8StringPtr :: (HasCallStack, MonadNameSupply m, MonadModuleBuilder m, MonadIRBuilder m)
=> T.Text -> Name -> m Operand
globalUtf8StringPtr txt name = do
llvmValues = map (Int 8 . toInteger) $ BS.unpack utf8Bytes
arrayValue = Array i8 llvmValues
constant = ConstantOperand arrayValue
ty = typeOf constant
addr <- global name ty arrayValue
let instr = GetElementPtr On addr [ ConstantOperand $ Int 32 0
, ConstantOperand $ Int 32 0
]
emitInstr (ptr i8) instr
# INLINEABLE globalUtf8StringPtr #
for primitive types . This is done to avoid weird inconsistencies with the LLVM JIT
typedef :: MonadModuleBuilder m => Name -> Flag Packed -> [Type] -> m Type
typedef name packed tys = do
let ty = StructureType packed tys
emitDefinition $ TypeDefinition name (Clear ty)
addType name ty
pure $ NamedTypeReference name
# INLINEABLE typedef #
opaqueTypedef :: MonadModuleBuilder m => Name -> m Type
opaqueTypedef name = do
emitDefinition $ TypeDefinition name Opaque
pure $ NamedTypeReference name
# INLINEABLE opaqueTypedef #
extern :: MonadModuleBuilder m => Name -> [Type] -> Type -> m Operand
extern name argTys retTy = do
let args = [(argTy, ParameterName "") | argTy <- argTys]
fnAttrs <- getDefaultFunctionAttributes
emitDefinition $ GlobalDefinition $ Function name retTy args fnAttrs []
let fnTy = ptr $ FunctionType retTy argTys
pure $ ConstantOperand $ GlobalRef fnTy name
# INLINEABLE extern #
mkOperand :: Monad m => Type -> ParameterName -> IRBuilderT m (Name, Operand)
mkOperand ty paramName = do
name <- case paramName of
NoParameterName -> fresh
ParameterName name -> fresh `named` Name name
pure (name, LocalRef ty name)
|
5741b6e9c9e374abd6b978d649ac1481a668bd2aa22970607b5de4c2107a05dd | timmolderez/inspector-jay | core.clj | Copyright ( c ) 2013 - 2015 .
;
; All rights reserved. This program and the accompanying materials
; are made available under the terms of the 3-Clause BSD License
; which accompanies this distribution, and is available at
; -3-Clause
(ns inspector-jay.core
"Inspector Jay is a graphical inspector that lets you examine Java/Clojure objects and data structures."
{:author "Tim Molderez"}
(:gen-class
:name inspectorjay.InspectorJay
:prefix java-
:methods [#^{:static true} [inspect [Object] Object]])
(:require [inspector-jay.gui
[gui :as gui]
[utils :as utils]]))
(defn inspect
"Displays an inspector window for a given object.
The return value of inspect is the object itself, so you can plug in this function anywhere you like.
See gui/default-options for more information on all available keyword arguments."
^Object [^Object object & {:as args}]
(if (not= object nil)
(apply gui/inspector-window object (utils/map-to-keyword-args args)))
object)
(defn last-selected-value
"Retrieve the value of the tree node that was last selected.
See gui/last-selected-value for more information."
[]
(gui/last-selected-value))
(defn java-inspect
"Java wrapper for the inspect function.
When using Java, you can call this function as follows:
inspectorjay.InspectorJay.inspect(anObject);"
[object]
(inspect object))
(defn java-inspectorPanel
"Java wrapper for the inspector-panel function.
Rather than opening an inspector window, this method only returns the inspector's JPanel.
You can use it to embed Inspector Jay in your own applications."
[object]
(gui/inspector-panel object)) | null | https://raw.githubusercontent.com/timmolderez/inspector-jay/0035beae482c49e0f215a54e17baf405e42f2398/src/inspector_jay/core.clj | clojure |
All rights reserved. This program and the accompanying materials
are made available under the terms of the 3-Clause BSD License
which accompanies this distribution, and is available at
-3-Clause
" | Copyright ( c ) 2013 - 2015 .
(ns inspector-jay.core
"Inspector Jay is a graphical inspector that lets you examine Java/Clojure objects and data structures."
{:author "Tim Molderez"}
(:gen-class
:name inspectorjay.InspectorJay
:prefix java-
:methods [#^{:static true} [inspect [Object] Object]])
(:require [inspector-jay.gui
[gui :as gui]
[utils :as utils]]))
(defn inspect
"Displays an inspector window for a given object.
The return value of inspect is the object itself, so you can plug in this function anywhere you like.
See gui/default-options for more information on all available keyword arguments."
^Object [^Object object & {:as args}]
(if (not= object nil)
(apply gui/inspector-window object (utils/map-to-keyword-args args)))
object)
(defn last-selected-value
"Retrieve the value of the tree node that was last selected.
See gui/last-selected-value for more information."
[]
(gui/last-selected-value))
(defn java-inspect
"Java wrapper for the inspect function.
When using Java, you can call this function as follows:
[object]
(inspect object))
(defn java-inspectorPanel
"Java wrapper for the inspector-panel function.
Rather than opening an inspector window, this method only returns the inspector's JPanel.
You can use it to embed Inspector Jay in your own applications."
[object]
(gui/inspector-panel object)) |
808048f14a3f307c4b6dce3af4e89134ea3c8481097946f8127bca1e72e59852 | xvw/preface | bounded_meet_semilattice.ml | module Core_via_meet_and_top
(Req : Preface_specs.Bounded_meet_semilattice.WITH_MEET_AND_TOP) =
Req
module Core_over_meet_semilattice_and_via_top
(Meet_req : Preface_specs.Meet_semilattice.CORE)
(Req : Preface_specs.Bounded_meet_semilattice.WITH_TOP
with type t = Meet_req.t) =
struct
include Meet_req
include Req
end
module Infix (Core : Preface_specs.Bounded_meet_semilattice.CORE) = struct
include Meet_semilattice.Infix (Core)
end
module Via
(Core : Preface_specs.Bounded_meet_semilattice.CORE)
(Infix : Preface_specs.Bounded_meet_semilattice.INFIX) =
struct
include Core
module Infix = Infix
include Infix
end
module Via_meet_and_top
(Req : Preface_specs.Bounded_meet_semilattice.WITH_MEET_AND_TOP) =
struct
module Core = Core_via_meet_and_top (Req)
include Core
module Infix = Infix (Core)
include Infix
end
module Over_meet_semilattice_and_via_top
(Meet_req : Preface_specs.Meet_semilattice.CORE)
(Req : Preface_specs.Bounded_meet_semilattice.WITH_TOP
with type t = Meet_req.t) =
struct
module Core = Core_over_meet_semilattice_and_via_top (Meet_req) (Req)
include Core
module Infix = Infix (Core)
include Infix
end
| null | https://raw.githubusercontent.com/xvw/preface/f908ba45e5d58c330781e61162628bbd7c240145/lib/preface_make/bounded_meet_semilattice.ml | ocaml | module Core_via_meet_and_top
(Req : Preface_specs.Bounded_meet_semilattice.WITH_MEET_AND_TOP) =
Req
module Core_over_meet_semilattice_and_via_top
(Meet_req : Preface_specs.Meet_semilattice.CORE)
(Req : Preface_specs.Bounded_meet_semilattice.WITH_TOP
with type t = Meet_req.t) =
struct
include Meet_req
include Req
end
module Infix (Core : Preface_specs.Bounded_meet_semilattice.CORE) = struct
include Meet_semilattice.Infix (Core)
end
module Via
(Core : Preface_specs.Bounded_meet_semilattice.CORE)
(Infix : Preface_specs.Bounded_meet_semilattice.INFIX) =
struct
include Core
module Infix = Infix
include Infix
end
module Via_meet_and_top
(Req : Preface_specs.Bounded_meet_semilattice.WITH_MEET_AND_TOP) =
struct
module Core = Core_via_meet_and_top (Req)
include Core
module Infix = Infix (Core)
include Infix
end
module Over_meet_semilattice_and_via_top
(Meet_req : Preface_specs.Meet_semilattice.CORE)
(Req : Preface_specs.Bounded_meet_semilattice.WITH_TOP
with type t = Meet_req.t) =
struct
module Core = Core_over_meet_semilattice_and_via_top (Meet_req) (Req)
include Core
module Infix = Infix (Core)
include Infix
end
| |
94c24e1a3685bac8f44fb518f489e19882b4c356210f8503535ea82739a17f29 | tfausak/monadoc-5 | PingSpec.hs | module Monadoc.Handler.PingSpec where
import qualified Monadoc
import qualified Monadoc.Handler.Ping as Ping
import Monadoc.Prelude
import qualified Monadoc.Type.App as App
import qualified Monadoc.Type.Config as Config
import qualified Monadoc.Type.Context as Context
import qualified Network.HTTP.Types as Http
import qualified Network.Wai as Wai
import Test.Hspec
spec :: Spec
spec = describe "Monadoc.Handler.Ping" <| do
describe "handle" <| do
it "works" <| do
ctx <- Monadoc.configToContext Config.test
response <- App.run
ctx { Context.request = Wai.defaultRequest }
Ping.handle
Wai.responseStatus response `shouldBe` Http.ok200
| null | https://raw.githubusercontent.com/tfausak/monadoc-5/5361dd1870072cf2771857adbe92658118ddaa27/src/test/Monadoc/Handler/PingSpec.hs | haskell | module Monadoc.Handler.PingSpec where
import qualified Monadoc
import qualified Monadoc.Handler.Ping as Ping
import Monadoc.Prelude
import qualified Monadoc.Type.App as App
import qualified Monadoc.Type.Config as Config
import qualified Monadoc.Type.Context as Context
import qualified Network.HTTP.Types as Http
import qualified Network.Wai as Wai
import Test.Hspec
spec :: Spec
spec = describe "Monadoc.Handler.Ping" <| do
describe "handle" <| do
it "works" <| do
ctx <- Monadoc.configToContext Config.test
response <- App.run
ctx { Context.request = Wai.defaultRequest }
Ping.handle
Wai.responseStatus response `shouldBe` Http.ok200
| |
0eeaa619c0d1f183df135f8ffdde2ccbf0939a41b3a9f625a24dd98618367833 | facebook/duckling | Tests.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.KO.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.KO.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "KO Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/tests/Duckling/Ordinal/KO/Tests.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Ordinal.KO.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.KO.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "KO Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
|
c955ac30b54b19b8553e787c02a4390ebbd79c4a43c32f24cee56d5f352ba289 | c4-project/c4f | statement_traverse.ml | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Base
open Import
module Base_map (A : Applicative.S) = struct
type 'meta t = 'meta Statement.t
let bmap (type m1 m2) (x : m1 t)
~(prim :
(m1, Prim_statement.t) With_meta.t
-> (m2, Prim_statement.t) With_meta.t A.t )
~(if_stm : (m1, m1 t) If.t -> (m2, m2 t) If.t A.t)
~(flow : (m1, m1 t) Flow_block.t -> (m2, m2 t) Flow_block.t A.t) :
m2 t A.t =
Travesty_base_exts.Fn.Compose_syntax.(
Statement.reduce_step x
~prim:(prim >> A.map ~f:(Accessor.construct Statement.prim))
~if_stm:(if_stm >> A.map ~f:(Accessor.construct Statement.if_stm))
~flow:(flow >> A.map ~f:(Accessor.construct Statement.flow)))
module IB = If.Base_map (A)
module FB = Flow_block.Base_map (A)
module Bk = Block.On (A)
Ideally , if we can get rid of Bk.bi_map_m , we should be able to use
applicatives throughout this .
applicatives throughout this. *)
let bmap_flat (type m1 m2) (x : m1 t)
~(prim :
(m1, Prim_statement.t) With_meta.t
-> (m2, Prim_statement.t) With_meta.t A.t )
~(flow_header : Flow_block.Header.t -> Flow_block.Header.t A.t)
~(if_cond : Expression.t -> Expression.t A.t)
~(block_meta : m1 -> m2 A.t) : m2 t A.t =
let rec mu x =
let map_block = Bk.bi_map_m ~left:block_meta ~right:mu in
bmap x ~prim
~if_stm:
(IB.bmap ~cond:if_cond ~t_branch:map_block ~f_branch:map_block)
~flow:(FB.bmap ~body:map_block ~header:flow_header)
in
mu x
end
module On_meta :
Travesty.Traversable_types.S1 with type 'meta t := 'meta Statement.t =
Travesty.Traversable.Make1 (struct
type 'meta t = 'meta Statement.t
module On (M : Applicative.S) = struct
module B = Base_map (M)
module AccM = Accessor.Of_applicative (M)
let map_m (x : 'm1 t) ~(f : 'm1 -> 'm2 M.t) : 'm2 t M.t =
B.bmap_flat x
~prim:(AccM.map With_meta.meta ~f)
~flow_header:M.return ~if_cond:M.return ~block_meta:f
end
end)
let erase_meta (type meta) (s : meta Statement.t) : unit Statement.t =
On_meta.map s ~f:(Fn.const ())
module With_meta (Meta : T) = struct
type nonrec t = Meta.t Statement.t
module Block_stms = Block.On_statements (Meta)
(** Does the legwork of implementing a particular type of traversal over
statements. *)
module Make_traversal (Basic : sig
module Elt : Equal.S
module P :
Travesty.Traversable_types.S0
with type t := Prim_statement.t
and module Elt = Elt
module FH :
Travesty.Traversable_types.S0
with type t := Flow_block.Header.t
and module Elt = Elt
module IE :
Travesty.Traversable_types.S0
with type t := Expression.t
and module Elt = Elt
end) =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Basic.Elt
module On (M : Applicative.S) = struct
module SBase = Base_map (M)
module PM = Basic.P.On (M)
module FHM = Basic.FH.On (M)
module IEM = Basic.IE.On (M)
module AccM = Accessor.Of_applicative (M)
let map_m (x : t) ~(f : Elt.t -> Elt.t M.t) : t M.t =
SBase.bmap_flat x
~prim:(AccM.map With_meta.value ~f:(PM.map_m ~f))
~flow_header:(FHM.map_m ~f) ~if_cond:(IEM.map_m ~f)
~block_meta:M.return
end
end)
module On_lvalues :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Lvalue.t =
Make_traversal (struct
module Elt = Lvalue
module FH = Flow_block.Header.On_lvalues
module IE =
Travesty.Traversable.Chain0
(Expression_traverse.On_addresses)
(Address.On_lvalues)
module P = Prim_statement.On_lvalues
end)
module On_addresses :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Address.t =
Make_traversal (struct
module Elt = Address
module FH =
Travesty.Traversable.Chain0
(Flow_block.Header.On_expressions)
(Expression_traverse.On_addresses)
module IE = Expression_traverse.On_addresses
module P = Prim_statement.On_addresses
end)
module On_expressions :
Travesty.Traversable_types.S0
with type t = t
and type Elt.t = Expression.t = Make_traversal (struct
module Elt = Expression
module FH = Flow_block.Header.On_expressions
module IE =
Travesty.Traversable.Fix_elt
(Travesty_containers.Singleton)
(Expression)
module P = Prim_statement.On_expressions
end)
module On_primitives :
Travesty.Traversable_types.S0
with type t = t
and type Elt.t = Prim_statement.t = Make_traversal (struct
module Elt = Prim_statement
module FH =
Travesty.Traversable.Const (Flow_block.Header) (Prim_statement)
module IE = Travesty.Traversable.Const (Expression) (Prim_statement)
module P =
Travesty.Traversable.Fix_elt
(Travesty_containers.Singleton)
(Prim_statement)
end)
end
(** This rather expansive functor takes a method of lifting various
sub-traversals of some [Elt] to a traversal for [Top], and instantiates
it for a load of common values of [Elt]. *)
module Make_traversal_set
(Meta : T)
(Top : T)
(F : functor
(Basic : sig
module Elt : Equal.S
module FH :
Travesty.Traversable_types.S0
with type t := Flow_block.Header.t
and module Elt = Elt
module LE :
Travesty.Traversable_types.S0
with type t := Expression.t
and module Elt = Elt
module S :
Travesty.Traversable_types.S0
with type t := Meta.t Statement.t
and module Elt = Elt
end)
->
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Basic.Elt.t) =
struct
module Sm = With_meta (Meta)
module On_lvalues :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Lvalue.t = F (struct
module Elt = Lvalue
module FH = Flow_block.Header.On_lvalues
module LE =
Travesty.Traversable.Chain0
(Expression_traverse.On_addresses)
(Address.On_lvalues)
module S = Sm.On_lvalues
end)
module On_addresses :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Address.t = F (struct
module Elt = Address
module FH =
Travesty.Traversable.Chain0
(Flow_block.Header.On_expressions)
(Expression_traverse.On_addresses)
module LE = Expression_traverse.On_addresses
module S = Sm.On_addresses
end)
module On_expressions :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Expression.t = F (struct
module Elt = Expression
module FH = Flow_block.Header.On_expressions
module LE =
Travesty.Traversable.Fix_elt
(Travesty_containers.Singleton)
(Expression)
module S = Sm.On_expressions
end)
module On_primitives :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Prim_statement.t = F (struct
module Elt = Prim_statement
module FH =
Travesty.Traversable.Const (Flow_block.Header) (Prim_statement)
module LE = Travesty.Traversable.Const (Expression) (Prim_statement)
module S = Sm.On_primitives
end)
end
module If :
Statement_types.S_traversable with type 'meta t = 'meta Statement.If.t =
struct
type 'meta t = 'meta Statement.If.t
module On_meta :
Travesty.Traversable_types.S1 with type 'meta t := 'meta t =
Travesty.Traversable.Make1 (struct
type nonrec 'meta t = 'meta t
module On (M : Applicative.S) = struct
module B = If.Base_map (M)
module Mn = On_meta.On (M)
module Bk = Block.On (M)
let map_m (x : 'm1 t) ~(f : 'm1 -> 'm2 M.t) : 'm2 t M.t =
B.bmap x ~cond:M.return
~t_branch:(Bk.bi_map_m ~left:f ~right:(Mn.map_m ~f))
~f_branch:(Bk.bi_map_m ~left:f ~right:(Mn.map_m ~f))
end
end)
let erase_meta (type meta) (s : meta t) : unit t =
On_meta.map s ~f:(Fn.const ())
module With_meta (Meta : T) = struct
module M = struct
type nonrec t = Meta.t t
end
include M
module Block_stms = Block.On_statements (Meta)
(** Does the legwork of implementing a particular type of traversal over
if statements. *)
module Make_traversal (Basic : sig
module Elt : Equal.S
module LE :
Travesty.Traversable_types.S0
with type t := Expression.t
and module Elt = Elt
module S :
Travesty.Traversable_types.S0
with type t := Meta.t Statement.t
and module Elt = Elt
end) =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Basic.Elt
module On (M : Applicative.S) = struct
module IBase = If.Base_map (M)
module Bk = Block_stms.On (M)
module EM = Basic.LE.On (M)
module SM = Basic.S.On (M)
let map_m x ~f =
IBase.bmap x ~cond:(EM.map_m ~f)
~t_branch:(Bk.map_m ~f:(SM.map_m ~f))
~f_branch:(Bk.map_m ~f:(SM.map_m ~f))
end
end)
include Make_traversal_set (Meta) (M) (Make_traversal)
end
end
module Flow_block :
Statement_types.S_traversable
with type 'meta t = ('meta, 'meta Statement.t) Flow_block.t = struct
type 'meta t = ('meta, 'meta Statement.t) Flow_block.t
[@@deriving sexp, compare, equal]
module On_meta :
Travesty.Traversable_types.S1 with type 'meta t := 'meta t =
Travesty.Traversable.Make1 (struct
type nonrec 'meta t = 'meta t
module On (M : Applicative.S) = struct
module B = Flow_block.Base_map (M)
module Mn = On_meta.On (M)
module Bk = Block.On (M)
let map_m (x : 'm1 t) ~(f : 'm1 -> 'm2 M.t) : 'm2 t M.t =
B.bmap x
~body:(Bk.bi_map_m ~left:f ~right:(Mn.map_m ~f))
~header:M.return
end
end)
let erase_meta (type meta) (s : meta t) : unit t =
On_meta.map s ~f:(Fn.const ())
module With_meta (Meta : T) = struct
module M = struct
type nonrec t = Meta.t t
end
include M
module Block_stms = Block.On_statements (Meta)
(** Does the legwork of implementing a particular type of traversal over
flow blocks. *)
module Make_traversal (Basic : sig
module Elt : Equal.S
module FH :
Travesty.Traversable_types.S0
with type t := Flow_block.Header.t
and module Elt = Elt
module S :
Travesty.Traversable_types.S0
with type t := Meta.t Statement.t
and module Elt = Elt
end) =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Basic.Elt
module On (M : Applicative.S) = struct
module FBase = Flow_block.Base_map (M)
module Bk = Block_stms.On (M)
module HM = Basic.FH.On (M)
module SM = Basic.S.On (M)
let map_m x ~f =
FBase.bmap x ~header:(HM.map_m ~f)
~body:(Bk.map_m ~f:(SM.map_m ~f))
end
end)
include Make_traversal_set (Meta) (M) (Make_traversal)
end
end
| null | https://raw.githubusercontent.com/c4-project/c4f/8939477732861789abc807c8c1532a302b2848a5/lib/fir/src/statement_traverse.ml | ocaml | * Does the legwork of implementing a particular type of traversal over
statements.
* This rather expansive functor takes a method of lifting various
sub-traversals of some [Elt] to a traversal for [Top], and instantiates
it for a load of common values of [Elt].
* Does the legwork of implementing a particular type of traversal over
if statements.
* Does the legwork of implementing a particular type of traversal over
flow blocks. | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Base
open Import
module Base_map (A : Applicative.S) = struct
type 'meta t = 'meta Statement.t
let bmap (type m1 m2) (x : m1 t)
~(prim :
(m1, Prim_statement.t) With_meta.t
-> (m2, Prim_statement.t) With_meta.t A.t )
~(if_stm : (m1, m1 t) If.t -> (m2, m2 t) If.t A.t)
~(flow : (m1, m1 t) Flow_block.t -> (m2, m2 t) Flow_block.t A.t) :
m2 t A.t =
Travesty_base_exts.Fn.Compose_syntax.(
Statement.reduce_step x
~prim:(prim >> A.map ~f:(Accessor.construct Statement.prim))
~if_stm:(if_stm >> A.map ~f:(Accessor.construct Statement.if_stm))
~flow:(flow >> A.map ~f:(Accessor.construct Statement.flow)))
module IB = If.Base_map (A)
module FB = Flow_block.Base_map (A)
module Bk = Block.On (A)
Ideally , if we can get rid of Bk.bi_map_m , we should be able to use
applicatives throughout this .
applicatives throughout this. *)
let bmap_flat (type m1 m2) (x : m1 t)
~(prim :
(m1, Prim_statement.t) With_meta.t
-> (m2, Prim_statement.t) With_meta.t A.t )
~(flow_header : Flow_block.Header.t -> Flow_block.Header.t A.t)
~(if_cond : Expression.t -> Expression.t A.t)
~(block_meta : m1 -> m2 A.t) : m2 t A.t =
let rec mu x =
let map_block = Bk.bi_map_m ~left:block_meta ~right:mu in
bmap x ~prim
~if_stm:
(IB.bmap ~cond:if_cond ~t_branch:map_block ~f_branch:map_block)
~flow:(FB.bmap ~body:map_block ~header:flow_header)
in
mu x
end
module On_meta :
Travesty.Traversable_types.S1 with type 'meta t := 'meta Statement.t =
Travesty.Traversable.Make1 (struct
type 'meta t = 'meta Statement.t
module On (M : Applicative.S) = struct
module B = Base_map (M)
module AccM = Accessor.Of_applicative (M)
let map_m (x : 'm1 t) ~(f : 'm1 -> 'm2 M.t) : 'm2 t M.t =
B.bmap_flat x
~prim:(AccM.map With_meta.meta ~f)
~flow_header:M.return ~if_cond:M.return ~block_meta:f
end
end)
let erase_meta (type meta) (s : meta Statement.t) : unit Statement.t =
On_meta.map s ~f:(Fn.const ())
module With_meta (Meta : T) = struct
type nonrec t = Meta.t Statement.t
module Block_stms = Block.On_statements (Meta)
module Make_traversal (Basic : sig
module Elt : Equal.S
module P :
Travesty.Traversable_types.S0
with type t := Prim_statement.t
and module Elt = Elt
module FH :
Travesty.Traversable_types.S0
with type t := Flow_block.Header.t
and module Elt = Elt
module IE :
Travesty.Traversable_types.S0
with type t := Expression.t
and module Elt = Elt
end) =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Basic.Elt
module On (M : Applicative.S) = struct
module SBase = Base_map (M)
module PM = Basic.P.On (M)
module FHM = Basic.FH.On (M)
module IEM = Basic.IE.On (M)
module AccM = Accessor.Of_applicative (M)
let map_m (x : t) ~(f : Elt.t -> Elt.t M.t) : t M.t =
SBase.bmap_flat x
~prim:(AccM.map With_meta.value ~f:(PM.map_m ~f))
~flow_header:(FHM.map_m ~f) ~if_cond:(IEM.map_m ~f)
~block_meta:M.return
end
end)
module On_lvalues :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Lvalue.t =
Make_traversal (struct
module Elt = Lvalue
module FH = Flow_block.Header.On_lvalues
module IE =
Travesty.Traversable.Chain0
(Expression_traverse.On_addresses)
(Address.On_lvalues)
module P = Prim_statement.On_lvalues
end)
module On_addresses :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Address.t =
Make_traversal (struct
module Elt = Address
module FH =
Travesty.Traversable.Chain0
(Flow_block.Header.On_expressions)
(Expression_traverse.On_addresses)
module IE = Expression_traverse.On_addresses
module P = Prim_statement.On_addresses
end)
module On_expressions :
Travesty.Traversable_types.S0
with type t = t
and type Elt.t = Expression.t = Make_traversal (struct
module Elt = Expression
module FH = Flow_block.Header.On_expressions
module IE =
Travesty.Traversable.Fix_elt
(Travesty_containers.Singleton)
(Expression)
module P = Prim_statement.On_expressions
end)
module On_primitives :
Travesty.Traversable_types.S0
with type t = t
and type Elt.t = Prim_statement.t = Make_traversal (struct
module Elt = Prim_statement
module FH =
Travesty.Traversable.Const (Flow_block.Header) (Prim_statement)
module IE = Travesty.Traversable.Const (Expression) (Prim_statement)
module P =
Travesty.Traversable.Fix_elt
(Travesty_containers.Singleton)
(Prim_statement)
end)
end
module Make_traversal_set
(Meta : T)
(Top : T)
(F : functor
(Basic : sig
module Elt : Equal.S
module FH :
Travesty.Traversable_types.S0
with type t := Flow_block.Header.t
and module Elt = Elt
module LE :
Travesty.Traversable_types.S0
with type t := Expression.t
and module Elt = Elt
module S :
Travesty.Traversable_types.S0
with type t := Meta.t Statement.t
and module Elt = Elt
end)
->
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Basic.Elt.t) =
struct
module Sm = With_meta (Meta)
module On_lvalues :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Lvalue.t = F (struct
module Elt = Lvalue
module FH = Flow_block.Header.On_lvalues
module LE =
Travesty.Traversable.Chain0
(Expression_traverse.On_addresses)
(Address.On_lvalues)
module S = Sm.On_lvalues
end)
module On_addresses :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Address.t = F (struct
module Elt = Address
module FH =
Travesty.Traversable.Chain0
(Flow_block.Header.On_expressions)
(Expression_traverse.On_addresses)
module LE = Expression_traverse.On_addresses
module S = Sm.On_addresses
end)
module On_expressions :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Expression.t = F (struct
module Elt = Expression
module FH = Flow_block.Header.On_expressions
module LE =
Travesty.Traversable.Fix_elt
(Travesty_containers.Singleton)
(Expression)
module S = Sm.On_expressions
end)
module On_primitives :
Travesty.Traversable_types.S0
with type t = Top.t
and type Elt.t = Prim_statement.t = F (struct
module Elt = Prim_statement
module FH =
Travesty.Traversable.Const (Flow_block.Header) (Prim_statement)
module LE = Travesty.Traversable.Const (Expression) (Prim_statement)
module S = Sm.On_primitives
end)
end
module If :
Statement_types.S_traversable with type 'meta t = 'meta Statement.If.t =
struct
type 'meta t = 'meta Statement.If.t
module On_meta :
Travesty.Traversable_types.S1 with type 'meta t := 'meta t =
Travesty.Traversable.Make1 (struct
type nonrec 'meta t = 'meta t
module On (M : Applicative.S) = struct
module B = If.Base_map (M)
module Mn = On_meta.On (M)
module Bk = Block.On (M)
let map_m (x : 'm1 t) ~(f : 'm1 -> 'm2 M.t) : 'm2 t M.t =
B.bmap x ~cond:M.return
~t_branch:(Bk.bi_map_m ~left:f ~right:(Mn.map_m ~f))
~f_branch:(Bk.bi_map_m ~left:f ~right:(Mn.map_m ~f))
end
end)
let erase_meta (type meta) (s : meta t) : unit t =
On_meta.map s ~f:(Fn.const ())
module With_meta (Meta : T) = struct
module M = struct
type nonrec t = Meta.t t
end
include M
module Block_stms = Block.On_statements (Meta)
module Make_traversal (Basic : sig
module Elt : Equal.S
module LE :
Travesty.Traversable_types.S0
with type t := Expression.t
and module Elt = Elt
module S :
Travesty.Traversable_types.S0
with type t := Meta.t Statement.t
and module Elt = Elt
end) =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Basic.Elt
module On (M : Applicative.S) = struct
module IBase = If.Base_map (M)
module Bk = Block_stms.On (M)
module EM = Basic.LE.On (M)
module SM = Basic.S.On (M)
let map_m x ~f =
IBase.bmap x ~cond:(EM.map_m ~f)
~t_branch:(Bk.map_m ~f:(SM.map_m ~f))
~f_branch:(Bk.map_m ~f:(SM.map_m ~f))
end
end)
include Make_traversal_set (Meta) (M) (Make_traversal)
end
end
module Flow_block :
Statement_types.S_traversable
with type 'meta t = ('meta, 'meta Statement.t) Flow_block.t = struct
type 'meta t = ('meta, 'meta Statement.t) Flow_block.t
[@@deriving sexp, compare, equal]
module On_meta :
Travesty.Traversable_types.S1 with type 'meta t := 'meta t =
Travesty.Traversable.Make1 (struct
type nonrec 'meta t = 'meta t
module On (M : Applicative.S) = struct
module B = Flow_block.Base_map (M)
module Mn = On_meta.On (M)
module Bk = Block.On (M)
let map_m (x : 'm1 t) ~(f : 'm1 -> 'm2 M.t) : 'm2 t M.t =
B.bmap x
~body:(Bk.bi_map_m ~left:f ~right:(Mn.map_m ~f))
~header:M.return
end
end)
let erase_meta (type meta) (s : meta t) : unit t =
On_meta.map s ~f:(Fn.const ())
module With_meta (Meta : T) = struct
module M = struct
type nonrec t = Meta.t t
end
include M
module Block_stms = Block.On_statements (Meta)
module Make_traversal (Basic : sig
module Elt : Equal.S
module FH :
Travesty.Traversable_types.S0
with type t := Flow_block.Header.t
and module Elt = Elt
module S :
Travesty.Traversable_types.S0
with type t := Meta.t Statement.t
and module Elt = Elt
end) =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Basic.Elt
module On (M : Applicative.S) = struct
module FBase = Flow_block.Base_map (M)
module Bk = Block_stms.On (M)
module HM = Basic.FH.On (M)
module SM = Basic.S.On (M)
let map_m x ~f =
FBase.bmap x ~header:(HM.map_m ~f)
~body:(Bk.map_m ~f:(SM.map_m ~f))
end
end)
include Make_traversal_set (Meta) (M) (Make_traversal)
end
end
|
879ea4052102a492f31f8d525f91bbdab4dd1696b2fe10865e9efb2198a7c990 | vmchale/kempe | Pretty.hs | {-# LANGUAGE OverloadedStrings #-}
module Kempe.Asm.Pretty ( i4
, prettyLabel
) where
import Data.Semigroup ((<>))
import Prettyprinter (Doc, indent, pretty)
i4 :: Doc ann -> Doc ann
i4 = indent 4
prettyLabel :: Word -> Doc ann
prettyLabel l = "kmp_" <> pretty l
| null | https://raw.githubusercontent.com/vmchale/kempe/05ed82ad51704c092e9cb60ff3d034e4e4bb7407/src/Kempe/Asm/Pretty.hs | haskell | # LANGUAGE OverloadedStrings # |
module Kempe.Asm.Pretty ( i4
, prettyLabel
) where
import Data.Semigroup ((<>))
import Prettyprinter (Doc, indent, pretty)
i4 :: Doc ann -> Doc ann
i4 = indent 4
prettyLabel :: Word -> Doc ann
prettyLabel l = "kmp_" <> pretty l
|
65775b68bff858a8db6080b0d198c327cdbcc48601dd0abb5a2a2269827b7826 | fulcro-legacy/fulcro-lein-template | user.clj | (ns {{name}}.model.user
(:require
[com.wsscode.pathom.connect :as pc]
[{{name}}.server-components.pathom-wrappers :refer [defmutation defresolver]]
[taoensso.timbre :as log]))
(def user-database (atom {}))
(defresolver all-users-resolver
"Resolve queries for :all-users."
[env input]
{;;GIVEN nothing
::pc/output [{:all-users [:user/id]}]} ;; I can output all users. NOTE: only ID is needed...other resolvers resolve the rest
(log/info "All users. Database contains: " @user-database)
{:all-users (mapv
(fn [id] {:user/id id})
(keys @user-database))})
(defresolver user-resolver
"Resolve details of a single user. (See pathom docs for adding batching)"
[env {:user/keys [id]}]
{::pc/input #{:user/id} ; GIVEN a user ID
::pc/output [:user/name]} ; I can produce a user's details
;; Look up the user (e.g. in a database), and return what you promised
(when (contains? @user-database id)
(get @user-database id)))
(defresolver user-address-resolver
"Resolve address details for a user. Note the address data could be stored on the user in the database or elsewhere."
[env {:user/keys [id]}]
{::pc/input #{:user/id} ; GIVEN a user ID
::pc/output [:address/id :address/street :address/city :address/state :address/postal-code]} ; I can produce address details
(log/info "Resolving address for " id)
{:address/id "fake-id"
:address/street "111 Main St."
:address/city "Nowhere"
:address/state "WI"
:address/postal-code "99999"})
(defmutation upsert-user
"Add/save a user. Required parameters are:
:user/id - The ID of the user
:user/name - The name of the user
Returns a User (e.g. :user/id) which can resolve to a mutation join return graph.
"
[{:keys [config ring/request]} {:user/keys [id name]}]
{::pc/params #{:user/id :user/name}
::pc/output [:user/id]}
(log/debug "Upsert user with server config that has keys: " (keys config))
(log/debug "Ring request that has keys: " (keys request))
(when (and id name)
(swap! user-database assoc id {:user/id id
:user/name name})
;; Returning the user id allows the UI to query for the result. In this case we're "virtually" adding an address for
;; them!
{:user/id id}))
| null | https://raw.githubusercontent.com/fulcro-legacy/fulcro-lein-template/41195fc3b5e4054ee8b0cfff379bbadb006be046/resources/leiningen/new/fulcro/src/main/app/model/user.clj | clojure | GIVEN nothing
I can output all users. NOTE: only ID is needed...other resolvers resolve the rest
GIVEN a user ID
I can produce a user's details
Look up the user (e.g. in a database), and return what you promised
GIVEN a user ID
I can produce address details
Returning the user id allows the UI to query for the result. In this case we're "virtually" adding an address for
them! | (ns {{name}}.model.user
(:require
[com.wsscode.pathom.connect :as pc]
[{{name}}.server-components.pathom-wrappers :refer [defmutation defresolver]]
[taoensso.timbre :as log]))
(def user-database (atom {}))
(defresolver all-users-resolver
"Resolve queries for :all-users."
[env input]
(log/info "All users. Database contains: " @user-database)
{:all-users (mapv
(fn [id] {:user/id id})
(keys @user-database))})
(defresolver user-resolver
"Resolve details of a single user. (See pathom docs for adding batching)"
[env {:user/keys [id]}]
(when (contains? @user-database id)
(get @user-database id)))
(defresolver user-address-resolver
"Resolve address details for a user. Note the address data could be stored on the user in the database or elsewhere."
[env {:user/keys [id]}]
(log/info "Resolving address for " id)
{:address/id "fake-id"
:address/street "111 Main St."
:address/city "Nowhere"
:address/state "WI"
:address/postal-code "99999"})
(defmutation upsert-user
"Add/save a user. Required parameters are:
:user/id - The ID of the user
:user/name - The name of the user
Returns a User (e.g. :user/id) which can resolve to a mutation join return graph.
"
[{:keys [config ring/request]} {:user/keys [id name]}]
{::pc/params #{:user/id :user/name}
::pc/output [:user/id]}
(log/debug "Upsert user with server config that has keys: " (keys config))
(log/debug "Ring request that has keys: " (keys request))
(when (and id name)
(swap! user-database assoc id {:user/id id
:user/name name})
{:user/id id}))
|
87c5d73166593fef5b88c8f465a03d7b9714e1260c830ce37220112ae48aad61 | ocaml-flambda/ocaml-jst | obj.mli | # 1 "obj.mli"
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open! Stdlib
(** Operations on internal representations of values.
Not for the casual user.
*)
type t
@since 4.12
external repr : 'a -> t = "%identity"
external obj : t -> 'a = "%identity"
external magic : 'a -> 'b = "%obj_magic"
val is_block : t -> bool
external is_int : t -> bool = "%obj_is_int"
external tag : t -> int = "caml_obj_tag" [@@noalloc]
val size : t -> int
external reachable_words : t -> int = "caml_obj_reachable_words"
*
Computes the total size ( in words , including the headers ) of all
heap blocks accessible from the argument . Statically
allocated blocks are excluded , unless the runtime system
was configured with [ --disable - naked - pointers ] .
@since 4.04
Computes the total size (in words, including the headers) of all
heap blocks accessible from the argument. Statically
allocated blocks are excluded, unless the runtime system
was configured with [--disable-naked-pointers].
@since 4.04
*)
val field : t -> int -> t
* When using flambda :
[ set_field ] and [ set_double_field ] MUST NOT be called on immutable
blocks . ( Blocks allocated in C stubs , or with [ new_block ] below ,
are always considered mutable . )
For experts only :
[ set_field ] et al can be made safe by first wrapping the block in
{ ! Sys.opaque_identity } , so any information about its contents will not
be propagated .
[set_field] and [set_double_field] MUST NOT be called on immutable
blocks. (Blocks allocated in C stubs, or with [new_block] below,
are always considered mutable.)
For experts only:
[set_field] et al can be made safe by first wrapping the block in
{!Sys.opaque_identity}, so any information about its contents will not
be propagated.
*)
val set_field : t -> int -> t -> unit
@since 3.11.2
val set_double_field : t -> int -> float -> unit
@since 3.11.2
external raw_field : t -> int -> raw_data = "caml_obj_raw_field"
@since 4.12
external set_raw_field : t -> int -> raw_data -> unit
= "caml_obj_set_raw_field"
@since 4.12
external new_block : int -> int -> t = "caml_obj_block"
external dup : t -> t = "%obj_dup"
(** [dup t] returns a shallow copy of [t]. However if [t] is immutable then
it might be returned unchanged. *)
external add_offset : t -> Int32.t -> t = "caml_obj_add_offset"
@since 3.12.0
external with_tag : int -> t -> t = "caml_obj_with_tag"
@since 4.09.0
val first_non_constant_constructor_tag : int
val last_non_constant_constructor_tag : int
val lazy_tag : int
val closure_tag : int
val object_tag : int
val infix_tag : int
val forward_tag : int
val no_scan_tag : int
val abstract_tag : int
val string_tag : int (* both [string] and [bytes] *)
val double_tag : int
val double_array_tag : int
val custom_tag : int
val final_tag : int
[@@ocaml.deprecated "Replaced by custom_tag."]
val int_tag : int
val out_of_heap_tag : int
should never happen @since 3.11.0
module Closure : sig
type info = {
arity: int;
start_env: int;
}
val info : t -> info
end
module Extension_constructor :
sig
type t = extension_constructor
val of_val : 'a -> t
val name : t -> string
val id : t -> int
end
val extension_constructor : 'a -> extension_constructor
[@@ocaml.deprecated "use Obj.Extension_constructor.of_val"]
val extension_name : extension_constructor -> string
[@@ocaml.deprecated "use Obj.Extension_constructor.name"]
val extension_id : extension_constructor -> int
[@@ocaml.deprecated "use Obj.Extension_constructor.id"]
module Ephemeron: sig
* Ephemeron with arbitrary arity and untyped
type obj_t = t
(** alias for {!Obj.t} *)
type t
* an ephemeron cf { ! Ephemeron }
val create: int -> t
* [ create n ] returns an ephemeron with [ n ] keys .
All the keys and the data are initially empty .
The argument [ n ] must be between zero
and { ! } ( limits included ) .
All the keys and the data are initially empty.
The argument [n] must be between zero
and {!max_ephe_length} (limits included).
*)
val length: t -> int
(** return the number of keys *)
val get_key: t -> int -> obj_t option
* Same as { ! . Ephemeron . }
val get_key_copy: t -> int -> obj_t option
* Same as { ! . Ephemeron . K1.get_key_copy }
val set_key: t -> int -> obj_t -> unit
* Same as { ! . Ephemeron . K1.set_key }
val unset_key: t -> int -> unit
* Same as { ! . Ephemeron . K1.unset_key }
val check_key: t -> int -> bool
* Same as { ! . Ephemeron . }
val blit_key : t -> int -> t -> int -> int -> unit
* Same as { ! . Ephemeron . }
val get_data: t -> obj_t option
* Same as { ! . Ephemeron . K1.get_data }
val get_data_copy: t -> obj_t option
* Same as { ! . Ephemeron . K1.get_data_copy }
val set_data: t -> obj_t -> unit
* Same as { ! . Ephemeron . }
val unset_data: t -> unit
* Same as { ! . Ephemeron . K1.unset_data }
val check_data: t -> bool
* Same as { ! . Ephemeron . K1.check_data }
val blit_data : t -> t -> unit
* Same as { ! . Ephemeron . K1.blit_data }
val max_ephe_length: int
* Maximum length of an ephemeron , ie the maximum number of keys an
ephemeron could contain
ephemeron could contain *)
end
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/549d75742504bb3df218cc8bcc1abf3e9ddd3217/stdlib/obj.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Operations on internal representations of values.
Not for the casual user.
* [dup t] returns a shallow copy of [t]. However if [t] is immutable then
it might be returned unchanged.
both [string] and [bytes]
* alias for {!Obj.t}
* return the number of keys | # 1 "obj.mli"
, projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open! Stdlib
type t
@since 4.12
external repr : 'a -> t = "%identity"
external obj : t -> 'a = "%identity"
external magic : 'a -> 'b = "%obj_magic"
val is_block : t -> bool
external is_int : t -> bool = "%obj_is_int"
external tag : t -> int = "caml_obj_tag" [@@noalloc]
val size : t -> int
external reachable_words : t -> int = "caml_obj_reachable_words"
*
Computes the total size ( in words , including the headers ) of all
heap blocks accessible from the argument . Statically
allocated blocks are excluded , unless the runtime system
was configured with [ --disable - naked - pointers ] .
@since 4.04
Computes the total size (in words, including the headers) of all
heap blocks accessible from the argument. Statically
allocated blocks are excluded, unless the runtime system
was configured with [--disable-naked-pointers].
@since 4.04
*)
val field : t -> int -> t
* When using flambda :
[ set_field ] and [ set_double_field ] MUST NOT be called on immutable
blocks . ( Blocks allocated in C stubs , or with [ new_block ] below ,
are always considered mutable . )
For experts only :
[ set_field ] et al can be made safe by first wrapping the block in
{ ! Sys.opaque_identity } , so any information about its contents will not
be propagated .
[set_field] and [set_double_field] MUST NOT be called on immutable
blocks. (Blocks allocated in C stubs, or with [new_block] below,
are always considered mutable.)
For experts only:
[set_field] et al can be made safe by first wrapping the block in
{!Sys.opaque_identity}, so any information about its contents will not
be propagated.
*)
val set_field : t -> int -> t -> unit
@since 3.11.2
val set_double_field : t -> int -> float -> unit
@since 3.11.2
external raw_field : t -> int -> raw_data = "caml_obj_raw_field"
@since 4.12
external set_raw_field : t -> int -> raw_data -> unit
= "caml_obj_set_raw_field"
@since 4.12
external new_block : int -> int -> t = "caml_obj_block"
external dup : t -> t = "%obj_dup"
external add_offset : t -> Int32.t -> t = "caml_obj_add_offset"
@since 3.12.0
external with_tag : int -> t -> t = "caml_obj_with_tag"
@since 4.09.0
val first_non_constant_constructor_tag : int
val last_non_constant_constructor_tag : int
val lazy_tag : int
val closure_tag : int
val object_tag : int
val infix_tag : int
val forward_tag : int
val no_scan_tag : int
val abstract_tag : int
val double_tag : int
val double_array_tag : int
val custom_tag : int
val final_tag : int
[@@ocaml.deprecated "Replaced by custom_tag."]
val int_tag : int
val out_of_heap_tag : int
should never happen @since 3.11.0
module Closure : sig
type info = {
arity: int;
start_env: int;
}
val info : t -> info
end
module Extension_constructor :
sig
type t = extension_constructor
val of_val : 'a -> t
val name : t -> string
val id : t -> int
end
val extension_constructor : 'a -> extension_constructor
[@@ocaml.deprecated "use Obj.Extension_constructor.of_val"]
val extension_name : extension_constructor -> string
[@@ocaml.deprecated "use Obj.Extension_constructor.name"]
val extension_id : extension_constructor -> int
[@@ocaml.deprecated "use Obj.Extension_constructor.id"]
module Ephemeron: sig
* Ephemeron with arbitrary arity and untyped
type obj_t = t
type t
* an ephemeron cf { ! Ephemeron }
val create: int -> t
* [ create n ] returns an ephemeron with [ n ] keys .
All the keys and the data are initially empty .
The argument [ n ] must be between zero
and { ! } ( limits included ) .
All the keys and the data are initially empty.
The argument [n] must be between zero
and {!max_ephe_length} (limits included).
*)
val length: t -> int
val get_key: t -> int -> obj_t option
* Same as { ! . Ephemeron . }
val get_key_copy: t -> int -> obj_t option
* Same as { ! . Ephemeron . K1.get_key_copy }
val set_key: t -> int -> obj_t -> unit
* Same as { ! . Ephemeron . K1.set_key }
val unset_key: t -> int -> unit
* Same as { ! . Ephemeron . K1.unset_key }
val check_key: t -> int -> bool
* Same as { ! . Ephemeron . }
val blit_key : t -> int -> t -> int -> int -> unit
* Same as { ! . Ephemeron . }
val get_data: t -> obj_t option
* Same as { ! . Ephemeron . K1.get_data }
val get_data_copy: t -> obj_t option
* Same as { ! . Ephemeron . K1.get_data_copy }
val set_data: t -> obj_t -> unit
* Same as { ! . Ephemeron . }
val unset_data: t -> unit
* Same as { ! . Ephemeron . K1.unset_data }
val check_data: t -> bool
* Same as { ! . Ephemeron . K1.check_data }
val blit_data : t -> t -> unit
* Same as { ! . Ephemeron . K1.blit_data }
val max_ephe_length: int
* Maximum length of an ephemeron , ie the maximum number of keys an
ephemeron could contain
ephemeron could contain *)
end
|
7971d37cbb335e80733b24ad30c7a6366c64c24597b1394bb735bcbd4ef24740 | seckcoder/course-compiler | s1_36.rkt | (if (>= 2 1)
42
0)
| null | https://raw.githubusercontent.com/seckcoder/course-compiler/4363e5b3e15eaa7553902c3850b6452de80b2ef6/tests/s1_36.rkt | racket | (if (>= 2 1)
42
0)
| |
6b0969ca87886ca552edbe74c511c19c24ad7a91b010d318544df8e981941d29 | chef-boneyard/bookshelf | bksw_wm_object.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
%% ex: ts=4 sw=4 et
@author < >
Copyright 2012 - 2013 Opscode , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
-module(bksw_wm_object).
-include_lib("mixer/include/mixer.hrl").
-mixin([{bksw_wm_base, [init/1,
is_authorized/2,
finish_request/2,
service_available/2]}]).
Webmachine callbacks
-export([allowed_methods/2,
content_types_accepted/2,
content_types_provided/2,
delete_resource/2,
generate_etag/2,
last_modified/2,
resource_exists/2,
%% Override
validate_content_checksum/2,
%% Resource helpers
download/2,
upload/2]).
-include_lib("webmachine/include/webmachine.hrl").
-include("internal.hrl").
%%===================================================================
%% Public API
%%===================================================================
%% By default, if wm sees a 'content-md5' header, it will read the request body to compute
the and compare to the header value . A 400 will then be returned automagically by wm
%% if the digests do not match. Since we wish to read request bodies in a streaming fashion,
%% we need to handle our own checksum validation. Using wm's default would mean having a
%% full copy of the request body buffered into the request process state. So we define this
%% resource callback to blindly say the content is valid and then do the verification in the
%% upload/2 flow.
validate_content_checksum(Rq, Ctx) ->
{true, Rq, Ctx}.
allowed_methods(Rq, Ctx) ->
{['HEAD', 'GET', 'PUT', 'DELETE'], Rq, Ctx}.
content_types_provided(Rq, Ctx) ->
CType =
case wrq:get_req_header("accept", Rq) of
undefined ->
"application/octet-stream";
"*/*" ->
"application/octet-stream";
C ->
C
end,
{[{CType, download}], Rq, Ctx}.
content_types_accepted(Rq, Ctx) ->
CT = case wrq:get_req_header("content-type", Rq) of
undefined ->
"application/octet-stream";
X ->
X
end,
{MT, _Params} = webmachine_util:media_type_to_detail(CT),
{[{MT, upload}], Rq, Ctx}.
resource_exists(Rq0, Ctx) ->
{ok, Bucket, Path} = bksw_util:get_object_and_bucket(Rq0),
%% Buckets always exist for writes since we create them on the fly
case wrq:method(Rq0) of
'PUT' ->
{true, Rq0, Ctx};
_ ->
%% determine if the entry exists by opening it. This way, we can cache the fd
%% and avoid extra system calls. It also helps to keep the request processing
%% more consistent since we will open the fd once at start and hold on to it.
%% Note that there is still a possible discrepency when we read the meta data.
case bksw_io:open_for_read(Bucket, Path) of
{error, enoent} ->
{false, Rq0, Ctx};
{ok, Ref} ->
{true, Rq0, Ctx#context{entry_ref = Ref}}
end
end.
last_modified(Rq0, Ctx) ->
case entry_md(Ctx) of
{#object{date = Date}, CtxNew} ->
{Date, Rq0, CtxNew};
_ ->
{halt, Rq0, Ctx}
end.
generate_etag(Rq0, Ctx) ->
case entry_md(Ctx) of
{#object{digest = Digest}, CtxNew} ->
{bksw_format:to_base64(Digest), Rq0, CtxNew};
_ ->
{halt, Rq0, Ctx}
end.
delete_resource(Rq0, Ctx) ->
{ok, Bucket, Path} = bksw_util:get_object_and_bucket(Rq0),
{bksw_io:entry_delete(Bucket, Path), Rq0, Ctx}.
%% Return `{Obj, CtxNew}' where `Obj' is the entry meta data `#object{}' record or the atom
%% `error'. The `CtxNew' may have been updated and should be kept. Accessing entry md
%% through this function ensures we only ever read the md from the file system once.
entry_md(#context{entry_md = #object{} = Obj} = Ctx) ->
{Obj, Ctx};
entry_md(#context{entry_ref = Ref, entry_md = undefined} = Ctx) ->
case bksw_io:entry_md(Ref) of
{ok, #object{} = Obj} ->
{Obj, Ctx#context{entry_md = Obj}};
Error ->
{Error, Ctx}
end.
%%
%% Resource Helpers
%%
download(Rq0, #context{entry_ref = Ref, stream_download = true} = Ctx) ->
{{stream, send_streamed_body(Ref)}, Rq0, Ctx};
download(Rq0, #context{entry_ref = Ref, stream_download = false} = Ctx) ->
{fully_read(Ref, []), Rq0, Ctx}.
upload(Rq0, Ctx) ->
{ok, Bucket, Path} = bksw_util:get_object_and_bucket(Rq0),
case bksw_io:open_for_write(Bucket, Path) of
{ok, Ref} ->
write_streamed_body(wrq:stream_req_body(Rq0, ?BLOCK_SIZE), Ref, Rq0, Ctx);
Error ->
error_logger:error_msg("Erroring opening ~p/~p for writing: ~p~n", [Bucket, Path, Error]),
{false, Rq0, Ctx}
end.
%%===================================================================
%% Internal Functions
%%===================================================================
send_streamed_body(Ref) ->
case bksw_io:read(Ref, ?BLOCK_SIZE) of
{ok, eof} ->
bksw_io:finish_read(Ref),
{<<>>, done};
{ok, Data} ->
case byte_size(Data) < ?BLOCK_SIZE of
true ->
bksw_io:finish_read(Ref),
{Data, done};
false ->
{Data, fun() -> send_streamed_body(Ref) end}
end;
Error = {error, _} ->
bksw_io:finish_read(Ref),
error_logger:error_msg("Error occurred during content download: ~p~n", [Error]),
Error
end.
fully_read(Ref, Accum) ->
case bksw_io:read(Ref, ?BLOCK_SIZE) of
{ok, eof} ->
lists:reverse(Accum);
{ok, Data} ->
fully_read(Ref, [Data|Accum]);
Error ->
error_logger:error_msg("Error occurred during content download: ~p~n", [Error]),
lists:reverse(Accum)
end.
write_streamed_body({Data, done}, Ref, Rq0, Ctx) ->
{ok, Ref1} = bksw_io:write(Ref, Data),
{ok, Digest} = bksw_io:finish_write(Ref1),
case get_header('Content-MD5', Rq0) of
undefined ->
Rq1 = bksw_req:with_etag(base64:encode(Digest), Rq0),
{true, wrq:set_response_code(202, Rq1), Ctx};
RawRequestMd5 ->
RequestMd5 = base64:decode(RawRequestMd5),
case RequestMd5 of
Digest ->
Rq1 = bksw_req:with_etag(RawRequestMd5, Rq0),
{true, wrq:set_response_code(202, Rq1), Ctx};
_ ->
{true, wrq:set_response_code(406, Rq0), Ctx}
end
end;
write_streamed_body({Data, Next}, Ref, Rq0, Ctx) ->
{ok, Ref1} = bksw_io:write(Ref, Data),
write_streamed_body(Next(), Ref1, Rq0, Ctx).
get_header(Header, Rq) ->
wrq:get_req_header(Header, Rq).
| null | https://raw.githubusercontent.com/chef-boneyard/bookshelf/f9584e766d16d090812c8f7064651882dddc2512/src/bksw_wm_object.erl | erlang | ex: ts=4 sw=4 et
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
Override
Resource helpers
===================================================================
Public API
===================================================================
By default, if wm sees a 'content-md5' header, it will read the request body to compute
if the digests do not match. Since we wish to read request bodies in a streaming fashion,
we need to handle our own checksum validation. Using wm's default would mean having a
full copy of the request body buffered into the request process state. So we define this
resource callback to blindly say the content is valid and then do the verification in the
upload/2 flow.
Buckets always exist for writes since we create them on the fly
determine if the entry exists by opening it. This way, we can cache the fd
and avoid extra system calls. It also helps to keep the request processing
more consistent since we will open the fd once at start and hold on to it.
Note that there is still a possible discrepency when we read the meta data.
Return `{Obj, CtxNew}' where `Obj' is the entry meta data `#object{}' record or the atom
`error'. The `CtxNew' may have been updated and should be kept. Accessing entry md
through this function ensures we only ever read the md from the file system once.
Resource Helpers
===================================================================
Internal Functions
=================================================================== | -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
@author < >
Copyright 2012 - 2013 Opscode , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(bksw_wm_object).
-include_lib("mixer/include/mixer.hrl").
-mixin([{bksw_wm_base, [init/1,
is_authorized/2,
finish_request/2,
service_available/2]}]).
Webmachine callbacks
-export([allowed_methods/2,
content_types_accepted/2,
content_types_provided/2,
delete_resource/2,
generate_etag/2,
last_modified/2,
resource_exists/2,
validate_content_checksum/2,
download/2,
upload/2]).
-include_lib("webmachine/include/webmachine.hrl").
-include("internal.hrl").
the and compare to the header value . A 400 will then be returned automagically by wm
validate_content_checksum(Rq, Ctx) ->
{true, Rq, Ctx}.
allowed_methods(Rq, Ctx) ->
{['HEAD', 'GET', 'PUT', 'DELETE'], Rq, Ctx}.
content_types_provided(Rq, Ctx) ->
CType =
case wrq:get_req_header("accept", Rq) of
undefined ->
"application/octet-stream";
"*/*" ->
"application/octet-stream";
C ->
C
end,
{[{CType, download}], Rq, Ctx}.
content_types_accepted(Rq, Ctx) ->
CT = case wrq:get_req_header("content-type", Rq) of
undefined ->
"application/octet-stream";
X ->
X
end,
{MT, _Params} = webmachine_util:media_type_to_detail(CT),
{[{MT, upload}], Rq, Ctx}.
resource_exists(Rq0, Ctx) ->
{ok, Bucket, Path} = bksw_util:get_object_and_bucket(Rq0),
case wrq:method(Rq0) of
'PUT' ->
{true, Rq0, Ctx};
_ ->
case bksw_io:open_for_read(Bucket, Path) of
{error, enoent} ->
{false, Rq0, Ctx};
{ok, Ref} ->
{true, Rq0, Ctx#context{entry_ref = Ref}}
end
end.
last_modified(Rq0, Ctx) ->
case entry_md(Ctx) of
{#object{date = Date}, CtxNew} ->
{Date, Rq0, CtxNew};
_ ->
{halt, Rq0, Ctx}
end.
generate_etag(Rq0, Ctx) ->
case entry_md(Ctx) of
{#object{digest = Digest}, CtxNew} ->
{bksw_format:to_base64(Digest), Rq0, CtxNew};
_ ->
{halt, Rq0, Ctx}
end.
delete_resource(Rq0, Ctx) ->
{ok, Bucket, Path} = bksw_util:get_object_and_bucket(Rq0),
{bksw_io:entry_delete(Bucket, Path), Rq0, Ctx}.
entry_md(#context{entry_md = #object{} = Obj} = Ctx) ->
{Obj, Ctx};
entry_md(#context{entry_ref = Ref, entry_md = undefined} = Ctx) ->
case bksw_io:entry_md(Ref) of
{ok, #object{} = Obj} ->
{Obj, Ctx#context{entry_md = Obj}};
Error ->
{Error, Ctx}
end.
download(Rq0, #context{entry_ref = Ref, stream_download = true} = Ctx) ->
{{stream, send_streamed_body(Ref)}, Rq0, Ctx};
download(Rq0, #context{entry_ref = Ref, stream_download = false} = Ctx) ->
{fully_read(Ref, []), Rq0, Ctx}.
upload(Rq0, Ctx) ->
{ok, Bucket, Path} = bksw_util:get_object_and_bucket(Rq0),
case bksw_io:open_for_write(Bucket, Path) of
{ok, Ref} ->
write_streamed_body(wrq:stream_req_body(Rq0, ?BLOCK_SIZE), Ref, Rq0, Ctx);
Error ->
error_logger:error_msg("Erroring opening ~p/~p for writing: ~p~n", [Bucket, Path, Error]),
{false, Rq0, Ctx}
end.
send_streamed_body(Ref) ->
case bksw_io:read(Ref, ?BLOCK_SIZE) of
{ok, eof} ->
bksw_io:finish_read(Ref),
{<<>>, done};
{ok, Data} ->
case byte_size(Data) < ?BLOCK_SIZE of
true ->
bksw_io:finish_read(Ref),
{Data, done};
false ->
{Data, fun() -> send_streamed_body(Ref) end}
end;
Error = {error, _} ->
bksw_io:finish_read(Ref),
error_logger:error_msg("Error occurred during content download: ~p~n", [Error]),
Error
end.
fully_read(Ref, Accum) ->
case bksw_io:read(Ref, ?BLOCK_SIZE) of
{ok, eof} ->
lists:reverse(Accum);
{ok, Data} ->
fully_read(Ref, [Data|Accum]);
Error ->
error_logger:error_msg("Error occurred during content download: ~p~n", [Error]),
lists:reverse(Accum)
end.
write_streamed_body({Data, done}, Ref, Rq0, Ctx) ->
{ok, Ref1} = bksw_io:write(Ref, Data),
{ok, Digest} = bksw_io:finish_write(Ref1),
case get_header('Content-MD5', Rq0) of
undefined ->
Rq1 = bksw_req:with_etag(base64:encode(Digest), Rq0),
{true, wrq:set_response_code(202, Rq1), Ctx};
RawRequestMd5 ->
RequestMd5 = base64:decode(RawRequestMd5),
case RequestMd5 of
Digest ->
Rq1 = bksw_req:with_etag(RawRequestMd5, Rq0),
{true, wrq:set_response_code(202, Rq1), Ctx};
_ ->
{true, wrq:set_response_code(406, Rq0), Ctx}
end
end;
write_streamed_body({Data, Next}, Ref, Rq0, Ctx) ->
{ok, Ref1} = bksw_io:write(Ref, Data),
write_streamed_body(Next(), Ref1, Rq0, Ctx).
get_header(Header, Rq) ->
wrq:get_req_header(Header, Rq).
|
a67e6472a2098d102b624e22b16c716502c732e1550fcd4a9a7a4792a56a3a75 | lispbuilder/lispbuilder | ttf-font-data.lisp |
(in-package #:lispbuilder-sdl)
(export '*ttf-font-vera* :lispbuilder-sdl)
(defparameter *ttf-font-vera*
(make-instance 'ttf-font-definition
:size 32
:filename (merge-pathnames "Vera.ttf" *default-font-path*)))
| null | https://raw.githubusercontent.com/lispbuilder/lispbuilder/589b3c6d552bbec4b520f61388117d6c7b3de5ab/lispbuilder-sdl-ttf/sdl-ttf/ttf-font-data.lisp | lisp |
(in-package #:lispbuilder-sdl)
(export '*ttf-font-vera* :lispbuilder-sdl)
(defparameter *ttf-font-vera*
(make-instance 'ttf-font-definition
:size 32
:filename (merge-pathnames "Vera.ttf" *default-font-path*)))
| |
1139fce0f7724a053541d7a449c5d63c0cb3cbcc3eb1ca087978dbfdaead31ce | portkey-cloud/aws-clj-sdk | _2016-11-23.clj | (ns portkey.aws.states.-2016-11-23 (:require [portkey.aws]))
(def
endpoints
'{"ap-northeast-1"
{:credential-scope {:service "states", :region "ap-northeast-1"},
:ssl-common-name "states.ap-northeast-1.amazonaws.com",
:endpoint "-northeast-1.amazonaws.com",
:signature-version :v4},
"eu-west-1"
{:credential-scope {:service "states", :region "eu-west-1"},
:ssl-common-name "states.eu-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"us-east-2"
{:credential-scope {:service "states", :region "us-east-2"},
:ssl-common-name "states.us-east-2.amazonaws.com",
:endpoint "-east-2.amazonaws.com",
:signature-version :v4},
"ap-southeast-2"
{:credential-scope {:service "states", :region "ap-southeast-2"},
:ssl-common-name "states.ap-southeast-2.amazonaws.com",
:endpoint "-southeast-2.amazonaws.com",
:signature-version :v4},
"ap-southeast-1"
{:credential-scope {:service "states", :region "ap-southeast-1"},
:ssl-common-name "states.ap-southeast-1.amazonaws.com",
:endpoint "-southeast-1.amazonaws.com",
:signature-version :v4},
"ca-central-1"
{:credential-scope {:service "states", :region "ca-central-1"},
:ssl-common-name "states.ca-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-central-1"
{:credential-scope {:service "states", :region "eu-central-1"},
:ssl-common-name "states.eu-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-west-2"
{:credential-scope {:service "states", :region "eu-west-2"},
:ssl-common-name "states.eu-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-west-2"
{:credential-scope {:service "states", :region "us-west-2"},
:ssl-common-name "states.us-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-east-1"
{:credential-scope {:service "states", :region "us-east-1"},
:ssl-common-name "states.us-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4}})
(comment TODO support "json")
| null | https://raw.githubusercontent.com/portkey-cloud/aws-clj-sdk/10623a5c86bd56c8b312f56b76ae5ff52c26a945/src/portkey/aws/states/_2016-11-23.clj | clojure | (ns portkey.aws.states.-2016-11-23 (:require [portkey.aws]))
(def
endpoints
'{"ap-northeast-1"
{:credential-scope {:service "states", :region "ap-northeast-1"},
:ssl-common-name "states.ap-northeast-1.amazonaws.com",
:endpoint "-northeast-1.amazonaws.com",
:signature-version :v4},
"eu-west-1"
{:credential-scope {:service "states", :region "eu-west-1"},
:ssl-common-name "states.eu-west-1.amazonaws.com",
:endpoint "-west-1.amazonaws.com",
:signature-version :v4},
"us-east-2"
{:credential-scope {:service "states", :region "us-east-2"},
:ssl-common-name "states.us-east-2.amazonaws.com",
:endpoint "-east-2.amazonaws.com",
:signature-version :v4},
"ap-southeast-2"
{:credential-scope {:service "states", :region "ap-southeast-2"},
:ssl-common-name "states.ap-southeast-2.amazonaws.com",
:endpoint "-southeast-2.amazonaws.com",
:signature-version :v4},
"ap-southeast-1"
{:credential-scope {:service "states", :region "ap-southeast-1"},
:ssl-common-name "states.ap-southeast-1.amazonaws.com",
:endpoint "-southeast-1.amazonaws.com",
:signature-version :v4},
"ca-central-1"
{:credential-scope {:service "states", :region "ca-central-1"},
:ssl-common-name "states.ca-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-central-1"
{:credential-scope {:service "states", :region "eu-central-1"},
:ssl-common-name "states.eu-central-1.amazonaws.com",
:endpoint "-central-1.amazonaws.com",
:signature-version :v4},
"eu-west-2"
{:credential-scope {:service "states", :region "eu-west-2"},
:ssl-common-name "states.eu-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-west-2"
{:credential-scope {:service "states", :region "us-west-2"},
:ssl-common-name "states.us-west-2.amazonaws.com",
:endpoint "-west-2.amazonaws.com",
:signature-version :v4},
"us-east-1"
{:credential-scope {:service "states", :region "us-east-1"},
:ssl-common-name "states.us-east-1.amazonaws.com",
:endpoint "-east-1.amazonaws.com",
:signature-version :v4}})
(comment TODO support "json")
| |
a76ff682bb8b5c059207418b6d96f378f1b35910a5253b180e0cdb9dc1bde82a | trptcolin/reply | JlineInputReader.clj | (ns reply.reader.jline.JlineInputReader
(:gen-class
:extends java.io.Reader
:state state
:init init
:constructors {[clojure.lang.Associative] []}
:main false))
(defn -init [config]
[[] (atom (assoc config
:internal-queue (java.util.LinkedList.)))])
(defn -read-single [this]
(let [{:keys [^java.util.Deque internal-queue jline-reader set-empty-prompt]}
@(.state this)]
(if-let [c (.peekFirst internal-queue)]
(.removeFirst internal-queue)
(let [line (.readLine jline-reader)]
(set-empty-prompt)
(if line
(do
(doseq [c line]
(.addLast internal-queue (int c)))
(.addLast internal-queue (int \newline))
(-read-single this))
-1)))))
(defn -read-char<>-int-int [this buffer offset length]
(let [{:keys [internal-queue jline-reader]}
@(.state this)]
(loop [i offset
left length]
(if (> left 0)
(let [c (-read-single this)]
(if (= c -1)
(if (= i offset)
-1
(- i offset))
(do (aset-char buffer i c)
(recur (inc i) (dec left)))))
(- i offset)))))
| null | https://raw.githubusercontent.com/trptcolin/reply/f0c730e7a6753494f9f90f02234bc040318da393/src/clj/reply/reader/jline/JlineInputReader.clj | clojure | (ns reply.reader.jline.JlineInputReader
(:gen-class
:extends java.io.Reader
:state state
:init init
:constructors {[clojure.lang.Associative] []}
:main false))
(defn -init [config]
[[] (atom (assoc config
:internal-queue (java.util.LinkedList.)))])
(defn -read-single [this]
(let [{:keys [^java.util.Deque internal-queue jline-reader set-empty-prompt]}
@(.state this)]
(if-let [c (.peekFirst internal-queue)]
(.removeFirst internal-queue)
(let [line (.readLine jline-reader)]
(set-empty-prompt)
(if line
(do
(doseq [c line]
(.addLast internal-queue (int c)))
(.addLast internal-queue (int \newline))
(-read-single this))
-1)))))
(defn -read-char<>-int-int [this buffer offset length]
(let [{:keys [internal-queue jline-reader]}
@(.state this)]
(loop [i offset
left length]
(if (> left 0)
(let [c (-read-single this)]
(if (= c -1)
(if (= i offset)
-1
(- i offset))
(do (aset-char buffer i c)
(recur (inc i) (dec left)))))
(- i offset)))))
| |
d1139a6386bec1978f5bc5698728096b7df7033416e91927b272688fb195ab66 | uzh/canary | main.ml | let () =
Printf.printf "TODO\n" | null | https://raw.githubusercontent.com/uzh/canary/8e2914cc19f2e964938ff2438717d8d677c4a5b4/test/main.ml | ocaml | let () =
Printf.printf "TODO\n" | |
dfbcce25047ba96128ba40379acd1b9bcc50007282e6e225267ebe7ee17d1e9f | cram2/cram | negative-binomial.lisp | Negative binomial and distributions
, Sat Nov 25 2006 - 16:00
Time - stamp : < 2010 - 01 - 17 10:29:42EST negative-binomial.lisp >
;;
Copyright 2006 , 2007 , 2008 , 2009
Distributed under the terms of the GNU General Public License
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
(in-package :gsl)
;;; /usr/include/gsl/gsl_randist.h
;;;;****************************************************************************
;;;; Negative binomial
;;;;****************************************************************************
(defmfun sample
((generator random-number-generator) (type (eql :negative-binomial))
&key probability n)
"gsl_ran_negative_binomial"
(((mpointer generator) :pointer) (probability :double) (n :double))
:definition :method
:c-return :uint
"A random integer from the negative binomial
distribution, the number of failures occurring before n successes
in independent trials with probability of success. The
probability distribution for negative binomial variates is
given by probability (p):
p(k) = {\Gamma(n + k) \over \Gamma(k+1) \Gamma(n) } p^n (1-p)^k
Note that n is not required to be an integer.")
(defmfun negative-binomial-pdf (k p n)
"gsl_ran_negative_binomial_pdf" ((k :uint) (p :double) (n :double))
:c-return :double
"The probability p(k) of obtaining k
from a negative binomial distribution with parameters p and
n, using the formula given in #'sample :negative-binomial.")
(defmfun negative-binomial-P (k p n)
"gsl_cdf_negative_binomial_P" ((k :uint) (p :double) (n :double))
:c-return :double
"The cumulative distribution functions
P(k) for the negative binomial distribution
with parameters p and n.")
(defmfun negative-binomial-Q (k p n)
"gsl_cdf_negative_binomial_Q" ((k :uint) (p :double) (n :double))
:c-return :double
"The cumulative distribution functions
Q(k) for the negative binomial distribution
with parameters p and n.")
;;;;****************************************************************************
;;;;****************************************************************************
(defmfun sample
((generator random-number-generator) (type (eql :pascal))
&key probability n)
"gsl_ran_pascal"
(((mpointer generator) :pointer) (probability :double) (n :uint))
:definition :method
:c-return :uint
"A random integer from the Pascal distribution. The
Pascal distribution is simply a negative binomial distribution with an
integer value of n.
p(k) = {(n + k - 1)! \over k! (n - 1)! } p^n (1-p)^k
k >= 0.")
(defmfun pascal-pdf (k p n)
"gsl_ran_pascal_pdf" ((k :uint) (p :double) (n :uint))
:c-return :double
"The probability p(k) of obtaining k
from a Pascal distribution with parameters p and
n, using the formula given in #'sample :pascal.")
(defmfun pascal-P (k p n)
"gsl_cdf_pascal_P" ((k :uint) (p :double) (n :uint))
:c-return :double
"The cumulative distribution functions
P(k) for the Pascal distribution
with parameters p and n.")
(defmfun pascal-Q (k p n)
"gsl_cdf_pascal_Q" ((k :uint) (p :double) (n :uint))
:c-return :double
"The cumulative distribution functions
Q(k) for the Pascal distribution
with parameters p and n.")
;;;;****************************************************************************
;;;; Examples and unit test
;;;;****************************************************************************
(save-test negative-binomial
(let ((rng (make-random-number-generator +mt19937+ 0)))
(loop for i from 0 to 10
collect
(sample rng :negative-binomial :probability 0.4d0 :n 12.0d0)))
(negative-binomial-pdf 5 0.4d0 12.0d0)
(negative-binomial-P 5 0.4d0 12.0d0)
(negative-binomial-Q 5 0.4d0 12.0d0)
(let ((rng (make-random-number-generator +mt19937+ 0)))
(loop for i from 0 to 10
collect
(sample rng :pascal :probability 0.4d0 :n 12)))
(pascal-pdf 5 0.4d0 12)
(pascal-P 5 0.4d0 12)
(pascal-Q 5 0.4d0 12))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_3rdparty/gsll/src/random/negative-binomial.lisp | lisp |
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
/usr/include/gsl/gsl_randist.h
****************************************************************************
Negative binomial
****************************************************************************
****************************************************************************
****************************************************************************
****************************************************************************
Examples and unit test
**************************************************************************** | Negative binomial and distributions
, Sat Nov 25 2006 - 16:00
Time - stamp : < 2010 - 01 - 17 10:29:42EST negative-binomial.lisp >
Copyright 2006 , 2007 , 2008 , 2009
Distributed under the terms of the GNU General Public License
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(in-package :gsl)
(defmfun sample
((generator random-number-generator) (type (eql :negative-binomial))
&key probability n)
"gsl_ran_negative_binomial"
(((mpointer generator) :pointer) (probability :double) (n :double))
:definition :method
:c-return :uint
"A random integer from the negative binomial
distribution, the number of failures occurring before n successes
in independent trials with probability of success. The
probability distribution for negative binomial variates is
given by probability (p):
p(k) = {\Gamma(n + k) \over \Gamma(k+1) \Gamma(n) } p^n (1-p)^k
Note that n is not required to be an integer.")
(defmfun negative-binomial-pdf (k p n)
"gsl_ran_negative_binomial_pdf" ((k :uint) (p :double) (n :double))
:c-return :double
"The probability p(k) of obtaining k
from a negative binomial distribution with parameters p and
n, using the formula given in #'sample :negative-binomial.")
(defmfun negative-binomial-P (k p n)
"gsl_cdf_negative_binomial_P" ((k :uint) (p :double) (n :double))
:c-return :double
"The cumulative distribution functions
P(k) for the negative binomial distribution
with parameters p and n.")
(defmfun negative-binomial-Q (k p n)
"gsl_cdf_negative_binomial_Q" ((k :uint) (p :double) (n :double))
:c-return :double
"The cumulative distribution functions
Q(k) for the negative binomial distribution
with parameters p and n.")
(defmfun sample
((generator random-number-generator) (type (eql :pascal))
&key probability n)
"gsl_ran_pascal"
(((mpointer generator) :pointer) (probability :double) (n :uint))
:definition :method
:c-return :uint
"A random integer from the Pascal distribution. The
Pascal distribution is simply a negative binomial distribution with an
integer value of n.
p(k) = {(n + k - 1)! \over k! (n - 1)! } p^n (1-p)^k
k >= 0.")
(defmfun pascal-pdf (k p n)
"gsl_ran_pascal_pdf" ((k :uint) (p :double) (n :uint))
:c-return :double
"The probability p(k) of obtaining k
from a Pascal distribution with parameters p and
n, using the formula given in #'sample :pascal.")
(defmfun pascal-P (k p n)
"gsl_cdf_pascal_P" ((k :uint) (p :double) (n :uint))
:c-return :double
"The cumulative distribution functions
P(k) for the Pascal distribution
with parameters p and n.")
(defmfun pascal-Q (k p n)
"gsl_cdf_pascal_Q" ((k :uint) (p :double) (n :uint))
:c-return :double
"The cumulative distribution functions
Q(k) for the Pascal distribution
with parameters p and n.")
(save-test negative-binomial
(let ((rng (make-random-number-generator +mt19937+ 0)))
(loop for i from 0 to 10
collect
(sample rng :negative-binomial :probability 0.4d0 :n 12.0d0)))
(negative-binomial-pdf 5 0.4d0 12.0d0)
(negative-binomial-P 5 0.4d0 12.0d0)
(negative-binomial-Q 5 0.4d0 12.0d0)
(let ((rng (make-random-number-generator +mt19937+ 0)))
(loop for i from 0 to 10
collect
(sample rng :pascal :probability 0.4d0 :n 12)))
(pascal-pdf 5 0.4d0 12)
(pascal-P 5 0.4d0 12)
(pascal-Q 5 0.4d0 12))
|
5744e76792c211637d7cfeb61bef64678109f7459917fd4218e2c586d6b26ae7 | coq/coq | notationextern.ml | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
(** Declaration of uninterpretation functions (i.e. printing rules)
for notations *)
(*i*)
open Util
open Names
open Globnames
open Constrexpr
open Notation_term
open Glob_term
(*i*)
let notation_entry_level_eq s1 s2 = match (s1,s2) with
| InConstrEntrySomeLevel, InConstrEntrySomeLevel -> true
| InCustomEntryLevel (s1,n1), InCustomEntryLevel (s2,n2) -> String.equal s1 s2 && n1 = n2
| (InConstrEntrySomeLevel | InCustomEntryLevel _), _ -> false
let pair_eq f g (x1, y1) (x2, y2) = f x1 x2 && g y1 y2
let notation_binder_kind_eq k1 k2 = match k1, k2 with
| AsIdent, AsIdent -> true
| AsName, AsName -> true
| AsAnyPattern, AsAnyPattern -> true
| AsStrictPattern, AsStrictPattern -> true
| (AsIdent | AsName | AsAnyPattern | AsStrictPattern), _ -> false
let notation_binder_source_eq s1 s2 = match s1, s2 with
| NtnBinderParsedAsSomeBinderKind bk1, NtnBinderParsedAsSomeBinderKind bk2 -> notation_binder_kind_eq bk1 bk2
| NtnBinderParsedAsBinder, NtnBinderParsedAsBinder -> true
| NtnBinderParsedAsConstr bk1, NtnBinderParsedAsConstr bk2 -> notation_binder_kind_eq bk1 bk2
| (NtnBinderParsedAsSomeBinderKind _ | NtnBinderParsedAsBinder | NtnBinderParsedAsConstr _), _ -> false
let ntpe_eq t1 t2 = match t1, t2 with
| NtnTypeConstr, NtnTypeConstr -> true
| NtnTypeBinder s1, NtnTypeBinder s2 -> notation_binder_source_eq s1 s2
| NtnTypeConstrList, NtnTypeConstrList -> true
| NtnTypeBinderList s1, NtnTypeBinderList s2 -> notation_binder_source_eq s1 s2
| (NtnTypeConstr | NtnTypeBinder _ | NtnTypeConstrList | NtnTypeBinderList _), _ -> false
let var_attributes_eq (_, ((entry1, sc1), tp1)) (_, ((entry2, sc2), tp2)) =
notation_entry_level_eq entry1 entry2 &&
pair_eq (List.equal String.equal) (List.equal String.equal) sc1 sc2 &&
ntpe_eq tp1 tp2
let interpretation_eq (vars1, t1 as x1) (vars2, t2 as x2) =
x1 == x2 ||
List.equal var_attributes_eq vars1 vars2 &&
Notation_ops.eq_notation_constr (List.map fst vars1, List.map fst vars2) t1 t2
(* Uninterpretation tables *)
type 'a interp_rule_gen =
| NotationRule of Constrexpr.specific_notation
| AbbrevRule of 'a
type interp_rule = KerName.t interp_rule_gen
We define keys for glob_constr and aconstr to split the syntax entries
according to the key of the pattern ( adapted from by HH )
according to the key of the pattern (adapted from Chet Murthy by HH) *)
type key =
| RefKey of GlobRef.t
| Oth
let key_compare k1 k2 = match k1, k2 with
| RefKey gr1, RefKey gr2 -> GlobRef.CanOrd.compare gr1 gr2
| RefKey _, Oth -> -1
| Oth, RefKey _ -> 1
| Oth, Oth -> 0
module KeyOrd = struct type t = key let compare = key_compare end
module KeyMap = Map.Make(KeyOrd)
type notation_applicative_status =
| AppBoundedNotation of int
| AppUnboundedNotation
| NotAppNotation
type notation_rule = interp_rule * interpretation * notation_applicative_status
let notation_rule_eq (rule1,pat1,s1 as x1) (rule2,pat2,s2 as x2) =
x1 == x2 || (rule1 = rule2 && interpretation_eq pat1 pat2 && s1 = s2)
let strictly_finer_interpretation_than (_,(_,interp1,_)) (_,(_,interp2,_)) =
Notation_ops.strictly_finer_interpretation_than interp1 interp2
let keymap_add key interp map =
let old = try KeyMap.find key map with Not_found -> [] in
(* strictly finer interpretation are kept in front *)
let strictly_finer, rest = List.partition (fun c -> strictly_finer_interpretation_than c interp) old in
KeyMap.add key (strictly_finer @ interp :: rest) map
let keymap_remove key interp map =
let old = try KeyMap.find key map with Not_found -> [] in
KeyMap.add key (List.remove_first (fun (_,rule) -> notation_rule_eq interp rule) old) map
let keymap_find key map =
try KeyMap.find key map
with Not_found -> []
(* Scopes table : interpretation -> scope_name *)
Boolean = for cases pattern also
let notations_key_table = ref (KeyMap.empty : (bool * notation_rule) list KeyMap.t)
let glob_prim_constr_key c = match DAst.get c with
| GRef (ref, _) -> Some (canonical_gr ref)
| GApp (c, _) ->
begin match DAst.get c with
| GRef (ref, _) -> Some (canonical_gr ref)
| _ -> None
end
| GProj ((cst,_), _, _) -> Some (canonical_gr (GlobRef.ConstRef cst))
| _ -> None
let glob_constr_keys c = match DAst.get c with
| GApp (c, _) ->
begin match DAst.get c with
| GRef (ref, _) -> [RefKey (canonical_gr ref); Oth]
| _ -> [Oth]
end
| GProj ((cst,_), _, _) -> [RefKey (canonical_gr (GlobRef.ConstRef cst))]
| GRef (ref,_) -> [RefKey (canonical_gr ref)]
| _ -> [Oth]
let cases_pattern_key c = match DAst.get c with
| PatCstr (ref,_,_) -> RefKey (canonical_gr (GlobRef.ConstructRef ref))
| _ -> Oth
let notation_constr_key = function (* Rem: NApp(NRef ref,[]) stands for @ref *)
| NApp (NRef (ref,_),args) -> RefKey(canonical_gr ref), AppBoundedNotation (List.length args)
| NProj ((cst,_),args,_) -> RefKey(canonical_gr (GlobRef.ConstRef cst)), AppBoundedNotation (List.length args + 1)
| NList (_,_,NApp (NRef (ref,_),args),_,_)
| NBinderList (_,_,NApp (NRef (ref,_),args),_,_) ->
RefKey (canonical_gr ref), AppBoundedNotation (List.length args)
| NRef (ref,_) -> RefKey(canonical_gr ref), NotAppNotation
| NApp (NList (_,_,NApp (NRef (ref,_),args),_,_), args') ->
RefKey (canonical_gr ref), AppBoundedNotation (List.length args + List.length args')
| NApp (NList (_,_,NApp (_,args),_,_), args') ->
Oth, AppBoundedNotation (List.length args + List.length args')
| NApp (_,args) -> Oth, AppBoundedNotation (List.length args)
| NList (_,_,NApp (NVar x,_),_,_) when x = Notation_ops.ldots_var -> Oth, AppUnboundedNotation
| _ -> Oth, NotAppNotation
let uninterp_notations c =
List.map_append (fun key -> List.map snd (keymap_find key !notations_key_table))
(glob_constr_keys c)
let filter_also_for_pattern =
List.map_filter (function (true,x) -> Some x | _ -> None)
let uninterp_cases_pattern_notations c =
filter_also_for_pattern (keymap_find (cases_pattern_key c) !notations_key_table)
let uninterp_ind_pattern_notations ind =
filter_also_for_pattern (keymap_find (RefKey (canonical_gr (GlobRef.IndRef ind))) !notations_key_table)
let remove_uninterpretation rule (metas,c as pat) =
let (key,n) = notation_constr_key c in
notations_key_table := keymap_remove key ((rule,pat,n)) !notations_key_table
let declare_uninterpretation ?(also_in_cases_pattern=true) rule (metas,c as pat) =
let (key,n) = notation_constr_key c in
notations_key_table := keymap_add key (also_in_cases_pattern,(rule,pat,n)) !notations_key_table
let freeze ~marshallable =
!notations_key_table
let unfreeze fkm =
notations_key_table := fkm
let init () =
notations_key_table := KeyMap.empty
let () =
Summary.declare_summary "notation_uninterpretation"
{ stage = Summary.Stage.Interp;
Summary.freeze_function = freeze;
Summary.unfreeze_function = unfreeze;
Summary.init_function = init }
let with_notation_uninterpretation_protection f x =
let fs = freeze ~marshallable:false in
try let a = f x in unfreeze fs; a
with reraise ->
let reraise = Exninfo.capture reraise in
let () = unfreeze fs in
Exninfo.iraise reraise
(** Miscellaneous *)
type notation_use =
| OnlyPrinting
| OnlyParsing
| ParsingAndPrinting
| null | https://raw.githubusercontent.com/coq/coq/f66b58cc7e6a8e245b35c3858989181825c591ce/interp/notationextern.ml | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
* Declaration of uninterpretation functions (i.e. printing rules)
for notations
i
i
Uninterpretation tables
strictly finer interpretation are kept in front
Scopes table : interpretation -> scope_name
Rem: NApp(NRef ref,[]) stands for @ref
* Miscellaneous | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
open Util
open Names
open Globnames
open Constrexpr
open Notation_term
open Glob_term
let notation_entry_level_eq s1 s2 = match (s1,s2) with
| InConstrEntrySomeLevel, InConstrEntrySomeLevel -> true
| InCustomEntryLevel (s1,n1), InCustomEntryLevel (s2,n2) -> String.equal s1 s2 && n1 = n2
| (InConstrEntrySomeLevel | InCustomEntryLevel _), _ -> false
let pair_eq f g (x1, y1) (x2, y2) = f x1 x2 && g y1 y2
let notation_binder_kind_eq k1 k2 = match k1, k2 with
| AsIdent, AsIdent -> true
| AsName, AsName -> true
| AsAnyPattern, AsAnyPattern -> true
| AsStrictPattern, AsStrictPattern -> true
| (AsIdent | AsName | AsAnyPattern | AsStrictPattern), _ -> false
let notation_binder_source_eq s1 s2 = match s1, s2 with
| NtnBinderParsedAsSomeBinderKind bk1, NtnBinderParsedAsSomeBinderKind bk2 -> notation_binder_kind_eq bk1 bk2
| NtnBinderParsedAsBinder, NtnBinderParsedAsBinder -> true
| NtnBinderParsedAsConstr bk1, NtnBinderParsedAsConstr bk2 -> notation_binder_kind_eq bk1 bk2
| (NtnBinderParsedAsSomeBinderKind _ | NtnBinderParsedAsBinder | NtnBinderParsedAsConstr _), _ -> false
let ntpe_eq t1 t2 = match t1, t2 with
| NtnTypeConstr, NtnTypeConstr -> true
| NtnTypeBinder s1, NtnTypeBinder s2 -> notation_binder_source_eq s1 s2
| NtnTypeConstrList, NtnTypeConstrList -> true
| NtnTypeBinderList s1, NtnTypeBinderList s2 -> notation_binder_source_eq s1 s2
| (NtnTypeConstr | NtnTypeBinder _ | NtnTypeConstrList | NtnTypeBinderList _), _ -> false
let var_attributes_eq (_, ((entry1, sc1), tp1)) (_, ((entry2, sc2), tp2)) =
notation_entry_level_eq entry1 entry2 &&
pair_eq (List.equal String.equal) (List.equal String.equal) sc1 sc2 &&
ntpe_eq tp1 tp2
let interpretation_eq (vars1, t1 as x1) (vars2, t2 as x2) =
x1 == x2 ||
List.equal var_attributes_eq vars1 vars2 &&
Notation_ops.eq_notation_constr (List.map fst vars1, List.map fst vars2) t1 t2
type 'a interp_rule_gen =
| NotationRule of Constrexpr.specific_notation
| AbbrevRule of 'a
type interp_rule = KerName.t interp_rule_gen
We define keys for glob_constr and aconstr to split the syntax entries
according to the key of the pattern ( adapted from by HH )
according to the key of the pattern (adapted from Chet Murthy by HH) *)
type key =
| RefKey of GlobRef.t
| Oth
let key_compare k1 k2 = match k1, k2 with
| RefKey gr1, RefKey gr2 -> GlobRef.CanOrd.compare gr1 gr2
| RefKey _, Oth -> -1
| Oth, RefKey _ -> 1
| Oth, Oth -> 0
module KeyOrd = struct type t = key let compare = key_compare end
module KeyMap = Map.Make(KeyOrd)
type notation_applicative_status =
| AppBoundedNotation of int
| AppUnboundedNotation
| NotAppNotation
type notation_rule = interp_rule * interpretation * notation_applicative_status
let notation_rule_eq (rule1,pat1,s1 as x1) (rule2,pat2,s2 as x2) =
x1 == x2 || (rule1 = rule2 && interpretation_eq pat1 pat2 && s1 = s2)
let strictly_finer_interpretation_than (_,(_,interp1,_)) (_,(_,interp2,_)) =
Notation_ops.strictly_finer_interpretation_than interp1 interp2
let keymap_add key interp map =
let old = try KeyMap.find key map with Not_found -> [] in
let strictly_finer, rest = List.partition (fun c -> strictly_finer_interpretation_than c interp) old in
KeyMap.add key (strictly_finer @ interp :: rest) map
let keymap_remove key interp map =
let old = try KeyMap.find key map with Not_found -> [] in
KeyMap.add key (List.remove_first (fun (_,rule) -> notation_rule_eq interp rule) old) map
let keymap_find key map =
try KeyMap.find key map
with Not_found -> []
Boolean = for cases pattern also
let notations_key_table = ref (KeyMap.empty : (bool * notation_rule) list KeyMap.t)
let glob_prim_constr_key c = match DAst.get c with
| GRef (ref, _) -> Some (canonical_gr ref)
| GApp (c, _) ->
begin match DAst.get c with
| GRef (ref, _) -> Some (canonical_gr ref)
| _ -> None
end
| GProj ((cst,_), _, _) -> Some (canonical_gr (GlobRef.ConstRef cst))
| _ -> None
let glob_constr_keys c = match DAst.get c with
| GApp (c, _) ->
begin match DAst.get c with
| GRef (ref, _) -> [RefKey (canonical_gr ref); Oth]
| _ -> [Oth]
end
| GProj ((cst,_), _, _) -> [RefKey (canonical_gr (GlobRef.ConstRef cst))]
| GRef (ref,_) -> [RefKey (canonical_gr ref)]
| _ -> [Oth]
let cases_pattern_key c = match DAst.get c with
| PatCstr (ref,_,_) -> RefKey (canonical_gr (GlobRef.ConstructRef ref))
| _ -> Oth
| NApp (NRef (ref,_),args) -> RefKey(canonical_gr ref), AppBoundedNotation (List.length args)
| NProj ((cst,_),args,_) -> RefKey(canonical_gr (GlobRef.ConstRef cst)), AppBoundedNotation (List.length args + 1)
| NList (_,_,NApp (NRef (ref,_),args),_,_)
| NBinderList (_,_,NApp (NRef (ref,_),args),_,_) ->
RefKey (canonical_gr ref), AppBoundedNotation (List.length args)
| NRef (ref,_) -> RefKey(canonical_gr ref), NotAppNotation
| NApp (NList (_,_,NApp (NRef (ref,_),args),_,_), args') ->
RefKey (canonical_gr ref), AppBoundedNotation (List.length args + List.length args')
| NApp (NList (_,_,NApp (_,args),_,_), args') ->
Oth, AppBoundedNotation (List.length args + List.length args')
| NApp (_,args) -> Oth, AppBoundedNotation (List.length args)
| NList (_,_,NApp (NVar x,_),_,_) when x = Notation_ops.ldots_var -> Oth, AppUnboundedNotation
| _ -> Oth, NotAppNotation
let uninterp_notations c =
List.map_append (fun key -> List.map snd (keymap_find key !notations_key_table))
(glob_constr_keys c)
let filter_also_for_pattern =
List.map_filter (function (true,x) -> Some x | _ -> None)
let uninterp_cases_pattern_notations c =
filter_also_for_pattern (keymap_find (cases_pattern_key c) !notations_key_table)
let uninterp_ind_pattern_notations ind =
filter_also_for_pattern (keymap_find (RefKey (canonical_gr (GlobRef.IndRef ind))) !notations_key_table)
let remove_uninterpretation rule (metas,c as pat) =
let (key,n) = notation_constr_key c in
notations_key_table := keymap_remove key ((rule,pat,n)) !notations_key_table
let declare_uninterpretation ?(also_in_cases_pattern=true) rule (metas,c as pat) =
let (key,n) = notation_constr_key c in
notations_key_table := keymap_add key (also_in_cases_pattern,(rule,pat,n)) !notations_key_table
let freeze ~marshallable =
!notations_key_table
let unfreeze fkm =
notations_key_table := fkm
let init () =
notations_key_table := KeyMap.empty
let () =
Summary.declare_summary "notation_uninterpretation"
{ stage = Summary.Stage.Interp;
Summary.freeze_function = freeze;
Summary.unfreeze_function = unfreeze;
Summary.init_function = init }
let with_notation_uninterpretation_protection f x =
let fs = freeze ~marshallable:false in
try let a = f x in unfreeze fs; a
with reraise ->
let reraise = Exninfo.capture reraise in
let () = unfreeze fs in
Exninfo.iraise reraise
type notation_use =
| OnlyPrinting
| OnlyParsing
| ParsingAndPrinting
|
6155723d5368a0c01ba24e82f784880e3a4b51e01de21c56fe5373a7bbb8d5a3 | jyh/metaprl | itt_set.ml | doc <:doc<
@module[Itt_set]
The @tt[Itt_set] module defines a ``set'' type, or more precisely,
it defines a type by quantified @emph{separation}. The form of the type is
$@set{x; T; P[x]}$, where $T$ is a type, and $P[x]$ is a type for
any element $x @in T$. The elements of the set type are those elements
of $x @in T$ where the proposition $P[x]$ is true.
The set type is a ``squash'' type: the type is similar to the
dependent product $x@colon T @times P[x]$ (Section @refmodule[Itt_dprod]),
but the proof $P[x]$ is omitted (squashed). The set type <<{x: 'T| 'P['x]}>>
is always a subtype of $T$.
@docoff
----------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages.
See the file doc/htmlman/default.html or visit /
for more information.
Copyright (C) 1997-2006 MetaPRL Group, Cornell University and
California Institute of Technology
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey @email{}
Modified by: Aleksey Nogin @email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Itt_squash
extends Itt_equal
extends Itt_unit
extends Itt_subtype
extends Itt_struct
extends Itt_dprod
extends Itt_image
doc docoff
open Basic_tactics
open Itt_equal
open Itt_subtype
(************************************************************************
* TERMS *
************************************************************************)
doc <:doc<
@terms
The @tt{set} term defines the set type.
>>
(* The set type is *defined*, not primitive: it is the image (Itt_image) of
   the dependent product x:'A * 'B['x] under the first projection fst.
   Taking the image discards the proof component 'B['x], which is what makes
   the set type a "squash" type as described in the module header. *)
define unfold_set: set{'A; x. 'B['x]} <--> Img{x:'A * 'B['x]; p.fst{'p}}
doc docoff
(* Canonical set term << { a: 'A | 'B['a] } >> and its operator name,
   used as the key for term recognition and for the resource tables below. *)
let set_term = << { a: 'A | 'B['a] } >>
let set_opname = opname_of_term set_term
(* Recognizer, destructor, and constructor for set terms.  The set type has
   one closed subterm ('A) and one subterm with a single binding ('B['a]),
   so the generic dep0/dep1 term operations apply directly. *)
let is_set_term = is_dep0_dep1_term set_opname
let dest_set = dest_dep0_dep1_term set_opname
let mk_set_term = mk_dep0_dep1_term set_opname
(************************************************************************
* DISPLAY FORMS *
************************************************************************)
(* Display form: render {x:'A | 'B} with the standard mathematical
   set-builder notation provided by math_set. *)
dform set_df1 : {x:'A | 'B} = math_set {'x; 'A; 'B}
(************************************************************************
* RULES *
************************************************************************)
doc <:doc<
@rules
@modsubsection{Equality and typehood}
The set type $@set{x; A; B[x]}$ is a type if $A$ is a type,
and $B[x]$ is a type for any $x @in A$. Equality of the set
type is @emph{intensional}. Two set types are equal only if their
parts are equal. Note that it is possible to define
an @emph{extensional} version of a set type using the @emph{intensional} one
by applying the @hrefterm[esquash] operator to the set predicate.
>>
(* Intensional equality of set types in univ[i:l]: the carrier types must be
   equal and the predicates must be (pointwise) equal.  Registered with the
   intro resource so autotactics apply it to equality goals on set types. *)
interactive setEquality {| intro [] |} :
   [wf] sequent { <H> >- 'A1 = 'A2 in univ[i:l] } -->
   [wf] sequent { <H>; a1: 'A1 >- 'B1['a1] = 'B2['a1] in univ[i:l] } -->
   sequent { <H> >- { a1:'A1 | 'B1['a1] } = { a2:'A2 | 'B2['a2] } in univ[i:l] }
(* Typehood: { a:'A | 'B['a] } is a type when 'A is a type and 'B['a]
   is a type for every a in 'A. *)
interactive setType {| intro [] |} :
   [wf] sequent { <H> >- "type"{'A} } -->
   [wf] sequent { <H>; a: 'A >- "type"{'B['a]} } -->
   sequent { <H> >- "type"{ { a:'A | 'B['a] } } }
doc <:doc<
@modsubsection{Membership}
Two terms $a_1$ and $a_2$ are equal in the set type $@set{a; A; B[a]}$
if they are equal in $A$ and also $B[a_1]$ is true.
>>
(* Membership equality: 'a1 = 'a2 in the set type when they are equal in the
   carrier 'A and the predicate holds at 'a1.  The predicate subgoal is
   squashed, so only truth — not a proof term — is required; the final
   well-formedness subgoal ensures 'B is a type family over 'A. *)
interactive setMemberEquality {| intro [] |} :
   [wf] sequent { <H> >- 'a1 = 'a2 in 'A } -->
   [assertion] sequent { <H> >- squash{'B['a1]} } -->
   [wf] sequent { <H>; a: 'A >- "type"{'B['a]} } -->
   sequent { <H> >- 'a1 = 'a2 in { a:'A | 'B['a] } }
doc <:doc<
@modsubsection{Introduction}
A set type $@set{x; A; B[x]}$ is true if there is an element $a @in A$
where $B[a]$ is true.
>>
(* Introduction: to inhabit { x:'A | 'B['x] }, supply a witness 'a
   (as the rule argument), show 'a is in 'A, and show squash{'B['a]}.
   The witness must be given explicitly since it does not appear in the goal. *)
interactive setMemberFormation {| intro [] |} 'a :
   [wf] sequent { <H> >- 'a = 'a in 'A } -->
   [main] sequent { <H> >- squash{'B['a]} } -->
   [wf] sequent { <H>; x: 'A >- "type"{'B['x]} } -->
   sequent { <H> >- { x:'A | 'B['x] } }
doc <:doc<
@modsubsection{Elimination}
An assumption with a set type $u@colon @set{x; A; B[x]}$ asserts two facts:
that $u @in A$ and $B[u]$. However, the proof of $B[u]$ is unavailable. The
$@squash{B[u]}$ hypothesis states that $B[u]$ is true, but its proof is
omitted.
>>
(* Elimination: a hypothesis u : { x:'A | 'B['x] } is replaced by
   u : 'A together with the squashed fact squash{'B['u]} — the proof of
   'B['u] itself is unavailable, only its truth.  Marked AutoOK so the
   autotactic may apply it eagerly. *)
interactive setElimination {| elim [AutoOK] |} 'H :
   ('t['u;'i] : sequent { <H>; u: 'A; i: squash{'B['u]}; <J['u]> >- 'T['u] }) -->
   sequent { <H>; u: { x:'A | 'B['x] }; <J['u]> >- 'T['u] }
(* Direct consequence of elimination: an element of the set type is an
   element of the carrier.  Registered with nth_hyp so hypothesis-matching
   tactics can close such goals immediately. *)
interactive set_member {| nth_hyp |} 'H :
   sequent { <H>; u: { x: 'A | 'B['x] }; <J['u]> >- 'u in 'A }
doc <:doc<
@modsubsection{Subtyping}
The set type $@set{x; A; B[x]}$ is always a subtype of $A$ if
the set type is really a type. This rule is added to the
@hrefresource[subtype_resource].
>>
(* Subtyping: whenever the set type is well formed, it is a subtype of its
   carrier 'A.  This rule backs the subtype-resource entry added at the
   bottom of the file. *)
interactive set_subtype {| intro [] |} :
   [wf] sequent { <H> >- 'A Type } -->
   [wf] sequent { <H> >- { a: 'A | 'B['a] } Type } -->
   sequent { <H> >- { a: 'A | 'B['a] } subtype 'A }
(* Monotonicity (covariance) of the set constructor: the subtype relation
   holds between two set types when the carriers are in the subtype relation
   and the first predicate implies (the squash of) the second on the smaller
   carrier. *)
interactive set_monotone {| intro [] |} :
   [wf] sequent { <H> >- { a: 'A_1 | 'B_1['a] } Type } -->
   [wf] sequent { <H>; a:'A_2 >- 'B_2['a] Type } -->
   sequent { <H> >- 'A_1 subtype 'A_2 } -->
   sequent { <H>; a:'A_1; 'B_1['a] >- squash{'B_2['a]} } -->
   sequent { <H> >- { a: 'A_1 | 'B_1['a] } subtype { a: 'A_2 | 'B_2['a] } }
doc docoff
(************************************************************************
* TYPE INFERENCE *
************************************************************************)
(* Type inference: for a set term, infer the universe level from its two
   subterms via the generic dep0/dep1 universe-inference helper. *)
let resource typeinf += (set_term, infer_univ_dep0_dep1 dest_set)
(************************************************************************
* SUBTYPING *
************************************************************************)
(* Register the subtyping fact { a: 'A | 'B['a] } subtype 'A with the
   subtype resource, justified by the set_subtype rule above. *)
let resource sub += (LRSubtype ([<< { a: 'A | 'B['a] } >>, << 'A >>], set_subtype))
(*
* -*-
* Local Variables:
* End:
* -*-
*)
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/itt/core/itt_set.ml | ocaml | ***********************************************************************
* TERMS *
***********************************************************************
***********************************************************************
* DISPLAY FORMS *
***********************************************************************
***********************************************************************
* RULES *
***********************************************************************
***********************************************************************
* TYPE INFERENCE *
***********************************************************************
***********************************************************************
* SUBTYPING *
***********************************************************************
* -*-
* Local Variables:
* End:
* -*-
| doc <:doc<
@module[Itt_set]
The @tt[Itt_set] module defines a ``set'' type, or more precisely,
it defines a type by quantified @emph{separation}. The form of the type is
$@set{x; T; P[x]}$, where $T$ is a type, and $P[x]$ is a type for
any element $x @in T$. The elements of the set type are those elements
of $x @in T$ where the proposition $P[x]$ is true.
The set type is a ``squash'' type: the type is similar to the
dependent product $x@colon T @times P[x]$ (Section @refmodule[Itt_dprod]),
but the proof $P[x]$ is omitted (squashed). The set type <<{x: 'T| 'P['x]}>>
is always a subtype of $T$.
@docoff
----------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages.
See the file doc/htmlman/default.html or visit /
for more information.
Copyright (C) 1997-2006 MetaPRL Group, Cornell University and
California Institute of Technology
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey @email{}
Modified by: Aleksey Nogin @email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Itt_squash
extends Itt_equal
extends Itt_unit
extends Itt_subtype
extends Itt_struct
extends Itt_dprod
extends Itt_image
doc docoff
open Basic_tactics
open Itt_equal
open Itt_subtype
doc <:doc<
@terms
The @tt{set} term defines the set type.
>>
define unfold_set: set{'A; x. 'B['x]} <--> Img{x:'A * 'B['x]; p.fst{'p}}
doc docoff
let set_term = << { a: 'A | 'B['a] } >>
let set_opname = opname_of_term set_term
let is_set_term = is_dep0_dep1_term set_opname
let dest_set = dest_dep0_dep1_term set_opname
let mk_set_term = mk_dep0_dep1_term set_opname
dform set_df1 : {x:'A | 'B} = math_set {'x; 'A; 'B}
doc <:doc<
@rules
@modsubsection{Equality and typehood}
The set type $@set{x; A; B[x]}$ is a type if $A$ is a type,
and $B[x]$ is a type for any $x @in A$. Equality of the set
type is @emph{intensional}. Two set types are equal only if their
parts are equal. Note that it is possible to define
an @emph{extensional} version of a set type using the @emph{intensional} one
by applying the @hrefterm[esquash] operator to the set predicate.
>>
interactive setEquality {| intro [] |} :
[wf] sequent { <H> >- 'A1 = 'A2 in univ[i:l] } -->
[wf] sequent { <H>; a1: 'A1 >- 'B1['a1] = 'B2['a1] in univ[i:l] } -->
sequent { <H> >- { a1:'A1 | 'B1['a1] } = { a2:'A2 | 'B2['a2] } in univ[i:l] }
interactive setType {| intro [] |} :
[wf] sequent { <H> >- "type"{'A} } -->
[wf] sequent { <H>; a: 'A >- "type"{'B['a]} } -->
sequent { <H> >- "type"{ { a:'A | 'B['a] } } }
doc <:doc<
@modsubsection{Membership}
Two terms $a_1$ and $a_2$ are equal in the set type $@set{a; A; B[a]}$
if they are equal in $A$ and also $B[a_1]$ is true.
>>
interactive setMemberEquality {| intro [] |} :
[wf] sequent { <H> >- 'a1 = 'a2 in 'A } -->
[assertion] sequent { <H> >- squash{'B['a1]} } -->
[wf] sequent { <H>; a: 'A >- "type"{'B['a]} } -->
sequent { <H> >- 'a1 = 'a2 in { a:'A | 'B['a] } }
doc <:doc<
@modsubsection{Introduction}
A set type $@set{x; A; B[x]}$ is true if there is an element $a @in A$
where $B[a]$ is true.
>>
interactive setMemberFormation {| intro [] |} 'a :
[wf] sequent { <H> >- 'a = 'a in 'A } -->
[main] sequent { <H> >- squash{'B['a]} } -->
[wf] sequent { <H>; x: 'A >- "type"{'B['x]} } -->
sequent { <H> >- { x:'A | 'B['x] } }
doc <:doc<
@modsubsection{Elimination}
An assumption with a set type $u@colon @set{x; A; B[x]}$ asserts two facts:
that $u @in A$ and $B[u]$. However, the proof of $B[u]$ is unavailable. The
$@squash{B[u]}$ hypothesis states that $B[u]$ is true, but its proof is
omitted.
>>
interactive setElimination {| elim [AutoOK] |} 'H :
('t['u;'i] : sequent { <H>; u: 'A; i: squash{'B['u]}; <J['u]> >- 'T['u] }) -->
sequent { <H>; u: { x:'A | 'B['x] }; <J['u]> >- 'T['u] }
interactive set_member {| nth_hyp |} 'H :
sequent { <H>; u: { x: 'A | 'B['x] }; <J['u]> >- 'u in 'A }
doc <:doc<
@modsubsection{Subtyping}
The set type $@set{x; A; B[x]}$ is always a subtype of $A$ if
the set type is really a type. This rule is added to the
@hrefresource[subtype_resource].
>>
interactive set_subtype {| intro [] |} :
[wf] sequent { <H> >- 'A Type } -->
[wf] sequent { <H> >- { a: 'A | 'B['a] } Type } -->
sequent { <H> >- { a: 'A | 'B['a] } subtype 'A }
interactive set_monotone {| intro [] |} :
[wf] sequent { <H> >- { a: 'A_1 | 'B_1['a] } Type } -->
[wf] sequent { <H>; a:'A_2 >- 'B_2['a] Type } -->
sequent { <H> >- 'A_1 subtype 'A_2 } -->
sequent { <H>; a:'A_1; 'B_1['a] >- squash{'B_2['a]} } -->
sequent { <H> >- { a: 'A_1 | 'B_1['a] } subtype { a: 'A_2 | 'B_2['a] } }
doc docoff
let resource typeinf += (set_term, infer_univ_dep0_dep1 dest_set)
let resource sub += (LRSubtype ([<< { a: 'A | 'B['a] } >>, << 'A >>], set_subtype))
|
f7097391d7c58b5aa4c268cae7a168c57c185283d510b2119c71efc304345a4e | jlesquembre/clojars-publish-action | entrypoint.clj | (ns entrypoint
(:require
[clojure.data.xml :as xml]
[clojure.tools.deps.alpha.script.generate-manifest2 :as gen-manifest]
[clojure.zip :as zip]
[clojure.data.zip.xml :as zip-xml]
[clojure.string :as str]
[clojure.java.io :as io]
[clojure.edn :as edn]
[hf.depstar.uberjar :refer [build-jar]]
[deps-deploy.deps-deploy :as deploy]))
;; NOTE(review): the xmlns URI string below looks truncated by data
;; extraction — Maven pom.xml files are namespaced; verify this alias
;; matches the document's actual xmlns before relying on ::pom/* keys.
(xml/alias-uri 'pom "")

;; Git ref that triggered the build, e.g. "refs/tags/v1.0" or
;; "refs/heads/main"; sentinel value when the env var is unset.
(def github-ref (or (System/getenv "GITHUB_REF") "refs/UNKNOWN"))

;; Truthy unless USE_GIT_REF is (case-insensitively) "false".
;; Controls whether the version is derived from `github-ref`.
(def use-git-ref (case (some-> "USE_GIT_REF" System/getenv str/lower-case)
                   "false" nil
                   true))
(defn nav-xml
  "Parse the XML string `xml` and return the first zipper location
   matching `path` (a sequence of clojure.data.zip.xml selectors), or
   nil when `xml` is nil or nothing matches."
  [xml path]
  (when xml
    (apply zip-xml/xml1->
           (-> xml xml/parse-str zip/xml-zip)
           path)))
(defn get-content
  "Return the text content of the first node matching `path` in the XML
   string `xml`, or nil when there is no match."
  [xml & path]
  (let [loc (nav-xml xml path)]
    (when loc
      (zip-xml/text loc))))
(defn update-content
  "Replace the :content of the first node matching `path` with `value`
   and return the re-serialized XML string; when nothing matches, the
   original `xml` string is returned unchanged."
  [xml value & path]
  (if-let [loc (nav-xml xml path)]
    (-> loc
        (zip/edit (fn [node] (assoc node :content value)))
        zip/root
        xml/emit-str)
    xml))
(defn get-version
  "Compute the artifact version. When `use-git-ref` is set, it is the
   last segment of `github-ref`, with \"-SNAPSHOT\" appended unless the
   ref is a tag; otherwise it is read from the pom's <version>."
  [xml]
  (if-not use-git-ref
    (get-content xml ::pom/project ::pom/version)
    (let [ref-name (last (str/split github-ref #"/"))]
      (if (str/starts-with? github-ref "refs/tags/")
        ref-name
        (str ref-name "-SNAPSHOT")))))
(defn file->str
  "Concatenate `parts` into a path, and return the file's contents as a
   string, or nil when the file does not exist."
  [& parts]
  ;; The original joined the path twice: `(apply str path)` on an
  ;; already-joined string re-applies str to its character seq. Join once.
  (let [path (apply str parts)
        f    (io/file path)]
    (when (.exists f)
      (slurp f))))
(defn update-pom-version!
  "Rewrite ./pom.xml in place, setting both <version> and <scm><tag>
   to `version`."
  [pom version]
  (spit "./pom.xml"
        (-> pom
            (update-content version ::pom/project ::pom/version)
            (update-content version ::pom/project ::pom/scm ::pom/tag))))
(defn -main
  "Entry point: read pom.xml, compute the release version (from the git
   ref when USE_GIT_REF is enabled), regenerate the pom, build a thin
   jar under ./target, and deploy it to Clojars via deps-deploy."
  [& args]
  ;; NOTE(review): `deps` is bound but never used below — the read of
  ;; ./deps.edn only checks that the file parses. TODO confirm whether
  ;; the binding can be dropped.
  (let [deps (-> (file->str "./deps.edn") edn/read-string)
        pom (file->str "./pom.xml")
        version (get-version pom)
        project-name (get-content pom ::pom/project ::pom/name)
        jar-name (str project-name "-" version ".jar")
        jar-path (str "./target/" jar-name)]
    (when use-git-ref
      (update-pom-version! pom version))
    ;; Equivalent of `clojure -Spom` (see clojure-install resources);
    ;; previously: (print (:out (sh "clojure" "-Spom")))
    (gen-manifest/-main "--config-project" "./deps.edn" "--gen" "pom")
    (build-jar {:jar jar-path :jar-type :thin :verbose true})
    ;; Replaces the earlier mvn invocation:
    ;;   mvn deploy:deploy-file -Dfile="target/${jar_name}" -DpomFile=pom.xml
    ;;       -DrepositoryId=clojars -Durl=<clojars repo url>
    ;;       -Dclojars.username="${CLOJARS_USERNAME}"
    ;;       -Dclojars.password="${CLOJARS_PASSWORD}"
    (deploy/-main "deploy" jar-path)))
| null | https://raw.githubusercontent.com/jlesquembre/clojars-publish-action/9420e56c7c8555802306a8673c022e2ad3e95e4c/src/entrypoint.clj | clojure | -install/blob/2ee355398e655e1d1b57e4f5ee658d087ccaea7f/src/main/resources/clojure#L342
(print (:out (sh "clojure" "-Spom")))
mvn deploy:deploy-file -Dfile="target/${jar_name}" -DpomFile=pom.xml \
-DrepositoryId=clojars -Durl=/ \
-Dclojars.username="${CLOJARS_USERNAME}" \
-Dclojars.password="${CLOJARS_PASSWORD}" | (ns entrypoint
(:require
[clojure.data.xml :as xml]
[clojure.tools.deps.alpha.script.generate-manifest2 :as gen-manifest]
[clojure.zip :as zip]
[clojure.data.zip.xml :as zip-xml]
[clojure.string :as str]
[clojure.java.io :as io]
[clojure.edn :as edn]
[hf.depstar.uberjar :refer [build-jar]]
[deps-deploy.deps-deploy :as deploy]))
(xml/alias-uri 'pom "")
(def github-ref (or (System/getenv "GITHUB_REF") "refs/UNKNOWN"))
(def use-git-ref (case (some-> "USE_GIT_REF" System/getenv str/lower-case)
"false" nil
true))
(defn nav-xml
[xml path]
(let [f #(apply zip-xml/xml1-> % path)]
(some-> xml
(xml/parse-str)
(zip/xml-zip)
f)))
(defn get-content
[xml & path]
(some-> (nav-xml xml path)
(zip-xml/text)))
(defn update-content
[xml value & path]
(or (some-> (nav-xml xml path)
(zip/edit #(assoc % :content value))
(zip/root)
(xml/emit-str))
xml))
(defn get-version
[xml]
(if use-git-ref
(cond-> (last (str/split github-ref #"/"))
(not (str/starts-with? github-ref "refs/tags/"))
(str "-SNAPSHOT"))
(get-content xml ::pom/project ::pom/version)))
(defn file->str
[& parts]
(let [path (apply str parts)
f (io/file (apply str path))]
(when (.exists f)
(slurp f))))
(defn update-pom-version!
[pom version]
(let [new-pom (-> pom
(update-content version ::pom/project ::pom/version)
(update-content version ::pom/project ::pom/scm ::pom/tag))]
(spit "./pom.xml" new-pom)))
(defn -main
[& args]
(let [deps (-> (file->str "./deps.edn") edn/read-string)
pom (file->str "./pom.xml")
version (get-version pom)
project-name (get-content pom ::pom/project ::pom/name)
jar-name (str project-name "-" version ".jar")
jar-path (str "./target/" jar-name)]
(when use-git-ref
(update-pom-version! pom version))
(gen-manifest/-main "--config-project" "./deps.edn" "--gen" "pom")
(build-jar {:jar jar-path :jar-type :thin :verbose true})
(deploy/-main "deploy" jar-path)))
|
84834271ed26750a81c66f690572c7cf844698bd9b78fbd3b8a73e9cba8bcb6b | Octachron/codept | module.ml |
(* Debug printer: Format.ifprintf discards its output, so this is a no-op
   sink for debug messages. *)
let debug fmt = Format.ifprintf Pp.err ("Debug:" ^^ fmt ^^"@.")
module Arg = struct
  (** Functor argument: an optional name together with its signature. *)
  type 'a t = { name:Name.t option; signature:'a }
  type 'a arg = 'a t

  (* Print "(name:signature)" for a named argument, "()" for the unit
     functor argument. *)
  let pp pp ppf = function
    | Some arg ->
      Pp.fp ppf "(%a:%a)" Name.pp_opt arg.name pp arg.signature
    | None -> Pp.fp ppf "()"

  (* Schematic (de)serialization: an argument is encoded as the tuple
     [name; signature]. *)
  let sch sign = let open Schematic.Tuple in
    let fwd arg = [arg.name; arg.signature] in
    let rev [name;signature] = {name;signature} in
    Schematic.custom Schematic.[option String; sign] fwd rev

  (* Map over the signature component only; the name is preserved. *)
  let map f x = { x with signature = f x.signature }

  (* Like [pp] but emits OCaml-source-like syntax, for debugging. *)
  let reflect pp ppf = function
    | Some arg ->
      Pp.fp ppf {|Some {name="%a"; %a}|} Name.pp_opt arg.name pp arg.signature
    | None -> Pp.fp ppf "()"

  (* Print an argument list "a1→a2→…", with a trailing arrow when the
     list is non-empty. *)
  let pp_s pp_sig ppf args =
    Pp.fp ppf "%a" (Pp.(list ~sep:(s "→@,")) @@ pp pp_sig) args;
    if List.length args > 0 then Pp.fp ppf "→"
end
module Divergence= struct
  (** What kind of unknowable module broke precise signature tracking. *)
  type origin =
    | First_class_module
    | External

  (** [root] is the name of the module whose opening/inclusion caused the
      divergence (if any); [loc] is the position of the accident. *)
  type t = { root: Name.t option; origin:origin; loc: Uloc.t }

  let pp_origin ppf s =
    Pp.fp ppf "%s" @@
    match s with
    | First_class_module -> "first class module"
    | External -> "external module"

  (* Debug printers emitting OCaml-source-like syntax. *)
  module Reflect = struct
    let origin_r ppf s =
      Pp.fp ppf "%s" @@
      match s with
      | First_class_module -> "First_class_module"
      | External -> "External"
    let rloc ppf =
      let open Loc in
      function
      | Nowhere -> Pp.fp ppf "Nowhere"
      | Simple {line;start;stop} -> Pp.fp ppf "Simple{line=%d;start=%d;stop=%d}"
                                      line start stop
      | Multiline {start; stop} ->
        let pair ppf (x,y)= Pp.fp ppf "(%d,%d)" x y in
        Pp.fp ppf "Multiline{start=%a; stop =%a}"
          pair start pair stop
    let floc ppf {Uloc.pkg; loc} =
      Pp.fp ppf "(%a:%a)" Pkg.reflect pkg rloc loc
    let divergence ppf {root;loc;origin} =
      Pp.fp ppf "{root=%a;loc=%a;origin=%a}" Pp.estring Option.(root><"")
        floc loc origin_r origin
  end
  let reflect = Reflect.divergence

  (* Human-readable form: "open <root> at <pkg>:<loc> (<origin>)". *)
  let pp ppf {root; origin; loc= {pkg=path;loc} } =
    Pp.fp ppf "open %s at %a:%a (%a)"
      Option.(root><"")
      Pkg.pp path Loc.pp loc
      pp_origin origin

  (* Schematic encoding of [origin] as a two-constructor sum. *)
  let sch_origin =
    let open Schematic in
    custom
      (Sum[ "First_class_module", Void; "External", Void])
      (function
        | First_class_module -> C E
        | External -> C (S E))
      (function
        | C E -> First_class_module
        | C (S E) -> External
        | _ -> .)

  (* Schematic encoding of [t] as [root; origin; [pkg; loc]]. *)
  let sch = let open Schematic in let open Tuple in
    custom
      Schematic.[option String; sch_origin; [Pkg.sch; Loc.Sch.t ]]
      (fun r -> [r.root;r.origin; [r.loc.pkg; r.loc.loc] ])
      (fun [root;origin;[pkg;loc]] -> {root;origin;loc={pkg;loc}} )
end
module Origin = struct
  (** Origin of a module-level name: where does it come from? *)
  type t =
    | Unit of {source:Pkg.t; path:Namespaced.t}
    (** aka toplevel module *)
    | Submodule
    | Namespace (** Temporary module from namespace *)
    | First_class (** Not resolved first-class module *)
    | Arg (** functor argument *)
    | Phantom of bool * Divergence.t
    (** Ambiguous module, that could be an external module *)
  (* NOTE: the [First_class] constructor line had been destroyed by a
     comment-extraction artifact (only its doc text remained); it is
     restored here — [pp], [Sch], [reflect] and [at_most] below all
     match on it. *)

  let pp ppf = function
    | Unit s ->
      begin match s.source.Pkg.source with
        | Pkg.Local-> Pp.fp ppf "#"
        | Pkg x -> Pp.fp ppf "#[%a]" Namespaced.pp x
        | Unknown -> Pp.fp ppf "#!"
        | Special n -> Pp.fp ppf "*(%s)" n
      end
    | Submodule -> Pp.fp ppf "."
    | Namespace -> Pp.fp ppf "(nms)"
    | First_class -> Pp.fp ppf "'"
    | Arg -> Pp.fp ppf "§"
    | Phantom _ -> Pp.fp ppf "👻"

  (* Schematic encoding; the sum order below fixes the wire format and is
     independent of the constructor declaration order. *)
  module Sch = struct open Schematic
    let raw =
      Sum [ "Unit", [Pkg.sch; Namespaced.sch];
            "Submodule", Void; "First_class", Void; "Arg", Void;
            "Phantom", [ Bool; Divergence.sch];
            "Namespace", Void
          ]
    let t = let open Tuple in
      custom raw
        (function
          | Unit {source; path} -> C (Z [source;path])
          | Submodule -> C (S E)
          | First_class -> C (S (S E))
          | Arg -> C(S (S (S E)))
          | Phantom (b,div) -> C (S (S (S (S(Z [b;div])))))
          | Namespace -> C (S (S (S (S (S E)))))
        )
        (function
          | C Z [source;path] -> Unit {source;path}
          | C S E -> Submodule
          | C S S E -> First_class
          | C S S S E -> Arg
          | C S S S S Z [b;d] -> Phantom(b,d)
          | C S S S S S E -> Namespace
          | _ -> .
        )
  end let sch = Sch.t

  (* Debug printer emitting constructor names. *)
  let reflect ppf = function
    | Unit u -> Pp.fp ppf "Unit {source=%a;path=%a}"
                  Pkg.reflect u.source Namespaced.reflect u.path
    | Submodule -> Pp.fp ppf "Submodule"
    | First_class -> Pp.fp ppf "First_class"
    | Arg -> Pp.fp ppf "Arg"
    | Phantom (root,b) -> Pp.fp ppf "Phantom (%b,%a)" root Divergence.reflect b
    | Namespace -> Pp.fp ppf "Namespace"

  (* [at_most max v] keeps the most restrictive of the two origins. *)
  let at_most max v = match max, v with
    | (First_class|Arg|Namespace) , _ -> max
    | Unit _ , v -> v
    | Submodule, Unit _ -> Submodule
    | Phantom _, Submodule -> Submodule
    | Submodule, v -> v
    | Phantom _ as ph , _ -> ph
end
type origin = Origin.t

(** Type-level tags *)
type extended = private Extended
type simple = private Simple

(** Signature with tracked origin *)
type tracked_signature = {
  origin : Origin.t;
  signature : signature;
}

(** Core module or alias.
    NOTE: the [Sig] constructor line had been destroyed by a
    comment-extraction artifact (only its doc text "Classic module"
    remained); it is restored here — it is used pervasively below. *)
and _ ty =
  | Sig: tracked_signature -> 'any ty (** Classic module *)
  | Alias:
      {
        path: Namespaced.t;
        (** Path.To.Target:
            projecting this path may create new dependencies *)
        phantom: Divergence.t option;
        (** Track potential delayed dependencies after a divergent
            accident:
            {[
              module M = A      (* Alias { name = M; path = [A] } *)
              open Unknowable   (* <- divergence *)
              (* M below: Alias { …; phantom = Some divergence } *)
            ]}
            In the example above, [M] could be the local module [.M],
            triggering the delayed alias dependency [A]. Or it could be
            a submodule [Unknowable.M]. Without sufficient information,
            codept defaults to computing an upper bound of dependencies,
            and therefore considers that [M] is [.M]; the inferred
            dependencies for the snippet above are {A, Unknowable}. *)
      } -> extended ty
  | Abstract: Id.t -> 'any ty
  (** Abstract module type may be refined during functor application,
      keeping track of their identity is thus important *)
  | Fun: 'a ty Arg.t option * 'a ty -> 'a ty
  | Link: Namespaced.t -> extended ty (** Link to a compilation unit *)
  | Namespace: dict -> extended ty
  (** Namespace are open bundle of modules *)
and t = extended ty
and definition = { modules : dict; module_types : dict }
and signature =
  | Blank (** Unknown signature, used as for extern module, placeholder, … *)
  | Exact of definition
  | Divergence of { point: Divergence.t; before:signature; after:definition}
  (** A divergent signature happens when signature inference is disturbed
      by opening or including an unknowable module:
      {[
        module A = …
        include Extern (* <- divergence *)
        open A (* <- which A is this: .A or Extern.A? *)
      ]} *)
and dict = t Name.map

type sty = simple ty

type level = Module | Module_type
type modul_ = t
type named = Name.t * t
(** [is_functor m] holds iff the module expression is a functor. *)
let is_functor m =
  match m with
  | Fun _ -> true
  | _ -> false
module Dict = struct
  (** Dictionaries of named modules. *)
  type t = dict
  let empty = Name.Map.empty

  let of_list = List.fold_left (fun x (name,m) -> Name.Map.add name m x) empty

  (* Right-biased union, except that a unit signature wins over a mere
     link to the same compilation unit; namespaces are merged
     recursively. *)
  let union =
    let rec merge _name x y = match x, y with
      | (Sig { origin = Unit {path = p;_}; _ } as x), Link path
        when path = p -> Some x
      (* The line below was a syntactically invalid remnant of a
         commented-out match arm in the extracted source; kept as a
         comment for reference:
         | x, { weak = true; _ } -> Some x *)
      | Namespace n, Namespace n' ->
        Some (Namespace (Name.Map.union merge n n'))
      | _, r -> Some r in
    Name.Map.union merge

  (* Left-biased union: already-known modules are never overridden;
     namespaces are still merged recursively. *)
  let weak_union =
    let rec merge _k x y = match x, y with
      | Namespace n, Namespace n' ->
        Some (Namespace (Name.Map.union merge n n'))
      | x, _ -> Some x in
    Name.Map.union merge

  (* Key-wise difference: bindings of [x] whose name is absent from [y]. *)
  let diff x y = Name.Map.merge ( fun _ x y -> match x, y with
      | Some _, Some _ -> None
      | Some _ as x, None -> x
      | None, _ -> None
    ) x y

  (* Schematic encoding as an array of (name, element) pairs. *)
  let sch elt =
    let open Schematic in
    Custom {
      fwd = Name.Map.bindings;
      rev = of_list;
      sch = Array (pair String elt)}
end
(* TODO: Behavior with links *)

(** [spirit_away breakpoint root m] marks [m] and (recursively) its
    submodules as potentially shadowed by the unknowable module opened
    or included at the divergence [breakpoint].  [root] is true only at
    the toplevel of the recursion. *)
let rec spirit_away breakpoint root = function
  | Alias a as al ->
    if not root then
      Alias { a with phantom = Some breakpoint }
    else al
  | Abstract _ | Fun _ as f -> f
  | Link _ as l -> l
  | Namespace modules ->
    Namespace ( Name.Map.map (spirit_away breakpoint false) modules )
  | Sig m ->
    (* Units keep their origin; an already-phantom module keeps the
       divergence that first created it. *)
    let origin = Origin.Phantom (root,breakpoint) in
    let origin = match m.origin with
      | Unit _ as u -> u
      | Phantom _ as ph -> ph
      | _ -> origin in
    Sig { origin; signature = spirit_away_sign breakpoint false m.signature }
and spirit_away_sign breakpoint root = function
  | Blank -> Blank
  | Divergence d -> Divergence {
      before = spirit_away_sign breakpoint root d.before;
      point = d.point;
      after = spirit_away_def breakpoint root d.after
    }
  | Exact def -> Exact (spirit_away_def breakpoint root def)
and spirit_away_def breakpoint root def =
  (* Module types are always treated as roots here. *)
  let map root =
    Name.Map.map (spirit_away breakpoint root) in
  { modules = map root def.modules; module_types = map true def.module_types }

let spirit_away b = spirit_away b true
(* Merge two definitions; modules use Dict.union (right-biased with
   unit-over-link preference), module types use Name.Map.union'. *)
let sig_merge (s1: definition) (s2:definition) =
  { module_types = Name.Map.union' s1.module_types s2.module_types;
    modules = Dict.union s1.modules s2.modules }

(* Keep only the components of [s1] whose names are absent from [s2]. *)
let sig_diff s1 s2 =
  {
    module_types = Dict.diff s1.module_types s2.module_types;
    modules = Dict.diff s1.modules s2.modules
  }

let empty = Name.Map.empty
let empty_sig = {modules = empty; module_types = empty }

(* Collapse a signature (possibly carrying divergences) into a flat
   definition. *)
let rec flatten = function
  | Exact x -> x
  | Divergence d -> sig_merge (flatten d.before) d.after
  | Blank -> empty_sig

let is_exact_sig = function
  | Exact _ -> true
  | Divergence _ -> false
  | Blank -> false

(* A module is exact when its signature is fully known: no divergence and
   no phantom alias. *)
let is_exact m =
  match m with
  | Namespace _ | Link _ | Abstract _ | Fun _ -> true
  | Alias {phantom ; _ } -> phantom = None
  | Sig m -> is_exact_sig m.signature

let md s = Sig s

(* Collect all alias/link targets reachable from a module.
   NOTE(review): in the [Sig] branch the incoming accumulator [l] is
   discarded (the fold chain ends in [[]]) — confirm that this is
   intentional for nested signatures reached through [Namespace]. *)
let rec aliases0 l = function
  | Alias {path; _ } | Link path -> path :: l
  | Abstract _ | Fun _ -> l
  | Namespace modules ->
    Name.Map.fold (fun _ x l -> aliases0 l x) modules l
  | Sig { signature; _ } ->
    let signature = flatten signature in
    let add _k x l = aliases0 l x in
    Name.Map.fold add signature.modules
    @@ Name.Map.fold add signature.module_types
    @@ []

let aliases = aliases0 []
let pp_alias = Pp.opt Paths.Expr.pp

let pp_level ppf lvl = Pp.fp ppf "%s" (match lvl with
    | Module -> "module"
    | Module_type -> "module type"
  )

(* Debug printers emitting OCaml-source-like syntax for modules,
   signatures and definitions. *)
let reflect_phantom ppf = function
  | None -> Pp.fp ppf "None"
  | Some x -> Pp.fp ppf "Some(%a)" Divergence.reflect x

let reflect_opt reflect ppf = function
  | None -> Pp.string ppf "None"
  | Some x -> Pp.fp ppf "Some %a" reflect x

let rec reflect ppf = function
  | Sig m -> Pp.fp ppf "Sig (%a)" reflect_m m
  | Fun (arg,x) -> Pp.fp ppf "Fun (%a,%a)" (reflect_opt reflect_arg) arg reflect x
  | Namespace modules ->
    Pp.fp ppf "Namespace (%a)"
      reflect_mdict modules
  | Alias {path;phantom} ->
    Pp.fp ppf "Alias {path=%a;phantom=%a}"
      reflect_namespaced path
      reflect_phantom phantom
  | Link path ->
    Pp.fp ppf "Link (%a)"
      reflect_namespaced path
  | Abstract n -> Pp.fp ppf "Abstract %a" Id.pp n
and reflect_named ppf (n,m) = Pp.fp ppf "(%S,%a)" n reflect m
and reflect_namespaced ppf nd =
  (* Print as a Namespaced.make call, with ~nms only when non-empty. *)
  if nd.namespace = [] then
    Pp.fp ppf "Namespaced.make %a"
      Pp.estring nd.name
  else
    Pp.fp ppf "Namespaced.make ~nms:[%a] %a"
      Pp.(list ~sep:(s";@ ") @@ estring) nd.namespace
      Pp.estring nd.name
and reflect_m ppf {origin;signature} =
  Pp.fp ppf {|@[<hov>{origin=%a; signature=%a}@]|}
    Origin.reflect origin
    reflect_signature signature
and reflect_signature ppf m = reflect_definition ppf (flatten m)
and reflect_definition ppf {modules; module_types} =
  (* Choose the most compact constructor expression for the contents. *)
  match Name.Map.cardinal modules, Name.Map.cardinal module_types with
  | 0, 0 -> Pp.string ppf "empty"
  | _, 0 -> Pp.fp ppf
              "of_list @[<hov>[%a]@]" reflect_mdict modules
  | 0, _ -> Pp.fp ppf
              "of_list_type @[<hov>[%a]@]"
              reflect_mdict module_types
  | _ ->
    Pp.fp ppf "@[(merge @,(of_list [%a]) @,(of_list_type [%a])@, )@]"
      reflect_mdict modules
      reflect_mdict module_types
and reflect_mdict ppf dict =
  Pp.(list ~sep:(s ";@ ") @@ reflect_named) ppf (Name.Map.bindings dict)
and reflect_arg ppf arg = Pp.fp ppf "{name=%a;signature=%a}"
    (reflect_opt Pp.estring) arg.name reflect arg.signature

let reflect_modules ppf dict =
  Pp.fp ppf "Dict.of_list @[<v 2>[%a]@]"
    (Pp.list ~sep:(Pp.s ";@ ") reflect_named)
    (Name.Map.bindings dict)
(* Human-readable printers; see [reflect] above for the OCaml-syntax
   variants. *)
let rec pp ppf = function
  | Alias {path;phantom} ->
    Pp.fp ppf "≡%s%a" (if phantom=None then "" else "(👻)" )
      Namespaced.pp path
  | Link path -> Pp.fp ppf "⇒%a" Namespaced.pp path
  | Sig m -> pp_m ppf m
  | Fun (arg,x) ->
    Pp.fp ppf "%a->%a" Pp.(opt pp_arg) arg pp x
  | Namespace n -> Pp.fp ppf "Namespace @[[%a]@]"
                     pp_mdict n
  | Abstract n -> Pp.fp ppf "■(%a)" Id.pp n
and pp_m ppf {origin;signature;_} =
  Pp.fp ppf "%a:%a"
    Origin.pp origin pp_signature signature
and pp_signature ppf = function
  | Blank -> Pp.fp ppf "ø"
  | Exact s -> pp_definition ppf s
  | Divergence {point; before; after} ->
    Pp.fp ppf "%a ∘ %a ∘ %a"
      pp_signature before
      Divergence.pp point
      pp_definition after
and pp_definition ppf {modules; module_types} =
  Pp.fp ppf "@[<hv>(%a" pp_mdict modules;
  (* Only print the module-types section when it is non-empty. *)
  if Name.Map.cardinal module_types >0 then
    Pp.fp ppf "@, types:@, %a)@]"
      pp_mdict module_types
  else Pp.fp ppf ")@]"
and pp_mdict ppf dict =
  Pp.fp ppf "%a" (Pp.(list ~sep:(s " @,")) pp_pair) (Name.Map.bindings dict)
and pp_pair ppf (name,md) = Pp.fp ppf "%s:%a" name pp md
and pp_arg ppf arg = Pp.fp ppf "(%a:%a)" (Pp.opt Pp.string) arg.name pp arg.signature
(* Build a placeholder module with a Blank signature; when [path] is
   given the mockup stands for the toplevel unit at that source path. *)
let mockup ?origin ?path name =
  let origin = match origin, path with
    | _, Some p -> Origin.Unit {source= p; path=Namespaced.make name}
    | Some o, None -> o
    | _ -> Submodule in
  {
    origin;
    signature = Blank
  }

let create
    ?(origin=Origin.Submodule) signature =
  { origin; signature}

(* Expand a namespaced path into nested Namespace modules ending in a
   Link to the full path; raises Invalid_argument on an empty namespace. *)
let namespace (path:Namespaced.t) =
  let rec namespace (global:Namespaced.t) path =
    match path with
    | [] -> raise (Invalid_argument "Module.namespace: empty namespace")
    | [name] ->
      name, Namespace (Dict.of_list [global.name, Link global])
    | name :: rest ->
      name, Namespace (Dict.of_list [namespace global rest])
  in
  namespace path path.namespace

(* Wrap a named module in nested namespaces [nms]. *)
let rec with_namespace nms name module'=
  match nms with
  | [] -> name, module'
  | a :: q ->
    let sub = with_namespace q name module' in
    a, Namespace (Dict.of_list [sub])

(* Build a definition from association lists of modules and module types. *)
let signature_of_lists ms mts =
  let e = Name.Map.empty in
  let add map (name,m) = Name.Map.add name m map in
  { modules = List.fold_left add e ms;
    module_types = List.fold_left add e mts
  }

let to_list m = Name.Map.bindings m
(* Schematic (de)serialization of modules; the module type is recursive,
   hence the Mu indices and the final Rec tying of the knot. *)
module Schema = struct
  open Schematic
  module Origin_f = Label(struct let l = "origin" end)
  module Modules = Label(struct let l = "modules" end)
  module Module_types = Label(struct let l = "module_types" end)
  module Name_f = Label(struct let l = "name" end)
  let (><) = Option.(><)
  (* Encode an empty list as an absent optional field. *)
  let l = let open L in function | [] -> None | x -> Some x
  (* Recursion indices for the three mutually recursive schemas below. *)
  module Mu = struct
    let _m, module', arg = Schematic_indices.three
  end
  let named () = Schematic.pair String Mu.module'
  let dict () = Dict.sch Mu.module'
  let schr = Obj [
      Opt, Origin_f.l, (reopen Origin.sch);
      Opt, Modules.l, dict ();
      Opt, Module_types.l, dict ()
    ]
  (* Encode an empty dict as an absent optional field. *)
  let d x = if x = Dict.empty then None else Some x
  let rec m = Custom { fwd; rev; sch = schr }
  and fwd x =
    let s = flatten x.signature in
    Record.[
      Origin_f.l $=? (default Origin.Submodule x.origin);
      Modules.l $=? d s.modules;
      Module_types.l $=? d s.module_types
    ]
  and rev = let open Record in
    fun [ _, o; _, m; _, mt] ->
      create ~origin:(o >< Origin.Submodule)
        (Exact { modules = m >< Dict.empty; module_types = mt >< Dict.empty})
  let opt_arg = option Mu.arg
  let rec module' =
    Custom { fwd = fwdm; rev=revm;
             sch = Sum[ "M", m;
                        "Alias", reopen Paths.S.sch;
                        "Fun", [opt_arg; Mu.module'];
                        "Abstract", reopen Id.sch;
                        "Link", reopen Paths.S.sch;
                        "Namespace", Array (named ())
                      ]
           }
  and fwdm = function
    | Sig m -> C (Z m)
    | Alias x -> C (S (Z (Namespaced.flatten x.path)))
    | Fun (arg,x) -> C (S (S (Z [arg;x])))
    | Abstract x -> C (S (S (S (Z x))))
    | Link x -> C (S (S (S (S (Z (Namespaced.flatten x))))))
    | Namespace n -> C (S (S (S (S (S (Z (to_list n)))))))
  and revm =
    let open Tuple in
    function
    | C Z m -> Sig m
    (* Phantom information is not serialized: aliases come back plain. *)
    | C S Z path -> Alias {path=Namespaced.of_path path; phantom=None}
    | C S S Z [arg;body] -> Fun(arg,body)
    | C S S S Z n -> Abstract n
    | C S S S S Z path -> Link (Namespaced.of_path path)
    | C S S S S S Z modules ->
      Namespace (Dict.of_list modules)
    | _ -> .
  let arg = Arg.sch module'
  (* Tie the recursive knot. *)
  let defs : _ rec_defs = ["m", m; "module'", module'; "arg", arg]
  let m = Rec { id = ["Module"; "m"]; defs; proj = Zn }
  let module' = Rec { id = ["Module"; "module'"]; defs; proj = Sn Zn }
end
(* Operations on flat definitions (the [definition] record). *)
module Def = struct
  let empty = empty_sig
  (* Add a named binding to a name map. *)
  let (|+>) m (name,x) = Name.Map.add name x m
  let modules dict = { empty with modules=dict }
  let merge = sig_merge
  let map f x = { modules = Name.Map.map f x.modules; module_types = Name.Map.map f x.module_types }
  (* Left-biased merge: existing bindings win. *)
  let weak_merge (s1:definition) (s2:definition) =
    { module_types = Dict.weak_union s1.module_types s2.module_types;
      modules = Dict.weak_union s1.modules s2.modules }
  let add sg x = { sg with modules = sg.modules |+> x }
  let add_type sg x = { sg with module_types = sg.module_types |+> x }
  (* Dispatch on the module/module-type level. *)
  let add_gen level = match level with
    | Module -> add
    | Module_type -> add_type
  let find level name d = match level with
    | Module -> Name.Map.find_opt name d.modules
    | Module_type -> Name.Map.find_opt name d.module_types
  let remove level name d = match level with
    | Module ->
      let modules = Name.Map.remove name d.modules in
      { d with modules }
    | Module_type ->
      let module_types = Name.Map.remove name d.module_types in
      { d with module_types }
  let pp = pp_definition
  (* Schematic encoding: two optional arrays of (name, module) pairs. *)
  let sch = let open Schematic in let open Schema in
    let named = pair String module' in
    custom
      (Obj[Opt,Modules.l, Array named; Opt, Module_types.l, Array named])
      (fun x -> [ Modules.l $=? l(to_list x.modules);
                  Module_types.l $=? (l @@ to_list x.module_types)] )
      (let open Record in fun [_,m;_,mt] -> signature_of_lists (m><[]) (mt><[]))
  type t = definition
end
(* Operations on signatures (possibly divergent). *)
module Sig = struct
  (* Total number of module and module-type bindings. *)
  let rec card s =
    let card_def s = let c= Name.Map.cardinal in
      c s.modules + c s.module_types in
    match s with
    | Blank -> 0
    | Divergence p ->
      card p.before + card_def p.after
    | Exact s -> card_def s
  let (|+>) m (name,x) = Name.Map.add name x m
  (* Merge two signatures, combining flat parts with [def_merge] and
     threading divergences through. *)
  let rec gen_merge def_merge s1 s2 = match s1, s2 with
    | Blank, s | s, Blank -> s
    | Exact s1, Exact s2 -> Exact (def_merge s1 s2)
    | Divergence p , Exact s ->
      Divergence { p with after = def_merge p.after s }
    | s, Divergence p ->
      Divergence { p with before = gen_merge def_merge s p.before }
  let merge x = gen_merge Def.merge x
  let weak_merge x = gen_merge Def.weak_merge x
  let diff = gen_merge sig_diff
  let flatten = flatten
  let create m = Exact { modules = empty |+> m; module_types = empty }
  let create_type m = Exact { module_types = empty |+> m; modules = empty }
  let is_exact = is_exact_sig
  let gen_create level md = match level with
    | Module -> create md
    | Module_type -> create_type md
  let of_lists l1 l2 = Exact (signature_of_lists l1 l2)
  let of_list ms =
    Exact { modules = List.fold_left (|+>) empty ms; module_types = empty }
  let of_list_type ms =
    Exact { module_types = List.fold_left (|+>) empty ms; modules = empty }
  (* Additions after a divergence go into its [after] component. *)
  let add_gen lvl sg x = match sg with
    | Blank -> Exact (Def.add_gen lvl Def.empty x)
    | Exact sg -> Exact (Def.add_gen lvl sg x)
    | Divergence p -> Divergence { p with after = Def.add_gen lvl p.after x }
  let add = add_gen Module
  let add_type = add_gen Module_type
  let empty = Exact Def.empty
  let pp = pp_signature
  type t = signature
  (* Schematic encoding: signatures are flattened before serialization,
     so divergence information is not round-tripped. *)
  let sch = let open Schematic in let open Schema in
    let named = pair String module' in
    custom
      (Obj [Opt, Modules.l, Array named; Opt, Module_types.l, Array named])
      (fun x -> let s = flatten x in let l x = l(to_list x) in
        Record.[ Modules.l $=? l s.modules; Module_types.l $=? l s.module_types ])
      (let open Record in fun [_,m;_,mt] -> of_lists (m><[]) (mt><[]) )
end
(* Coerce any module expression to the extended type; only the [Fun]
   case needs a structural rebuild, the other constructors already
   inhabit (or are polymorphic in) [extended ty]. *)
let rec extend: type any. any ty -> extended ty = function
  | Abstract n -> Abstract n
  | Fun(a,x) ->
    let map a = Arg.map extend a in
    (Fun(Option.fmap map a, extend x) : modul_)
  | Sig s -> Sig s
  | Alias _ as x -> x
  | Link _ as x -> x
  | Namespace _ as x -> x
(* Substitutions over abstract module identities, used to implement
   functor application and [with]-style constraints. *)
module Subst = struct
  module Tbl = struct
    include Map.Make(Id)
    (* Option-returning lookup (for compatibility with older stdlibs). *)
    let find_opt k m =
      match find k m with
      | x -> Some x
      | exception Not_found -> None
  end
  type 'x t = 'x ty Tbl.t
  type 'x subst = 'x t
  let identity = Tbl.empty
  let add id mty subst = Tbl.add id mty subst

  (* Apply [subst] (given as a lookup function) throughout a module
     expression, replacing matched [Abstract] identities. *)
  let rec apply: type any. (Id.t -> any ty option) -> any ty -> any ty = fun subst -> function
    | Abstract id as old -> Option.( subst id >< old)
    | Fun (x,y) -> Fun(Option.fmap (Arg.map (apply subst)) x, apply subst y)
    | Sig {origin;signature} ->
      Sig {origin;signature = apply_sig (fun id -> Option.fmap extend (subst id)) signature }
    | Alias _ as x -> x
    | Link _ as x -> x
    | Namespace _ as x -> x
  and apply_sig: (Id.t -> extended ty option) -> signature -> signature = fun subst -> function
    | Blank -> Blank
    | Exact s -> Exact (Def.map (apply subst) s)
    | Divergence d -> Divergence { point = d.point;
                                   before = apply_sig subst d.before;
                                   after = Def.map (apply subst) d.after
                                 }

  (* Replace every abstract identity with a fresh one (memoized, so
     shared identities stay shared). *)
  let refresh seed x =
    let tbl = ref Tbl.empty in
    apply (fun k ->
        match Tbl.find_opt k !tbl with
        | Some _ as y -> y
        | None ->
          let fresh = Abstract (Id.create seed) in
          tbl := Tbl.add k fresh !tbl;
          Some fresh
      ) x

  let apply subst x = if subst = identity then x else
      apply (fun k -> Tbl.find_opt k subst) x

  (* Match [arg] against [param], recording which abstract identities of
     [param] are refined by [arg].  Mismatched shapes are type errors in
     the analyzed source and are silently ignored. *)
  let rec compute_constraints lvl (type any) (arg:any ty) (param:any ty) (subst: extended subst): extended subst =
    match arg, param with
    | x, Abstract id -> add id (extend x) subst
    | Fun _, Fun _ -> subst
    | Alias _, Alias _ -> subst
    | Link _, Link _ -> subst
    | Namespace _, Namespace _ -> subst
    | Sig arg, Sig param ->
      if lvl = Module then
        sig_constraints (Sig.flatten arg.signature) (Sig.flatten param.signature) subst
      else
        subst
    | _ -> (* type error *) subst
  and sig_constraints arg param subst =
    subst
    |> dict_constraints Module arg.modules param.modules
    |> dict_constraints Module_type arg.module_types param.module_types
  and dict_constraints lvl arg param subst =
    Name.Map.fold (fun k arg subst ->
        match Name.Map.find k param with
        | exception Not_found -> subst
        | param -> compute_constraints lvl arg param subst
      ) arg subst

  (* Replace (or delete, when [delete]) the item at [path] inside a
     module, returning the updated module together with the identity
     constraints implied by the replacement. *)
  let rec replace_at ~level ~delete ~path ~replacement = function
    | Sig s ->
      let signature, subst = sig_replace_at ~level ~delete ~path ~replacement s.signature in
      Sig {s with signature}, subst
    | m -> (* type error *) m, identity
  and sig_replace_at ~level ~delete ~path ~replacement s =
    match path, s with
    | [], _ | _, Blank -> s, identity (* type error *)
    | _ :: _, Exact e ->
      let def, eq = def_replace_at ~level ~delete ~path ~replacement e in
      Exact def, eq
    | name :: q, Divergence ({after; before; _ } as d) ->
      (* Intermediate path components are always modules. *)
      let level' = match q with [] -> level | _ :: _ -> Module in
      match Def.find level' name after with
      | None ->
        let before, eq = sig_replace_at ~level ~delete ~path ~replacement before in
        Divergence ({ d with after; before }), eq
      | Some _ ->
        let after, eq = def_replace_at ~level ~delete ~path ~replacement after in
        Divergence ({ d with after; before }), eq
  and def_replace_at ~level ~delete ~path ~replacement s = match path with
    | [] -> (* error *) s, identity
    | [name] ->
      let s' =
        if delete then Def.remove level name s
        else Def.add_gen level s (name,extend replacement)
      in
      (* Refine the identities of the item being replaced. *)
      let eq =
        match Def.find level name s with
        | None -> (* type error *) identity
        | Some old ->
          compute_constraints level (extend replacement) old identity
      in
      s', eq
    | a :: q ->
      match Def.find Module a s with
      | None -> (* type error *) s, identity
      | Some sub ->
        let m, eq = replace_at ~level ~delete ~path:q ~replacement sub in
        Def.add_gen Module s (a,m), eq

  let compute_constraints ~arg ~param = compute_constraints Module arg param identity

  let pp ppf s =
    let pp_elt ppf (k,x) = Pp.fp ppf "%a->%a" Id.pp k pp x in
    Pp.list pp_elt ppf (Tbl.bindings s)
end
(* Structural equality over module expressions; heterogeneous in the
   type tag, so GADT-typed as [a ty -> b ty -> bool]. *)
module Equal = struct
  let rec eq: type a b. a ty -> b ty -> bool = fun x y ->
    match x, y with
    | Sig x, Sig y -> tsig x y
    | Alias _, Alias _ -> x = y
    | Abstract x, Abstract y -> x = y
    | Fun (xa,xb), Fun (ya,yb) -> arg_opt xa ya && eq xb yb
    | Link _ as x, (Link _ as y) -> x = y
    | Namespace x, Namespace y -> dict x y
    | _ -> false
  and tsig x y =
    x.origin = y.origin && signature x.signature y.signature
  and signature x y =
    match x, y with
    | Blank, Blank -> true
    | Exact x, Exact y -> def x y
    | Divergence x, Divergence y ->
      x.point = y.point
      && def x.after y.after
      && signature x.before y.before
    | _ -> false
  and def x y =
    dict x.modules y.modules && dict x.module_types y.module_types
  and dict x y = Name.Map.equal eq x y
  and arg_opt: type a b. a ty Arg.t option -> b ty Arg.t option -> bool =
    fun x y -> match x, y with
      | Some x, Some y -> x.name = y.name && eq x.signature y.signature
      | None, None -> true
      | _ -> false
end
module Partial = struct
type t = { name: string option; mty: sty }
let empty_sig = { origin = Submodule; signature= Sig.empty}
let empty = { name=None; mty = Sig empty_sig }
let simple defs = { empty with mty = Sig { empty_sig with signature = defs} }
let rec is_exact x = match x.mty with
| Abstract _ -> true
| Fun (_,x) -> is_exact {name=None; mty=x}
| Sig s -> Sig.is_exact s.signature
let rec to_module ?origin (p:t) =
to_module_kind ?origin p.mty
and to_module_kind ?origin : sty -> modul_ = function
| Abstract n -> (Abstract n:modul_)
| Fun(a,x) ->
let map a = Arg.map (to_module_kind ?origin) a in
(Fun(Option.fmap map a, to_module_kind ?origin x) : modul_)
| Sig s ->
let origin = match origin with
| Some o -> Origin.at_most s.origin o
| None -> s.origin
in
Sig {origin; signature = s.signature }
let extend = extend
let refresh = Subst.refresh
let apply ~arg ~param ~body =
let subst = Subst.compute_constraints ~arg ~param in
debug "Constraint from typing:%a@." Subst.pp subst;
let res = Subst.apply subst body in
debug "Result:@ %a ⇒@ %a@." pp body pp res;
res
let replace_at ~level ~delete ~path ~replacement body =
let constrained, eq = Subst.replace_at ~delete ~level ~path ~replacement body in
Subst.apply eq constrained
let rec pp_sty ppf: sty -> _ = function
| Abstract n -> Pp.fp ppf "<abstract:%a>" Id.pp n
| Fun (a,x) -> Pp.fp ppf "%a->%a" (Arg.pp pp_sty) a pp_sty x
| Sig m -> pp_m ppf m
let pp ppf (x:t) =
let pp_name ppf = function
| None -> ()
| Some n -> Pp.fp ppf "(%s)" n in
Pp.fp ppf "%a%a" pp_name x.name pp_sty x.mty
let to_arg (p:t) = to_module p
let rec of_extended_mty: modul_ -> sty = function
| Abstract n -> Abstract n
| Sig x -> Sig x
| Fun (a,x) -> Fun(Option.fmap (Arg.map of_extended_mty) a, of_extended_mty x)
| Link _ | Namespace _ | Alias _ -> assert false
let of_extended ?name kind = { name; mty = of_extended_mty kind }
let of_module name m =
{name=Some name; mty = Sig m }
let pseudo_module name x =
let origin = Origin.Namespace in
let signature =
Exact {modules = x; module_types = Dict.empty } in
of_module name { signature; origin }
let is_functor x = match x.mty with
| Fun _ -> true
| _ -> false
let to_sign fdefs = match fdefs.mty with
| Abstract _ | Fun _ -> Error Sig.empty
| Sig s ->
Ok s.signature
module Sch = struct
open Schematic
module S = Schema
module Result = Label(struct let l = "signature" end)
let mu = Schematic_indices.one
let mty =
custom
(Sum ["Abstract", reopen Id.sch;
"Sig", Schema.m;
"Fun", [option @@ Arg.sch mu; mu];
])
(fun (x:sty) -> match x with
| Abstract x -> C (Z x)
| Sig s -> C (S (Z s))
| Fun (a,x) -> C (S (S (Z [a;x])))
| _ -> .
)
(let open Tuple in
function
| C Z x -> Abstract x
| C S Z x -> Sig x
| C S S Z [a;x] -> Fun (a,x)
| _ -> .
)
let mty = Rec { id = ["Partial"; "mty"]; defs=["mty", mty]; proj = Zn }
let partial = custom [option String; mty]
(fun {name; mty} -> [name;mty])
(let open Tuple in fun [name;mty] -> {name;mty})
end
let sch = Sch.partial
end
module Namespace = struct
type t = dict
let merge = Dict.union
let merge_all = List.fold_left merge Dict.empty
let rec from_module nms origin sign =
match nms.Namespaced.namespace with
| [] ->
Dict.of_list [nms.name, Sig (create ~origin sign)]
| a :: namespace ->
let sign = Namespace (from_module { nms with namespace } origin sign) in
Dict.of_list [a, sign]
let sch = Dict.sch Schema.module'
end
| null | https://raw.githubusercontent.com/Octachron/codept/017c2d93cb45e96d2703dc2734a1b7679d4e9ccb/lib/module.ml | ocaml | * aka toplevel module
* Temporary module from namespace
* functor argument
* Ambiguous module, that could be an external module
* Type-level tags
* Signature with tracked origin
* Core module or alias
* Path.To.Target:
projecting this path may create new dependencies
Alias { name = M; path = [A] }
<- divergence
* Abstract module type may be refined during functor application,
keeping track of their identity is thus important
* Link to a compilation unit
* Namespace are open bundle of modules
* Unknown signature, used as for extern module, placeholder, …
<- divergence
TODO: Behavior with links
type error
type error
type error
error
type error
type error |
let debug fmt = Format.ifprintf Pp.err ("Debug:" ^^ fmt ^^"@.")
module Arg = struct
type 'a t = { name:Name.t option; signature:'a }
type 'a arg = 'a t
let pp pp ppf = function
| Some arg ->
Pp.fp ppf "(%a:%a)" Name.pp_opt arg.name pp arg.signature
| None -> Pp.fp ppf "()"
let sch sign = let open Schematic.Tuple in
let fwd arg = [arg.name; arg.signature] in
let rev [name;signature] = {name;signature} in
Schematic.custom Schematic.[option String; sign] fwd rev
let map f x = { x with signature = f x.signature }
let reflect pp ppf = function
| Some arg ->
Pp.fp ppf {|Some {name="%a"; %a}|} Name.pp_opt arg.name pp arg.signature
| None -> Pp.fp ppf "()"
let pp_s pp_sig ppf args = Pp.fp ppf "%a"
(Pp.(list ~sep:(s "→@,")) @@ pp pp_sig) args;
if List.length args > 0 then Pp.fp ppf "→"
end
module Divergence= struct
type origin =
| First_class_module
| External
type t = { root: Name.t option; origin:origin; loc: Uloc.t }
let pp_origin ppf s =
Pp.fp ppf "%s" @@
match s with
| First_class_module -> "first class module"
| External -> "external module"
module Reflect = struct
let origin_r ppf s =
Pp.fp ppf "%s" @@
match s with
| First_class_module -> "First_class_module"
| External -> "External"
let rloc ppf =
let open Loc in
function
| Nowhere -> Pp.fp ppf "Nowhere"
| Simple {line;start;stop} -> Pp.fp ppf "Simple{line=%d;start=%d;stop=%d}"
line start stop
| Multiline {start; stop} ->
let pair ppf (x,y)= Pp.fp ppf "(%d,%d)" x y in
Pp.fp ppf "Multiline{start=%a; stop =%a}"
pair start pair stop
let floc ppf {Uloc.pkg; loc} =
Pp.fp ppf "(%a:%a)" Pkg.reflect pkg rloc loc
let divergence ppf {root;loc;origin} =
Pp.fp ppf "{root=%a;loc=%a;origin=%a}" Pp.estring Option.(root><"")
floc loc origin_r origin
end
let reflect = Reflect.divergence
let pp ppf {root; origin; loc= {pkg=path;loc} } =
Pp.fp ppf "open %s at %a:%a (%a)"
Option.(root><"")
Pkg.pp path Loc.pp loc
pp_origin origin
let sch_origin =
let open Schematic in
custom
(Sum[ "First_class_module", Void; "External", Void])
(function
| First_class_module -> C E
| External -> C (S E))
(function
| C E -> First_class_module
| C (S E) -> External
| _ -> .)
let sch = let open Schematic in let open Tuple in
custom
Schematic.[option String; sch_origin; [Pkg.sch; Loc.Sch.t ]]
(fun r -> [r.root;r.origin; [r.loc.pkg; r.loc.loc] ])
(fun [root;origin;[pkg;loc]] -> {root;origin;loc={pkg;loc}} )
end
module Origin = struct
type t =
| Unit of {source:Pkg.t; path:Namespaced.t}
| Submodule
* Not resolved first - class module
| Phantom of bool * Divergence.t
let pp ppf = function
| Unit s ->
begin match s.source.Pkg.source with
| Pkg.Local-> Pp.fp ppf "#"
| Pkg x -> Pp.fp ppf "#[%a]" Namespaced.pp x
| Unknown -> Pp.fp ppf "#!"
| Special n -> Pp.fp ppf "*(%s)" n
end
| Submodule -> Pp.fp ppf "."
| Namespace -> Pp.fp ppf "(nms)"
| First_class -> Pp.fp ppf "'"
| Arg -> Pp.fp ppf "§"
| Phantom _ -> Pp.fp ppf "👻"
module Sch = struct open Schematic
let raw =
Sum [ "Unit", [Pkg.sch; Namespaced.sch];
"Submodule", Void; "First_class", Void; "Arg", Void;
"Phantom", [ Bool; Divergence.sch];
"Namespace", Void
]
let t = let open Tuple in
custom raw
(function
| Unit {source; path} -> C (Z [source;path])
| Submodule -> C (S E)
| First_class -> C (S (S E))
| Arg -> C(S (S (S E)))
| Phantom (b,div) -> C (S (S (S (S(Z [b;div])))))
| Namespace -> C (S (S (S (S (S E)))))
)
(function
| C Z [source;path] -> Unit {source;path}
| C S E -> Submodule
| C S S E -> First_class
| C S S S E -> Arg
| C S S S S Z [b;d] -> Phantom(b,d)
| C S S S S S E -> Namespace
| _ -> .
)
end let sch = Sch.t
let reflect ppf = function
| Unit u -> Pp.fp ppf "Unit {source=%a;path=%a}"
Pkg.reflect u.source Namespaced.reflect u.path
| Submodule -> Pp.fp ppf "Submodule"
| First_class -> Pp.fp ppf "First_class"
| Arg -> Pp.fp ppf "Arg"
| Phantom (root,b) -> Pp.fp ppf "Phantom (%b,%a)" root Divergence.reflect b
| Namespace -> Pp.fp ppf "Namespace"
let at_most max v = match max, v with
| (First_class|Arg|Namespace) , _ -> max
| Unit _ , v -> v
| Submodule, Unit _ -> Submodule
| Phantom _, Submodule -> Submodule
| Submodule, v -> v
| Phantom _ as ph , _ -> ph
end
type origin = Origin.t
type extended = private Extended
type simple = private Simple
type tracked_signature = {
origin : Origin.t;
signature : signature;
}
and _ ty =
* Classic module
| Alias:
{
path: Namespaced.t;
phantom: Divergence.t option;
* Track potential delayed dependencies
after divergent accident :
[
module M = A ( * { name = M ; path = [ A ] }
after divergent accident:
[
{ phantom = Some divergence }
]
In the example above, [M] could be the local module
[.M], triggering the delayed alias dependency [A]. Or it could
be a submodule [Unknownable.M] . Without sufficient information,
codept defaults to computing an upper bound of dependencies,
and therefore considers that [M] is [.M], and the inferred
dependencies for the above code snipet is {A,Unknowable} .
*)
} -> extended ty
| Abstract: Id.t -> 'any ty
| Fun: 'a ty Arg.t option * 'a ty -> 'a ty
| Namespace: dict -> extended ty
and t = extended ty
and definition = { modules : dict; module_types : dict }
and signature =
| Exact of definition
| Divergence of { point: Divergence.t; before:signature; after:definition}
* A divergent signature happens when a signature inference is disturbed
by opening or including an unknowable module :
[ module A = …
include Extern ( * < - divergence
by opening or including an unknowable module:
[ module A = …
< - which A is this : .A or Extern . A ?
]
*)
and dict = t Name.map
type sty = simple ty
type level = Module | Module_type
type modul_ = t
type named = Name.t * t
let is_functor = function
| Fun _ -> true
| _ -> false
module Dict = struct
type t = dict
let empty = Name.Map.empty
let of_list = List.fold_left (fun x (name,m) -> Name.Map.add name m x) empty
let union =
let rec merge _name x y = match x, y with
| (Sig { origin = Unit {path = p;_}; _ } as x), Link path
when path = p -> Some x
| x , { weak = true ; _ } - > Some x
| Namespace n, Namespace n' ->
Some (Namespace (Name.Map.union merge n n'))
| _, r -> Some r in
Name.Map.union merge
let weak_union =
let rec merge _k x y = match x, y with
| Namespace n, Namespace n' ->
Some (Namespace (Name.Map.union merge n n'))
| x, _ -> Some x in
Name.Map.union merge
let diff x y = Name.Map.merge ( fun _ x y -> match x, y with
| Some _, Some _ -> None
| Some _ as x, None -> x
| None, _ -> None
) x y
let sch elt =
let open Schematic in
Custom {
fwd = Name.Map.bindings;
rev = of_list;
sch = Array (pair String elt)}
end
let rec spirit_away breakpoint root = function
| Alias a as al ->
if not root then
Alias { a with phantom = Some breakpoint }
else al
| Abstract _ | Fun _ as f -> f
| Link _ as l -> l
| Namespace modules ->
Namespace ( Name.Map.map (spirit_away breakpoint false) modules )
| Sig m ->
let origin = Origin.Phantom (root,breakpoint) in
let origin = match m.origin with
| Unit _ as u -> u
| Phantom _ as ph -> ph
| _ -> origin in
Sig { origin; signature = spirit_away_sign breakpoint false m.signature }
and spirit_away_sign breakpoint root = function
| Blank -> Blank
| Divergence d -> Divergence {
before = spirit_away_sign breakpoint root d.before;
point = d.point;
after = spirit_away_def breakpoint root d.after
}
| Exact def -> Exact (spirit_away_def breakpoint root def)
and spirit_away_def breakpoint root def =
let map root =
Name.Map.map (spirit_away breakpoint root) in
{ modules = map root def.modules; module_types = map true def.module_types }
let spirit_away b = spirit_away b true
let sig_merge (s1: definition) (s2:definition) =
{ module_types = Name.Map.union' s1.module_types s2.module_types;
modules = Dict.union s1.modules s2.modules }
let sig_diff s1 s2 =
{
module_types = Dict.diff s1.module_types s2.module_types;
modules = Dict.diff s1.modules s2.modules
}
let empty = Name.Map.empty
let empty_sig = {modules = empty; module_types = empty }
let rec flatten = function
| Exact x -> x
| Divergence d -> sig_merge (flatten d.before) d.after
| Blank -> empty_sig
let is_exact_sig = function
| Exact _ -> true
| Divergence _ -> false
| Blank -> false
let is_exact m =
match m with
| Namespace _ | Link _ | Abstract _ | Fun _ -> true
| Alias {phantom ; _ } -> phantom = None
| Sig m -> is_exact_sig m.signature
let md s = Sig s
let rec aliases0 l = function
| Alias {path; _ } | Link path -> path :: l
| Abstract _ | Fun _ -> l
| Namespace modules ->
Name.Map.fold (fun _ x l -> aliases0 l x) modules l
| Sig { signature; _ } ->
let signature = flatten signature in
let add _k x l = aliases0 l x in
Name.Map.fold add signature.modules
@@ Name.Map.fold add signature.module_types
@@ []
let aliases = aliases0 []
let pp_alias = Pp.opt Paths.Expr.pp
let pp_level ppf lvl = Pp.fp ppf "%s" (match lvl with
| Module -> "module"
| Module_type -> "module type"
)
let reflect_phantom ppf = function
| None -> Pp.fp ppf "None"
| Some x -> Pp.fp ppf "Some(%a)" Divergence.reflect x
let reflect_opt reflect ppf = function
| None -> Pp.string ppf "None"
| Some x -> Pp.fp ppf "Some %a" reflect x
let rec reflect ppf = function
| Sig m -> Pp.fp ppf "Sig (%a)" reflect_m m
| Fun (arg,x) -> Pp.fp ppf "Fun (%a,%a)" (reflect_opt reflect_arg) arg reflect x
| Namespace modules ->
Pp.fp ppf "Namespace (%a)"
reflect_mdict modules
| Alias {path;phantom} ->
Pp.fp ppf "Alias {path=%a;phantom=%a}"
reflect_namespaced path
reflect_phantom phantom
| Link path ->
Pp.fp ppf "Link (%a)"
reflect_namespaced path
| Abstract n -> Pp.fp ppf "Abstract %a" Id.pp n
and reflect_named ppf (n,m) = Pp.fp ppf "(%S,%a)" n reflect m
and reflect_namespaced ppf nd =
if nd.namespace = [] then
Pp.fp ppf "Namespaced.make %a"
Pp.estring nd.name
else
Pp.fp ppf "Namespaced.make ~nms:[%a] %a"
Pp.(list ~sep:(s";@ ") @@ estring) nd.namespace
Pp.estring nd.name
and reflect_m ppf {origin;signature} =
Pp.fp ppf {|@[<hov>{origin=%a; signature=%a}@]|}
Origin.reflect origin
reflect_signature signature
and reflect_signature ppf m = reflect_definition ppf (flatten m)
and reflect_definition ppf {modules; module_types} =
match Name.Map.cardinal modules, Name.Map.cardinal module_types with
| 0, 0 -> Pp.string ppf "empty"
| _, 0 -> Pp.fp ppf
"of_list @[<hov>[%a]@]" reflect_mdict modules
| 0, _ -> Pp.fp ppf
"of_list_type @[<hov>[%a]@]"
reflect_mdict module_types
| _ ->
Pp.fp ppf "@[(merge @,(of_list [%a]) @,(of_list_type [%a])@, )@]"
reflect_mdict modules
reflect_mdict module_types
and reflect_mdict ppf dict =
Pp.(list ~sep:(s ";@ ") @@ reflect_named) ppf (Name.Map.bindings dict)
and reflect_arg ppf arg = Pp.fp ppf "{name=%a;signature=%a}"
(reflect_opt Pp.estring) arg.name reflect arg.signature
let reflect_modules ppf dict =
Pp.fp ppf "Dict.of_list @[<v 2>[%a]@]"
(Pp.list ~sep:(Pp.s ";@ ") reflect_named)
(Name.Map.bindings dict)
let rec pp ppf = function
| Alias {path;phantom} ->
Pp.fp ppf "≡%s%a" (if phantom=None then "" else "(👻)" )
Namespaced.pp path
| Link path -> Pp.fp ppf "⇒%a" Namespaced.pp path
| Sig m -> pp_m ppf m
| Fun (arg,x) ->
Pp.fp ppf "%a->%a" Pp.(opt pp_arg) arg pp x
| Namespace n -> Pp.fp ppf "Namespace @[[%a]@]"
pp_mdict n
| Abstract n -> Pp.fp ppf "■(%a)" Id.pp n
and pp_m ppf {origin;signature;_} =
Pp.fp ppf "%a:%a"
Origin.pp origin pp_signature signature
and pp_signature ppf = function
| Blank -> Pp.fp ppf "ø"
| Exact s -> pp_definition ppf s
| Divergence {point; before; after} ->
Pp.fp ppf "%a ∘ %a ∘ %a"
pp_signature before
Divergence.pp point
pp_definition after
and pp_definition ppf {modules; module_types} =
Pp.fp ppf "@[<hv>(%a" pp_mdict modules;
if Name.Map.cardinal module_types >0 then
Pp.fp ppf "@, types:@, %a)@]"
pp_mdict module_types
else Pp.fp ppf ")@]"
and pp_mdict ppf dict =
Pp.fp ppf "%a" (Pp.(list ~sep:(s " @,")) pp_pair) (Name.Map.bindings dict)
and pp_pair ppf (name,md) = Pp.fp ppf "%s:%a" name pp md
and pp_arg ppf arg = Pp.fp ppf "(%a:%a)" (Pp.opt Pp.string) arg.name pp arg.signature
let mockup ?origin ?path name =
let origin = match origin, path with
| _, Some p -> Origin.Unit {source= p; path=Namespaced.make name}
| Some o, None -> o
| _ -> Submodule in
{
origin;
signature = Blank
}
let create
?(origin=Origin.Submodule) signature =
{ origin; signature}
let namespace (path:Namespaced.t) =
let rec namespace (global:Namespaced.t) path =
match path with
| [] -> raise (Invalid_argument "Module.namespace: empty namespace")
| [name] ->
name, Namespace (Dict.of_list [global.name, Link global])
| name :: rest ->
name, Namespace (Dict.of_list [namespace global rest])
in
namespace path path.namespace
let rec with_namespace nms name module'=
match nms with
| [] -> name, module'
| a :: q ->
let sub = with_namespace q name module' in
a, Namespace (Dict.of_list [sub])
let signature_of_lists ms mts =
let e = Name.Map.empty in
let add map (name,m) = Name.Map.add name m map in
{ modules = List.fold_left add e ms;
module_types = List.fold_left add e mts
}
let to_list m = Name.Map.bindings m
module Schema = struct
open Schematic
module Origin_f = Label(struct let l = "origin" end)
module Modules = Label(struct let l = "modules" end)
module Module_types = Label(struct let l = "module_types" end)
module Name_f = Label(struct let l = "name" end)
let (><) = Option.(><)
let l = let open L in function | [] -> None | x -> Some x
module Mu = struct
let _m, module', arg = Schematic_indices.three
end
let named () = Schematic.pair String Mu.module'
let dict () = Dict.sch Mu.module'
let schr = Obj [
Opt, Origin_f.l, (reopen Origin.sch);
Opt, Modules.l, dict ();
Opt, Module_types.l, dict ()
]
let d x = if x = Dict.empty then None else Some x
let rec m = Custom { fwd; rev; sch = schr }
and fwd x =
let s = flatten x.signature in
Record.[
Origin_f.l $=? (default Origin.Submodule x.origin);
Modules.l $=? d s.modules;
Module_types.l $=? d s.module_types
]
and rev = let open Record in
fun [ _, o; _, m; _, mt] ->
create ~origin:(o >< Origin.Submodule)
(Exact { modules = m >< Dict.empty; module_types = mt >< Dict.empty})
let opt_arg = option Mu.arg
let rec module' =
Custom { fwd = fwdm; rev=revm;
sch = Sum[ "M", m;
"Alias", reopen Paths.S.sch;
"Fun", [opt_arg; Mu.module'];
"Abstract", reopen Id.sch;
"Link", reopen Paths.S.sch;
"Namespace", Array (named ())
]
}
and fwdm = function
| Sig m -> C (Z m)
| Alias x -> C (S (Z (Namespaced.flatten x.path)))
| Fun (arg,x) -> C (S (S (Z [arg;x])))
| Abstract x -> C (S (S (S (Z x))))
| Link x -> C (S (S (S (S (Z (Namespaced.flatten x))))))
| Namespace n -> C (S (S (S (S (S (Z (to_list n)))))))
and revm =
let open Tuple in
function
| C Z m -> Sig m
| C S Z path -> Alias {path=Namespaced.of_path path; phantom=None}
| C S S Z [arg;body] -> Fun(arg,body)
| C S S S Z n -> Abstract n
| C S S S S Z path -> Link (Namespaced.of_path path)
| C S S S S S Z modules ->
Namespace (Dict.of_list modules)
| _ -> .
let arg = Arg.sch module'
let defs : _ rec_defs = ["m", m; "module'", module'; "arg", arg]
let m = Rec { id = ["Module"; "m"]; defs; proj = Zn }
let module' = Rec { id = ["Module"; "module'"]; defs; proj = Sn Zn }
end
module Def = struct
let empty = empty_sig
let (|+>) m (name,x) = Name.Map.add name x m
let modules dict = { empty with modules=dict }
let merge = sig_merge
let map f x = { modules = Name.Map.map f x.modules; module_types = Name.Map.map f x.module_types }
let weak_merge (s1:definition) (s2:definition) =
{ module_types = Dict.weak_union s1.module_types s2.module_types;
modules = Dict.weak_union s1.modules s2.modules }
let add sg x = { sg with modules = sg.modules |+> x }
let add_type sg x = { sg with module_types = sg.module_types |+> x }
let add_gen level = match level with
| Module -> add
| Module_type -> add_type
let find level name d = match level with
| Module -> Name.Map.find_opt name d.modules
| Module_type -> Name.Map.find_opt name d.module_types
let remove level name d = match level with
| Module ->
let modules = Name.Map.remove name d.modules in
{ d with modules }
| Module_type ->
let module_types = Name.Map.remove name d.module_types in
{ d with module_types }
let pp = pp_definition
let sch = let open Schematic in let open Schema in
let named = pair String module' in
custom
(Obj[Opt,Modules.l, Array named; Opt, Module_types.l, Array named])
(fun x -> [ Modules.l $=? l(to_list x.modules);
Module_types.l $=? (l @@ to_list x.module_types)] )
(let open Record in fun [_,m;_,mt] -> signature_of_lists (m><[]) (mt><[]))
type t = definition
end
module Sig = struct
let rec card s =
let card_def s = let c= Name.Map.cardinal in
c s.modules + c s.module_types in
match s with
| Blank -> 0
| Divergence p ->
card p.before + card_def p.after
| Exact s -> card_def s
let (|+>) m (name,x) = Name.Map.add name x m
let rec gen_merge def_merge s1 s2 = match s1, s2 with
| Blank, s | s, Blank -> s
| Exact s1, Exact s2 -> Exact (def_merge s1 s2)
| Divergence p , Exact s ->
Divergence { p with after = def_merge p.after s }
| s, Divergence p ->
Divergence { p with before = gen_merge def_merge s p.before }
let merge x = gen_merge Def.merge x
let weak_merge x = gen_merge Def.weak_merge x
let diff = gen_merge sig_diff
let flatten = flatten
let create m = Exact { modules = empty |+> m; module_types = empty }
let create_type m = Exact { module_types = empty |+> m; modules = empty }
let is_exact = is_exact_sig
let gen_create level md = match level with
| Module -> create md
| Module_type -> create_type md
let of_lists l1 l2 = Exact (signature_of_lists l1 l2)
let of_list ms =
Exact { modules = List.fold_left (|+>) empty ms; module_types = empty }
let of_list_type ms =
Exact { module_types = List.fold_left (|+>) empty ms; modules = empty }
let add_gen lvl sg x = match sg with
| Blank -> Exact (Def.add_gen lvl Def.empty x)
| Exact sg -> Exact (Def.add_gen lvl sg x)
| Divergence p -> Divergence { p with after = Def.add_gen lvl p.after x }
let add = add_gen Module
let add_type = add_gen Module_type
let empty = Exact Def.empty
let pp = pp_signature
type t = signature
let sch = let open Schematic in let open Schema in
let named = pair String module' in
custom
(Obj [Opt, Modules.l, Array named; Opt, Module_types.l, Array named])
(fun x -> let s = flatten x in let l x = l(to_list x) in
Record.[ Modules.l $=? l s.modules; Module_types.l $=? l s.module_types ])
(let open Record in fun [_,m;_,mt] -> of_lists (m><[]) (mt><[]) )
end
let rec extend: type any. any ty -> extended ty = function
| Abstract n -> Abstract n
| Fun(a,x) ->
let map a = Arg.map extend a in
(Fun(Option.fmap map a, extend x) : modul_)
| Sig s -> Sig s
| Alias _ as x -> x
| Link _ as x -> x
| Namespace _ as x -> x
module Subst = struct
module Tbl = struct
include Map.Make(Id)
let find_opt k m =
match find k m with
| x -> Some x
| exception Not_found -> None
end
type 'x t = 'x ty Tbl.t
type 'x subst = 'x t
let identity = Tbl.empty
let add id mty subst = Tbl.add id mty subst
let rec apply: type any. (Id.t -> any ty option) -> any ty -> any ty = fun subst -> function
| Abstract id as old -> Option.( subst id >< old)
| Fun (x,y) -> Fun(Option.fmap (Arg.map (apply subst)) x, apply subst y)
| Sig {origin;signature} ->
Sig {origin;signature = apply_sig (fun id -> Option.fmap extend (subst id)) signature }
| Alias _ as x -> x
| Link _ as x -> x
| Namespace _ as x -> x
and apply_sig: (Id.t -> extended ty option) -> signature -> signature = fun subst -> function
| Blank -> Blank
| Exact s -> Exact (Def.map (apply subst) s)
| Divergence d -> Divergence { point = d.point;
before = apply_sig subst d.before;
after = Def.map (apply subst) d.after
}
let refresh seed x =
let tbl = ref Tbl.empty in
apply (fun k ->
match Tbl.find_opt k !tbl with
| Some _ as y -> y
| None ->
let fresh = Abstract (Id.create seed) in
tbl := Tbl.add k fresh !tbl;
Some fresh
) x
let apply subst x = if subst = identity then x else
apply (fun k -> Tbl.find_opt k subst) x
let rec compute_constraints lvl (type any) (arg:any ty) (param:any ty) (subst: extended subst): extended subst =
match arg, param with
| x, Abstract id -> add id (extend x) subst
| Fun _, Fun _ -> subst
| Alias _, Alias _ -> subst
| Link _, Link _ -> subst
| Namespace _, Namespace _ -> subst
| Sig arg, Sig param ->
if lvl = Module then
sig_constraints (Sig.flatten arg.signature) (Sig.flatten param.signature) subst
else
subst
and sig_constraints arg param subst =
subst
|> dict_constraints Module arg.modules param.modules
|> dict_constraints Module_type arg.module_types param.module_types
and dict_constraints lvl arg param subst =
Name.Map.fold (fun k arg subst ->
match Name.Map.find k param with
| exception Not_found -> subst
| param -> compute_constraints lvl arg param subst
) arg subst
let rec replace_at ~level ~delete ~path ~replacement = function
| Sig s ->
let signature, subst = sig_replace_at ~level ~delete ~path ~replacement s.signature in
Sig {s with signature}, subst
and sig_replace_at ~level ~delete ~path ~replacement s =
match path, s with
| _ :: _, Exact e ->
let def, eq = def_replace_at ~level ~delete ~path ~replacement e in
Exact def, eq
| name :: q, Divergence ({after; before; _ } as d) ->
let level' = match q with [] -> level | _ :: _ -> Module in
match Def.find level' name after with
| None ->
let before, eq = sig_replace_at ~level ~delete ~path ~replacement before in
Divergence ({ d with after; before }), eq
| Some _ ->
let after, eq = def_replace_at ~level ~delete ~path ~replacement after in
Divergence ({ d with after; before }), eq
and def_replace_at ~level ~delete ~path ~replacement s = match path with
| [name] ->
let s' =
if delete then Def.remove level name s
else Def.add_gen level s (name,extend replacement)
in
let eq =
match Def.find level name s with
| Some old ->
compute_constraints level (extend replacement) old identity
in
s', eq
| a :: q ->
match Def.find Module a s with
| Some sub ->
let m, eq = replace_at ~level ~delete ~path:q ~replacement sub in
Def.add_gen Module s (a,m), eq
let compute_constraints ~arg ~param = compute_constraints Module arg param identity
let pp ppf s =
let pp_elt ppf (k,x) = Pp.fp ppf "%a->%a" Id.pp k pp x in
Pp.list pp_elt ppf (Tbl.bindings s)
end
module Equal = struct
let rec eq: type a b. a ty -> b ty -> bool = fun x y ->
match x, y with
| Sig x, Sig y -> tsig x y
| Alias _, Alias _ -> x = y
| Abstract x, Abstract y -> x = y
| Fun (xa,xb), Fun (ya,yb) -> arg_opt xa ya && eq xb yb
| Link _ as x, (Link _ as y) -> x = y
| Namespace x, Namespace y -> dict x y
| _ -> false
and tsig x y =
x.origin = y.origin && signature x.signature y.signature
and signature x y =
match x, y with
| Blank, Blank -> true
| Exact x, Exact y -> def x y
| Divergence x, Divergence y ->
x.point = y.point
&& def x.after y.after
&& signature x.before y.before
| _ -> false
and def x y =
dict x.modules y.modules && dict x.module_types y.module_types
and dict x y = Name.Map.equal eq x y
and arg_opt: type a b. a ty Arg.t option -> b ty Arg.t option -> bool =
fun x y -> match x, y with
| Some x, Some y -> x.name = y.name && eq x.signature y.signature
| None, None -> true
| _ -> false
end
module Partial = struct
type t = { name: string option; mty: sty }
let empty_sig = { origin = Submodule; signature= Sig.empty}
let empty = { name=None; mty = Sig empty_sig }
let simple defs = { empty with mty = Sig { empty_sig with signature = defs} }
let rec is_exact x = match x.mty with
| Abstract _ -> true
| Fun (_,x) -> is_exact {name=None; mty=x}
| Sig s -> Sig.is_exact s.signature
let rec to_module ?origin (p:t) =
to_module_kind ?origin p.mty
and to_module_kind ?origin : sty -> modul_ = function
| Abstract n -> (Abstract n:modul_)
| Fun(a,x) ->
let map a = Arg.map (to_module_kind ?origin) a in
(Fun(Option.fmap map a, to_module_kind ?origin x) : modul_)
| Sig s ->
let origin = match origin with
| Some o -> Origin.at_most s.origin o
| None -> s.origin
in
Sig {origin; signature = s.signature }
let extend = extend
let refresh = Subst.refresh
let apply ~arg ~param ~body =
let subst = Subst.compute_constraints ~arg ~param in
debug "Constraint from typing:%a@." Subst.pp subst;
let res = Subst.apply subst body in
debug "Result:@ %a ⇒@ %a@." pp body pp res;
res
let replace_at ~level ~delete ~path ~replacement body =
let constrained, eq = Subst.replace_at ~delete ~level ~path ~replacement body in
Subst.apply eq constrained
let rec pp_sty ppf: sty -> _ = function
| Abstract n -> Pp.fp ppf "<abstract:%a>" Id.pp n
| Fun (a,x) -> Pp.fp ppf "%a->%a" (Arg.pp pp_sty) a pp_sty x
| Sig m -> pp_m ppf m
let pp ppf (x:t) =
let pp_name ppf = function
| None -> ()
| Some n -> Pp.fp ppf "(%s)" n in
Pp.fp ppf "%a%a" pp_name x.name pp_sty x.mty
let to_arg (p:t) = to_module p
let rec of_extended_mty: modul_ -> sty = function
| Abstract n -> Abstract n
| Sig x -> Sig x
| Fun (a,x) -> Fun(Option.fmap (Arg.map of_extended_mty) a, of_extended_mty x)
| Link _ | Namespace _ | Alias _ -> assert false
let of_extended ?name kind = { name; mty = of_extended_mty kind }
let of_module name m =
{name=Some name; mty = Sig m }
let pseudo_module name x =
let origin = Origin.Namespace in
let signature =
Exact {modules = x; module_types = Dict.empty } in
of_module name { signature; origin }
let is_functor x = match x.mty with
| Fun _ -> true
| _ -> false
let to_sign fdefs = match fdefs.mty with
| Abstract _ | Fun _ -> Error Sig.empty
| Sig s ->
Ok s.signature
module Sch = struct
open Schematic
module S = Schema
module Result = Label(struct let l = "signature" end)
let mu = Schematic_indices.one
let mty =
custom
(Sum ["Abstract", reopen Id.sch;
"Sig", Schema.m;
"Fun", [option @@ Arg.sch mu; mu];
])
(fun (x:sty) -> match x with
| Abstract x -> C (Z x)
| Sig s -> C (S (Z s))
| Fun (a,x) -> C (S (S (Z [a;x])))
| _ -> .
)
(let open Tuple in
function
| C Z x -> Abstract x
| C S Z x -> Sig x
| C S S Z [a;x] -> Fun (a,x)
| _ -> .
)
let mty = Rec { id = ["Partial"; "mty"]; defs=["mty", mty]; proj = Zn }
let partial = custom [option String; mty]
(fun {name; mty} -> [name;mty])
(let open Tuple in fun [name;mty] -> {name;mty})
end
let sch = Sch.partial
end
module Namespace = struct
type t = dict
let merge = Dict.union
let merge_all = List.fold_left merge Dict.empty
let rec from_module nms origin sign =
match nms.Namespaced.namespace with
| [] ->
Dict.of_list [nms.name, Sig (create ~origin sign)]
| a :: namespace ->
let sign = Namespace (from_module { nms with namespace } origin sign) in
Dict.of_list [a, sign]
let sch = Dict.sch Schema.module'
end
|
1d6858242c772ee8ba88494f7f5b7c3f605307c561ac44bf2cec004f7a72164a | ogaml/ogaml | event.mli | module KeyEvent : sig
type t = {key : Keycode.t; shift : bool; control : bool; alt : bool}
end
module ButtonEvent : sig
type t = {button : Button.t; position : OgamlMath.Vector2i.t; shift : bool; control : bool; alt : bool}
end
type t =
| Closed
| Resized of OgamlMath.Vector2i.t
| KeyPressed of KeyEvent.t
| KeyReleased of KeyEvent.t
| ButtonPressed of ButtonEvent.t
| ButtonReleased of ButtonEvent.t
| MouseMoved of OgamlMath.Vector2i.t
| MouseWheelMoved of float
| null | https://raw.githubusercontent.com/ogaml/ogaml/5e74597521abf7ba2833a9247e55780eabfbab78/src/core/event.mli | ocaml | module KeyEvent : sig
type t = {key : Keycode.t; shift : bool; control : bool; alt : bool}
end
module ButtonEvent : sig
type t = {button : Button.t; position : OgamlMath.Vector2i.t; shift : bool; control : bool; alt : bool}
end
type t =
| Closed
| Resized of OgamlMath.Vector2i.t
| KeyPressed of KeyEvent.t
| KeyReleased of KeyEvent.t
| ButtonPressed of ButtonEvent.t
| ButtonReleased of ButtonEvent.t
| MouseMoved of OgamlMath.Vector2i.t
| MouseWheelMoved of float
| |
d6a208c142081ec78a61069e273b8fb44d141f6551773b89902613da0f2adeea | membase/cucumberl | complex_sample.erl | -module(complex_sample).
-export([setup/0, given/3, 'when'/3, then/3, main/0]).
setup() ->
[].
%% Step definitions for the sample calculator Addition feature.
given(Step, State, _) ->
complex_sample_support:given(Step, State).
'when'(Step, State, _) ->
complex_sample_support:'when'(Step, State).
then(Step, State, _) ->
complex_sample_support:then(Step, State).
main() ->
cucumberl:run("./features/complex_sample.feature").
| null | https://raw.githubusercontent.com/membase/cucumberl/80f5cfabcbacddd751be603241eefb29b132838c/examples/complex_sample/src/complex_sample.erl | erlang | Step definitions for the sample calculator Addition feature. | -module(complex_sample).
-export([setup/0, given/3, 'when'/3, then/3, main/0]).
setup() ->
[].
given(Step, State, _) ->
complex_sample_support:given(Step, State).
'when'(Step, State, _) ->
complex_sample_support:'when'(Step, State).
then(Step, State, _) ->
complex_sample_support:then(Step, State).
main() ->
cucumberl:run("./features/complex_sample.feature").
|
248811aa86471c62c8a07b52daff6e9c6916031fe442bc2968cfa597c99d23c2 | KaroshiBee/weevil | next_tests.ml | include Dapper.Dap.Testing_utils
module Dap = Dapper.Dap
module D = Dap.Data
module Js = Data_encoding.Json
module StateMock = struct
include Utils.StateMock
let backend_oc t = t.oc
let set_io t oc =
t.oc <- Some oc
end
module Next = Next.T (StateMock)
let%expect_test "Check sequencing etc for next" =
let state = StateMock.make () in
Lwt_io.with_temp_file ~temp_dir:"/dev/shm" (fun (fname, oc) ->
let () = StateMock.set_io state oc in
let command = Dap.Commands.next in
let req =
Dap.Request.(
Utils.next_msg ~seq:20
|> Js.construct (Message.enc command D.NextArguments.enc)
|> Js.to_string
)
in
Printf.printf "%s" req ;
let%lwt () =
[%expect
{|
{ "seq": 20, "type": "request", "command": "next",
"arguments": { "threadId": 1 } } |}]
in
match Next.handlers ~state with
| f_resp :: f_ev :: [] ->
(* happy path *)
let%lwt resp = f_resp req in
let resp = Result.get_ok resp in
Printf.printf "%s" resp ;
let%lwt () =
[%expect
{|
{ "seq": 1, "type": "response", "request_seq": 20, "success": true,
"command": "next", "body": {} } |}]
in
let%lwt ev = f_ev "string doesnt matter" in
let ev = Result.get_ok ev in
Printf.printf "%s" ev ;
let%lwt () = [%expect {|
{ "seq": 2, "type": "event", "event": "stopped",
"body":
{ "reason": "step", "threadId": 1, "preserveFocusHint": true,
"allThreadsStopped": true } } |}] in
let%lwt () =
let%lwt () = Lwt_io.flush oc in
In_channel.with_open_text fname (fun ic ->
let s = In_channel.input_all ic in
Printf.printf "%s" s;
let%lwt () = [%expect {|
Content-Length: 31
{ "event": { "step_size": 1 } } |}] in
Lwt.return_unit
)
in
Lwt.return_unit
| _ -> failwith "error: expected two handlers for next"
)
| null | https://raw.githubusercontent.com/KaroshiBee/weevil/1b166ba053062498c1ec05c885e04fba4ae7d831/lib/adapter/tests/adapter_expect_tests/next_tests.ml | ocaml | happy path | include Dapper.Dap.Testing_utils
module Dap = Dapper.Dap
module D = Dap.Data
module Js = Data_encoding.Json
module StateMock = struct
include Utils.StateMock
let backend_oc t = t.oc
let set_io t oc =
t.oc <- Some oc
end
module Next = Next.T (StateMock)
let%expect_test "Check sequencing etc for next" =
let state = StateMock.make () in
Lwt_io.with_temp_file ~temp_dir:"/dev/shm" (fun (fname, oc) ->
let () = StateMock.set_io state oc in
let command = Dap.Commands.next in
let req =
Dap.Request.(
Utils.next_msg ~seq:20
|> Js.construct (Message.enc command D.NextArguments.enc)
|> Js.to_string
)
in
Printf.printf "%s" req ;
let%lwt () =
[%expect
{|
{ "seq": 20, "type": "request", "command": "next",
"arguments": { "threadId": 1 } } |}]
in
match Next.handlers ~state with
| f_resp :: f_ev :: [] ->
let%lwt resp = f_resp req in
let resp = Result.get_ok resp in
Printf.printf "%s" resp ;
let%lwt () =
[%expect
{|
{ "seq": 1, "type": "response", "request_seq": 20, "success": true,
"command": "next", "body": {} } |}]
in
let%lwt ev = f_ev "string doesnt matter" in
let ev = Result.get_ok ev in
Printf.printf "%s" ev ;
let%lwt () = [%expect {|
{ "seq": 2, "type": "event", "event": "stopped",
"body":
{ "reason": "step", "threadId": 1, "preserveFocusHint": true,
"allThreadsStopped": true } } |}] in
let%lwt () =
let%lwt () = Lwt_io.flush oc in
In_channel.with_open_text fname (fun ic ->
let s = In_channel.input_all ic in
Printf.printf "%s" s;
let%lwt () = [%expect {|
Content-Length: 31
{ "event": { "step_size": 1 } } |}] in
Lwt.return_unit
)
in
Lwt.return_unit
| _ -> failwith "error: expected two handlers for next"
)
|
99b18c25de27c4402399e220321e9a682f7e6a8b4868baba41e7636b8c8cc494 | appleshan/cl-http | perhaps-patch-cookie.lisp | (in-package :http-user)
;;; If domain is localhost, consider it legal and don't send the domain part of the cookie
(defmethod respond-to-compute-cookie-form ((url url:http-form) stream query-alist)
(flet ((clean-up (item)
(and item ; don't let NIL through
(not (null-string-p (setq item (string-trim '(#\space #\tab #\return #\Linefeed) item))))
item))
(local-domain ()
(let ((host-name (local-host-domain-name)))
(let ((pos (position #\. (local-host-domain-name))))
(if pos
(subseq host-name (1+ pos))
;;; this name will not be valid, but at least the server is not crashing
host-name))))
(expires (expires delete-p)
(cond ((equalp delete-p "yes")
(- (get-universal-time) (* 60 60)))
(expires
(parse-gmt-time expires))
(t (+ (get-universal-time) (* 60 60))))))
(bind-query-values (name value domain path expires delete-p) (url query-alist)
(let* ((name (clean-up name))
(value (clean-up value))
(domain (clean-up domain))
(path (clean-up path))
(expires (clean-up expires))
(delete-p (clean-up delete-p))
(headers (when (and name value)
;; construct the cookie setting header using the defined interface.
(http:set-cookie-http-headers ((http::intern-keyword name) value
:expires (expires expires delete-p)
:domain (or domain (local-domain))
:path path)))))
(declare (dynamic-extent headers))
(setq *der* headers)
(with-successful-response (stream :html :content-location url :expires (url:expiration-universal-time url)
:cache-control (url:response-cache-control-directives url)
:content-language (languages url)
:additional-headers headers)
;; generate another version of the form with the new values.
(write-compute-cookie-form url stream))))))
(defun http::valid-domain-name-string-p (hostname &optional (start 0) (end (length hostname)) )
"Returns non-null if HOSTNAME is a valid internet domain name."
(flet ((illegal-char-p (char)
(member char '(#\% #\space #\tab #\return) :test #'eql)))
(declare (inline illegal-char-p))
(or
(and (http::char-position #\. hostname start end t)
(not (find-if #'illegal-char-p hostname :start start :end end)))
(string-equal hostname "localhost"))))
(define make-set-cookie-header-value (name value &key expires domain path secure)
"Creates a header value for use with the :SET-COOKIE header
that will store a cookie named NAME with value VALUE on a client. This value
created with this function should be passed as the value of :SET-COOKIE using
the ADDITIONAL-HEADERS argument to WITH-SUCCESSFUL-RESPONSE, and related
macros.
EXPIRES is a universal time when the cookie expires. DOMAIN is the server
domain name for which the cookie is valid, defaults to the server host name.
PATH is a relative URL denoting the range of URLs for DOMAIN for which the
cookie is valid. The client tests to see if the current URL is spanned by
PATH. PATH defaults to /. SECURE is a boolean value indicating whether the
cookie should sent over insecure connections (i.e., non-SSL).
For each cookie, the name and the value must not exceed 4k bytes. Each domain
name is limited to 20 cookies. When the 300 total cookies per client or 20
cookies per domain name limit are exceeded, cookies are deleted by clients
according to least recent usage. Servers may force cookies to be deleted by
providing an expiration that is in the past.
Applications may wish to use WRITE-TO-ARMOR-PLATED-STRING and
READ-FROM-ARMOR-PLATED-STRING to protect lisp forms stored in the client.
However, this encoding reduces the amount of data that can be store in a
cookie by approximately 25 percent. Alternatively,
STRING-ESCAPE-SPECIAL-CHARS and STRING-UNESCAPE-SPECIAL-CHARS may be used as a
transfer encoding. "
(check-type name keyword)
(check-type domain (or null string))
(check-type path (or null string (satisfies url-p)))
(unless (> 4001 (+ (the fixnum (length (symbol-name name))) (the fixnum (length value))))
(error "The combined size of NAME and VALUE exceed 4k bytes."))
(let ((args nil))
(cond-every
(secure
(push t args)
(push :secure args))
(path
(check-type path (or null string (satisfies url-p)))
(push path args)
(push :path args))
(domain
(let ((string (etypecase domain
(symbol (symbol-name domain))
(string domain))))
(unless (valid-domain-name-string-p string)
(error "The domain name, ~A, is not valid." string))
(unless (string-equal "localhost" string)
(push string args)
(push :domain args))))
(expires
(check-type expires integer)
(push expires args)
(push :expires args)))
`(,name ,value ,.args))) | null | https://raw.githubusercontent.com/appleshan/cl-http/a7ec6bf51e260e9bb69d8e180a103daf49aa0ac2/acl/jkf/goodies/perhaps-patch-cookie.lisp | lisp | If domain is localhost, consider it legal and don't send the domain part of the cookie
don't let NIL through
this name will not be valid, but at least the server is not crashing
construct the cookie setting header using the defined interface.
generate another version of the form with the new values. | (in-package :http-user)
(defmethod respond-to-compute-cookie-form ((url url:http-form) stream query-alist)
(flet ((clean-up (item)
(not (null-string-p (setq item (string-trim '(#\space #\tab #\return #\Linefeed) item))))
item))
(local-domain ()
(let ((host-name (local-host-domain-name)))
(let ((pos (position #\. (local-host-domain-name))))
(if pos
(subseq host-name (1+ pos))
host-name))))
(expires (expires delete-p)
(cond ((equalp delete-p "yes")
(- (get-universal-time) (* 60 60)))
(expires
(parse-gmt-time expires))
(t (+ (get-universal-time) (* 60 60))))))
(bind-query-values (name value domain path expires delete-p) (url query-alist)
(let* ((name (clean-up name))
(value (clean-up value))
(domain (clean-up domain))
(path (clean-up path))
(expires (clean-up expires))
(delete-p (clean-up delete-p))
(headers (when (and name value)
(http:set-cookie-http-headers ((http::intern-keyword name) value
:expires (expires expires delete-p)
:domain (or domain (local-domain))
:path path)))))
(declare (dynamic-extent headers))
(setq *der* headers)
(with-successful-response (stream :html :content-location url :expires (url:expiration-universal-time url)
:cache-control (url:response-cache-control-directives url)
:content-language (languages url)
:additional-headers headers)
(write-compute-cookie-form url stream))))))
(defun http::valid-domain-name-string-p (hostname &optional (start 0) (end (length hostname)) )
"Returns non-null if HOSTNAME is a valid internet domain name."
(flet ((illegal-char-p (char)
(member char '(#\% #\space #\tab #\return) :test #'eql)))
(declare (inline illegal-char-p))
(or
(and (http::char-position #\. hostname start end t)
(not (find-if #'illegal-char-p hostname :start start :end end)))
(string-equal hostname "localhost"))))
(define make-set-cookie-header-value (name value &key expires domain path secure)
"Creates a header value for use with the :SET-COOKIE header
that will store a cookie named NAME with value VALUE on a client. This value
created with this function should be passed as the value of :SET-COOKIE using
the ADDITIONAL-HEADERS argument to WITH-SUCCESSFUL-RESPONSE, and related
macros.
EXPIRES is a universal time when the cookie expires. DOMAIN is the server
domain name for which the cookie is valid, defaults to the server host name.
PATH is a relative URL denoting the range of URLs for DOMAIN for which the
cookie is valid. The client tests to see if the current URL is spanned by
PATH. PATH defaults to /. SECURE is a boolean value indicating whether the
cookie should sent over insecure connections (i.e., non-SSL).
For each cookie, the name and the value must not exceed 4k bytes. Each domain
name is limited to 20 cookies. When the 300 total cookies per client or 20
cookies per domain name limit are exceeded, cookies are deleted by clients
according to least recent usage. Servers may force cookies to be deleted by
providing an expiration that is in the past.
Applications may wish to use WRITE-TO-ARMOR-PLATED-STRING and
READ-FROM-ARMOR-PLATED-STRING to protect lisp forms stored in the client.
However, this encoding reduces the amount of data that can be store in a
cookie by approximately 25 percent. Alternatively,
STRING-ESCAPE-SPECIAL-CHARS and STRING-UNESCAPE-SPECIAL-CHARS may be used as a
transfer encoding. "
(check-type name keyword)
(check-type domain (or null string))
(check-type path (or null string (satisfies url-p)))
(unless (> 4001 (+ (the fixnum (length (symbol-name name))) (the fixnum (length value))))
(error "The combined size of NAME and VALUE exceed 4k bytes."))
(let ((args nil))
(cond-every
(secure
(push t args)
(push :secure args))
(path
(check-type path (or null string (satisfies url-p)))
(push path args)
(push :path args))
(domain
(let ((string (etypecase domain
(symbol (symbol-name domain))
(string domain))))
(unless (valid-domain-name-string-p string)
(error "The domain name, ~A, is not valid." string))
(unless (string-equal "localhost" string)
(push string args)
(push :domain args))))
(expires
(check-type expires integer)
(push expires args)
(push :expires args)))
`(,name ,value ,.args))) |
77e567d9f9f33a0e2500d7dcb91c52c3ba23debadf95d8413bcb397405600711 | Bodigrim/linear-builder | Main.hs | -- |
Copyright : ( c ) 2022
Licence : BSD3
Maintainer : < >
module Main where
import Data.Bits (Bits(..), FiniteBits(..))
import Data.Foldable
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Text.Builder.Linear.Buffer
import Data.Text.Internal (Text(..))
import Data.Text.Lazy (toStrict)
import Data.Text.Lazy.Builder (toLazyText)
import Data.Text.Lazy.Builder.Int (decimal, hexadecimal)
import Data.Text.Lazy.Builder.RealFloat (realFloat)
import GHC.Generics
import Test.Tasty
import Test.Tasty.QuickCheck hiding ((><), (.&.))
instance Arbitrary Text where
arbitrary = do
xs ← T.pack <$> arbitrary
d ← (`mod` (T.length xs + 1)) <$> arbitrary
pure $ T.drop d xs
shrink t@(Text arr off len)
= map (T.drop d . T.pack) (shrink ys)
++ map (\d' → T.drop d' $ T.pack $ drop (d - d') ys) (shrink d)
where
xs = T.unpack t
ys = T.unpack (Text arr 0 (off + len))
d = length ys - length xs
data Action
= AppendText Text
| PrependText Text
| AppendChar Char
| PrependChar Char
| AppendHex Word
| PrependHex Word
| AppendDec Int
| PrependDec Int
| AppendDec30 Int30
| PrependDec30 Int30
| AppendDouble Double
| PrependDouble Double
| AppendSpaces Word
| PrependSpaces Word
deriving (Eq, Ord, Show, Generic)
instance Arbitrary Action where
arbitrary = oneof
[ AppendText <$> arbitrary
, PrependText <$> arbitrary
, AppendChar <$> arbitraryUnicodeChar
, PrependChar <$> arbitraryUnicodeChar
, AppendHex <$> arbitraryBoundedIntegral
, PrependHex <$> arbitraryBoundedIntegral
, AppendDec <$> arbitraryBoundedIntegral
, PrependDec <$> arbitraryBoundedIntegral
, AppendDec30 <$> arbitraryBoundedIntegral
, PrependDec30 <$> arbitraryBoundedIntegral
, pure $ AppendHex minBound
, pure $ AppendHex maxBound
, pure $ AppendDec minBound
, pure $ AppendDec maxBound
, pure $ AppendDec 0
, AppendDouble <$> arbitrary
, PrependDouble <$> arbitrary
, AppendSpaces . getNonNegative <$> arbitrary
, PrependSpaces . getNonNegative <$> arbitrary
]
shrink = genericShrink
interpretOnText ∷ [Action] → Text → Text
interpretOnText xs z = foldl' go z xs
where
go ∷ Text → Action → Text
go b (AppendText x) = b <> x
go b (PrependText x) = x <> b
go b (AppendChar x) = T.snoc b x
go b (PrependChar x) = T.cons x b
go b (AppendHex x) = b <> toStrict (toLazyText (hexadecimal x))
go b (PrependHex x) = toStrict (toLazyText (hexadecimal x)) <> b
go b (AppendDec x) = b <> toStrict (toLazyText (decimal x))
go b (PrependDec x) = toStrict (toLazyText (decimal x)) <> b
go b (AppendDec30 x) = b <> toStrict (toLazyText (decimal x))
go b (PrependDec30 x) = toStrict (toLazyText (decimal x)) <> b
go b (AppendDouble x) = b <> toStrict (toLazyText (realFloat x))
go b (PrependDouble x) = toStrict (toLazyText (realFloat x)) <> b
go b (AppendSpaces n) = b <> T.replicate (fromIntegral n) (T.singleton ' ')
go b (PrependSpaces n) = T.replicate (fromIntegral n) (T.singleton ' ') <> b
interpretOnBuffer ∷ [Action] → Buffer ⊸ Buffer
interpretOnBuffer xs z = foldlIntoBuffer go z xs
where
go ∷ Buffer ⊸ Action → Buffer
go b (AppendText x) = b |> x
go b (PrependText x) = x <| b
go b (AppendChar x) = b |>. x
go b (PrependChar x) = x .<| b
go b (AppendHex x) = b |>& x
go b (PrependHex x) = x &<| b
go b (AppendDec x) = b |>$ x
go b (PrependDec x) = x $<| b
go b (AppendDec30 x) = b |>$ x
go b (PrependDec30 x) = x $<| b
go b (AppendDouble x) = b |>% x
go b (PrependDouble x) = x %<| b
go b (AppendSpaces n) = b |>… n
go b (PrependSpaces n) = n …<| b
main ∷ IO ()
main = defaultMain $ testGroup "All"
[ testProperty "sequence of actions" prop1
, testProperty "two sequences of actions" prop2
, testProperty "append addr#" prop3
, testProperty "prepend addr#" prop4
, testProperty "bytestring builder" prop5
]
prop1 ∷ [Action] → Property
prop1 acts = interpretOnText acts mempty ===
runBuffer (\b → interpretOnBuffer acts b)
prop2 ∷ [Action] → [Action] → Property
prop2 acts1 acts2 = interpretOnText acts1 mempty <> interpretOnText acts2 mempty ===
runBuffer (\b → go (dupBuffer b))
where
go ∷ (# Buffer, Buffer #) ⊸ Buffer
go (# b1, b2 #) = interpretOnBuffer acts1 b1 >< interpretOnBuffer acts2 b2
prop3 :: [Action] → Property
prop3 acts = runBuffer f1 === runBuffer f2
where
addr# = "foo"#
f1, f2 :: Buffer ⊸ Buffer
f1 = \b → interpretOnBuffer acts b |># addr#
f2 = \b → interpretOnBuffer acts b |> T.pack "foo"
prop4 :: [Action] → Property
prop4 acts = runBuffer f1 === runBuffer f2
where
addr# = "foo"#
f1, f2 :: Buffer ⊸ Buffer
f1 = \b → addr# <|# interpretOnBuffer acts b
f2 = \b → T.pack "foo" <| interpretOnBuffer acts b
prop5 ∷ [Action] → Property
prop5 acts = T.encodeUtf8 (interpretOnText acts mempty) ===
runBufferBS (\b → interpretOnBuffer acts b)
-------------------------------------------------------------------------------
newtype Int30 = Int30' Int
deriving stock (Eq, Ord, Show)
deriving newtype (Enum, Real, Integral)
pattern Int30 :: Int -> Int30
pattern Int30 x <- Int30' x where
Int30 x = Int30' (x .&. ((1 `shiftL` 30) - 1))
{-# COMPLETE Int30 #-}
instance Arbitrary Int30 where
arbitrary = Int30 <$> arbitrary
shrink (Int30 x) = Int30 <$> shrink x
instance Bounded Int30 where
minBound = negate (1 `shiftL` 30)
maxBound = (1 `shiftL` 30) - 1
instance Num Int30 where
Int30 x + Int30 y = Int30 (x + y)
Int30 x * Int30 y = Int30 (x * y)
abs (Int30 x) = Int30 (abs x)
signum = undefined
negate (Int30 x) = Int30 (negate x)
fromInteger x = Int30 (fromInteger x)
instance Bits Int30 where
(.&.) = undefined
(.|.) = undefined
xor = undefined
complement = undefined
shift (Int30 x) i = Int30 (shift x i)
rotate = undefined
bitSize = const 30
bitSizeMaybe = const (Just 30)
isSigned = const True
testBit = undefined
bit = undefined
popCount = undefined
instance FiniteBits Int30 where
finiteBitSize = const 30
| null | https://raw.githubusercontent.com/Bodigrim/linear-builder/c1de83a8496bb3a5b1806f7a53f5cc5304578be5/test/Main.hs | haskell | |
-----------------------------------------------------------------------------
# COMPLETE Int30 # | Copyright : ( c ) 2022
Licence : BSD3
Maintainer : < >
module Main where
import Data.Bits (Bits(..), FiniteBits(..))
import Data.Foldable
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Text.Builder.Linear.Buffer
import Data.Text.Internal (Text(..))
import Data.Text.Lazy (toStrict)
import Data.Text.Lazy.Builder (toLazyText)
import Data.Text.Lazy.Builder.Int (decimal, hexadecimal)
import Data.Text.Lazy.Builder.RealFloat (realFloat)
import GHC.Generics
import Test.Tasty
import Test.Tasty.QuickCheck hiding ((><), (.&.))
instance Arbitrary Text where
arbitrary = do
xs ← T.pack <$> arbitrary
d ← (`mod` (T.length xs + 1)) <$> arbitrary
pure $ T.drop d xs
shrink t@(Text arr off len)
= map (T.drop d . T.pack) (shrink ys)
++ map (\d' → T.drop d' $ T.pack $ drop (d - d') ys) (shrink d)
where
xs = T.unpack t
ys = T.unpack (Text arr 0 (off + len))
d = length ys - length xs
data Action
= AppendText Text
| PrependText Text
| AppendChar Char
| PrependChar Char
| AppendHex Word
| PrependHex Word
| AppendDec Int
| PrependDec Int
| AppendDec30 Int30
| PrependDec30 Int30
| AppendDouble Double
| PrependDouble Double
| AppendSpaces Word
| PrependSpaces Word
deriving (Eq, Ord, Show, Generic)
instance Arbitrary Action where
arbitrary = oneof
[ AppendText <$> arbitrary
, PrependText <$> arbitrary
, AppendChar <$> arbitraryUnicodeChar
, PrependChar <$> arbitraryUnicodeChar
, AppendHex <$> arbitraryBoundedIntegral
, PrependHex <$> arbitraryBoundedIntegral
, AppendDec <$> arbitraryBoundedIntegral
, PrependDec <$> arbitraryBoundedIntegral
, AppendDec30 <$> arbitraryBoundedIntegral
, PrependDec30 <$> arbitraryBoundedIntegral
, pure $ AppendHex minBound
, pure $ AppendHex maxBound
, pure $ AppendDec minBound
, pure $ AppendDec maxBound
, pure $ AppendDec 0
, AppendDouble <$> arbitrary
, PrependDouble <$> arbitrary
, AppendSpaces . getNonNegative <$> arbitrary
, PrependSpaces . getNonNegative <$> arbitrary
]
shrink = genericShrink
interpretOnText ∷ [Action] → Text → Text
interpretOnText xs z = foldl' go z xs
where
go ∷ Text → Action → Text
go b (AppendText x) = b <> x
go b (PrependText x) = x <> b
go b (AppendChar x) = T.snoc b x
go b (PrependChar x) = T.cons x b
go b (AppendHex x) = b <> toStrict (toLazyText (hexadecimal x))
go b (PrependHex x) = toStrict (toLazyText (hexadecimal x)) <> b
go b (AppendDec x) = b <> toStrict (toLazyText (decimal x))
go b (PrependDec x) = toStrict (toLazyText (decimal x)) <> b
go b (AppendDec30 x) = b <> toStrict (toLazyText (decimal x))
go b (PrependDec30 x) = toStrict (toLazyText (decimal x)) <> b
go b (AppendDouble x) = b <> toStrict (toLazyText (realFloat x))
go b (PrependDouble x) = toStrict (toLazyText (realFloat x)) <> b
go b (AppendSpaces n) = b <> T.replicate (fromIntegral n) (T.singleton ' ')
go b (PrependSpaces n) = T.replicate (fromIntegral n) (T.singleton ' ') <> b
interpretOnBuffer ∷ [Action] → Buffer ⊸ Buffer
interpretOnBuffer xs z = foldlIntoBuffer go z xs
where
go ∷ Buffer ⊸ Action → Buffer
go b (AppendText x) = b |> x
go b (PrependText x) = x <| b
go b (AppendChar x) = b |>. x
go b (PrependChar x) = x .<| b
go b (AppendHex x) = b |>& x
go b (PrependHex x) = x &<| b
go b (AppendDec x) = b |>$ x
go b (PrependDec x) = x $<| b
go b (AppendDec30 x) = b |>$ x
go b (PrependDec30 x) = x $<| b
go b (AppendDouble x) = b |>% x
go b (PrependDouble x) = x %<| b
go b (AppendSpaces n) = b |>… n
go b (PrependSpaces n) = n …<| b
main ∷ IO ()
main = defaultMain $ testGroup "All"
[ testProperty "sequence of actions" prop1
, testProperty "two sequences of actions" prop2
, testProperty "append addr#" prop3
, testProperty "prepend addr#" prop4
, testProperty "bytestring builder" prop5
]
prop1 ∷ [Action] → Property
prop1 acts = interpretOnText acts mempty ===
runBuffer (\b → interpretOnBuffer acts b)
prop2 ∷ [Action] → [Action] → Property
prop2 acts1 acts2 = interpretOnText acts1 mempty <> interpretOnText acts2 mempty ===
runBuffer (\b → go (dupBuffer b))
where
go ∷ (# Buffer, Buffer #) ⊸ Buffer
go (# b1, b2 #) = interpretOnBuffer acts1 b1 >< interpretOnBuffer acts2 b2
prop3 :: [Action] → Property
prop3 acts = runBuffer f1 === runBuffer f2
where
addr# = "foo"#
f1, f2 :: Buffer ⊸ Buffer
f1 = \b → interpretOnBuffer acts b |># addr#
f2 = \b → interpretOnBuffer acts b |> T.pack "foo"
prop4 :: [Action] → Property
prop4 acts = runBuffer f1 === runBuffer f2
where
addr# = "foo"#
f1, f2 :: Buffer ⊸ Buffer
f1 = \b → addr# <|# interpretOnBuffer acts b
f2 = \b → T.pack "foo" <| interpretOnBuffer acts b
prop5 ∷ [Action] → Property
prop5 acts = T.encodeUtf8 (interpretOnText acts mempty) ===
runBufferBS (\b → interpretOnBuffer acts b)
newtype Int30 = Int30' Int
deriving stock (Eq, Ord, Show)
deriving newtype (Enum, Real, Integral)
pattern Int30 :: Int -> Int30
pattern Int30 x <- Int30' x where
Int30 x = Int30' (x .&. ((1 `shiftL` 30) - 1))
instance Arbitrary Int30 where
arbitrary = Int30 <$> arbitrary
shrink (Int30 x) = Int30 <$> shrink x
instance Bounded Int30 where
minBound = negate (1 `shiftL` 30)
maxBound = (1 `shiftL` 30) - 1
instance Num Int30 where
Int30 x + Int30 y = Int30 (x + y)
Int30 x * Int30 y = Int30 (x * y)
abs (Int30 x) = Int30 (abs x)
signum = undefined
negate (Int30 x) = Int30 (negate x)
fromInteger x = Int30 (fromInteger x)
instance Bits Int30 where
(.&.) = undefined
(.|.) = undefined
xor = undefined
complement = undefined
shift (Int30 x) i = Int30 (shift x i)
rotate = undefined
bitSize = const 30
bitSizeMaybe = const (Just 30)
isSigned = const True
testBit = undefined
bit = undefined
popCount = undefined
instance FiniteBits Int30 where
finiteBitSize = const 30
|
1348089caa0e113afa1bc15437831cfe81862727562e4ef78b8398dbf2c115ad | konn/subcategories | Class.hs | # LANGUAGE EmptyCase , UndecidableSuperClasses #
module Control.Subcategory.Applicative.Class (CApplicative(..)) where
import Control.Subcategory.Functor
import qualified Control.Applicative as App
infixl 4 <.>
class CFunctor f => CApplicative f where
pair :: (Dom f a, Dom f b, Dom f (a, b)) => f a -> f b -> f (a, b)
default pair :: (Applicative f) => f a -> f b -> f (a, b)
pair = App.liftA2 (,)
(<.>) :: (Dom f a, Dom f b, Dom f (a -> b)) => f (a -> b) -> f a -> f b
default (<.>) :: (Applicative f) => f (a -> b) -> f a -> f b
(<.>) = (<*>)
(.>) :: (Dom f a, Dom f b) => f a -> f b -> f b
default (.>) :: Applicative f
=> f a -> f b -> f b
(.>) = (*>)
(<.) :: (Dom f a, Dom f b) => f a -> f b -> f a
default (<.) :: Applicative f
=> f a -> f b -> f a
(<.) = (<*)
| null | https://raw.githubusercontent.com/konn/subcategories/2ad473e09bbf674bbe3825849bad3cca7b25f4ac/src/Control/Subcategory/Applicative/Class.hs | haskell | # LANGUAGE EmptyCase , UndecidableSuperClasses #
module Control.Subcategory.Applicative.Class (CApplicative(..)) where
import Control.Subcategory.Functor
import qualified Control.Applicative as App
infixl 4 <.>
class CFunctor f => CApplicative f where
pair :: (Dom f a, Dom f b, Dom f (a, b)) => f a -> f b -> f (a, b)
default pair :: (Applicative f) => f a -> f b -> f (a, b)
pair = App.liftA2 (,)
(<.>) :: (Dom f a, Dom f b, Dom f (a -> b)) => f (a -> b) -> f a -> f b
default (<.>) :: (Applicative f) => f (a -> b) -> f a -> f b
(<.>) = (<*>)
(.>) :: (Dom f a, Dom f b) => f a -> f b -> f b
default (.>) :: Applicative f
=> f a -> f b -> f b
(.>) = (*>)
(<.) :: (Dom f a, Dom f b) => f a -> f b -> f a
default (<.) :: Applicative f
=> f a -> f b -> f a
(<.) = (<*)
| |
38e1d60cf24a1ba0edd107114b7add571c722bc2860d2805f4036d2da23a7399 | ivanperez-keera/haskanoid | GameState.hs | | The state of the game during execution . It has two
-- parts: general info (level, points, etc.) and
-- the actual gameplay info (objects).
--
-- Because the game is always in some running state
-- (there are no menus, etc.) we assume that there's
-- always some gameplay info, even though it can be
-- empty.
module GameState where
import as Yampa
import Objects
-- | The running state is given by a bunch of 'Objects' and the current general
-- 'GameInfo'. The latter contains info regarding the current level, the number
-- of points, etc.
--
-- Different parts of the game deal with these data structures. It is
-- therefore convenient to group them in subtrees, even if there's no
-- substantial difference betweem them.
data GameState = GameState
{ gameObjects :: Objects
, gameInfo :: GameInfo
}
-- | Initial (default) game state.
neutralGameState :: GameState
neutralGameState = GameState
{ gameObjects = []
, gameInfo = neutralGameInfo
}
-- | The gameinfo tells us the current game state (running, paused, etc.)
-- and general information, in this case, the number of lives, the level
-- and the points.
--
-- Since this info is then presented together to the users in a top panel, it
-- is convenient to give this product of values a proper name.
data GameInfo = GameInfo
{ gameStatus :: GameStatus
, gameLives :: Int
, gameLevel :: Int
, gamePoints :: Int
}
-- | Initial (default) game info (no points, no lives, no level).
neutralGameInfo :: GameInfo
neutralGameInfo = GameInfo
{ gameStatus = GameStarted
, gameLevel = 0
, gameLives = 0
, gamePoints = 0
}
-- | Possible actual game statuses. The game is always in one of these.
Interaction and presentation depend on this . Yampa switches are
-- used to jump from one to another, and the display module
-- changes presentation depending on the status.
data GameStatus = GamePlaying
| GamePaused
| GameLoading Int
| GameOver
| GameFinished
| GameStarted
deriving Eq
| null | https://raw.githubusercontent.com/ivanperez-keera/haskanoid/cb50205bd8e1ec92eae3b689c1e4f3f2c260367d/src/GameState.hs | haskell | parts: general info (level, points, etc.) and
the actual gameplay info (objects).
Because the game is always in some running state
(there are no menus, etc.) we assume that there's
always some gameplay info, even though it can be
empty.
| The running state is given by a bunch of 'Objects' and the current general
'GameInfo'. The latter contains info regarding the current level, the number
of points, etc.
Different parts of the game deal with these data structures. It is
therefore convenient to group them in subtrees, even if there's no
substantial difference betweem them.
| Initial (default) game state.
| The gameinfo tells us the current game state (running, paused, etc.)
and general information, in this case, the number of lives, the level
and the points.
Since this info is then presented together to the users in a top panel, it
is convenient to give this product of values a proper name.
| Initial (default) game info (no points, no lives, no level).
| Possible actual game statuses. The game is always in one of these.
used to jump from one to another, and the display module
changes presentation depending on the status. | | The state of the game during execution . It has two
module GameState where
import as Yampa
import Objects
data GameState = GameState
{ gameObjects :: Objects
, gameInfo :: GameInfo
}
neutralGameState :: GameState
neutralGameState = GameState
{ gameObjects = []
, gameInfo = neutralGameInfo
}
data GameInfo = GameInfo
{ gameStatus :: GameStatus
, gameLives :: Int
, gameLevel :: Int
, gamePoints :: Int
}
neutralGameInfo :: GameInfo
neutralGameInfo = GameInfo
{ gameStatus = GameStarted
, gameLevel = 0
, gameLives = 0
, gamePoints = 0
}
Interaction and presentation depend on this . Yampa switches are
data GameStatus = GamePlaying
| GamePaused
| GameLoading Int
| GameOver
| GameFinished
| GameStarted
deriving Eq
|
d81ed81772fb8ad348fdd1f2ae8e3848a8097d3ffbd294c023866bb73aef2726 | SRI-CSL/f3d | sysdef-tk.lisp | (in-package :config)
File " tk-pkg.lisp " adds exports to :
;;; These default configuration settings can be modified by $FREEDIUS/arch/<arch>/lisp/config.lisp
(defvar *tk-features* nil
"A list of keywords describing the features of the Tk library.
Permissible values: :THEMED :TRUEFONT.")
(defvar *tcltk-library-files* nil)
Windows users will need to explicitly load the Tcl and Tk DLLs ( as
well as several others , like GL ) before loading Freedius . On
;;; other systems, these libraries are loaded automatically.
A change and a question : under Windoze provides Tcl / Tk in
the form of .a files that can be statically linked into liblisptk .
;;; One also has the option of using an installed binary version of
Tcl / Tk as found in the " Program Files " directory . For now , I am
;;; reverting to the assumed cygwin default, but how should this be
handled ? Should we add a : feature if is present ?
(defvar *enable-slime* t)
(unless (find-package :swank)
(setq *enable-slime* nil))
This is called by tcl - tk - init to possibly load a different REPL
;;; file. Something is wonky with the fd-handler slime communication
style when using threaded SBCL . Locks up rather often :
(defun select-repl ()
( not ( find - package : clim ) ) )
#+cmu "cmu-slime-repl.lisp"
#+sbcl "sbcl-slime-repl.lisp"
# + ( and ( not sb - thread ) ) " sbcl-slime-repl.lisp "
# + ( and ) " sbcl-alt-slime-repl.lisp "
#+allegro "acl-slime-repl.lisp"
"repl.lisp"))
Problem here -- ca n't make a mixed - lisp - system until qffi is loaded .
Perhaps the system - tool should autoload mixed-lisp-system.lisp .
(st:define-system :tk
;;:system-class 'st::mixed-lisp-system
:required-systems '(qffi lisp-extensions tkmin)
: c - source - path " $ FREEDIUS / c / lisptk/ "
:libraries`(,@*tcltk-library-files*
"liblisptk.dll"
;; #-mswindows "liblisptk"
# + mswindows " "
)
:files `("tk-ffi.lisp"
"tcl-tk-init.lisp"
"tcl-eval.lisp"
"tk-commands.lisp"
"tk-bindings.lisp"
"tk-widget.lisp"
"tk-callbacks.lisp"
,(select-repl)
"widget-panel.lisp"
"menus.lisp"
))
; (st::find-system-named :tk)
| null | https://raw.githubusercontent.com/SRI-CSL/f3d/93285f582198bfbab33ca96ff71efda539b1bec7/f3d-tk/lisp-tk/sysdef-tk.lisp | lisp | These default configuration settings can be modified by $FREEDIUS/arch/<arch>/lisp/config.lisp
other systems, these libraries are loaded automatically.
One also has the option of using an installed binary version of
reverting to the assumed cygwin default, but how should this be
file. Something is wonky with the fd-handler slime communication
:system-class 'st::mixed-lisp-system
#-mswindows "liblisptk"
(st::find-system-named :tk) | (in-package :config)
File " tk-pkg.lisp " adds exports to :
(defvar *tk-features* nil
"A list of keywords describing the features of the Tk library.
Permissible values: :THEMED :TRUEFONT.")
(defvar *tcltk-library-files* nil)
Windows users will need to explicitly load the Tcl and Tk DLLs ( as
well as several others , like GL ) before loading Freedius . On
A change and a question : under Windoze provides Tcl / Tk in
the form of .a files that can be statically linked into liblisptk .
Tcl / Tk as found in the " Program Files " directory . For now , I am
handled ? Should we add a : feature if is present ?
(defvar *enable-slime* t)
(unless (find-package :swank)
(setq *enable-slime* nil))
This is called by tcl - tk - init to possibly load a different REPL
style when using threaded SBCL . Locks up rather often :
(defun select-repl ()
( not ( find - package : clim ) ) )
#+cmu "cmu-slime-repl.lisp"
#+sbcl "sbcl-slime-repl.lisp"
# + ( and ( not sb - thread ) ) " sbcl-slime-repl.lisp "
# + ( and ) " sbcl-alt-slime-repl.lisp "
#+allegro "acl-slime-repl.lisp"
"repl.lisp"))
Problem here -- ca n't make a mixed - lisp - system until qffi is loaded .
Perhaps the system - tool should autoload mixed-lisp-system.lisp .
(st:define-system :tk
:required-systems '(qffi lisp-extensions tkmin)
: c - source - path " $ FREEDIUS / c / lisptk/ "
:libraries`(,@*tcltk-library-files*
"liblisptk.dll"
# + mswindows " "
)
:files `("tk-ffi.lisp"
"tcl-tk-init.lisp"
"tcl-eval.lisp"
"tk-commands.lisp"
"tk-bindings.lisp"
"tk-widget.lisp"
"tk-callbacks.lisp"
,(select-repl)
"widget-panel.lisp"
"menus.lisp"
))
|
ff7a81ea11ac66bd706612c757d992f055e6f54bc0ffc13e0bc1f1403f06c7b9 | samrushing/irken-compiler | t_frb1.scm | ;; -*- Mode: Irken -*-
(include "lib/core.scm")
(include "lib/pair.scm")
(include "lib/string.scm")
(include "lib/frb.scm")
(define (t0)
(let ((t (tree/make int-cmp
(1 "time")
(2 "flies")
(3 "like")
(4 "a")
(5 "banana")
)))
(printn t)
(tree/dump 0 (lambda (k v d) (printf (repeat d " ") (int k) " " (string v) "\n")) t)
(tree/member t int-cmp 5)
(let ((t1 (tree/delete t int-cmp 4)))
(printn t1))
))
(include "lib/random.scm")
(define (t1 n)
(define (pprint t)
(tree/dump 0 (lambda (k v d) (printf (lpad 5 (int v)) " " (repeat d " ") (int k) "\n")) t)
)
(let ((t (tree:empty))
(keys0 '()))
(srandom 3141596)
(for-range i n
(let ((v (random)))
(tree/insert! t int-cmp v i)
(push! keys0 v)
))
(assert (tree/verify t))
(let ((t2 t))
(for-each
(lambda (k)
(set! t2 (tree/delete t2 int-cmp k))
(assert (tree/verify t2))
)
keys0))
(printn (tree/min t))
(printn (tree/max t))
(let ((sorted-keys (sort < keys0))
(min-key (first sorted-keys))
(max-key (last sorted-keys))
((k0 v0) (tree/min t))
((k1 v1) (tree/max t)))
(assert (= k0 min-key))
(assert (= k1 max-key))
(printn min-key)
(printn max-key)
)
))
(define (t2 n)
(let ((t (tree:empty)))
(srandom 3141596)
(for-range i n
(let ((v (random)))
(set! t (tree/insert t int-cmp v i))
))
(let ((bh (tree/black-height t 0)))
(printf "black-height: " (int bh) "\n")
(printn (tree/verify t)))
))
(t0)
(t1 10000)
(t2 1000)
| null | https://raw.githubusercontent.com/samrushing/irken-compiler/690da48852d55497f873738df54f14e8e135d006/tests/t_frb1.scm | scheme | -*- Mode: Irken -*- |
(include "lib/core.scm")
(include "lib/pair.scm")
(include "lib/string.scm")
(include "lib/frb.scm")
(define (t0)
(let ((t (tree/make int-cmp
(1 "time")
(2 "flies")
(3 "like")
(4 "a")
(5 "banana")
)))
(printn t)
(tree/dump 0 (lambda (k v d) (printf (repeat d " ") (int k) " " (string v) "\n")) t)
(tree/member t int-cmp 5)
(let ((t1 (tree/delete t int-cmp 4)))
(printn t1))
))
(include "lib/random.scm")
(define (t1 n)
(define (pprint t)
(tree/dump 0 (lambda (k v d) (printf (lpad 5 (int v)) " " (repeat d " ") (int k) "\n")) t)
)
(let ((t (tree:empty))
(keys0 '()))
(srandom 3141596)
(for-range i n
(let ((v (random)))
(tree/insert! t int-cmp v i)
(push! keys0 v)
))
(assert (tree/verify t))
(let ((t2 t))
(for-each
(lambda (k)
(set! t2 (tree/delete t2 int-cmp k))
(assert (tree/verify t2))
)
keys0))
(printn (tree/min t))
(printn (tree/max t))
(let ((sorted-keys (sort < keys0))
(min-key (first sorted-keys))
(max-key (last sorted-keys))
((k0 v0) (tree/min t))
((k1 v1) (tree/max t)))
(assert (= k0 min-key))
(assert (= k1 max-key))
(printn min-key)
(printn max-key)
)
))
(define (t2 n)
(let ((t (tree:empty)))
(srandom 3141596)
(for-range i n
(let ((v (random)))
(set! t (tree/insert t int-cmp v i))
))
(let ((bh (tree/black-height t 0)))
(printf "black-height: " (int bh) "\n")
(printn (tree/verify t)))
))
(t0)
(t1 10000)
(t2 1000)
|
2474a1bdb57067d96260f8a77edd0c7aeed62d9cd4874a5a84759ae30b0641d7 | rixed/ramen | heavyhitters_test.ml | A small program that benchmark the HeavyHitters module , either for
* correctness or speed .
* It receives the top parameters on the command line and then generate as
* many entries as needed with a configurable distribution .
* Every time a value is added the top is asked to classify the point ( either
* in the top or not ) . In parallel , the test tracks the values that are
* supposed to be the actual heavy hitters and estimates the correctness of
* the answer .
*
* We can thus learn about the trade off between max - top - size and resolution ,
* for different flatness of distributions .
* correctness or speed.
* It receives the top parameters on the command line and then generate as
* many entries as needed with a configurable distribution.
* Every time a value is added the top is asked to classify the point (either
* in the top or not). In parallel, the test tracks the values that are
* supposed to be the actual heavy hitters and estimates the correctness of
* the answer.
*
* We can thus learn about the trade off between max-top-size and resolution,
* for different flatness of distributions. *)
open Batteries
module HH = HeavyHitters
(* Generate a random integer according to a given distribution *)
let uniform () =
Random.float 1.
let rec exp_cdf l =
let x = uniform () in
if x = 0. then exp_cdf l else
~-. (log x /. l)
let plot_distribution seen =
let fname = "/tmp/heavyhitters_test.plg" in
File.with_file_out ~mode:[`create;`trunc] fname (fun oc ->
Array.iter (Printf.fprintf oc "%d\n") seen) ;
let max_height = Array.fold_left max 0 seen in
let cmd =
Printf.sprintf
"gnuplot -p -e 'set terminal dumb size 120,%d ansi256; \
set title \"value distribution\"; \
plot \"%s\" notitle with points pointtype 0'"
(min (max_height + 4) 25)
fname in
ignore (Unix.system cmd) ;
Unix.unlink fname
let () =
let top_size = ref 100
and top_max_size = ref 50_000
and top_decay = ref 0.
and top_sigmas = ref 0.
and num_inserts = ref 100_000
and init_inserts = ref 50_000
and pop_size = ref 1_000_000
and num_tracked = ref 50_000
and lambda = ref 0.0002
(* Test the rank function rather than is_in_top: *)
and test_rank = ref false
and plot_distrib = ref false
and skip_tests = ref false
in
Arg.(parse
[ "-top-size", Set_int top_size, "top size (def 20)" ;
"-top-max-size", Set_int top_max_size,
"max number of allowed tracked values in the top. Lower values \
improve performance but decrease correctness (def 50k)" ;
"-decay", Set_float top_decay, "decay coefficient (def 0)" ;
"-sigmas", Set_float top_sigmas, "drop hitters which weight do not \
deviate more than that number of sigmas." ;
"-inserts", Set_int num_inserts,
"number of values to insert and test (def 100k)" ;
"-init-inserts", Set_int init_inserts,
"entries added before starting the test (def 50k)" ;
"-pop-size", Set_int pop_size,
"size of the hitter population, the largest the more challenging \
(def 1M)" ;
"-tracked", Set_int num_tracked,
"how many actual heavy hitters to track (50k)" ;
"-lambda", Set_float lambda,
"distribution is λ*exp(-λ*x) so smaller values flatten the distribution \
and increase the challenge (def 0.0001)" ;
"-test-rank", Set test_rank, "test rank function rather than is-in-top" ;
"-plot", Set plot_distrib, "plot actual distribution over the population" ;
"-skip-tests", Set skip_tests, "skip actual tests, useful with -plot" ]
(fun s -> raise (Bad ("unknown "^ s)))
"HeavyHitters benchmark") ;
if !top_size > !num_tracked then (
Printf.printf "tracked must not be smaller than top-size.\n" ;
exit 1
) ;
Random.self_init () ;
(* Build a top for integers (TODO: string of size n) *)
let top =
HH.make ~max_size:!top_max_size ~decay:!top_decay ~sigmas:!top_sigmas in
let time = ref 0. in
(* Return a random hitter *)
let rec rand () =
(int_of_float (exp_cdf !lambda)) mod !pop_size in
let value_for_top x =
(* Just in case it could help the top algorithm that the heavier hitter
* are the smaller values, scramble the values (need to be reproductible
* obviously, but not reversible: *)
(x * 48271) mod 0x7fffffff in
let add x =
HH.add top !time 1. (value_for_top x) ;
time := !time +. 1. in
let is_in_top x =
HH.is_in_top !top_size (value_for_top x) top in
let seen = Array.make !num_tracked 0 in
let take () =
let x = rand () in
if x < Array.length seen then seen.(x) <- seen.(x) + 1 ;
add x ;
x in
let is_really_in_top n x =
if x < Array.length seen then (
(* We know many times we have seen x, how many times we have seen
* individually non x other heavy hitters. At worse, the rest was
* evenly spread between as few other individuals as necessary to make
* x fall out of the top. Would x still be in the top then? *)
let sum, min_rank, max_rank =
Array.fold_left (fun (sum, min_rank, max_rank) count ->
(* Assume other values with same seen count would go before x: *)
sum + count,
(if count > seen.(x) then min_rank + 1 else min_rank),
(if count >= seen.(x) then max_rank + 1 else max_rank)
We are going to overestimate max_rank
(* Remember we track more values than !top_size, so it may be that we
* have tracked enough values to already know that x is below !top_size: *)
if min_rank >= !top_size then
Some false
else if max_rank >= !top_size then
None
else
(* Then try to down x just below the top size: *)
let num_others = !top_size - max_rank in
let untracked = n - sum in
let max_untracked_count = untracked / num_others in
So if num_others untracked values had score ,
* would they rank before x ? If not then we can still be certain that
* x was is the top , otherwise we ca n't be certain :
* would they rank before x? If not then we can still be certain that
* x was is the top, otherwise we can't be certain: *)
if max_untracked_count < seen.(x) then Some true
else None
) else (
None
) in
Printf.printf "Initial round of %d insertions...\n%!" !init_inserts ;
for _ = 1 to !init_inserts do ignore (take ()) done ;
if !skip_tests then (
if !plot_distrib then plot_distribution seen
) else (
Printf.printf "Start benchmarking...\n%!" ;
Success when true
Success when false
and failures_true = ref 0
and failures_false = ref 0
and unknowns = ref 0 in
for n = 1 to !num_inserts do
let x = take () in
match is_in_top x,
is_really_in_top (n + !init_inserts) x with
| true, Some true ->
incr successes_true
| false, Some false ->
incr successes_false
| false, Some true ->
incr failures_true
| true, Some false->
incr failures_false
| _ ->
incr unknowns
done ;
if !plot_distrib then plot_distribution seen ;
let tracked_ratio =
float_of_int (Array.fold_left (+) 0 seen) /.
float_of_int (!num_inserts + !init_inserts) in
Printf.printf "Tracked %d/%d events (% 5.2f%%)\n"
(Array.fold_left (+) 0 seen) (!num_inserts + !init_inserts)
(100. *. tracked_ratio) ;
(* Find the most top_size value in seen, making use of an array of indices
* into seen and sorting it according to tracked sums: *)
let ordered_seen = Array.init (Array.length seen) identity in
Array.fast_sort (fun i1 i2 -> compare seen.(i2) seen.(i1))
ordered_seen ;
Printf.printf "Heavy hitters were: %a...\n"
(Enum.print ~first:"" ~last:"" ~sep:", " (fun oc i ->
Printf.fprintf oc "%d:%d" i seen.(i)))
(Enum.take (!top_size+1) (Array.enum ordered_seen)) ;
let hh_mask = String.init !top_size (fun i ->
let x = ordered_seen.(i) in
if is_in_top x then 'H' else '.') in
let hh_mask_real =
String.init !top_size (fun i ->
let x = ordered_seen.(i) in
begin
end ;
match is_really_in_top (!num_inserts + !init_inserts) x with
| None -> '?'
| Some true -> 'H'
| Some false -> '.') in
Printf.printf "Last HH mask (expected): %s\n" hh_mask_real ;
Printf.printf "Last HH mask (estimate): %s\n" hh_mask ;
let successes = !successes_true + !successes_false
and failures = !failures_true + !failures_false in
assert (successes + failures + !unknowns = !num_inserts) ;
Printf.printf "Total: % 6d successes, % 6d failures, % 6d unknowns\n"
successes failures !unknowns ;
let hi =
100. *. float_of_int !successes_true /.
float_of_int (!successes_true + !failures_true)
and lo =
100. *. float_of_int !successes_true /.
float_of_int (!successes_true + !failures_true + !unknowns) in
let resolution_true = 100. -. (hi -. lo) in
Printf.printf "When true: % 6d successes, % 6d failures \
-> % 5.2f … %.2f%% (resolution: % 5.2f%%)\n"
!successes_true !failures_true hi lo resolution_true ;
let hi =
100. *. float_of_int !successes_false /.
float_of_int (!successes_false + !failures_false)
and lo =
100. *. float_of_int !successes_false /.
float_of_int (!successes_false + !failures_false + !unknowns) in
let resolution_false = 100. -. (hi -. lo) in
Printf.printf "When false: % 6d successes, % 6d failures \
-> % 5.2f … %.2f%% (resolution: % 5.2f%%)\n"
!successes_false !failures_false hi lo resolution_false ;
if !successes_true + !failures_true = 0 then
Printf.printf "Result does not tell anything about top hitters.\n" ;
if !successes_false + !failures_false = 0 then
Printf.printf "Result does not tell anything about non-top hitters.\n" ;
if resolution_true < 90. || resolution_false < 90. then (
if tracked_ratio > 0.9 then
Printf.printf "Resolution is low despite good tracking, caused by rank equalities.\n"
else
Printf.printf "Resolution is low as a result of bad tracking, consider increasing -tracked.\n"
)
)
| null | https://raw.githubusercontent.com/rixed/ramen/11b1b34c3bf73ee6c69d7eb5c5fbf30e6dd2df4f/src/heavyhitters_test.ml | ocaml | Generate a random integer according to a given distribution
Test the rank function rather than is_in_top:
Build a top for integers (TODO: string of size n)
Return a random hitter
Just in case it could help the top algorithm that the heavier hitter
* are the smaller values, scramble the values (need to be reproductible
* obviously, but not reversible:
We know many times we have seen x, how many times we have seen
* individually non x other heavy hitters. At worse, the rest was
* evenly spread between as few other individuals as necessary to make
* x fall out of the top. Would x still be in the top then?
Assume other values with same seen count would go before x:
Remember we track more values than !top_size, so it may be that we
* have tracked enough values to already know that x is below !top_size:
Then try to down x just below the top size:
Find the most top_size value in seen, making use of an array of indices
* into seen and sorting it according to tracked sums: | A small program that benchmark the HeavyHitters module , either for
* correctness or speed .
* It receives the top parameters on the command line and then generate as
* many entries as needed with a configurable distribution .
* Every time a value is added the top is asked to classify the point ( either
* in the top or not ) . In parallel , the test tracks the values that are
* supposed to be the actual heavy hitters and estimates the correctness of
* the answer .
*
* We can thus learn about the trade off between max - top - size and resolution ,
* for different flatness of distributions .
* correctness or speed.
* It receives the top parameters on the command line and then generate as
* many entries as needed with a configurable distribution.
* Every time a value is added the top is asked to classify the point (either
* in the top or not). In parallel, the test tracks the values that are
* supposed to be the actual heavy hitters and estimates the correctness of
* the answer.
*
* We can thus learn about the trade off between max-top-size and resolution,
* for different flatness of distributions. *)
open Batteries
module HH = HeavyHitters
let uniform () =
Random.float 1.
let rec exp_cdf l =
let x = uniform () in
if x = 0. then exp_cdf l else
~-. (log x /. l)
let plot_distribution seen =
let fname = "/tmp/heavyhitters_test.plg" in
File.with_file_out ~mode:[`create;`trunc] fname (fun oc ->
Array.iter (Printf.fprintf oc "%d\n") seen) ;
let max_height = Array.fold_left max 0 seen in
let cmd =
Printf.sprintf
"gnuplot -p -e 'set terminal dumb size 120,%d ansi256; \
set title \"value distribution\"; \
plot \"%s\" notitle with points pointtype 0'"
(min (max_height + 4) 25)
fname in
ignore (Unix.system cmd) ;
Unix.unlink fname
let () =
let top_size = ref 100
and top_max_size = ref 50_000
and top_decay = ref 0.
and top_sigmas = ref 0.
and num_inserts = ref 100_000
and init_inserts = ref 50_000
and pop_size = ref 1_000_000
and num_tracked = ref 50_000
and lambda = ref 0.0002
and test_rank = ref false
and plot_distrib = ref false
and skip_tests = ref false
in
Arg.(parse
[ "-top-size", Set_int top_size, "top size (def 20)" ;
"-top-max-size", Set_int top_max_size,
"max number of allowed tracked values in the top. Lower values \
improve performance but decrease correctness (def 50k)" ;
"-decay", Set_float top_decay, "decay coefficient (def 0)" ;
"-sigmas", Set_float top_sigmas, "drop hitters which weight do not \
deviate more than that number of sigmas." ;
"-inserts", Set_int num_inserts,
"number of values to insert and test (def 100k)" ;
"-init-inserts", Set_int init_inserts,
"entries added before starting the test (def 50k)" ;
"-pop-size", Set_int pop_size,
"size of the hitter population, the largest the more challenging \
(def 1M)" ;
"-tracked", Set_int num_tracked,
"how many actual heavy hitters to track (50k)" ;
"-lambda", Set_float lambda,
"distribution is λ*exp(-λ*x) so smaller values flatten the distribution \
and increase the challenge (def 0.0001)" ;
"-test-rank", Set test_rank, "test rank function rather than is-in-top" ;
"-plot", Set plot_distrib, "plot actual distribution over the population" ;
"-skip-tests", Set skip_tests, "skip actual tests, useful with -plot" ]
(fun s -> raise (Bad ("unknown "^ s)))
"HeavyHitters benchmark") ;
if !top_size > !num_tracked then (
Printf.printf "tracked must not be smaller than top-size.\n" ;
exit 1
) ;
Random.self_init () ;
let top =
HH.make ~max_size:!top_max_size ~decay:!top_decay ~sigmas:!top_sigmas in
let time = ref 0. in
let rec rand () =
(int_of_float (exp_cdf !lambda)) mod !pop_size in
let value_for_top x =
(x * 48271) mod 0x7fffffff in
let add x =
HH.add top !time 1. (value_for_top x) ;
time := !time +. 1. in
let is_in_top x =
HH.is_in_top !top_size (value_for_top x) top in
let seen = Array.make !num_tracked 0 in
let take () =
let x = rand () in
if x < Array.length seen then seen.(x) <- seen.(x) + 1 ;
add x ;
x in
let is_really_in_top n x =
if x < Array.length seen then (
let sum, min_rank, max_rank =
Array.fold_left (fun (sum, min_rank, max_rank) count ->
sum + count,
(if count > seen.(x) then min_rank + 1 else min_rank),
(if count >= seen.(x) then max_rank + 1 else max_rank)
We are going to overestimate max_rank
if min_rank >= !top_size then
Some false
else if max_rank >= !top_size then
None
else
let num_others = !top_size - max_rank in
let untracked = n - sum in
let max_untracked_count = untracked / num_others in
So if num_others untracked values had score ,
* would they rank before x ? If not then we can still be certain that
* x was is the top , otherwise we ca n't be certain :
* would they rank before x? If not then we can still be certain that
* x was is the top, otherwise we can't be certain: *)
if max_untracked_count < seen.(x) then Some true
else None
) else (
None
) in
Printf.printf "Initial round of %d insertions...\n%!" !init_inserts ;
for _ = 1 to !init_inserts do ignore (take ()) done ;
if !skip_tests then (
if !plot_distrib then plot_distribution seen
) else (
Printf.printf "Start benchmarking...\n%!" ;
Success when true
Success when false
and failures_true = ref 0
and failures_false = ref 0
and unknowns = ref 0 in
for n = 1 to !num_inserts do
let x = take () in
match is_in_top x,
is_really_in_top (n + !init_inserts) x with
| true, Some true ->
incr successes_true
| false, Some false ->
incr successes_false
| false, Some true ->
incr failures_true
| true, Some false->
incr failures_false
| _ ->
incr unknowns
done ;
if !plot_distrib then plot_distribution seen ;
let tracked_ratio =
float_of_int (Array.fold_left (+) 0 seen) /.
float_of_int (!num_inserts + !init_inserts) in
Printf.printf "Tracked %d/%d events (% 5.2f%%)\n"
(Array.fold_left (+) 0 seen) (!num_inserts + !init_inserts)
(100. *. tracked_ratio) ;
let ordered_seen = Array.init (Array.length seen) identity in
Array.fast_sort (fun i1 i2 -> compare seen.(i2) seen.(i1))
ordered_seen ;
Printf.printf "Heavy hitters were: %a...\n"
(Enum.print ~first:"" ~last:"" ~sep:", " (fun oc i ->
Printf.fprintf oc "%d:%d" i seen.(i)))
(Enum.take (!top_size+1) (Array.enum ordered_seen)) ;
let hh_mask = String.init !top_size (fun i ->
let x = ordered_seen.(i) in
if is_in_top x then 'H' else '.') in
let hh_mask_real =
String.init !top_size (fun i ->
let x = ordered_seen.(i) in
begin
end ;
match is_really_in_top (!num_inserts + !init_inserts) x with
| None -> '?'
| Some true -> 'H'
| Some false -> '.') in
Printf.printf "Last HH mask (expected): %s\n" hh_mask_real ;
Printf.printf "Last HH mask (estimate): %s\n" hh_mask ;
let successes = !successes_true + !successes_false
and failures = !failures_true + !failures_false in
assert (successes + failures + !unknowns = !num_inserts) ;
Printf.printf "Total: % 6d successes, % 6d failures, % 6d unknowns\n"
successes failures !unknowns ;
let hi =
100. *. float_of_int !successes_true /.
float_of_int (!successes_true + !failures_true)
and lo =
100. *. float_of_int !successes_true /.
float_of_int (!successes_true + !failures_true + !unknowns) in
let resolution_true = 100. -. (hi -. lo) in
Printf.printf "When true: % 6d successes, % 6d failures \
-> % 5.2f … %.2f%% (resolution: % 5.2f%%)\n"
!successes_true !failures_true hi lo resolution_true ;
let hi =
100. *. float_of_int !successes_false /.
float_of_int (!successes_false + !failures_false)
and lo =
100. *. float_of_int !successes_false /.
float_of_int (!successes_false + !failures_false + !unknowns) in
let resolution_false = 100. -. (hi -. lo) in
Printf.printf "When false: % 6d successes, % 6d failures \
-> % 5.2f … %.2f%% (resolution: % 5.2f%%)\n"
!successes_false !failures_false hi lo resolution_false ;
if !successes_true + !failures_true = 0 then
Printf.printf "Result does not tell anything about top hitters.\n" ;
if !successes_false + !failures_false = 0 then
Printf.printf "Result does not tell anything about non-top hitters.\n" ;
if resolution_true < 90. || resolution_false < 90. then (
if tracked_ratio > 0.9 then
Printf.printf "Resolution is low despite good tracking, caused by rank equalities.\n"
else
Printf.printf "Resolution is low as a result of bad tracking, consider increasing -tracked.\n"
)
)
|
3cc25cb3b60938e68cdfae30a9391737661753da12f55c2aa93ebf4da356525c | HaskellZhangSong/Introduction_to_Haskell_2ed_source | State.hs | import Control.Monad
newtype State s a = State { runState :: s -> (a,s) }
deriving Functor
newtype Reader r a = Reader { runReader :: r -> a }
deriving Functor
instance Monad (State s) where
return x = State $ \s -> (x,s)
(>>=) :: State s a -> (a -> State s b) -> State s b
-- h :: a -> (a, s)
f : : ( a - > State s b )
-- g :: (s -> (b, s))
(State h) >>= f = State $ \s -> let (a, newState) = h s
(State g) = f a
in g newState
instance Applicative (State s) where
pure = return
(<*>) = ap
evalState :: State s a -> s -> a
evalState m s = fst (runState m s)
evaluate :: State s a -> Reader s a
evaluate s = Reader $ \e -> evalState s e
readOnly :: Reader s a -> State s a
readOnly r = State $ \s -> (runReader r s, s)
data Tree a = Leaf a | Node (Tree a) a (Tree a) deriving (Show,Eq)
labelTree :: Tree a -> Tree (a,Int)
labelTree t = fst $ ntAux t 0
ntAux :: Tree a -> Int -> (Tree (a,Int),Int)
ntAux (Leaf a) n = (Leaf (a,n),n+1)
ntAux (Node l a r) n = let (nn,n') = ((a,n),n+1) in
let (ln,n'') = ntAux l n' in
let (rn,n''') = ntAux r n'' in
(Node ln nn rn, n''')
test :: Tree Int
test = Node (Node (Leaf 5) 3 (Leaf 2)) 7 (Leaf 9)
| null | https://raw.githubusercontent.com/HaskellZhangSong/Introduction_to_Haskell_2ed_source/140c50fdccfe608fe499ecf2d8a3732f531173f5/C12/State.hs | haskell | h :: a -> (a, s)
g :: (s -> (b, s))
| import Control.Monad
newtype State s a = State { runState :: s -> (a,s) }
deriving Functor
newtype Reader r a = Reader { runReader :: r -> a }
deriving Functor
instance Monad (State s) where
return x = State $ \s -> (x,s)
(>>=) :: State s a -> (a -> State s b) -> State s b
f : : ( a - > State s b )
(State h) >>= f = State $ \s -> let (a, newState) = h s
(State g) = f a
in g newState
instance Applicative (State s) where
pure = return
(<*>) = ap
evalState :: State s a -> s -> a
evalState m s = fst (runState m s)
evaluate :: State s a -> Reader s a
evaluate s = Reader $ \e -> evalState s e
readOnly :: Reader s a -> State s a
readOnly r = State $ \s -> (runReader r s, s)
data Tree a = Leaf a | Node (Tree a) a (Tree a) deriving (Show,Eq)
labelTree :: Tree a -> Tree (a,Int)
labelTree t = fst $ ntAux t 0
ntAux :: Tree a -> Int -> (Tree (a,Int),Int)
ntAux (Leaf a) n = (Leaf (a,n),n+1)
ntAux (Node l a r) n = let (nn,n') = ((a,n),n+1) in
let (ln,n'') = ntAux l n' in
let (rn,n''') = ntAux r n'' in
(Node ln nn rn, n''')
test :: Tree Int
test = Node (Node (Leaf 5) 3 (Leaf 2)) 7 (Leaf 9)
|
eda2cee3ce9e3be23e61a2b0390cb32fcc638b409675c562f7eab2a48af3f535 | webyrd/n-grams-for-synthesis | combined-simplified-dynamic-ml-infer-evalo.scm | (load "prelude.scm")
;; ngrams-statistics structure:
;;
;; (((context form) . count) ...)
(define ngrams-statistics (read-data-from-file "tmp/statistics.scm"))
(define unique
(lambda (l)
(if (null? l)
'()
(cons (car l) (remove (car l) (unique (cdr l)))))))
(define all-contexts (unique (map caar ngrams-statistics)))
;; orderings-alist structure:
;;
;; ((context . (eval-relation ...)) ...)
(define orderings-alist
(let ((ordering-for-context
(lambda (ctx)
(let ((ctx-stats (map (lambda (entry) (cons (cadar entry) (cdr entry)))
(filter (lambda (entry) (equal? ctx (caar entry))) ngrams-statistics))))
;; ctx-stats has the structure:
;;
;; ((form . count) ...)
;;
;; For example,
;;
( ( app . 33 ) ... )
(let ((compare
(lambda (a b)
(> (alist-ref ctx-stats (car a) 0)
(alist-ref ctx-stats (car b) 0)))))
(map cdr (list-sort compare expert-ordering-alist-ml-!-/evalo)))))))
(map (lambda (ctx)
(cons ctx (ordering-for-context ctx)))
all-contexts)))
;; context -> list of eval-relations
(define order-eval-relations
(lambda (context)
(cond
((assoc context orderings-alist) => cdr)
(else
;(error 'eval-expo (string-append "bad context " (symbol->string context)))
; symbol? doesn't appear in the data, so we'll return the expert ordering
; for such cases.
expert-ordering-ml-!-/evalo))))
(define (!-/eval-expo expr gamma env type val context)
(build-and-run-conde expr gamma env type val context
(order-eval-relations context)
;expert-ordering
))
| null | https://raw.githubusercontent.com/webyrd/n-grams-for-synthesis/b53b071e53445337d3fe20db0249363aeb9f3e51/combined-simplified-dynamic-ml-infer-evalo.scm | scheme | ngrams-statistics structure:
(((context form) . count) ...)
orderings-alist structure:
((context . (eval-relation ...)) ...)
ctx-stats has the structure:
((form . count) ...)
For example,
context -> list of eval-relations
(error 'eval-expo (string-append "bad context " (symbol->string context)))
symbol? doesn't appear in the data, so we'll return the expert ordering
for such cases.
expert-ordering | (load "prelude.scm")
(define ngrams-statistics (read-data-from-file "tmp/statistics.scm"))
(define unique
(lambda (l)
(if (null? l)
'()
(cons (car l) (remove (car l) (unique (cdr l)))))))
(define all-contexts (unique (map caar ngrams-statistics)))
(define orderings-alist
(let ((ordering-for-context
(lambda (ctx)
(let ((ctx-stats (map (lambda (entry) (cons (cadar entry) (cdr entry)))
(filter (lambda (entry) (equal? ctx (caar entry))) ngrams-statistics))))
( ( app . 33 ) ... )
(let ((compare
(lambda (a b)
(> (alist-ref ctx-stats (car a) 0)
(alist-ref ctx-stats (car b) 0)))))
(map cdr (list-sort compare expert-ordering-alist-ml-!-/evalo)))))))
(map (lambda (ctx)
(cons ctx (ordering-for-context ctx)))
all-contexts)))
(define order-eval-relations
(lambda (context)
(cond
((assoc context orderings-alist) => cdr)
(else
expert-ordering-ml-!-/evalo))))
(define (!-/eval-expo expr gamma env type val context)
(build-and-run-conde expr gamma env type val context
(order-eval-relations context)
))
|
d3dd361a1b336a9f29e7c37a671763c22730a3767b61d501dd3d9a6854ddc0d4 | jumarko/clojure-experiments | drop_every_nth_item.clj | (ns four-clojure.drop-every-nth-item)
;;;
;;; Write a function which drops every nth item from a sequence
(defn drop-nth-item [coll index]
(let [nth-items-to-nil (map-indexed (fn [idx element]
(when (not= 0 (mod (inc idx) index))
element))
coll)]
(remove nil? nth-items-to-nil)))
;; simpler solution using keep-indexed
(defn drop-nth-item [coll index]
(keep-indexed (fn [idx element]
(when (not= 0 (mod (inc idx) index))
element))
coll))
;; most concise solution using partition-all
(defn drop-nth-item [coll index]
(apply concat
(partition-all (dec index) index coll)))
(= (drop-nth-item [1 2 3 4 5 6 7 8] 3) [1 2 4 5 7 8])
(= (drop-nth-item [:a :b :c :d :e :f] 2) [:a :c :e])
(= (drop-nth-item [1 2 3 4 5 6] 4) [1 2 3 5 6])
;; my custom check for repetitive elements
(= (drop-nth-item [1 2 1 1 3 4 4 5 5 6] 4) [1 2 1 3 4 4 5 6])
| null | https://raw.githubusercontent.com/jumarko/clojure-experiments/f0f9c091959e7f54c3fb13d0585a793ebb09e4f9/src/clojure_experiments/four_clojure/drop_every_nth_item.clj | clojure |
Write a function which drops every nth item from a sequence
simpler solution using keep-indexed
most concise solution using partition-all
my custom check for repetitive elements | (ns four-clojure.drop-every-nth-item)
(defn drop-nth-item [coll index]
(let [nth-items-to-nil (map-indexed (fn [idx element]
(when (not= 0 (mod (inc idx) index))
element))
coll)]
(remove nil? nth-items-to-nil)))
(defn drop-nth-item [coll index]
(keep-indexed (fn [idx element]
(when (not= 0 (mod (inc idx) index))
element))
coll))
(defn drop-nth-item [coll index]
(apply concat
(partition-all (dec index) index coll)))
(= (drop-nth-item [1 2 3 4 5 6 7 8] 3) [1 2 4 5 7 8])
(= (drop-nth-item [:a :b :c :d :e :f] 2) [:a :c :e])
(= (drop-nth-item [1 2 3 4 5 6] 4) [1 2 3 5 6])
(= (drop-nth-item [1 2 1 1 3 4 4 5 5 6] 4) [1 2 1 3 4 4 5 6])
|
81a19c54ea3393b49a16d1e76c16a0913c51a14e3e3f3823df941817b93c604f | tari3x/csec-modex | ciloptions.ml |
*
* Copyright ( c ) 2001 - 2003 ,
* < >
* < >
* < >
* < >
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are
* met :
*
* 1 . Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
*
* 2 . Redistributions in binary form must reproduce the above copyright
* notice , this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution .
*
* 3 . The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission .
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS
* IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED
* TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL ,
* EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR
* PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
* NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
*
* Copyright (c) 2001-2003,
* George C. Necula <>
* Scott McPeak <>
* Wes Weimer <>
* Ben Liblit <>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*)
module E = Errormsg
let setDebugFlag v name =
E.debugFlag := v;
if v then Pretty.flushOften := true
type outfile =
{ fname: string;
fchan: out_channel }
(* Processign of output file arguments *)
let openFile (what: string) (takeit: outfile -> unit) (fl: string) =
if !E.verboseFlag then
ignore (Printf.printf "Setting %s to %s\n" what fl);
(try takeit { fname = fl;
fchan = open_out fl }
with _ ->
raise (Arg.Bad ("Cannot open " ^ what ^ " file " ^ fl)))
let fileNames : string list ref = ref []
let recordFile fname =
fileNames := fname :: (!fileNames)
(* Parsing of files with additional names *)
let parseExtraFile (s: string) =
try
let sfile = open_in s in
while true do
let line = try input_line sfile with e -> (close_in sfile; raise e) in
let linelen = String.length line in
let rec scan (pos: int) (* next char to look at *)
(start: int) : unit (* start of the word,
or -1 if none *) =
if pos >= linelen then
if start >= 0 then
recordFile (String.sub line start (pos - start))
else
() (* Just move on to the next line *)
else
let c = String.get line pos in
match c with
' ' | '\n' | '\r' | '\t' ->
(* whitespace *)
if start >= 0 then begin
recordFile (String.sub line start (pos - start));
end;
scan (pos + 1) (-1)
| _ -> (* non-whitespace *)
if start >= 0 then
scan (pos + 1) start
else
scan (pos + 1) pos
in
scan 0 (-1)
done
with Sys_error _ -> E.s (E.error "Cannot find extra file: %s" s)
| End_of_file -> ()
let options : (string * Arg.spec * string) list =
let is_default = function
true -> " (default)"
| false -> "" in
[
(* General Options *)
"", Arg.Unit (fun () -> ()), " \n\t\tGeneral Options\n";
"--version",
Arg.Unit (fun _ ->
print_endline ("CIL version " ^ Cil.cilVersion ^
"\nMore information at /\n");
exit 0),
" Output version information and exit";
"--verbose",
Arg.Set E.verboseFlag,
(" Print lots of random stuff; this is passed on from cilly" ^
is_default !E.verboseFlag);
"--noverbose",
Arg.Clear E.verboseFlag,
(" Undo effect of verbose flag" ^ is_default (not !E.verboseFlag));
"--warnall",
Arg.Set E.warnFlag,
(" Show optional warnings" ^ is_default !E.warnFlag);
"--nowarnall",
Arg.Clear E.warnFlag,
(" Disable optional warnings" ^ is_default (not !E.warnFlag));
"--noTruncateWarning",
Arg.Clear Cil.warnTruncate,
" Suppress warning about truncating integer constants";
"--debug",
Arg.String (setDebugFlag true),
"<xxx> Turn on debugging flag xxx";
"--nodebug",
Arg.String (setDebugFlag false),
"<xxx> Turn off debugging flag xxx";
"--flush",
Arg.Set Pretty.flushOften,
(" Flush the output streams often; aids debugging" ^
is_default !Pretty.flushOften);
"--noflush",
Arg.Clear Pretty.flushOften,
(" Only flush output streams when inevitable" ^
is_default (not !Pretty.flushOften));
"--check",
Arg.Set Cilutil.doCheck,
(" Run a consistency check over the CIL after every operation" ^
is_default !Cilutil.doCheck);
"--nocheck",
Arg.Clear Cilutil.doCheck,
(" Turn off consistency checking of CIL" ^
is_default (not !Cilutil.doCheck));
"--strictcheck", Arg.Unit (fun _ -> Cilutil.doCheck := true;
Cilutil.strictChecking := true),
" Same as --check, but treats problems as errors not warnings.";
"", Arg.Unit (fun _ -> ()), "";
"--noPrintLn",
Arg.Unit (fun _ ->
Cil.lineDirectiveStyle := None;
Cprint.printLn := false),
" Do not output #line directives in the output";
"--commPrintLn",
Arg.Unit (fun _ ->
Cil.lineDirectiveStyle := Some Cil.LineComment;
Cprint.printLnComment := true),
" Print #line directives in the output, but put them in comments";
"--commPrintLnSparse",
Arg.Unit (fun _ ->
Cil.lineDirectiveStyle := Some Cil.LineCommentSparse;
Cprint.printLnComment := true),
" Print commented #line directives in the output only when\n\t\t\t\tthe line number changes.";
"--stats",
Arg.Set Cilutil.printStats,
(" Print statistics about running times and memory usage" ^
is_default !Cilutil.printStats);
"--nostats",
Arg.Clear Cilutil.printStats,
(" Do not print statistics" ^
is_default (not !Cilutil.printStats));
"--log",
Arg.String (openFile "log" (fun oc -> E.logChannel := oc.fchan)),
"<filename> Set the name of the log file; by default use stderr";
"--MSVC",
Arg.Unit (fun _ ->
Cil.msvcMode := true;
Frontc.setMSVCMode ();
if not Machdep.hasMSVC then
ignore (E.warn "Will work in MSVC mode but will be using machine-dependent parameters for GCC since you do not have the MSVC compiler installed")),
" Enable MSVC compatibility; default is GNU";
"--envmachine",
Arg.Unit (fun _ ->
try
let machineModel = Sys.getenv "CIL_MACHINE" in
Cil.envMachine := Some (Machdepenv.modelParse machineModel);
with
Not_found ->
ignore (E.error "CIL_MACHINE environment variable is not set")
| Failure msg ->
ignore (E.error "CIL_MACHINE machine model is invalid: %s" msg)),
" Use machine model specified in CIL_MACHINE environment variable";
"--ignore-merge-conflicts",
Arg.Set Mergecil.ignore_merge_conflicts,
(" Ignore merging conflicts" ^
is_default !Mergecil.ignore_merge_conflicts);
(* Little-used: *)
(* "--noignore-merge-conflicts", *)
(* Arg.Clear Mergecil.ignore_merge_conflicts, *)
(* (" Do not ignore merging conflicts" ^ *)
(* is_default (not !Mergecil.ignore_merge_conflicts)); *)
"--sliceGlobal",
Arg.Set Cilutil.sliceGlobal,
" Output is the slice of #pragma cilnoremove(sym) symbols";
(* sm: some more debugging options *)
"--tr",
Arg.String Trace.traceAddMulti,
"<sys> Subsystem to show debug printfs for";
"--extrafiles",
Arg.String parseExtraFile,
"<filename> File that contains a list of additional files to process,\n\t\t\t\tseparated by newlines";
(* Lowering Options *)
"", Arg.Unit (fun () -> ()), " \n\t\tLowering Options\n";
"--lowerConstants",
Arg.Set Cil.lowerConstants,
(" Lower constant expressions" ^ is_default !Cil.lowerConstants);
"--noLowerConstants",
Arg.Clear Cil.lowerConstants,
(" Do not lower constant expressions" ^
is_default (not !Cil.lowerConstants));
"--insertImplicitCasts",
Arg.Set Cil.insertImplicitCasts,
(" Insert implicit casts" ^ is_default !Cil.insertImplicitCasts);
"--noInsertImplicitCasts",
Arg.Clear Cil.insertImplicitCasts,
(" Do not insert implicit casts" ^
is_default (not !Cil.insertImplicitCasts));
"--forceRLArgEval",
Arg.Set Cabs2cil.forceRLArgEval,
(" Forces right to left evaluation of function arguments" ^
is_default !Cabs2cil.forceRLArgEval);
"--noForceRLArgEval",
Arg.Clear Cabs2cil.forceRLArgEval,
(" Evaluate function arguments in unspecified order" ^
is_default (not !Cabs2cil.forceRLArgEval));
"--nocil",
Arg.Int (fun n -> Cabs2cil.nocil := n),
"<index> Do not compile to CIL the global with the given index";
"--noDisallowDuplication",
Arg.Set Cabs2cil.allowDuplication,
(" Duplicate small chunks of code if necessary" ^
is_default !Cabs2cil.allowDuplication);
"--disallowDuplication",
Arg.Clear Cabs2cil.allowDuplication,
(" Prevent small chunks of code from being duplicated" ^
is_default (not !Cabs2cil.allowDuplication));
"--makeStaticGlobal",
Arg.Set Cil.makeStaticGlobal,
(" Convert local static variables into global variables" ^
is_default !Cil.makeStaticGlobal);
"--noMakeStaticGlobal",
Arg.Clear Cil.makeStaticGlobal,
(" Use initializers for local static variables" ^
is_default (not !Cil.makeStaticGlobal));
"--useLogicalOperators",
Arg.Set Cil.useLogicalOperators,
(" Where possible (that is, if there are no side-effects),\n\t\t\t\t" ^
"retain &&, || and ?: (instead of transforming them to If statements)" ^
is_default !Cil.useLogicalOperators);
"--noUseLogicalOperators",
Arg.Clear Cil.useLogicalOperators,
("Transform &&, || and ?: to If statements" ^
is_default (not !Cil.useLogicalOperators));
"--useComputedGoto",
Arg.Set Cil.useComputedGoto,
(" Retain GCC's computed goto" ^
is_default !Cil.useComputedGoto);
"--noUseComputedGoto",
Arg.Clear Cil.useComputedGoto,
(" Transform computed goto to Switch statements" ^
is_default (not !Cil.useComputedGoto));
"--useCaseRange",
Arg.Set Cil.useCaseRange,
(" Retain ranges of values in case labels" ^
is_default !Cil.useCaseRange);
"--noUseCaseRange",
Arg.Clear Cil.useCaseRange,
(" Transform case ranges to sequence of cases" ^
is_default (not !Cil.useCaseRange));
"--keepunused",
Arg.Set Rmtmps.keepUnused,
(" Do not remove the unused variables and types" ^
is_default !Rmtmps.keepUnused);
"--nokeepunused",
Arg.Clear Rmtmps.keepUnused,
(" Remove unused variables and types" ^
is_default (not !Rmtmps.keepUnused));
"--rmUnusedInlines",
Arg.Set Rmtmps.rmUnusedInlines,
(" Delete any unused inline functions; this is the default in MSVC mode" ^
is_default !Rmtmps.rmUnusedInlines);
"--noRmUnusedInlines",
Arg.Clear Rmtmps.rmUnusedInlines,
(" Do not delete any unused inline functions" ^
is_default (not !Rmtmps.rmUnusedInlines));
(* Output Options *)
"", Arg.Unit (fun () -> ()), " \n\t\tOutput Options\n";
"--printCilAsIs",
Arg.Set Cil.printCilAsIs,
(" Do not try to simplify the CIL when printing." ^
is_default !Cil.printCilAsIs);
"--noPrintCilAsIs",
Arg.Clear Cil.printCilAsIs,
(" Simplify the CIL when printing. This produces prettier output\n\t\t\t\tby e.g. changing while(1) into more meaningful loops " ^ is_default (not !Cil.printCilAsIs));
"--noWrap",
Arg.Unit (fun _ -> Cil.lineLength := 100_000),
" Do not wrap long lines when printing";
"--pdepth",
Arg.Int (fun n -> Pretty.printDepth := n),
("<n> Set max print depth (default: " ^
string_of_int !Pretty.printDepth ^ ")");
"--decil",
Arg.Clear Cil.print_CIL_Input,
" Don't print CIL specific-features like __blockattribute__";
(* Don't just add new flags at the end ... place options
in the correct category *)
]
| null | https://raw.githubusercontent.com/tari3x/csec-modex/5ab2aa18ef308b4d18ac479e5ab14476328a6a50/deps/cil-1.7.3/src/ciloptions.ml | ocaml | Processign of output file arguments
Parsing of files with additional names
next char to look at
start of the word,
or -1 if none
Just move on to the next line
whitespace
non-whitespace
General Options
Little-used:
"--noignore-merge-conflicts",
Arg.Clear Mergecil.ignore_merge_conflicts,
(" Do not ignore merging conflicts" ^
is_default (not !Mergecil.ignore_merge_conflicts));
sm: some more debugging options
Lowering Options
Output Options
Don't just add new flags at the end ... place options
in the correct category |
*
* Copyright ( c ) 2001 - 2003 ,
* < >
* < >
* < >
* < >
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are
* met :
*
* 1 . Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
*
* 2 . Redistributions in binary form must reproduce the above copyright
* notice , this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution .
*
* 3 . The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission .
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS
* IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED
* TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL ,
* EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR
* PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
* NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
*
* Copyright (c) 2001-2003,
* George C. Necula <>
* Scott McPeak <>
* Wes Weimer <>
* Ben Liblit <>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The names of the contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*)
module E = Errormsg
let setDebugFlag v name =
E.debugFlag := v;
if v then Pretty.flushOften := true
type outfile =
{ fname: string;
fchan: out_channel }
let openFile (what: string) (takeit: outfile -> unit) (fl: string) =
if !E.verboseFlag then
ignore (Printf.printf "Setting %s to %s\n" what fl);
(try takeit { fname = fl;
fchan = open_out fl }
with _ ->
raise (Arg.Bad ("Cannot open " ^ what ^ " file " ^ fl)))
let fileNames : string list ref = ref []
let recordFile fname =
fileNames := fname :: (!fileNames)
let parseExtraFile (s: string) =
try
let sfile = open_in s in
while true do
let line = try input_line sfile with e -> (close_in sfile; raise e) in
let linelen = String.length line in
if pos >= linelen then
if start >= 0 then
recordFile (String.sub line start (pos - start))
else
else
let c = String.get line pos in
match c with
' ' | '\n' | '\r' | '\t' ->
if start >= 0 then begin
recordFile (String.sub line start (pos - start));
end;
scan (pos + 1) (-1)
if start >= 0 then
scan (pos + 1) start
else
scan (pos + 1) pos
in
scan 0 (-1)
done
with Sys_error _ -> E.s (E.error "Cannot find extra file: %s" s)
| End_of_file -> ()
let options : (string * Arg.spec * string) list =
let is_default = function
true -> " (default)"
| false -> "" in
[
"", Arg.Unit (fun () -> ()), " \n\t\tGeneral Options\n";
"--version",
Arg.Unit (fun _ ->
print_endline ("CIL version " ^ Cil.cilVersion ^
"\nMore information at /\n");
exit 0),
" Output version information and exit";
"--verbose",
Arg.Set E.verboseFlag,
(" Print lots of random stuff; this is passed on from cilly" ^
is_default !E.verboseFlag);
"--noverbose",
Arg.Clear E.verboseFlag,
(" Undo effect of verbose flag" ^ is_default (not !E.verboseFlag));
"--warnall",
Arg.Set E.warnFlag,
(" Show optional warnings" ^ is_default !E.warnFlag);
"--nowarnall",
Arg.Clear E.warnFlag,
(" Disable optional warnings" ^ is_default (not !E.warnFlag));
"--noTruncateWarning",
Arg.Clear Cil.warnTruncate,
" Suppress warning about truncating integer constants";
"--debug",
Arg.String (setDebugFlag true),
"<xxx> Turn on debugging flag xxx";
"--nodebug",
Arg.String (setDebugFlag false),
"<xxx> Turn off debugging flag xxx";
"--flush",
Arg.Set Pretty.flushOften,
(" Flush the output streams often; aids debugging" ^
is_default !Pretty.flushOften);
"--noflush",
Arg.Clear Pretty.flushOften,
(" Only flush output streams when inevitable" ^
is_default (not !Pretty.flushOften));
"--check",
Arg.Set Cilutil.doCheck,
(" Run a consistency check over the CIL after every operation" ^
is_default !Cilutil.doCheck);
"--nocheck",
Arg.Clear Cilutil.doCheck,
(" Turn off consistency checking of CIL" ^
is_default (not !Cilutil.doCheck));
"--strictcheck", Arg.Unit (fun _ -> Cilutil.doCheck := true;
Cilutil.strictChecking := true),
" Same as --check, but treats problems as errors not warnings.";
"", Arg.Unit (fun _ -> ()), "";
"--noPrintLn",
Arg.Unit (fun _ ->
Cil.lineDirectiveStyle := None;
Cprint.printLn := false),
" Do not output #line directives in the output";
"--commPrintLn",
Arg.Unit (fun _ ->
Cil.lineDirectiveStyle := Some Cil.LineComment;
Cprint.printLnComment := true),
" Print #line directives in the output, but put them in comments";
"--commPrintLnSparse",
Arg.Unit (fun _ ->
Cil.lineDirectiveStyle := Some Cil.LineCommentSparse;
Cprint.printLnComment := true),
" Print commented #line directives in the output only when\n\t\t\t\tthe line number changes.";
"--stats",
Arg.Set Cilutil.printStats,
(" Print statistics about running times and memory usage" ^
is_default !Cilutil.printStats);
"--nostats",
Arg.Clear Cilutil.printStats,
(" Do not print statistics" ^
is_default (not !Cilutil.printStats));
"--log",
Arg.String (openFile "log" (fun oc -> E.logChannel := oc.fchan)),
"<filename> Set the name of the log file; by default use stderr";
"--MSVC",
Arg.Unit (fun _ ->
Cil.msvcMode := true;
Frontc.setMSVCMode ();
if not Machdep.hasMSVC then
ignore (E.warn "Will work in MSVC mode but will be using machine-dependent parameters for GCC since you do not have the MSVC compiler installed")),
" Enable MSVC compatibility; default is GNU";
"--envmachine",
Arg.Unit (fun _ ->
try
let machineModel = Sys.getenv "CIL_MACHINE" in
Cil.envMachine := Some (Machdepenv.modelParse machineModel);
with
Not_found ->
ignore (E.error "CIL_MACHINE environment variable is not set")
| Failure msg ->
ignore (E.error "CIL_MACHINE machine model is invalid: %s" msg)),
" Use machine model specified in CIL_MACHINE environment variable";
"--ignore-merge-conflicts",
Arg.Set Mergecil.ignore_merge_conflicts,
(" Ignore merging conflicts" ^
is_default !Mergecil.ignore_merge_conflicts);
"--sliceGlobal",
Arg.Set Cilutil.sliceGlobal,
" Output is the slice of #pragma cilnoremove(sym) symbols";
"--tr",
Arg.String Trace.traceAddMulti,
"<sys> Subsystem to show debug printfs for";
"--extrafiles",
Arg.String parseExtraFile,
"<filename> File that contains a list of additional files to process,\n\t\t\t\tseparated by newlines";
"", Arg.Unit (fun () -> ()), " \n\t\tLowering Options\n";
"--lowerConstants",
Arg.Set Cil.lowerConstants,
(" Lower constant expressions" ^ is_default !Cil.lowerConstants);
"--noLowerConstants",
Arg.Clear Cil.lowerConstants,
(" Do not lower constant expressions" ^
is_default (not !Cil.lowerConstants));
"--insertImplicitCasts",
Arg.Set Cil.insertImplicitCasts,
(" Insert implicit casts" ^ is_default !Cil.insertImplicitCasts);
"--noInsertImplicitCasts",
Arg.Clear Cil.insertImplicitCasts,
(" Do not insert implicit casts" ^
is_default (not !Cil.insertImplicitCasts));
"--forceRLArgEval",
Arg.Set Cabs2cil.forceRLArgEval,
(" Forces right to left evaluation of function arguments" ^
is_default !Cabs2cil.forceRLArgEval);
"--noForceRLArgEval",
Arg.Clear Cabs2cil.forceRLArgEval,
(" Evaluate function arguments in unspecified order" ^
is_default (not !Cabs2cil.forceRLArgEval));
"--nocil",
Arg.Int (fun n -> Cabs2cil.nocil := n),
"<index> Do not compile to CIL the global with the given index";
"--noDisallowDuplication",
Arg.Set Cabs2cil.allowDuplication,
(" Duplicate small chunks of code if necessary" ^
is_default !Cabs2cil.allowDuplication);
"--disallowDuplication",
Arg.Clear Cabs2cil.allowDuplication,
(" Prevent small chunks of code from being duplicated" ^
is_default (not !Cabs2cil.allowDuplication));
"--makeStaticGlobal",
Arg.Set Cil.makeStaticGlobal,
(" Convert local static variables into global variables" ^
is_default !Cil.makeStaticGlobal);
"--noMakeStaticGlobal",
Arg.Clear Cil.makeStaticGlobal,
(" Use initializers for local static variables" ^
is_default (not !Cil.makeStaticGlobal));
"--useLogicalOperators",
Arg.Set Cil.useLogicalOperators,
(" Where possible (that is, if there are no side-effects),\n\t\t\t\t" ^
"retain &&, || and ?: (instead of transforming them to If statements)" ^
is_default !Cil.useLogicalOperators);
"--noUseLogicalOperators",
Arg.Clear Cil.useLogicalOperators,
("Transform &&, || and ?: to If statements" ^
is_default (not !Cil.useLogicalOperators));
"--useComputedGoto",
Arg.Set Cil.useComputedGoto,
(" Retain GCC's computed goto" ^
is_default !Cil.useComputedGoto);
"--noUseComputedGoto",
Arg.Clear Cil.useComputedGoto,
(" Transform computed goto to Switch statements" ^
is_default (not !Cil.useComputedGoto));
"--useCaseRange",
Arg.Set Cil.useCaseRange,
(" Retain ranges of values in case labels" ^
is_default !Cil.useCaseRange);
"--noUseCaseRange",
Arg.Clear Cil.useCaseRange,
(" Transform case ranges to sequence of cases" ^
is_default (not !Cil.useCaseRange));
"--keepunused",
Arg.Set Rmtmps.keepUnused,
(" Do not remove the unused variables and types" ^
is_default !Rmtmps.keepUnused);
"--nokeepunused",
Arg.Clear Rmtmps.keepUnused,
(" Remove unused variables and types" ^
is_default (not !Rmtmps.keepUnused));
"--rmUnusedInlines",
Arg.Set Rmtmps.rmUnusedInlines,
(" Delete any unused inline functions; this is the default in MSVC mode" ^
is_default !Rmtmps.rmUnusedInlines);
"--noRmUnusedInlines",
Arg.Clear Rmtmps.rmUnusedInlines,
(" Do not delete any unused inline functions" ^
is_default (not !Rmtmps.rmUnusedInlines));
"", Arg.Unit (fun () -> ()), " \n\t\tOutput Options\n";
"--printCilAsIs",
Arg.Set Cil.printCilAsIs,
(" Do not try to simplify the CIL when printing." ^
is_default !Cil.printCilAsIs);
"--noPrintCilAsIs",
Arg.Clear Cil.printCilAsIs,
(" Simplify the CIL when printing. This produces prettier output\n\t\t\t\tby e.g. changing while(1) into more meaningful loops " ^ is_default (not !Cil.printCilAsIs));
"--noWrap",
Arg.Unit (fun _ -> Cil.lineLength := 100_000),
" Do not wrap long lines when printing";
"--pdepth",
Arg.Int (fun n -> Pretty.printDepth := n),
("<n> Set max print depth (default: " ^
string_of_int !Pretty.printDepth ^ ")");
"--decil",
Arg.Clear Cil.print_CIL_Input,
" Don't print CIL specific-features like __blockattribute__";
]
|
140f4c4c98894ac401eb75d864470c8ca8932cb14746bb6528798711ed4a507d | janestreet/shell | filename_extended.mli | (** Extensions to [Core.Core_filename]. *)
(** [normalize path] Removes as much "." and ".." from the path as possible. If the path
is absolute they will all be removed. *)
val normalize : string -> string
(** [parent path] The parent of the root directory is the root directory @return the path
to the parent of [path]. *)
val parent : string -> string
(** [make_relative ~to_:src f] returns [f] relative to [src].
@raise Failure if [is_relative f <> is_relative src] *)
val make_relative : ?to_:string -> string -> string
(** [make_absolute src] Turn [src] into an absolute path expanded from the current working
directory. *)
val make_absolute : string -> string
(** [expand] Makes a path absolute and expands [~] [~username] to home directories. In
case of error (e.g.: path home of a none existing user) raises [Failure] with a
(hopefully) helpful message. *)
val expand : ?from:string -> string -> string
(** Splits a given path into a list of strings. *)
val explode : string -> string list
(** dual to explode *)
val implode : string list -> string
(**/**)
(* this is exported because it is used by core_extended.filename. *)
val normalize_path : string list -> string list
(**/**)
* Filename.compare is a comparison that normalizes filenames ( " ./a " = " a " ) , uses a more
human ready algorithm based on [ String_extended.collate ] ( " rfc02.txt > rfc1.txt " ) and
extenstions ( " a.c " > " a.h " ) .
It is a total comparison on normalized filenames .
human ready algorithm based on [String_extended.collate] ("rfc02.txt > rfc1.txt") and
extenstions ("a.c" > "a.h").
It is a total comparison on normalized filenames. *)
val compare: string -> string -> int
(** [with_open_temp_file ~write ~f prefix suffix] create a temporary file; runs [write] on
its [out_channel] and then [f] on the resulting file. The file is removed once [f] is
done running. *)
val with_open_temp_file:
?in_dir: string
-> ?write:(out_channel -> unit)
-> f: (string -> 'a)
-> string -> string
-> 'a
(** Runs [f] with a temporary dir as option and removes the directory afterwards. *)
val with_temp_dir: ?in_dir:string -> string -> string -> f:(string -> 'a) -> 'a
(** [is_parent dir1 dir2] returns [true] if [dir1] is a parent of [dir2]
Note: This function is context independent, use [expand] if you want to consider
relatives paths from a given point.
In particular:
- A directory is always the parent of itself.
- The root is the parent of any directory
- An absolute path is never the parent of relative one and vice versa.
- ["../../a"] is never the parent of ["."] even if this could be true given
form the current working directory.
*)
val is_parent : string -> string -> bool
| null | https://raw.githubusercontent.com/janestreet/shell/d3e2163268e29d468a8eaa3c9ab74a1f95486fab/filename_extended/src/filename_extended.mli | ocaml | * Extensions to [Core.Core_filename].
* [normalize path] Removes as much "." and ".." from the path as possible. If the path
is absolute they will all be removed.
* [parent path] The parent of the root directory is the root directory @return the path
to the parent of [path].
* [make_relative ~to_:src f] returns [f] relative to [src].
@raise Failure if [is_relative f <> is_relative src]
* [make_absolute src] Turn [src] into an absolute path expanded from the current working
directory.
* [expand] Makes a path absolute and expands [~] [~username] to home directories. In
case of error (e.g.: path home of a none existing user) raises [Failure] with a
(hopefully) helpful message.
* Splits a given path into a list of strings.
* dual to explode
*/*
this is exported because it is used by core_extended.filename.
*/*
* [with_open_temp_file ~write ~f prefix suffix] create a temporary file; runs [write] on
its [out_channel] and then [f] on the resulting file. The file is removed once [f] is
done running.
* Runs [f] with a temporary dir as option and removes the directory afterwards.
* [is_parent dir1 dir2] returns [true] if [dir1] is a parent of [dir2]
Note: This function is context independent, use [expand] if you want to consider
relatives paths from a given point.
In particular:
- A directory is always the parent of itself.
- The root is the parent of any directory
- An absolute path is never the parent of relative one and vice versa.
- ["../../a"] is never the parent of ["."] even if this could be true given
form the current working directory.
|
val normalize : string -> string
val parent : string -> string
val make_relative : ?to_:string -> string -> string
val make_absolute : string -> string
val expand : ?from:string -> string -> string
val explode : string -> string list
val implode : string list -> string
val normalize_path : string list -> string list
* Filename.compare is a comparison that normalizes filenames ( " ./a " = " a " ) , uses a more
human ready algorithm based on [ String_extended.collate ] ( " rfc02.txt > rfc1.txt " ) and
extenstions ( " a.c " > " a.h " ) .
It is a total comparison on normalized filenames .
human ready algorithm based on [String_extended.collate] ("rfc02.txt > rfc1.txt") and
extenstions ("a.c" > "a.h").
It is a total comparison on normalized filenames. *)
val compare: string -> string -> int
val with_open_temp_file:
?in_dir: string
-> ?write:(out_channel -> unit)
-> f: (string -> 'a)
-> string -> string
-> 'a
val with_temp_dir: ?in_dir:string -> string -> string -> f:(string -> 'a) -> 'a
val is_parent : string -> string -> bool
|
9ed6255bec0558797988b634b65dc4cf614f666cf0eeff0d74ea2e12ee8edf52 | mfelleisen/Acquire | board-intf.rkt | #lang racket
;; ---------------------------------------------------------------------------------------------------
;; interface specification for inspecting and manipulating the Acquire board, its spots, and tiles
;; also provides all tiles: A1 ... I12 via tiles+spots submodule
(require "basics.rkt" "Lib/auxiliaries.rkt" "Lib/contract.rkt" 2htdp/image)
(interface basics&
;; creation of tiles, spots
[row? (-> any/c boolean?)]
[string->row (-> string? (maybe/c row?))]
[column? (-> any/c boolean?)]
[string->column (-> string? (maybe/c column?))]
[tile (-> column? row? any)]
SYNTAX :
( ctile COLUMN ROW ) creates a column and avoids the quoting
ctile
;; properties
[tile? (-> any/c boolean?)]
[tile<=? (-> tile? tile? boolean?)]
[tile>? (-> tile? tile? boolean?)]
[tile<? (-> tile? tile? boolean?)]
[tile->string (-> tile? string?)]
[ALL-TILES (and/c (listof tile?) (sorted tile<=?))]
[STARTER-TILES# natural-number/c]
;; externalize tiles
[xtile? (-> any/c boolean?)]
[tile->xexpr (-> tile? xtile?)]
;; -------------------------------------------------------------------------------------------------
placing a tile can have one of these effects
[FOUNDING symbol?] ;; if a tile is placed here, player may found a hotel IF chains are available
[GROWING symbol?]
[MERGING symbol?]
[SINGLETON symbol?] ;; placing a tile makes it a singleton
[IMPOSSIBLE symbol?] ;; a hotel is safe from merging
;; -------------------------------------------------------------------------------------------------
;; creating a board
[board? (-> any/c boolean?)]
[board (-> board?)]
[board-tiles (-> board? (listof tile?))]
;; -------------------------------------------------------------------------------------------------
[draw (-> board? image?)]
[draw-cell (-> tile? image?)]
;; -------------------------------------------------------------------------------------------------
;; properties of the board
[what-kind-of-spot
;; determine whether t is a spot for
-- founding a hotel < = > there is exactly one horizonatl or vertical ' free ' tile
-- merging hotels < = > there are two distinct hotel neighbors
-- growing a hotel < = > there is one hotel heighbor
;; -- placing a singleton <=> no neighbors whatsoever
;; -- impossible: if the placement would cause a merger and involved a safe hotel
(->i ((b board?) (t tile?))
#:pre/name (b t) "unoccupied spot" (free-spot? b t)
(result (or/c FOUNDING GROWING MERGING SINGLETON IMPOSSIBLE)))]
[growing-which
;; which hotel grows if place a tile at (c,r)
(->i ((b board?) (t tile?)) #:pre (b t) (eq? (what-kind-of-spot b t) GROWING) (hotel hotel?))]
[merging-which
;; which hotels are merged if place a tile at (c,r)
(->i ((b board?) (t tile?)) #:pre/name (b t) "merger spot" (eq? (what-kind-of-spot b t) MERGING)
(values (acquirer (non-empty-listof hotel?)) (acquired (listof hotel?))))]
[size-of-hotel
;; how large is the specified hotel on this board
(-> board? hotel? natural-number/c)]
[free-spot?
;; is this spot unoccupied? (needed for contract of what-kind-of-spot)
(-> board? tile? boolean?)]
;; -------------------------------------------------------------------------------------------------
;; placing tiles on the board
[merge-hotels
;; place a tile that merges hotels
(->i ((b board?) (t tile?) (h hotel?))
#:pre/name (b t) "tile designates a merger spot" (eq? (what-kind-of-spot b t) MERGING)
#:pre/name (b t h) "... a winner" (let-values ([(w _) (merging-which b t)]) (member h w))
(new-board board?))]
[found-hotel
;; place a tile and found a hotel
(->i ((b board?) (t tile?) (h hotel?)) #:pre (b t) (eq? (what-kind-of-spot b t) FOUNDING)
(new-board board?))]
[grow-hotel
;; place a tile and found a hotel
(->i ((b board?) (t tile?)) #:pre (b t) (eq? (what-kind-of-spot b t) GROWING)
(new-board board?))]
[place-tile
place a tile that neither merges hotels nor founds one
(->i ((b board?) (t tile?))
#:pre (b t) (memq (what-kind-of-spot b t) (list SINGLETON GROWING FOUNDING))
(new-board board?))]
[set-board
;; a derived function that re-discovers the appropriate situation and places a tile
(->i ((b board?) (t tile?) (a [or/c FOUNDING GROWING MERGING SINGLETON]) (h (maybe/c hotel?)))
#:pre/name (b t) "good spot" (free-spot? b t)
#:pre/name (a h) "hotel => founding & merging" (==> h (or (eq? FOUNDING a) (eq? MERGING a)))
#:pre/name (a h) "merging => hotel" (==> (eq? MERGING a) h)
(new-board board?))]
[affordable?
;; is the list of hotels for this hotel affordable given the budget constraint
(-> board? shares-order/c cash? boolean?)]
;; -------------------------------------------------------------------------------------------------
;; externalizing boards
[xboard? (-> any/c boolean?)]
[board->xexpr (-> board? xboard?)]
[xspot? (-> any/c boolean?)]
[spot->xexpr (-> board? tile? [or/c SINGLETON FOUNDING GROWING MERGING IMPOSSIBLE] xspot?)]
[*create-board-with-hotels
(->i ([t (and/c (listof tile?) distinct)]
[lh (t) (and/c (listof (cons/c hotel? (listof tile?))) (distinct-and-properly-formed t))])
[b board?])]
[distinct-and-properly-formed
(->i ((free-tiles (listof tile?)))
(check-hotels
(->i ((hotels-as-lists (listof (cons/c hotel? (listof tile?))))) (ok boolean?))))]
;; -------------------------------------------------------------------------------------------------
;; some sample boards
board-a1-b2-c6
board-a2-b2-american
board-b2-c2-am-c4-d4-tw-e4
board-3way-merger-at-d3)
| null | https://raw.githubusercontent.com/mfelleisen/Acquire/5b39df6c757c7c1cafd7ff198641c99d30072b91/board-intf.rkt | racket | ---------------------------------------------------------------------------------------------------
interface specification for inspecting and manipulating the Acquire board, its spots, and tiles
also provides all tiles: A1 ... I12 via tiles+spots submodule
creation of tiles, spots
properties
externalize tiles
-------------------------------------------------------------------------------------------------
if a tile is placed here, player may found a hotel IF chains are available
placing a tile makes it a singleton
a hotel is safe from merging
-------------------------------------------------------------------------------------------------
creating a board
-------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------
properties of the board
determine whether t is a spot for
-- placing a singleton <=> no neighbors whatsoever
-- impossible: if the placement would cause a merger and involved a safe hotel
which hotel grows if place a tile at (c,r)
which hotels are merged if place a tile at (c,r)
how large is the specified hotel on this board
is this spot unoccupied? (needed for contract of what-kind-of-spot)
-------------------------------------------------------------------------------------------------
placing tiles on the board
place a tile that merges hotels
place a tile and found a hotel
place a tile and found a hotel
a derived function that re-discovers the appropriate situation and places a tile
is the list of hotels for this hotel affordable given the budget constraint
-------------------------------------------------------------------------------------------------
externalizing boards
-------------------------------------------------------------------------------------------------
some sample boards | #lang racket
(require "basics.rkt" "Lib/auxiliaries.rkt" "Lib/contract.rkt" 2htdp/image)
(interface basics&
[row? (-> any/c boolean?)]
[string->row (-> string? (maybe/c row?))]
[column? (-> any/c boolean?)]
[string->column (-> string? (maybe/c column?))]
[tile (-> column? row? any)]
SYNTAX :
( ctile COLUMN ROW ) creates a column and avoids the quoting
ctile
[tile? (-> any/c boolean?)]
[tile<=? (-> tile? tile? boolean?)]
[tile>? (-> tile? tile? boolean?)]
[tile<? (-> tile? tile? boolean?)]
[tile->string (-> tile? string?)]
[ALL-TILES (and/c (listof tile?) (sorted tile<=?))]
[STARTER-TILES# natural-number/c]
[xtile? (-> any/c boolean?)]
[tile->xexpr (-> tile? xtile?)]
placing a tile can have one of these effects
[GROWING symbol?]
[MERGING symbol?]
[board? (-> any/c boolean?)]
[board (-> board?)]
[board-tiles (-> board? (listof tile?))]
[draw (-> board? image?)]
[draw-cell (-> tile? image?)]
[what-kind-of-spot
-- founding a hotel < = > there is exactly one horizonatl or vertical ' free ' tile
-- merging hotels < = > there are two distinct hotel neighbors
-- growing a hotel < = > there is one hotel heighbor
(->i ((b board?) (t tile?))
#:pre/name (b t) "unoccupied spot" (free-spot? b t)
(result (or/c FOUNDING GROWING MERGING SINGLETON IMPOSSIBLE)))]
[growing-which
(->i ((b board?) (t tile?)) #:pre (b t) (eq? (what-kind-of-spot b t) GROWING) (hotel hotel?))]
[merging-which
(->i ((b board?) (t tile?)) #:pre/name (b t) "merger spot" (eq? (what-kind-of-spot b t) MERGING)
(values (acquirer (non-empty-listof hotel?)) (acquired (listof hotel?))))]
[size-of-hotel
(-> board? hotel? natural-number/c)]
[free-spot?
(-> board? tile? boolean?)]
[merge-hotels
(->i ((b board?) (t tile?) (h hotel?))
#:pre/name (b t) "tile designates a merger spot" (eq? (what-kind-of-spot b t) MERGING)
#:pre/name (b t h) "... a winner" (let-values ([(w _) (merging-which b t)]) (member h w))
(new-board board?))]
[found-hotel
(->i ((b board?) (t tile?) (h hotel?)) #:pre (b t) (eq? (what-kind-of-spot b t) FOUNDING)
(new-board board?))]
[grow-hotel
(->i ((b board?) (t tile?)) #:pre (b t) (eq? (what-kind-of-spot b t) GROWING)
(new-board board?))]
[place-tile
place a tile that neither merges hotels nor founds one
(->i ((b board?) (t tile?))
#:pre (b t) (memq (what-kind-of-spot b t) (list SINGLETON GROWING FOUNDING))
(new-board board?))]
[set-board
(->i ((b board?) (t tile?) (a [or/c FOUNDING GROWING MERGING SINGLETON]) (h (maybe/c hotel?)))
#:pre/name (b t) "good spot" (free-spot? b t)
#:pre/name (a h) "hotel => founding & merging" (==> h (or (eq? FOUNDING a) (eq? MERGING a)))
#:pre/name (a h) "merging => hotel" (==> (eq? MERGING a) h)
(new-board board?))]
[affordable?
(-> board? shares-order/c cash? boolean?)]
[xboard? (-> any/c boolean?)]
[board->xexpr (-> board? xboard?)]
[xspot? (-> any/c boolean?)]
[spot->xexpr (-> board? tile? [or/c SINGLETON FOUNDING GROWING MERGING IMPOSSIBLE] xspot?)]
[*create-board-with-hotels
(->i ([t (and/c (listof tile?) distinct)]
[lh (t) (and/c (listof (cons/c hotel? (listof tile?))) (distinct-and-properly-formed t))])
[b board?])]
[distinct-and-properly-formed
(->i ((free-tiles (listof tile?)))
(check-hotels
(->i ((hotels-as-lists (listof (cons/c hotel? (listof tile?))))) (ok boolean?))))]
board-a1-b2-c6
board-a2-b2-american
board-b2-c2-am-c4-d4-tw-e4
board-3way-merger-at-d3)
|
4168170f82b012c01e5217cbdc54c162da60811558d4446173a7fdf95a621a66 | tsloughter/kuberl | kuberl_v1beta1_daemon_set_status.erl | -module(kuberl_v1beta1_daemon_set_status).
-export([encode/1]).
-export_type([kuberl_v1beta1_daemon_set_status/0]).
-type kuberl_v1beta1_daemon_set_status() ::
#{ 'collisionCount' => integer(),
'conditions' => list(),
'currentNumberScheduled' := integer(),
'desiredNumberScheduled' := integer(),
'numberAvailable' => integer(),
'numberMisscheduled' := integer(),
'numberReady' := integer(),
'numberUnavailable' => integer(),
'observedGeneration' => integer(),
'updatedNumberScheduled' => integer()
}.
encode(#{ 'collisionCount' := CollisionCount,
'conditions' := Conditions,
'currentNumberScheduled' := CurrentNumberScheduled,
'desiredNumberScheduled' := DesiredNumberScheduled,
'numberAvailable' := NumberAvailable,
'numberMisscheduled' := NumberMisscheduled,
'numberReady' := NumberReady,
'numberUnavailable' := NumberUnavailable,
'observedGeneration' := ObservedGeneration,
'updatedNumberScheduled' := UpdatedNumberScheduled
}) ->
#{ 'collisionCount' => CollisionCount,
'conditions' => Conditions,
'currentNumberScheduled' => CurrentNumberScheduled,
'desiredNumberScheduled' => DesiredNumberScheduled,
'numberAvailable' => NumberAvailable,
'numberMisscheduled' => NumberMisscheduled,
'numberReady' => NumberReady,
'numberUnavailable' => NumberUnavailable,
'observedGeneration' => ObservedGeneration,
'updatedNumberScheduled' => UpdatedNumberScheduled
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1beta1_daemon_set_status.erl | erlang | -module(kuberl_v1beta1_daemon_set_status).
-export([encode/1]).
-export_type([kuberl_v1beta1_daemon_set_status/0]).
-type kuberl_v1beta1_daemon_set_status() ::
#{ 'collisionCount' => integer(),
'conditions' => list(),
'currentNumberScheduled' := integer(),
'desiredNumberScheduled' := integer(),
'numberAvailable' => integer(),
'numberMisscheduled' := integer(),
'numberReady' := integer(),
'numberUnavailable' => integer(),
'observedGeneration' => integer(),
'updatedNumberScheduled' => integer()
}.
encode(#{ 'collisionCount' := CollisionCount,
'conditions' := Conditions,
'currentNumberScheduled' := CurrentNumberScheduled,
'desiredNumberScheduled' := DesiredNumberScheduled,
'numberAvailable' := NumberAvailable,
'numberMisscheduled' := NumberMisscheduled,
'numberReady' := NumberReady,
'numberUnavailable' := NumberUnavailable,
'observedGeneration' := ObservedGeneration,
'updatedNumberScheduled' := UpdatedNumberScheduled
}) ->
#{ 'collisionCount' => CollisionCount,
'conditions' => Conditions,
'currentNumberScheduled' => CurrentNumberScheduled,
'desiredNumberScheduled' => DesiredNumberScheduled,
'numberAvailable' => NumberAvailable,
'numberMisscheduled' => NumberMisscheduled,
'numberReady' => NumberReady,
'numberUnavailable' => NumberUnavailable,
'observedGeneration' => ObservedGeneration,
'updatedNumberScheduled' => UpdatedNumberScheduled
}.
| |
0d77d71e931cb8b15a67b9c2114e8e84f061e1d2b2ab1c316c1cf1e726174f3b | BranchTaken/Hemlock | test_mul.ml | open! Basis.Rudiments
open! Basis
open Nat
let test () =
let rec test_pairs = function
| [] -> ()
| (x, y) :: pairs' -> begin
let z = (x * y) in
File.Fmt.stdout
|> fmt ~alt:true ~radix:Radix.Hex x
|> Fmt.fmt " * "
|> fmt ~alt:true ~radix:Radix.Hex y
|> Fmt.fmt " -> "
|> fmt ~alt:true ~radix:Radix.Hex z
|> Fmt.fmt "\n"
|> ignore;
test_pairs pairs'
end
in
let pairs = [
(of_string "0", of_string "0");
(of_string "0", of_string "1");
(of_string "1", of_string "0");
(of_string "1", of_string
"0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
(of_string "0xffff_ffff", of_string "0xffff_ffff");
(of_string "0xffff_ffff_ffff_ffff", of_string "0xffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff",
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff",
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff")
] in
test_pairs pairs
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/nat/test_mul.ml | ocaml | open! Basis.Rudiments
open! Basis
open Nat
let test () =
let rec test_pairs = function
| [] -> ()
| (x, y) :: pairs' -> begin
let z = (x * y) in
File.Fmt.stdout
|> fmt ~alt:true ~radix:Radix.Hex x
|> Fmt.fmt " * "
|> fmt ~alt:true ~radix:Radix.Hex y
|> Fmt.fmt " -> "
|> fmt ~alt:true ~radix:Radix.Hex z
|> Fmt.fmt "\n"
|> ignore;
test_pairs pairs'
end
in
let pairs = [
(of_string "0", of_string "0");
(of_string "0", of_string "1");
(of_string "1", of_string "0");
(of_string "1", of_string
"0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
(of_string "0xffff_ffff", of_string "0xffff_ffff");
(of_string "0xffff_ffff_ffff_ffff", of_string "0xffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff",
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff",
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff")
] in
test_pairs pairs
let _ = test ()
| |
5a2a0b4674c79ceecb6f5c722e1bd68f73f67ae6923999f9a75e3e1787e04000 | input-output-hk/hydra | VerificationKey.hs | # OPTIONS_GHC -Wno - orphans #
module Hydra.Cardano.Api.VerificationKey where
import Hydra.Cardano.Api.Prelude
-- * Orphans
-- XXX: This is quite specific to payment keys
instance ToJSON (VerificationKey PaymentKey) where
toJSON = toJSON . serialiseToTextEnvelope Nothing
instance FromJSON (VerificationKey PaymentKey) where
parseJSON v = do
env <- parseJSON v
case deserialiseFromTextEnvelope (AsVerificationKey AsPaymentKey) env of
Left e -> fail $ show e
Right a -> pure a
| null | https://raw.githubusercontent.com/input-output-hk/hydra/7bdb54c4c87ddfe3f951028798558e586f1610d3/hydra-cardano-api/src/Hydra/Cardano/Api/VerificationKey.hs | haskell | * Orphans
XXX: This is quite specific to payment keys | # OPTIONS_GHC -Wno - orphans #
module Hydra.Cardano.Api.VerificationKey where
import Hydra.Cardano.Api.Prelude
instance ToJSON (VerificationKey PaymentKey) where
toJSON = toJSON . serialiseToTextEnvelope Nothing
instance FromJSON (VerificationKey PaymentKey) where
parseJSON v = do
env <- parseJSON v
case deserialiseFromTextEnvelope (AsVerificationKey AsPaymentKey) env of
Left e -> fail $ show e
Right a -> pure a
|
983f17d9800675351e3ca96e5b04f3cfb7f9e8bfad9e0ceb41321a1ee9c62969 | stbuehler/haskell-nettle | AES.hs |
module KAT.AES
( katAES
, katAES128
, katAES192
, katAES256
) where
import KAT.Utils
import HexUtils
katAES, katAES128, katAES192, katAES256 :: KATs
katAES = concatKATs
[ katAES128
, katAES192
, katAES256
]
katAES128 = concatKATs
[ katAES128Nettle
, katAES128NIST
]
katAES192 = concatKATs
[ katAES192Nettle
, katAES192NIST
]
katAES256 = concatKATs
[ katAES256Nettle
, katAES256NIST
]
-- source: nettle tests
katAES128Nettle, katAES192Nettle, katAES256Nettle :: KATs
katAES128Nettle = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112")
(hs "506812A45F08C889 B97F5980038B8359")
(hs "D8F532538289EF7D 06B506A4FD5BE9C9")
, KAT_ECB
(hs "14151617191A1B1C 1E1F202123242526")
(hs "5C6D71CA30DE8B8B 00549984D2EC7D4B")
(hs "59AB30F4D4EE6E4F F9907EF65B1FB68C")
, KAT_ECB
(hs "28292A2B2D2E2F30 323334353738393A")
(hs "53F3F4C64F8616E4 E7C56199F48F21F6")
(hs "BF1ED2FCB2AF3FD4 1443B56D85025CB1")
, KAT_ECB
(hs "A0A1A2A3A5A6A7A8 AAABACADAFB0B1B2")
(hs "F5F4F7F684878689 A6A7A0A1D2CDCCCF")
(hs "CE52AF650D088CA5 59425223F4D32694")
-- nettle "test_invert"
, KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112")
(hs "506812A45F08C889 B97F5980038B8359")
(hs "D8F532538289EF7D 06B506A4FD5BE9C9")
]
}
katAES192Nettle = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C")
(hs "2D33EEF2C0430A8A 9EBF45E809C40BB6")
(hs "DFF4945E0336DF4C 1C56BC700EFF837F")
-- nettle "test_invert"
, KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C")
(hs "2D33EEF2C0430A8A 9EBF45E809C40BB6")
(hs "DFF4945E0336DF4C 1C56BC700EFF837F")
]
}
katAES256Nettle = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C 1E1F202123242526")
(hs "834EADFCCAC7E1B30664B1ABA44815AB")
(hs "1946DABF6A03A2A2 C3D0B05080AED6FC")
, KAT_ECB
(hs "8d ae 93 ff fc 78 c9 44 2a bd 0c 1e 68 bc a6 c7 05 c7 84 e3 5a a9 11 8b d3 16 aa 54 9b 44 08 9e")
(hs "a5 ce 55 d4 21 15 a1 c6 4a a4 0c b2 ca a6 d1 37")
(hs "1f 94 fc 85 f2 36 21 06 4a ea e3 c9 cc 38 01 0e")
-- nettle "test_invert"
, KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C 1E1F202123242526")
(hs "834EADFCCAC7E1B30664B1ABA44815AB")
(hs "1946DABF6A03A2A2 C3D0B05080AED6FC")
]
}
From draft NIST spec on AES modes .
F.1 ECB Example Vectors
katAES128NIST, katAES192NIST, katAES256NIST :: KATs
-- F.1.1 ECB-AES128-Encrypt
katAES128NIST = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "2b7e151628aed2a6abf7158809cf4f3c")
(hs "6bc1bee22e409f96e93d7e117393172a ae2d8a571e03ac9c9eb76fac45af8e51 30c81c46a35ce411e5fbc1191a0a52ef f69f2445df4f9b17ad2b417be66c3710")
(hs "3ad77bb40d7a3660a89ecaf32466ef97 f5d3d58503b9699de785895a96fdbaaf 43b1cd7f598ece23881b00e3ed030688 7b0c785e27e8ad3f8223207104725dd4")
]
}
-- F.1.3 ECB-AES192-Encrypt
katAES192NIST = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "8e73b0f7da0e6452c810f32b809079e5 62f8ead2522c6b7b")
(hs "6bc1bee22e409f96e93d7e117393172a ae2d8a571e03ac9c9eb76fac45af8e51 30c81c46a35ce411e5fbc1191a0a52ef f69f2445df4f9b17ad2b417be66c3710")
(hs "bd334f1d6e45f25ff712a214571fa5cc 974104846d0ad3ad7734ecb3ecee4eef ef7afd2270e2e60adce0ba2face6444e 9a4b41ba738d6c72fb16691603c18e0e")
]
}
-- F.1.5 ECB-AES256-Encrypt
katAES256NIST = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "603deb1015ca71be2b73aef0857d7781 1f352c073b6108d72d9810a30914dff4")
(hs "6bc1bee22e409f96e93d7e117393172a ae2d8a571e03ac9c9eb76fac45af8e51 30c81c46a35ce411e5fbc1191a0a52ef f69f2445df4f9b17ad2b417be66c3710")
(hs "f3eed1bdb5d2a03c064b5a7e3db181f8 591ccb10d410ed26dc5ba74a31362870 b6ed21b99ca6f4f9f153e7b1beafed1d 23304b7a39f9f3ff067d8d8f9e24ecc7")
]
}
| null | https://raw.githubusercontent.com/stbuehler/haskell-nettle/0fb94a24c72efd1ef74c368669301bb755977f37/src/Tests/KAT/AES.hs | haskell | source: nettle tests
nettle "test_invert"
nettle "test_invert"
nettle "test_invert"
F.1.1 ECB-AES128-Encrypt
F.1.3 ECB-AES192-Encrypt
F.1.5 ECB-AES256-Encrypt |
module KAT.AES
( katAES
, katAES128
, katAES192
, katAES256
) where
import KAT.Utils
import HexUtils
katAES, katAES128, katAES192, katAES256 :: KATs
katAES = concatKATs
[ katAES128
, katAES192
, katAES256
]
katAES128 = concatKATs
[ katAES128Nettle
, katAES128NIST
]
katAES192 = concatKATs
[ katAES192Nettle
, katAES192NIST
]
katAES256 = concatKATs
[ katAES256Nettle
, katAES256NIST
]
katAES128Nettle, katAES192Nettle, katAES256Nettle :: KATs
katAES128Nettle = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112")
(hs "506812A45F08C889 B97F5980038B8359")
(hs "D8F532538289EF7D 06B506A4FD5BE9C9")
, KAT_ECB
(hs "14151617191A1B1C 1E1F202123242526")
(hs "5C6D71CA30DE8B8B 00549984D2EC7D4B")
(hs "59AB30F4D4EE6E4F F9907EF65B1FB68C")
, KAT_ECB
(hs "28292A2B2D2E2F30 323334353738393A")
(hs "53F3F4C64F8616E4 E7C56199F48F21F6")
(hs "BF1ED2FCB2AF3FD4 1443B56D85025CB1")
, KAT_ECB
(hs "A0A1A2A3A5A6A7A8 AAABACADAFB0B1B2")
(hs "F5F4F7F684878689 A6A7A0A1D2CDCCCF")
(hs "CE52AF650D088CA5 59425223F4D32694")
, KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112")
(hs "506812A45F08C889 B97F5980038B8359")
(hs "D8F532538289EF7D 06B506A4FD5BE9C9")
]
}
katAES192Nettle = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C")
(hs "2D33EEF2C0430A8A 9EBF45E809C40BB6")
(hs "DFF4945E0336DF4C 1C56BC700EFF837F")
, KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C")
(hs "2D33EEF2C0430A8A 9EBF45E809C40BB6")
(hs "DFF4945E0336DF4C 1C56BC700EFF837F")
]
}
katAES256Nettle = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C 1E1F202123242526")
(hs "834EADFCCAC7E1B30664B1ABA44815AB")
(hs "1946DABF6A03A2A2 C3D0B05080AED6FC")
, KAT_ECB
(hs "8d ae 93 ff fc 78 c9 44 2a bd 0c 1e 68 bc a6 c7 05 c7 84 e3 5a a9 11 8b d3 16 aa 54 9b 44 08 9e")
(hs "a5 ce 55 d4 21 15 a1 c6 4a a4 0c b2 ca a6 d1 37")
(hs "1f 94 fc 85 f2 36 21 06 4a ea e3 c9 cc 38 01 0e")
, KAT_ECB
(hs "0001020305060708 0A0B0C0D0F101112 14151617191A1B1C 1E1F202123242526")
(hs "834EADFCCAC7E1B30664B1ABA44815AB")
(hs "1946DABF6A03A2A2 C3D0B05080AED6FC")
]
}
From draft NIST spec on AES modes .
F.1 ECB Example Vectors
katAES128NIST, katAES192NIST, katAES256NIST :: KATs
katAES128NIST = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "2b7e151628aed2a6abf7158809cf4f3c")
(hs "6bc1bee22e409f96e93d7e117393172a ae2d8a571e03ac9c9eb76fac45af8e51 30c81c46a35ce411e5fbc1191a0a52ef f69f2445df4f9b17ad2b417be66c3710")
(hs "3ad77bb40d7a3660a89ecaf32466ef97 f5d3d58503b9699de785895a96fdbaaf 43b1cd7f598ece23881b00e3ed030688 7b0c785e27e8ad3f8223207104725dd4")
]
}
katAES192NIST = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "8e73b0f7da0e6452c810f32b809079e5 62f8ead2522c6b7b")
(hs "6bc1bee22e409f96e93d7e117393172a ae2d8a571e03ac9c9eb76fac45af8e51 30c81c46a35ce411e5fbc1191a0a52ef f69f2445df4f9b17ad2b417be66c3710")
(hs "bd334f1d6e45f25ff712a214571fa5cc 974104846d0ad3ad7734ecb3ecee4eef ef7afd2270e2e60adce0ba2face6444e 9a4b41ba738d6c72fb16691603c18e0e")
]
}
katAES256NIST = defaultKATs
{ kat_ECB =
[ KAT_ECB
(hs "603deb1015ca71be2b73aef0857d7781 1f352c073b6108d72d9810a30914dff4")
(hs "6bc1bee22e409f96e93d7e117393172a ae2d8a571e03ac9c9eb76fac45af8e51 30c81c46a35ce411e5fbc1191a0a52ef f69f2445df4f9b17ad2b417be66c3710")
(hs "f3eed1bdb5d2a03c064b5a7e3db181f8 591ccb10d410ed26dc5ba74a31362870 b6ed21b99ca6f4f9f153e7b1beafed1d 23304b7a39f9f3ff067d8d8f9e24ecc7")
]
}
|
ebe15c330499ae6167ec8feb9ed1c4b1ecf488030f06d704e72e84c0c741a593 | goldfirere/units | Factor.hs | Data / Metrology . Factor.hs
The units Package
Copyright ( c ) 2013
This file defines the Factor kind and operations over lists of Factors .
Factors represents dimensions and units raised to a power of integers , and the lists of Factors represents monomials of dimensions and units .
The units Package
Copyright (c) 2013 Richard Eisenberg
This file defines the Factor kind and operations over lists of Factors.
Factors represents dimensions and units raised to a power of integers, and the lists of Factors represents monomials of dimensions and units.
-}
# LANGUAGE TypeFamilies , DataKinds , TypeOperators , UndecidableInstances , CPP #
allow compilation even without
#ifndef MIN_VERSION_singletons
#define MIN_VERSION_singletons(a,b,c) 1
#endif
#if __GLASGOW_HASKELL__ >= 900
# OPTIONS_GHC -Wno - star - is - type #
#endif
module Data.Metrology.Factor where
import GHC.Exts (Constraint)
import Data.Metrology.Z as Z
import Data.Type.Equality as DTE
import Data.Type.Bool
#if MIN_VERSION_singletons(3,0,0)
import Prelude.Singletons
#else
import Data.Singletons.Prelude
#endif
-- | This will only be used at the kind level. It holds a dimension or unit
-- with its exponent.
data Factor star = F star Z
----------------------------------------------------------
--- Set-like operations ----------------------------------
----------------------------------------------------------
These functions are templates for type - level functions .
remove : : String - > [ String ] - > [ String ]
remove _ [ ] = [ ]
remove s ( h : t ) = if s = = h then t else h : remove s t
member : : String - > [ String ] - > Bool
member _ [ ] = False
member s ( h : t ) = s = = h || member s t
extract : : String - > [ String ] - > ( [ String ] , Maybe String )
extract _ [ ] = ( [ ] , Nothing )
extract s ( h : t ) =
if s = = h
then ( t , Just s )
else let ( resList , resVal ) = extract s t in ( h : resList , resVal )
reorder : : [ String ] - > [ String ] - > [ String ]
reorder x [ ] = x
reorder x ( h : t ) =
case extract h x of
( lst , Nothing ) - > reorder lst t
( lst , Just elt ) - > elt : ( reorder lst t )
These functions are templates for type-level functions.
remove :: String -> [String] -> [String]
remove _ [] = []
remove s (h:t) = if s == h then t else h : remove s t
member :: String -> [String] -> Bool
member _ [] = False
member s (h:t) = s == h || member s t
extract :: String -> [String] -> ([String], Maybe String)
extract _ [] = ([], Nothing)
extract s (h:t) =
if s == h
then (t, Just s)
else let (resList, resVal) = extract s t in (h : resList, resVal)
reorder :: [String] -> [String] -> [String]
reorder x [] = x
reorder x (h:t) =
case extract h x of
(lst, Nothing) -> reorder lst t
(lst, Just elt) -> elt : (reorder lst t)
-}
infix 4 $=
-- | Do these Factors represent the same dimension?
type family (a :: Factor *) $= (b :: Factor *) :: Bool where
(F n1 z1) $= (F n2 z2) = n1 DTE.== n2
a $= b = False
| @(Extract s lst)@ pulls the Factor that matches s out of lst , returning a
diminished list and , possibly , the extracted Factor .
--
-- @
-- Extract A [A, B, C] ==> ([B, C], Just A
-- Extract F [A, B, C] ==> ([A, B, C], Nothing)
-- @
type family Extract (s :: Factor *)
(lst :: [Factor *])
:: ([Factor *], Maybe (Factor *)) where
Extract s '[] = '( '[], Nothing )
Extract s (h ': t) =
If (s $= h)
'(t, Just h)
'(h ': Fst (Extract s t), Snd (Extract s t))
-- kind DimAnnotation = [Factor *]
-- a list of Factors forms a full annotation of a quantity's dimension
-- | Reorders a to be the in the same order as b, putting entries not in b at the end
--
-- @
Reorder [ A 1 , B 2 ] [ B 5 , A 2 ] = = > [ B 2 , A 1 ]
Reorder [ A 1 , B 2 , C 3 ] [ C 2 , A 8 ] = = > [ C 3 , A 1 , B 2 ]
Reorder [ A 1 , B 2 ] [ B 4 , C 1 , A 9 ] = = > [ B 2 , A 1 ]
-- Reorder x x ==> x
-- Reorder x [] ==> x
-- Reorder [] x ==> []
-- @
type family Reorder (a :: [Factor *]) (b :: [Factor *]) :: [Factor *] where
Reorder x x = x
Reorder '[] x = '[]
Reorder '[x] y = '[x]
Reorder x '[] = x
Reorder x (h ': t) = Reorder' (Extract h x) t
-- | Helper function in 'Reorder'
type family Reorder' (scrut :: ([Factor *], Maybe (Factor *)))
(t :: [Factor *])
:: [Factor *] where
Reorder' '(lst, Nothing) t = Reorder lst t
Reorder' '(lst, Just elt) t = elt ': (Reorder lst t)
infix 4 @~
| Check if two @[Factor * ] @s should be considered to be equal
type family (a :: [Factor *]) @~ (b :: [Factor *]) :: Constraint where
a @~ b = (Normalize (a @- b) ~ '[])
----------------------------------------------------------
--- Normalization ----------------------------------------
----------------------------------------------------------
| Take a @[Factor * ] @ and remove any @Factor@s with an exponent of 0
type family Normalize (d :: [Factor *]) :: [Factor *] where
Normalize '[] = '[]
Normalize ((F n Zero) ': t) = Normalize t
Normalize (h ': t) = h ': Normalize t
----------------------------------------------------------
--- Arithmetic -------------------------------------------
----------------------------------------------------------
infixl 6 @@+
| Adds corresponding exponents in two dimension , assuming the lists are
-- ordered similarly.
type family (a :: [Factor *]) @@+ (b :: [Factor *]) :: [Factor *] where
'[] @@+ b = b
a @@+ '[] = a
((F name z1) ': t1) @@+ ((F name z2) ': t2) = (F name (z1 #+ z2)) ': (t1 @@+ t2)
(h ': t) @@+ b = h ': (t @@+ b)
infixl 6 @+
| Adds corresponding exponents in two dimension , preserving order
type family (a :: [Factor *]) @+ (b :: [Factor *]) :: [Factor *] where
a @+ b = a @@+ (Reorder b a)
infixl 6 @@-
| Subtract exponents in two dimensions , assuming the lists are ordered
-- similarly.
type family (a :: [Factor *]) @@- (b :: [Factor *]) :: [Factor *] where
'[] @@- b = NegList b
a @@- '[] = a
((F name z1) ': t1) @@- ((F name z2) ': t2) = (F name (z1 #- z2)) ': (t1 @@- t2)
(h ': t) @@- b = h ': (t @@- b)
infixl 6 @-
| Subtract exponents in two dimensions
type family (a :: [Factor *]) @- (b :: [Factor *]) :: [Factor *] where
a @- a = '[]
a @- b = a @@- (Reorder b a)
-- | negate a single @Factor@
type family NegDim (a :: Factor *) :: Factor * where
NegDim (F n z) = F n (Z.Negate z)
-- | negate a list of @Factor@s
type family NegList (a :: [Factor *]) :: [Factor *] where
NegList '[] = '[]
NegList (h ': t) = (NegDim h ': (NegList t))
infixl 7 @*
-- | Multiplication of the exponents in a dimension by a scalar
type family (base :: [Factor *]) @* (power :: Z) :: [Factor *] where
'[] @* power = '[]
((F name num) ': t) @* power = (F name (num #* power)) ': (t @* power)
infixl 7 @/
-- | Division of the exponents in a dimension by a scalar
type family (dims :: [Factor *]) @/ (z :: Z) :: [Factor *] where
'[] @/ z = '[]
((F name num) ': t) @/ z = (F name (num #/ z)) ': (t @/ z)
| null | https://raw.githubusercontent.com/goldfirere/units/4941c3b4325783ad3c5b6486231f395279d8511e/units/Data/Metrology/Factor.hs | haskell | | This will only be used at the kind level. It holds a dimension or unit
with its exponent.
--------------------------------------------------------
- Set-like operations ----------------------------------
--------------------------------------------------------
| Do these Factors represent the same dimension?
@
Extract A [A, B, C] ==> ([B, C], Just A
Extract F [A, B, C] ==> ([A, B, C], Nothing)
@
kind DimAnnotation = [Factor *]
a list of Factors forms a full annotation of a quantity's dimension
| Reorders a to be the in the same order as b, putting entries not in b at the end
@
Reorder x x ==> x
Reorder x [] ==> x
Reorder [] x ==> []
@
| Helper function in 'Reorder'
--------------------------------------------------------
- Normalization ----------------------------------------
--------------------------------------------------------
--------------------------------------------------------
- Arithmetic -------------------------------------------
--------------------------------------------------------
ordered similarly.
similarly.
| negate a single @Factor@
| negate a list of @Factor@s
| Multiplication of the exponents in a dimension by a scalar
| Division of the exponents in a dimension by a scalar | Data / Metrology . Factor.hs
The units Package
Copyright ( c ) 2013
This file defines the Factor kind and operations over lists of Factors .
Factors represents dimensions and units raised to a power of integers , and the lists of Factors represents monomials of dimensions and units .
The units Package
Copyright (c) 2013 Richard Eisenberg
This file defines the Factor kind and operations over lists of Factors.
Factors represents dimensions and units raised to a power of integers, and the lists of Factors represents monomials of dimensions and units.
-}
# LANGUAGE TypeFamilies , DataKinds , TypeOperators , UndecidableInstances , CPP #
allow compilation even without
#ifndef MIN_VERSION_singletons
#define MIN_VERSION_singletons(a,b,c) 1
#endif
#if __GLASGOW_HASKELL__ >= 900
# OPTIONS_GHC -Wno - star - is - type #
#endif
module Data.Metrology.Factor where
import GHC.Exts (Constraint)
import Data.Metrology.Z as Z
import Data.Type.Equality as DTE
import Data.Type.Bool
#if MIN_VERSION_singletons(3,0,0)
import Prelude.Singletons
#else
import Data.Singletons.Prelude
#endif
data Factor star = F star Z
These functions are templates for type - level functions .
remove : : String - > [ String ] - > [ String ]
remove _ [ ] = [ ]
remove s ( h : t ) = if s = = h then t else h : remove s t
member : : String - > [ String ] - > Bool
member _ [ ] = False
member s ( h : t ) = s = = h || member s t
extract : : String - > [ String ] - > ( [ String ] , Maybe String )
extract _ [ ] = ( [ ] , Nothing )
extract s ( h : t ) =
if s = = h
then ( t , Just s )
else let ( resList , resVal ) = extract s t in ( h : resList , resVal )
reorder : : [ String ] - > [ String ] - > [ String ]
reorder x [ ] = x
reorder x ( h : t ) =
case extract h x of
( lst , Nothing ) - > reorder lst t
( lst , Just elt ) - > elt : ( reorder lst t )
These functions are templates for type-level functions.
remove :: String -> [String] -> [String]
remove _ [] = []
remove s (h:t) = if s == h then t else h : remove s t
member :: String -> [String] -> Bool
member _ [] = False
member s (h:t) = s == h || member s t
extract :: String -> [String] -> ([String], Maybe String)
extract _ [] = ([], Nothing)
extract s (h:t) =
if s == h
then (t, Just s)
else let (resList, resVal) = extract s t in (h : resList, resVal)
reorder :: [String] -> [String] -> [String]
reorder x [] = x
reorder x (h:t) =
case extract h x of
(lst, Nothing) -> reorder lst t
(lst, Just elt) -> elt : (reorder lst t)
-}
infix 4 $=
type family (a :: Factor *) $= (b :: Factor *) :: Bool where
(F n1 z1) $= (F n2 z2) = n1 DTE.== n2
a $= b = False
| @(Extract s lst)@ pulls the Factor that matches s out of lst , returning a
diminished list and , possibly , the extracted Factor .
type family Extract (s :: Factor *)
(lst :: [Factor *])
:: ([Factor *], Maybe (Factor *)) where
Extract s '[] = '( '[], Nothing )
Extract s (h ': t) =
If (s $= h)
'(t, Just h)
'(h ': Fst (Extract s t), Snd (Extract s t))
Reorder [ A 1 , B 2 ] [ B 5 , A 2 ] = = > [ B 2 , A 1 ]
Reorder [ A 1 , B 2 , C 3 ] [ C 2 , A 8 ] = = > [ C 3 , A 1 , B 2 ]
Reorder [ A 1 , B 2 ] [ B 4 , C 1 , A 9 ] = = > [ B 2 , A 1 ]
type family Reorder (a :: [Factor *]) (b :: [Factor *]) :: [Factor *] where
Reorder x x = x
Reorder '[] x = '[]
Reorder '[x] y = '[x]
Reorder x '[] = x
Reorder x (h ': t) = Reorder' (Extract h x) t
type family Reorder' (scrut :: ([Factor *], Maybe (Factor *)))
(t :: [Factor *])
:: [Factor *] where
Reorder' '(lst, Nothing) t = Reorder lst t
Reorder' '(lst, Just elt) t = elt ': (Reorder lst t)
infix 4 @~
| Check if two @[Factor * ] @s should be considered to be equal
type family (a :: [Factor *]) @~ (b :: [Factor *]) :: Constraint where
a @~ b = (Normalize (a @- b) ~ '[])
| Take a @[Factor * ] @ and remove any @Factor@s with an exponent of 0
type family Normalize (d :: [Factor *]) :: [Factor *] where
Normalize '[] = '[]
Normalize ((F n Zero) ': t) = Normalize t
Normalize (h ': t) = h ': Normalize t
infixl 6 @@+
| Adds corresponding exponents in two dimension , assuming the lists are
type family (a :: [Factor *]) @@+ (b :: [Factor *]) :: [Factor *] where
'[] @@+ b = b
a @@+ '[] = a
((F name z1) ': t1) @@+ ((F name z2) ': t2) = (F name (z1 #+ z2)) ': (t1 @@+ t2)
(h ': t) @@+ b = h ': (t @@+ b)
infixl 6 @+
| Adds corresponding exponents in two dimension , preserving order
type family (a :: [Factor *]) @+ (b :: [Factor *]) :: [Factor *] where
a @+ b = a @@+ (Reorder b a)
infixl 6 @@-
| Subtract exponents in two dimensions , assuming the lists are ordered
type family (a :: [Factor *]) @@- (b :: [Factor *]) :: [Factor *] where
'[] @@- b = NegList b
a @@- '[] = a
((F name z1) ': t1) @@- ((F name z2) ': t2) = (F name (z1 #- z2)) ': (t1 @@- t2)
(h ': t) @@- b = h ': (t @@- b)
infixl 6 @-
| Subtract exponents in two dimensions
type family (a :: [Factor *]) @- (b :: [Factor *]) :: [Factor *] where
a @- a = '[]
a @- b = a @@- (Reorder b a)
type family NegDim (a :: Factor *) :: Factor * where
NegDim (F n z) = F n (Z.Negate z)
type family NegList (a :: [Factor *]) :: [Factor *] where
NegList '[] = '[]
NegList (h ': t) = (NegDim h ': (NegList t))
infixl 7 @*
type family (base :: [Factor *]) @* (power :: Z) :: [Factor *] where
'[] @* power = '[]
((F name num) ': t) @* power = (F name (num #* power)) ': (t @* power)
infixl 7 @/
type family (dims :: [Factor *]) @/ (z :: Z) :: [Factor *] where
'[] @/ z = '[]
((F name num) ': t) @/ z = (F name (num #/ z)) ': (t @/ z)
|
a02bc1d75f29e121f25f37342b122abfc2c99ba323ccce68223b0587b0a8789e | stchang/macrotypes | exist.rkt | #lang s-exp macrotypes/typecheck
(extends "stlc+reco+var.rkt")
;; existential types
;; Types:
- types from stlc+reco+var.rkt
;; - ∃
;; Terms:
- terms from stlc+reco+var.rkt
;; - pack and open
(provide ∃ pack open)
(define-binding-type ∃ #:bvs = 1)
(define-typed-syntax pack
[(_ (τ:type e) as ∃τ:type)
#:with (~∃ (τ_abstract) τ_body) #'∃τ.norm
#:with [e- τ_e] (infer+erase #'e)
#:when (typecheck? #'τ_e (subst #'τ.norm #'τ_abstract #'τ_body))
(⊢ e- : ∃τ.norm)])
(define-typed-syntax open #:datum-literals (<=)
[(_ [x:id <= e_packed with X:id] e)
;; The subst below appears to be a hack, but it's not really.
;; It's the (TaPL) type rule itself that is fast and loose.
;; Leveraging the macro system's management of binding reveals this.
;;
Specifically , here is the TaPL Unpack type rule , , p366 :
;; Γ ⊢ e_packed : {∃X,τ_body}
Γ , X , x : τ_e
;; ------------------------------
;; Γ ⊢ (open [x <= e_packed with X] e) : τ_e
;;
There 's * two * separate binders , the ∃ and the let ,
;; which the rule conflates.
;;
Here 's the rule rewritten to distinguish the two binding positions :
;; Γ ⊢ e_packed : {∃X_1,τ_body}
Γ , X_???,x : τ_e
;; ------------------------------
Γ ⊢ ( open [ x < = e_packed with X_2 ] e ) : τ_e
;;
The X_1 binds references to X in .
The X_2 binds references to X in t_2 .
;; What should the X_??? be?
;;
A first guess might be to replace X _ ? ? ? with both X_1 and X_2 ,
;; so all the potentially referenced type vars are bound.
;; Γ ⊢ e_packed : {∃X_1,τ_body}
Γ , X_1,X_2,x : τ_e
;; ------------------------------
Γ ⊢ ( open [ x < = e_packed with X_2 ] e ) : τ_e
;;
;; But this example demonstrates that the rule above doesnt work:
( open [ x < = ( pack ( Int 0 ) as ( ∃ ( X_1 ) X_1 ) ) with X_2 ]
;; ((λ ([y : X_2]) y) x)
Here , x has type X_1 , y has type X_2 , but they should be the same thing ,
so we need to replace all X_1 's with X_2
;;
;; Here's the fixed rule, which is implemented here
;;
;; Γ ⊢ e_packed : {∃X_1,τ_body}
Γ , X_2:#%type , x:[X_2 / X_1]τ_body ⊢ e : τ_e
;; ------------------------------
Γ ⊢ ( open [ x < = e_packed with X_2 ] e ) : τ_e
;;
#:with [e_packed- (~∃ (Y) τ_body)] (infer+erase #'e_packed)
#:with τ_x (subst #'X #'Y #'τ_body)
#:with [(X- x-) e- τ_e] (infer/ctx+erase #'([X :: #%type] [x : τ_x]) #'e)
#:with τ_e_checked
;; err if values with type X escape open's body
(let ([ctx (syntax-local-make-definition-context)])
(syntax-local-bind-syntaxes
(list #'X-)
#'(lambda (stx)
(type-error #:src #'stx #:msg "existential type ~a is not in scope" #'X-))
ctx)
(local-expand #'τ_e 'expression '() ctx))
(⊢ (let- ([x- e_packed-]) e-) : τ_e_checked)])
| null | https://raw.githubusercontent.com/stchang/macrotypes/05ec31f2e1fe0ddd653211e041e06c6c8071ffa6/macrotypes-example/macrotypes/examples/exist.rkt | racket | existential types
Types:
- ∃
Terms:
- pack and open
The subst below appears to be a hack, but it's not really.
It's the (TaPL) type rule itself that is fast and loose.
Leveraging the macro system's management of binding reveals this.
Γ ⊢ e_packed : {∃X,τ_body}
------------------------------
Γ ⊢ (open [x <= e_packed with X] e) : τ_e
which the rule conflates.
Γ ⊢ e_packed : {∃X_1,τ_body}
------------------------------
What should the X_??? be?
so all the potentially referenced type vars are bound.
Γ ⊢ e_packed : {∃X_1,τ_body}
------------------------------
But this example demonstrates that the rule above doesnt work:
((λ ([y : X_2]) y) x)
Here's the fixed rule, which is implemented here
Γ ⊢ e_packed : {∃X_1,τ_body}
------------------------------
err if values with type X escape open's body | #lang s-exp macrotypes/typecheck
(extends "stlc+reco+var.rkt")
- types from stlc+reco+var.rkt
- terms from stlc+reco+var.rkt
(provide ∃ pack open)
(define-binding-type ∃ #:bvs = 1)
(define-typed-syntax pack
[(_ (τ:type e) as ∃τ:type)
#:with (~∃ (τ_abstract) τ_body) #'∃τ.norm
#:with [e- τ_e] (infer+erase #'e)
#:when (typecheck? #'τ_e (subst #'τ.norm #'τ_abstract #'τ_body))
(⊢ e- : ∃τ.norm)])
(define-typed-syntax open #:datum-literals (<=)
[(_ [x:id <= e_packed with X:id] e)
Specifically , here is the TaPL Unpack type rule , , p366 :
Γ , X , x : τ_e
There 's * two * separate binders , the ∃ and the let ,
Here 's the rule rewritten to distinguish the two binding positions :
Γ , X_???,x : τ_e
Γ ⊢ ( open [ x < = e_packed with X_2 ] e ) : τ_e
The X_1 binds references to X in .
The X_2 binds references to X in t_2 .
A first guess might be to replace X _ ? ? ? with both X_1 and X_2 ,
Γ , X_1,X_2,x : τ_e
Γ ⊢ ( open [ x < = e_packed with X_2 ] e ) : τ_e
( open [ x < = ( pack ( Int 0 ) as ( ∃ ( X_1 ) X_1 ) ) with X_2 ]
Here , x has type X_1 , y has type X_2 , but they should be the same thing ,
so we need to replace all X_1 's with X_2
Γ , X_2:#%type , x:[X_2 / X_1]τ_body ⊢ e : τ_e
Γ ⊢ ( open [ x < = e_packed with X_2 ] e ) : τ_e
#:with [e_packed- (~∃ (Y) τ_body)] (infer+erase #'e_packed)
#:with τ_x (subst #'X #'Y #'τ_body)
#:with [(X- x-) e- τ_e] (infer/ctx+erase #'([X :: #%type] [x : τ_x]) #'e)
#:with τ_e_checked
(let ([ctx (syntax-local-make-definition-context)])
(syntax-local-bind-syntaxes
(list #'X-)
#'(lambda (stx)
(type-error #:src #'stx #:msg "existential type ~a is not in scope" #'X-))
ctx)
(local-expand #'τ_e 'expression '() ctx))
(⊢ (let- ([x- e_packed-]) e-) : τ_e_checked)])
|
8a32c7cfb57e2a4b547041fca0dacbb5c3e123807050608ba6083d828eda68fe | Risto-Stevcev/bastet | Test_JsEndo.ml | open BsMocha.Mocha
let ( <. ) = Function.Infix.( <. )
;;
describe "Endo" (fun () -> ())
| null | https://raw.githubusercontent.com/Risto-Stevcev/bastet/030db286f57d2e316897f0600d40b34777eabba6/bastet_js/test/Test_JsEndo.ml | ocaml | open BsMocha.Mocha
let ( <. ) = Function.Infix.( <. )
;;
describe "Endo" (fun () -> ())
| |
160a2d2dae7af6793da7bdae8bb86330430915cf3323ad1d37af7e4c7ba67e8d | solita/laundry | pdf.clj | (ns laundry.pdf
(:require
[clojure.java.io :as io]
[compojure.api.sweet :as sweet :refer [POST]]
[laundry.machines :as machines :refer [badness-resp]]
[laundry.util :refer [shell-out!]]
[ring.middleware.multipart-params :refer [wrap-multipart-params]]
[ring.swagger.upload :as upload]
[ring.util.http-response :as htresp]
[schema.core :as s]
[taoensso.timbre :as timbre :refer [info]]))
(s/defn temp-file-input-stream [path :- s/Str]
(let [input (io/input-stream (io/file path))]
(proxy [java.io.FilterInputStream] [input]
(close []
(proxy-super close)
(io/delete-file path)))))
;; pdf/a converter
(s/defn api-pdf2pdfa [env, tempfile :- java.io.File]
(let [in-path (.getAbsolutePath tempfile)
out-path (str (.getAbsolutePath tempfile) ".pdf")
res (shell-out! (str (:tools env) "/pdf2pdfa")
in-path out-path)]
(.delete tempfile)
(if (= (:exit res) 0)
(htresp/content-type
(htresp/ok (temp-file-input-stream out-path))
"application/pdf")
(badness-resp "pdf2pdfa conversion failed" res))))
;; pdf → txt conversion
(s/defn api-pdf2txt [env, tempfile :- java.io.File]
(let [path (.getAbsolutePath tempfile)
out (str path ".txt")
res (shell-out! (str (:tools env) "/pdf2txt") path out)]
(.delete tempfile)
(if (= (:exit res) 0)
(htresp/content-type
(htresp/ok (temp-file-input-stream out))
"text/plain")
(badness-resp "pdf2txt conversion failed" res))))
previewer of first page
(s/defn api-pdf2jpeg [env, tempfile :- java.io.File]
(let [path (.getAbsolutePath tempfile)
out (str (.getAbsolutePath tempfile) ".jpeg")
res (shell-out! (str (:tools env) "/pdf2jpeg") path out)]
(.delete tempfile)
(if (= (:exit res) 0)
(htresp/content-type
(htresp/ok (temp-file-input-stream out))
"image/jpeg")
(badness-resp "pdf preview failed" res))))
(machines/add-api-generator!
(fn [env]
(sweet/context "/pdf" []
(POST "/pdf-preview" []
:summary "attempt to convert first page of a PDF to JPEG"
:multipart-params [file :- upload/TempFileUpload]
:middleware [wrap-multipart-params]
(let [tempfile (:tempfile file)
filename (:filename file)]
(info "PDF previewer received " filename "(" (:size file) "b)")
(.deleteOnExit tempfile) ;; cleanup if VM is terminated
(api-pdf2jpeg env tempfile)))
(POST "/pdf2txt" []
:summary "attempt to convert a PDF file to TXT"
:multipart-params [file :- upload/TempFileUpload]
:middleware [wrap-multipart-params]
(let [tempfile (:tempfile file)
filename (:filename file)]
(info "PDF2TXT converter received " filename "(" (:size file) "b)")
(.deleteOnExit tempfile) ;; cleanup if VM is terminated
(api-pdf2txt env tempfile)))
(POST "/pdf2pdfa" []
:summary "attempt to convert a PDF file to PDF/A"
:multipart-params [file :- upload/TempFileUpload]
:middleware [wrap-multipart-params]
(let [tempfile (:tempfile file)
filename (:filename file)]
(info "PDF converter received " filename "(" (:size file) "b)")
(.deleteOnExit tempfile) ;; cleanup if VM is terminated
(api-pdf2pdfa env tempfile))))))
| null | https://raw.githubusercontent.com/solita/laundry/4e1fe96ebae19cde14c3ba5396929ba1578b7715/src/laundry/pdf.clj | clojure | pdf/a converter
pdf → txt conversion
cleanup if VM is terminated
cleanup if VM is terminated
cleanup if VM is terminated | (ns laundry.pdf
(:require
[clojure.java.io :as io]
[compojure.api.sweet :as sweet :refer [POST]]
[laundry.machines :as machines :refer [badness-resp]]
[laundry.util :refer [shell-out!]]
[ring.middleware.multipart-params :refer [wrap-multipart-params]]
[ring.swagger.upload :as upload]
[ring.util.http-response :as htresp]
[schema.core :as s]
[taoensso.timbre :as timbre :refer [info]]))
(s/defn temp-file-input-stream [path :- s/Str]
(let [input (io/input-stream (io/file path))]
(proxy [java.io.FilterInputStream] [input]
(close []
(proxy-super close)
(io/delete-file path)))))
(s/defn api-pdf2pdfa [env, tempfile :- java.io.File]
(let [in-path (.getAbsolutePath tempfile)
out-path (str (.getAbsolutePath tempfile) ".pdf")
res (shell-out! (str (:tools env) "/pdf2pdfa")
in-path out-path)]
(.delete tempfile)
(if (= (:exit res) 0)
(htresp/content-type
(htresp/ok (temp-file-input-stream out-path))
"application/pdf")
(badness-resp "pdf2pdfa conversion failed" res))))
(s/defn api-pdf2txt [env, tempfile :- java.io.File]
(let [path (.getAbsolutePath tempfile)
out (str path ".txt")
res (shell-out! (str (:tools env) "/pdf2txt") path out)]
(.delete tempfile)
(if (= (:exit res) 0)
(htresp/content-type
(htresp/ok (temp-file-input-stream out))
"text/plain")
(badness-resp "pdf2txt conversion failed" res))))
previewer of first page
(s/defn api-pdf2jpeg [env, tempfile :- java.io.File]
(let [path (.getAbsolutePath tempfile)
out (str (.getAbsolutePath tempfile) ".jpeg")
res (shell-out! (str (:tools env) "/pdf2jpeg") path out)]
(.delete tempfile)
(if (= (:exit res) 0)
(htresp/content-type
(htresp/ok (temp-file-input-stream out))
"image/jpeg")
(badness-resp "pdf preview failed" res))))
(machines/add-api-generator!
(fn [env]
(sweet/context "/pdf" []
(POST "/pdf-preview" []
:summary "attempt to convert first page of a PDF to JPEG"
:multipart-params [file :- upload/TempFileUpload]
:middleware [wrap-multipart-params]
(let [tempfile (:tempfile file)
filename (:filename file)]
(info "PDF previewer received " filename "(" (:size file) "b)")
(api-pdf2jpeg env tempfile)))
(POST "/pdf2txt" []
:summary "attempt to convert a PDF file to TXT"
:multipart-params [file :- upload/TempFileUpload]
:middleware [wrap-multipart-params]
(let [tempfile (:tempfile file)
filename (:filename file)]
(info "PDF2TXT converter received " filename "(" (:size file) "b)")
(api-pdf2txt env tempfile)))
(POST "/pdf2pdfa" []
:summary "attempt to convert a PDF file to PDF/A"
:multipart-params [file :- upload/TempFileUpload]
:middleware [wrap-multipart-params]
(let [tempfile (:tempfile file)
filename (:filename file)]
(info "PDF converter received " filename "(" (:size file) "b)")
(api-pdf2pdfa env tempfile))))))
|
ea22685aaab76f999619535d07a91f4dd3aa1ef7562bc8c2123ca25cca120a88 | kudu-dynamics/blaze | Solver.hs | HLINT ignore " Use if "
{-# LANGUAGE RankNTypes #-}
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
module Blaze.Pil.Solver
( module Blaze.Pil.Solver
, module Blaze.Types.Pil.Solver
, module Exports
) where
import Blaze.Prelude hiding (error, zero, natVal, isSigned)
import qualified Prelude as P
import qualified Blaze.Types.Pil as Pil
import Blaze.Types.Pil ( Expression
, PilVar
, Statement
)
import qualified Data.HashMap.Strict as HashMap
import Blaze.Types.Pil.Solver
import qualified Blaze.Pil.Solver.List as BSList
import Data.SBV.Tools.Overflow (bvAddO, bvSubO)
import qualified Data.SBV.Trans as Exports (z3, cvc4)
import qualified Data.SBV.Trans as SBV
import Data.SBV.Trans ( (.==)
, (./=)
, (.>)
, (.>=)
, (.<)
, (.<=)
, (.||)
, (.~|)
, sPopCount
, SFiniteBits
, SInteger
, SInt8
, SInt16
, SInt32
, SInt64
, SInt
, SWord8
, SWord16
, SWord32
, SWord64
, SWord
, WordN
)
import Data.SBV.Dynamic as D hiding (Solver)
import qualified Blaze.Types.Pil.Checker as Ch
import qualified Blaze.Pil.Checker as Ch
import Blaze.Types.Pil.Checker ( DeepSymType )
import Data.SBV.Internals (SBV(SBV), unSBV)
stubbedFunctionConstraintGen :: HashMap Text (SVal -> [SVal] -> Solver ())
stubbedFunctionConstraintGen = HashMap.fromList
[ ( "memcpy"
, \_r args -> case args of
[dest, src, n] -> do
guardList dest
guardList src
guardIntegral n
n' <- boundedToSInteger n
constrain_ $ BSList.length dest .>= n'
constrain_ $ BSList.length src .>= n'
constrain _ $ r .== ( SList.take n ' src .++ SList.drop n ' dest )
xs -> throwError . StubbedFunctionArgError "memcpy" 3 $ length xs
)
, ( "abs"
, \r args -> case args of
[n] -> do
guardIntegral n
constrain $ r `svEqual` svAbs n
xs -> throwError . StubbedFunctionArgError "abs" 1 $ length xs
)
]
pilVarName :: PilVar -> Text
pilVarName pv = pv ^. #symbol
<> maybe "" (("@"<>) . view (#func . #name)) mCtx
<> maybe "" (("."<>) . show . f . view #ctxId) mCtx
where
f (Pil.CtxId n) = n
mCtx :: Maybe Pil.Ctx
mCtx = pv ^. #ctx
-- | Convert a `DeepSymType` to an SBV Kind.
-- Any symbolic Sign types are concretized to False.
deepSymTypeToKind :: DeepSymType -> Solver Kind
deepSymTypeToKind t = case t of
Ch.DSVar v -> err $ "Can't convert DSVar " <> show v
Ch.DSRecursive _s pt -> deepSymTypeToKind (Ch.DSType pt)
-- ignore recursion, and hope for the best
Ch.DSType pt -> case pt of
Ch.TArray _alen _etype -> err "Array should be handled only when wrapped in Ptr"
Ch.TBool -> return KBool
Ch.TChar bw -> KBounded False <$> getBitWidth bw
Ch.TInt bw s -> KBounded <$> (getSigned s <|> pure False) <*> getBitWidth bw
Ch.TFloat _ -> return KDouble
SBV only has float or double , so we 'll just pick double
Ch.TBitVector bw -> KBounded False <$> getBitWidth bw
Ch.TPointer bw _pt -> KBounded False <$> getBitWidth bw
-- Ch.TPointer bwt ptrElemType -> case ptrElemType of
Ch . DSType ( Ch . TArray _ alen arrayElemType ) - >
-- alen constraint is handled at sym var creation
KList < $ > deepSymTypeToKind arrayElemType
-- -- TODO: structs. good luck
_ - > KBounded < $ > pure False < * > getBitWidth bwt
Ch.TCString _ -> return KString
Ch.TRecord _ -> err "Can't handle Record type"
Ch.TUnit -> return $ KTuple []
Ch.TBottom s -> err $ "TBottom " <> show s
Ch.TFunction _ _ -> err "Can't handle Function type"
where
getBitWidth :: Maybe Bits -> Solver Int
getBitWidth (Just b) = case fromIntegral b of
0 -> err "Bitwidth cannot be zero."
n -> return n
-- TODO: Will this show the error in context or do we need to manage that ourselves here?
getBitWidth Nothing = err "Can't get bitwidth."
getSigned :: Maybe Bool -> Solver Bool
getSigned (Just s) = return s
getSigned Nothing = err "Can't get signedness."
err :: forall a. Text -> Solver a
err = throwError . DeepSymTypeConversionError t
makeSymVar :: Maybe Text -> DeepSymType -> Kind -> Solver SVal
makeSymVar nm _dst k = do
case cs <$> nm of
Just n -> D.svNewVar k n
Nothing -> D.svNewVar_ k
-- v <- case cs <$> nm of
-- Just n -> D.svNewVar k n
-- Nothing -> D.svNewVar_ k
-- case dst of
Ch . DSType ( Ch . TPointer _ ( Ch . DSType ( Ch . TArray ( Ch . DSType ( Ch . ) ) _ ) ) ) - > do
-- constrain_ $ fromIntegral n .== BSList.length v
-- _ -> return ()
-- return v
makeSymVarOfType :: Maybe Text -> DeepSymType -> Solver SVal
makeSymVarOfType nm dst = deepSymTypeToKind dst >>= makeSymVar nm dst
catchIfLenient :: (SolverError -> SolverError) -> Solver a -> (SolverError -> Solver a) -> Solver a
catchIfLenient wrapErr m handleError = do
solverLeniency <- view #leniency
catchError m $ \e ->
case solverLeniency of
AbortOnError -> throwError e
SkipStatementsWithErrors -> warn (wrapErr e) >> handleError e
catchIfLenientForPilVar :: PilVar -> Solver () -> Solver ()
catchIfLenientForPilVar pv m = catchIfLenient (PilVarConversionError $ pv ^. #symbol) m (const $ return ())
catchIfLenientForStmt :: Solver () -> Solver ()
catchIfLenientForStmt m = do
sindex <- use #currentStmtIndex
catchIfLenient (StmtError sindex) m (const $ return ())
declarePilVars :: Solver ()
declarePilVars = ask >>= mapM_ f . HashMap.toList . typeEnv
where
f (pv, dst) = catchIfLenientForPilVar pv $ do
sval <- makeSymVarOfType (Just nm) dst
#varNames %= HashMap.insert pv nm
#varMap %= HashMap.insert pv sval
where
nm = pilVarName pv
constInt :: Bits -> Integer -> SVal
constInt w = svInteger (KBounded True $ fromIntegral w)
constWord :: Bits -> Integer -> SVal
constWord w = svInteger (KBounded False $ fromIntegral w)
constFloat :: Double -> SVal
constFloat = svDouble
constInteger :: Integral a => a -> SVal
constInteger = svInteger KUnbounded . fromIntegral
| requires : > = bv
-- : kindOf bv is bounded
zeroExtend :: Bits -> SVal -> SVal
zeroExtend targetWidth bv = case kindOf bv of
(KBounded _s w)
| tw == w -> svUnsign bv
| tw > w -> svJoin ext $ svUnsign bv
| otherwise -> P.error "zeroExtend: target width less than bitvec width"
where
ext = svInteger (KBounded False $ fromIntegral targetWidth - w) 0
_ -> P.error "zeroExtend: arg not bitvec"
where
tw = fromIntegral targetWidth
-- | most significant bit
-- requires: kindOf bv is Bounded
: width bv > 0
msb :: SVal -> SVal
msb bv = case kindOf bv of
(KBounded _ w)
| w == 0 -> P.error "msb: bv has zero width"
| otherwise -> svTestBit bv (w - 1)
_ -> P.error "msb: bv must be Bounded kind"
| requires : > = bv
-- : kindOf bv is bounded
: width bv > 0
signExtend :: Bits -> SVal -> SVal
signExtend targetWidth bv = case kindOf bv of
(KBounded _s w)
| tw == w -> svSign bv
| tw > w -> svJoin ext $ svSign bv
| otherwise -> P.error "signExtend: target width less than bitvec width"
where
tw = fromIntegral targetWidth
zero = svInteger (KBounded True $ fromIntegral targetWidth - w) 0
ones = svNot zero
ext = svIte (msb bv) ones zero
_ -> P.error "signExtend: bv must be Bounded kind"
signExtendSVal :: SVal -> SVal -> SVal
signExtendSVal tw bv = case kindOf bv of
(KBounded _s w) -> svIte (tw `svGreaterEq` constWord 32 (toInteger w)) (svJoin ext bv) bv
where
subtract an additional 1 off since we are zero inclusive in createExtendBuf
extWidth = tw `svMinus` constWord 32 (toInteger w) `svMinus` constWord 32 1
zeros = buf
ones = svNot buf
ext = svIte (msb bv) ones zeros
buf = createExtendBuf extWidth
createExtendBuf :: SVal -> SVal
createExtendBuf width = svIte (width `svEqual` constWord 32 0)
(constInt 1 0)
$ svJoin (createExtendBuf $ width `svMinus` constWord 32 1) $ constInt 1 0
_ -> P.error "signExtend: bv must be Bounded kind"
zeroExtendSVal :: SVal -> SVal -> SVal
zeroExtendSVal tw bv = case kindOf bv of
(KBounded _s w) -> svIte (tw `svGreaterEq` constWord 32 (toInteger w)) (svJoin buf bv) bv
where
extWidth = tw `svMinus` constWord 32 (toInteger w) `svMinus` constWord 32 1
buf = createExtendBuf extWidth
createExtendBuf :: SVal -> SVal
createExtendBuf width = svIte (width `svEqual` constWord 32 0)
(constWord 1 0)
$ svJoin (createExtendBuf $ width `svMinus` constWord 32 1) $ constWord 1 0
_ -> P.error "signExtend: bv must be Bounded kind"
-- | Extends b to match a's width.
-- | requires: width a >= width b
-- : kindOf a, b are bounded
: widths a , b > 0
matchBoundedWidth :: HasKind a => a -> SVal -> SVal
matchBoundedWidth a b = case (kindOf a, kindOf b) of
(KBounded _ w1, KBounded s w2)
| w1 == w2 -> b
| w1 > w2 -> if s then signExtend w1' b else zeroExtend w1' b
| otherwise -> lowPart_ w1' b
where
w1' = fromIntegral w1
_ -> P.error "matchBoundedWidth: a and b must be kind Bounded"
| Matches second to first bounded integral sign
-- error if either not bounded.
matchSign :: HasKind a => a -> SVal -> SVal
matchSign a b = case (kindOf a, kindOf b) of
(KBounded s1 _, KBounded s2 _)
| s1 == s2 -> b
| otherwise -> if s1 then svSign b else svUnsign b
_ -> P.error "matchSign: a and b must be kind Bounded"
matchIntegral :: HasKind a => a -> SVal -> SVal
matchIntegral a b = matchBoundedWidth a (matchSign a b)
-- if x is signed, converts to unsigned, then runs f, then converts result to signed
runAsUnsigned :: (SVal -> SVal) -> SVal -> SVal
runAsUnsigned f x = case kindOf x of
KBounded True _ -> svSign (f (svUnsign x))
KBounded False _ -> f x
_ -> P.error "runAsSigned: expected KBounded"
isSigned :: SVal -> Bool
isSigned x = case kindOf x of
KBounded s _ -> s
k -> P.error $ "isSigned expected KBounded, got " <> show k
-- this is pretty much just copied out of Data.SBV
sSignedShiftArithRight :: SVal -> SVal -> SVal
sSignedShiftArithRight x i
| isSigned i = P.error "sSignedShiftArithRight: shift amount should be unsigned"
| isSigned x = svShiftRight x i
| otherwise = svIte (msb x)
(svNot (svShiftRight (svNot x) i))
(svShiftRight x i)
-- TODO: convert SVals to unsigned.
the svJoin gets messed up if these are signed
TODO : has the above TODO been done ? check to make sure
updateBitVec :: BitOffset -> SVal -> SVal -> Solver SVal
updateBitVec boff src dest = case (kindOf dest, kindOf src) of
(KBounded destSign wdest, KBounded _ wsrc)
| wsrc + off > wdest -> throwError . ErrorMessage $ "updateBitVec: src width + offset must be less than dest width"
| otherwise -> do
destHighPart <- highPart (fromIntegral $ wdest - (off + wsrc)) dest'
destLowPart <- lowPart (fromIntegral boff) dest'
return . bool svUnsign svSign destSign
$ destHighPart `svJoin` src' `svJoin` destLowPart
where
dest' = svUnsign dest
src' = svUnsign src
off = fromIntegral boff
_ -> throwError . ErrorMessage $ "updateBitVec: both args must be KBounded"
safeExtract :: Bits -> Bits -> SVal -> Solver SVal
safeExtract endIndex' startIndex' var = case k of
(KBounded _ w)
| endIndex' >= fromIntegral w -> error "endIndex out of bounds"
| startIndex' < 0 -> error "startIndex out of bounds"
| otherwise -> return $ svExtract (fromIntegral endIndex') (fromIntegral startIndex') var
_ -> error "must be KBounded"
where
k = kindOf var
error msg' = throwError $ ExtractError
{ endIndex = endIndex'
, startIndex = startIndex'
, kind = k
, msg = msg'
}
lowPart :: Bits -> SVal -> Solver SVal
lowPart n src = case kindOf src of
KBounded _ w
| n > fromIntegral w -> throwError . ErrorMessage $ "lowPart: cannot get part greater than whole"
| otherwise -> return $ lowPart_ n src
_ -> P.error "lowPart: src must be KBounded"
lowPart_ :: Bits -> SVal -> SVal
lowPart_ n src = case kindOf src of
KBounded _ _w -> svExtract (fromIntegral n - 1) 0 src
_ -> P.error "lowPart: src must be KBounded"
highPart :: Bits -> SVal -> Solver SVal
highPart n src = case kindOf src of
KBounded _ w
| n > fromIntegral w -> throwError . ErrorMessage $ "highPart: cannot get part greater than whole"
| otherwise -> return $ highPart_ n src
_ -> P.error "highPart: src must be KBounded"
-- TODO: guard that n is greater than 0
-- and that w is big enough
highPart_ :: Bits -> SVal -> SVal
highPart_ n src = case kindOf src of
KBounded _ w -> svExtract (w - 1) (w - fromIntegral n) src
_ -> P.error "lowPart: src must be KBounded"
rotateWithCarry :: (SVal -> SVal -> SVal) -> SVal -> SVal -> SVal -> SVal
rotateWithCarry rotFunc src rot c = case kindOf src of
KBounded _ w -> svExtract w 1 $ rotFunc (svJoin src c) rot
_ -> P.error "rotateWithCarry: src is not KBounded"
-- with carry works like regular rotate with carry appended to the end
rotateRightWithCarry :: SVal -> SVal -> SVal -> SVal
rotateRightWithCarry = rotateWithCarry svRotateRight
rotateLeftWithCarry :: SVal -> SVal -> SVal -> SVal
rotateLeftWithCarry = rotateWithCarry svRotateLeft
toSFloat :: SVal -> Solver SBV.SDouble
toSFloat x = guardFloat x >> return (SBV x)
toSFloat' :: SVal -> SBV.SDouble
toSFloat' x = case kindOf x of
KDouble -> SBV x
_ -> P.error "toSFloat: x is not KDouble kind"
toSBool' :: SVal -> SBool
toSBool' x = case kindOf x of
KBool -> SBV x
_ -> P.error "toSBool: x is not KBool kind"
toSBool :: SVal -> Solver SBool
toSBool x = guardBool x >> return (SBV x)
toSList : : HasKind a = > SVal - > Solver ( SList a )
-- toSList x
-- sizeOf :: SVal -> Solver SInteger
-- sizeOf x = case k of
KBool - > throwError $ SizeOfError k
KBounded _ w - > s
-- KUnbounded -> error "SBV.HasKind.intSizeOf((S)Integer)"
-- KReal -> error "SBV.HasKind.intSizeOf((S)Real)"
KFloat - > 32
KDouble - > 64
-- KFP i j -> i + j
-- KRational -> error "SBV.HasKind.intSizeOf((S)Rational)"
KUserSort s _ - > error $ " SBV.HasKind.intSizeOf : Uninterpreted sort : " + + s
> error " SBV.HasKind.intSizeOf((S)Double ) "
KChar - > error " SBV.HasKind.intSizeOf((S)Char ) "
-- KList ek -> error $ "SBV.HasKind.intSizeOf((S)List)" ++ show ek
KSet error $ " SBV.HasKind.intSizeOf((S)Set ) " + + show ek
-- KTuple tys -> error $ "SBV.HasKind.intSizeOf((S)Tuple)" ++ show tys
k - > error $ " SBV.HasKind.intSizeOf((S)Maybe ) " + + show k
-- KEither k1 k2 -> error $ "SBV.HasKind.intSizeOf((S)Either)" ++ show (k1, k2)
-- where
-- k = kindOf x
boolToInt' :: SVal -> SVal
boolToInt' b = svIte b (svInteger k 1) (svInteger k 0)
where
k = KBounded False 1
converts bool to 0 or 1 integral of Kind k
boolToInt :: Kind -> SVal -> Solver SVal
boolToInt (KBounded s w) b = do
guardBool b
return $ svIte b (svInteger (KBounded s w) 1) (svInteger (KBounded s w) 0)
boolToInt t _ = throwError . ErrorMessage $ "boolToInt expected KBounded, got " <> show t
constrain_ :: SBool -> Solver ()
constrain_ b = do
ctx <- ask
case ctx ^. #useUnsatCore of
False -> SBV.constrain b
True -> do
i <- use #currentStmtIndex
SBV.namedConstraint ("stmt_" <> show i) b
constrain :: SVal -> Solver ()
constrain = constrain_ <=< toSBool
newSymVar :: Text -> Kind -> Solver SVal
newSymVar name' k = D.svNewVar k (cs name')
-------------------------------
guardBool :: HasKind a => a -> Solver ()
guardBool x = case k of
KBool -> return ()
_ -> throwError $ GuardError "guardBool" [k] "Not Bool"
where
k = kindOf x
svBoolNot :: SVal -> SVal
svBoolNot = unSBV . SBV.sNot . toSBool'
guardIntegral :: HasKind a => a -> Solver ()
guardIntegral x = case k of
KBounded _ _ -> return ()
_ -> throwError $ GuardError "guardIntegral" [k] "Not integral"
where
k = kindOf x
boundedToSInteger :: SVal -> Solver SInteger
boundedToSInteger x = do
guardIntegral x
case kindOf x of
KBounded True 8 -> return $ SBV.sFromIntegral (SBV x :: SInt8)
KBounded True 16 -> return $ SBV.sFromIntegral (SBV x :: SInt16)
KBounded True 32 -> return $ SBV.sFromIntegral (SBV x :: SInt32)
KBounded True 64 -> return $ SBV.sFromIntegral (SBV x :: SInt64)
KBounded True 128 -> return $ SBV.sFromIntegral (SBV x :: SInt 128)
KBounded False 8 -> return $ SBV.sFromIntegral (SBV x :: SWord8)
KBounded False 16 -> return $ SBV.sFromIntegral (SBV x :: SWord16)
KBounded False 32 -> return $ SBV.sFromIntegral (SBV x :: SWord32)
KBounded False 64 -> return $ SBV.sFromIntegral (SBV x :: SWord64)
KBounded False 128 -> return $ SBV.sFromIntegral (SBV x :: SWord 128)
t -> throwError . ConversionError $ "Cannot convert type " <> show t
guardFloat :: HasKind a => a -> Solver ()
guardFloat x = case k of
KDouble -> return ()
_ -> throwError $ GuardError "guardFloat" [k] "Not Double"
where
k = kindOf x
guardIntegralFirstWidthNotSmaller :: (HasKind a, HasKind b)
=> a -> b -> Solver ()
guardIntegralFirstWidthNotSmaller x y = case (kx, ky) of
(KBounded _ w1, KBounded _ w2)
| w1 >= w2 -> return ()
| otherwise -> throwError $ GuardError "guardIntegralFirstWidthNotSmaller"
[kx, ky]
"Second arg width is greater than first"
_ -> throwError $ GuardError "guardIntegralFirstWidthNotSmaller"
[kx, ky]
"Both must be KBounded"
where
kx = kindOf x
ky = kindOf y
guardSameKind :: (HasKind a, HasKind b) => a -> b -> Solver ()
guardSameKind x y = if kindOf x == kindOf y
then return ()
else throwError $ GuardError "guardSameKind" [kindOf x, kindOf y] "not same kind"
guardList :: (HasKind a) => a -> Solver ()
guardList x = case kindOf x of
KList _ -> return ()
_ -> throwError $ GuardError "guardList" [kindOf x] "not a list"
lookupVarSym :: PilVar -> Solver SVal
lookupVarSym pv = do
vm <- use #varMap
maybe err return $ HashMap.lookup pv vm
where
err = throwError . ErrorMessage
$ "lookupVarSym failed for var '" <> pilVarName pv <> "'"
bitsToOperationSize :: Bits -> Pil.OperationSize
bitsToOperationSize = Pil.OperationSize . (`div` 8) . fromIntegral
dstToExpr :: DSTExpression -> Expression
dstToExpr (Ch.InfoExpression (info, _) op) = Pil.Expression (bitsToOperationSize $ info ^. #size) $ dstToExpr <$> op
catchAndWarnStmtDef :: a -> Solver a -> Solver a
catchAndWarnStmtDef def m = catchError m $ \e -> do
si <- use #currentStmtIndex
warn $ StmtError si e
return def
catchAndWarnStmt :: Solver () -> Solver ()
catchAndWarnStmt m = catchError m $ \e -> do
si <- use #currentStmtIndex
warn $ StmtError si e
warn :: SolverError -> Solver ()
warn e = #errors %= (e :)
svAggrAnd :: [SVal] -> SVal
svAggrAnd = foldr svAnd svTrue
| Convert an ' SVal ' to an ' SBV a ' , where ' a ' is one of ' Word8 ' , ' Word16 ' ,
' ' , ' Word64 ' , and then run a function with this wrapped SBV . If ' SVal '
-- is not one of these supported sizes, then the result will be @Just (f ...)@,
-- otherwise 'Nothing' is returned
liftSFiniteBits :: (forall a. SFiniteBits a => SBV a -> b) -> SVal -> Maybe b
liftSFiniteBits f sv =
-- Can easily extend this if we need to support more sizes later by adding
more @WordN@ cases
case intSizeOf sv of
1 -> Just . f $ (SBV sv :: SBV (WordN 1))
8 -> Just . f $ (SBV sv :: SBV Word8)
16 -> Just . f $ (SBV sv :: SBV Word16)
32 -> Just . f $ (SBV sv :: SBV Word32)
64 -> Just . f $ (SBV sv :: SBV Word64)
_ -> Nothing
-- | Like 'liftSFiniteBits' but discard the phantom type information of the 'SBV _'
result and return a typeless ' SVal '
liftSFiniteBits' :: (forall a. SFiniteBits a => SBV a -> SBV b) -> SVal -> Maybe SVal
liftSFiniteBits' sv f = (\(SBV x) -> x) <$> liftSFiniteBits sv f
solveStmt :: Statement (Ch.InfoExpression (Ch.SymInfo, Maybe DeepSymType))
-> Solver ()
solveStmt = catchIfLenientForStmt . solveStmt_ solveExpr
| Generates for statement , using provided expr solver
solveStmt_ :: (DSTExpression -> Solver SVal)
-> Statement (Ch.InfoExpression (Ch.SymInfo, Maybe DeepSymType))
-> Solver ()
solveStmt_ solveExprFunc stmt = catchAndWarnStmt $ case stmt of
Pil.Def x -> do
pv <- lookupVarSym $ x ^. #var
expr <- solveExprFunc $ x ^. #value
guardSameKind pv expr
constrain $ pv `svEqual` expr
Pil.Constraint x ->
solveExprFunc (x ^. #condition) >>= constrain
Pil.Store x -> do
let exprAddr = dstToExpr $ x ^. #addr
sValue <- solveExprFunc $ x ^. #value
let insertStoreVar Nothing = Just [sValue]
insertStoreVar (Just xs) = Just $ sValue : xs
modify (\s -> s { stores = HashMap.alter insertStoreVar exprAddr $ s ^. #stores } )
return ()
Pil.DefPhi x -> do
pv <- lookupVarSym $ x ^. #dest
eqs <- mapM (f pv) $ x ^. #src
constrain_ $ SBV.sOr eqs
where
f pv y = do
pv2 <- lookupVarSym y
guardSameKind pv pv2
toSBool $ pv `svEqual` pv2
_ -> return ()
solveExpr :: DSTExpression -> Solver SVal
solveExpr = solveExpr_ solveExpr
| Creates SVal that represents expression .
This type of InfoExpression is in a TypeReport
solveExpr_ :: (DSTExpression -> Solver SVal) -> DSTExpression -> Solver SVal
-- solveExpr (Ch.InfoExpression ((Ch.SymInfo _ xsym), Nothing) _) = \
solverError $ " No type for " < > show xsym
solveExpr_ solveExprRec (Ch.InfoExpression (Ch.SymInfo sz xsym, mdst) op) = catchFallbackAndWarn $ case op of
Pil.ADC x -> integralBinOpWithCarry x $ \a b c -> a `svPlus` b `svPlus` c
Pil.ADD x -> integralBinOpMatchSecondArgToFirst x svPlus
Pil.ADD_WILL_CARRY x ->
integralBinOpMatchSecondArgToFirst x $ \a b -> unSBV $ uncurry (.||) $ bvAddO (svUnsign a) (svUnsign b)
Pil.ADD_WILL_OVERFLOW x ->
integralBinOpMatchSecondArgToFirst x $ \a b -> unSBV $ uncurry (.||) $ bvAddO (svSign a) (svSign b)
Pil.ARRAY_ADDR x -> do
base <- solveExprRec (x ^. #base)
index <- solveExprRec (x ^. #index)
guardIntegral base
guardIntegral index
let stride = svInteger (kindOf base) . fromIntegral $ x ^. #stride
pure $ base `svPlus` (zeroExtend (fromIntegral $ intSizeOf base) index `svTimes` stride)
Pil.AND x -> integralBinOpMatchSecondArgToFirst x svAnd
Pil.ASR x -> integralBinOpUnrelatedArgs x sSignedShiftArithRight
Pil.BOOL_TO_INT x -> do
b <- solveExprRec $ x ^. #src
guardBool b
k <- getRetKind
guardIntegral k
return $ svIte b (svInteger k 1) (svInteger k 0)
-- TODO: stub standard libs here
Pil.CALL x -> do
fcg <- view #funcConstraintGen <$> ask
case (x ^. #name) >>= flip HashMap.lookup fcg of
Nothing -> fallbackAsFreeVar
Just gen -> do
args <- mapM solveExprRec $ x ^. #params
r <- fallbackAsFreeVar
gen r args
return r
Pil.CEIL x -> floatUnOp x $ SBV.fpRoundToIntegral SBV.sRoundTowardPositive
Pil.CMP_E x -> binOpEqArgsReturnsBool x svEqual
Pil.CMP_NE x -> binOpEqArgsReturnsBool x svNotEqual
Pil.CMP_SGE x -> binOpEqArgsReturnsBool x svGreaterEq
Pil.CMP_SGT x -> binOpEqArgsReturnsBool x svGreaterThan
Pil.CMP_SLE x -> binOpEqArgsReturnsBool x svLessEq
Pil.CMP_SLT x -> binOpEqArgsReturnsBool x svLessThan
-- the signed and unsigned versions use the same smt func
-- the type checker should guarantee that the args are correctly signed or unsigned
but maybe TODO should be to convert signed SVal to unsigned SVal if necessary
Pil.CMP_UGE x -> binOpEqArgsReturnsBool x svGreaterEq
Pil.CMP_UGT x -> binOpEqArgsReturnsBool x svGreaterThan
Pil.CMP_ULE x -> binOpEqArgsReturnsBool x svLessEq
Pil.CMP_ULT x -> binOpEqArgsReturnsBool x svLessThan
Pil.CONST x -> do
k <- getRetKind
guardIntegral k
return . svInteger k . fromIntegral $ x ^. #constant
Pil.CONST_BOOL x -> return . svBool $ x ^. #constant
Pil.CONST_FLOAT x -> return . svDouble $ x ^. #constant
Pil.CONST_PTR x ->
return . svInteger (KBounded False $ fromIntegral sz)
. fromIntegral $ x ^. #constant
Pil.ConstStr x -> return . unSBV $ SBV.literal (cs $ x ^. #value :: String)
Pil.ConstFuncPtr x -> return . svInteger (KBounded False $ fromIntegral sz)
. fromIntegral $ x ^. #address
TODO : do we need to do anything special for the DP versions ?
Pil.DIVS x -> integralBinOpMatchSecondArgToFirst x svDivide
Pil.DIVS_DP x -> divOrModDP True x svDivide
Pil.DIVU x -> integralBinOpMatchSecondArgToFirst x svDivide
Pil.DIVU_DP x -> divOrModDP False x svDivide
Pil.Extract _ -> unhandled "Extract"
Pil.ExternPtr _ -> unhandled "ExternPtr"
Pil.FABS x -> floatUnOp x SBV.fpAbs
Pil.FADD x -> floatBinOp x $ SBV.fpAdd SBV.sRoundNearestTiesToAway
Pil.FDIV x -> floatBinOp x $ SBV.fpDiv SBV.sRoundNearestTiesToAway
Pil.FCMP_E x -> floatBinOpReturnsBool x (.==)
Pil.FCMP_GE x -> floatBinOpReturnsBool x (.>=)
Pil.FCMP_GT x -> floatBinOpReturnsBool x (.>)
Pil.FCMP_LE x -> floatBinOpReturnsBool x (.<=)
Pil.FCMP_LT x -> floatBinOpReturnsBool x (.<)
Pil.FCMP_O x -> floatBinOpReturnsBool x $ \a b -> SBV.fpIsNaN a .~| SBV.fpIsNaN b
Pil.FCMP_NE x -> floatBinOpReturnsBool x (./=)
Pil.FCMP_UO x -> floatBinOpReturnsBool x $ \a b -> SBV.fpIsNaN a .|| SBV.fpIsNaN b
-- TODO: a FIELD_ADDR should only be used inside a LOAD or Store, and hence
should never be " solved " . But maybe field could be added to ?
-- Pil.FIELD_ADDR x -> do
Pil.FLOAT_CONV x -> do
y <- solveExprRec $ x ^. #src
case kindOf y of
(KBounded False 32) -> return
. unSBV
. SBV.toSDouble SBV.sRoundNearestTiesToAway
. SBV.sWord32AsSFloat
. SBV $ y
(KBounded False 64) -> return
. unSBV
. SBV.sWord64AsSDouble
. SBV $ y
k -> throwError . ErrorMessage
$ "FLOAT_CONV expecting Unsigned integral of 32 or 64 bit width, got"
<> show k
Pil.FLOAT_TO_INT x -> do
k <- getRetKind
y <- solveExprRec $ x ^. #src
guardFloat y
case k of
(KBounded False 64) -> unSBV <$> (f y :: Solver SBV.SWord64)
(KBounded False 32) -> unSBV <$> (f y :: Solver SBV.SWord32)
(KBounded False 16) -> unSBV <$> (f y :: Solver SBV.SWord16)
(KBounded False 8) -> unSBV <$> (f y :: Solver SBV.SWord8)
(KBounded True 64) -> unSBV <$> (f y :: Solver SBV.SInt64)
(KBounded True 32) -> unSBV <$> (f y :: Solver SBV.SInt32)
(KBounded True 16) -> unSBV <$> (f y :: Solver SBV.SInt16)
(KBounded True 8) -> unSBV <$> (f y :: Solver SBV.SInt8)
_ -> throwError . ErrorMessage
$ "FLOAT_TO_INT: unsupported return type: " <> show k
where
f :: forall a. (SBV.IEEEFloatConvertible a) => SVal -> Solver (SBV a)
f = return . SBV.fromSDouble SBV.sRoundNearestTiesToAway . SBV
Pil.FLOOR x -> floatUnOp x $ SBV.fpRoundToIntegral SBV.sRoundTowardNegative
Pil.FMUL x -> floatBinOp x $ SBV.fpMul SBV.sRoundNearestTiesToAway
Pil.FNEG x -> floatUnOp x SBV.fpNeg
Pil.FSQRT x -> floatUnOp x $ SBV.fpSqrt SBV.sRoundNearestTiesToAway
Pil.FTRUNC x -> floatUnOp x $ SBV.fpRoundToIntegral SBV.sRoundTowardZero
Pil.FSUB x -> floatBinOp x $ SBV.fpSub SBV.sRoundNearestTiesToAway
Pil.IMPORT x -> return
. svInteger (KBounded False $ fromIntegral sz)
. fromIntegral
$ x ^. #constant
Pil.INT_TO_FLOAT x -> do
y <- solveExprRec $ x ^. #src
let f :: forall a. SBV.IEEEFloatConvertible a => SBV a -> Solver SVal
f = return . unSBV . SBV.toSDouble SBV.sRoundNearestTiesToAway
case kindOf y of
(KBounded True 8) -> f (SBV y :: SBV.SInt8)
(KBounded True 16) -> f (SBV y :: SBV.SInt16)
(KBounded True 32) -> f (SBV y :: SBV.SInt32)
(KBounded True 64) -> f (SBV y :: SBV.SInt64)
(KBounded False 8) -> f (SBV y :: SBV.SWord8)
(KBounded False 16) -> f (SBV y :: SBV.SWord16)
(KBounded False 32) -> f (SBV y :: SBV.SWord32)
(KBounded False 64) -> f (SBV y :: SBV.SWord64)
k -> throwError . ErrorMessage
$ "INT_TO_FLOAT: unsupported return type: " <> show k
Pil.LOAD x -> do
s <- use #stores
let key = dstToExpr $ x ^. #src
maybe (createFreeVar key) return $ HashMap.lookup key s >>= headMay
where
createFreeVar k = do
freeVar <- fallbackAsFreeVar
#stores %= HashMap.insert k [freeVar]
return freeVar
Pil.LOW_PART x -> integralUnOpM x $ lowPart sz
Pil.LSL x -> integralBinOpUnrelatedArgs x svShiftLeft
Pil.LSR x -> integralBinOpUnrelatedArgs x svShiftRight
Pil.MODS x -> integralBinOpMatchSecondArgToFirst x svRem
Pil.MODS_DP x -> divOrModDP True x svRem
Pil.MODU x -> integralBinOpMatchSecondArgToFirst x svRem
Pil.MODU_DP x -> divOrModDP False x svRem
Pil.MUL x -> integralBinOpMatchSecondArgToFirst x svTimes
Pil.MULS_DP x -> mulDP True x
Pil.MULU_DP x -> mulDP False x
Pil.NEG x -> integralUnOp x svUNeg
Pil.NOT x -> do
y <- solveExprRec $ x ^. #src
let k = kindOf y
case k of
KBool -> return $ unSBV . SBV.sNot . toSBool' $ y
(KBounded _ _) -> return $ svNot y
_ -> throwError . ErrorMessage $ "NOT expecting Bool or Integral, got " <> show k
Pil.OR x -> integralBinOpMatchSecondArgToFirst x svOr
Pil.POPCNT x ->
integralUnOpM x $ \bv -> do
case liftSFiniteBits' sPopCount bv of
Just res -> pure res
Nothing -> throwError . ErrorMessage $ "Unsupported POPCNT operand size: " <> show (intSizeOf bv)
Pil.RLC x -> rotateBinOpWithCarry x rotateLeftWithCarry
Pil.ROL x -> integralBinOpUnrelatedArgs x svRotateLeft
Pil.ROR x -> integralBinOpUnrelatedArgs x svRotateRight
Pil.ROUND_TO_INT _ -> unhandled "ROUND_TO_INT"
Pil.RRC x -> rotateBinOpWithCarry x rotateRightWithCarry
Pil.SBB x -> integralBinOpWithCarry x $ \a b c -> (a `svMinus` b) `svMinus` c
Pil.MemCmp _ -> unhandled "MemCmp"
Pil.StrCmp _ -> unhandled "StrCmp"
Pil.StrNCmp _ -> unhandled "StrNCmp"
Pil.STACK_LOCAL_ADDR _ -> unhandled "STACK_LOCAL_ADDR"
Pil.FIELD_ADDR _ -> unhandled "FIELD_ADDR"
Pil.SUB x -> integralBinOpMatchSecondArgToFirst x svMinus
Pil.SUB_WILL_OVERFLOW x ->
integralBinOpMatchSecondArgToFirst x $ \a b -> unSBV $ uncurry (.||) $ bvSubO (svSign a) (svSign b)
Pil.SX x -> bitVectorUnOp x (signExtend sz)
Pil.TEST_BIT x -> integralBinOpUnrelatedArgs x $ \a b -> case kindOf a of
KBounded _ w -> (a `svAnd` svExp (constWord (Bits w') 2) b)
`svGreaterThan` constWord (Bits w') 0
where
w' = fromIntegral w
-- TODO: Throw error if not KBounded
_ -> svFalse
Pil.UNIMPL _ -> throwError . ErrorMessage $ "UNIMPL"
Pil.UNIT -> unhandled "UNIT"
Pil.UPDATE_VAR x -> do
dest <- lookupVarSym $ x ^. #dest
src <- solveExprRec $ x ^. #src
guardIntegral dest
guardIntegral src
--TODO: convert dest and src to unsigned and convert them back if needed
TODO : the above TODO might already happen in updateBitVec . find out .
updateBitVec (toBitOffset $ x ^. #offset) src dest
-- -- How should src and dest be related?
-- -- Can't express that `offset + width(src) == width(dest)`
-- -- without `+` and `==` as type level operators.
-- return [ (r, CSVar v) ]
Pil.VAR x -> lookupVarSym $ x ^. #src
TODO : add test
-- also, maybe convert the offset to bits?
Pil.VAR_FIELD x -> do
v <- lookupVarSym $ x ^. #src
safeExtract (off + w - 1) off v
where
off = fromIntegral . toBitOffset $ x ^. #offset
w = fromIntegral sz
-- this should really be called VAR_JOIN
Pil.VAR_JOIN x -> do
low <- lookupVarSym $ x ^. #low
high <- lookupVarSym $ x ^. #high
guardIntegral low
guardIntegral high
return $ svJoin high low
Pil.VAR_PHI _ -> unhandled "VAR_PHI"
Pil.XOR x -> integralBinOpUnrelatedArgs x svXOr
Pil.ZX x -> bitVectorUnOp x (zeroExtend sz)
where
| Throws an error that says exactly which ' ExprOp ' constructor is unhandled
unhandled opName = throwError . ErrorMessage $ "unhandled PIL op: " <> opName
fallbackAsFreeVar :: Solver SVal
fallbackAsFreeVar = case mdst of
Nothing -> throwError . ExprError xsym . ErrorMessage $ "missing DeepSymType"
Just dst -> catchError (makeSymVarOfType Nothing dst) $ \e ->
throwError $ ExprError xsym e
getDst :: Solver DeepSymType
getDst = maybe e return mdst
where e = throwError . ErrorMessage $ "missing DeepSymType"
getRetKind = getDst >>= deepSymTypeToKind
catchFallbackAndWarn :: Solver SVal -> Solver SVal
catchFallbackAndWarn m = catchError m $ \e -> do
si <- use #currentStmtIndex
warn $ StmtError si e
fallbackAsFreeVar
binOpEqArgsReturnsBool :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression)
=> x -> (SVal -> SVal -> SVal) -> Solver SVal
binOpEqArgsReturnsBool x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
guardSameKind lx rx
return $ f lx rx
| does n't match second arg to first
integralBinOpUnrelatedArgs :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression)
=> x -> (SVal -> SVal -> SVal) -> Solver SVal
integralBinOpUnrelatedArgs x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
guardIntegral lx
guardIntegral rx
return $ f lx rx
| assumes first arg width > = second arg width
matches second args sign and width to equal first
integralBinOpMatchSecondArgToFirst
:: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression)
=> x -> (SVal -> SVal -> SVal) -> Solver SVal
integralBinOpMatchSecondArgToFirst x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
guardIntegralFirstWidthNotSmaller lx rx
let rx' = matchSign lx (matchBoundedWidth lx rx)
return $ f lx rx'
HLINT ignore " Reduce duplication "
floatBinOp :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> x
-> (SBV.SDouble -> SBV.SDouble -> SBV.SDouble) -> Solver SVal
floatBinOp x f = do
lx <- toSFloat =<< solveExprRec (x ^. #left)
rx <- toSFloat =<< solveExprRec (x ^. #right)
return . unSBV $ f lx rx
floatBinOpReturnsBool :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> x
-> (SBV.SDouble -> SBV.SDouble -> SBool) -> Solver SVal
floatBinOpReturnsBool x f = do
lx <- toSFloat =<< solveExprRec (x ^. #left)
rx <- toSFloat =<< solveExprRec (x ^. #right)
return . unSBV $ f lx rx
bitVectorUnOp :: HasField' "src" x DSTExpression
=> x
-> (SVal -> SVal) -> Solver SVal
bitVectorUnOp = integralUnOp
integralUnOp :: HasField' "src" x DSTExpression
=> x
-> (SVal -> SVal) -> Solver SVal
integralUnOp x f = integralUnOpM x (return . f)
integralUnOpM :: HasField' "src" x DSTExpression
=> x
-> (SVal -> Solver SVal) -> Solver SVal
integralUnOpM x f = do
lx <- solveExprRec (x ^. #src)
guardIntegral lx
f lx
floatUnOp :: HasField' "src" x DSTExpression
=> x
-> (SBV.SDouble -> SBV.SDouble) -> Solver SVal
floatUnOp x f = do
lx <- solveExprRec (x ^. #src)
unSBV . f <$> toSFloat lx
-- | return is double width, so we double the args
mulDP :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> Bool
-> x
-> Solver SVal
mulDP signedness x = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
let retKind = KBounded signedness $ fromIntegral sz
guardIntegralFirstWidthNotSmaller lx rx
guardIntegralFirstWidthNotSmaller retKind lx
let lx' = matchIntegral retKind lx
rx' = matchIntegral lx' rx
return $ svTimes lx' rx'
| first arg is double width of second and return arg
so we have to increase width of second , then shrink result by half
divOrModDP :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> Bool
-> x
-> (SVal -> SVal -> SVal) -> Solver SVal
divOrModDP signedness x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
let retKind = KBounded signedness $ fromIntegral sz
guardIntegralFirstWidthNotSmaller lx rx
let rx' = matchIntegral lx rx
res = f lx rx'
res' = matchIntegral retKind res -- make result size of original rx
return res'
integralBinOpWithCarry :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression
, HasField' "carry" x DSTExpression)
=> x
-> (SVal -> SVal -> SVal -> SVal) -> Solver SVal
integralBinOpWithCarry x f = do
a <- solveExprRec (x ^. #left)
b <- solveExprRec (x ^. #right)
c <- solveExprRec (x ^. #carry)
guardIntegralFirstWidthNotSmaller a b
cAsInt <- boolToInt (kindOf a) c
let b' = matchIntegral a b
return $ f a b' cAsInt
rotateBinOpWithCarry :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression
, HasField' "carry" x DSTExpression)
=> x
-> (SVal -> SVal -> SVal -> SVal) -> Solver SVal
rotateBinOpWithCarry x f = do
a <- solveExprRec (x ^. #left)
b <- solveExprRec (x ^. #right)
c <- solveExprRec (x ^. #carry)
guardIntegral a
guardIntegral b
guardBool c
cAsInt <- boolToInt (KBounded False 1) c
return $ runAsUnsigned (\y -> f y b cAsInt) a
solveTypedStmtsWith :: SMTConfig
-> HashMap PilVar DeepSymType
-> [(Int, Statement (Ch.InfoExpression (Ch.SymInfo, Maybe DeepSymType)))]
-> IO (Either SolverError SolverReport)
solveTypedStmtsWith solverCfg vartypes stmts = do
er <- runSolverWith solverCfg run ( emptyState
, SolverCtx vartypes stubbedFunctionConstraintGen True AbortOnError
)
return $ toSolverReport <$> er
where
toSolverReport :: (SolverResult, SolverState) -> SolverReport
toSolverReport (r, s) = SolverReport r (s ^. #errors)
run = do
declarePilVars
mapM_ f stmts
querySolverResult
f (ix, stmt) = do
#currentStmtIndex .= ix
solveStmt stmt
| runs type checker first , then solver
solveStmtsWith :: SMTConfig
-> [Statement Expression]
-> IO (Either
(Either
Ch.ConstraintGenError
(SolverError, Ch.TypeReport))
(SolverReport, Ch.TypeReport))
solveStmtsWith solverCfg stmts = do
-- should essential analysis steps be included here?
-- let stmts' = Analysis.substFields stmts
let er = Ch.checkStmts stmts
case er of
Left e -> return $ Left (Left e)
Right tr -> solveTypedStmtsWith solverCfg (tr ^. #varSymTypeMap) (tr ^. #symTypedStmts) >>= \case
Left e -> return $ Left (Right (e, tr))
Right sr -> return $ Right (sr, tr)
-- | convenience function for checking statements.
any errors in Type Checker or Solver result in Unk
-- warnings are ignored
solveStmtsWith_ :: SMTConfig
-> [Statement Expression]
-> IO SolverResult
solveStmtsWith_ solverCfg stmts = solveStmtsWith solverCfg stmts >>= \case
Left _ -> return Unk
Right (r, _) -> return $ r ^. #result
| null | https://raw.githubusercontent.com/kudu-dynamics/blaze/2220a07d372a817e79525ec2707984b189fe98c9/src/Blaze/Pil/Solver.hs | haskell | # LANGUAGE RankNTypes #
| Convert a `DeepSymType` to an SBV Kind.
Any symbolic Sign types are concretized to False.
ignore recursion, and hope for the best
Ch.TPointer bwt ptrElemType -> case ptrElemType of
alen constraint is handled at sym var creation
-- TODO: structs. good luck
TODO: Will this show the error in context or do we need to manage that ourselves here?
v <- case cs <$> nm of
Just n -> D.svNewVar k n
Nothing -> D.svNewVar_ k
case dst of
constrain_ $ fromIntegral n .== BSList.length v
_ -> return ()
return v
: kindOf bv is bounded
| most significant bit
requires: kindOf bv is Bounded
: kindOf bv is bounded
| Extends b to match a's width.
| requires: width a >= width b
: kindOf a, b are bounded
error if either not bounded.
if x is signed, converts to unsigned, then runs f, then converts result to signed
this is pretty much just copied out of Data.SBV
TODO: convert SVals to unsigned.
TODO: guard that n is greater than 0
and that w is big enough
with carry works like regular rotate with carry appended to the end
toSList x
sizeOf :: SVal -> Solver SInteger
sizeOf x = case k of
KUnbounded -> error "SBV.HasKind.intSizeOf((S)Integer)"
KReal -> error "SBV.HasKind.intSizeOf((S)Real)"
KFP i j -> i + j
KRational -> error "SBV.HasKind.intSizeOf((S)Rational)"
KList ek -> error $ "SBV.HasKind.intSizeOf((S)List)" ++ show ek
KTuple tys -> error $ "SBV.HasKind.intSizeOf((S)Tuple)" ++ show tys
KEither k1 k2 -> error $ "SBV.HasKind.intSizeOf((S)Either)" ++ show (k1, k2)
where
k = kindOf x
-----------------------------
is not one of these supported sizes, then the result will be @Just (f ...)@,
otherwise 'Nothing' is returned
Can easily extend this if we need to support more sizes later by adding
| Like 'liftSFiniteBits' but discard the phantom type information of the 'SBV _'
solveExpr (Ch.InfoExpression ((Ch.SymInfo _ xsym), Nothing) _) = \
TODO: stub standard libs here
the signed and unsigned versions use the same smt func
the type checker should guarantee that the args are correctly signed or unsigned
TODO: a FIELD_ADDR should only be used inside a LOAD or Store, and hence
Pil.FIELD_ADDR x -> do
TODO: Throw error if not KBounded
TODO: convert dest and src to unsigned and convert them back if needed
-- How should src and dest be related?
-- Can't express that `offset + width(src) == width(dest)`
-- without `+` and `==` as type level operators.
return [ (r, CSVar v) ]
also, maybe convert the offset to bits?
this should really be called VAR_JOIN
| return is double width, so we double the args
make result size of original rx
should essential analysis steps be included here?
let stmts' = Analysis.substFields stmts
| convenience function for checking statements.
warnings are ignored | HLINT ignore " Use if "
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
module Blaze.Pil.Solver
( module Blaze.Pil.Solver
, module Blaze.Types.Pil.Solver
, module Exports
) where
import Blaze.Prelude hiding (error, zero, natVal, isSigned)
import qualified Prelude as P
import qualified Blaze.Types.Pil as Pil
import Blaze.Types.Pil ( Expression
, PilVar
, Statement
)
import qualified Data.HashMap.Strict as HashMap
import Blaze.Types.Pil.Solver
import qualified Blaze.Pil.Solver.List as BSList
import Data.SBV.Tools.Overflow (bvAddO, bvSubO)
import qualified Data.SBV.Trans as Exports (z3, cvc4)
import qualified Data.SBV.Trans as SBV
import Data.SBV.Trans ( (.==)
, (./=)
, (.>)
, (.>=)
, (.<)
, (.<=)
, (.||)
, (.~|)
, sPopCount
, SFiniteBits
, SInteger
, SInt8
, SInt16
, SInt32
, SInt64
, SInt
, SWord8
, SWord16
, SWord32
, SWord64
, SWord
, WordN
)
import Data.SBV.Dynamic as D hiding (Solver)
import qualified Blaze.Types.Pil.Checker as Ch
import qualified Blaze.Pil.Checker as Ch
import Blaze.Types.Pil.Checker ( DeepSymType )
import Data.SBV.Internals (SBV(SBV), unSBV)
stubbedFunctionConstraintGen :: HashMap Text (SVal -> [SVal] -> Solver ())
stubbedFunctionConstraintGen = HashMap.fromList
[ ( "memcpy"
, \_r args -> case args of
[dest, src, n] -> do
guardList dest
guardList src
guardIntegral n
n' <- boundedToSInteger n
constrain_ $ BSList.length dest .>= n'
constrain_ $ BSList.length src .>= n'
constrain _ $ r .== ( SList.take n ' src .++ SList.drop n ' dest )
xs -> throwError . StubbedFunctionArgError "memcpy" 3 $ length xs
)
, ( "abs"
, \r args -> case args of
[n] -> do
guardIntegral n
constrain $ r `svEqual` svAbs n
xs -> throwError . StubbedFunctionArgError "abs" 1 $ length xs
)
]
-- | Renders a 'PilVar' as text: the symbol, then, when the var carries a
-- context, "@<function name>" and ".<ctx id>".
pilVarName :: PilVar -> Text
pilVarName pv = symbolPart <> funcPart <> ctxIdPart
  where
    symbolPart = pv ^. #symbol
    funcPart = maybe "" (("@"<>) . view (#func . #name)) mCtx
    ctxIdPart = maybe "" (("."<>) . show . unwrapCtxId . view #ctxId) mCtx
    unwrapCtxId (Pil.CtxId n) = n
    mCtx :: Maybe Pil.Ctx
    mCtx = pv ^. #ctx
deepSymTypeToKind :: DeepSymType -> Solver Kind
deepSymTypeToKind t = case t of
Ch.DSVar v -> err $ "Can't convert DSVar " <> show v
Ch.DSRecursive _s pt -> deepSymTypeToKind (Ch.DSType pt)
Ch.DSType pt -> case pt of
Ch.TArray _alen _etype -> err "Array should be handled only when wrapped in Ptr"
Ch.TBool -> return KBool
Ch.TChar bw -> KBounded False <$> getBitWidth bw
Ch.TInt bw s -> KBounded <$> (getSigned s <|> pure False) <*> getBitWidth bw
Ch.TFloat _ -> return KDouble
SBV only has float or double , so we 'll just pick double
Ch.TBitVector bw -> KBounded False <$> getBitWidth bw
Ch.TPointer bw _pt -> KBounded False <$> getBitWidth bw
Ch . DSType ( Ch . TArray _ alen arrayElemType ) - >
KList < $ > deepSymTypeToKind arrayElemType
_ - > KBounded < $ > pure False < * > getBitWidth bwt
Ch.TCString _ -> return KString
Ch.TRecord _ -> err "Can't handle Record type"
Ch.TUnit -> return $ KTuple []
Ch.TBottom s -> err $ "TBottom " <> show s
Ch.TFunction _ _ -> err "Can't handle Function type"
where
getBitWidth :: Maybe Bits -> Solver Int
getBitWidth (Just b) = case fromIntegral b of
0 -> err "Bitwidth cannot be zero."
n -> return n
getBitWidth Nothing = err "Can't get bitwidth."
getSigned :: Maybe Bool -> Solver Bool
getSigned (Just s) = return s
getSigned Nothing = err "Can't get signedness."
err :: forall a. Text -> Solver a
err = throwError . DeepSymTypeConversionError t
-- | Creates a fresh symbolic variable of kind @k@, named when a name is given.
makeSymVar :: Maybe Text -> DeepSymType -> Kind -> Solver SVal
makeSymVar nm _dst k = do
  case cs <$> nm of
    Just n -> D.svNewVar k n
    Nothing -> D.svNewVar_ k
-- Ch . DSType ( Ch . TPointer _ ( Ch . DSType ( Ch . TArray ( Ch . DSType ( Ch . ) ) _ ) ) ) - > do

-- | Creates a fresh symbolic variable whose kind is derived from the given
-- 'DeepSymType'; fails in 'Solver' when the type cannot be converted.
makeSymVarOfType :: Maybe Text -> DeepSymType -> Solver SVal
makeSymVarOfType nm dst = deepSymTypeToKind dst >>= makeSymVar nm dst

-- | Runs @m@; on error, behavior depends on the configured leniency:
-- 'AbortOnError' rethrows; 'SkipStatementsWithErrors' records the wrapped
-- error as a warning and runs the fallback handler instead.
catchIfLenient :: (SolverError -> SolverError) -> Solver a -> (SolverError -> Solver a) -> Solver a
catchIfLenient wrapErr m handleError = do
  solverLeniency <- view #leniency
  catchError m $ \e ->
    case solverLeniency of
      AbortOnError -> throwError e
      SkipStatementsWithErrors -> warn (wrapErr e) >> handleError e

-- | Lenient wrapper that tags errors with the offending PilVar's symbol and
-- skips the var on failure.
catchIfLenientForPilVar :: PilVar -> Solver () -> Solver ()
catchIfLenientForPilVar pv m = catchIfLenient (PilVarConversionError $ pv ^. #symbol) m (const $ return ())

-- | Lenient wrapper that tags errors with the current statement index and
-- skips the statement on failure.
catchIfLenientForStmt :: Solver () -> Solver ()
catchIfLenientForStmt m = do
  sindex <- use #currentStmtIndex
  catchIfLenient (StmtError sindex) m (const $ return ())

-- | Declares a symbolic variable for every PilVar in the type environment,
-- recording its name and sval in the state maps. Per-var failures are
-- tolerated according to the solver's leniency setting.
declarePilVars :: Solver ()
declarePilVars = ask >>= mapM_ f . HashMap.toList . typeEnv
  where
    f (pv, dst) = catchIfLenientForPilVar pv $ do
        sval <- makeSymVarOfType (Just nm) dst
        #varNames %= HashMap.insert pv nm
        #varMap %= HashMap.insert pv sval
      where
        nm = pilVarName pv
-- | Signed bounded integer constant of the given bit width.
constInt :: Bits -> Integer -> SVal
constInt w = svInteger (KBounded True $ fromIntegral w)

-- | Unsigned bounded integer constant of the given bit width.
constWord :: Bits -> Integer -> SVal
constWord w = svInteger (KBounded False $ fromIntegral w)

-- | Floating-point constant (floats are modeled as doubles throughout).
constFloat :: Double -> SVal
constFloat = svDouble

-- | Unbounded (mathematical) integer constant.
constInteger :: Integral a => a -> SVal
constInteger = svInteger KUnbounded . fromIntegral
| requires : > = bv
-- | Zero-extends @bv@ to @targetWidth@ bits and makes it unsigned.
-- Requires: targetWidth >= width bv, and @bv@ must be 'KBounded'.
zeroExtend :: Bits -> SVal -> SVal
zeroExtend targetWidth bv =
  case kindOf bv of
    KBounded _s w
      | w == tw -> svUnsign bv
      | w < tw ->
          -- prepend (targetWidth - w) zero bits
          let padding = svInteger (KBounded False $ fromIntegral targetWidth - w) 0
          in svJoin padding (svUnsign bv)
      | otherwise -> P.error "zeroExtend: target width less than bitvec width"
    _ -> P.error "zeroExtend: arg not bitvec"
  where
    tw = fromIntegral targetWidth
: width bv > 0
-- | The most significant bit of @bv@ as a symbolic bool.
-- Requires: @bv@ is 'KBounded' with nonzero width.
msb :: SVal -> SVal
msb bv =
  case kindOf bv of
    KBounded _ w ->
      if w == 0
        then P.error "msb: bv has zero width"
        else svTestBit bv (w - 1)
    _ -> P.error "msb: bv must be Bounded kind"
| requires : > = bv
: width bv > 0
-- | Sign-extends @bv@ to @targetWidth@ bits, filling with copies of the msb,
-- and makes the result signed.
-- Requires: targetWidth >= width bv, width bv > 0, and @bv@ 'KBounded'.
signExtend :: Bits -> SVal -> SVal
signExtend targetWidth bv = case kindOf bv of
  (KBounded _s w)
    | tw == w -> svSign bv
    | tw > w -> svJoin ext $ svSign bv
    | otherwise -> P.error "signExtend: target width less than bitvec width"
    where
      tw = fromIntegral targetWidth
      -- the fill is all ones when the msb is set, otherwise all zeros
      zero = svInteger (KBounded True $ fromIntegral targetWidth - w) 0
      ones = svNot zero
      ext = svIte (msb bv) ones zero
  _ -> P.error "signExtend: bv must be Bounded kind"
-- | Sign-extends @bv@ to the symbolic target width @tw@ (a 32-bit word).
-- When @tw@ is not larger than the current width, @bv@ is returned unchanged.
signExtendSVal :: SVal -> SVal -> SVal
signExtendSVal tw bv = case kindOf bv of
  (KBounded _s w) -> svIte (tw `svGreaterEq` constWord 32 (toInteger w)) (svJoin ext bv) bv
    where
      -- subtract an additional 1 off since we are zero inclusive in createExtendBuf
      extWidth = tw `svMinus` constWord 32 (toInteger w) `svMinus` constWord 32 1
      zeros = buf
      ones = svNot buf
      -- fill with ones when the msb is set, otherwise zeros
      ext = svIte (msb bv) ones zeros
      buf = createExtendBuf extWidth
      -- builds a (width + 1)-bit all-zero buffer one bit at a time
      createExtendBuf :: SVal -> SVal
      createExtendBuf width = svIte (width `svEqual` constWord 32 0)
        (constInt 1 0)
        $ svJoin (createExtendBuf $ width `svMinus` constWord 32 1) $ constInt 1 0
  _ -> P.error "signExtendSVal: bv must be Bounded kind"
-- | Zero-extends @bv@ to the symbolic target width @tw@ (a 32-bit word).
-- When @tw@ is not larger than the current width, @bv@ is returned unchanged.
zeroExtendSVal :: SVal -> SVal -> SVal
zeroExtendSVal tw bv = case kindOf bv of
  (KBounded _s w) -> svIte (tw `svGreaterEq` constWord 32 (toInteger w)) (svJoin buf bv) bv
    where
      -- subtract an additional 1 off since createExtendBuf is zero inclusive
      extWidth = tw `svMinus` constWord 32 (toInteger w) `svMinus` constWord 32 1
      buf = createExtendBuf extWidth
      -- builds a (width + 1)-bit all-zero buffer one bit at a time
      createExtendBuf :: SVal -> SVal
      createExtendBuf width = svIte (width `svEqual` constWord 32 0)
        (constWord 1 0)
        $ svJoin (createExtendBuf $ width `svMinus` constWord 32 1) $ constWord 1 0
  _ -> P.error "zeroExtendSVal: bv must be Bounded kind"
: widths a , b > 0
-- | Matches the width of the second argument to the first: sign- or
-- zero-extends (per the second arg's sign) when the first is wider, truncates
-- when narrower. Requires both args to be 'KBounded' with width > 0.
matchBoundedWidth :: HasKind a => a -> SVal -> SVal
matchBoundedWidth a b = case (kindOf a, kindOf b) of
  (KBounded _ w1, KBounded s w2)
    | w1 == w2 -> b
    | w1 > w2 -> if s then signExtend w1' b else zeroExtend w1' b
    | otherwise -> lowPart_ w1' b
    where
      w1' = fromIntegral w1
  _ -> P.error "matchBoundedWidth: a and b must be kind Bounded"

-- | Matches second to first bounded integral sign.
matchSign :: HasKind a => a -> SVal -> SVal
matchSign a b = case (kindOf a, kindOf b) of
  (KBounded s1 _, KBounded s2 _)
    | s1 == s2 -> b
    | otherwise -> if s1 then svSign b else svUnsign b
  _ -> P.error "matchSign: a and b must be kind Bounded"

-- | Matches both the sign and the width of the second argument to the first.
matchIntegral :: HasKind a => a -> SVal -> SVal
matchIntegral a b = matchBoundedWidth a (matchSign a b)
-- | Runs @f@ on the unsigned view of @x@. When @x@ is signed the result is
-- converted back to signed. Requires @x@ to be 'KBounded'.
runAsUnsigned :: (SVal -> SVal) -> SVal -> SVal
runAsUnsigned f x = case kindOf x of
  KBounded True _ -> svSign (f (svUnsign x))
  KBounded False _ -> f x
  _ -> P.error "runAsUnsigned: expected KBounded"
-- | Whether a bounded symbolic value is signed. Partial: errors on
-- non-'KBounded' kinds.
isSigned :: SVal -> Bool
isSigned x = case kindOf x of
  KBounded s _ -> s
  k -> P.error $ "isSigned expected KBounded, got " <> show k

-- | Arithmetic right shift that behaves correctly for both signed and
-- unsigned @x@. For an unsigned @x@ with the msb set, sign fill is emulated
-- by logically shifting the complement and complementing again. The shift
-- amount must be unsigned.
sSignedShiftArithRight :: SVal -> SVal -> SVal
sSignedShiftArithRight x i
  | isSigned i = P.error "sSignedShiftArithRight: shift amount should be unsigned"
  | isSigned x = svShiftRight x i
  | otherwise = svIte (msb x)
      (svNot (svShiftRight (svNot x) i))
      (svShiftRight x i)
the svJoin gets messed up if these are signed
TODO : has the above TODO been done ? check to make sure
-- | Replaces the bits of @dest@ starting at offset @boff@ with @src@,
-- preserving the signedness of @dest@ in the result. Fails when the write
-- would extend past the end of @dest@ or when either arg is not 'KBounded'.
updateBitVec :: BitOffset -> SVal -> SVal -> Solver SVal
updateBitVec boff src dest = case (kindOf dest, kindOf src) of
  (KBounded destSign wdest, KBounded _ wsrc)
    | wsrc + off > wdest -> throwError . ErrorMessage $ "updateBitVec: src width + offset must be less than dest width"
    | otherwise -> do
        -- result = high part of dest ++ src ++ low part of dest
        destHighPart <- highPart (fromIntegral $ wdest - (off + wsrc)) dest'
        destLowPart <- lowPart (fromIntegral boff) dest'
        return . bool svUnsign svSign destSign
          $ destHighPart `svJoin` src' `svJoin` destLowPart
    where
      -- svJoin misbehaves on signed values, so work unsigned throughout
      dest' = svUnsign dest
      src' = svUnsign src
      off = fromIntegral boff
  _ -> throwError . ErrorMessage $ "updateBitVec: both args must be KBounded"
-- | 'svExtract' with bounds checking: extracts bits
-- @[startIndex', endIndex']@ of @var@, failing in 'Solver' (instead of
-- calling 'P.error') when an index is out of bounds or @var@ is not bounded.
safeExtract :: Bits -> Bits -> SVal -> Solver SVal
safeExtract endIndex' startIndex' var = case k of
  (KBounded _ w)
    | endIndex' >= fromIntegral w -> error "endIndex out of bounds"
    | startIndex' < 0 -> error "startIndex out of bounds"
    | otherwise -> return $ svExtract (fromIntegral endIndex') (fromIntegral startIndex') var
  _ -> error "must be KBounded"
  where
    k = kindOf var
    -- all failures are reported as a structured ExtractError
    error msg' = throwError $ ExtractError
      { endIndex = endIndex'
      , startIndex = startIndex'
      , kind = k
      , msg = msg'
      }

-- | The low @n@ bits of @src@, with a width check in 'Solver'.
lowPart :: Bits -> SVal -> Solver SVal
lowPart n src = case kindOf src of
  KBounded _ w
    | n > fromIntegral w -> throwError . ErrorMessage $ "lowPart: cannot get part greater than whole"
    | otherwise -> return $ lowPart_ n src
  _ -> P.error "lowPart: src must be KBounded"

-- | The low @n@ bits of @src@. Partial: performs no width check.
lowPart_ :: Bits -> SVal -> SVal
lowPart_ n src = case kindOf src of
  KBounded _ _w -> svExtract (fromIntegral n - 1) 0 src
  _ -> P.error "lowPart: src must be KBounded"

-- | The high @n@ bits of @src@, with a width check in 'Solver'.
highPart :: Bits -> SVal -> Solver SVal
highPart n src = case kindOf src of
  KBounded _ w
    | n > fromIntegral w -> throwError . ErrorMessage $ "highPart: cannot get part greater than whole"
    | otherwise -> return $ highPart_ n src
  _ -> P.error "highPart: src must be KBounded"
-- | The high @n@ bits of @src@. Partial: performs no width check.
highPart_ :: Bits -> SVal -> SVal
highPart_ n src = case kindOf src of
  KBounded _ w -> svExtract (w - 1) (w - fromIntegral n) src
  _ -> P.error "highPart: src must be KBounded"
-- | Rotation through carry: the carry bit @c@ is appended as the low bit of
-- @src@, the widened value is rotated, and the carry position is dropped
-- from the result.
rotateWithCarry :: (SVal -> SVal -> SVal) -> SVal -> SVal -> SVal -> SVal
rotateWithCarry rotFunc src rot c =
  case kindOf src of
    KBounded _ w ->
      let withCarry = svJoin src c
      in svExtract w 1 (rotFunc withCarry rot)
    _ -> P.error "rotateWithCarry: src is not KBounded"
-- | Rotate right through carry; see 'rotateWithCarry'.
rotateRightWithCarry :: SVal -> SVal -> SVal -> SVal
rotateRightWithCarry = rotateWithCarry svRotateRight

-- | Rotate left through carry; see 'rotateWithCarry'.
rotateLeftWithCarry :: SVal -> SVal -> SVal -> SVal
rotateLeftWithCarry = rotateWithCarry svRotateLeft
-- | Converts to a typed symbolic double inside 'Solver'; fails unless the
-- value has kind 'KDouble'.
toSFloat :: SVal -> Solver SBV.SDouble
toSFloat x = guardFloat x >> return (SBV x)

-- | Partial version of 'toSFloat': calls 'P.error' unless 'KDouble'.
toSFloat' :: SVal -> SBV.SDouble
toSFloat' x = case kindOf x of
  KDouble -> SBV x
  _ -> P.error "toSFloat: x is not KDouble kind"

-- | Partial conversion to a typed symbolic bool: 'P.error' unless 'KBool'.
toSBool' :: SVal -> SBool
toSBool' x = case kindOf x of
  KBool -> SBV x
  _ -> P.error "toSBool: x is not KBool kind"

-- | Converts to a typed symbolic bool inside 'Solver'; fails unless 'KBool'.
toSBool :: SVal -> Solver SBool
toSBool x = guardBool x >> return (SBV x)
toSList : : HasKind a = > SVal - > Solver ( SList a )
KBool - > throwError $ SizeOfError k
KBounded _ w - > s
KFloat - > 32
KDouble - > 64
KUserSort s _ - > error $ " SBV.HasKind.intSizeOf : Uninterpreted sort : " + + s
> error " SBV.HasKind.intSizeOf((S)Double ) "
KChar - > error " SBV.HasKind.intSizeOf((S)Char ) "
KSet error $ " SBV.HasKind.intSizeOf((S)Set ) " + + show ek
k - > error $ " SBV.HasKind.intSizeOf((S)Maybe ) " + + show k
-- | Converts a symbolic bool to a 1-bit unsigned word: 1 for true, 0 for
-- false.
boolToInt' :: SVal -> SVal
boolToInt' b = svIte b one zero
  where
    one = svInteger k 1
    zero = svInteger k 0
    k = KBounded False 1

-- | Converts a symbolic bool to 0 or 1 of the given bounded kind.
-- Fails when @b@ is not a bool or the kind is not 'KBounded'.
boolToInt :: Kind -> SVal -> Solver SVal
boolToInt k@(KBounded _ _) b = do
  guardBool b
  return $ svIte b (svInteger k 1) (svInteger k 0)
boolToInt t _ = throwError . ErrorMessage $ "boolToInt expected KBounded, got " <> show t
-- | Adds a boolean constraint. When unsat-core tracking is enabled, the
-- constraint is named "stmt_<i>" after the current statement index so cores
-- can be mapped back to statements.
constrain_ :: SBool -> Solver ()
constrain_ b = do
  ctx <- ask
  if ctx ^. #useUnsatCore
    then do
      i <- use #currentStmtIndex
      SBV.namedConstraint ("stmt_" <> show i) b
    else SBV.constrain b

-- | Like 'constrain_' for dynamically typed values; fails unless 'KBool'.
constrain :: SVal -> Solver ()
constrain = constrain_ <=< toSBool

-- | Creates a named fresh symbolic variable of the given kind.
newSymVar :: Text -> Kind -> Solver SVal
newSymVar name' k = D.svNewVar k (cs name')
-- | Succeeds iff @x@ has kind 'KBool'.
guardBool :: HasKind a => a -> Solver ()
guardBool x = case k of
  KBool -> return ()
  _ -> throwError $ GuardError "guardBool" [k] "Not Bool"
  where
    k = kindOf x

-- | Boolean negation on a dynamically typed symbolic bool.
svBoolNot :: SVal -> SVal
svBoolNot = unSBV . SBV.sNot . toSBool'

-- | Succeeds iff @x@ has a bounded integral kind.
guardIntegral :: HasKind a => a -> Solver ()
guardIntegral x = case k of
  KBounded _ _ -> return ()
  _ -> throwError $ GuardError "guardIntegral" [k] "Not integral"
  where
    k = kindOf x
-- | Widens a bounded symbolic value of a standard width (8/16/32/64/128 bits,
-- signed or unsigned) to an unbounded symbolic integer. Fails on other kinds
-- or widths.
boundedToSInteger :: SVal -> Solver SInteger
boundedToSInteger x = do
  guardIntegral x
  case kindOf x of
    KBounded True 8 -> return $ SBV.sFromIntegral (SBV x :: SInt8)
    KBounded True 16 -> return $ SBV.sFromIntegral (SBV x :: SInt16)
    KBounded True 32 -> return $ SBV.sFromIntegral (SBV x :: SInt32)
    KBounded True 64 -> return $ SBV.sFromIntegral (SBV x :: SInt64)
    KBounded True 128 -> return $ SBV.sFromIntegral (SBV x :: SInt 128)
    KBounded False 8 -> return $ SBV.sFromIntegral (SBV x :: SWord8)
    KBounded False 16 -> return $ SBV.sFromIntegral (SBV x :: SWord16)
    KBounded False 32 -> return $ SBV.sFromIntegral (SBV x :: SWord32)
    KBounded False 64 -> return $ SBV.sFromIntegral (SBV x :: SWord64)
    KBounded False 128 -> return $ SBV.sFromIntegral (SBV x :: SWord 128)
    t -> throwError . ConversionError $ "Cannot convert type " <> show t
-- | Succeeds iff @x@ has kind 'KDouble'.
guardFloat :: HasKind a => a -> Solver ()
guardFloat x = case k of
  KDouble -> return ()
  _ -> throwError $ GuardError "guardFloat" [k] "Not Double"
  where
    k = kindOf x

-- | Succeeds iff both args are bounded and the first is at least as wide as
-- the second.
guardIntegralFirstWidthNotSmaller :: (HasKind a, HasKind b)
                                  => a -> b -> Solver ()
guardIntegralFirstWidthNotSmaller x y = case (kx, ky) of
  (KBounded _ w1, KBounded _ w2)
    | w1 >= w2 -> return ()
    | otherwise -> throwError $ GuardError "guardIntegralFirstWidthNotSmaller"
                   [kx, ky]
                   "Second arg width is greater than first"
  _ -> throwError $ GuardError "guardIntegralFirstWidthNotSmaller"
       [kx, ky]
       "Both must be KBounded"
  where
    kx = kindOf x
    ky = kindOf y

-- | Succeeds iff both args have exactly the same kind.
guardSameKind :: (HasKind a, HasKind b) => a -> b -> Solver ()
guardSameKind x y = if kindOf x == kindOf y
  then return ()
  else throwError $ GuardError "guardSameKind" [kindOf x, kindOf y] "not same kind"

-- | Succeeds iff @x@ is a symbolic list.
guardList :: (HasKind a) => a -> Solver ()
guardList x = case kindOf x of
  KList _ -> return ()
  _ -> throwError $ GuardError "guardList" [kindOf x] "not a list"
-- | Looks up the symbolic value previously declared for a PilVar; fails when
-- the var was never declared (see 'declarePilVars').
lookupVarSym :: PilVar -> Solver SVal
lookupVarSym pv = do
  vm <- use #varMap
  case HashMap.lookup pv vm of
    Just sval -> return sval
    Nothing -> throwError . ErrorMessage
      $ "lookupVarSym failed for var '" <> pilVarName pv <> "'"
-- | Converts a bit width to a byte-based PIL operation size.
bitsToOperationSize :: Bits -> Pil.OperationSize
bitsToOperationSize = Pil.OperationSize . (`div` 8) . fromIntegral

-- | Strips checker annotations, recovering a plain PIL expression.
dstToExpr :: DSTExpression -> Expression
dstToExpr (Ch.InfoExpression (info, _) op) = Pil.Expression (bitsToOperationSize $ info ^. #size) $ dstToExpr <$> op

-- | Runs @m@; on error, records it as a warning tagged with the current
-- statement index and returns the supplied default instead.
catchAndWarnStmtDef :: a -> Solver a -> Solver a
catchAndWarnStmtDef def m = catchError m $ \e -> do
  si <- use #currentStmtIndex
  warn $ StmtError si e
  return def

-- | 'catchAndWarnStmtDef' specialized to unit actions.
catchAndWarnStmt :: Solver () -> Solver ()
catchAndWarnStmt = catchAndWarnStmtDef ()

-- | Records a non-fatal solver error by prepending it to the error log.
warn :: SolverError -> Solver ()
warn e = #errors %= (e :)

-- | Conjunction of a list of symbolic booleans; 'svTrue' for the empty list.
svAggrAnd :: [SVal] -> SVal
svAggrAnd = foldr svAnd svTrue
| Convert an ' SVal ' to an ' SBV a ' , where ' a ' is one of ' Word8 ' , ' Word16 ' ,
' ' , ' Word64 ' , and then run a function with this wrapped SBV . If ' SVal '
-- | Converts an 'SVal' to an @'SBV' a@, where @a@ is one of the supported
-- unsigned word widths (1, 8, 16, 32, or 64 bits), and runs @f@ on it.
-- Returns 'Nothing' for unsupported widths; extend the table below with more
-- @WordN@ cases to support additional sizes.
liftSFiniteBits :: (forall a. SFiniteBits a => SBV a -> b) -> SVal -> Maybe b
liftSFiniteBits f sv =
  case intSizeOf sv of
    1 -> Just . f $ (SBV sv :: SBV (WordN 1))
    8 -> Just . f $ (SBV sv :: SBV Word8)
    16 -> Just . f $ (SBV sv :: SBV Word16)
    32 -> Just . f $ (SBV sv :: SBV Word32)
    64 -> Just . f $ (SBV sv :: SBV Word64)
    _ -> Nothing

-- | Like 'liftSFiniteBits' but discards the phantom type of the resulting
-- 'SBV', returning a dynamically typed 'SVal'.
liftSFiniteBits' :: (forall a. SFiniteBits a => SBV a -> SBV b) -> SVal -> Maybe SVal
liftSFiniteBits' f sv = unSBV <$> liftSFiniteBits f sv

-- | Generates constraints for a single typed statement, tolerating failures
-- per the configured leniency.
solveStmt :: Statement (Ch.InfoExpression (Ch.SymInfo, Maybe DeepSymType))
          -> Solver ()
solveStmt = catchIfLenientForStmt . solveStmt_ solveExpr
| Generates for statement , using provided expr solver
-- | Generates constraints for a statement, using the provided expression
-- solver. Any error is recorded as a warning for the current statement.
solveStmt_ :: (DSTExpression -> Solver SVal)
           -> Statement (Ch.InfoExpression (Ch.SymInfo, Maybe DeepSymType))
           -> Solver ()
solveStmt_ solveExprFunc stmt = catchAndWarnStmt $ case stmt of
  -- a Def equates the defined var with its value expression
  Pil.Def x -> do
    pv <- lookupVarSym $ x ^. #var
    expr <- solveExprFunc $ x ^. #value
    guardSameKind pv expr
    constrain $ pv `svEqual` expr
  -- a path constraint is asserted directly
  Pil.Constraint x ->
    solveExprFunc (x ^. #condition) >>= constrain
  -- stores are tracked per address expression so later LOADs can find them
  Pil.Store x -> do
    let exprAddr = dstToExpr $ x ^. #addr
    sValue <- solveExprFunc $ x ^. #value
    let insertStoreVar Nothing = Just [sValue]
        insertStoreVar (Just xs) = Just $ sValue : xs
    modify (\s -> s { stores = HashMap.alter insertStoreVar exprAddr $ s ^. #stores } )
    return ()
  -- a phi var equals at least one of its source vars
  Pil.DefPhi x -> do
      pv <- lookupVarSym $ x ^. #dest
      eqs <- mapM (f pv) $ x ^. #src
      constrain_ $ SBV.sOr eqs
    where
      f pv y = do
        pv2 <- lookupVarSym y
        guardSameKind pv pv2
        toSBool $ pv `svEqual` pv2
  -- other statement forms generate no constraints
  _ -> return ()

-- | Solves an expression recursively, tying the knot for 'solveExpr_'.
solveExpr :: DSTExpression -> Solver SVal
solveExpr = solveExpr_ solveExpr
| Creates SVal that represents expression .
This type of InfoExpression is in a TypeReport
solveExpr_ :: (DSTExpression -> Solver SVal) -> DSTExpression -> Solver SVal
solverError $ " No type for " < > show xsym
solveExpr_ solveExprRec (Ch.InfoExpression (Ch.SymInfo sz xsym, mdst) op) = catchFallbackAndWarn $ case op of
Pil.ADC x -> integralBinOpWithCarry x $ \a b c -> a `svPlus` b `svPlus` c
Pil.ADD x -> integralBinOpMatchSecondArgToFirst x svPlus
Pil.ADD_WILL_CARRY x ->
integralBinOpMatchSecondArgToFirst x $ \a b -> unSBV $ uncurry (.||) $ bvAddO (svUnsign a) (svUnsign b)
Pil.ADD_WILL_OVERFLOW x ->
integralBinOpMatchSecondArgToFirst x $ \a b -> unSBV $ uncurry (.||) $ bvAddO (svSign a) (svSign b)
Pil.ARRAY_ADDR x -> do
base <- solveExprRec (x ^. #base)
index <- solveExprRec (x ^. #index)
guardIntegral base
guardIntegral index
let stride = svInteger (kindOf base) . fromIntegral $ x ^. #stride
pure $ base `svPlus` (zeroExtend (fromIntegral $ intSizeOf base) index `svTimes` stride)
Pil.AND x -> integralBinOpMatchSecondArgToFirst x svAnd
Pil.ASR x -> integralBinOpUnrelatedArgs x sSignedShiftArithRight
Pil.BOOL_TO_INT x -> do
b <- solveExprRec $ x ^. #src
guardBool b
k <- getRetKind
guardIntegral k
return $ svIte b (svInteger k 1) (svInteger k 0)
Pil.CALL x -> do
fcg <- view #funcConstraintGen <$> ask
case (x ^. #name) >>= flip HashMap.lookup fcg of
Nothing -> fallbackAsFreeVar
Just gen -> do
args <- mapM solveExprRec $ x ^. #params
r <- fallbackAsFreeVar
gen r args
return r
Pil.CEIL x -> floatUnOp x $ SBV.fpRoundToIntegral SBV.sRoundTowardPositive
Pil.CMP_E x -> binOpEqArgsReturnsBool x svEqual
Pil.CMP_NE x -> binOpEqArgsReturnsBool x svNotEqual
Pil.CMP_SGE x -> binOpEqArgsReturnsBool x svGreaterEq
Pil.CMP_SGT x -> binOpEqArgsReturnsBool x svGreaterThan
Pil.CMP_SLE x -> binOpEqArgsReturnsBool x svLessEq
Pil.CMP_SLT x -> binOpEqArgsReturnsBool x svLessThan
but maybe TODO should be to convert signed SVal to unsigned SVal if necessary
Pil.CMP_UGE x -> binOpEqArgsReturnsBool x svGreaterEq
Pil.CMP_UGT x -> binOpEqArgsReturnsBool x svGreaterThan
Pil.CMP_ULE x -> binOpEqArgsReturnsBool x svLessEq
Pil.CMP_ULT x -> binOpEqArgsReturnsBool x svLessThan
Pil.CONST x -> do
k <- getRetKind
guardIntegral k
return . svInteger k . fromIntegral $ x ^. #constant
Pil.CONST_BOOL x -> return . svBool $ x ^. #constant
Pil.CONST_FLOAT x -> return . svDouble $ x ^. #constant
Pil.CONST_PTR x ->
return . svInteger (KBounded False $ fromIntegral sz)
. fromIntegral $ x ^. #constant
Pil.ConstStr x -> return . unSBV $ SBV.literal (cs $ x ^. #value :: String)
Pil.ConstFuncPtr x -> return . svInteger (KBounded False $ fromIntegral sz)
. fromIntegral $ x ^. #address
TODO : do we need to do anything special for the DP versions ?
Pil.DIVS x -> integralBinOpMatchSecondArgToFirst x svDivide
Pil.DIVS_DP x -> divOrModDP True x svDivide
Pil.DIVU x -> integralBinOpMatchSecondArgToFirst x svDivide
Pil.DIVU_DP x -> divOrModDP False x svDivide
Pil.Extract _ -> unhandled "Extract"
Pil.ExternPtr _ -> unhandled "ExternPtr"
Pil.FABS x -> floatUnOp x SBV.fpAbs
Pil.FADD x -> floatBinOp x $ SBV.fpAdd SBV.sRoundNearestTiesToAway
Pil.FDIV x -> floatBinOp x $ SBV.fpDiv SBV.sRoundNearestTiesToAway
Pil.FCMP_E x -> floatBinOpReturnsBool x (.==)
Pil.FCMP_GE x -> floatBinOpReturnsBool x (.>=)
Pil.FCMP_GT x -> floatBinOpReturnsBool x (.>)
Pil.FCMP_LE x -> floatBinOpReturnsBool x (.<=)
Pil.FCMP_LT x -> floatBinOpReturnsBool x (.<)
Pil.FCMP_O x -> floatBinOpReturnsBool x $ \a b -> SBV.fpIsNaN a .~| SBV.fpIsNaN b
Pil.FCMP_NE x -> floatBinOpReturnsBool x (./=)
Pil.FCMP_UO x -> floatBinOpReturnsBool x $ \a b -> SBV.fpIsNaN a .|| SBV.fpIsNaN b
should never be " solved " . But maybe field could be added to ?
Pil.FLOAT_CONV x -> do
y <- solveExprRec $ x ^. #src
case kindOf y of
(KBounded False 32) -> return
. unSBV
. SBV.toSDouble SBV.sRoundNearestTiesToAway
. SBV.sWord32AsSFloat
. SBV $ y
(KBounded False 64) -> return
. unSBV
. SBV.sWord64AsSDouble
. SBV $ y
k -> throwError . ErrorMessage
$ "FLOAT_CONV expecting Unsigned integral of 32 or 64 bit width, got"
<> show k
Pil.FLOAT_TO_INT x -> do
k <- getRetKind
y <- solveExprRec $ x ^. #src
guardFloat y
case k of
(KBounded False 64) -> unSBV <$> (f y :: Solver SBV.SWord64)
(KBounded False 32) -> unSBV <$> (f y :: Solver SBV.SWord32)
(KBounded False 16) -> unSBV <$> (f y :: Solver SBV.SWord16)
(KBounded False 8) -> unSBV <$> (f y :: Solver SBV.SWord8)
(KBounded True 64) -> unSBV <$> (f y :: Solver SBV.SInt64)
(KBounded True 32) -> unSBV <$> (f y :: Solver SBV.SInt32)
(KBounded True 16) -> unSBV <$> (f y :: Solver SBV.SInt16)
(KBounded True 8) -> unSBV <$> (f y :: Solver SBV.SInt8)
_ -> throwError . ErrorMessage
$ "FLOAT_TO_INT: unsupported return type: " <> show k
where
f :: forall a. (SBV.IEEEFloatConvertible a) => SVal -> Solver (SBV a)
f = return . SBV.fromSDouble SBV.sRoundNearestTiesToAway . SBV
Pil.FLOOR x -> floatUnOp x $ SBV.fpRoundToIntegral SBV.sRoundTowardNegative
Pil.FMUL x -> floatBinOp x $ SBV.fpMul SBV.sRoundNearestTiesToAway
Pil.FNEG x -> floatUnOp x SBV.fpNeg
Pil.FSQRT x -> floatUnOp x $ SBV.fpSqrt SBV.sRoundNearestTiesToAway
Pil.FTRUNC x -> floatUnOp x $ SBV.fpRoundToIntegral SBV.sRoundTowardZero
Pil.FSUB x -> floatBinOp x $ SBV.fpSub SBV.sRoundNearestTiesToAway
Pil.IMPORT x -> return
. svInteger (KBounded False $ fromIntegral sz)
. fromIntegral
$ x ^. #constant
Pil.INT_TO_FLOAT x -> do
y <- solveExprRec $ x ^. #src
let f :: forall a. SBV.IEEEFloatConvertible a => SBV a -> Solver SVal
f = return . unSBV . SBV.toSDouble SBV.sRoundNearestTiesToAway
case kindOf y of
(KBounded True 8) -> f (SBV y :: SBV.SInt8)
(KBounded True 16) -> f (SBV y :: SBV.SInt16)
(KBounded True 32) -> f (SBV y :: SBV.SInt32)
(KBounded True 64) -> f (SBV y :: SBV.SInt64)
(KBounded False 8) -> f (SBV y :: SBV.SWord8)
(KBounded False 16) -> f (SBV y :: SBV.SWord16)
(KBounded False 32) -> f (SBV y :: SBV.SWord32)
(KBounded False 64) -> f (SBV y :: SBV.SWord64)
k -> throwError . ErrorMessage
$ "INT_TO_FLOAT: unsupported return type: " <> show k
Pil.LOAD x -> do
s <- use #stores
let key = dstToExpr $ x ^. #src
maybe (createFreeVar key) return $ HashMap.lookup key s >>= headMay
where
createFreeVar k = do
freeVar <- fallbackAsFreeVar
#stores %= HashMap.insert k [freeVar]
return freeVar
Pil.LOW_PART x -> integralUnOpM x $ lowPart sz
Pil.LSL x -> integralBinOpUnrelatedArgs x svShiftLeft
Pil.LSR x -> integralBinOpUnrelatedArgs x svShiftRight
Pil.MODS x -> integralBinOpMatchSecondArgToFirst x svRem
Pil.MODS_DP x -> divOrModDP True x svRem
Pil.MODU x -> integralBinOpMatchSecondArgToFirst x svRem
Pil.MODU_DP x -> divOrModDP False x svRem
Pil.MUL x -> integralBinOpMatchSecondArgToFirst x svTimes
Pil.MULS_DP x -> mulDP True x
Pil.MULU_DP x -> mulDP False x
Pil.NEG x -> integralUnOp x svUNeg
Pil.NOT x -> do
y <- solveExprRec $ x ^. #src
let k = kindOf y
case k of
KBool -> return $ unSBV . SBV.sNot . toSBool' $ y
(KBounded _ _) -> return $ svNot y
_ -> throwError . ErrorMessage $ "NOT expecting Bool or Integral, got " <> show k
Pil.OR x -> integralBinOpMatchSecondArgToFirst x svOr
Pil.POPCNT x ->
integralUnOpM x $ \bv -> do
case liftSFiniteBits' sPopCount bv of
Just res -> pure res
Nothing -> throwError . ErrorMessage $ "Unsupported POPCNT operand size: " <> show (intSizeOf bv)
Pil.RLC x -> rotateBinOpWithCarry x rotateLeftWithCarry
Pil.ROL x -> integralBinOpUnrelatedArgs x svRotateLeft
Pil.ROR x -> integralBinOpUnrelatedArgs x svRotateRight
Pil.ROUND_TO_INT _ -> unhandled "ROUND_TO_INT"
Pil.RRC x -> rotateBinOpWithCarry x rotateRightWithCarry
Pil.SBB x -> integralBinOpWithCarry x $ \a b c -> (a `svMinus` b) `svMinus` c
Pil.MemCmp _ -> unhandled "MemCmp"
Pil.StrCmp _ -> unhandled "StrCmp"
Pil.StrNCmp _ -> unhandled "StrNCmp"
Pil.STACK_LOCAL_ADDR _ -> unhandled "STACK_LOCAL_ADDR"
Pil.FIELD_ADDR _ -> unhandled "FIELD_ADDR"
Pil.SUB x -> integralBinOpMatchSecondArgToFirst x svMinus
Pil.SUB_WILL_OVERFLOW x ->
integralBinOpMatchSecondArgToFirst x $ \a b -> unSBV $ uncurry (.||) $ bvSubO (svSign a) (svSign b)
Pil.SX x -> bitVectorUnOp x (signExtend sz)
Pil.TEST_BIT x -> integralBinOpUnrelatedArgs x $ \a b -> case kindOf a of
KBounded _ w -> (a `svAnd` svExp (constWord (Bits w') 2) b)
`svGreaterThan` constWord (Bits w') 0
where
w' = fromIntegral w
_ -> svFalse
Pil.UNIMPL _ -> throwError . ErrorMessage $ "UNIMPL"
Pil.UNIT -> unhandled "UNIT"
Pil.UPDATE_VAR x -> do
dest <- lookupVarSym $ x ^. #dest
src <- solveExprRec $ x ^. #src
guardIntegral dest
guardIntegral src
TODO : the above TODO might already happen in updateBitVec . find out .
updateBitVec (toBitOffset $ x ^. #offset) src dest
Pil.VAR x -> lookupVarSym $ x ^. #src
TODO : add test
Pil.VAR_FIELD x -> do
v <- lookupVarSym $ x ^. #src
safeExtract (off + w - 1) off v
where
off = fromIntegral . toBitOffset $ x ^. #offset
w = fromIntegral sz
Pil.VAR_JOIN x -> do
low <- lookupVarSym $ x ^. #low
high <- lookupVarSym $ x ^. #high
guardIntegral low
guardIntegral high
return $ svJoin high low
Pil.VAR_PHI _ -> unhandled "VAR_PHI"
Pil.XOR x -> integralBinOpUnrelatedArgs x svXOr
Pil.ZX x -> bitVectorUnOp x (zeroExtend sz)
where
| Throws an error that says exactly which ' ExprOp ' constructor is unhandled
unhandled opName = throwError . ErrorMessage $ "unhandled PIL op: " <> opName
fallbackAsFreeVar :: Solver SVal
fallbackAsFreeVar = case mdst of
Nothing -> throwError . ExprError xsym . ErrorMessage $ "missing DeepSymType"
Just dst -> catchError (makeSymVarOfType Nothing dst) $ \e ->
throwError $ ExprError xsym e
getDst :: Solver DeepSymType
getDst = maybe e return mdst
where e = throwError . ErrorMessage $ "missing DeepSymType"
getRetKind = getDst >>= deepSymTypeToKind
catchFallbackAndWarn :: Solver SVal -> Solver SVal
catchFallbackAndWarn m = catchError m $ \e -> do
si <- use #currentStmtIndex
warn $ StmtError si e
fallbackAsFreeVar
binOpEqArgsReturnsBool :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression)
=> x -> (SVal -> SVal -> SVal) -> Solver SVal
binOpEqArgsReturnsBool x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
guardSameKind lx rx
return $ f lx rx
| does n't match second arg to first
integralBinOpUnrelatedArgs :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression)
=> x -> (SVal -> SVal -> SVal) -> Solver SVal
integralBinOpUnrelatedArgs x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
guardIntegral lx
guardIntegral rx
return $ f lx rx
| assumes first arg width > = second arg width
matches second args sign and width to equal first
integralBinOpMatchSecondArgToFirst
:: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression)
=> x -> (SVal -> SVal -> SVal) -> Solver SVal
integralBinOpMatchSecondArgToFirst x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
guardIntegralFirstWidthNotSmaller lx rx
let rx' = matchSign lx (matchBoundedWidth lx rx)
return $ f lx rx'
HLINT ignore " Reduce duplication "
floatBinOp :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> x
-> (SBV.SDouble -> SBV.SDouble -> SBV.SDouble) -> Solver SVal
floatBinOp x f = do
lx <- toSFloat =<< solveExprRec (x ^. #left)
rx <- toSFloat =<< solveExprRec (x ^. #right)
return . unSBV $ f lx rx
floatBinOpReturnsBool :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> x
-> (SBV.SDouble -> SBV.SDouble -> SBool) -> Solver SVal
floatBinOpReturnsBool x f = do
lx <- toSFloat =<< solveExprRec (x ^. #left)
rx <- toSFloat =<< solveExprRec (x ^. #right)
return . unSBV $ f lx rx
bitVectorUnOp :: HasField' "src" x DSTExpression
=> x
-> (SVal -> SVal) -> Solver SVal
bitVectorUnOp = integralUnOp
integralUnOp :: HasField' "src" x DSTExpression
=> x
-> (SVal -> SVal) -> Solver SVal
integralUnOp x f = integralUnOpM x (return . f)
integralUnOpM :: HasField' "src" x DSTExpression
=> x
-> (SVal -> Solver SVal) -> Solver SVal
integralUnOpM x f = do
lx <- solveExprRec (x ^. #src)
guardIntegral lx
f lx
floatUnOp :: HasField' "src" x DSTExpression
=> x
-> (SBV.SDouble -> SBV.SDouble) -> Solver SVal
floatUnOp x f = do
lx <- solveExprRec (x ^. #src)
unSBV . f <$> toSFloat lx
mulDP :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> Bool
-> x
-> Solver SVal
mulDP signedness x = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
let retKind = KBounded signedness $ fromIntegral sz
guardIntegralFirstWidthNotSmaller lx rx
guardIntegralFirstWidthNotSmaller retKind lx
let lx' = matchIntegral retKind lx
rx' = matchIntegral lx' rx
return $ svTimes lx' rx'
| first arg is double width of second and return arg
so we have to increase width of second , then shrink result by half
divOrModDP :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression )
=> Bool
-> x
-> (SVal -> SVal -> SVal) -> Solver SVal
divOrModDP signedness x f = do
lx <- solveExprRec (x ^. #left)
rx <- solveExprRec (x ^. #right)
let retKind = KBounded signedness $ fromIntegral sz
guardIntegralFirstWidthNotSmaller lx rx
let rx' = matchIntegral lx rx
res = f lx rx'
return res'
integralBinOpWithCarry :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression
, HasField' "carry" x DSTExpression)
=> x
-> (SVal -> SVal -> SVal -> SVal) -> Solver SVal
integralBinOpWithCarry x f = do
a <- solveExprRec (x ^. #left)
b <- solveExprRec (x ^. #right)
c <- solveExprRec (x ^. #carry)
guardIntegralFirstWidthNotSmaller a b
cAsInt <- boolToInt (kindOf a) c
let b' = matchIntegral a b
return $ f a b' cAsInt
rotateBinOpWithCarry :: ( HasField' "left" x DSTExpression
, HasField' "right" x DSTExpression
, HasField' "carry" x DSTExpression)
=> x
-> (SVal -> SVal -> SVal -> SVal) -> Solver SVal
rotateBinOpWithCarry x f = do
a <- solveExprRec (x ^. #left)
b <- solveExprRec (x ^. #right)
c <- solveExprRec (x ^. #carry)
guardIntegral a
guardIntegral b
guardBool c
cAsInt <- boolToInt (KBounded False 1) c
return $ runAsUnsigned (\y -> f y b cAsInt) a
-- | Solves a list of already type-checked, indexed statements with the given
-- SMT config, returning either a solver error or a report of the result plus
-- any accumulated warnings.
solveTypedStmtsWith :: SMTConfig
                    -> HashMap PilVar DeepSymType
                    -> [(Int, Statement (Ch.InfoExpression (Ch.SymInfo, Maybe DeepSymType)))]
                    -> IO (Either SolverError SolverReport)
solveTypedStmtsWith solverCfg vartypes stmts = do
  er <- runSolverWith solverCfg run ( emptyState
                                    , SolverCtx vartypes stubbedFunctionConstraintGen True AbortOnError
                                    )
  return $ toSolverReport <$> er
  where
    toSolverReport :: (SolverResult, SolverState) -> SolverReport
    toSolverReport (r, s) = SolverReport r (s ^. #errors)
    run = do
      declarePilVars
      mapM_ f stmts
      querySolverResult
    -- tracks the statement index so errors and warnings can reference it
    f (ix, stmt) = do
      #currentStmtIndex .= ix
      solveStmt stmt
| runs type checker first , then solver
solveStmtsWith :: SMTConfig
-> [Statement Expression]
-> IO (Either
(Either
Ch.ConstraintGenError
(SolverError, Ch.TypeReport))
(SolverReport, Ch.TypeReport))
solveStmtsWith solverCfg stmts = do
let er = Ch.checkStmts stmts
case er of
Left e -> return $ Left (Left e)
Right tr -> solveTypedStmtsWith solverCfg (tr ^. #varSymTypeMap) (tr ^. #symTypedStmts) >>= \case
Left e -> return $ Left (Right (e, tr))
Right sr -> return $ Right (sr, tr)
any errors in Type Checker or Solver result in Unk
solveStmtsWith_ :: SMTConfig
-> [Statement Expression]
-> IO SolverResult
solveStmtsWith_ solverCfg stmts = solveStmtsWith solverCfg stmts >>= \case
Left _ -> return Unk
Right (r, _) -> return $ r ^. #result
|
aff575a002a2738ea3526111f3ef9dffff9094b7732ec7a97bc93161e19eea3c | fragnix/fragnix | Network.Wai.Handler.Warp.IORef.hs | # LANGUAGE Haskell98 #
# LINE 1 " Network / Wai / Handler / Warp / IORef.hs " #
# LANGUAGE CPP #
module Network.Wai.Handler.Warp.IORef (
module Data.IORef
) where
import Data.IORef
| null | https://raw.githubusercontent.com/fragnix/fragnix/b9969e9c6366e2917a782f3ac4e77cce0835448b/tests/packages/application/Network.Wai.Handler.Warp.IORef.hs | haskell | # LANGUAGE Haskell98 #
# LINE 1 " Network / Wai / Handler / Warp / IORef.hs " #
# LANGUAGE CPP #
module Network.Wai.Handler.Warp.IORef (
module Data.IORef
) where
import Data.IORef
| |
d89e20c5b9db4ad4f0000aa202e7d5d39c67c8e017c932955f807e6fa28cac91 | lambdacube3d/lambdacube-edsl | texturedCube.hs | # LANGUAGE OverloadedStrings , , TypeOperators , DataKinds , FlexibleContexts , GADTs #
import qualified Graphics.UI.GLFW as GLFW
import Control.Monad
import Data.Vect
import qualified Data.Trie as T
import qualified Data.Vector.Storable as SV
import LambdaCube.GL
import LambdaCube.GL.Mesh
import Common.Utils
import Common.GraphicsUtils
import Codec.Image.STB hiding (Image)
import FX
Our vertices . Tree consecutive floats give a 3D vertex ; Three consecutive vertices give a triangle .
A cube has 6 faces with 2 triangles each , so this makes 6 * 2=12 triangles , and 12 * 3 vertices
g_vertex_buffer_data =
[ ( 1.0, 1.0,-1.0)
, ( 1.0,-1.0,-1.0)
, (-1.0,-1.0,-1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0,-1.0,-1.0)
, (-1.0, 1.0,-1.0)
, ( 1.0, 1.0,-1.0)
, ( 1.0, 1.0, 1.0)
, ( 1.0,-1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, ( 1.0,-1.0, 1.0)
, ( 1.0,-1.0,-1.0)
, ( 1.0, 1.0, 1.0)
, (-1.0,-1.0, 1.0)
, ( 1.0,-1.0, 1.0)
, ( 1.0, 1.0, 1.0)
, (-1.0, 1.0, 1.0)
, (-1.0,-1.0, 1.0)
, (-1.0, 1.0, 1.0)
, (-1.0,-1.0,-1.0)
, (-1.0,-1.0, 1.0)
, (-1.0, 1.0, 1.0)
, (-1.0, 1.0,-1.0)
, (-1.0,-1.0,-1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0, 1.0,-1.0)
, (-1.0, 1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0, 1.0, 1.0)
, ( 1.0, 1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, ( 1.0, 1.0, 1.0)
, (-1.0, 1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0, 1.0, 1.0)
, (-1.0, 1.0,-1.0)
]
Two UV coordinatesfor each vertex . They were created with Blender .
g_uv_buffer_data =
[ (0.0, 0.0)
, (0.0, 1.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (1.0, 0.0)
, (0.0, 0.0)
, (1.0, 0.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (0.0, 1.0)
, (1.0, 0.0)
, (0.0, 1.0)
, (1.0, 1.0)
, (1.0, 0.0)
, (0.0, 0.0)
, (0.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (0.0, 1.0)
, (0.0, 0.0)
, (1.0, 0.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 0.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (0.0, 1.0)
, (0.0, 0.0)
, (0.0, 1.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (1.0, 0.0)
]
myCube :: Mesh
myCube = Mesh
{ mAttributes = T.fromList
[ ("vertexPosition_modelspace", A_V3F $ SV.fromList [V3 x y z | (x,y,z) <- g_vertex_buffer_data])
, ("vertexUV", A_V2F $ SV.fromList [V2 u v | (u,v) <- g_uv_buffer_data])
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
texturing :: Exp Obj (Texture Tex2D SingleTex (Regular Float) RGBA) -> Exp Obj (VertexStream Triangle (V3F,V2F)) -> Exp Obj (FrameBuffer 1 (Float,V4F))
texturing tex objs = Accumulate fragmentCtx PassAll fragmentShader fragmentStream emptyFB
where
rasterCtx :: RasterContext Triangle
rasterCtx = TriangleCtx (CullNone) PolygonFill NoOffset LastVertex
fragmentCtx :: AccumulationContext (Depth Float :+: (Color (V4 Float) :+: ZZ))
fragmentCtx = AccumulationContext Nothing $ DepthOp Less True:.ColorOp NoBlending (one' :: V4B):.ZT
emptyFB :: Exp Obj (FrameBuffer 1 (Float,V4F))
emptyFB = FrameBuffer (DepthImage n1 1000:.ColorImage n1 (V4 0 0 0.4 1):.ZT)
fragmentStream :: Exp Obj (FragmentStream 1 V2F)
fragmentStream = Rasterize rasterCtx primitiveStream
primitiveStream :: Exp Obj (PrimitiveStream Triangle () 1 V V2F)
primitiveStream = Transform vertexShader objs
modelViewProj :: Exp V M44F
modelViewProj = Uni (IM44F "MVP")
vertexShader :: Exp V (V3F,V2F) -> VertexOut () V2F
vertexShader puv = VertexOut v4 (Const 1) ZT (Smooth uv:.ZT)
where
v4 :: Exp V V4F
v4 = modelViewProj @*. v3v4 p
(p,uv) = untup2 puv
fragmentShader :: Exp F V2F -> FragmentOut (Depth Float :+: Color V4F :+: ZZ)
fragmentShader uv = FragmentOutRastDepth $ color tex uv :. ZT
color t uv = texture' (smp t) uv
smp t = Sampler LinearFilter ClampToEdge t
main :: IO ()
main = do
(win,windowSize) <- initWindow "LambdaCube 3D Textured Cube" 1024 768
let keyIsPressed k = fmap (==KeyState'Pressed) $ getKey win k
let texture = TextureSlot "myTextureSampler" $ Texture2D (Float RGBA) n1
frameImage :: Exp Obj (Image 1 V4F)
frameImage = PrjFrameBuffer "" tix0 $ texturing texture (Fetch "stream" Triangles (IV3F "vertexPosition_modelspace", IV2F "vertexUV"))
fx img = PrjFrameBuffer "" tix0 $ texturing (imgToTex $ postProcess $ img) (Fetch "stream" Triangles (IV3F "vertexPosition_modelspace", IV2F "vertexUV"))
imgToTex img = Texture (Texture2D (Float RGBA) n1) (V2 512 512) NoMip [img]
--renderer <- compileRenderer $ ScreenOut $ frameImage
--renderer <- compileRenderer $ ScreenOut $ blur gaussFilter9 $ frameImage
renderer <- compileRenderer $ ScreenOut $ iterate fx frameImage !! 4
initUtility renderer
let uniformMap = uniformSetter renderer
texture = uniformFTexture2D "myTextureSampler" uniformMap
mvp = uniformM44F "MVP" uniformMap
setWindowSize = setScreenSize renderer
setWindowSize 1024 768
Right img <- loadImage "hello.png" -- "uvtemplate.bmp"
texture =<< compileTexture2DRGBAF True False img
gpuCube <- compileMesh myCube
addMesh renderer "stream" gpuCube []
let = fromProjective ( lookat ( Vec3 4 0.5 ( -0.6 ) ) ( Vec3 0 0 0 ) ( Vec3 0 1 0 ) )
let cm = fromProjective (lookat (Vec3 3 1.3 0.3) (Vec3 0 0 0) (Vec3 0 1 0))
pm = perspective 0.1 100 (pi/4) (1024 / 768)
loop = do
Just t <- getTime
let angle = pi / 24 * realToFrac t
mm = fromProjective $ rotationEuler $ Vec3 angle 0 0
mvp $! mat4ToM44F $! mm .*. cm .*. pm
render renderer
swapBuffers win >> pollEvents
k <- keyIsPressed Key'Escape
unless k $ loop
loop
dispose renderer
destroyWindow win
terminate
vec4ToV4F :: Vec4 -> V4F
vec4ToV4F (Vec4 x y z w) = V4 x y z w
mat4ToM44F :: Mat4 -> M44F
mat4ToM44F (Mat4 a b c d) = V4 (vec4ToV4F a) (vec4ToV4F b) (vec4ToV4F c) (vec4ToV4F d)
| null | https://raw.githubusercontent.com/lambdacube3d/lambdacube-edsl/4347bb0ed344e71c0333136cf2e162aec5941df7/lambdacube-samples/texturedCube.hs | haskell | renderer <- compileRenderer $ ScreenOut $ frameImage
renderer <- compileRenderer $ ScreenOut $ blur gaussFilter9 $ frameImage
"uvtemplate.bmp" | # LANGUAGE OverloadedStrings , , TypeOperators , DataKinds , FlexibleContexts , GADTs #
import qualified Graphics.UI.GLFW as GLFW
import Control.Monad
import Data.Vect
import qualified Data.Trie as T
import qualified Data.Vector.Storable as SV
import LambdaCube.GL
import LambdaCube.GL.Mesh
import Common.Utils
import Common.GraphicsUtils
import Codec.Image.STB hiding (Image)
import FX
Our vertices . Tree consecutive floats give a 3D vertex ; Three consecutive vertices give a triangle .
A cube has 6 faces with 2 triangles each , so this makes 6 * 2=12 triangles , and 12 * 3 vertices
g_vertex_buffer_data =
[ ( 1.0, 1.0,-1.0)
, ( 1.0,-1.0,-1.0)
, (-1.0,-1.0,-1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0,-1.0,-1.0)
, (-1.0, 1.0,-1.0)
, ( 1.0, 1.0,-1.0)
, ( 1.0, 1.0, 1.0)
, ( 1.0,-1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, ( 1.0,-1.0, 1.0)
, ( 1.0,-1.0,-1.0)
, ( 1.0, 1.0, 1.0)
, (-1.0,-1.0, 1.0)
, ( 1.0,-1.0, 1.0)
, ( 1.0, 1.0, 1.0)
, (-1.0, 1.0, 1.0)
, (-1.0,-1.0, 1.0)
, (-1.0, 1.0, 1.0)
, (-1.0,-1.0,-1.0)
, (-1.0,-1.0, 1.0)
, (-1.0, 1.0, 1.0)
, (-1.0, 1.0,-1.0)
, (-1.0,-1.0,-1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0, 1.0,-1.0)
, (-1.0, 1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0, 1.0, 1.0)
, ( 1.0, 1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, ( 1.0, 1.0, 1.0)
, (-1.0, 1.0, 1.0)
, ( 1.0, 1.0,-1.0)
, (-1.0, 1.0, 1.0)
, (-1.0, 1.0,-1.0)
]
Two UV coordinatesfor each vertex . They were created with Blender .
g_uv_buffer_data =
[ (0.0, 0.0)
, (0.0, 1.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (1.0, 0.0)
, (0.0, 0.0)
, (1.0, 0.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (0.0, 1.0)
, (1.0, 0.0)
, (0.0, 1.0)
, (1.0, 1.0)
, (1.0, 0.0)
, (0.0, 0.0)
, (0.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (0.0, 1.0)
, (0.0, 0.0)
, (1.0, 0.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 0.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (0.0, 1.0)
, (0.0, 0.0)
, (0.0, 1.0)
, (1.0, 1.0)
, (0.0, 0.0)
, (1.0, 1.0)
, (1.0, 0.0)
]
myCube :: Mesh
myCube = Mesh
{ mAttributes = T.fromList
[ ("vertexPosition_modelspace", A_V3F $ SV.fromList [V3 x y z | (x,y,z) <- g_vertex_buffer_data])
, ("vertexUV", A_V2F $ SV.fromList [V2 u v | (u,v) <- g_uv_buffer_data])
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
texturing :: Exp Obj (Texture Tex2D SingleTex (Regular Float) RGBA) -> Exp Obj (VertexStream Triangle (V3F,V2F)) -> Exp Obj (FrameBuffer 1 (Float,V4F))
texturing tex objs = Accumulate fragmentCtx PassAll fragmentShader fragmentStream emptyFB
where
rasterCtx :: RasterContext Triangle
rasterCtx = TriangleCtx (CullNone) PolygonFill NoOffset LastVertex
fragmentCtx :: AccumulationContext (Depth Float :+: (Color (V4 Float) :+: ZZ))
fragmentCtx = AccumulationContext Nothing $ DepthOp Less True:.ColorOp NoBlending (one' :: V4B):.ZT
emptyFB :: Exp Obj (FrameBuffer 1 (Float,V4F))
emptyFB = FrameBuffer (DepthImage n1 1000:.ColorImage n1 (V4 0 0 0.4 1):.ZT)
fragmentStream :: Exp Obj (FragmentStream 1 V2F)
fragmentStream = Rasterize rasterCtx primitiveStream
primitiveStream :: Exp Obj (PrimitiveStream Triangle () 1 V V2F)
primitiveStream = Transform vertexShader objs
modelViewProj :: Exp V M44F
modelViewProj = Uni (IM44F "MVP")
vertexShader :: Exp V (V3F,V2F) -> VertexOut () V2F
vertexShader puv = VertexOut v4 (Const 1) ZT (Smooth uv:.ZT)
where
v4 :: Exp V V4F
v4 = modelViewProj @*. v3v4 p
(p,uv) = untup2 puv
fragmentShader :: Exp F V2F -> FragmentOut (Depth Float :+: Color V4F :+: ZZ)
fragmentShader uv = FragmentOutRastDepth $ color tex uv :. ZT
color t uv = texture' (smp t) uv
smp t = Sampler LinearFilter ClampToEdge t
main :: IO ()
main = do
(win,windowSize) <- initWindow "LambdaCube 3D Textured Cube" 1024 768
let keyIsPressed k = fmap (==KeyState'Pressed) $ getKey win k
let texture = TextureSlot "myTextureSampler" $ Texture2D (Float RGBA) n1
frameImage :: Exp Obj (Image 1 V4F)
frameImage = PrjFrameBuffer "" tix0 $ texturing texture (Fetch "stream" Triangles (IV3F "vertexPosition_modelspace", IV2F "vertexUV"))
fx img = PrjFrameBuffer "" tix0 $ texturing (imgToTex $ postProcess $ img) (Fetch "stream" Triangles (IV3F "vertexPosition_modelspace", IV2F "vertexUV"))
imgToTex img = Texture (Texture2D (Float RGBA) n1) (V2 512 512) NoMip [img]
renderer <- compileRenderer $ ScreenOut $ iterate fx frameImage !! 4
initUtility renderer
let uniformMap = uniformSetter renderer
texture = uniformFTexture2D "myTextureSampler" uniformMap
mvp = uniformM44F "MVP" uniformMap
setWindowSize = setScreenSize renderer
setWindowSize 1024 768
texture =<< compileTexture2DRGBAF True False img
gpuCube <- compileMesh myCube
addMesh renderer "stream" gpuCube []
let = fromProjective ( lookat ( Vec3 4 0.5 ( -0.6 ) ) ( Vec3 0 0 0 ) ( Vec3 0 1 0 ) )
let cm = fromProjective (lookat (Vec3 3 1.3 0.3) (Vec3 0 0 0) (Vec3 0 1 0))
pm = perspective 0.1 100 (pi/4) (1024 / 768)
loop = do
Just t <- getTime
let angle = pi / 24 * realToFrac t
mm = fromProjective $ rotationEuler $ Vec3 angle 0 0
mvp $! mat4ToM44F $! mm .*. cm .*. pm
render renderer
swapBuffers win >> pollEvents
k <- keyIsPressed Key'Escape
unless k $ loop
loop
dispose renderer
destroyWindow win
terminate
vec4ToV4F :: Vec4 -> V4F
vec4ToV4F (Vec4 x y z w) = V4 x y z w
mat4ToM44F :: Mat4 -> M44F
mat4ToM44F (Mat4 a b c d) = V4 (vec4ToV4F a) (vec4ToV4F b) (vec4ToV4F c) (vec4ToV4F d)
|
ab02f140ef3ec967fc5edec93ec9cd7690a6945646c0b36f4fa7883ccce132de | GlideAngle/flare-timing | StopTestMain.hs | module Main (main) where
import Test.Tasty (TestTree, testGroup, defaultMain)
import Test.Tasty.SmallCheck as SC
import Test.Tasty.QuickCheck as QC
import Stopped
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests =
testGroup
"Tests"
[ units
, properties
]
properties :: TestTree
properties = testGroup "Properties" [scProps, qcProps]
units :: TestTree
units =
testGroup
"Stopped Task Units"
[ stoppedTimeUnits
, stoppedScoreUnits
, scoreTimeWindowUnits
, applyGlideUnits
]
scProps :: TestTree
scProps = testGroup "(checked by SmallCheck)"
[ SC.testProperty "Stop task time from announced time, Hg" stopTaskTimeHg
, SC.testProperty "Stop task time from announced time, Pg" stopTaskTimePg
, SC.testProperty "Can score a stopped task, Hg" canScoreStoppedHg
, SC.testProperty "Can score a stopped task, Pg" canScoreStoppedPg
, SC.testProperty "Score time window is in the range [0, stop time]" scoreTimeWindow
, SC.testProperty "Stopped track has glide distance bonus" applyGlide
]
qcProps :: TestTree
qcProps = testGroup "(checked by QuickCheck)"
[ QC.testProperty "Stop task time from announced time, Hg" stopTaskTimeHg
, QC.testProperty "Stop task time from announced time, Pg" stopTaskTimePg
, QC.testProperty "Can score a stopped task, Hg" canScoreStoppedHg
, QC.testProperty "Can score a stopped task, Pg" canScoreStoppedPg
, QC.testProperty "Score time window is in the range [0, stop time]" scoreTimeWindow
, QC.testProperty "Stopped track has glide distance bonus" applyGlide
]
| null | https://raw.githubusercontent.com/GlideAngle/flare-timing/27bd34c1943496987382091441a1c2516c169263/lang-haskell/gap-stop/test-suite-stop/StopTestMain.hs | haskell | module Main (main) where
import Test.Tasty (TestTree, testGroup, defaultMain)
import Test.Tasty.SmallCheck as SC
import Test.Tasty.QuickCheck as QC
import Stopped
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests =
testGroup
"Tests"
[ units
, properties
]
properties :: TestTree
properties = testGroup "Properties" [scProps, qcProps]
units :: TestTree
units =
testGroup
"Stopped Task Units"
[ stoppedTimeUnits
, stoppedScoreUnits
, scoreTimeWindowUnits
, applyGlideUnits
]
scProps :: TestTree
scProps = testGroup "(checked by SmallCheck)"
[ SC.testProperty "Stop task time from announced time, Hg" stopTaskTimeHg
, SC.testProperty "Stop task time from announced time, Pg" stopTaskTimePg
, SC.testProperty "Can score a stopped task, Hg" canScoreStoppedHg
, SC.testProperty "Can score a stopped task, Pg" canScoreStoppedPg
, SC.testProperty "Score time window is in the range [0, stop time]" scoreTimeWindow
, SC.testProperty "Stopped track has glide distance bonus" applyGlide
]
qcProps :: TestTree
qcProps = testGroup "(checked by QuickCheck)"
[ QC.testProperty "Stop task time from announced time, Hg" stopTaskTimeHg
, QC.testProperty "Stop task time from announced time, Pg" stopTaskTimePg
, QC.testProperty "Can score a stopped task, Hg" canScoreStoppedHg
, QC.testProperty "Can score a stopped task, Pg" canScoreStoppedPg
, QC.testProperty "Score time window is in the range [0, stop time]" scoreTimeWindow
, QC.testProperty "Stopped track has glide distance bonus" applyGlide
]
| |
7528c20913a261f4be5fbfee48b62f4b33cbe55ee0cb3017b088d9ffd8576cf4 | kazu-yamamoto/http2 | RingOfQueuesSTMSpec.hs | {-# LANGUAGE BangPatterns #-}
module RingOfQueuesSTMSpec where
import Control.Concurrent.STM
import Data.IORef (readIORef)
import Data.List (group, sort)
import Test.Hspec
import qualified RingOfQueuesSTM as P
spec :: Spec
spec = do
describe "base priority queue" $ do
it "queues entries based on weight" $ do
q <- atomically P.new
let e1 = P.newEntry 1 201
atomically $ P.enqueue e1 q
let e2 = P.newEntry 3 101
atomically $ P.enqueue e2 q
let e3 = P.newEntry 5 1
atomically $ P.enqueue e3 q
xs <- enqdeq q 1000
map length (group (sort xs)) `shouldBe` [663,334,3]
enqdeq :: P.PriorityQueue Int -> Int -> IO [Int]
enqdeq pq num = loop pq num []
where
loop _ 0 vs = return vs
loop !q !n vs = do
ent <- atomically $ P.dequeue q
atomically $ P.enqueue ent q
let !v = P.item ent
loop q (n - 1) (v:vs)
| null | https://raw.githubusercontent.com/kazu-yamamoto/http2/3c29763be147a3d482eff28f427ad80f1d4df706/bench-priority/test/RingOfQueuesSTMSpec.hs | haskell | # LANGUAGE BangPatterns # |
module RingOfQueuesSTMSpec where
import Control.Concurrent.STM
import Data.IORef (readIORef)
import Data.List (group, sort)
import Test.Hspec
import qualified RingOfQueuesSTM as P
spec :: Spec
spec = do
describe "base priority queue" $ do
it "queues entries based on weight" $ do
q <- atomically P.new
let e1 = P.newEntry 1 201
atomically $ P.enqueue e1 q
let e2 = P.newEntry 3 101
atomically $ P.enqueue e2 q
let e3 = P.newEntry 5 1
atomically $ P.enqueue e3 q
xs <- enqdeq q 1000
map length (group (sort xs)) `shouldBe` [663,334,3]
enqdeq :: P.PriorityQueue Int -> Int -> IO [Int]
enqdeq pq num = loop pq num []
where
loop _ 0 vs = return vs
loop !q !n vs = do
ent <- atomically $ P.dequeue q
atomically $ P.enqueue ent q
let !v = P.item ent
loop q (n - 1) (v:vs)
|
1bb706c03610c7710417bbea7617c3dcd8d067efb8e1bfa8dba261f1805c61bf | swtwsk/vinci-lang | CleanControlFlow.hs | # LANGUAGE LambdaCase #
module SSA.Optimizations.CleanControlFlow (
cleanControlFlow,
countPrecedessors,
postOrder
) where
import Control.Monad.Reader
import Control.Monad.State
import Data.Bifunctor (bimap, first)
import qualified Data.Map as Map
import SSA.AST
import SSA.LabelGraph
type CleanM = State (Edges, BlocksMap)
cleanControlFlow :: Edges -> BlocksMap -> (Edges, BlocksMap)
cleanControlFlow edges blockMap = execState clean (edges, blockMap)
clean :: CleanM ()
clean = do
order <- gets (postOrder . fst)
changed <- or <$> mapM cleanPass order
when changed clean
cleanPass :: SLabel -> CleanM Bool
cleanPass label = do
edge <- gets ((Map.! label) . fst)
case edge of
BranchEdge l1 l2 -> do
let sameLabels = l1 == l2
when sameLabels (redundantElimination label l1)
return sameLabels
JumpEdge target -> jumpElimination label target
NoEdge -> return False
redundantElimination :: SLabel -> SLabel -> CleanM ()
redundantElimination label target = do
(_, blocksMap) <- get
let edgesFn = Map.insert label (JumpEdge target)
(SLabelled l' phis (SBlock block)) = blocksMap Map.! label
block' = changeLast block
blocksFn = Map.insert label (SLabelled l' phis (SBlock block'))
modify $ bimap edgesFn blocksFn
where
changeLast :: [SStmt] -> [SStmt]
changeLast [_] = [SGoto target]
changeLast (h:t) = h:changeLast t
changeLast [] = []
jumpElimination :: SLabel -> SLabel -> CleanM Bool
jumpElimination label target = do
(edges, blocksMap) <- get
let (SLabelled _ phis (SBlock block)) = blocksMap Map.! label
isEmpty = null phis && block == [SGoto target]
let (SLabelled _ tPhis (SBlock tBlock)) = blocksMap Map.! target
isTargetEmptyBranching = null tPhis &&
case tBlock of { [SIf {}] -> True; _ -> False }
hasOnePrecedessor = null tPhis && (countPrecedessors edges target == 1)
when isEmpty $ emptyElimination label target
when (not isEmpty && hasOnePrecedessor) $ mergeBlocks label target
when (not isEmpty && not hasOnePrecedessor && isTargetEmptyBranching) $
hoistBlocks label target
return $ isEmpty || hasOnePrecedessor || isTargetEmptyBranching
emptyElimination :: SLabel -> SLabel -> CleanM ()
emptyElimination label target = do
edges <- gets fst
let edgesFn = Map.map $ \case
b@(BranchEdge l1 l2) ->
if l1 == label then BranchEdge target l2 else
if l2 == label then BranchEdge l1 label else
b
j@(JumpEdge t) -> if t == label then JumpEdge target else j
NoEdge -> NoEdge
blocksMapFn = Map.map $ \(SLabelled l phis (SBlock b)) ->
SLabelled l phis (SBlock $ changeLast b)
labelPrecedessors = getPrecedessors edges label
blocksMapFn' = flip Map.adjust target $ \(SLabelled l phis b) ->
let phisFns = map (changePhi label) labelPrecedessors
phis' = map (flip (foldr id) phisFns) phis in
SLabelled l phis' b
modify $ bimap (Map.delete label . edgesFn) (Map.delete label . blocksMapFn' . blocksMapFn)
where
changeLast :: [SStmt] -> [SStmt]
changeLast [SGoto t] = [if t == label then SGoto target else SGoto t]
changeLast [SIf sf e l1 l2] = (: []) $ let sif = SIf sf e in
if l1 == label then sif target l2 else
if l2 == label then sif l1 label else
sif l1 l2
changeLast (h:t) = h:changeLast t
changeLast [] = []
mergeBlocks :: SLabel -> SLabel -> CleanM ()
mergeBlocks label target = do
(edges, blocksMap) <- get
let targetEdge = edges Map.! target
edgesFn = Map.delete target . Map.insert label targetEdge
(SLabelled _ lPhis (SBlock lBlock)) = blocksMap Map.! label
(SLabelled _ tPhis (SBlock tBlock)) = blocksMap Map.! target
block' = SLabelled label (lPhis ++ tPhis) $ SBlock (merge lBlock tBlock)
fixPhisFn = Map.map $ \(SLabelled l phis b) ->
SLabelled l (changePhi target label <$> phis) b
blocksMapFn = fixPhisFn . Map.delete target . Map.insert label block'
modify $ bimap edgesFn blocksMapFn
where
merge :: [SStmt] -> [SStmt] -> [SStmt]
merge [SGoto _] stmts2 = stmts2
merge (h:t) stmts2 = h:merge t stmts2
merge [] stmts2 = stmts2
hoistBlocks :: SLabel -> SLabel -> CleanM ()
hoistBlocks label target = do
(edges, blocksMap) <- get
let targetEdge = edges Map.! target
edgesFn = Map.insert label targetEdge
(SLabelled _ lPhis (SBlock block)) = blocksMap Map.! label
(SLabelled _ tPhis (SBlock tBlock)) = blocksMap Map.! target
block' = changeLast block tBlock
fixPhisFn = Map.map $ \(SLabelled l phis b) ->
SLabelled l (changePhi target label <$> phis) b
blocksMapFn =
Map.insert label (SLabelled label (lPhis ++ tPhis) (SBlock block'))
modify $ bimap edgesFn (fixPhisFn . blocksMapFn)
where
changeLast :: [SStmt] -> [SStmt] -> [SStmt]
changeLast [_] tJump = tJump
changeLast (h:t) tJump = h:changeLast t tJump
changeLast [] _ = []
countPrecedessors :: Edges -> SLabel -> Int
countPrecedessors edges label = length $ getPrecedessors edges label
getPrecedessors :: Edges -> SLabel -> [SLabel]
getPrecedessors edges label =
let foldFn source edge acc = case edge of
BranchEdge l1 l2 ->
if (l1 == label) || (l2 == label) then source:acc else acc
JumpEdge t -> if t == label then source:acc else acc
NoEdge -> acc
in Map.foldrWithKey foldFn [] edges
changePhi :: SLabel -> SLabel -> SPhiNode -> SPhiNode
changePhi orig new (SPhiNode v blockVars) = SPhiNode v (changePhi' <$> blockVars)
where
changePhi' :: (SLabel, String) -> (SLabel, String)
changePhi' = first (\l -> if l == orig then new else l)
| null | https://raw.githubusercontent.com/swtwsk/vinci-lang/9c7e01953e0b1cf135af7188e0c71fe6195bdfa1/src/SSA/Optimizations/CleanControlFlow.hs | haskell | # LANGUAGE LambdaCase #
module SSA.Optimizations.CleanControlFlow (
cleanControlFlow,
countPrecedessors,
postOrder
) where
import Control.Monad.Reader
import Control.Monad.State
import Data.Bifunctor (bimap, first)
import qualified Data.Map as Map
import SSA.AST
import SSA.LabelGraph
type CleanM = State (Edges, BlocksMap)
cleanControlFlow :: Edges -> BlocksMap -> (Edges, BlocksMap)
cleanControlFlow edges blockMap = execState clean (edges, blockMap)
clean :: CleanM ()
clean = do
order <- gets (postOrder . fst)
changed <- or <$> mapM cleanPass order
when changed clean
cleanPass :: SLabel -> CleanM Bool
cleanPass label = do
edge <- gets ((Map.! label) . fst)
case edge of
BranchEdge l1 l2 -> do
let sameLabels = l1 == l2
when sameLabels (redundantElimination label l1)
return sameLabels
JumpEdge target -> jumpElimination label target
NoEdge -> return False
redundantElimination :: SLabel -> SLabel -> CleanM ()
redundantElimination label target = do
(_, blocksMap) <- get
let edgesFn = Map.insert label (JumpEdge target)
(SLabelled l' phis (SBlock block)) = blocksMap Map.! label
block' = changeLast block
blocksFn = Map.insert label (SLabelled l' phis (SBlock block'))
modify $ bimap edgesFn blocksFn
where
changeLast :: [SStmt] -> [SStmt]
changeLast [_] = [SGoto target]
changeLast (h:t) = h:changeLast t
changeLast [] = []
jumpElimination :: SLabel -> SLabel -> CleanM Bool
jumpElimination label target = do
(edges, blocksMap) <- get
let (SLabelled _ phis (SBlock block)) = blocksMap Map.! label
isEmpty = null phis && block == [SGoto target]
let (SLabelled _ tPhis (SBlock tBlock)) = blocksMap Map.! target
isTargetEmptyBranching = null tPhis &&
case tBlock of { [SIf {}] -> True; _ -> False }
hasOnePrecedessor = null tPhis && (countPrecedessors edges target == 1)
when isEmpty $ emptyElimination label target
when (not isEmpty && hasOnePrecedessor) $ mergeBlocks label target
when (not isEmpty && not hasOnePrecedessor && isTargetEmptyBranching) $
hoistBlocks label target
return $ isEmpty || hasOnePrecedessor || isTargetEmptyBranching
emptyElimination :: SLabel -> SLabel -> CleanM ()
emptyElimination label target = do
edges <- gets fst
let edgesFn = Map.map $ \case
b@(BranchEdge l1 l2) ->
if l1 == label then BranchEdge target l2 else
if l2 == label then BranchEdge l1 label else
b
j@(JumpEdge t) -> if t == label then JumpEdge target else j
NoEdge -> NoEdge
blocksMapFn = Map.map $ \(SLabelled l phis (SBlock b)) ->
SLabelled l phis (SBlock $ changeLast b)
labelPrecedessors = getPrecedessors edges label
blocksMapFn' = flip Map.adjust target $ \(SLabelled l phis b) ->
let phisFns = map (changePhi label) labelPrecedessors
phis' = map (flip (foldr id) phisFns) phis in
SLabelled l phis' b
modify $ bimap (Map.delete label . edgesFn) (Map.delete label . blocksMapFn' . blocksMapFn)
where
changeLast :: [SStmt] -> [SStmt]
changeLast [SGoto t] = [if t == label then SGoto target else SGoto t]
changeLast [SIf sf e l1 l2] = (: []) $ let sif = SIf sf e in
if l1 == label then sif target l2 else
if l2 == label then sif l1 label else
sif l1 l2
changeLast (h:t) = h:changeLast t
changeLast [] = []
mergeBlocks :: SLabel -> SLabel -> CleanM ()
mergeBlocks label target = do
(edges, blocksMap) <- get
let targetEdge = edges Map.! target
edgesFn = Map.delete target . Map.insert label targetEdge
(SLabelled _ lPhis (SBlock lBlock)) = blocksMap Map.! label
(SLabelled _ tPhis (SBlock tBlock)) = blocksMap Map.! target
block' = SLabelled label (lPhis ++ tPhis) $ SBlock (merge lBlock tBlock)
fixPhisFn = Map.map $ \(SLabelled l phis b) ->
SLabelled l (changePhi target label <$> phis) b
blocksMapFn = fixPhisFn . Map.delete target . Map.insert label block'
modify $ bimap edgesFn blocksMapFn
where
merge :: [SStmt] -> [SStmt] -> [SStmt]
merge [SGoto _] stmts2 = stmts2
merge (h:t) stmts2 = h:merge t stmts2
merge [] stmts2 = stmts2
hoistBlocks :: SLabel -> SLabel -> CleanM ()
hoistBlocks label target = do
(edges, blocksMap) <- get
let targetEdge = edges Map.! target
edgesFn = Map.insert label targetEdge
(SLabelled _ lPhis (SBlock block)) = blocksMap Map.! label
(SLabelled _ tPhis (SBlock tBlock)) = blocksMap Map.! target
block' = changeLast block tBlock
fixPhisFn = Map.map $ \(SLabelled l phis b) ->
SLabelled l (changePhi target label <$> phis) b
blocksMapFn =
Map.insert label (SLabelled label (lPhis ++ tPhis) (SBlock block'))
modify $ bimap edgesFn (fixPhisFn . blocksMapFn)
where
changeLast :: [SStmt] -> [SStmt] -> [SStmt]
changeLast [_] tJump = tJump
changeLast (h:t) tJump = h:changeLast t tJump
changeLast [] _ = []
countPrecedessors :: Edges -> SLabel -> Int
countPrecedessors edges label = length $ getPrecedessors edges label
getPrecedessors :: Edges -> SLabel -> [SLabel]
getPrecedessors edges label =
let foldFn source edge acc = case edge of
BranchEdge l1 l2 ->
if (l1 == label) || (l2 == label) then source:acc else acc
JumpEdge t -> if t == label then source:acc else acc
NoEdge -> acc
in Map.foldrWithKey foldFn [] edges
changePhi :: SLabel -> SLabel -> SPhiNode -> SPhiNode
changePhi orig new (SPhiNode v blockVars) = SPhiNode v (changePhi' <$> blockVars)
where
changePhi' :: (SLabel, String) -> (SLabel, String)
changePhi' = first (\l -> if l == orig then new else l)
| |
256c438930ace95fe6cf83a8849379c7cb1aac102e1f91af65a3cc4b427ccb60 | igorhvr/bedlam | fmt-pretty.scm | ;;;; fmt-pretty.scm -- pretty printing format combinator
;;
Copyright ( c ) 2006 - 2007 . All rights reserved .
;; BSD-style license:
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; additional settings
(define (fmt-shares st) (fmt-ref st 'shares))
(define (fmt-set-shares! st x) (fmt-set! st 'shares x))
(define (fmt-copy-shares st)
(fmt-set-shares! (copy-fmt-state st) (copy-shares (fmt-shares st))))
(define (copy-shares shares)
(let ((tab (make-eq?-table)))
(hash-table-walk
(car shares)
(lambda (obj x) (eq?-table-set! tab obj (cons (car x) (cdr x)))))
(cons tab (cdr shares))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; utilities
(define (fmt-shared-write obj proc)
(lambda (st)
(let* ((shares (fmt-shares st))
(cell (and shares (eq?-table-ref (car shares) obj))))
(if (pair? cell)
(cond
((cdr cell)
((fmt-writer st) (gen-shared-ref (car cell) "#") st))
(else
(set-car! cell (cdr shares))
(set-cdr! cell #t)
(set-cdr! shares (+ (cdr shares) 1))
(proc ((fmt-writer st) (gen-shared-ref (car cell) "=") st))))
(proc st)))))
(define (fmt-join/shares fmt ls . o)
(let ((sep (dsp (if (pair? o) (car o) " "))))
(lambda (st)
(if (null? ls)
st
(let* ((shares (fmt-shares st))
(tab (car shares))
(output (fmt-writer st)))
(let lp ((ls ls) (st st))
(let ((st ((fmt (car ls)) st))
(rest (cdr ls)))
(cond
((null? rest) st)
((pair? rest)
(call-with-shared-ref/cdr rest st shares
(lambda (st) (lp rest st))
sep))
(else ((fmt rest) (output ". " (sep st))))))))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; pretty printing
(define (non-app? x)
(if (pair? x)
(or (not (or (null? (cdr x)) (pair? (cdr x))))
(non-app? (car x)))
(not (symbol? x))))
(define syntax-abbrevs
'((quote . "'") (quasiquote . "`") (unquote . ",") (unquote-splicing . ",@")
))
(define (pp-let ls)
(if (and (pair? (cdr ls)) (symbol? (cadr ls)))
(pp-with-indent 2 ls)
(pp-with-indent 1 ls)))
(define indent-rules
`((lambda . 1) (define . 1)
(let . ,pp-let) (loop . ,pp-let)
(let* . 1) (letrec . 1) (letrec* . 1) (and-let* . 1) (let1 . 2)
(let-values . 1) (let*-values . 1) (receive . 2) (parameterize . 1)
(let-syntax . 1) (letrec-syntax . 1) (syntax-rules . 1) (syntax-case . 2)
(match . 1) (match-let . 1) (match-let* . 1)
(if . 3) (when . 1) (unless . 1) (case . 1) (while . 1) (until . 1)
(do . 2) (dotimes . 1) (dolist . 1) (test . 1)
(condition-case . 1) (guard . 1) (rec . 1)
(call-with-current-continuation . 0)
))
(define indent-prefix-rules
`(("with-" . -1) ("call-with-" . -1) ("define-" . 1))
)
(define indent-suffix-rules
`(("-case" . 1))
)
(define (pp-indentation form)
(let ((indent
(cond
((assq (car form) indent-rules) => cdr)
((and (symbol? (car form))
(let ((str (symbol->string (car form))))
(or (find (lambda (rx) (string-prefix? (car rx) str))
indent-prefix-rules)
(find (lambda (rx) (string-suffix? (car rx) str))
indent-suffix-rules))))
=> cdr)
(else #f))))
(if (and (number? indent) (negative? indent))
(max 0 (- (+ (length+ form) indent) 1))
indent)))
(define (pp-with-indent indent-rule ls)
(lambda (st)
(let* ((col1 (fmt-col st))
(st ((cat "(" (pp-object (car ls))) st))
(col2 (fmt-col st))
(fixed (take* (cdr ls) (or indent-rule 1)))
(tail (drop* (cdr ls) (or indent-rule 1)))
(st2 (fmt-copy-shares st))
(first-line
((fmt-to-string (cat " " (fmt-join/shares pp-flat fixed " "))) st2))
(default
(let ((sep (make-nl-space (+ col1 1))))
(cat sep (fmt-join/shares pp-object (cdr ls) sep) ")"))))
(cond
((< (+ col2 (string-length first-line)) (fmt-width st2))
fixed values on first line
(let ((sep (make-nl-space
(if indent-rule (+ col1 2) (+ col2 1)))))
((cat first-line
(cond
((not (or (null? tail) (pair? tail)))
(cat ". " (pp-object tail)))
((> (length+ (cdr ls)) (or indent-rule 1))
(cat sep (fmt-join/shares pp-object tail sep)))
(else
fmt-null))
")")
st2)))
(indent-rule ;;(and indent-rule (not (pair? (car ls))))
fixed values lined up , body indented two spaces
((fmt-try-fit
(lambda (st)
((cat
" "
(fmt-join/shares pp-object fixed (make-nl-space (+ col2 1)))
(if (pair? tail)
(let ((sep (make-nl-space (+ col1 2))))
(cat sep (fmt-join/shares pp-object tail sep)))
"")
")")
(fmt-copy-shares st)))
default)
st))
(else
;; all on separate lines
(default st))))))
(define (pp-app ls)
(let ((indent-rule (pp-indentation ls)))
(if (procedure? indent-rule)
(indent-rule ls)
(pp-with-indent indent-rule ls))))
;; the elements may be shared, just checking the top level list
;; structure
(define (proper-non-shared-list? ls shares)
(let ((tab (car shares)))
(let lp ((ls ls))
(or (null? ls)
(and (pair? ls)
(not (eq?-table-ref tab ls))
(lp (cdr ls)))))))
(define (pp-flat x)
(cond
((pair? x)
(fmt-shared-write
x
(cond
((and (pair? (cdr x)) (null? (cddr x))
(assq (car x) syntax-abbrevs))
=> (lambda (abbrev)
(cat (cdr abbrev) (pp-flat (cadr x)))))
(else
(cat "(" (fmt-join/shares pp-flat x " ") ")")))))
((vector? x)
(fmt-shared-write
x
(cat "#(" (fmt-join/shares pp-flat (vector->list x) " ") ")")))
(else
(lambda (st) ((write-with-shares x (fmt-shares st)) st)))))
(define (pp-pair ls)
(fmt-shared-write
ls
(cond
one element list , no lines to break
((null? (cdr ls))
(cat "(" (pp-object (car ls)) ")"))
;; quote or other abbrev
((and (pair? (cdr ls)) (null? (cddr ls))
(assq (car ls) syntax-abbrevs))
=> (lambda (abbrev)
(cat (cdr abbrev) (pp-object (cadr ls)))))
(else
(fmt-try-fit
(lambda (st) ((pp-flat ls) (fmt-copy-shares st)))
(lambda (st)
(if (and (non-app? ls)
(proper-non-shared-list? ls (fmt-shares st)))
((pp-data-list ls) st)
((pp-app ls) st))))))))
(define (pp-data-list ls)
(lambda (st)
(let* ((output (fmt-writer st))
(st (output "(" st))
(col (fmt-col st))
(width (- (fmt-width st) col))
(st2 (fmt-copy-shares st)))
(cond
((and (pair? (cdr ls)) (pair? (cddr ls)) (pair? (cdddr ls))
((fits-in-columns ls pp-flat width) st2))
=> (lambda (ls)
at least four elements which can be broken into columns
(let* ((prefix (make-nl-space (+ col 1)))
(widest (+ 1 (car ls)))
always > = 2
(let lp ((ls (cdr ls)) (st st2) (i 1))
(cond
((null? ls)
(output ")" st))
((null? (cdr ls))
(output ")" (output (car ls) st)))
(else
(let ((st (output (car ls) st)))
(if (>= i columns)
(lp (cdr ls) (output prefix st) 1)
(let* ((pad (- widest (string-length (car ls))))
(st (output (make-space pad) st)))
(lp (cdr ls) st (+ i 1)))))))))))
(else
no room , print one per line
((cat (fmt-join pp-object ls (make-nl-space col)) ")") st))))))
(define (pp-vector vec)
(fmt-shared-write vec (cat "#" (pp-data-list (vector->list vec)))))
(define (pp-object obj)
(cond
((pair? obj) (pp-pair obj))
((vector? obj) (pp-vector obj))
(else (lambda (st) ((write-with-shares obj (fmt-shares st)) st)))))
(define (pretty obj)
(fmt-bind 'shares (cons (make-shared-ref-table obj) 0)
(cat (pp-object obj) fl)))
(define (pretty/unshared obj)
(fmt-bind 'shares (cons (make-eq?-table) 0) (cat (pp-object obj) fl)))
| null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/iasylum/fmt/fmt-0.8.1/fmt-pretty.scm | scheme | fmt-pretty.scm -- pretty printing format combinator
BSD-style license:
additional settings
utilities
pretty printing
(and indent-rule (not (pair? (car ls))))
all on separate lines
the elements may be shared, just checking the top level list
structure
quote or other abbrev | Copyright ( c ) 2006 - 2007 . All rights reserved .
(define (fmt-shares st) (fmt-ref st 'shares))
(define (fmt-set-shares! st x) (fmt-set! st 'shares x))
(define (fmt-copy-shares st)
(fmt-set-shares! (copy-fmt-state st) (copy-shares (fmt-shares st))))
(define (copy-shares shares)
(let ((tab (make-eq?-table)))
(hash-table-walk
(car shares)
(lambda (obj x) (eq?-table-set! tab obj (cons (car x) (cdr x)))))
(cons tab (cdr shares))))
(define (fmt-shared-write obj proc)
(lambda (st)
(let* ((shares (fmt-shares st))
(cell (and shares (eq?-table-ref (car shares) obj))))
(if (pair? cell)
(cond
((cdr cell)
((fmt-writer st) (gen-shared-ref (car cell) "#") st))
(else
(set-car! cell (cdr shares))
(set-cdr! cell #t)
(set-cdr! shares (+ (cdr shares) 1))
(proc ((fmt-writer st) (gen-shared-ref (car cell) "=") st))))
(proc st)))))
(define (fmt-join/shares fmt ls . o)
(let ((sep (dsp (if (pair? o) (car o) " "))))
(lambda (st)
(if (null? ls)
st
(let* ((shares (fmt-shares st))
(tab (car shares))
(output (fmt-writer st)))
(let lp ((ls ls) (st st))
(let ((st ((fmt (car ls)) st))
(rest (cdr ls)))
(cond
((null? rest) st)
((pair? rest)
(call-with-shared-ref/cdr rest st shares
(lambda (st) (lp rest st))
sep))
(else ((fmt rest) (output ". " (sep st))))))))))))
(define (non-app? x)
(if (pair? x)
(or (not (or (null? (cdr x)) (pair? (cdr x))))
(non-app? (car x)))
(not (symbol? x))))
(define syntax-abbrevs
'((quote . "'") (quasiquote . "`") (unquote . ",") (unquote-splicing . ",@")
))
(define (pp-let ls)
(if (and (pair? (cdr ls)) (symbol? (cadr ls)))
(pp-with-indent 2 ls)
(pp-with-indent 1 ls)))
(define indent-rules
`((lambda . 1) (define . 1)
(let . ,pp-let) (loop . ,pp-let)
(let* . 1) (letrec . 1) (letrec* . 1) (and-let* . 1) (let1 . 2)
(let-values . 1) (let*-values . 1) (receive . 2) (parameterize . 1)
(let-syntax . 1) (letrec-syntax . 1) (syntax-rules . 1) (syntax-case . 2)
(match . 1) (match-let . 1) (match-let* . 1)
(if . 3) (when . 1) (unless . 1) (case . 1) (while . 1) (until . 1)
(do . 2) (dotimes . 1) (dolist . 1) (test . 1)
(condition-case . 1) (guard . 1) (rec . 1)
(call-with-current-continuation . 0)
))
(define indent-prefix-rules
`(("with-" . -1) ("call-with-" . -1) ("define-" . 1))
)
(define indent-suffix-rules
`(("-case" . 1))
)
(define (pp-indentation form)
(let ((indent
(cond
((assq (car form) indent-rules) => cdr)
((and (symbol? (car form))
(let ((str (symbol->string (car form))))
(or (find (lambda (rx) (string-prefix? (car rx) str))
indent-prefix-rules)
(find (lambda (rx) (string-suffix? (car rx) str))
indent-suffix-rules))))
=> cdr)
(else #f))))
(if (and (number? indent) (negative? indent))
(max 0 (- (+ (length+ form) indent) 1))
indent)))
(define (pp-with-indent indent-rule ls)
(lambda (st)
(let* ((col1 (fmt-col st))
(st ((cat "(" (pp-object (car ls))) st))
(col2 (fmt-col st))
(fixed (take* (cdr ls) (or indent-rule 1)))
(tail (drop* (cdr ls) (or indent-rule 1)))
(st2 (fmt-copy-shares st))
(first-line
((fmt-to-string (cat " " (fmt-join/shares pp-flat fixed " "))) st2))
(default
(let ((sep (make-nl-space (+ col1 1))))
(cat sep (fmt-join/shares pp-object (cdr ls) sep) ")"))))
(cond
((< (+ col2 (string-length first-line)) (fmt-width st2))
fixed values on first line
(let ((sep (make-nl-space
(if indent-rule (+ col1 2) (+ col2 1)))))
((cat first-line
(cond
((not (or (null? tail) (pair? tail)))
(cat ". " (pp-object tail)))
((> (length+ (cdr ls)) (or indent-rule 1))
(cat sep (fmt-join/shares pp-object tail sep)))
(else
fmt-null))
")")
st2)))
fixed values lined up , body indented two spaces
((fmt-try-fit
(lambda (st)
((cat
" "
(fmt-join/shares pp-object fixed (make-nl-space (+ col2 1)))
(if (pair? tail)
(let ((sep (make-nl-space (+ col1 2))))
(cat sep (fmt-join/shares pp-object tail sep)))
"")
")")
(fmt-copy-shares st)))
default)
st))
(else
(default st))))))
(define (pp-app ls)
(let ((indent-rule (pp-indentation ls)))
(if (procedure? indent-rule)
(indent-rule ls)
(pp-with-indent indent-rule ls))))
(define (proper-non-shared-list? ls shares)
(let ((tab (car shares)))
(let lp ((ls ls))
(or (null? ls)
(and (pair? ls)
(not (eq?-table-ref tab ls))
(lp (cdr ls)))))))
(define (pp-flat x)
(cond
((pair? x)
(fmt-shared-write
x
(cond
((and (pair? (cdr x)) (null? (cddr x))
(assq (car x) syntax-abbrevs))
=> (lambda (abbrev)
(cat (cdr abbrev) (pp-flat (cadr x)))))
(else
(cat "(" (fmt-join/shares pp-flat x " ") ")")))))
((vector? x)
(fmt-shared-write
x
(cat "#(" (fmt-join/shares pp-flat (vector->list x) " ") ")")))
(else
(lambda (st) ((write-with-shares x (fmt-shares st)) st)))))
(define (pp-pair ls)
(fmt-shared-write
ls
(cond
one element list , no lines to break
((null? (cdr ls))
(cat "(" (pp-object (car ls)) ")"))
((and (pair? (cdr ls)) (null? (cddr ls))
(assq (car ls) syntax-abbrevs))
=> (lambda (abbrev)
(cat (cdr abbrev) (pp-object (cadr ls)))))
(else
(fmt-try-fit
(lambda (st) ((pp-flat ls) (fmt-copy-shares st)))
(lambda (st)
(if (and (non-app? ls)
(proper-non-shared-list? ls (fmt-shares st)))
((pp-data-list ls) st)
((pp-app ls) st))))))))
(define (pp-data-list ls)
(lambda (st)
(let* ((output (fmt-writer st))
(st (output "(" st))
(col (fmt-col st))
(width (- (fmt-width st) col))
(st2 (fmt-copy-shares st)))
(cond
((and (pair? (cdr ls)) (pair? (cddr ls)) (pair? (cdddr ls))
((fits-in-columns ls pp-flat width) st2))
=> (lambda (ls)
at least four elements which can be broken into columns
(let* ((prefix (make-nl-space (+ col 1)))
(widest (+ 1 (car ls)))
always > = 2
(let lp ((ls (cdr ls)) (st st2) (i 1))
(cond
((null? ls)
(output ")" st))
((null? (cdr ls))
(output ")" (output (car ls) st)))
(else
(let ((st (output (car ls) st)))
(if (>= i columns)
(lp (cdr ls) (output prefix st) 1)
(let* ((pad (- widest (string-length (car ls))))
(st (output (make-space pad) st)))
(lp (cdr ls) st (+ i 1)))))))))))
(else
no room , print one per line
((cat (fmt-join pp-object ls (make-nl-space col)) ")") st))))))
(define (pp-vector vec)
(fmt-shared-write vec (cat "#" (pp-data-list (vector->list vec)))))
(define (pp-object obj)
(cond
((pair? obj) (pp-pair obj))
((vector? obj) (pp-vector obj))
(else (lambda (st) ((write-with-shares obj (fmt-shares st)) st)))))
(define (pretty obj)
(fmt-bind 'shares (cons (make-shared-ref-table obj) 0)
(cat (pp-object obj) fl)))
(define (pretty/unshared obj)
(fmt-bind 'shares (cons (make-eq?-table) 0) (cat (pp-object obj) fl)))
|
3dbd6421d945e312aca5f3823d85c56ad24aa81e4d7e293163f04471ed3315ab | BitGameEN/bitgamex | ecrn_test.erl | , LLC . All Rights Reserved .
%%%
This file is provided to you under the BSD License ; you may not use
%%% this file except in compliance with the License.
-module(ecrn_test).
-include_lib("eunit/include/eunit.hrl").
%%%===================================================================
%%% Types
%%%===================================================================
cron_test_() ->
{setup,
fun() ->
ecrn_app:manual_start()
end,
fun(_) ->
ecrn_app:manual_stop()
end,
{with, [fun set_alarm_test/1,
fun travel_back_in_time_test/1,
fun cancel_alarm_test/1,
fun big_time_jump_test/1,
fun cron_test/1,
fun validation_test/1]}}.
set_alarm_test(_) ->
EpochDay = {2000,1,1},
EpochDateTime = {EpochDay,{8,0,0}},
erlcron:set_datetime(EpochDateTime),
Alarm1TimeOfDay = {9,0,0},
Alarm2TimeOfDay = {9,0,1},
Self = self(),
erlcron:at(Alarm1TimeOfDay, fun(_, _) ->
Self ! ack1
end),
erlcron:at(Alarm2TimeOfDay, fun(_, _) ->
Self ! ack2
end),
erlcron:set_datetime({EpochDay, {8, 59, 59}}),
%% The alarm should trigger this nearly immediately.
?assertMatch(ok, receive
ack1 -> ok
after
1500 -> timeout
end),
%% The alarm should trigger this 1 second later.
?assertMatch(ok, receive
ack2 -> ok
after
2500 -> timeout
end).
cancel_alarm_test(_) ->
Day = {2000,1,1},
erlcron:set_datetime({Day,{8,0,0}}),
AlarmTimeOfDay = {9,0,0},
Self = self(),
Ref = erlcron:at(AlarmTimeOfDay, fun(_, _) ->
Self ! ack
end),
erlcron:cancel(Ref),
erlcron:set_datetime({Day, AlarmTimeOfDay}),
?assertMatch(ok, receive
ack -> ack
after
%% There is no event-driven way to
%% ensure we never receive an ack.
125 -> ok
end).
Time jumps ahead one day so we should see the alarms from both days .
big_time_jump_test(_) ->
Day1 = {2000,1,1},
Day2 = {2000,1,2},
EpochDateTime = {Day1,{8,0,0}},
erlcron:set_datetime(EpochDateTime),
Alarm1TimeOfDay = {9,0,0},
Alarm2TimeOfDay = {9,0,1},
Self = self(),
erlcron:at(Alarm1TimeOfDay, fun(_, _) ->
Self ! ack1
end),
erlcron:at(Alarm2TimeOfDay, fun(_, _) ->
Self ! ack2
end),
erlcron:set_datetime({Day2, {9, 10, 0}}),
?assertMatch(ok, receive
ack1 -> ok
after
1500 -> timeout
end),
?assertMatch(ok, receive
ack2 -> ok
after
1500 -> timeout
end),
?assertMatch(ok, receive
ack1 -> ok
after
1500 -> timeout
end),
?assertMatch(ok, receive
ack2 -> ok
after
2500 -> timeout
end).
travel_back_in_time_test(_) ->
Seconds = seconds_now(),
Past = {{2000,1,1},{12,0,0}},
erlcron:set_datetime(Past),
{ExpectedDateTime, _} = erlcron:datetime(),
ExpectedSeconds = calendar:datetime_to_gregorian_seconds(ExpectedDateTime),
?assertMatch(true, ExpectedSeconds >= calendar:datetime_to_gregorian_seconds(Past)),
?assertMatch(true, ExpectedSeconds < Seconds).
Time jumps ahead one day so we should see the alarms from both days .
cron_test(_) ->
Day1 = {2000,1,1},
AlarmTimeOfDay = {15,29,58},
erlcron:set_datetime({Day1, AlarmTimeOfDay}),
Self = self(),
Job = {{daily, {3, 30, pm}},
fun(_, _) ->
Self ! ack
end},
erlcron:cron(Job),
?assertMatch(ok, receive
ack -> ok
after
2500 -> timeout
end).
validation_test(_) ->
?assertMatch(valid, ecrn_agent:validate({once, {3, 30, pm}})),
?assertMatch(valid, ecrn_agent:validate({once, 3600})),
?assertMatch(valid, ecrn_agent:validate({daily, {every, {23, sec},
{between, {3, pm}, {3, 30, pm}}}})),
?assertMatch(valid, ecrn_agent:validate({daily, {3, 30, pm}})),
?assertMatch(valid, ecrn_agent:validate({weekly, thu, {2, am}})),
?assertMatch(valid, ecrn_agent:validate({weekly, wed, {2, am}})),
?assertMatch(valid, ecrn_agent:validate({monthly, 1, {2, am}})),
?assertMatch(valid, ecrn_agent:validate({monthly, 4, {2, am}})),
?assertMatch(invalid, ecrn_agent:validate({daily, {55, 22, am}})),
?assertMatch(invalid, ecrn_agent:validate({monthly, 65, {55, am}})).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
seconds_now() ->
calendar:datetime_to_gregorian_seconds(calendar:universal_time()).
| null | https://raw.githubusercontent.com/BitGameEN/bitgamex/151ba70a481615379f9648581a5d459b503abe19/src/deps/erlcron/src/ecrn_test.erl | erlang |
this file except in compliance with the License.
===================================================================
Types
===================================================================
The alarm should trigger this nearly immediately.
The alarm should trigger this 1 second later.
There is no event-driven way to
ensure we never receive an ack.
===================================================================
Internal Functions
=================================================================== | , LLC . All Rights Reserved .
This file is provided to you under the BSD License ; you may not use
-module(ecrn_test).
-include_lib("eunit/include/eunit.hrl").
cron_test_() ->
{setup,
fun() ->
ecrn_app:manual_start()
end,
fun(_) ->
ecrn_app:manual_stop()
end,
{with, [fun set_alarm_test/1,
fun travel_back_in_time_test/1,
fun cancel_alarm_test/1,
fun big_time_jump_test/1,
fun cron_test/1,
fun validation_test/1]}}.
set_alarm_test(_) ->
EpochDay = {2000,1,1},
EpochDateTime = {EpochDay,{8,0,0}},
erlcron:set_datetime(EpochDateTime),
Alarm1TimeOfDay = {9,0,0},
Alarm2TimeOfDay = {9,0,1},
Self = self(),
erlcron:at(Alarm1TimeOfDay, fun(_, _) ->
Self ! ack1
end),
erlcron:at(Alarm2TimeOfDay, fun(_, _) ->
Self ! ack2
end),
erlcron:set_datetime({EpochDay, {8, 59, 59}}),
?assertMatch(ok, receive
ack1 -> ok
after
1500 -> timeout
end),
?assertMatch(ok, receive
ack2 -> ok
after
2500 -> timeout
end).
cancel_alarm_test(_) ->
Day = {2000,1,1},
erlcron:set_datetime({Day,{8,0,0}}),
AlarmTimeOfDay = {9,0,0},
Self = self(),
Ref = erlcron:at(AlarmTimeOfDay, fun(_, _) ->
Self ! ack
end),
erlcron:cancel(Ref),
erlcron:set_datetime({Day, AlarmTimeOfDay}),
?assertMatch(ok, receive
ack -> ack
after
125 -> ok
end).
Time jumps ahead one day so we should see the alarms from both days .
big_time_jump_test(_) ->
Day1 = {2000,1,1},
Day2 = {2000,1,2},
EpochDateTime = {Day1,{8,0,0}},
erlcron:set_datetime(EpochDateTime),
Alarm1TimeOfDay = {9,0,0},
Alarm2TimeOfDay = {9,0,1},
Self = self(),
erlcron:at(Alarm1TimeOfDay, fun(_, _) ->
Self ! ack1
end),
erlcron:at(Alarm2TimeOfDay, fun(_, _) ->
Self ! ack2
end),
erlcron:set_datetime({Day2, {9, 10, 0}}),
?assertMatch(ok, receive
ack1 -> ok
after
1500 -> timeout
end),
?assertMatch(ok, receive
ack2 -> ok
after
1500 -> timeout
end),
?assertMatch(ok, receive
ack1 -> ok
after
1500 -> timeout
end),
?assertMatch(ok, receive
ack2 -> ok
after
2500 -> timeout
end).
travel_back_in_time_test(_) ->
Seconds = seconds_now(),
Past = {{2000,1,1},{12,0,0}},
erlcron:set_datetime(Past),
{ExpectedDateTime, _} = erlcron:datetime(),
ExpectedSeconds = calendar:datetime_to_gregorian_seconds(ExpectedDateTime),
?assertMatch(true, ExpectedSeconds >= calendar:datetime_to_gregorian_seconds(Past)),
?assertMatch(true, ExpectedSeconds < Seconds).
Time jumps ahead one day so we should see the alarms from both days .
cron_test(_) ->
Day1 = {2000,1,1},
AlarmTimeOfDay = {15,29,58},
erlcron:set_datetime({Day1, AlarmTimeOfDay}),
Self = self(),
Job = {{daily, {3, 30, pm}},
fun(_, _) ->
Self ! ack
end},
erlcron:cron(Job),
?assertMatch(ok, receive
ack -> ok
after
2500 -> timeout
end).
validation_test(_) ->
?assertMatch(valid, ecrn_agent:validate({once, {3, 30, pm}})),
?assertMatch(valid, ecrn_agent:validate({once, 3600})),
?assertMatch(valid, ecrn_agent:validate({daily, {every, {23, sec},
{between, {3, pm}, {3, 30, pm}}}})),
?assertMatch(valid, ecrn_agent:validate({daily, {3, 30, pm}})),
?assertMatch(valid, ecrn_agent:validate({weekly, thu, {2, am}})),
?assertMatch(valid, ecrn_agent:validate({weekly, wed, {2, am}})),
?assertMatch(valid, ecrn_agent:validate({monthly, 1, {2, am}})),
?assertMatch(valid, ecrn_agent:validate({monthly, 4, {2, am}})),
?assertMatch(invalid, ecrn_agent:validate({daily, {55, 22, am}})),
?assertMatch(invalid, ecrn_agent:validate({monthly, 65, {55, am}})).
seconds_now() ->
calendar:datetime_to_gregorian_seconds(calendar:universal_time()).
|
81663978facd2018a31b5f12520a41afbeab7827b9f2a85623d5346286dfe17b | HaskellEmbedded/data-stm32 | SPI.hs | --
-- SPI.hs --- SPI peripheral
--
module Ivory.BSP.STM32.Peripheral.SPI
( module Ivory.BSP.STM32.Peripheral.SPI.Peripheral
, module Ivory.BSP.STM32.Peripheral.SPI.Regs
, module Ivory.BSP.STM32.Peripheral.SPI.RegTypes
, module Ivory.BSP.STM32.Peripheral.SPI.Pins
) where
import Ivory.BSP.STM32.Peripheral.SPI.Peripheral
import Ivory.BSP.STM32.Peripheral.SPI.Regs
import Ivory.BSP.STM32.Peripheral.SPI.RegTypes
import Ivory.BSP.STM32.Peripheral.SPI.Pins
| null | https://raw.githubusercontent.com/HaskellEmbedded/data-stm32/204aff53eaae422d30516039719a6ec7522a6ab7/templates/STM32/Peripheral/SPI.hs | haskell |
SPI.hs --- SPI peripheral
|
module Ivory.BSP.STM32.Peripheral.SPI
( module Ivory.BSP.STM32.Peripheral.SPI.Peripheral
, module Ivory.BSP.STM32.Peripheral.SPI.Regs
, module Ivory.BSP.STM32.Peripheral.SPI.RegTypes
, module Ivory.BSP.STM32.Peripheral.SPI.Pins
) where
import Ivory.BSP.STM32.Peripheral.SPI.Peripheral
import Ivory.BSP.STM32.Peripheral.SPI.Regs
import Ivory.BSP.STM32.Peripheral.SPI.RegTypes
import Ivory.BSP.STM32.Peripheral.SPI.Pins
|
f035ec8ed3065e8bb9525d781b8228f07ff844bc6b7b6536f6e48f70f8ed68aa | d-plaindoux/transept | literals.mli | module Make (Parser : Transept_specs.PARSER with type e = char) : sig
val space : char Parser.t
val spaces : string Parser.t
val alpha : char Parser.t
val digit : char Parser.t
val ident : string Parser.t
val natural : int Parser.t
val integer : int Parser.t
val float : float Parser.t
val string : string Parser.t
val char : char Parser.t
end
| null | https://raw.githubusercontent.com/d-plaindoux/transept/8567803721f6c3f5d876131b15cb301cb5b084a4/lib/transept_extension/literals.mli | ocaml | module Make (Parser : Transept_specs.PARSER with type e = char) : sig
val space : char Parser.t
val spaces : string Parser.t
val alpha : char Parser.t
val digit : char Parser.t
val ident : string Parser.t
val natural : int Parser.t
val integer : int Parser.t
val float : float Parser.t
val string : string Parser.t
val char : char Parser.t
end
| |
ab21f0e509628f7454ebc2272fbb805b28b812fd45b7a8f6c2a1667098aab6de | nikita-volkov/rerebase | Base.hs | module Data.Vector.Unboxed.Base
(
module Rebase.Data.Vector.Unboxed.Base
)
where
import Rebase.Data.Vector.Unboxed.Base
| null | https://raw.githubusercontent.com/nikita-volkov/rerebase/25895e6d8b0c515c912c509ad8dd8868780a74b6/library/Data/Vector/Unboxed/Base.hs | haskell | module Data.Vector.Unboxed.Base
(
module Rebase.Data.Vector.Unboxed.Base
)
where
import Rebase.Data.Vector.Unboxed.Base
| |
e6214cd2451bb08fdba2779dbfdc3f29c99d983e9ac33df596554b3c47b2bb99 | TDacik/Deadlock | lockset_gui.ml | Experimental visualisation of lockset analysis results
*
* TODO : Refactoring
*
* Author : ( ) , 2021
*
* TODO: Refactoring
*
* Author: Tomas Dacik (), 2021
*)
open !Deadlock_top
open Dgraph_helper
open Pretty_source
open Gtk_helper
open Gui_utils
open Graph_views
open Cil_types
open Cil_datatype
open Lock_types
open Trace_utils
open Thread_analysis
open Deadlock_options
module KF = Kernel_function
module Results = Lockset_analysis.Results
let empty_table () = GPack.table ~columns:1 ()
let empty_stmt_table () =
let table = GPack.table ~columns:5 () in
table#attach ~left:0 ~top:0 ~xpadding:12 (GMisc.label ~text:"Thread" ())#coerce;
table#attach ~left:1 ~top:0 ~xpadding:12 (GMisc.label ~text:"Entry lockset" ())#coerce;
table#attach ~left:2 ~top:0 ~xpadding:12 (GMisc.label ~text:"Context" ())#coerce;
table#attach ~left:3 ~top:0 ~xpadding:12 (GMisc.label ~text:"Exit locksets" ())#coerce;
table
let lockset_info = ref None
let get_results () = match !Deadlock_main._results with
| Some results -> results
| None -> failwith "Lockset analysis was not computed"
let get_thread_graph () = match !Deadlock_main._thread_graph with
| Some g -> g
| None -> failwith "Thread analysis was not computed"
let state_button main_ui (table : GPack.table) top state =
if Gui_utils.state_too_long state then
let text = Format.asprintf "%a" Cvalue.Model.pretty state in
let label = "Initial context" in
let button = GButton.button ~label ~relief:`NONE () in
let callback = Gui_utils.text_window main_ui#main_window "Initial context" text in
ignore @@ button#connect#clicked ~callback;
table#attach ~left:2 ~top button#coerce
else
let text = Format.asprintf "%a" Cvalue.Model.pretty state in
table#attach ~left:2 ~top (GMisc.label ~text ())#coerce
let thread_button main_ui (table : GPack.table) top thread =
let label = Format.asprintf "%a" Thread.pp thread in
let state, arg_value = Thread.get_init_state thread in
let text = Format.asprintf "State:\n %a Argument:\n%a"
Cvalue.Model.pretty state
Cvalue.V.pretty arg_value
in
let equiv_threads = Thread_graph.get_equiv_threads (get_thread_graph ()) thread in
let text2 = "\n\nThis thread's initial state is equivalent to " in
let text3 = List.fold_left (fun acc t -> acc ^ " ," ^ (Thread.to_string t))
text2 equiv_threads in
let button = GButton.button ~label ~relief:`NONE () in
let callback = Gui_utils.text_window main_ui#main_window label (text^text3) in
ignore @@ button#connect#clicked ~callback;
table#attach ~left:0 ~top button#coerce
(** Statement summary *)
let table_stmt main_ui results stmt =
let summaries = Results.summaries_of_stmt results stmt in
let table = empty_stmt_table () in
if Stmt_summaries.cardinal summaries = 0 then
table#attach
~left:0
~top:1
(GMisc.label
~text:"This statement was not reached during lockset analysis."
()
)#coerce;
let _ = Stmt_summaries.fold
(fun (stmt, (thread, ls, context)) lss row ->
let ls_str = Format.asprintf "%a" Lockset.pp ls in
let lss_str = Format.asprintf "%a" LocksetSet.pp lss in
let context_str = Format.asprintf "%a" Cvalue.Model.pretty context in
thread_button main_ui table row thread;
table#attach ~left:1 ~top:row (GMisc.label ~text:ls_str ())#coerce;
table#attach ~left:2 ~top:row (GMisc.label ~text:context_str ())#coerce;
table#attach ~left:3 ~top:row (GMisc.label ~text:lss_str ())#coerce;
row + 1
) summaries 1
in table
let show_lockgraph_fn lockgraph main_ui () =
Dgraph_helper.graph_window_through_dot
~parent: main_ui#main_window
~title:"Lockgraph"
(fun fmt -> Lockgraph_dot.fprint_graph fmt lockgraph)
let table_fn_summaries main_ui results varinfo =
try
let kf = Globals.Functions.find_by_name
(Format.asprintf "%a" Printer.pp_varinfo varinfo) in
let fn = Kernel_function.get_definition kf in
let (table : GPack.table) = GPack.table
~columns: 5
()
in
let summaries = Results.summaries_of_fn results fn in
table#attach ~left:0 ~top:0 ~xpadding:12 (GMisc.label ~text:"Thread" ())#coerce;
table#attach ~left:1 ~top:0 ~xpadding:12 (GMisc.label ~text:"Entry lockset" ())#coerce;
table#attach ~left:2 ~top:0 ~xpadding:12 (GMisc.label ~text:"Context" ())#coerce;
table#attach ~left:3 ~top:0 ~xpadding:12 (GMisc.label ~text:"Exit locksets" ())#coerce;
table#attach ~left:4 ~top:0 ~xpadding:12 (GMisc.label ~text:"Lockgraph (|E|)" ())#coerce;
let _ = Function_summaries.fold
(fun (fn, (thread, ls, context)) (lss, g) (row : int) ->
let ls_str = Format.asprintf "%a" Lockset.pp ls in
let lss_str = Format.asprintf "%a" LocksetSet.pp lss in
let lockgraph = Format.asprintf "lockgraph (%d)" (Lockgraph.nb_edges g) in
thread_button main_ui table row thread;
table#attach ~left:1 ~top:row (GMisc.label ~text:ls_str ())#coerce;
state_button main_ui table row context;
table#attach ~left:3 ~top:row (GMisc.label ~text:lss_str ())#coerce;
(* Create label with callback *)
let label = GButton.button ~label:lockgraph ~relief:`NONE () in
ignore @@ label#connect#clicked ~callback:(show_lockgraph_fn g main_ui);
table#attach ~left:4 ~top:row label#coerce;
row + 1
) summaries 1 in
table
with KF.No_Definition ->
empty_table ()
let table_expr results kinstr expr = match kinstr with
| Kstmt stmt ->
let ls = Lockset_analysis.possible_locks stmt expr in
let table = GPack.table ~columns:1 ~rows:1 () in
table#attach ~left:0 ~top:0 (GMisc.label ~text:(Lockset.to_string ls) ())#coerce;
table
| Kglobal -> empty_table ()
(** Callback: selection of element in the source code. *)
let on_select menu (main_ui : Design.main_window_extension_points) ~button selected =
let results = get_results () in
let notebook = main_ui#lower_notebook in
let table = match selected with
(* Statements *)
| PStmt (_, stmt) | PStmtStart (_, stmt) -> table_stmt main_ui results stmt
(* Declaration and definition of functior or variable *)
| PVDecl (_, _, varinfo) ->
begin match varinfo.vtype with
| TFun _ -> table_fn_summaries main_ui results varinfo
| _ ->
let t = GPack.table ~columns:1 ~rows:1 () in
t#attach
~left:0
~top:0
(GMisc.label
~text:"Debug: Deadlock has no info for this selection."
()
)#coerce;
t
end
Expression
| PExp (_, kinstr, expr) -> table_expr results kinstr expr
| PLval (_, kinstr, lval) ->
let loc = match kinstr with
| Kstmt stmt -> Stmt.loc stmt
| Kglobal -> Location.unknown
in
let expr = Cil.mkAddrOf ~loc lval in
table_expr results kinstr expr
(* Otherwise empty table *)
| _ ->
let t = GPack.table ~columns:1 ~rows:1 () in
t#attach ~left:0 ~top:0 (GMisc.label ~text:"Debug: Deadlock has no info for this selection." ())#coerce;
t
in
let table = table#coerce in
let page = Option.get !lockset_info in
let pos_focused = main_ui#lower_notebook#current_page in
let pos = main_ui#lower_notebook#page_num page in
main_ui#lower_notebook#remove_page pos;
let label = Some (GMisc.label ~text:"Deadlock" ())#coerce in
let page_pos = main_ui#lower_notebook#insert_page ?tab_label:label ?menu_label:label ~pos table in
let page = main_ui#lower_notebook#get_nth_page page_pos in
main_ui#lower_notebook#goto_page pos_focused;
lockset_info := Some page;
()
(* Open a window rendering the global lockgraph through dot. *)
let show_lockgraph main_ui () =
  let lockgraph = Results.get_lockgraph (get_results ()) in
  Dgraph_helper.graph_window_through_dot
    ~parent: main_ui#main_window
    ~title:"Lockgraph"
    (fun fmt -> Lockgraph_dot.fprint_graph fmt lockgraph)

(* Open a window rendering the thread-creation graph through dot. *)
let show_thread_graph main_ui () =
  let thread_graph = get_thread_graph () in
  Dgraph_helper.graph_window_through_dot
    ~parent: main_ui#main_window
    ~title:"Thread graph"
    (fun fmt -> Thread_graph_dot.fprint_graph fmt thread_graph)

(* Switch the analysed thread and redraw the whole GUI.
   NOTE(review): the ignored [get_results] call presumably forces Eva to
   recompute for the newly active thread -- confirm. *)
let change_thread thread main_ui () =
  Eva_wrapper.set_active_thread thread;
  let _ = Eva.Value_results.get_results () in
  main_ui#redisplay ()
(* Build the plugin's side panel: two buttons opening the lockgraph and the
   thread-graph windows. The thread-selection list is disabled (kept in
   comments) pending build fixes; the dump had lost the comment openers,
   leaving orphan [*)] tokens -- restored here. *)
let deadlock_panel main_ui =
  let box = GPack.box `VERTICAL () in
  let button1 = GButton.button ~label:"Show lockgraph" () in
  ignore @@ button1#connect#clicked ~callback:(show_lockgraph main_ui);
  let button2 = GButton.button ~label:"Show thread graph" () in
  ignore @@ button2#connect#clicked ~callback:(show_thread_graph main_ui);
  (* TODO: fix build problems
  let label = GMisc.label ~text:"Active thread" () in
  let liste = GList.liste () ~selection_mode:`SINGLE in
  let threads = Thread_graph.get_threads (get_thread_graph ()) in
  List.iteri
    (fun i thread ->
      let thread_str = Thread.to_string thread in
      let item = GList.list_item ~label:thread_str () in
      ignore @@ item#connect#select ~callback:(change_thread thread main_ui);
      liste#insert ~pos:i item
    ) threads;
  *)
  (box :> GContainer.container)#add button1#coerce;
  (box :> GContainer.container)#add button2#coerce;
  (*
  (box :> GContainer.container)#add label#coerce;
  (box :> GContainer.container)#add liste#coerce;
  *)
  box#coerce
(* Source highlighter: paint red the statements with imprecise lock
   operations and the functions flagged imprecise by the analysis.
   NOTE(review): [List.mem ~eq:...] is not stdlib -- presumably the
   Containers List opened elsewhere in the project; confirm. *)
let high buffer localizable ~start ~stop =
  let results = get_results () in
  let buffer = buffer#buffer in
  match localizable with
  | PStmt (_, stmt) ->
    (* Statement with an imprecisely analysed lock/unlock. *)
    if Cil_datatype.Stmt.Set.mem stmt (Results.get_imprecise_lock_stmts results) then
      let tag = make_tag buffer "deadlock" [`BACKGROUND "red"] in
      apply_tag buffer tag start stop
  | PVDecl (_, _, varinfo) ->
    begin match varinfo.vtype with
      | TFun _ ->
        (* Function whose summary is imprecise. *)
        let kf = Option.get @@ kf_of_localizable localizable in
        let fundec = Kernel_function.get_definition kf in
        if List.mem ~eq:Fundec.equal fundec (Results.imprecise_fns results) then
          let tag = make_tag buffer "deadlock" [`BACKGROUND "red" ] in
          apply_tag buffer tag start stop
      | _ -> ()
    end
  | _ -> ()
(** Initialisation of a new tab in the lower notebook: registers the
    selector, panel and highlighter, then creates the plugin's page and
    remembers it in [lockset_info]. *)
let main (main_ui : Design.main_window_extension_points) =
  main_ui#register_source_selector on_select;
  main_ui#register_panel (fun main_ui -> ("Deadlock", deadlock_panel main_ui, None));
  main_ui#register_source_highlighter high;
  (* Create page in lower notebook and store reference to it. *)
  let tab_label = Some (GMisc.label ~text:"Deadlock" ())#coerce in
  let info = empty_table () in
  let page_pos = main_ui#lower_notebook#append_page
      ?tab_label:tab_label ?menu_label:tab_label info#coerce in
  let page = main_ui#lower_notebook#get_nth_page page_pos in
  lockset_info := Some page;
  ()

(* Hook the GUI extension only when the plugin is enabled. *)
let init main_ui = if Enabled.get () then main main_ui else ()

let () = Design.register_extension init
| null | https://raw.githubusercontent.com/TDacik/Deadlock/b8b551610bd1fd8eeb33dea2df863c014a1be447/src/deadlock_gui/lockset_gui.ml | ocaml | * Statement summary
Create label with callback
* Callback: selection of element in the source code.
Statements
Declaration and definition of functior or variable
Otherwise empty table
* Initialisation of new tabe in lower notebook. *
Create page in lower notebook and store reference to it. | Experimental visualisation of lockset analysis results
*
* TODO : Refactoring
*
* Author : ( ) , 2021
*
* TODO: Refactoring
*
* Author: Tomas Dacik (), 2021
*)
open !Deadlock_top
open Dgraph_helper
open Pretty_source
open Gtk_helper
open Gui_utils
open Graph_views
open Cil_types
open Cil_datatype
open Lock_types
open Trace_utils
open Thread_analysis
open Deadlock_options
module KF = Kernel_function
module Results = Lockset_analysis.Results
(* Fresh one-column GTK table, used as an empty placeholder page. *)
let empty_table () = GPack.table ~columns:1 ()

(* Table with the header row for per-statement summaries:
   thread / entry lockset / context / exit locksets. *)
let empty_stmt_table () =
  let table = GPack.table ~columns:5 () in
  table#attach ~left:0 ~top:0 ~xpadding:12 (GMisc.label ~text:"Thread" ())#coerce;
  table#attach ~left:1 ~top:0 ~xpadding:12 (GMisc.label ~text:"Entry lockset" ())#coerce;
  table#attach ~left:2 ~top:0 ~xpadding:12 (GMisc.label ~text:"Context" ())#coerce;
  table#attach ~left:3 ~top:0 ~xpadding:12 (GMisc.label ~text:"Exit locksets" ())#coerce;
  table

(* Reference to the plugin's page widget in the lower notebook. *)
let lockset_info = ref None

(* Lockset-analysis results; fails when the analysis was not run. *)
let get_results () = match !Deadlock_main._results with
  | Some results -> results
  | None -> failwith "Lockset analysis was not computed"

(* Thread graph from the thread analysis; fails when missing. *)
let get_thread_graph () = match !Deadlock_main._thread_graph with
  | Some g -> g
  | None -> failwith "Thread analysis was not computed"
(* Put the abstract state into column 2 of [table] at row [top]: as a
   button opening a text window when the pretty-printed state is too long,
   otherwise as a plain label. *)
let state_button main_ui (table : GPack.table) top state =
  if Gui_utils.state_too_long state then
    let text = Format.asprintf "%a" Cvalue.Model.pretty state in
    let label = "Initial context" in
    let button = GButton.button ~label ~relief:`NONE () in
    let callback = Gui_utils.text_window main_ui#main_window "Initial context" text in
    ignore @@ button#connect#clicked ~callback;
    table#attach ~left:2 ~top button#coerce
  else
    let text = Format.asprintf "%a" Cvalue.Model.pretty state in
    table#attach ~left:2 ~top (GMisc.label ~text ())#coerce

(* Put a button for [thread] into column 0 of [table] at row [top];
   clicking it opens a window with the thread's initial state, argument
   and the threads whose initial states are equivalent.
   NOTE(review): the separator [" ,"] looks like a swapped [", "] typo. *)
let thread_button main_ui (table : GPack.table) top thread =
  let label = Format.asprintf "%a" Thread.pp thread in
  let state, arg_value = Thread.get_init_state thread in
  let text = Format.asprintf "State:\n %a Argument:\n%a"
      Cvalue.Model.pretty state
      Cvalue.V.pretty arg_value
  in
  let equiv_threads = Thread_graph.get_equiv_threads (get_thread_graph ()) thread in
  let text2 = "\n\nThis thread's initial state is equivalent to " in
  let text3 = List.fold_left (fun acc t -> acc ^ " ," ^ (Thread.to_string t))
      text2 equiv_threads in
  let button = GButton.button ~label ~relief:`NONE () in
  let callback = Gui_utils.text_window main_ui#main_window label (text^text3) in
  ignore @@ button#connect#clicked ~callback;
  table#attach ~left:0 ~top button#coerce
(* Table of per-statement summaries for [stmt]: one row per
   (thread, entry lockset, context) with its exit locksets.
   Shows a notice when the statement was never reached. *)
let table_stmt main_ui results stmt =
  let summaries = Results.summaries_of_stmt results stmt in
  let table = empty_stmt_table () in
  if Stmt_summaries.cardinal summaries = 0 then
    table#attach
      ~left:0
      ~top:1
      (GMisc.label
         ~text:"This statement was not reached during lockset analysis."
         ()
      )#coerce;
  (* Fold fills rows starting at 1 (row 0 is the header); the accumulator
     is the next free row index. *)
  let _ = Stmt_summaries.fold
      (fun (stmt, (thread, ls, context)) lss row ->
         let ls_str = Format.asprintf "%a" Lockset.pp ls in
         let lss_str = Format.asprintf "%a" LocksetSet.pp lss in
         let context_str = Format.asprintf "%a" Cvalue.Model.pretty context in
         thread_button main_ui table row thread;
         table#attach ~left:1 ~top:row (GMisc.label ~text:ls_str ())#coerce;
         table#attach ~left:2 ~top:row (GMisc.label ~text:context_str ())#coerce;
         table#attach ~left:3 ~top:row (GMisc.label ~text:lss_str ())#coerce;
         row + 1
      ) summaries 1
  in table
(* Open a dot window for one function's (per-context) lockgraph. *)
let show_lockgraph_fn lockgraph main_ui () =
  Dgraph_helper.graph_window_through_dot
    ~parent: main_ui#main_window
    ~title:"Lockgraph"
    (fun fmt -> Lockgraph_dot.fprint_graph fmt lockgraph)

(* Table of function summaries for the function named by [varinfo]:
   one row per (thread, entry lockset, context) with exit locksets and a
   button opening that context's lockgraph. Empty table when the function
   has no definition. *)
let table_fn_summaries main_ui results varinfo =
  try
    let kf = Globals.Functions.find_by_name
        (Format.asprintf "%a" Printer.pp_varinfo varinfo) in
    let fn = Kernel_function.get_definition kf in
    let (table : GPack.table) = GPack.table
        ~columns: 5
        ()
    in
    let summaries = Results.summaries_of_fn results fn in
    (* Header row. *)
    table#attach ~left:0 ~top:0 ~xpadding:12 (GMisc.label ~text:"Thread" ())#coerce;
    table#attach ~left:1 ~top:0 ~xpadding:12 (GMisc.label ~text:"Entry lockset" ())#coerce;
    table#attach ~left:2 ~top:0 ~xpadding:12 (GMisc.label ~text:"Context" ())#coerce;
    table#attach ~left:3 ~top:0 ~xpadding:12 (GMisc.label ~text:"Exit locksets" ())#coerce;
    table#attach ~left:4 ~top:0 ~xpadding:12 (GMisc.label ~text:"Lockgraph (|E|)" ())#coerce;
    (* One row per summary; accumulator is the next free row index. *)
    let _ = Function_summaries.fold
        (fun (fn, (thread, ls, context)) (lss, g) (row : int) ->
           let ls_str = Format.asprintf "%a" Lockset.pp ls in
           let lss_str = Format.asprintf "%a" LocksetSet.pp lss in
           let lockgraph = Format.asprintf "lockgraph (%d)" (Lockgraph.nb_edges g) in
           thread_button main_ui table row thread;
           table#attach ~left:1 ~top:row (GMisc.label ~text:ls_str ())#coerce;
           state_button main_ui table row context;
           table#attach ~left:3 ~top:row (GMisc.label ~text:lss_str ())#coerce;
           let label = GButton.button ~label:lockgraph ~relief:`NONE () in
           ignore @@ label#connect#clicked ~callback:(show_lockgraph_fn g main_ui);
           table#attach ~left:4 ~top:row label#coerce;
           row + 1
        ) summaries 1 in
    table
  with KF.No_Definition ->
    empty_table ()
(* One-cell table listing the locks [expr] may denote at the given program
   point; empty table when there is no statement context.
   NOTE(review): [results] is unused here. *)
let table_expr results kinstr expr = match kinstr with
  | Kstmt stmt ->
    let ls = Lockset_analysis.possible_locks stmt expr in
    let table = GPack.table ~columns:1 ~rows:1 () in
    table#attach ~left:0 ~top:0 (GMisc.label ~text:(Lockset.to_string ls) ())#coerce;
    table
  | Kglobal -> empty_table ()
(** Callback: selection of an element in the source code.
    Builds an information table for the selected item and swaps it into the
    plugin's page of the lower notebook, restoring the page the user had
    focused. [menu] and [button] are required by the selector signature but
    unused here. *)
let on_select menu (main_ui : Design.main_window_extension_points) ~button selected =
  let results = get_results () in
  let table = match selected with
    (* Statements *)
    | PStmt (_, stmt) | PStmtStart (_, stmt) -> table_stmt main_ui results stmt
    (* Declaration and definition of function or variable *)
    | PVDecl (_, _, varinfo) ->
      begin match varinfo.vtype with
        | TFun _ -> table_fn_summaries main_ui results varinfo
        | _ ->
          let t = GPack.table ~columns:1 ~rows:1 () in
          t#attach
            ~left:0
            ~top:0
            (GMisc.label
               ~text:"Debug: Deadlock has no info for this selection."
               ()
            )#coerce;
          t
      end
    (* Expression *)
    | PExp (_, kinstr, expr) -> table_expr results kinstr expr
    (* An lvalue is handled as the expression [&lval] at its location. *)
    | PLval (_, kinstr, lval) ->
      let loc = match kinstr with
        | Kstmt stmt -> Stmt.loc stmt
        | Kglobal -> Location.unknown
      in
      let expr = Cil.mkAddrOf ~loc lval in
      table_expr results kinstr expr
    (* Otherwise empty table *)
    | _ ->
      let t = GPack.table ~columns:1 ~rows:1 () in
      t#attach ~left:0 ~top:0 (GMisc.label ~text:"Debug: Deadlock has no info for this selection." ())#coerce;
      t
  in
  let table = table#coerce in
  (* Replace the plugin's page with the freshly built table, then go back
     to whatever page the user was looking at. *)
  let page = Option.get !lockset_info in
  let pos_focused = main_ui#lower_notebook#current_page in
  let pos = main_ui#lower_notebook#page_num page in
  main_ui#lower_notebook#remove_page pos;
  let label = Some (GMisc.label ~text:"Deadlock" ())#coerce in
  let page_pos = main_ui#lower_notebook#insert_page ?tab_label:label ?menu_label:label ~pos table in
  let page = main_ui#lower_notebook#get_nth_page page_pos in
  main_ui#lower_notebook#goto_page pos_focused;
  lockset_info := Some page;
  ()
(* Open a window rendering the global lockgraph through dot. *)
let show_lockgraph main_ui () =
  let lockgraph = Results.get_lockgraph (get_results ()) in
  Dgraph_helper.graph_window_through_dot
    ~parent: main_ui#main_window
    ~title:"Lockgraph"
    (fun fmt -> Lockgraph_dot.fprint_graph fmt lockgraph)

(* Open a window rendering the thread-creation graph through dot. *)
let show_thread_graph main_ui () =
  let thread_graph = get_thread_graph () in
  Dgraph_helper.graph_window_through_dot
    ~parent: main_ui#main_window
    ~title:"Thread graph"
    (fun fmt -> Thread_graph_dot.fprint_graph fmt thread_graph)

(* Switch the analysed thread and redraw the whole GUI.
   NOTE(review): the ignored [get_results] call presumably forces Eva to
   recompute for the newly active thread -- confirm. *)
let change_thread thread main_ui () =
  Eva_wrapper.set_active_thread thread;
  let _ = Eva.Value_results.get_results () in
  main_ui#redisplay ()
(* Build the plugin's side panel: two buttons opening the lockgraph and the
   thread-graph windows. The thread-selection list is disabled (kept in
   comments) pending build fixes; the dump had lost the comment openers,
   leaving orphan [*)] tokens -- restored here. *)
let deadlock_panel main_ui =
  let box = GPack.box `VERTICAL () in
  let button1 = GButton.button ~label:"Show lockgraph" () in
  ignore @@ button1#connect#clicked ~callback:(show_lockgraph main_ui);
  let button2 = GButton.button ~label:"Show thread graph" () in
  ignore @@ button2#connect#clicked ~callback:(show_thread_graph main_ui);
  (* TODO: fix build problems
  let label = GMisc.label ~text:"Active thread" () in
  let liste = GList.liste () ~selection_mode:`SINGLE in
  let threads = Thread_graph.get_threads (get_thread_graph ()) in
  List.iteri
    (fun i thread ->
      let thread_str = Thread.to_string thread in
      let item = GList.list_item ~label:thread_str () in
      ignore @@ item#connect#select ~callback:(change_thread thread main_ui);
      liste#insert ~pos:i item
    ) threads;
  *)
  (box :> GContainer.container)#add button1#coerce;
  (box :> GContainer.container)#add button2#coerce;
  (*
  (box :> GContainer.container)#add label#coerce;
  (box :> GContainer.container)#add liste#coerce;
  *)
  box#coerce
(* Source highlighter: paint red the statements with imprecise lock
   operations and the functions flagged imprecise by the analysis.
   NOTE(review): [List.mem ~eq:...] is not stdlib -- presumably the
   Containers List opened elsewhere in the project; confirm. *)
let high buffer localizable ~start ~stop =
  let results = get_results () in
  let buffer = buffer#buffer in
  match localizable with
  | PStmt (_, stmt) ->
    if Cil_datatype.Stmt.Set.mem stmt (Results.get_imprecise_lock_stmts results) then
      let tag = make_tag buffer "deadlock" [`BACKGROUND "red"] in
      apply_tag buffer tag start stop
  | PVDecl (_, _, varinfo) ->
    begin match varinfo.vtype with
      | TFun _ ->
        let kf = Option.get @@ kf_of_localizable localizable in
        let fundec = Kernel_function.get_definition kf in
        if List.mem ~eq:Fundec.equal fundec (Results.imprecise_fns results) then
          let tag = make_tag buffer "deadlock" [`BACKGROUND "red" ] in
          apply_tag buffer tag start stop
      | _ -> ()
    end
  | _ -> ()
(* Initialisation of a new tab in the lower notebook: registers the
   selector, panel and highlighter, then creates the plugin's page and
   remembers it in [lockset_info]. *)
let main (main_ui : Design.main_window_extension_points) =
  main_ui#register_source_selector on_select;
  main_ui#register_panel (fun main_ui -> ("Deadlock", deadlock_panel main_ui, None));
  main_ui#register_source_highlighter high;
  (* Create page in lower notebook and store reference to it. *)
  let tab_label = Some (GMisc.label ~text:"Deadlock" ())#coerce in
  let info = empty_table () in
  let page_pos = main_ui#lower_notebook#append_page
      ?tab_label:tab_label ?menu_label:tab_label info#coerce in
  let page = main_ui#lower_notebook#get_nth_page page_pos in
  lockset_info := Some page;
  ()

(* Hook the GUI extension only when the plugin is enabled. *)
let init main_ui = if Enabled.get () then main main_ui else ()

let () = Design.register_extension init
|
d3f315459b2c68167439c494f0bbe87b6527973a3a339dfc8d88ea57c667bf44 | pflanze/chj-schemelib | string-case-bench.scm | Copyright 2018 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require easy
string-case
memcmp
test-random)
(include "cj-standarddeclares.scm")
(namespace ("string-case-bench#" t1 t2 t3))
;; Evaluated both at macro-expansion time and at run time, because
;; t1/t2/t3 splice these values into their bodies during expansion.
(both-times
 ;; Strings used as the match cases (and as benchmark probes).
 (def string-case-bench:cases
   '("ho"
     "hi"
     "case"
     "cond"
     "let"
     "if"
     "string-case"
     "string-cond"
     "string="
     ""
     "for"
     "foreach"
     "forall"
     "forever"
     "loop"
     "while"
     "not"
     "int"
     "hello world"
     "hello lovely world how are you today? it's been a long way."))
 ;; When #t, keep runtime safety checks in the generated code.
 (def string-case-bench:safe #t))
;; Look up v via the string-case macro; returns the matched string as a
;; symbol, or 'nomatch. The clauses are generated at expansion time from
;; string-case-bench:cases.
(def (t1 v)
  ;; Compile without safety checks only when the safe flag is off.
  (IF (not string-case-bench:safe)
      (declare (fixnum) (not safe)))
  (enable-unquoting
   (string-case v
                ,@(map (lambda (str)
                         `((,str) ',(.symbol str)))
                       string-case-bench:cases)
                (else 'nomatch))))
;; Same contract as t1, but implemented as a linear cond chain of
;; string=? comparisons (the baseline to beat).
(def (t2 v)
  (IF (not string-case-bench:safe)
      (declare (fixnum) (not safe)))
  (enable-unquoting
   (cond ,@(map (lambda (str)
                  `((string=? v ,str) ',(.symbol str)))
                string-case-bench:cases)
         (else 'nomatch))))
;; Bring memcmp-based string comparison into scope.
(use-memcmp)

;; Same contract as t1/t2, but compares via memcmp:@string=? (unchecked
;; C memcmp); in safe mode the argument is type-checked once up front.
(def (t3 v)
  (IF string-case-bench:safe
      (assert (string? v)))
  (enable-unquoting
   (cond ,@(map (lambda (str)
                  `((memcmp:@string=? v ,str) ',(.symbol str)))
                string-case-bench:cases)
         (else 'nomatch))))
;; Time n lookups of str through each implementation and assert that all
;; three return the same result.
(def (string-case-bench str n)
  (assert (equal?* (time (repeat n (t1 str)))
                   (time (repeat n (t2 str)))
                   (time (repeat n (t3 str))))))

;; Same, but comparing only t1 against t3.
(def (string-case-bench2 str n)
  (assert (equal?* (time (repeat n (t1 str)))
                   (time (repeat n (t3 str))))))
;; Unit test: the three implementations agree on every case string and
;; on a string that matches no case.
(TEST
 > (for-each (lambda (str)
               (assert (equal?* (string-case-bench#t1 str)
                                (string-case-bench#t2 str)
                                (string-case-bench#t3 str))))
             (cons "nonexisting case" string-case-bench:cases)))
;; Time n lookups of every case string through implementation t.
(def (string-case-bench-all n t)
  (time (for-each (lambda (str)
                    (repeat n (t str)))
                  string-case-bench:cases)))
;; Time lookups of strings that (almost certainly) match no case:
;; 30 random probes of length < 40, each looked up n times through t.
(def (string-case-bench-nonmatches n t)
  (time (repeat 30
                (let ((probe (random-string (random-integer 40))))
                  (repeat n (t probe))))))
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/string-case-bench.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version. | Copyright 2018 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
(require easy
string-case
memcmp
test-random)
(include "cj-standarddeclares.scm")
(namespace ("string-case-bench#" t1 t2 t3))
(both-times
(def string-case-bench:cases
'("ho"
"hi"
"case"
"cond"
"let"
"if"
"string-case"
"string-cond"
"string="
""
"for"
"foreach"
"forall"
"forever"
"loop"
"while"
"not"
"int"
"hello world"
"hello lovely world how are you today? it's been a long way."))
(def string-case-bench:safe #t))
(def (t1 v)
(IF (not string-case-bench:safe)
(declare (fixnum) (not safe)))
(enable-unquoting
(string-case v
,@(map (lambda (str)
`((,str) ',(.symbol str)))
string-case-bench:cases)
(else 'nomatch))))
(def (t2 v)
(IF (not string-case-bench:safe)
(declare (fixnum) (not safe)))
(enable-unquoting
(cond ,@(map (lambda (str)
`((string=? v ,str) ',(.symbol str)))
string-case-bench:cases)
(else 'nomatch))))
(use-memcmp)
(def (t3 v)
(IF string-case-bench:safe
(assert (string? v)))
(enable-unquoting
(cond ,@(map (lambda (str)
`((memcmp:@string=? v ,str) ',(.symbol str)))
string-case-bench:cases)
(else 'nomatch))))
(def (string-case-bench str n)
(assert (equal?* (time (repeat n (t1 str)))
(time (repeat n (t2 str)))
(time (repeat n (t3 str))))))
(def (string-case-bench2 str n)
(assert (equal?* (time (repeat n (t1 str)))
(time (repeat n (t3 str))))))
(TEST
> (for-each (lambda (str)
(assert (equal?* (string-case-bench#t1 str)
(string-case-bench#t2 str)
(string-case-bench#t3 str))))
(cons "nonexisting case" string-case-bench:cases)))
(def (string-case-bench-all n t)
(time (for-each (lambda (str)
(repeat n (t str)))
string-case-bench:cases)))
(def (string-case-bench-nonmatches n t)
(time (repeat 30
(let ((str (random-string (random-integer 40))))
(repeat n (t str))))))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.