_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
49f91b4ae6e43bf4e030c737b8e0a194315b527897469a84089ac20393753ea3 | ptal/AbSolute | hc4.ml | Copyright 2019 AbSolute Team
This program is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details. *)
open Bounds.Converter
open Lang.Ast
open Core.Bot
open Lang.Rewritting
open Pc_interpretation
module type PC_closure_sig = functor (I: PC_interpretation_sig) ->
sig
module I: PC_interpretation_sig
val incremental_closure: I.A.t -> I.rconstraint -> I.A.t * bool
val entailment: I.A.t -> I.rconstraint -> bool
val project: I.A.t -> I.var_id -> I.V.t
val embed: I.A.t -> I.var_id -> I.V.bound * I.V.bound -> I.A.t
end with module I=I
module Make(I: PC_interpretation_sig) =
struct
module I = I
module A = I.A
module V = I.V
module FromA = Converter(A.B)(I.V.B)
module ToA = Converter(I.V.B)(A.B)
let expr_val expr = I.(expr.value)
let exprs_val exprs = List.map expr_val exprs
let vardom_of abs vid =
let (l,u) = A.project abs vid in
V.of_bounds' (FromA.convert_down l, FromA.convert_up u)
let merge_view abs root vids =
List.fold_left
(fun v vid -> debot (V.meet v (vardom_of abs vid)))
root vids
let project abs vids =
merge_view abs (vardom_of abs (List.hd vids)) (List.tl vids)
let embed abs vids (l,u) =
let (l,u) = ToA.convert_down l, ToA.convert_up u in
List.fold_left (fun abs vid -> A.embed abs vid (l,u)) abs vids
I. Evaluation part
First step of the HC4 - revise algorithm : it computes the intervals for each node of the expression .
For example : given ` x + 3 ` with ` x in [ 1 .. 3 ] ` , it annotates ` + ` with ` [ 4 .. 6 ] ` .
It stores this new valued expression tree ( ` node ` ) into ` rexpr.value ` .
This function is also useful for testing transfer functions errors ( e.g. division by zero ) .
- We raise Bot_found in case the expression only evaluates to error values .
- Otherwise , we return only the non - error values .
First step of the HC4-revise algorithm: it computes the intervals for each node of the expression.
For example: given `x + 3` with `x in [1..3]`, it annotates `+` with `[4..6]`.
It stores this new valued expression tree (`node`) into `rexpr.value`.
This function is also useful for testing transfer functions errors (e.g. division by zero).
- We raise Bot_found in case the expression only evaluates to error values.
- Otherwise, we return only the non-error values.
*)
let rec eval abs expr =
let open I in
match expr.node with
| BVar (vids, _) -> expr.value <- project abs vids
| BCst (v,_) -> expr.value <- v
| BUnary (o,e1) ->
begin
eval abs e1;
match o with
| NEG -> expr.value <- V.unop NEG (expr_val e1)
end
| BBinary (e1,o,e2) ->
begin
eval abs e1;
eval abs e2;
let v1 = expr_val e1 and v2 = expr_val e2 in
let v = match o with
| ADD -> V.binop ADD v1 v2
| SUB -> V.binop SUB v1 v2
| DIV -> debot (V.div v1 v2)
| MUL ->
let r = V.binop MUL v1 v2 in
if V.equal v1 v2 then
(* special case: squares are positive *)
V.unop ABS r
else r
| POW -> V.binop POW v1 v2 in
expr.value <- v
end
| BFuncall(name, args) ->
begin
List.iter (eval abs) args;
let r = debot (V.eval_fun name (exprs_val args)) in
expr.value <- r
end
II . Refine part
Second step of the HC4 - revise algorithm .
It propagates the intervals from the root of the expression tree ` e ` to the leaves .
For example : Given ` y = x + 3 ` , ` x in [ 1 .. 3 ] ` , ` y in [ 1 .. 5 ] ` .
Then after ` eval ` we know that the node at ` + ` has the interval ` [ 4 .. 6 ] ` .
Therefore we can intersect ` y ` with ` [ 4 .. 6 ] ` due to the equality .
Note that we can call again ` eval ` to restrain further ` + ` , and then another round of ` refine ` will restrain ` x ` as well .
We raise ` Bot_found ` in case of unsatisfiability .
NOTE : This step is functional : it does not modify the ` rexpr.value ` field .
Second step of the HC4-revise algorithm.
It propagates the intervals from the root of the expression tree `e` to the leaves.
For example: Given `y = x + 3`, `x in [1..3]`, `y in [1..5]`.
Then after `eval` we know that the node at `+` has the interval `[4..6]`.
Therefore we can intersect `y` with `[4..6]` due to the equality.
Note that we can call again `eval` to restrain further `+`, and then another round of `refine` will restrain `x` as well.
We raise `Bot_found` in case of unsatisfiability.
NOTE: This step is functional: it does not modify the `rexpr.value` field.
*)
(* refines binary operator to handle constants *)
let refine_bop f1 f2 e1 e2 x (b:bool) =
let open I in
match e1.node, e2.node, b with
| BCst _, BCst _, _ -> Nb (e1.value, e2.value)
| BCst _, _, true -> merge_bot2 (Nb e1.value) (f2 e2.value e1.value x)
| BCst _, _, false -> merge_bot2 (Nb e1.value) (f2 e2.value x e1.value)
| _, BCst _, _ -> merge_bot2 (f1 e1.value e2.value x) (Nb e2.value)
| _, _, true -> merge_bot2 (f1 e1.value e2.value x) (f2 e2.value e1.value x)
| _, _, false -> merge_bot2 (f1 e1.value e2.value x) (f2 e2.value x e1.value)
(* u + v = r => u = r - v /\ v = r - u *)
let refine_add u v r =
refine_bop (V.filter_binop_f ADD) (V.filter_binop_f ADD) u v r true
(* u - v = r => u = r + v /\ v = u - r *)
let refine_sub u v r =
refine_bop (V.filter_binop_f SUB) (V.filter_binop_f ADD) u v r false
u * v = r = > ( u = r / v \/ v = r=0 ) /\ ( v = r / u \/ u = r=0 )
let refine_mul u v r =
refine_bop (V.filter_binop_f MUL) (V.filter_binop_f MUL) u v r true
u / v = r = > u = r * v /\ ( v = u / r \/ u = r=0 )
let refine_div u v r =
refine_bop V.filter_div_f (V.filter_binop_f MUL) u v r false
let refine readonly abs root expr =
let rec aux abs root expr =
let open I in
match expr with
| BFuncall(name,args) ->
let res = V.filter_fun name (List.map (fun e -> I.(e.value)) args) root in
List.fold_left2 (fun acc res e -> aux acc res e.node) abs (debot res) args
| BVar (vids, _) ->
if readonly then
let _ = ignore(merge_view abs root vids) in
abs
else
let (l,u) = V.to_range root in
Format.printf " Embed % a in % s for % d domains.\n " V.print root tv.name ( vids ) ;
embed abs vids (l,u)
| BCst (i,_) -> ignore (debot (V.meet root i)); abs
| BUnary (op,e) ->
let j = match op with
| NEG -> V.filter_unop NEG e.value root
in aux abs (debot j) e.node
| BBinary (e1,o,e2) ->
let j = match o with
| ADD -> refine_add e1 e2 root
| SUB -> refine_sub e1 e2 root
| MUL -> refine_mul e1 e2 root
| DIV -> refine_div e1 e2 root
| POW -> V.filter_binop POW e1.value e2.value root
in
let j1,j2 = debot j in
aux (aux abs j1 e1.node) j2 e2.node in
aux abs root expr
III . HC4 - revise algorithm ( combining eval and refine ) .
Apply the evaluation followed by the refine step of the HC4 - revise algorithm .
It prunes the domain of the variables in ` abs ` according to the constraint ` e1 o e2 ` .
Apply the evaluation followed by the refine step of the HC4-revise algorithm.
It prunes the domain of the variables in `abs` according to the constraint `e1 o e2`.
*)
let hc4_revise readonly abs (e1,op,e2) =
let i1,i2 = expr_val e1, expr_val e2 in
let j1,j2 = match op with
| LT -> debot (V.filter_lt i1 i2)
| LEQ -> debot (V.filter_leq i1 i2)
(* a > b <=> b < a*)
| GEQ -> let j2,j1 = debot (V.filter_leq i2 i1) in (j1,j2)
| GT -> let j2,j1 = debot (V.filter_lt i2 i1) in (j1,j2)
| NEQ -> debot (V.filter_neq i1 i2)
| EQ -> debot (V.filter_eq i1 i2)
in
let refined_store = if V.equal j1 i1 then abs else refine readonly abs j1 I.(e1.node) in
if j2 = i2 then refined_store else refine readonly refined_store j2 I.(e2.node)
let hc4_eval_revise abs (e1,op,e2) =
begin
eval abs e1;
eval abs e2;
let abs = hc4_revise false abs (e1,op,e2) in
try
ignore(hc4_revise true abs (e1,neg op,e2));
abs, false
with Bot_found -> abs, true
end
let incremental_closure abs c = hc4_eval_revise abs c
let entailment abs (e1,op,e2) =
try
eval abs e1;
eval abs e2;
ignore(hc4_revise true abs (e1,neg op,e2));
false
with Bot_found -> true
end
| null | https://raw.githubusercontent.com/ptal/AbSolute/469159d87e3a717499573c1e187e5cfa1b569829/src/transformers/propagator_completion/hc4.ml | ocaml | special case: squares are positive
refines binary operator to handle constants
u + v = r => u = r - v /\ v = r - u
u - v = r => u = r + v /\ v = u - r
a > b <=> b < a | Copyright 2019 AbSolute Team
This program is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details. *)
open Bounds.Converter
open Lang.Ast
open Core.Bot
open Lang.Rewritting
open Pc_interpretation
module type PC_closure_sig = functor (I: PC_interpretation_sig) ->
sig
module I: PC_interpretation_sig
val incremental_closure: I.A.t -> I.rconstraint -> I.A.t * bool
val entailment: I.A.t -> I.rconstraint -> bool
val project: I.A.t -> I.var_id -> I.V.t
val embed: I.A.t -> I.var_id -> I.V.bound * I.V.bound -> I.A.t
end with module I=I
module Make(I: PC_interpretation_sig) =
struct
module I = I
module A = I.A
module V = I.V
module FromA = Converter(A.B)(I.V.B)
module ToA = Converter(I.V.B)(A.B)
let expr_val expr = I.(expr.value)
let exprs_val exprs = List.map expr_val exprs
let vardom_of abs vid =
let (l,u) = A.project abs vid in
V.of_bounds' (FromA.convert_down l, FromA.convert_up u)
let merge_view abs root vids =
List.fold_left
(fun v vid -> debot (V.meet v (vardom_of abs vid)))
root vids
let project abs vids =
merge_view abs (vardom_of abs (List.hd vids)) (List.tl vids)
let embed abs vids (l,u) =
let (l,u) = ToA.convert_down l, ToA.convert_up u in
List.fold_left (fun abs vid -> A.embed abs vid (l,u)) abs vids
I. Evaluation part
First step of the HC4 - revise algorithm : it computes the intervals for each node of the expression .
For example : given ` x + 3 ` with ` x in [ 1 .. 3 ] ` , it annotates ` + ` with ` [ 4 .. 6 ] ` .
It stores this new valued expression tree ( ` node ` ) into ` rexpr.value ` .
This function is also useful for testing transfer functions errors ( e.g. division by zero ) .
- We raise Bot_found in case the expression only evaluates to error values .
- Otherwise , we return only the non - error values .
First step of the HC4-revise algorithm: it computes the intervals for each node of the expression.
For example: given `x + 3` with `x in [1..3]`, it annotates `+` with `[4..6]`.
It stores this new valued expression tree (`node`) into `rexpr.value`.
This function is also useful for testing transfer functions errors (e.g. division by zero).
- We raise Bot_found in case the expression only evaluates to error values.
- Otherwise, we return only the non-error values.
*)
let rec eval abs expr =
let open I in
match expr.node with
| BVar (vids, _) -> expr.value <- project abs vids
| BCst (v,_) -> expr.value <- v
| BUnary (o,e1) ->
begin
eval abs e1;
match o with
| NEG -> expr.value <- V.unop NEG (expr_val e1)
end
| BBinary (e1,o,e2) ->
begin
eval abs e1;
eval abs e2;
let v1 = expr_val e1 and v2 = expr_val e2 in
let v = match o with
| ADD -> V.binop ADD v1 v2
| SUB -> V.binop SUB v1 v2
| DIV -> debot (V.div v1 v2)
| MUL ->
let r = V.binop MUL v1 v2 in
if V.equal v1 v2 then
V.unop ABS r
else r
| POW -> V.binop POW v1 v2 in
expr.value <- v
end
| BFuncall(name, args) ->
begin
List.iter (eval abs) args;
let r = debot (V.eval_fun name (exprs_val args)) in
expr.value <- r
end
II . Refine part
Second step of the HC4 - revise algorithm .
It propagates the intervals from the root of the expression tree ` e ` to the leaves .
For example : Given ` y = x + 3 ` , ` x in [ 1 .. 3 ] ` , ` y in [ 1 .. 5 ] ` .
Then after ` eval ` we know that the node at ` + ` has the interval ` [ 4 .. 6 ] ` .
Therefore we can intersect ` y ` with ` [ 4 .. 6 ] ` due to the equality .
Note that we can call again ` eval ` to restrain further ` + ` , and then another round of ` refine ` will restrain ` x ` as well .
We raise ` Bot_found ` in case of unsatisfiability .
NOTE : This step is functional : it does not modify the ` rexpr.value ` field .
Second step of the HC4-revise algorithm.
It propagates the intervals from the root of the expression tree `e` to the leaves.
For example: Given `y = x + 3`, `x in [1..3]`, `y in [1..5]`.
Then after `eval` we know that the node at `+` has the interval `[4..6]`.
Therefore we can intersect `y` with `[4..6]` due to the equality.
Note that we can call again `eval` to restrain further `+`, and then another round of `refine` will restrain `x` as well.
We raise `Bot_found` in case of unsatisfiability.
NOTE: This step is functional: it does not modify the `rexpr.value` field.
*)
let refine_bop f1 f2 e1 e2 x (b:bool) =
let open I in
match e1.node, e2.node, b with
| BCst _, BCst _, _ -> Nb (e1.value, e2.value)
| BCst _, _, true -> merge_bot2 (Nb e1.value) (f2 e2.value e1.value x)
| BCst _, _, false -> merge_bot2 (Nb e1.value) (f2 e2.value x e1.value)
| _, BCst _, _ -> merge_bot2 (f1 e1.value e2.value x) (Nb e2.value)
| _, _, true -> merge_bot2 (f1 e1.value e2.value x) (f2 e2.value e1.value x)
| _, _, false -> merge_bot2 (f1 e1.value e2.value x) (f2 e2.value x e1.value)
let refine_add u v r =
refine_bop (V.filter_binop_f ADD) (V.filter_binop_f ADD) u v r true
let refine_sub u v r =
refine_bop (V.filter_binop_f SUB) (V.filter_binop_f ADD) u v r false
u * v = r = > ( u = r / v \/ v = r=0 ) /\ ( v = r / u \/ u = r=0 )
let refine_mul u v r =
refine_bop (V.filter_binop_f MUL) (V.filter_binop_f MUL) u v r true
u / v = r = > u = r * v /\ ( v = u / r \/ u = r=0 )
let refine_div u v r =
refine_bop V.filter_div_f (V.filter_binop_f MUL) u v r false
let refine readonly abs root expr =
let rec aux abs root expr =
let open I in
match expr with
| BFuncall(name,args) ->
let res = V.filter_fun name (List.map (fun e -> I.(e.value)) args) root in
List.fold_left2 (fun acc res e -> aux acc res e.node) abs (debot res) args
| BVar (vids, _) ->
if readonly then
let _ = ignore(merge_view abs root vids) in
abs
else
let (l,u) = V.to_range root in
Format.printf " Embed % a in % s for % d domains.\n " V.print root tv.name ( vids ) ;
embed abs vids (l,u)
| BCst (i,_) -> ignore (debot (V.meet root i)); abs
| BUnary (op,e) ->
let j = match op with
| NEG -> V.filter_unop NEG e.value root
in aux abs (debot j) e.node
| BBinary (e1,o,e2) ->
let j = match o with
| ADD -> refine_add e1 e2 root
| SUB -> refine_sub e1 e2 root
| MUL -> refine_mul e1 e2 root
| DIV -> refine_div e1 e2 root
| POW -> V.filter_binop POW e1.value e2.value root
in
let j1,j2 = debot j in
aux (aux abs j1 e1.node) j2 e2.node in
aux abs root expr
III . HC4 - revise algorithm ( combining eval and refine ) .
Apply the evaluation followed by the refine step of the HC4 - revise algorithm .
It prunes the domain of the variables in ` abs ` according to the constraint ` e1 o e2 ` .
Apply the evaluation followed by the refine step of the HC4-revise algorithm.
It prunes the domain of the variables in `abs` according to the constraint `e1 o e2`.
*)
let hc4_revise readonly abs (e1,op,e2) =
let i1,i2 = expr_val e1, expr_val e2 in
let j1,j2 = match op with
| LT -> debot (V.filter_lt i1 i2)
| LEQ -> debot (V.filter_leq i1 i2)
| GEQ -> let j2,j1 = debot (V.filter_leq i2 i1) in (j1,j2)
| GT -> let j2,j1 = debot (V.filter_lt i2 i1) in (j1,j2)
| NEQ -> debot (V.filter_neq i1 i2)
| EQ -> debot (V.filter_eq i1 i2)
in
let refined_store = if V.equal j1 i1 then abs else refine readonly abs j1 I.(e1.node) in
if j2 = i2 then refined_store else refine readonly refined_store j2 I.(e2.node)
let hc4_eval_revise abs (e1,op,e2) =
begin
eval abs e1;
eval abs e2;
let abs = hc4_revise false abs (e1,op,e2) in
try
ignore(hc4_revise true abs (e1,neg op,e2));
abs, false
with Bot_found -> abs, true
end
let incremental_closure abs c = hc4_eval_revise abs c
let entailment abs (e1,op,e2) =
try
eval abs e1;
eval abs e2;
ignore(hc4_revise true abs (e1,neg op,e2));
false
with Bot_found -> true
end
|
e332b713b6261d7e4cdf94dd2bd4548d8dbae622b2698d6ab1fd8f9058446f8f | emqx/emqx | emqx_ee_conf_schema_SUITE.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2022 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%--------------------------------------------------------------------
-module(emqx_ee_conf_schema_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() ->
emqx_common_test_helpers:all(?MODULE).
%%------------------------------------------------------------------------------
%% Tests
%%------------------------------------------------------------------------------
t_namespace(_Config) ->
?assertEqual(
emqx_conf_schema:namespace(),
emqx_ee_conf_schema:namespace()
).
t_roots(_Config) ->
BaseRoots = emqx_conf_schema:roots(),
EnterpriseRoots = emqx_ee_conf_schema:roots(),
?assertEqual([], BaseRoots -- EnterpriseRoots),
?assert(
lists:any(
fun
({license, _}) -> true;
(_) -> false
end,
EnterpriseRoots
)
).
t_fields(_Config) ->
?assertEqual(
emqx_conf_schema:fields("node"),
emqx_ee_conf_schema:fields("node")
).
t_translations(_Config) ->
[Root | _] = emqx_ee_conf_schema:translations(),
?assertEqual(
emqx_conf_schema:translation(Root),
emqx_ee_conf_schema:translation(Root)
).
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/lib-ee/emqx_ee_conf/test/emqx_ee_conf_schema_SUITE.erl | erlang | --------------------------------------------------------------------
--------------------------------------------------------------------
------------------------------------------------------------------------------
Tests
------------------------------------------------------------------------------ | Copyright ( c ) 2022 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
-module(emqx_ee_conf_schema_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() ->
emqx_common_test_helpers:all(?MODULE).
t_namespace(_Config) ->
?assertEqual(
emqx_conf_schema:namespace(),
emqx_ee_conf_schema:namespace()
).
t_roots(_Config) ->
BaseRoots = emqx_conf_schema:roots(),
EnterpriseRoots = emqx_ee_conf_schema:roots(),
?assertEqual([], BaseRoots -- EnterpriseRoots),
?assert(
lists:any(
fun
({license, _}) -> true;
(_) -> false
end,
EnterpriseRoots
)
).
t_fields(_Config) ->
?assertEqual(
emqx_conf_schema:fields("node"),
emqx_ee_conf_schema:fields("node")
).
t_translations(_Config) ->
[Root | _] = emqx_ee_conf_schema:translations(),
?assertEqual(
emqx_conf_schema:translation(Root),
emqx_ee_conf_schema:translation(Root)
).
|
b6ec07b961947f15a8bab2666d475c587cacd811add48c97188fbd5c18650e97 | exercism/babashka | change_test.clj | (ns change-test
(:require [clojure.test :refer [deftest is]]
[change :refer [issue]]))
(deftest single-coin-change
(is (= (issue 25 #{1 5 10 25 100})
'(25))))
(deftest multiple-coin-change
(is (= (issue 15 #{1 5 10 25 100})
'(5 10))))
(deftest change-with-lilliputian-coins
(is (= (issue 23 #{1 4 15 20 50})
'(4 4 15))))
(deftest change-with-elbonia-coins
(is (= (issue 63 #{1 5 10 21 25})
'(21 21 21))))
(deftest large-target-values
(is (= (issue 999 #{1 2 5 10 20 50 100})
'(2 2 5 20 20 50 100 100 100 100 100 100 100 100 100))))
(deftest no-coins-make-zero-change
(is (empty? (issue 0 #{1, 5, 10, 21, 25}))))
(deftest error-testing-for-change-smallet-than-the-smallest-coin
(is (thrown-with-msg? IllegalArgumentException #"cannot change"
(issue 3 #{5 10}))))
(deftest cannot-find-negative-change-values
(is (thrown-with-msg? IllegalArgumentException #"cannot change"
(issue -5 #{1 2 5}))))
| null | https://raw.githubusercontent.com/exercism/babashka/707356c52e08490e66cb1b2e63e4f4439d91cf08/exercises/practice/change/test/change_test.clj | clojure | (ns change-test
(:require [clojure.test :refer [deftest is]]
[change :refer [issue]]))
(deftest single-coin-change
(is (= (issue 25 #{1 5 10 25 100})
'(25))))
(deftest multiple-coin-change
(is (= (issue 15 #{1 5 10 25 100})
'(5 10))))
(deftest change-with-lilliputian-coins
(is (= (issue 23 #{1 4 15 20 50})
'(4 4 15))))
(deftest change-with-elbonia-coins
(is (= (issue 63 #{1 5 10 21 25})
'(21 21 21))))
(deftest large-target-values
(is (= (issue 999 #{1 2 5 10 20 50 100})
'(2 2 5 20 20 50 100 100 100 100 100 100 100 100 100))))
(deftest no-coins-make-zero-change
(is (empty? (issue 0 #{1, 5, 10, 21, 25}))))
(deftest error-testing-for-change-smallet-than-the-smallest-coin
(is (thrown-with-msg? IllegalArgumentException #"cannot change"
(issue 3 #{5 10}))))
(deftest cannot-find-negative-change-values
(is (thrown-with-msg? IllegalArgumentException #"cannot change"
(issue -5 #{1 2 5}))))
| |
a2366a636b6d62f738c49a72a91e7253f3315039f6fd9826a37a4af74e57458b | korya/efuns | top.ml | (***********************************************************************)
(* *)
xlib for
(* *)
Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
(* *)
(***********************************************************************)
open Xtypes
open Gwml
open Wob
type top_internals = {
mutable center : wob option ;
mutable left : wob option;
mutable right : wob option;
mutable title : wob option;
mutable bottom : wob option;
borders : geometry;
}
let client_mask = [
StructureNotifyMask;
PropertyChangeMask;
VisibilityChangeMask;
EnterWindowMask;
LeaveWindowMask;
ColormapChangeMask;
FocusChangeMask]
let top_mask =
[StructureNotifyMask;PropertyChangeMask;
VisibilityChangeMask ;EnterWindowMask ;
LeaveWindowMask; SubstructureRedirectMask;
SubstructureNotifyMask; ButtonPressMask; KeyPressMask;
ColormapChangeMask ; FocusChangeMask]
let notify_mode mode =
match mode with
| NotifyNormal -> "NotifyNormal"
| NotifyGrab -> "NotifyGrab"
| NotifyUngrab -> "NotifyUngrab"
| NotifyWhileGrabbed -> "NotifyWhileGrabbed"
let notify_detail detail =
match detail with
| NotifyAncestor -> "NotifyAncestor"
| NotifyVirtual -> "NotifyVirtual"
| NotifyInferior -> "NotifyInferior"
| NotifyNonlinear -> "NotifyNonlinear"
| NotifyNonlinearVirtual -> "NotifyNonlinearVirtual"
| NotifyPointer -> "NotifyPointer"
| NotifyPointerRoot -> "NotifyPointerRoot"
| NotifyDetailNone -> "NotifyDetailNone"
let ssend opt e =
match opt with
None -> ()
| Some w -> Wob.send w e
let broadcast d e =
ssend d.center e;
ssend d.left e;
ssend d.right e;
ssend d.title e;
ssend d.bottom e
open Options
let no_borders_with_shape = define_option ["no_borders_with_shape"] ""
bool_option true
let set_shape w deco =
let module S = Shape in
let module X = S.X in
let tw = w in
let init = S.Set in
let init = let g = w.w_geometry in
if g.border > 0 && not !!no_borders_with_shape then begin
X.shapeRectangles display w.w_window S.Bounding 0 0 [
- g.border, - g.border,
g.width + 2 * g.border, g.height + 2 * g.border
] init UnSorted;
X.shapeRectangles display w.w_window S.Bounding 0 0
[
0,0, g.width, g.height
] S.Substract UnSorted;
S.Union
end else init
in
let init = match deco.center with None -> init | Some w ->
let g = w.w_geometry in
X.shapeRectangles display w.w_window S.Bounding 0 0
[ -g.border , -g.border , g.width+2*g.border+1 , g.height+2*g.border+1
] S.Union UnSorted ;
X.shapeRectangles display w.w_window S.Bounding 0 0
[-g.border, -g.border, g.width+2*g.border+1, g.height+2*g.border+1
] S.Union UnSorted;
*)
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init;
S.Union in
let init = match deco.left with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
let init = match deco.right with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
let init = match deco.title with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
let init = match deco.bottom with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
()
let size name ow =
match ow with
None -> ()
| Some w ->
let g = w.w_geometry in
Log.printf "Inside: %s\n" name;
Log.printf "x= %d\n" g.x;
Log.printf "y= %d\n" g.y;
Log.printf "dx= %d\n" g.width;
Log.printf "dy= %d\n" g.height;
Log.printf "border= %d\n" g.border
let top_list = ref []
class top deco name =
object (self)
inherit wob_base as super
val mutable client = (None: client_desc option)
val deco = deco
val over = (name = "")
val name = name
method deco = deco
method set_client c = client <- Some c
method client = match client with None -> raise Not_found | Some c -> c
method is_shaped = is_shaped
method iter f =
f (self :> wob_desc);
let d = deco in
List.iter (fun d ->
match d with None -> () | Some w ->
w.w_oo#iter f) [d.center; d.left; d.right; d.title; d.bottom]
method mask =
if over then super#mask else client_mask @ super#mask
method first_hook e =
let w = self#wob in
match e with
WobGetSize ->
broadcast deco WobGetSize;
let width,height = match deco.center with
None -> assert false
| Some w ->
let g = w.w_geometry in
g.width + 2 * g.border,
g.height + 2 * g.border in
deco.borders.x <- (match deco.left with None->0
| Some w-> let g = w.w_geometry in g.width + 2 * g.border);
deco.borders.y <- (match deco.title with None->0
| Some w-> let g = w.w_geometry in g.height+2*g.border);
deco.borders.width <- (match deco.right with None->0
| Some w-> let g = w.w_geometry in g.width + 2 * g.border);
deco.borders.height <- (match deco.bottom with None->0
| Some w-> let g = w.w_geometry in g.height+2*g.border);
w.w_geometry.width <- width + deco.borders.x + deco.borders.width;
w.w_geometry.height <- height + deco.borders.y + deco.borders.height;
Wob.send w (WobResize false);
| WobResize force ->
let borders = deco.borders in
let tg = w.w_geometry in
let ldx = borders.x in
let rdx = borders.width in
let tdy = borders.y in
let bdy = borders.height in
let dx = tg.width - ldx - rdx in
let dy = tg.height - tdy - bdy in
let cw = match deco.center with None -> assert false | Some w->w in
let cg = cw.w_geometry in
cg.x <- ldx;
cg.y <- tdy;
cg.width <- dx - 2 * cg.border;
cg.height <- dy - 2 * cg.border;
begin
match deco.left with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- 0;
g.y <- tdy;
g.width <- ldx - 2 * g.border;
g.height <- dy - 2 * g.border;
end;
begin
match deco.right with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- dx + ldx;
g.y <- tdy;
g.width <- rdx - 2 * g.border;
g.height <- dy - 2 * g.border;
end;
begin
match deco.title with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- 0;
g.y <- 0;
g.width <- tg.width - 2 * g.border;
g.height <- tdy - 2 * g.border;
end;
begin
match deco.bottom with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- 0;
g.y <- tdy + dy;
g.width <- tg.width - 2 * g.border;
g.height <- bdy - 2 * g.border;
end;
Log.printf " RESIZE : % s\n " " " ;
Log.printf " Width % d\n " tg.width ;
Log.printf " Height % d\n " tg.height ;
Log.printf " Border % d\n " tg.border ;
size " Center " deco.center ;
size " Left " deco.left ;
size " Right " deco.right ;
size " Top " deco.title ;
size " Bottom " deco.bottom ;
Log.printf "RESIZE: %s\n" "";
Log.printf "Width %d\n" tg.width;
Log.printf "Height %d\n" tg.height;
Log.printf "Border %d\n" tg.border;
size "Center" deco.center;
size "Left" deco.left;
size "Right" deco.right;
size "Top" deco.title;
size "Bottom" deco.bottom;
*)
if w.w_window <> noWindow then begin
if over then begin
Xlib.moveResizeWindow display w.w_window
tg.x tg.y tg.width tg.height
end
else begin
Xlib.resizeWindow display w.w_window tg.width tg.height;
let s = w.w_screen in
let (c,cw) = Wintbl.find s.s_clients w.w_window in
let cg = cw.w_geometry in
let ctg = cw.w_top.w_geometry in
cg.width <- tg.width;
cg.height <- tg.height;
Wob.send cw.w_top WobGetSize
end
end;
broadcast deco (WobResize force);
if is_shaped then Wob.send_one w WobUpdateShape
| WobUpdateShape ->
broadcast deco WobUpdateShape;
if is_shaped then set_shape w deco
| WobCreate ->
let tw = w in
let g = tw.w_geometry in
Wob.setenv w is_mapped false;
self#create over;
if not over then
begin
Icccm.setWM_NAME display w.w_window name;
Icccm.setWM_CLASS display w.w_window ["gwml_"^name;"GwML"];
end;
let shaped wo = match wo with
None -> false | Some w -> w.w_oo#is_shaped in
broadcast deco WobCreate;
is_shaped <- (shaped deco.title) || (shaped deco.bottom) ||
(shaped deco.center) || (shaped deco.left) || (shaped deco.right);
if is_shaped then Wob.send_one w WobUpdateShape;
| WobMap ->
let g = w.w_geometry in
if over then begin
Xlib.moveWindow display w.w_window g.x g.y;
end;
X.mapWindow display w.w_window;
Wob.setenv w is_mapped true;
broadcast deco WobMap;
(* Wob.send w WobRaiseWindow *)
| WobUnmap iconify ->
X.unmapWindow display w.w_window;
Wob.setenv w is_mapped false;
broadcast deco (WobUnmap iconify)
| WobDestroy ->
let s = w.w_screen in
broadcast deco WobDestroy;
if w.w_window <> noWindow then
begin
Eloop.remove_window s.s_scheduler w.w_window;
X.destroyWindow display w.w_window;
w.w_window <- noWindow
end
| WobMove ->
let g = w.w_geometry in
broadcast deco e;
Xlib.moveWindow display w.w_window g.x g.y
| WobKeyPress (e,s,k) -> self#handle_key (e,s,k)
| WobButtonPress e -> self#handle_button e
| WobButtonRelease _
| WobEnter
| WobLeave _ -> () (* discard these local events *)
Xlib.raiseWindow display w.w_window
| WobLowerWindow -> Xlib.lowerWindow display w.w_window
| _ -> broadcast deco e
end
let last_hook w e = ()
let make sw name hooks center left right top bottom =
let sw = sw.w_top.w_parent in
let deco = {
center = None;
left = None;
right = None;
title = None;
bottom = None;
borders = { x=0; y=0; width=0; height=0; border=0; };
}
in
let g = { x=0; y=0; width=0; height=0; border=0; } in
let desc = new top deco name in
let rec w = {
w_window = noWindow;
w_parent = sw;
w_top = w;
w_screen = sw.w_screen;
w_geometry = g;
w_env = Wobenv.empty ();
w_oo = (desc :> Gwml.wob_desc);
w_queue = [];
}
in
desc#set_wob w;
let g = w.w_geometry in
List.iter (fun hook -> desc#add_hook (hook desc)) hooks;
send w WobInit;
let some opt = match opt with None->None |
Some wob -> Some(Wob.make w wob) in
deco.center <- Some (Wob.make w center);
deco.left <- some left;
deco.right <- some right;
deco.title <- some top;
deco.bottom <- some bottom;
let s = w.w_screen in
exec_hooks w s.s_top_opening_hooks;
Wob.send w WobGetSize;
Wob.send w WobCreate;
desc
(* Resize client wob *)
let resize w dx dy =
let g = w.w_geometry in
g.width <- dx;
g.height <- dy;
Wob.send_one w.w_top WobGetSize
(* Resize top window *)
let resize_top w dx dy =
w.w_geometry.width <- dx;
w.w_geometry.height <- dy;
Wob.send_one w (WobResize false)
type window_desc =
(top -> Gwml.wob_event -> unit) list *
Gwml.wob_desc option *
Gwml.wob_desc option *
Gwml.wob_desc option *
Gwml.wob_desc option
| null | https://raw.githubusercontent.com/korya/efuns/78b21d9dff45b7eec764c63132c7a564f5367c30/gwml/top.ml | ocaml | *********************************************************************
*********************************************************************
Wob.send w WobRaiseWindow
discard these local events
Resize client wob
Resize top window | xlib for
Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
Copyright 1999 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
open Xtypes
open Gwml
open Wob
type top_internals = {
mutable center : wob option ;
mutable left : wob option;
mutable right : wob option;
mutable title : wob option;
mutable bottom : wob option;
borders : geometry;
}
let client_mask = [
StructureNotifyMask;
PropertyChangeMask;
VisibilityChangeMask;
EnterWindowMask;
LeaveWindowMask;
ColormapChangeMask;
FocusChangeMask]
let top_mask =
[StructureNotifyMask;PropertyChangeMask;
VisibilityChangeMask ;EnterWindowMask ;
LeaveWindowMask; SubstructureRedirectMask;
SubstructureNotifyMask; ButtonPressMask; KeyPressMask;
ColormapChangeMask ; FocusChangeMask]
let notify_mode mode =
match mode with
| NotifyNormal -> "NotifyNormal"
| NotifyGrab -> "NotifyGrab"
| NotifyUngrab -> "NotifyUngrab"
| NotifyWhileGrabbed -> "NotifyWhileGrabbed"
let notify_detail detail =
match detail with
| NotifyAncestor -> "NotifyAncestor"
| NotifyVirtual -> "NotifyVirtual"
| NotifyInferior -> "NotifyInferior"
| NotifyNonlinear -> "NotifyNonlinear"
| NotifyNonlinearVirtual -> "NotifyNonlinearVirtual"
| NotifyPointer -> "NotifyPointer"
| NotifyPointerRoot -> "NotifyPointerRoot"
| NotifyDetailNone -> "NotifyDetailNone"
let ssend opt e =
match opt with
None -> ()
| Some w -> Wob.send w e
let broadcast d e =
ssend d.center e;
ssend d.left e;
ssend d.right e;
ssend d.title e;
ssend d.bottom e
open Options
let no_borders_with_shape = define_option ["no_borders_with_shape"] ""
bool_option true
let set_shape w deco =
let module S = Shape in
let module X = S.X in
let tw = w in
let init = S.Set in
let init = let g = w.w_geometry in
if g.border > 0 && not !!no_borders_with_shape then begin
X.shapeRectangles display w.w_window S.Bounding 0 0 [
- g.border, - g.border,
g.width + 2 * g.border, g.height + 2 * g.border
] init UnSorted;
X.shapeRectangles display w.w_window S.Bounding 0 0
[
0,0, g.width, g.height
] S.Substract UnSorted;
S.Union
end else init
in
let init = match deco.center with None -> init | Some w ->
let g = w.w_geometry in
X.shapeRectangles display w.w_window S.Bounding 0 0
[ -g.border , -g.border , g.width+2*g.border+1 , g.height+2*g.border+1
] S.Union UnSorted ;
X.shapeRectangles display w.w_window S.Bounding 0 0
[-g.border, -g.border, g.width+2*g.border+1, g.height+2*g.border+1
] S.Union UnSorted;
*)
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init;
S.Union in
let init = match deco.left with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
let init = match deco.right with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
let init = match deco.title with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
let init = match deco.bottom with None -> init | Some w ->
let g = w.w_geometry in
X.shapeCombine display tw.w_window S.Bounding
(g.x+g.border) (g.y+g.border)
w.w_window S.Bounding init; S.Union in
()
let size name ow =
match ow with
None -> ()
| Some w ->
let g = w.w_geometry in
Log.printf "Inside: %s\n" name;
Log.printf "x= %d\n" g.x;
Log.printf "y= %d\n" g.y;
Log.printf "dx= %d\n" g.width;
Log.printf "dy= %d\n" g.height;
Log.printf "border= %d\n" g.border
let top_list = ref []
class top deco name =
object (self)
inherit wob_base as super
val mutable client = (None: client_desc option)
val deco = deco
val over = (name = "")
val name = name
method deco = deco
method set_client c = client <- Some c
method client = match client with None -> raise Not_found | Some c -> c
method is_shaped = is_shaped
method iter f =
f (self :> wob_desc);
let d = deco in
List.iter (fun d ->
match d with None -> () | Some w ->
w.w_oo#iter f) [d.center; d.left; d.right; d.title; d.bottom]
method mask =
if over then super#mask else client_mask @ super#mask
method first_hook e =
let w = self#wob in
match e with
WobGetSize ->
broadcast deco WobGetSize;
let width,height = match deco.center with
None -> assert false
| Some w ->
let g = w.w_geometry in
g.width + 2 * g.border,
g.height + 2 * g.border in
deco.borders.x <- (match deco.left with None->0
| Some w-> let g = w.w_geometry in g.width + 2 * g.border);
deco.borders.y <- (match deco.title with None->0
| Some w-> let g = w.w_geometry in g.height+2*g.border);
deco.borders.width <- (match deco.right with None->0
| Some w-> let g = w.w_geometry in g.width + 2 * g.border);
deco.borders.height <- (match deco.bottom with None->0
| Some w-> let g = w.w_geometry in g.height+2*g.border);
w.w_geometry.width <- width + deco.borders.x + deco.borders.width;
w.w_geometry.height <- height + deco.borders.y + deco.borders.height;
Wob.send w (WobResize false);
| WobResize force ->
let borders = deco.borders in
let tg = w.w_geometry in
let ldx = borders.x in
let rdx = borders.width in
let tdy = borders.y in
let bdy = borders.height in
let dx = tg.width - ldx - rdx in
let dy = tg.height - tdy - bdy in
let cw = match deco.center with None -> assert false | Some w->w in
let cg = cw.w_geometry in
cg.x <- ldx;
cg.y <- tdy;
cg.width <- dx - 2 * cg.border;
cg.height <- dy - 2 * cg.border;
begin
match deco.left with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- 0;
g.y <- tdy;
g.width <- ldx - 2 * g.border;
g.height <- dy - 2 * g.border;
end;
begin
match deco.right with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- dx + ldx;
g.y <- tdy;
g.width <- rdx - 2 * g.border;
g.height <- dy - 2 * g.border;
end;
begin
match deco.title with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- 0;
g.y <- 0;
g.width <- tg.width - 2 * g.border;
g.height <- tdy - 2 * g.border;
end;
begin
match deco.bottom with
None -> ()
| Some w -> let g = w.w_geometry in
g.x <- 0;
g.y <- tdy + dy;
g.width <- tg.width - 2 * g.border;
g.height <- bdy - 2 * g.border;
end;
Log.printf " RESIZE : % s\n " " " ;
Log.printf " Width % d\n " tg.width ;
Log.printf " Height % d\n " tg.height ;
Log.printf " Border % d\n " tg.border ;
size " Center " deco.center ;
size " Left " deco.left ;
size " Right " deco.right ;
size " Top " deco.title ;
size " Bottom " deco.bottom ;
Log.printf "RESIZE: %s\n" "";
Log.printf "Width %d\n" tg.width;
Log.printf "Height %d\n" tg.height;
Log.printf "Border %d\n" tg.border;
size "Center" deco.center;
size "Left" deco.left;
size "Right" deco.right;
size "Top" deco.title;
size "Bottom" deco.bottom;
*)
if w.w_window <> noWindow then begin
if over then begin
Xlib.moveResizeWindow display w.w_window
tg.x tg.y tg.width tg.height
end
else begin
Xlib.resizeWindow display w.w_window tg.width tg.height;
let s = w.w_screen in
let (c,cw) = Wintbl.find s.s_clients w.w_window in
let cg = cw.w_geometry in
let ctg = cw.w_top.w_geometry in
cg.width <- tg.width;
cg.height <- tg.height;
Wob.send cw.w_top WobGetSize
end
end;
broadcast deco (WobResize force);
if is_shaped then Wob.send_one w WobUpdateShape
| WobUpdateShape ->
broadcast deco WobUpdateShape;
if is_shaped then set_shape w deco
| WobCreate ->
let tw = w in
let g = tw.w_geometry in
Wob.setenv w is_mapped false;
self#create over;
if not over then
begin
Icccm.setWM_NAME display w.w_window name;
Icccm.setWM_CLASS display w.w_window ["gwml_"^name;"GwML"];
end;
let shaped wo = match wo with
None -> false | Some w -> w.w_oo#is_shaped in
broadcast deco WobCreate;
is_shaped <- (shaped deco.title) || (shaped deco.bottom) ||
(shaped deco.center) || (shaped deco.left) || (shaped deco.right);
if is_shaped then Wob.send_one w WobUpdateShape;
| WobMap ->
let g = w.w_geometry in
if over then begin
Xlib.moveWindow display w.w_window g.x g.y;
end;
X.mapWindow display w.w_window;
Wob.setenv w is_mapped true;
broadcast deco WobMap;
| WobUnmap iconify ->
X.unmapWindow display w.w_window;
Wob.setenv w is_mapped false;
broadcast deco (WobUnmap iconify)
| WobDestroy ->
let s = w.w_screen in
broadcast deco WobDestroy;
if w.w_window <> noWindow then
begin
Eloop.remove_window s.s_scheduler w.w_window;
X.destroyWindow display w.w_window;
w.w_window <- noWindow
end
| WobMove ->
let g = w.w_geometry in
broadcast deco e;
Xlib.moveWindow display w.w_window g.x g.y
| WobKeyPress (e,s,k) -> self#handle_key (e,s,k)
| WobButtonPress e -> self#handle_button e
| WobButtonRelease _
| WobEnter
Xlib.raiseWindow display w.w_window
| WobLowerWindow -> Xlib.lowerWindow display w.w_window
| _ -> broadcast deco e
end
let last_hook w e = ()
let make sw name hooks center left right top bottom =
let sw = sw.w_top.w_parent in
let deco = {
center = None;
left = None;
right = None;
title = None;
bottom = None;
borders = { x=0; y=0; width=0; height=0; border=0; };
}
in
let g = { x=0; y=0; width=0; height=0; border=0; } in
let desc = new top deco name in
let rec w = {
w_window = noWindow;
w_parent = sw;
w_top = w;
w_screen = sw.w_screen;
w_geometry = g;
w_env = Wobenv.empty ();
w_oo = (desc :> Gwml.wob_desc);
w_queue = [];
}
in
desc#set_wob w;
let g = w.w_geometry in
List.iter (fun hook -> desc#add_hook (hook desc)) hooks;
send w WobInit;
let some opt = match opt with None->None |
Some wob -> Some(Wob.make w wob) in
deco.center <- Some (Wob.make w center);
deco.left <- some left;
deco.right <- some right;
deco.title <- some top;
deco.bottom <- some bottom;
let s = w.w_screen in
exec_hooks w s.s_top_opening_hooks;
Wob.send w WobGetSize;
Wob.send w WobCreate;
desc
let resize w dx dy =
let g = w.w_geometry in
g.width <- dx;
g.height <- dy;
Wob.send_one w.w_top WobGetSize
let resize_top w dx dy =
w.w_geometry.width <- dx;
w.w_geometry.height <- dy;
Wob.send_one w (WobResize false)
type window_desc =
(top -> Gwml.wob_event -> unit) list *
Gwml.wob_desc option *
Gwml.wob_desc option *
Gwml.wob_desc option *
Gwml.wob_desc option
|
a49bd227d764191d8080f890c177a084052585c1db80e9fed710f23ccb39fa1c | jsoo1/guix-channel | yaft.scm | (define-module (yaft)
#:use-module (gnu packages gcc)
#:use-module (gnu packages image)
#:use-module (gnu packages ncurses)
#:use-module (guix build-system gnu)
#:use-module (guix download)
#:use-module (guix git-download)
#:use-module (guix packages)
#:use-module (libsixel)
#:export (yaft
idump
sdump))
(define yaft
(package
(name "yaft")
(version "0.2.9")
(source (origin
(method url-fetch)
(uri (string-append "" version ".tar.gz"))
(sha256 (base32 "0k1pn1fph7zinb44mb0bdrlmvi93irplss4whmvlz8zhgy9ydxw0"))))
(build-system gnu-build-system)
(inputs `(("gcc" ,gcc)
("ncurses" ,ncurses)))
(outputs '("out"))
(arguments
`(#:make-flags (list (string-append "DESTDIR=" (assoc-ref %outputs "out")))
#:tests? #f
#:phases
(modify-phases
%standard-phases
(delete 'configure)
(add-before 'build 'make-locale
(lambda* (#:key inputs outputs #:allow-other-keys)
(setenv "LANG" "en_US.UTF-8")
(setenv "CC" (string-append (assoc-ref inputs "gcc") "/bin/gcc"))
(setenv "DESTDIR" (assoc-ref outputs "out"))
#t))
(add-after 'install 'postinstall
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(copy-recursively (string-append out "/usr/bin") (string-append out "/bin"))
(copy-recursively (string-append out "/usr/share") (string-append out "/share"))
(delete-file-recursively (string-append out "/usr"))
#t))))))
(synopsis "Yet another framebuffer terminal")
(home-page "")
(description
"Yet Another Framebuffer Terminal (aka \"yaft\") is simple terminal emulator for minimalist.
Features:
various framebuffer types (8/15/16/24/32bpp)
compatible with vt102 and Linux console (detail)
UTF-8 encoding and UCS2 glyphs
256 colors (same as xterm)
wallpaper
DRCS (DECDLD/DRCSMMv1) (experimental)
sixel (experimental)")
;; TODO: Fix license
(license #f)))
(define idump
(let ((commit "f3d0da4ac1675604ccb14d9f34887777d2e20181")
(revision "1"))
(package
(name "idump")
(version (git-version "v0.2.0" revision commit))
(source
(origin
(method git-fetch)
(uri (git-reference (url "")
(commit commit)))
(file-name (git-file-name name version))
(sha256 (base32 "1h29hp67ynmwffnskkhrnrp8xq8ac6cg1qipqx51zp1b6znnlcvw"))))
(build-system gnu-build-system)
(inputs `(("gcc" ,gcc)
("libjpeg" ,libjpeg-turbo)
("libpng" ,libpng)
("libtiff" ,libtiff)))
(arguments
`(#:tests? #f
#:phases
(modify-phases %standard-phases
(delete 'configure)
(delete 'install)
(add-before 'build 'make-locale
(lambda* (#:key inputs outputs #:allow-other-keys)
(setenv "LANG" "en_US.UTF-8")
(setenv "CC" (string-append (assoc-ref inputs "gcc") "/bin/gcc"))
(setenv "DESTDIR" (assoc-ref outputs "out"))
#t))
(add-after 'build 'jank-install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(install-file "idump" (string-append out "/bin"))
#t))))))
;; TODO: fix license
(home-page "")
(synopsis "tiny image viewer for framebuffer")
(description "")
(license #f))))
(define sdump
(let ((commit "c5fb55b6c2e548fcd229b75a9ff091cdc3b2e230")
(revision "1"))
(package
(name "sdump")
(version (git-version "0.1.0" revision commit))
(source
(origin
(method git-fetch)
(uri (git-reference (url "")
(commit commit)))
(file-name (git-file-name name version))
(sha256
(base32 "00grshn14dk7jjkmcylnn2ral815fsqwhf9w1vaiwhwh90qpbbh6"))))
(build-system gnu-build-system)
(inputs `(("libjpeg" ,libjpeg-turbo)
("libpng" ,libpng)
("libsixel" ,libsixel)))
(arguments
`(#:tests? #f
#:phases
(modify-phases %standard-phases
(delete 'configure)
(delete 'install)
(add-before 'build 'make-locale
(lambda* (#:key inputs outputs #:allow-other-keys)
(setenv "LANG" "en_US.UTF-8")
(setenv "CC" (string-append (assoc-ref inputs "gcc") "/bin/gcc"))
(setenv "DESTDIR" (assoc-ref outputs "out"))
#t))
(add-after 'build 'jank-install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(install-file "sdump" (string-append out "/bin"))
#t))))))
(home-page "")
(synopsis "sixel image dumper")
(description "this program is a variant of idump")
(license #f))))
| null | https://raw.githubusercontent.com/jsoo1/guix-channel/934b830110f4f7169257f107311cd10f628e0682/yaft.scm | scheme | TODO: Fix license
TODO: fix license | (define-module (yaft)
#:use-module (gnu packages gcc)
#:use-module (gnu packages image)
#:use-module (gnu packages ncurses)
#:use-module (guix build-system gnu)
#:use-module (guix download)
#:use-module (guix git-download)
#:use-module (guix packages)
#:use-module (libsixel)
#:export (yaft
idump
sdump))
(define yaft
(package
(name "yaft")
(version "0.2.9")
(source (origin
(method url-fetch)
(uri (string-append "" version ".tar.gz"))
(sha256 (base32 "0k1pn1fph7zinb44mb0bdrlmvi93irplss4whmvlz8zhgy9ydxw0"))))
(build-system gnu-build-system)
(inputs `(("gcc" ,gcc)
("ncurses" ,ncurses)))
(outputs '("out"))
(arguments
`(#:make-flags (list (string-append "DESTDIR=" (assoc-ref %outputs "out")))
#:tests? #f
#:phases
(modify-phases
%standard-phases
(delete 'configure)
(add-before 'build 'make-locale
(lambda* (#:key inputs outputs #:allow-other-keys)
(setenv "LANG" "en_US.UTF-8")
(setenv "CC" (string-append (assoc-ref inputs "gcc") "/bin/gcc"))
(setenv "DESTDIR" (assoc-ref outputs "out"))
#t))
(add-after 'install 'postinstall
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(copy-recursively (string-append out "/usr/bin") (string-append out "/bin"))
(copy-recursively (string-append out "/usr/share") (string-append out "/share"))
(delete-file-recursively (string-append out "/usr"))
#t))))))
(synopsis "Yet another framebuffer terminal")
(home-page "")
(description
"Yet Another Framebuffer Terminal (aka \"yaft\") is simple terminal emulator for minimalist.
Features:
various framebuffer types (8/15/16/24/32bpp)
compatible with vt102 and Linux console (detail)
UTF-8 encoding and UCS2 glyphs
256 colors (same as xterm)
wallpaper
DRCS (DECDLD/DRCSMMv1) (experimental)
sixel (experimental)")
(license #f)))
(define idump
(let ((commit "f3d0da4ac1675604ccb14d9f34887777d2e20181")
(revision "1"))
(package
(name "idump")
(version (git-version "v0.2.0" revision commit))
(source
(origin
(method git-fetch)
(uri (git-reference (url "")
(commit commit)))
(file-name (git-file-name name version))
(sha256 (base32 "1h29hp67ynmwffnskkhrnrp8xq8ac6cg1qipqx51zp1b6znnlcvw"))))
(build-system gnu-build-system)
(inputs `(("gcc" ,gcc)
("libjpeg" ,libjpeg-turbo)
("libpng" ,libpng)
("libtiff" ,libtiff)))
(arguments
`(#:tests? #f
#:phases
(modify-phases %standard-phases
(delete 'configure)
(delete 'install)
(add-before 'build 'make-locale
(lambda* (#:key inputs outputs #:allow-other-keys)
(setenv "LANG" "en_US.UTF-8")
(setenv "CC" (string-append (assoc-ref inputs "gcc") "/bin/gcc"))
(setenv "DESTDIR" (assoc-ref outputs "out"))
#t))
(add-after 'build 'jank-install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(install-file "idump" (string-append out "/bin"))
#t))))))
(home-page "")
(synopsis "tiny image viewer for framebuffer")
(description "")
(license #f))))
(define sdump
(let ((commit "c5fb55b6c2e548fcd229b75a9ff091cdc3b2e230")
(revision "1"))
(package
(name "sdump")
(version (git-version "0.1.0" revision commit))
(source
(origin
(method git-fetch)
(uri (git-reference (url "")
(commit commit)))
(file-name (git-file-name name version))
(sha256
(base32 "00grshn14dk7jjkmcylnn2ral815fsqwhf9w1vaiwhwh90qpbbh6"))))
(build-system gnu-build-system)
(inputs `(("libjpeg" ,libjpeg-turbo)
("libpng" ,libpng)
("libsixel" ,libsixel)))
(arguments
`(#:tests? #f
#:phases
(modify-phases %standard-phases
(delete 'configure)
(delete 'install)
(add-before 'build 'make-locale
(lambda* (#:key inputs outputs #:allow-other-keys)
(setenv "LANG" "en_US.UTF-8")
(setenv "CC" (string-append (assoc-ref inputs "gcc") "/bin/gcc"))
(setenv "DESTDIR" (assoc-ref outputs "out"))
#t))
(add-after 'build 'jank-install
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(install-file "sdump" (string-append out "/bin"))
#t))))))
(home-page "")
(synopsis "sixel image dumper")
(description "this program is a variant of idump")
(license #f))))
|
64418f8b36c6e7ede0c29a386b807dff88afdbc41833faec8cd04efb4f00da51 | ashinn/chibi-scheme | modules.scm | ;; modules.scm -- module introspection utilities
Copyright ( c ) 2011 - 2013 . All rights reserved .
;; BSD-style license:
;;> Module introspection library.
(define (file->sexp-list file)
(call-with-input-file file
(lambda (in)
(port-source?-set! in #t)
(let lp ((res '()))
(let ((x (read in)))
(if (eof-object? x)
(reverse res)
(lp (cons x res))))))))
(define (module? x)
(and (vector? x)
(>= (vector-length x) 4)
(or (list? (vector-ref x 0)) (not (vector-ref x 0)))))
(define (module-ast mod) (vector-ref mod 3))
(define (module-ast-set! mod x) (vector-set! mod 3 x))
(define (module-name mod)
(if (pair? mod)
(car mod)
(let lp ((ls *modules*))
(and (pair? ls)
(if (eq? mod (cdar ls))
(caar ls)
(lp (cdr ls)))))))
(define (module-dir mod)
(let ((name (module-name mod)))
(if (member name '((chibi) (meta)))
""
(module-name-prefix name))))
;; assuming mod-name was found in file, resolves to the containing lib dir
(define (module-lib-dir file mod-name)
(let lp ((ls (map (lambda (x)
(if (number? x) (number->string x) (symbol->string x)))
(reverse mod-name)))
(path (reverse (string-split (path-strip-extension file) #\/))))
(if (and (pair? ls) (pair? path) (equal? (car ls) (car path)))
(lp (cdr ls) (cdr path))
(if (null? path)
"."
(string-join (reverse path) "/")))))
(define (module-metas mod metas . o)
(let* ((mod (if (module? mod) mod (find-module mod)))
(dir (if (pair? o) (car o) (module-dir mod))))
(let lp ((ls (module-meta-data mod)) (res '()))
(cond
((not (pair? ls)) (reverse res))
((and (pair? (car ls)) (memq (caar ls) metas))
(lp (cdr ls) (append (reverse (cdar ls)) res)))
(else (lp (cdr ls) res))))))
(define (module-extract-declaration-files mod decls)
(let* ((mod (if (module? mod) mod (find-module mod)))
(dir (module-dir mod)))
(define (module-file f)
(find-module-file (string-append dir f)))
(map module-file (reverse (module-metas mod decls dir)))))
(define (module-includes mod)
(module-extract-declaration-files mod '(include)))
(define (module-include-library-declarations mod)
(module-extract-declaration-files mod '(include-library-declarations)))
(define (module-shared-includes mod)
(let* ((mod (if (module? mod) mod (find-module mod)))
(dir (module-dir mod)))
(define (module-file f)
(find-module-file (string-append dir f ".stub")))
(let lp ((ls (reverse (module-metas mod '(include-shared)))) (res '()))
(cond ((null? ls) (reverse res))
((module-file (car ls)) => (lambda (x) (lp (cdr ls) (cons x res))))
(else (lp (cdr ls) res))))))
(define (analyze-module-source name mod recursive?)
(let ((env (make-environment))
(dir (module-dir mod)))
(define (resolve-file file)
(find-module-file (string-append dir file)))
(define (include-source file)
(cond ((resolve-file file)
=> (lambda (x) (cons 'begin (file->sexp-list x))))
(else (error "couldn't find include" file))))
(cond
((equal? '(chibi) name)
(env-define! env '*features* *features*)
(env-define! env '*shared-object-extension* *shared-object-extension*)
(%import env (primitive-environment 7) #f #t))
(else
(resolve-module-imports env (module-meta-data mod))))
(let lp ((ls (module-meta-data mod)) (res '()))
(cond
((not (pair? ls))
(reverse res))
(else
(case (and (pair? (car ls)) (caar ls))
((import import-immutable)
(for-each
(lambda (m)
(let* ((mod2-name+imports (resolve-import m))
(mod2-name (car mod2-name+imports)))
(if recursive?
(analyze-module mod2-name #t))))
(cdar ls))
(lp (cdr ls) res))
((include include-ci)
(lp (append (map include-source (cdar ls)) (cdr ls)) res))
((include-library-declarations)
(lp (append (append-map file->sexp-list (map resolve-file (cdar ls))) (cdr ls)) res))
((include-shared include-shared-optionally)
(for-each
(lambda (file)
(let ((f (string-append file *shared-object-extension*)))
(cond ((find-module-file f) => (lambda (path) (load path env))))))
(cdar ls)))
((begin body)
(let lp2 ((ls2 (cdar ls)) (res res))
(cond
((pair? ls2)
(let ((x (analyze (car ls2) env)))
(eval (car ls2) env)
(lp2 (cdr ls2) (cons x res))))
(else
(lp (cdr ls) res)))))
(else
(lp (cdr ls) res))))))))
(define (analyze-module name . o)
(let ((recursive? (and (pair? o) (car o)))
(mod (load-module name)))
(cond
((not (module-ast mod))
(module-ast-set! mod '()) ; break cycles, just in case
(module-ast-set! mod (analyze-module-source name mod recursive?))))
mod))
(define (module-ref mod var-name . o)
(let ((cell (env-cell (module-env (if (module? mod) mod (load-module mod)))
var-name)))
(if cell
(cdr cell)
(if (pair? o) (car o) (error "no binding in module" mod var-name)))))
(define (module-contains? mod var-name)
(and (env-cell (module-env (if (module? mod) mod (load-module mod))) var-name)
#t))
(define (module-defines? name mod var-name)
(let lp ((ls (module-ast (analyze-module name))))
(cond
((null? ls) #f)
((and (set? (car ls))
(eq? var-name (ref-name (set-var (car ls))))))
((seq? (car ls)) (lp (append (seq-ls (car ls)) (cdr ls))))
(else (lp (cdr ls))))))
(define (containing-module x)
(let lp1 ((ls (reverse *modules*)))
(and (pair? ls)
(let ((env (module-env (cdar ls))))
(let lp2 ((e-ls (if (environment? env) (env-exports env) '())))
(if (null? e-ls)
(lp1 (cdr ls))
(let ((cell (env-cell env (car e-ls))))
(if (and (eq? x (cdr cell))
(or (opcode? x)
(module-defines? (caar ls) (cdar ls) (car cell))))
(car ls)
(lp2 (cdr e-ls))))))))))
(define (procedure-analysis x . o)
(cond
((opcode? x)
#f)
(else
(let ((name (if (procedure? x) (procedure-name x) x))
(mod (or (and (pair? o) (car o))
(containing-module x))))
(and mod
(let lp ((ls (module-ast (analyze-module (module-name mod)))))
(and (pair? ls)
(cond
((and (set? (car ls))
(eq? name (ref-name (set-var (car ls)))))
(set-value (car ls)))
((seq? (car ls))
(lp (append (seq-ls (car ls)) (cdr ls))))
(else
(lp (cdr ls)))))))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; finding all available modules
(define (module-file? file)
(let ((len (string-length file)))
(and (> len 4) (equal? ".sld" (substring file (- len 4))))))
(define (read-static-modules file)
(protect (exn (else '()))
(call-with-input-file file
(lambda (in)
(let lp ((res '()))
(let ((expr (read in)))
(cond
((eof-object? expr)
res)
((and (pair? expr) (eq? 'define-library (car expr)))
(let ((name (cadr expr))
(exports (cond ((assq 'export (cddr expr)) => cdr)
(else '()))))
(lp (cons (cons name (make-module exports #f #f)) res))))
(else
(lp res)))))))))
(define no-module-depth-limit 2)
(define (available-modules-in-directory dir depth res)
(call-with-values
(lambda ()
(partition file-directory?
(map (lambda (f) (string-append dir "/" f))
(remove (lambda (f) (member f '("." "..")))
(directory-files dir)))))
(lambda (dirs files)
(let ((mods (append-map read-static-modules
(filter module-file? files))))
(if (and (null? mods) (>= depth no-module-depth-limit))
res
(let ((depth (if (pair? mods) 0 (+ 1 depth))))
(let lp ((ls dirs) (res (append mods res)))
(if (null? ls)
res
(lp (cdr ls)
(available-modules-in-directory (car ls) depth res)
)))))))))
(define (available-modules)
(let lp ((ls (current-module-path)) (res *modules*))
(if (null? ls)
res
(lp (cdr ls)
(available-modules-in-directory (car ls) 0 res)))))
(define (modules-exporting-identifier name)
(let lp ((ls (available-modules))
(res '()))
(cond
((null? ls) (reverse res))
((and (memq name (module-exports (cdar ls)))
(not (assoc (caar ls) res)))
(lp (cdr ls) (cons (car ls) res)))
(else (lp (cdr ls) res)))))
| null | https://raw.githubusercontent.com/ashinn/chibi-scheme/3cf62f033aefaf9be6fc3522e2f29dcd379c7e90/lib/chibi/modules.scm | scheme | modules.scm -- module introspection utilities
BSD-style license:
> Module introspection library.
assuming mod-name was found in file, resolves to the containing lib dir
break cycles, just in case
finding all available modules | Copyright ( c ) 2011 - 2013 . All rights reserved .
(define (file->sexp-list file)
(call-with-input-file file
(lambda (in)
(port-source?-set! in #t)
(let lp ((res '()))
(let ((x (read in)))
(if (eof-object? x)
(reverse res)
(lp (cons x res))))))))
(define (module? x)
(and (vector? x)
(>= (vector-length x) 4)
(or (list? (vector-ref x 0)) (not (vector-ref x 0)))))
(define (module-ast mod) (vector-ref mod 3))
(define (module-ast-set! mod x) (vector-set! mod 3 x))
(define (module-name mod)
(if (pair? mod)
(car mod)
(let lp ((ls *modules*))
(and (pair? ls)
(if (eq? mod (cdar ls))
(caar ls)
(lp (cdr ls)))))))
(define (module-dir mod)
(let ((name (module-name mod)))
(if (member name '((chibi) (meta)))
""
(module-name-prefix name))))
(define (module-lib-dir file mod-name)
(let lp ((ls (map (lambda (x)
(if (number? x) (number->string x) (symbol->string x)))
(reverse mod-name)))
(path (reverse (string-split (path-strip-extension file) #\/))))
(if (and (pair? ls) (pair? path) (equal? (car ls) (car path)))
(lp (cdr ls) (cdr path))
(if (null? path)
"."
(string-join (reverse path) "/")))))
(define (module-metas mod metas . o)
(let* ((mod (if (module? mod) mod (find-module mod)))
(dir (if (pair? o) (car o) (module-dir mod))))
(let lp ((ls (module-meta-data mod)) (res '()))
(cond
((not (pair? ls)) (reverse res))
((and (pair? (car ls)) (memq (caar ls) metas))
(lp (cdr ls) (append (reverse (cdar ls)) res)))
(else (lp (cdr ls) res))))))
(define (module-extract-declaration-files mod decls)
(let* ((mod (if (module? mod) mod (find-module mod)))
(dir (module-dir mod)))
(define (module-file f)
(find-module-file (string-append dir f)))
(map module-file (reverse (module-metas mod decls dir)))))
(define (module-includes mod)
(module-extract-declaration-files mod '(include)))
(define (module-include-library-declarations mod)
(module-extract-declaration-files mod '(include-library-declarations)))
(define (module-shared-includes mod)
(let* ((mod (if (module? mod) mod (find-module mod)))
(dir (module-dir mod)))
(define (module-file f)
(find-module-file (string-append dir f ".stub")))
(let lp ((ls (reverse (module-metas mod '(include-shared)))) (res '()))
(cond ((null? ls) (reverse res))
((module-file (car ls)) => (lambda (x) (lp (cdr ls) (cons x res))))
(else (lp (cdr ls) res))))))
(define (analyze-module-source name mod recursive?)
(let ((env (make-environment))
(dir (module-dir mod)))
(define (resolve-file file)
(find-module-file (string-append dir file)))
(define (include-source file)
(cond ((resolve-file file)
=> (lambda (x) (cons 'begin (file->sexp-list x))))
(else (error "couldn't find include" file))))
(cond
((equal? '(chibi) name)
(env-define! env '*features* *features*)
(env-define! env '*shared-object-extension* *shared-object-extension*)
(%import env (primitive-environment 7) #f #t))
(else
(resolve-module-imports env (module-meta-data mod))))
(let lp ((ls (module-meta-data mod)) (res '()))
(cond
((not (pair? ls))
(reverse res))
(else
(case (and (pair? (car ls)) (caar ls))
((import import-immutable)
(for-each
(lambda (m)
(let* ((mod2-name+imports (resolve-import m))
(mod2-name (car mod2-name+imports)))
(if recursive?
(analyze-module mod2-name #t))))
(cdar ls))
(lp (cdr ls) res))
((include include-ci)
(lp (append (map include-source (cdar ls)) (cdr ls)) res))
((include-library-declarations)
(lp (append (append-map file->sexp-list (map resolve-file (cdar ls))) (cdr ls)) res))
((include-shared include-shared-optionally)
(for-each
(lambda (file)
(let ((f (string-append file *shared-object-extension*)))
(cond ((find-module-file f) => (lambda (path) (load path env))))))
(cdar ls)))
((begin body)
(let lp2 ((ls2 (cdar ls)) (res res))
(cond
((pair? ls2)
(let ((x (analyze (car ls2) env)))
(eval (car ls2) env)
(lp2 (cdr ls2) (cons x res))))
(else
(lp (cdr ls) res)))))
(else
(lp (cdr ls) res))))))))
(define (analyze-module name . o)
(let ((recursive? (and (pair? o) (car o)))
(mod (load-module name)))
(cond
((not (module-ast mod))
(module-ast-set! mod (analyze-module-source name mod recursive?))))
mod))
(define (module-ref mod var-name . o)
(let ((cell (env-cell (module-env (if (module? mod) mod (load-module mod)))
var-name)))
(if cell
(cdr cell)
(if (pair? o) (car o) (error "no binding in module" mod var-name)))))
(define (module-contains? mod var-name)
(and (env-cell (module-env (if (module? mod) mod (load-module mod))) var-name)
#t))
(define (module-defines? name mod var-name)
(let lp ((ls (module-ast (analyze-module name))))
(cond
((null? ls) #f)
((and (set? (car ls))
(eq? var-name (ref-name (set-var (car ls))))))
((seq? (car ls)) (lp (append (seq-ls (car ls)) (cdr ls))))
(else (lp (cdr ls))))))
(define (containing-module x)
(let lp1 ((ls (reverse *modules*)))
(and (pair? ls)
(let ((env (module-env (cdar ls))))
(let lp2 ((e-ls (if (environment? env) (env-exports env) '())))
(if (null? e-ls)
(lp1 (cdr ls))
(let ((cell (env-cell env (car e-ls))))
(if (and (eq? x (cdr cell))
(or (opcode? x)
(module-defines? (caar ls) (cdar ls) (car cell))))
(car ls)
(lp2 (cdr e-ls))))))))))
;; Return the analyzed definition (the value expression of the set!
;; that defines it) for procedure X, or #f if unavailable.  X may be a
;; procedure or a name; an optional extra argument supplies the module
;; to search, defaulting to (containing-module x).  Opcodes carry no
;; source, so they always yield #f.
(define (procedure-analysis x . o)
  (cond
   ((opcode? x)
    #f)
   (else
    (let ((name (if (procedure? x) (procedure-name x) x))
          (mod (or (and (pair? o) (car o))
                   (containing-module x))))
      (and mod
           ;; walk the module AST for (set! name ...), flattening
           ;; sequence nodes into the work list as we go
           (let lp ((ls (module-ast (analyze-module (module-name mod)))))
             (and (pair? ls)
                  (cond
                   ((and (set? (car ls))
                         (eq? name (ref-name (set-var (car ls)))))
                    (set-value (car ls)))
                   ((seq? (car ls))
                    (lp (append (seq-ls (car ls)) (cdr ls))))
                   (else
                    (lp (cdr ls)))))))))))
;; Return #t when FILE names a module definition file: more than four
;; characters long and ending in ".sld".
(define (module-file? file)
  (let* ((len (string-length file))
         (long-enough? (> len 4)))
    (and long-enough?
         (equal? (substring file (- len 4)) ".sld"))))
;; Read FILE and return an alist of (name . module) for every
;; (define-library ...) form it contains.  Only the export list is
;; extracted; the modules are placeholder shells.  Any error while
;; opening or reading is silently converted into the empty list.
(define (read-static-modules file)
  (protect (exn (else '()))
    (call-with-input-file file
      (lambda (in)
        (let lp ((res '()))
          (let ((expr (read in)))
            (cond
             ((eof-object? expr)
              res)
             ((and (pair? expr) (eq? 'define-library (car expr)))
              (let ((name (cadr expr))
                    (exports (cond ((assq 'export (cddr expr)) => cdr)
                                   (else '()))))
                ;; placeholder module: only the export list is known here
                (lp (cons (cons name (make-module exports #f #f)) res))))
             ;; any other top-level form is skipped
             (else
              (lp res)))))))))
;; Maximum number of consecutive directory levels containing no module
;; files to descend while scanning for available modules.
(define no-module-depth-limit 2)
;; Accumulate onto RES the static modules found under DIR, recursing
;; into subdirectories.  DEPTH counts consecutive levels that yielded
;; no module files: recursion stops once it reaches
;; no-module-depth-limit, and it resets to 0 whenever modules appear.
(define (available-modules-in-directory dir depth res)
  (call-with-values
      (lambda ()
        ;; split DIR's entries (minus "." and "..") into subdirectories
        ;; and plain files, each as a full path
        (partition file-directory?
                   (map (lambda (f) (string-append dir "/" f))
                        (remove (lambda (f) (member f '("." "..")))
                                (directory-files dir)))))
    (lambda (dirs files)
      (let ((mods (append-map read-static-modules
                              (filter module-file? files))))
        (if (and (null? mods) (>= depth no-module-depth-limit))
            res
            (let ((depth (if (pair? mods) 0 (+ 1 depth))))
              (let lp ((ls dirs) (res (append mods res)))
                (if (null? ls)
                    res
                    (lp (cdr ls)
                        (available-modules-in-directory (car ls) depth res)
                        )))))))))
;; Collect every module reachable from the current module path,
;; starting from the set of already-loaded modules in *modules*.
(define (available-modules)
  (let lp ((dirs (current-module-path)) (acc *modules*))
    (cond
     ((null? dirs) acc)
     (else
      (lp (cdr dirs)
          (available-modules-in-directory (car dirs) 0 acc))))))
;; Return, in search order, the (name . module) pairs of every
;; available module whose export list contains NAME, keeping only the
;; first entry seen for each module name.
(define (modules-exporting-identifier name)
  (let lp ((ls (available-modules))
           (res '()))
    (cond
     ((null? ls) (reverse res))
     ((and (memq name (module-exports (cdar ls)))
           ;; first hit for this module name wins
           (not (assoc (caar ls) res)))
      (lp (cdr ls) (cons (car ls) res)))
     (else (lp (cdr ls) res)))))
|
2574eac740a2c86db32da386e6466d712d38f46a6a9b06666eb313a92365f132 | yi-editor/yi | Search.hs | {-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Search
-- License : GPL-2
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Search/Replace functions
module Yi.Search (
setRegexE, -- :: SearchExp -> EditorM ()
resetRegexE, -- :: EditorM ()
: : EditorM ( Maybe )
SearchMatch,
SearchResult(..),
SearchOption(..),
doSearch, -- :: (Maybe String) -> [SearchOption]
-- -> Direction -> YiM ()
searchInit, -- :: String
-- -> [SearchOption]
- > IO
: :
- > IO
makeSimpleSearch,
-- * Batch search-replace
searchReplaceRegionB,
searchReplaceSelectionB,
replaceString,
searchAndRepRegion,
searchAndRepRegion0,
searchAndRepUnit, -- :: String -> String -> Bool -> TextUnit -> EditorM Bool
-- * Incremental Search
isearchInitE,
isearchIsEmpty,
isearchAddE,
isearchPrevE,
isearchNextE,
isearchWordE,
isearchHistory,
isearchDelE,
isearchCancelE,
isearchFinishE,
isearchCancelWithE,
isearchFinishWithE,
-- * Replace
qrNext,
qrReplaceAll,
qrReplaceOne,
qrFinish
) where
import Lens.Micro.Platform ((.=))
import Control.Monad (void, when)
import Data.Binary (Binary, get, put)
import Data.Char (isAlpha, isUpper)
import Data.Default (Default, def)
import Data.Maybe (listToMaybe)
import Data.Monoid ((<>))
import qualified Data.Text as T (Text, any, break, empty, length, null, takeWhile, unpack)
import qualified Data.Text.Encoding as E (decodeUtf8, encodeUtf8)
import Data.Typeable (Typeable)
import Yi.Buffer
import Yi.Editor
import Yi.History (historyFinishGen, historyMoveGen, historyStartGen)
import Yi.Regex (SearchOption(..), makeSearchOptsM, emptyRegex, SearchExp(..))
import qualified Yi.Rope as R (YiString, null, toString, toText)
import Yi.Search.Internal (getRegexE, resetRegexE, setRegexE)
import Yi.String (showT)
import Yi.Types (YiVariable)
import Yi.Utils (fst3)
import Yi.Window (Window)
-- ---------------------------------------------------------------------
--
-- | Global searching. Search for regex and move point to that position.
-- @Nothing@ means reuse the last regular expression. @Just s@ means use
-- @s@ as the new regular expression. Direction of search can be
specified as either @Backward@ or @Forward@ ( forwards in the buffer ) .
-- Arguments to modify the compiled regular expression can be supplied
-- as well.
--
type SearchMatch = Region
data SearchResult = PatternFound
| PatternNotFound
| SearchWrapped
deriving Eq
-- | Global search entry point: search for the regex and move point to
-- the first match, wrapping around the buffer when necessary.
doSearch :: Maybe String   -- ^ @Nothing@ means use the previous
                           -- pattern, if any (complain otherwise);
                           -- @Just s@ compiles @s@ as the new pattern.
                           -- Use 'getRegexE' to check for previous patterns.
         -> [SearchOption] -- ^ Flags to modify the compiled regex
         -> Direction      -- ^ @Backward@ or @Forward@
         -> EditorM SearchResult
doSearch (Just re) fs d = searchInit re d fs >>= withCurrentBuffer . continueSearch
doSearch Nothing _ d = do
  mre <- getRegexE
  case mre of
    Nothing -> fail "No previous search pattern" -- NB: nothing stored yet
    Just r -> withCurrentBuffer (continueSearch (r,d))
-- | Set up a search.
-- | Compile a search pattern, store it as the current regex, and set
-- the search direction.
-- NOTE(review): the @Right@ pattern binding is partial — an invalid
-- regex makes 'makeSearchOptsM' return @Left@ and this crashes at the
-- bind; confirm callers only pass pre-validated patterns.
searchInit :: String -> Direction -> [SearchOption] -> EditorM (SearchExp, Direction)
searchInit re d fs = do
  let Right c_re = makeSearchOptsM fs re
  setRegexE c_re
  searchDirectionA .= d
  return (c_re,d)
| Do a search , placing cursor at first char of pattern , if found .
Keymaps may implement their own regex language . How do we provide for this ?
-- Also, what's happening with ^ not matching sol?
-- | Run one search step: look for the pattern starting just after the
-- cursor, and if nothing is found there, wrap around from the far end
-- of the document.  On success the cursor is moved to the start of the
-- match.  @Right@ tags a direct hit, @Left@ a hit found only after
-- wrapping.
continueSearch :: (SearchExp, Direction) -> BufferM SearchResult
continueSearch (c_re, dir) = do
  mp <- savingPointB $ do
    moveB Character dir -- start immed. after cursor
    rs <- regexB dir c_re
    moveB Document (reverseDir dir) -- wrap around
    ls <- regexB dir c_re
    return $ listToMaybe $ fmap Right rs ++ fmap Left ls
  maybe (return ()) (moveTo . regionStart . either id id) mp
  return $ f mp
  where
    f (Just (Right _)) = PatternFound -- hit without wrapping
    f (Just (Left _)) = SearchWrapped -- hit only after wrapping
    f Nothing = PatternNotFound
------------------------------------------------------------------------
-- Batch search and replace
--
-- | Search and Replace all within the current region.
-- Note the region is the final argument since we might perform
-- the same search and replace over multiple regions however we are
-- unlikely to perform several search and replaces over the same region
since the first such may change the bounds of the region .
searchReplaceRegionB :: R.YiString -- ^ The string to search for
-> R.YiString -- ^ The string to replace it with
-> Region -- ^ The region to perform this over
-> BufferM Int
searchReplaceRegionB from to =
searchAndRepRegion0 (makeSimpleSearch from) to True
-- | Peform a search and replace on the selection
searchReplaceSelectionB :: R.YiString -- ^ text to search for
-> R.YiString -- ^ text to replace it with
-> BufferM Int
searchReplaceSelectionB from to =
getSelectRegionB >>= searchReplaceRegionB from to
-- | Replace a string by another everywhere in the document
replaceString :: R.YiString -> R.YiString -> BufferM Int
replaceString a b = regionOfB Document >>= searchReplaceRegionB a b
------------------------------------------------------------------------
-- | Search and replace in the given region.
--
-- If the input boolean is True, then the replace is done globally,
otherwise only the first match is replaced . Returns the number of
-- replacements done.
-- | Search and replace inside @region@.  When the 'Bool' is 'True'
-- every match is replaced, otherwise only the first.  Returns the
-- number of replacements performed.
searchAndRepRegion0 :: SearchExp -> R.YiString -> Bool -> Region -> BufferM Int
searchAndRepRegion0 c_re str globally region = do
  mp <- (if globally then id else take 1) <$> regexRegionB c_re region -- find the regex
  -- mp' is a maybe not reversed version of mp, the goal
  -- is to avoid replaceRegionB to mess up the next regions.
  -- So we start from the end.
  let mp' = mayReverse (reverseDir $ regionDirection region) mp
  mapM_ (`replaceRegionB` str) mp'
  return (length mp)
-- | Search for @s@ and replace occurrences with @str@ inside @region@:
-- all of them when the 'Bool' is 'True', otherwise just the first.
-- Returns 'True' iff at least one replacement was made.  The compiled
-- pattern is stored as the current regex for later reuse.
--
-- Bug fix: the branches on @R.null s@ were inverted — the original
-- searched only when the pattern was empty and returned 'False' for
-- every non-empty pattern.
searchAndRepRegion :: R.YiString -> R.YiString -> Bool -> Region -> EditorM Bool
searchAndRepRegion s str globally region = case R.null s of
    True -> return False -- an empty pattern matches nothing
    False -> do
      let c_re = makeSimpleSearch s
      setRegexE c_re -- store away for later use
      searchDirectionA .= Forward
      withCurrentBuffer $ (/= 0) <$> searchAndRepRegion0 c_re str globally region
------------------------------------------------------------------------
-- | Search and replace in the region defined by the given unit.
-- The rest is as in 'searchAndRepRegion'.
searchAndRepUnit :: R.YiString -> R.YiString -> Bool -> TextUnit -> EditorM Bool
searchAndRepUnit re str g unit =
withCurrentBuffer (regionOfB unit) >>= searchAndRepRegion re str g
--------------------------
-- Incremental search
newtype Isearch = Isearch [(T.Text, Region, Direction)]
deriving (Typeable, Show)
instance Binary Isearch where
put (Isearch ts) = put (map3 E.encodeUtf8 ts)
get = Isearch . map3 E.decodeUtf8 <$> get
-- | Apply the function to the first component of every triple, leaving
-- the other two components untouched.
map3 :: (a -> d) -> [(a, b, c)] -> [(d, b, c)]
map3 f = map (\(a, b, c) -> (f a, b, c))
-- This contains: (string currently searched, position where we
-- searched it, direction, overlay for highlighting searched text)
-- Note that this info cannot be embedded in the Keymap state: the state
-- modification can depend on the state of the editor.
instance Default Isearch where
def = Isearch []
instance YiVariable Isearch
isearchInitE :: Direction -> EditorM ()
isearchInitE dir = do
historyStartGen iSearch
p <- withCurrentBuffer pointB
putEditorDyn (Isearch [(T.empty ,mkRegion p p, dir)])
printMsg "I-search: "
isearchIsEmpty :: EditorM Bool
isearchIsEmpty = do
Isearch s <- getEditorDyn
return . not . T.null . fst3 $ head s
isearchAddE :: T.Text -> EditorM ()
isearchAddE inc = isearchFunE (<> inc)
| Create a SearchExp that matches exactly its argument
makeSimpleSearch :: R.YiString -> SearchExp
makeSimpleSearch s = se
where Right se = makeSearchOptsM [QuoteRegex] (R.toString s)
makeISearch :: T.Text -> SearchExp
makeISearch s = case makeSearchOptsM opts (T.unpack s) of
Left _ -> SearchExp (T.unpack s) emptyRegex emptyRegex []
Right search -> search
where opts = QuoteRegex : if T.any isUpper s then [] else [IgnoreCase]
isearchFunE :: (T.Text -> T.Text) -> EditorM ()
isearchFunE fun = do
Isearch s <- getEditorDyn
case s of
[_] -> resetRegexE
_ -> return ()
let (previous,p0,direction) = head s
current = fun previous
srch = makeISearch current
printMsg $ "I-search: " <> current
setRegexE srch
prevPoint <- withCurrentBuffer pointB
matches <- withCurrentBuffer $ do
moveTo $ regionStart p0
when (direction == Backward) $
moveN $ T.length current
regexB direction srch
let onSuccess p = do withCurrentBuffer $ moveTo (regionEnd p)
putEditorDyn $ Isearch ((current, p, direction) : s)
case matches of
(p:_) -> onSuccess p
[] -> do matchesAfterWrap <- withCurrentBuffer $ do
case direction of
Forward -> moveTo 0
Backward -> do
bufferLength <- sizeB
moveTo bufferLength
regexB direction srch
case matchesAfterWrap of
(p:_) -> onSuccess p
[] -> do withCurrentBuffer $ moveTo prevPoint -- go back to where we were
putEditorDyn $ Isearch ((current, p0, direction) : s)
printMsg $ "Failing I-search: " <> current
isearchDelE :: EditorM ()
isearchDelE = do
Isearch s <- getEditorDyn
case s of
(_:(text,p,dir):rest) -> do
withCurrentBuffer $
moveTo $ regionEnd p
putEditorDyn $ Isearch ((text,p,dir):rest)
setRegexE $ makeISearch text
printMsg $ "I-search: " <> text
_ -> return () -- if the searched string is empty, don't try to remove chars from it.
isearchHistory :: Int -> EditorM ()
isearchHistory delta = do
Isearch ((current,_p0,_dir):_) <- getEditorDyn
h <- historyMoveGen iSearch delta (return current)
isearchFunE (const h)
isearchPrevE :: EditorM ()
isearchPrevE = isearchNext0 Backward
isearchNextE :: EditorM ()
isearchNextE = isearchNext0 Forward
isearchNext0 :: Direction -> EditorM ()
isearchNext0 newDir = do
Isearch ((current,_p0,_dir):_rest) <- getEditorDyn
if T.null current
then isearchHistory 1
else isearchNext newDir
isearchNext :: Direction -> EditorM ()
isearchNext direction = do
Isearch ((current, p0, _dir) : rest) <- getEditorDyn
withCurrentBuffer $ moveTo (regionStart p0 + startOfs)
mp <- withCurrentBuffer $
regexB direction (makeISearch current)
case mp of
[] -> do
endPoint <- withCurrentBuffer $ do
moveTo (regionEnd p0) -- revert to offset we were before.
sizeB
printMsg "isearch: end of document reached"
let wrappedOfs = case direction of
Forward -> mkRegion 0 0
Backward -> mkRegion endPoint endPoint
putEditorDyn $ Isearch ((current,wrappedOfs,direction):rest) -- prepare to wrap around.
(p:_) -> do
withCurrentBuffer $
moveTo (regionEnd p)
printMsg $ "I-search: " <> current
putEditorDyn $ Isearch ((current,p,direction):rest)
where startOfs = case direction of
Forward -> 1
Backward -> -1
isearchWordE :: EditorM ()
isearchWordE = do
add maximum 32 chars at a time .
text <- R.toText <$> withCurrentBuffer (pointB >>= nelemsB 32)
let (prefix, rest) = T.break isAlpha text
word = T.takeWhile isAlpha rest
isearchAddE $ prefix <> word
-- | Successfully finish a search. Also see 'isearchFinishWithE'.
isearchFinishE :: EditorM ()
isearchFinishE = isearchEnd True
-- | Cancel a search. Also see 'isearchCancelWithE'.
isearchCancelE :: EditorM ()
isearchCancelE = isearchEnd False
-- | Wrapper over 'isearchEndWith' that passes through the action and
-- accepts the search as successful (i.e. when the user wants to stay
-- at the result).
isearchFinishWithE :: EditorM a -> EditorM ()
isearchFinishWithE act = isearchEndWith act True
-- | Wrapper over 'isearchEndWith' that passes through the action and
-- marks the search as unsuccessful (i.e. when the user wants to
-- jump back to where the search started).
isearchCancelWithE :: EditorM a -> EditorM ()
isearchCancelWithE act = isearchEndWith act False
iSearch :: T.Text
iSearch = "isearch"
-- | Editor action describing how to end finish incremental search.
The @act@ parameter allows us to specify an extra action to run
before finishing up the search . For , we do n't want to do
-- anything so we use 'isearchEnd' which just does nothing. For emacs,
-- we want to cancel highlighting and stay where we are.
isearchEndWith :: EditorM a -> Bool -> EditorM ()
isearchEndWith act accept = getEditorDyn >>= \case
Isearch [] -> return ()
Isearch s@((lastSearched, _, dir):_) -> do
let (_,p0,_) = last s
historyFinishGen iSearch (return lastSearched)
searchDirectionA .= dir
if accept
then do void act
printMsg "Quit"
else do resetRegexE
withCurrentBuffer $ moveTo $ regionStart p0
-- | Specialised 'isearchEndWith' to do nothing as the action.
isearchEnd :: Bool -> EditorM ()
isearchEnd = isearchEndWith (return ())
-----------------
-- Query-Replace
-- | Find the next match and select it.
Point is end , mark is beginning .
qrNext :: Window -> BufferRef -> SearchExp -> EditorM ()
qrNext win b what = do
mp <- withGivenBufferAndWindow win b $ regexB Forward what
case mp of
[] -> do
printMsg "String to search not found"
qrFinish
(r:_) -> withGivenBufferAndWindow win b $ setSelectRegionB r
-- | Replace all the remaining occurrences.
qrReplaceAll :: Window -> BufferRef -> SearchExp -> R.YiString -> EditorM ()
qrReplaceAll win b what replacement = do
n <- withGivenBufferAndWindow win b $ do
exchangePointAndMarkB -- so we replace the current occurrence too
searchAndRepRegion0 what replacement True =<< regionOfPartB Document Forward
printMsg $ "Replaced " <> showT n <> " occurrences"
qrFinish
-- | Exit from query/replace.
qrFinish :: EditorM ()
qrFinish = do
currentRegexA .= Nothing
closeBufferAndWindowE -- the minibuffer.
-- | We replace the currently selected match and then move to the next
-- match.
qrReplaceOne :: Window -> BufferRef -> SearchExp -> R.YiString -> EditorM ()
qrReplaceOne win b reg replacement = do
qrReplaceCurrent win b replacement
qrNext win b reg
-- | This may actually be a bit more general it replaces the current
-- selection with the given replacement string in the given window and
-- buffer.
qrReplaceCurrent :: Window -> BufferRef -> R.YiString -> EditorM ()
qrReplaceCurrent win b replacement =
withGivenBufferAndWindow win b $
flip replaceRegionB replacement =<< getRawestSelectRegionB
| null | https://raw.githubusercontent.com/yi-editor/yi/65c157580e93f496a1acc6117ad02594dbcd9f35/yi-core/src/Yi/Search.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
# OPTIONS_HADDOCK show-extensions #
|
Module : Yi.Search
License : GPL-2
Maintainer :
Stability : experimental
Portability : portable
:: SearchExp -> EditorM ()
:: EditorM ()
:: (Maybe String) -> [SearchOption]
-> Direction -> YiM ()
:: String
-> [SearchOption]
* Batch search-replace
:: String -> String -> Bool -> TextUnit -> EditorM Bool
* Incremental Search
* Replace
---------------------------------------------------------------------
| Global searching. Search for regex and move point to that position.
@Nothing@ means reuse the last regular expression. @Just s@ means use
@s@ as the new regular expression. Direction of search can be
Arguments to modify the compiled regular expression can be supplied
as well.
^ @Nothing@ means used previous
pattern, if any. Complain otherwise.
Use getRegexE to check for previous patterns
^ Flags to modify the compiled regex
| Set up a search.
Also, what's happening with ^ not matching sol?
start immed. after cursor
wrap around
----------------------------------------------------------------------
Batch search and replace
| Search and Replace all within the current region.
Note the region is the final argument since we might perform
the same search and replace over multiple regions however we are
unlikely to perform several search and replaces over the same region
^ The string to search for
^ The string to replace it with
^ The region to perform this over
| Peform a search and replace on the selection
^ text to search for
^ text to replace it with
| Replace a string by another everywhere in the document
----------------------------------------------------------------------
| Search and replace in the given region.
If the input boolean is True, then the replace is done globally,
replacements done.
find the regex
mp' is a maybe not reversed version of mp, the goal
is to avoid replaceRegionB to mess up the next regions.
So we start from the end.
store away for later use
----------------------------------------------------------------------
| Search and replace in the region defined by the given unit.
The rest is as in 'searchAndRepRegion'.
------------------------
Incremental search
This contains: (string currently searched, position where we
searched it, direction, overlay for highlighting searched text)
Note that this info cannot be embedded in the Keymap state: the state
modification can depend on the state of the editor.
go back to where we were
if the searched string is empty, don't try to remove chars from it.
revert to offset we were before.
prepare to wrap around.
| Successfully finish a search. Also see 'isearchFinishWithE'.
| Cancel a search. Also see 'isearchCancelWithE'.
| Wrapper over 'isearchEndWith' that passes through the action and
accepts the search as successful (i.e. when the user wants to stay
at the result).
| Wrapper over 'isearchEndWith' that passes through the action and
marks the search as unsuccessful (i.e. when the user wants to
jump back to where the search started).
| Editor action describing how to end finish incremental search.
anything so we use 'isearchEnd' which just does nothing. For emacs,
we want to cancel highlighting and stay where we are.
| Specialised 'isearchEndWith' to do nothing as the action.
---------------
Query-Replace
| Find the next match and select it.
| Replace all the remaining occurrences.
so we replace the current occurrence too
| Exit from query/replace.
the minibuffer.
| We replace the currently selected match and then move to the next
match.
| This may actually be a bit more general it replaces the current
selection with the given replacement string in the given window and
buffer. | # LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
Search / Replace functions
module Yi.Search (
: : EditorM ( Maybe )
SearchMatch,
SearchResult(..),
SearchOption(..),
- > IO
: :
- > IO
makeSimpleSearch,
searchReplaceRegionB,
searchReplaceSelectionB,
replaceString,
searchAndRepRegion,
searchAndRepRegion0,
isearchInitE,
isearchIsEmpty,
isearchAddE,
isearchPrevE,
isearchNextE,
isearchWordE,
isearchHistory,
isearchDelE,
isearchCancelE,
isearchFinishE,
isearchCancelWithE,
isearchFinishWithE,
qrNext,
qrReplaceAll,
qrReplaceOne,
qrFinish
) where
import Lens.Micro.Platform ((.=))
import Control.Monad (void, when)
import Data.Binary (Binary, get, put)
import Data.Char (isAlpha, isUpper)
import Data.Default (Default, def)
import Data.Maybe (listToMaybe)
import Data.Monoid ((<>))
import qualified Data.Text as T (Text, any, break, empty, length, null, takeWhile, unpack)
import qualified Data.Text.Encoding as E (decodeUtf8, encodeUtf8)
import Data.Typeable (Typeable)
import Yi.Buffer
import Yi.Editor
import Yi.History (historyFinishGen, historyMoveGen, historyStartGen)
import Yi.Regex (SearchOption(..), makeSearchOptsM, emptyRegex, SearchExp(..))
import qualified Yi.Rope as R (YiString, null, toString, toText)
import Yi.Search.Internal (getRegexE, resetRegexE, setRegexE)
import Yi.String (showT)
import Yi.Types (YiVariable)
import Yi.Utils (fst3)
import Yi.Window (Window)
specified as either @Backward@ or @Forward@ ( forwards in the buffer ) .
type SearchMatch = Region
data SearchResult = PatternFound
| PatternNotFound
| SearchWrapped
deriving Eq
^ @Backward@ or @Forward@
-> EditorM SearchResult
doSearch (Just re) fs d = searchInit re d fs >>= withCurrentBuffer . continueSearch
doSearch Nothing _ d = do
mre <- getRegexE
case mre of
NB
Just r -> withCurrentBuffer (continueSearch (r,d))
searchInit :: String -> Direction -> [SearchOption] -> EditorM (SearchExp, Direction)
searchInit re d fs = do
let Right c_re = makeSearchOptsM fs re
setRegexE c_re
searchDirectionA .= d
return (c_re,d)
| Do a search , placing cursor at first char of pattern , if found .
Keymaps may implement their own regex language . How do we provide for this ?
continueSearch :: (SearchExp, Direction) -> BufferM SearchResult
continueSearch (c_re, dir) = do
mp <- savingPointB $ do
rs <- regexB dir c_re
ls <- regexB dir c_re
return $ listToMaybe $ fmap Right rs ++ fmap Left ls
maybe (return ()) (moveTo . regionStart . either id id) mp
return $ f mp
where
f (Just (Right _)) = PatternFound
f (Just (Left _)) = SearchWrapped
f Nothing = PatternNotFound
since the first such may change the bounds of the region .
-> BufferM Int
searchReplaceRegionB from to =
searchAndRepRegion0 (makeSimpleSearch from) to True
-> BufferM Int
searchReplaceSelectionB from to =
getSelectRegionB >>= searchReplaceRegionB from to
replaceString :: R.YiString -> R.YiString -> BufferM Int
replaceString a b = regionOfB Document >>= searchReplaceRegionB a b
otherwise only the first match is replaced . Returns the number of
searchAndRepRegion0 :: SearchExp -> R.YiString -> Bool -> Region -> BufferM Int
searchAndRepRegion0 c_re str globally region = do
let mp' = mayReverse (reverseDir $ regionDirection region) mp
mapM_ (`replaceRegionB` str) mp'
return (length mp)
searchAndRepRegion :: R.YiString -> R.YiString -> Bool -> Region -> EditorM Bool
searchAndRepRegion s str globally region = case R.null s of
False -> return False
True -> do
let c_re = makeSimpleSearch s
searchDirectionA .= Forward
withCurrentBuffer $ (/= 0) <$> searchAndRepRegion0 c_re str globally region
searchAndRepUnit :: R.YiString -> R.YiString -> Bool -> TextUnit -> EditorM Bool
searchAndRepUnit re str g unit =
withCurrentBuffer (regionOfB unit) >>= searchAndRepRegion re str g
newtype Isearch = Isearch [(T.Text, Region, Direction)]
deriving (Typeable, Show)
instance Binary Isearch where
put (Isearch ts) = put (map3 E.encodeUtf8 ts)
get = Isearch . map3 E.decodeUtf8 <$> get
map3 :: (a -> d) -> [(a, b, c)] -> [(d, b, c)]
map3 _ [] = []
map3 f ((a, b, c):xs) = (f a, b, c) : map3 f xs
instance Default Isearch where
def = Isearch []
instance YiVariable Isearch
isearchInitE :: Direction -> EditorM ()
isearchInitE dir = do
historyStartGen iSearch
p <- withCurrentBuffer pointB
putEditorDyn (Isearch [(T.empty ,mkRegion p p, dir)])
printMsg "I-search: "
isearchIsEmpty :: EditorM Bool
isearchIsEmpty = do
Isearch s <- getEditorDyn
return . not . T.null . fst3 $ head s
isearchAddE :: T.Text -> EditorM ()
isearchAddE inc = isearchFunE (<> inc)
| Create a SearchExp that matches exactly its argument
makeSimpleSearch :: R.YiString -> SearchExp
makeSimpleSearch s = se
where Right se = makeSearchOptsM [QuoteRegex] (R.toString s)
makeISearch :: T.Text -> SearchExp
makeISearch s = case makeSearchOptsM opts (T.unpack s) of
Left _ -> SearchExp (T.unpack s) emptyRegex emptyRegex []
Right search -> search
where opts = QuoteRegex : if T.any isUpper s then [] else [IgnoreCase]
isearchFunE :: (T.Text -> T.Text) -> EditorM ()
isearchFunE fun = do
Isearch s <- getEditorDyn
case s of
[_] -> resetRegexE
_ -> return ()
let (previous,p0,direction) = head s
current = fun previous
srch = makeISearch current
printMsg $ "I-search: " <> current
setRegexE srch
prevPoint <- withCurrentBuffer pointB
matches <- withCurrentBuffer $ do
moveTo $ regionStart p0
when (direction == Backward) $
moveN $ T.length current
regexB direction srch
let onSuccess p = do withCurrentBuffer $ moveTo (regionEnd p)
putEditorDyn $ Isearch ((current, p, direction) : s)
case matches of
(p:_) -> onSuccess p
[] -> do matchesAfterWrap <- withCurrentBuffer $ do
case direction of
Forward -> moveTo 0
Backward -> do
bufferLength <- sizeB
moveTo bufferLength
regexB direction srch
case matchesAfterWrap of
(p:_) -> onSuccess p
putEditorDyn $ Isearch ((current, p0, direction) : s)
printMsg $ "Failing I-search: " <> current
isearchDelE :: EditorM ()
isearchDelE = do
Isearch s <- getEditorDyn
case s of
(_:(text,p,dir):rest) -> do
withCurrentBuffer $
moveTo $ regionEnd p
putEditorDyn $ Isearch ((text,p,dir):rest)
setRegexE $ makeISearch text
printMsg $ "I-search: " <> text
isearchHistory :: Int -> EditorM ()
isearchHistory delta = do
Isearch ((current,_p0,_dir):_) <- getEditorDyn
h <- historyMoveGen iSearch delta (return current)
isearchFunE (const h)
isearchPrevE :: EditorM ()
isearchPrevE = isearchNext0 Backward
isearchNextE :: EditorM ()
isearchNextE = isearchNext0 Forward
isearchNext0 :: Direction -> EditorM ()
isearchNext0 newDir = do
Isearch ((current,_p0,_dir):_rest) <- getEditorDyn
if T.null current
then isearchHistory 1
else isearchNext newDir
isearchNext :: Direction -> EditorM ()
isearchNext direction = do
Isearch ((current, p0, _dir) : rest) <- getEditorDyn
withCurrentBuffer $ moveTo (regionStart p0 + startOfs)
mp <- withCurrentBuffer $
regexB direction (makeISearch current)
case mp of
[] -> do
endPoint <- withCurrentBuffer $ do
sizeB
printMsg "isearch: end of document reached"
let wrappedOfs = case direction of
Forward -> mkRegion 0 0
Backward -> mkRegion endPoint endPoint
(p:_) -> do
withCurrentBuffer $
moveTo (regionEnd p)
printMsg $ "I-search: " <> current
putEditorDyn $ Isearch ((current,p,direction):rest)
where startOfs = case direction of
Forward -> 1
Backward -> -1
isearchWordE :: EditorM ()
isearchWordE = do
add maximum 32 chars at a time .
text <- R.toText <$> withCurrentBuffer (pointB >>= nelemsB 32)
let (prefix, rest) = T.break isAlpha text
word = T.takeWhile isAlpha rest
isearchAddE $ prefix <> word
isearchFinishE :: EditorM ()
isearchFinishE = isearchEnd True
isearchCancelE :: EditorM ()
isearchCancelE = isearchEnd False
isearchFinishWithE :: EditorM a -> EditorM ()
isearchFinishWithE act = isearchEndWith act True
isearchCancelWithE :: EditorM a -> EditorM ()
isearchCancelWithE act = isearchEndWith act False
iSearch :: T.Text
iSearch = "isearch"
The @act@ parameter allows us to specify an extra action to run
before finishing up the search . For , we do n't want to do
isearchEndWith :: EditorM a -> Bool -> EditorM ()
isearchEndWith act accept = getEditorDyn >>= \case
Isearch [] -> return ()
Isearch s@((lastSearched, _, dir):_) -> do
let (_,p0,_) = last s
historyFinishGen iSearch (return lastSearched)
searchDirectionA .= dir
if accept
then do void act
printMsg "Quit"
else do resetRegexE
withCurrentBuffer $ moveTo $ regionStart p0
isearchEnd :: Bool -> EditorM ()
isearchEnd = isearchEndWith (return ())
Point is end , mark is beginning .
qrNext :: Window -> BufferRef -> SearchExp -> EditorM ()
qrNext win b what = do
mp <- withGivenBufferAndWindow win b $ regexB Forward what
case mp of
[] -> do
printMsg "String to search not found"
qrFinish
(r:_) -> withGivenBufferAndWindow win b $ setSelectRegionB r
qrReplaceAll :: Window -> BufferRef -> SearchExp -> R.YiString -> EditorM ()
qrReplaceAll win b what replacement = do
n <- withGivenBufferAndWindow win b $ do
searchAndRepRegion0 what replacement True =<< regionOfPartB Document Forward
printMsg $ "Replaced " <> showT n <> " occurrences"
qrFinish
qrFinish :: EditorM ()
qrFinish = do
currentRegexA .= Nothing
qrReplaceOne :: Window -> BufferRef -> SearchExp -> R.YiString -> EditorM ()
qrReplaceOne win b reg replacement = do
qrReplaceCurrent win b replacement
qrNext win b reg
qrReplaceCurrent :: Window -> BufferRef -> R.YiString -> EditorM ()
qrReplaceCurrent win b replacement =
withGivenBufferAndWindow win b $
flip replaceRegionB replacement =<< getRawestSelectRegionB
|
7b2b881ca802ef6ba67da4bf1c655b363299877f9ee0453655eab637face775c | shayan-najd/HsAST | HsTypes.hs | # OPTIONS_GHC -Wall #
# LANGUAGE TypeFamilies #
module HsTypes where
import Dependencies
import HsExtension
import {-# SOURCE #-} HsExpr (HsSplice)
import HsDoc
type LHsIPName = Located HsIPName
newtype HsIPName = HsIPName FastString
type LBangType p = Located (BangType p)
type BangType p = HsType p
type LHsContext p = Located (HsContext p)
type HsContext p = [LHsType p]
type LHsKind p = Located (HsKind p)
type HsKind p = HsType p
data LHsQTyVars p
= HsQTvs { hsq_ext :: XHsQTvs p
, hsq_explicit :: [LHsTyVarBndr p]
}
| XLHsQTyVars (XXLHsQTyVars p)
data HsImplicitBndrs p thing
= HsIB { hsib_ext :: XHsIB p thing
, hsib_body :: thing
}
| XHsImplicitBndrs (XXHsImplicitBndrs p thing)
data HsWildCardBndrs p thing
= HsWC { hswc_ext :: XHsWC p thing
, hswc_body :: thing
}
| XHsWildCardBndrs (XXHsWildCardBndrs p thing)
type LHsSigType p = HsImplicitBndrs p (LHsType p)
type LHsSigWcType p = HsWildCardBndrs p (LHsSigType p)
type LHsTyVarBndr p = Located (HsTyVarBndr p)
data HsTyVarBndr p
= UserTyVar
(XUserTyVar p)
LIP
| KindedTyVar
(XKindedTyVar p)
LIP
(LHsKind p)
| XTyVarBndr
(XXTyVarBndr p)
type LHsType p = Located (HsType p)
data HsType p
= HsForAllTy
{ hst_xforall :: XForAllTy p,
hst_bndrs :: [LHsTyVarBndr p]
, hst_body :: LHsType p
}
| HsQualTy
{ hst_xqual :: XQualTy p
, hst_ctxt :: LHsContext p
, hst_body :: LHsType p }
| HsTyVar (XTyVar p)
Promoted
(LIdP p)
| HsAppTy (XAppTy p)
(LHsType p)
(LHsType p)
| HsFunTy (XFunTy p)
(LHsType p)
(LHsType p)
| HsListTy (XListTy p)
(LHsType p)
| HsTupleTy (XTupleTy p)
HsTupleSort
[LHsType p]
| HsSumTy (XSumTy p)
[LHsType p]
| HsOpTy (XOpTy p)
(LHsType p)
(LIdP p)
(LHsType p)
| HsParTy (XParTy p)
(LHsType p)
| HsIParamTy (XIParamTy p)
(LHsIPName)
(LHsType p)
| HsStarTy (XStarTy p)
Bool
| HsKindSig (XKindSig p)
(LHsType p)
(LHsKind p)
| HsSpliceTy (XSpliceTy p)
(HsSplice p)
| HsDocTy (XDocTy p)
(LHsType p)
LHsDocString
| HsBangTy (XBangTy p)
HsSrcBang
(LHsType p)
| HsRecTy (XRecTy p)
[LConDeclField p]
| HsExplicitListTy (XExplicitListTy p)
Promoted
[LHsType p]
| HsExplicitTupleTy (XExplicitTupleTy p)
[LHsType p]
| HsTyLit (XTyLit p)
HsTyLit
| HsWildCardTy (XWildCardTy p)
| XHsType (XXType p)
data HsTyLit
TTG - Todo
SourceText
SourceText
-}
Integer
TTG - Todo
SourceText
SourceText
-}
FastString
data HsTupleSort = HsUnboxedTuple
| HsBoxedTuple
| HsConstraintTuple
| HsBoxedOrConstraintTuple
data Promoted = Promoted
| NotPromoted
type LConDeclField p = Located (ConDeclField p)
data ConDeclField p
= ConDeclField { cd_fld_ext :: XConDeclField p,
cd_fld_names :: [LFieldOcc p],
cd_fld_type :: LBangType p,
cd_fld_doc :: Maybe LHsDocString }
| XConDeclField (XXConDeclField p)
data HsConDetails arg rec
= PrefixCon [arg]
| RecCon rec
| InfixCon arg arg
type LFieldOcc p = Located (FieldOcc p)
data FieldOcc p
= FieldOcc { extFieldOcc :: XCFieldOcc p
TTG - Todo new
TTG - Todo
, rdrNameFieldOcc : : Located RdrName
, rdrNameFieldOcc :: Located RdrName
-}
}
| XFieldOcc
(XXFieldOcc p)
TTG - Todo new
data AmbiguousFieldOcc p
TTG : todo : SrcLocs
TTG : todo : SrcLocs
TTG - Todo
data AmbiguousFieldOcc p
= Unambiguous ( XUnambiguous p ) ( Located RdrName )
| Ambiguous ( XAmbiguous p ) ( Located RdrName )
| XAmbiguousFieldOcc ( XXAmbiguousFieldOcc p )
data AmbiguousFieldOcc p
= Unambiguous (XUnambiguous p) (Located RdrName)
| Ambiguous (XAmbiguous p) (Located RdrName)
| XAmbiguousFieldOcc (XXAmbiguousFieldOcc p)
-}
| null | https://raw.githubusercontent.com/shayan-najd/HsAST/9bd46612c63165d0925b866fc2922a9150a89943/HsTypes.hs | haskell | # SOURCE # | # OPTIONS_GHC -Wall #
# LANGUAGE TypeFamilies #
module HsTypes where
import Dependencies
import HsExtension
import HsDoc
type LHsIPName = Located HsIPName
newtype HsIPName = HsIPName FastString
type LBangType p = Located (BangType p)
type BangType p = HsType p
type LHsContext p = Located (HsContext p)
type HsContext p = [LHsType p]
type LHsKind p = Located (HsKind p)
type HsKind p = HsType p
data LHsQTyVars p
= HsQTvs { hsq_ext :: XHsQTvs p
, hsq_explicit :: [LHsTyVarBndr p]
}
| XLHsQTyVars (XXLHsQTyVars p)
data HsImplicitBndrs p thing
= HsIB { hsib_ext :: XHsIB p thing
, hsib_body :: thing
}
| XHsImplicitBndrs (XXHsImplicitBndrs p thing)
data HsWildCardBndrs p thing
= HsWC { hswc_ext :: XHsWC p thing
, hswc_body :: thing
}
| XHsWildCardBndrs (XXHsWildCardBndrs p thing)
type LHsSigType p = HsImplicitBndrs p (LHsType p)
type LHsSigWcType p = HsWildCardBndrs p (LHsSigType p)
type LHsTyVarBndr p = Located (HsTyVarBndr p)
data HsTyVarBndr p
= UserTyVar
(XUserTyVar p)
LIP
| KindedTyVar
(XKindedTyVar p)
LIP
(LHsKind p)
| XTyVarBndr
(XXTyVarBndr p)
type LHsType p = Located (HsType p)
data HsType p
= HsForAllTy
{ hst_xforall :: XForAllTy p,
hst_bndrs :: [LHsTyVarBndr p]
, hst_body :: LHsType p
}
| HsQualTy
{ hst_xqual :: XQualTy p
, hst_ctxt :: LHsContext p
, hst_body :: LHsType p }
| HsTyVar (XTyVar p)
Promoted
(LIdP p)
| HsAppTy (XAppTy p)
(LHsType p)
(LHsType p)
| HsFunTy (XFunTy p)
(LHsType p)
(LHsType p)
| HsListTy (XListTy p)
(LHsType p)
| HsTupleTy (XTupleTy p)
HsTupleSort
[LHsType p]
| HsSumTy (XSumTy p)
[LHsType p]
| HsOpTy (XOpTy p)
(LHsType p)
(LIdP p)
(LHsType p)
| HsParTy (XParTy p)
(LHsType p)
| HsIParamTy (XIParamTy p)
(LHsIPName)
(LHsType p)
| HsStarTy (XStarTy p)
Bool
| HsKindSig (XKindSig p)
(LHsType p)
(LHsKind p)
| HsSpliceTy (XSpliceTy p)
(HsSplice p)
| HsDocTy (XDocTy p)
(LHsType p)
LHsDocString
| HsBangTy (XBangTy p)
HsSrcBang
(LHsType p)
| HsRecTy (XRecTy p)
[LConDeclField p]
| HsExplicitListTy (XExplicitListTy p)
Promoted
[LHsType p]
| HsExplicitTupleTy (XExplicitTupleTy p)
[LHsType p]
| HsTyLit (XTyLit p)
HsTyLit
| HsWildCardTy (XWildCardTy p)
| XHsType (XXType p)
data HsTyLit
TTG - Todo
SourceText
SourceText
-}
Integer
TTG - Todo
SourceText
SourceText
-}
FastString
data HsTupleSort = HsUnboxedTuple
| HsBoxedTuple
| HsConstraintTuple
| HsBoxedOrConstraintTuple
data Promoted = Promoted
| NotPromoted
type LConDeclField p = Located (ConDeclField p)
data ConDeclField p
= ConDeclField { cd_fld_ext :: XConDeclField p,
cd_fld_names :: [LFieldOcc p],
cd_fld_type :: LBangType p,
cd_fld_doc :: Maybe LHsDocString }
| XConDeclField (XXConDeclField p)
data HsConDetails arg rec
= PrefixCon [arg]
| RecCon rec
| InfixCon arg arg
type LFieldOcc p = Located (FieldOcc p)
data FieldOcc p
= FieldOcc { extFieldOcc :: XCFieldOcc p
TTG - Todo new
TTG - Todo
, rdrNameFieldOcc : : Located RdrName
, rdrNameFieldOcc :: Located RdrName
-}
}
| XFieldOcc
(XXFieldOcc p)
TTG - Todo new
data AmbiguousFieldOcc p
TTG : todo : SrcLocs
TTG : todo : SrcLocs
TTG - Todo
data AmbiguousFieldOcc p
= Unambiguous ( XUnambiguous p ) ( Located RdrName )
| Ambiguous ( XAmbiguous p ) ( Located RdrName )
| XAmbiguousFieldOcc ( XXAmbiguousFieldOcc p )
data AmbiguousFieldOcc p
= Unambiguous (XUnambiguous p) (Located RdrName)
| Ambiguous (XAmbiguous p) (Located RdrName)
| XAmbiguousFieldOcc (XXAmbiguousFieldOcc p)
-}
|
d128524c37be137eb92d4fb87f9064155dcae373c96fcbe069802afc9e6bcccf | janestreet/core | hashable_intf.ml | open! Import
module type Common = sig
type t [@@deriving compare, hash]
val hashable : t Hashtbl.Hashable.t
end
module type S_plain = sig
include Common
module Table : Hashtbl.S_plain with type key = t
module Hash_set : Hash_set.S_plain with type elt = t
module Hash_queue : Hash_queue.S with type key = t
end
module type S = sig
include Common
module Table : Hashtbl.S with type key = t
module Hash_set : Hash_set.S with type elt = t
module Hash_queue : Hash_queue.S with type key = t
end
module type S_binable = sig
type t [@@deriving hash]
val hashable : t Hashtbl.Hashable.t
module Table : Hashtbl.S_binable with type key = t
module Hash_set : Hash_set.S_binable with type elt = t
module Hash_queue : Hash_queue.S with type key = t
end
module type Hashable = sig
module type Common = Common
module type S = S
module type S_binable = S_binable
module type S_plain = S_plain
module Make_plain (T : sig
type t [@@deriving hash]
include Hashtbl.Key_plain with type t := t
end) : S_plain with type t := T.t
module Make_plain_and_derive_hash_fold_t (T : Hashtbl.Key_plain) :
S_plain with type t := T.t
module Make (T : sig
type t [@@deriving hash]
include Hashtbl.Key with type t := t
end) : S with type t := T.t
module Make_and_derive_hash_fold_t (T : Hashtbl.Key) : S with type t := T.t
module Make_binable (T : sig
type t [@@deriving hash]
include Hashtbl.Key_binable with type t := t
end) : S_binable with type t := T.t
module Make_plain_with_hashable (T : sig
module Key : sig
type t [@@deriving hash]
include Hashtbl.Key_plain with type t := t
end
val hashable : Key.t Hashtbl.Hashable.t
end) : S_plain with type t := T.Key.t
module Make_with_hashable (T : sig
module Key : sig
type t [@@deriving hash]
include Hashtbl.Key with type t := t
end
val hashable : Key.t Hashtbl.Hashable.t
end) : S with type t := T.Key.t
module Make_binable_with_hashable (T : sig
module Key : sig
type t [@@deriving hash]
include Hashtbl.Key_binable with type t := t
end
val hashable : Key.t Hashtbl.Hashable.t
end) : S_binable with type t := T.Key.t
module Make_binable_and_derive_hash_fold_t (T : Hashtbl.Key_binable) :
S_binable with type t := T.t
module Stable : sig
module V1 : sig
module type S = sig
type key
module Table : sig
type 'a t = (key, 'a) Hashtbl.t [@@deriving sexp, bin_io]
end
module Hash_set : sig
type t = key Hash_set.t [@@deriving sexp, bin_io]
end
val hashable : key Hashtbl.Hashable.t
end
module Make (Key : Hashtbl.Key_binable) : S with type key := Key.t
module Make_with_hashable (T : sig
module Key : Hashtbl.Key_binable
val hashable : Key.t Hashtbl.Hashable.t
end) : S with type key := T.Key.t
module With_stable_witness : sig
module type S = sig
type key
module Table : sig
type 'a t = (key, 'a) Hashtbl.t [@@deriving sexp, bin_io, stable_witness]
end
module Hash_set : sig
type t = key Hash_set.t [@@deriving sexp, bin_io, stable_witness]
end
val hashable : key Hashtbl.Hashable.t
end
module Make (Key : Hashtbl.Key_stable) : S with type key := Key.t
module Make_with_hashable (T : sig
module Key : Hashtbl.Key_stable
val hashable : Key.t Hashtbl.Hashable.t
end) : S with type key := T.Key.t
end
end
end
end
| null | https://raw.githubusercontent.com/janestreet/core/4b6635d206f7adcfac8324820d246299d6f572fe/core/src/hashable_intf.ml | ocaml | open! Import
module type Common = sig
type t [@@deriving compare, hash]
val hashable : t Hashtbl.Hashable.t
end
module type S_plain = sig
include Common
module Table : Hashtbl.S_plain with type key = t
module Hash_set : Hash_set.S_plain with type elt = t
module Hash_queue : Hash_queue.S with type key = t
end
module type S = sig
include Common
module Table : Hashtbl.S with type key = t
module Hash_set : Hash_set.S with type elt = t
module Hash_queue : Hash_queue.S with type key = t
end
module type S_binable = sig
type t [@@deriving hash]
val hashable : t Hashtbl.Hashable.t
module Table : Hashtbl.S_binable with type key = t
module Hash_set : Hash_set.S_binable with type elt = t
module Hash_queue : Hash_queue.S with type key = t
end
module type Hashable = sig
module type Common = Common
module type S = S
module type S_binable = S_binable
module type S_plain = S_plain
module Make_plain (T : sig
type t [@@deriving hash]
include Hashtbl.Key_plain with type t := t
end) : S_plain with type t := T.t
module Make_plain_and_derive_hash_fold_t (T : Hashtbl.Key_plain) :
S_plain with type t := T.t
module Make (T : sig
type t [@@deriving hash]
include Hashtbl.Key with type t := t
end) : S with type t := T.t
module Make_and_derive_hash_fold_t (T : Hashtbl.Key) : S with type t := T.t
module Make_binable (T : sig
type t [@@deriving hash]
include Hashtbl.Key_binable with type t := t
end) : S_binable with type t := T.t
module Make_plain_with_hashable (T : sig
module Key : sig
type t [@@deriving hash]
include Hashtbl.Key_plain with type t := t
end
val hashable : Key.t Hashtbl.Hashable.t
end) : S_plain with type t := T.Key.t
module Make_with_hashable (T : sig
module Key : sig
type t [@@deriving hash]
include Hashtbl.Key with type t := t
end
val hashable : Key.t Hashtbl.Hashable.t
end) : S with type t := T.Key.t
module Make_binable_with_hashable (T : sig
module Key : sig
type t [@@deriving hash]
include Hashtbl.Key_binable with type t := t
end
val hashable : Key.t Hashtbl.Hashable.t
end) : S_binable with type t := T.Key.t
module Make_binable_and_derive_hash_fold_t (T : Hashtbl.Key_binable) :
S_binable with type t := T.t
module Stable : sig
module V1 : sig
module type S = sig
type key
module Table : sig
type 'a t = (key, 'a) Hashtbl.t [@@deriving sexp, bin_io]
end
module Hash_set : sig
type t = key Hash_set.t [@@deriving sexp, bin_io]
end
val hashable : key Hashtbl.Hashable.t
end
module Make (Key : Hashtbl.Key_binable) : S with type key := Key.t
module Make_with_hashable (T : sig
module Key : Hashtbl.Key_binable
val hashable : Key.t Hashtbl.Hashable.t
end) : S with type key := T.Key.t
module With_stable_witness : sig
module type S = sig
type key
module Table : sig
type 'a t = (key, 'a) Hashtbl.t [@@deriving sexp, bin_io, stable_witness]
end
module Hash_set : sig
type t = key Hash_set.t [@@deriving sexp, bin_io, stable_witness]
end
val hashable : key Hashtbl.Hashable.t
end
module Make (Key : Hashtbl.Key_stable) : S with type key := Key.t
module Make_with_hashable (T : sig
module Key : Hashtbl.Key_stable
val hashable : Key.t Hashtbl.Hashable.t
end) : S with type key := T.Key.t
end
end
end
end
| |
0b616a638560125caaca0185370948becde537a405ec3b27e557a72ec847c8b5 | patrikja/AFPcourse | Snake.hs | {-
A simple single player snake game.
-}
module Snake where
import ANSI (ansiGoto, ansiColour, Colour(..))
import Program (Program, putS, getC)
import Game (runGame, Game)
import Coord (Coord, Dir(..), outOfBounds, movePos)
| A snake is a list of body coord.s and a dir . of travel .
data Snake = Snake { pos :: [Coord]
, dir :: Dir
}
-- | The starting position of the snake.
startingSnake :: Snake
startingSnake = Snake ((11,10) : replicate 20 (10,10)) East
-- | Check if a snake has collided with itself.
collision :: Snake -> Bool
collision g = case pos g of
[] -> False
p : ps -> outOfBounds p || p `elem` ps
-- | Output a string at a given coordinate (uses some ANSI magic).
putStrAt :: Coord -> String -> Program ()
putStrAt p s = putS $ gotoPos p ++ s
where
gotoPos (x, y) = ansiGoto (x * 2 + 1) (y + 1)
-- | Draw the snake. The last part of the tail is erased.
drawSnake :: Colour -> String -> Snake -> Program ()
drawSnake col px s = do
let ps = pos s
putStrAt (last ps) " " -- erase previous tail
putStrAt (head ps) $ ansiColour col px -- print new head
-- | The different actions that the player can take.
data Action = Turn Dir | Exit deriving Show
-- | Keyboard controls. Binds keys to actions.
controls :: [(Char, Action)]
controls =
zip "wasd" (map Turn [North, West, South, East]) ++
[ ('q', Exit), ('\ESC', Exit) ]
| One step of the actual game
snake :: Game Snake
snake g
| collision g = do
putStrAt (5, 7) "Game Over!"
stop
| otherwise = do
drawSnake Yellow "()" g
putStrAt (0,0) ""
mc <- getC
case mc >>= \c -> lookup c controls of -- Maybe is a monad
Nothing -> continue_
Just (Turn d) -> continue d
Just Exit -> stop
where
-- Moving the snake means adding a new head and removing
-- the last element of the tail.
move (p:ps) d = movePos p d : p : init ps
stop = return Nothing
continue_ = continue (dir g)
continue d = return $ Just $ g { pos = move (pos g) d
, dir = d }
| null | https://raw.githubusercontent.com/patrikja/AFPcourse/1a079ae80ba2dbb36f3f79f0fc96a502c0f670b6/L3/src/Snake.hs | haskell |
A simple single player snake game.
| The starting position of the snake.
| Check if a snake has collided with itself.
| Output a string at a given coordinate (uses some ANSI magic).
| Draw the snake. The last part of the tail is erased.
erase previous tail
print new head
| The different actions that the player can take.
| Keyboard controls. Binds keys to actions.
Maybe is a monad
Moving the snake means adding a new head and removing
the last element of the tail. | module Snake where
import ANSI (ansiGoto, ansiColour, Colour(..))
import Program (Program, putS, getC)
import Game (runGame, Game)
import Coord (Coord, Dir(..), outOfBounds, movePos)
| A snake is a list of body coord.s and a dir . of travel .
data Snake = Snake { pos :: [Coord]
, dir :: Dir
}
startingSnake :: Snake
startingSnake = Snake ((11,10) : replicate 20 (10,10)) East
collision :: Snake -> Bool
collision g = case pos g of
[] -> False
p : ps -> outOfBounds p || p `elem` ps
putStrAt :: Coord -> String -> Program ()
putStrAt p s = putS $ gotoPos p ++ s
where
gotoPos (x, y) = ansiGoto (x * 2 + 1) (y + 1)
drawSnake :: Colour -> String -> Snake -> Program ()
drawSnake col px s = do
let ps = pos s
data Action = Turn Dir | Exit deriving Show
controls :: [(Char, Action)]
controls =
zip "wasd" (map Turn [North, West, South, East]) ++
[ ('q', Exit), ('\ESC', Exit) ]
| One step of the actual game
snake :: Game Snake
snake g
| collision g = do
putStrAt (5, 7) "Game Over!"
stop
| otherwise = do
drawSnake Yellow "()" g
putStrAt (0,0) ""
mc <- getC
Nothing -> continue_
Just (Turn d) -> continue d
Just Exit -> stop
where
move (p:ps) d = movePos p d : p : init ps
stop = return Nothing
continue_ = continue (dir g)
continue d = return $ Just $ g { pos = move (pos g) d
, dir = d }
|
600c5405dd140e57395f6aaf2227b0f674373767477982d19f5d5d9d896bb6bf | disconcision/fructure | legacy.rkt | #lang racket
(require racket/hash
"../../shared/fructerm/fructerm.rkt"
"../language/syntax.rkt"
"../common.rkt")
(provide mode:legacy)
(define (mode:legacy key state)
(define transform
(hash-ref (hash-union packaged-alpha-constructors keymap
#:combine/key (λ (k v v1) v))
key identity->))
(apply-> transform state))
; -------------------------------------------------
; packaged constructors and their helpers
; structure for annotating transformation rules
(struct -> (class props payload) #:transparent)
(define (make-constructor raw-rule)
(define (select mstx)
(match mstx
[`(,y ... / ,d ...)
`(▹ ,@y / ,@d)]))
(define (wrap⋱select mstx)
(for/list ([s mstx])
(match s
[`[,a ,b]
`[⋱ ,(select a) ,(select b)]])))
`(compose->
,(-> 'runtime
(set 'meta 'move-▹)
'([(c ⋱ (▹ ys ... / (d ⋱ (xs ... / ⊙))))
(c ⋱ (ys ... / (d ⋱ (▹ xs ... / ⊙))))]
[A A]))
,(-> 'runtime
(set 'object 'constructor)
(wrap⋱select raw-rule))))
(define make-destructor
make-constructor)
(define identity->
(-> 'runtime
(set 'object)
'([A A])))
(define (make-movement raw-rule)
(-> 'runtime
(set 'meta 'move-▹)
raw-rule))
(define keymap
; map from keys to functions
; this is mostly deprecated by transform mode
; but is still useful for editing identifers
; pending a more structure solution
(hash
; constructors
"1" (make-constructor
'([([sort expr] xs ... / ⊙)
([sort expr] xs ... / 0)]))
"2" (make-constructor
'([([sort expr] xs ... / ⊙)
([sort expr] xs ... / (app ([sort expr] / ⊙)
([sort expr] / ⊙)))]))
"3" (make-constructor
'([([sort expr] xs ... / ⊙)
([sort expr] xs ... / (λ ( / ((#;[sort pat] / (id ([sort char] / ⊙)))))
([sort expr] / ⊙)))]))
; destructors
"\b" (-> 'runtime (set)
'([⋱
(xs ... / (id as ... a (▹ ys ... / b) bs ...))
(xs ... / (id as ... (▹ ys ... / b) bs ...))]))
"\u007F" `(fallthrough->
,(-> 'runtime (set)
'([⋱
(xs ... / (id as ... (▹ ys ... / a) (zs ... / b) bs ...))
(xs ... / (id as ... (▹ zs ... / b) bs ...))]))
,(make-destructor
'([(xs ... / 0)
(xs ... / ⊙)]
[(xs ... / (ref a))
(xs ... / ⊙)]
[(xs ... / (id a))
(xs ... / ⊙)]
[(xs ... / (app a b))
(xs ... / ⊙)]
[(xs ... / (λ a b))
(xs ... / ⊙)])))
; movements
"up" (make-movement
'([⋱
(a ... / (λ (b ... / ((c ... / (id x ... (▹ ys ... / y) z ...)))) e))
(▹ a ... / (λ (b ... / ((c ... / (id x ... (ys ... / y) z ...)))) e))]
[⋱
(As ... / (a ... (▹ Bs ... / b) c ...))
(▹ As ... / (a ... (Bs ... / b) c ...))]))
"down" (make-movement
'([⋱
(▹ a ... / (λ (b ... / ((c ... / (id (ys ... / y) z ...)))) e))
(a ... / (λ (b ... / ((c ... / (id (▹ ys ... / y) z ...)))) e))]
[⋱
(▹ As ... / (ctx ⋱ (sort Bs ... / b)))
(As ... / (ctx ⋱ (▹ sort Bs ... / b)))]))
"left" (make-movement
'([⋱
(◇ (▹ As ... / c))
(◇ (▹ As ... / c))]
[⋱
(var (▹ As ... / c))
(var (▹ As ... / c))]
[⋱
(app (▹ As ... / c) d ...)
(app (▹ As ... / c) d ...)]
[⋱
(λ (Cs ... / ((▹ Bs ... / a))) b)
(λ (Cs ... / ((▹ Bs ... / a))) b)]
[⋱
(λ (Cs ... / ((As ... / a))) (▹ Bs ... / b))
(λ (Cs ... / ((▹ As ... / a))) (Bs ... / b))]
[⋱
((▹ As ... / c) d ...)
((▹ As ... / c) d ...)]
[⋱
(a ... (As ... / b) (▹ Bs ... / c) d ...)
(a ... (▹ As ... / b) (Bs ... / c) d ...)]))
"right" (make-movement
'([⋱
(λ (Cs ... / ((▹ As ... / a))) (Bs ... / b))
(λ (Cs ... / ((As ... / a))) (▹ Bs ... / b))]
[⋱
(a ... (▹ As ... / b) (Bs ... / c) d ...)
(a ... (As ... / b) (▹ Bs ... / c) d ...)]))))
(define alphabet
; character set for identifiers
'(a b c d e f g h i j k l m n o p q r s t u v w x y z))
; make constructors for each character
(define packaged-alpha-constructors
(for/fold ([alpha (hash)])
([x alphabet])
(hash-set alpha
(symbol->string x)
(-> 'runtime (set)
`([⋱
(xs ... / (id as ... (▹ ys ... / b) bs ...))
(xs ... / (id as ... ([sort char] / ',x) (▹ ys ... / b) bs ...))])))))
; perform a sequence of actions
(define (do-seq stx actions)
(for/fold ([s stx])
([a actions])
(runtime-match literals a s)))
(define (apply-> transform state)
(define update (curry hash-set* state))
(define-from state
stx mode transforms messages)
(match transform
[`(fallthrough-> ,t0 ,t1)
(let ([new-state (apply-> t0 state)])
(if (equal? new-state state)
(apply-> t1 state)
new-state))]
[`(compose-> ,x)
(apply-> x state)]
[`(compose-> ,xs ..1 ,x)
(apply-> `(compose-> ,@xs)
(apply-> x state))]
[(-> 'runtime _ t)
(match (runtime-match literals t stx)
['no-match state]
[new-stx
(update
'stx new-stx
'transforms `(,t ,@transforms)
'messages `("performed action" ,@messages)
)])])) | null | https://raw.githubusercontent.com/disconcision/fructure/d434086052eab3c450f631b7b14dcbf9358f45b7/src/mode/legacy.rkt | racket | -------------------------------------------------
packaged constructors and their helpers
structure for annotating transformation rules
map from keys to functions
this is mostly deprecated by transform mode
but is still useful for editing identifers
pending a more structure solution
constructors
[sort pat] / (id ([sort char] / ⊙)))))
destructors
movements
character set for identifiers
make constructors for each character
perform a sequence of actions | #lang racket
(require racket/hash
"../../shared/fructerm/fructerm.rkt"
"../language/syntax.rkt"
"../common.rkt")
(provide mode:legacy)
(define (mode:legacy key state)
(define transform
(hash-ref (hash-union packaged-alpha-constructors keymap
#:combine/key (λ (k v v1) v))
key identity->))
(apply-> transform state))
(struct -> (class props payload) #:transparent)
(define (make-constructor raw-rule)
(define (select mstx)
(match mstx
[`(,y ... / ,d ...)
`(▹ ,@y / ,@d)]))
(define (wrap⋱select mstx)
(for/list ([s mstx])
(match s
[`[,a ,b]
`[⋱ ,(select a) ,(select b)]])))
`(compose->
,(-> 'runtime
(set 'meta 'move-▹)
'([(c ⋱ (▹ ys ... / (d ⋱ (xs ... / ⊙))))
(c ⋱ (ys ... / (d ⋱ (▹ xs ... / ⊙))))]
[A A]))
,(-> 'runtime
(set 'object 'constructor)
(wrap⋱select raw-rule))))
(define make-destructor
make-constructor)
(define identity->
(-> 'runtime
(set 'object)
'([A A])))
(define (make-movement raw-rule)
(-> 'runtime
(set 'meta 'move-▹)
raw-rule))
(define keymap
(hash
"1" (make-constructor
'([([sort expr] xs ... / ⊙)
([sort expr] xs ... / 0)]))
"2" (make-constructor
'([([sort expr] xs ... / ⊙)
([sort expr] xs ... / (app ([sort expr] / ⊙)
([sort expr] / ⊙)))]))
"3" (make-constructor
'([([sort expr] xs ... / ⊙)
([sort expr] / ⊙)))]))
"\b" (-> 'runtime (set)
'([⋱
(xs ... / (id as ... a (▹ ys ... / b) bs ...))
(xs ... / (id as ... (▹ ys ... / b) bs ...))]))
"\u007F" `(fallthrough->
,(-> 'runtime (set)
'([⋱
(xs ... / (id as ... (▹ ys ... / a) (zs ... / b) bs ...))
(xs ... / (id as ... (▹ zs ... / b) bs ...))]))
,(make-destructor
'([(xs ... / 0)
(xs ... / ⊙)]
[(xs ... / (ref a))
(xs ... / ⊙)]
[(xs ... / (id a))
(xs ... / ⊙)]
[(xs ... / (app a b))
(xs ... / ⊙)]
[(xs ... / (λ a b))
(xs ... / ⊙)])))
"up" (make-movement
'([⋱
(a ... / (λ (b ... / ((c ... / (id x ... (▹ ys ... / y) z ...)))) e))
(▹ a ... / (λ (b ... / ((c ... / (id x ... (ys ... / y) z ...)))) e))]
[⋱
(As ... / (a ... (▹ Bs ... / b) c ...))
(▹ As ... / (a ... (Bs ... / b) c ...))]))
"down" (make-movement
'([⋱
(▹ a ... / (λ (b ... / ((c ... / (id (ys ... / y) z ...)))) e))
(a ... / (λ (b ... / ((c ... / (id (▹ ys ... / y) z ...)))) e))]
[⋱
(▹ As ... / (ctx ⋱ (sort Bs ... / b)))
(As ... / (ctx ⋱ (▹ sort Bs ... / b)))]))
"left" (make-movement
'([⋱
(◇ (▹ As ... / c))
(◇ (▹ As ... / c))]
[⋱
(var (▹ As ... / c))
(var (▹ As ... / c))]
[⋱
(app (▹ As ... / c) d ...)
(app (▹ As ... / c) d ...)]
[⋱
(λ (Cs ... / ((▹ Bs ... / a))) b)
(λ (Cs ... / ((▹ Bs ... / a))) b)]
[⋱
(λ (Cs ... / ((As ... / a))) (▹ Bs ... / b))
(λ (Cs ... / ((▹ As ... / a))) (Bs ... / b))]
[⋱
((▹ As ... / c) d ...)
((▹ As ... / c) d ...)]
[⋱
(a ... (As ... / b) (▹ Bs ... / c) d ...)
(a ... (▹ As ... / b) (Bs ... / c) d ...)]))
"right" (make-movement
'([⋱
(λ (Cs ... / ((▹ As ... / a))) (Bs ... / b))
(λ (Cs ... / ((As ... / a))) (▹ Bs ... / b))]
[⋱
(a ... (▹ As ... / b) (Bs ... / c) d ...)
(a ... (As ... / b) (▹ Bs ... / c) d ...)]))))
(define alphabet
'(a b c d e f g h i j k l m n o p q r s t u v w x y z))
(define packaged-alpha-constructors
(for/fold ([alpha (hash)])
([x alphabet])
(hash-set alpha
(symbol->string x)
(-> 'runtime (set)
`([⋱
(xs ... / (id as ... (▹ ys ... / b) bs ...))
(xs ... / (id as ... ([sort char] / ',x) (▹ ys ... / b) bs ...))])))))
(define (do-seq stx actions)
(for/fold ([s stx])
([a actions])
(runtime-match literals a s)))
(define (apply-> transform state)
(define update (curry hash-set* state))
(define-from state
stx mode transforms messages)
(match transform
[`(fallthrough-> ,t0 ,t1)
(let ([new-state (apply-> t0 state)])
(if (equal? new-state state)
(apply-> t1 state)
new-state))]
[`(compose-> ,x)
(apply-> x state)]
[`(compose-> ,xs ..1 ,x)
(apply-> `(compose-> ,@xs)
(apply-> x state))]
[(-> 'runtime _ t)
(match (runtime-match literals t stx)
['no-match state]
[new-stx
(update
'stx new-stx
'transforms `(,t ,@transforms)
'messages `("performed action" ,@messages)
)])])) |
acc55e39e388b424c1b49ac26204a692748dcbcbcbc15d955951aee26b1772d2 | bmeurer/ocaml-arm | moreLabels.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, Kyoto University RIMS
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file ../LICENSE. *)
(* *)
(***********************************************************************)
$ Id$
* Extra labeled libraries .
This meta - module provides labelized version of the { ! } ,
{ ! Map } and { ! Set } modules .
They only differ by their labels . They are provided to help
porting from previous versions of OCaml .
The contents of this module are subject to change .
This meta-module provides labelized version of the {!Hashtbl},
{!Map} and {!Set} modules.
They only differ by their labels. They are provided to help
porting from previous versions of OCaml.
The contents of this module are subject to change.
*)
module Hashtbl : sig
type ('a, 'b) t = ('a, 'b) Hashtbl.t
val create : ?random:bool -> int -> ('a, 'b) t
val clear : ('a, 'b) t -> unit
val reset : ('a, 'b) t -> unit
val copy : ('a, 'b) t -> ('a, 'b) t
val add : ('a, 'b) t -> key:'a -> data:'b -> unit
val find : ('a, 'b) t -> 'a -> 'b
val find_all : ('a, 'b) t -> 'a -> 'b list
val mem : ('a, 'b) t -> 'a -> bool
val remove : ('a, 'b) t -> 'a -> unit
val replace : ('a, 'b) t -> key:'a -> data:'b -> unit
val iter : f:(key:'a -> data:'b -> unit) -> ('a, 'b) t -> unit
val fold :
f:(key:'a -> data:'b -> 'c -> 'c) ->
('a, 'b) t -> init:'c -> 'c
val length : ('a, 'b) t -> int
val randomize : unit -> unit
type statistics = Hashtbl.statistics
val stats : ('a, 'b) t -> statistics
module type HashedType = Hashtbl.HashedType
module type SeededHashedType = Hashtbl.SeededHashedType
module type S =
sig
type key
and 'a t
val create : int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy : 'a t -> 'a t
val add : 'a t -> key:key -> data:'a -> unit
val remove : 'a t -> key -> unit
val find : 'a t -> key -> 'a
val find_all : 'a t -> key -> 'a list
val replace : 'a t -> key:key -> data:'a -> unit
val mem : 'a t -> key -> bool
val iter : f:(key:key -> data:'a -> unit) -> 'a t -> unit
val fold :
f:(key:key -> data:'a -> 'b -> 'b) ->
'a t -> init:'b -> 'b
val length : 'a t -> int
val stats: 'a t -> statistics
end
module type SeededS =
sig
type key
and 'a t
val create : ?random:bool -> int -> 'a t
val clear : 'a t -> unit
val reset : 'a t -> unit
val copy : 'a t -> 'a t
val add : 'a t -> key:key -> data:'a -> unit
val remove : 'a t -> key -> unit
val find : 'a t -> key -> 'a
val find_all : 'a t -> key -> 'a list
val replace : 'a t -> key:key -> data:'a -> unit
val mem : 'a t -> key -> bool
val iter : f:(key:key -> data:'a -> unit) -> 'a t -> unit
val fold :
f:(key:key -> data:'a -> 'b -> 'b) ->
'a t -> init:'b -> 'b
val length : 'a t -> int
val stats: 'a t -> statistics
end
module Make : functor (H : HashedType) -> S with type key = H.t
module MakeSeeded (H : SeededHashedType) : SeededS with type key = H.t
val hash : 'a -> int
val seeded_hash : int -> 'a -> int
val hash_param : int -> int -> 'a -> int
val seeded_hash_param : int -> int -> int -> 'a -> int
end
module Map : sig
module type OrderedType = Map.OrderedType
module type S =
sig
type key
and (+'a) t
val empty : 'a t
val is_empty: 'a t -> bool
val mem : key -> 'a t -> bool
val add : key:key -> data:'a -> 'a t -> 'a t
val singleton: key -> 'a -> 'a t
val remove : key -> 'a t -> 'a t
val merge: f:(key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t
val compare: cmp:('a -> 'a -> int) -> 'a t -> 'a t -> int
val equal: cmp:('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val iter : f:(key:key -> data:'a -> unit) -> 'a t -> unit
val fold :
f:(key:key -> data:'a -> 'b -> 'b) ->
'a t -> init:'b -> 'b
val for_all: f:(key -> 'a -> bool) -> 'a t -> bool
val exists: f:(key -> 'a -> bool) -> 'a t -> bool
val filter: f:(key -> 'a -> bool) -> 'a t -> 'a t
val partition: f:(key -> 'a -> bool) -> 'a t -> 'a t * 'a t
val cardinal: 'a t -> int
val bindings: 'a t -> (key * 'a) list
val min_binding: 'a t -> (key * 'a)
val max_binding: 'a t -> (key * 'a)
val choose: 'a t -> (key * 'a)
val split: key -> 'a t -> 'a t * 'a option * 'a t
val find : key -> 'a t -> 'a
val map : f:('a -> 'b) -> 'a t -> 'b t
val mapi : f:(key -> 'a -> 'b) -> 'a t -> 'b t
end
module Make : functor (Ord : OrderedType) -> S with type key = Ord.t
end
module Set : sig
module type OrderedType = Set.OrderedType
module type S =
sig
type elt
and t
val empty : t
val is_empty : t -> bool
val mem : elt -> t -> bool
val add : elt -> t -> t
val singleton : elt -> t
val remove : elt -> t -> t
val union : t -> t -> t
val inter : t -> t -> t
val diff : t -> t -> t
val compare : t -> t -> int
val equal : t -> t -> bool
val subset : t -> t -> bool
val iter : f:(elt -> unit) -> t -> unit
val fold : f:(elt -> 'a -> 'a) -> t -> init:'a -> 'a
val for_all : f:(elt -> bool) -> t -> bool
val exists : f:(elt -> bool) -> t -> bool
val filter : f:(elt -> bool) -> t -> t
val partition : f:(elt -> bool) -> t -> t * t
val cardinal : t -> int
val elements : t -> elt list
val min_elt : t -> elt
val max_elt : t -> elt
val choose : t -> elt
val split: elt -> t -> t * bool * t
end
module Make : functor (Ord : OrderedType) -> S with type elt = Ord.t
end
| null | https://raw.githubusercontent.com/bmeurer/ocaml-arm/43f7689c76a349febe3d06ae7a4fc1d52984fd8b/stdlib/moreLabels.mli | ocaml | *********************************************************************
OCaml
the special exception on linking described in file ../LICENSE.
********************************************************************* | , Kyoto University RIMS
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
$ Id$
* Extra labeled libraries .
This meta - module provides labelized version of the { ! } ,
{ ! Map } and { ! Set } modules .
They only differ by their labels . They are provided to help
porting from previous versions of OCaml .
The contents of this module are subject to change .
This meta-module provides labelized version of the {!Hashtbl},
{!Map} and {!Set} modules.
They only differ by their labels. They are provided to help
porting from previous versions of OCaml.
The contents of this module are subject to change.
*)
(** Labelled interface to the standard [Hashtbl] module: the table type is
    shared with [Hashtbl], but mutators and iterators take labelled
    [~key], [~data] and [~f] arguments. *)
module Hashtbl : sig
  type ('a, 'b) t = ('a, 'b) Hashtbl.t
  (** [create ?random n] returns a fresh table with initial capacity [n];
      [~random:true] randomizes hashing to resist collision attacks. *)
  val create : ?random:bool -> int -> ('a, 'b) t
  val clear : ('a, 'b) t -> unit
  val reset : ('a, 'b) t -> unit
  val copy : ('a, 'b) t -> ('a, 'b) t
  (** [add] stacks a new binding on top of any previous one for the same
      key (see [find_all]); [replace] overwrites it instead. *)
  val add : ('a, 'b) t -> key:'a -> data:'b -> unit
  val find : ('a, 'b) t -> 'a -> 'b
  val find_all : ('a, 'b) t -> 'a -> 'b list
  val mem : ('a, 'b) t -> 'a -> bool
  val remove : ('a, 'b) t -> 'a -> unit
  val replace : ('a, 'b) t -> key:'a -> data:'b -> unit
  val iter : f:(key:'a -> data:'b -> unit) -> ('a, 'b) t -> unit
  val fold :
    f:(key:'a -> data:'b -> 'c -> 'c) ->
    ('a, 'b) t -> init:'c -> 'c
  val length : ('a, 'b) t -> int
  (** Forces hash randomization for all tables created afterwards. *)
  val randomize : unit -> unit
  type statistics = Hashtbl.statistics
  val stats : ('a, 'b) t -> statistics
  module type HashedType = Hashtbl.HashedType
  module type SeededHashedType = Hashtbl.SeededHashedType
  (** Output signature of [Make]: tables specialized to one key type. *)
  module type S =
    sig
      type key
      and 'a t
      val create : int -> 'a t
      val clear : 'a t -> unit
      val reset : 'a t -> unit
      val copy : 'a t -> 'a t
      val add : 'a t -> key:key -> data:'a -> unit
      val remove : 'a t -> key -> unit
      val find : 'a t -> key -> 'a
      val find_all : 'a t -> key -> 'a list
      val replace : 'a t -> key:key -> data:'a -> unit
      val mem : 'a t -> key -> bool
      val iter : f:(key:key -> data:'a -> unit) -> 'a t -> unit
      val fold :
        f:(key:key -> data:'a -> 'b -> 'b) ->
        'a t -> init:'b -> 'b
      val length : 'a t -> int
      val stats: 'a t -> statistics
    end
  (** Output signature of [MakeSeeded]: like [S], but [create] also
      accepts the [?random] flag. *)
  module type SeededS =
    sig
      type key
      and 'a t
      val create : ?random:bool -> int -> 'a t
      val clear : 'a t -> unit
      val reset : 'a t -> unit
      val copy : 'a t -> 'a t
      val add : 'a t -> key:key -> data:'a -> unit
      val remove : 'a t -> key -> unit
      val find : 'a t -> key -> 'a
      val find_all : 'a t -> key -> 'a list
      val replace : 'a t -> key:key -> data:'a -> unit
      val mem : 'a t -> key -> bool
      val iter : f:(key:key -> data:'a -> unit) -> 'a t -> unit
      val fold :
        f:(key:key -> data:'a -> 'b -> 'b) ->
        'a t -> init:'b -> 'b
      val length : 'a t -> int
      val stats: 'a t -> statistics
    end
  module Make : functor (H : HashedType) -> S with type key = H.t
  module MakeSeeded (H : SeededHashedType) : SeededS with type key = H.t
  (** Polymorphic hash functions; the [_param] variants bound how much of
      the structure is traversed. *)
  val hash : 'a -> int
  val seeded_hash : int -> 'a -> int
  val hash_param : int -> int -> 'a -> int
  val seeded_hash_param : int -> int -> int -> 'a -> int
end
(** Labelled interface to the standard [Map] module (immutable ordered
    maps produced by the [Make] functor). *)
module Map : sig
  module type OrderedType = Map.OrderedType
  module type S =
    sig
      type key
      and (+'a) t
      val empty : 'a t
      val is_empty: 'a t -> bool
      val mem : key -> 'a t -> bool
      (** [add] replaces any existing binding for [key]. *)
      val add : key:key -> data:'a -> 'a t -> 'a t
      val singleton: key -> 'a -> 'a t
      val remove : key -> 'a t -> 'a t
      val merge: f:(key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t
      (** [compare]/[equal] compare maps as sorted binding sequences,
          using [~cmp] on the data. *)
      val compare: cmp:('a -> 'a -> int) -> 'a t -> 'a t -> int
      val equal: cmp:('a -> 'a -> bool) -> 'a t -> 'a t -> bool
      val iter : f:(key:key -> data:'a -> unit) -> 'a t -> unit
      val fold :
        f:(key:key -> data:'a -> 'b -> 'b) ->
        'a t -> init:'b -> 'b
      val for_all: f:(key -> 'a -> bool) -> 'a t -> bool
      val exists: f:(key -> 'a -> bool) -> 'a t -> bool
      val filter: f:(key -> 'a -> bool) -> 'a t -> 'a t
      val partition: f:(key -> 'a -> bool) -> 'a t -> 'a t * 'a t
      val cardinal: 'a t -> int
      val bindings: 'a t -> (key * 'a) list
      (** Standard [Map] semantics: these raise [Not_found] on an empty
          map. *)
      val min_binding: 'a t -> (key * 'a)
      val max_binding: 'a t -> (key * 'a)
      val choose: 'a t -> (key * 'a)
      val split: key -> 'a t -> 'a t * 'a option * 'a t
      val find : key -> 'a t -> 'a
      val map : f:('a -> 'b) -> 'a t -> 'b t
      val mapi : f:(key -> 'a -> 'b) -> 'a t -> 'b t
    end
  module Make : functor (Ord : OrderedType) -> S with type key = Ord.t
end
(** Labelled interface to the standard [Set] module (immutable ordered
    sets produced by the [Make] functor). *)
module Set : sig
  module type OrderedType = Set.OrderedType
  module type S =
    sig
      type elt
      and t
      val empty : t
      val is_empty : t -> bool
      val mem : elt -> t -> bool
      val add : elt -> t -> t
      val singleton : elt -> t
      val remove : elt -> t -> t
      val union : t -> t -> t
      val inter : t -> t -> t
      val diff : t -> t -> t
      (** Total ordering / structural equality over sets; use these
          rather than polymorphic comparison, which inspects the
          internal tree shape. *)
      val compare : t -> t -> int
      val equal : t -> t -> bool
      val subset : t -> t -> bool
      val iter : f:(elt -> unit) -> t -> unit
      val fold : f:(elt -> 'a -> 'a) -> t -> init:'a -> 'a
      val for_all : f:(elt -> bool) -> t -> bool
      val exists : f:(elt -> bool) -> t -> bool
      val filter : f:(elt -> bool) -> t -> t
      val partition : f:(elt -> bool) -> t -> t * t
      val cardinal : t -> int
      val elements : t -> elt list
      val min_elt : t -> elt
      val max_elt : t -> elt
      val choose : t -> elt
      val split: elt -> t -> t * bool * t
    end
  module Make : functor (Ord : OrderedType) -> S with type elt = Ord.t
end
|
498ab522406553d6e8a6c851647a463c57a9f063af8422f1b5d5080181a96015 | titola/incudine | error.lisp | Copyright ( c ) 2013
;;;
;;; This library is free software; you can redistribute it and/or
;;; modify it under the terms of the GNU Lesser General Public
;;; License as published by the Free Software Foundation; either
;;; version 2.1 of the License, or (at your option) any later version.
;;;
;;; This library is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; Lesser General Public License for more details.
;;;
;;; You should have received a copy of the GNU Lesser General Public
;;; License along with this library; if not, write to the Free Software
;;; Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
(in-package :sndfile)
;;; Root condition type for every error signalled by these bindings.
(define-condition sndfile-error (simple-error) ())
;;; Signalled when an object cannot be allocated; OBJECT-TYPE names the
;;; kind of object whose allocation failed.
(define-condition allocation-error (sndfile-error)
  ((object-type :reader object-type-of :initarg :object-type))
  (:report (lambda (condition stream)
             (cl:format stream "Failed object allocation for ~A."
                        (object-type-of condition)))))
;;; Wraps a numeric error code; the report prints the message looked up
;;; via ERROR-NUMBER.
(define-condition error-generic (sndfile-error)
  ((errno :reader errno :initarg :errno))
  (:report (lambda (condition stream)
             (princ (error-number (errno condition))
                    stream))))
;;; Convenience macros so call sites can signal the conditions above
;;; without spelling out CL:ERROR and the initargs.
(defmacro allocation-error (obj-type)
  `(cl:error 'allocation-error :object-type ,obj-type))
(defmacro error-generic (errno)
  `(cl:error 'error-generic :errno ,errno))
| null | https://raw.githubusercontent.com/titola/incudine/325174a54a540f4daa67bcbb29780073c35b7b80/contrib/cl-sndfile/error.lisp | lisp |
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software | Copyright ( c ) 2013
version 2.1 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
(in-package :sndfile)
(define-condition sndfile-error (simple-error) ())
(define-condition allocation-error (sndfile-error)
((object-type :reader object-type-of :initarg :object-type))
(:report (lambda (condition stream)
(cl:format stream "Failed object allocation for ~A."
(object-type-of condition)))))
(define-condition error-generic (sndfile-error)
((errno :reader errno :initarg :errno))
(:report (lambda (condition stream)
(princ (error-number (errno condition))
stream))))
(defmacro allocation-error (obj-type)
`(cl:error 'allocation-error :object-type ,obj-type))
(defmacro error-generic (errno)
`(cl:error 'error-generic :errno ,errno))
|
c072dec6eebc162bcd3503590e339f5e553fd16528f9454239ab153b47a8b3e6 | willowtreeapps/wombats-api | helpers.clj | (ns wombats.handlers.helpers
(:require [wombats.constants :refer [errors]]
[cemerick.url :refer [url url-encode]]))
(defn wombat-error
  "Throws an error that will be caught by the exception interceptor."
  [{code :code
    details :details
    params :params
    message :message
    field-error :field-error
    :or {code 1
         details {}
         params []
         message nil
         field-error nil}}]
  ;; Fall back to the canned message registered for `code`, then splice
  ;; `params` into it with `format`.
  (let [message (or message
                    (get errors code "Oops, looks like something went wrong."))]
    ;; `:field-error` is only attached to the ex-data when one was given,
    ;; so consumers can distinguish field-level from general errors.
    (throw (ex-info "Wombat Error" (cond-> {:type :wombat-error
                                            :message (->> params
                                                          (into [message])
                                                          (apply format))
                                            :details details
                                            :code code}
                                     (not (nil? field-error))
                                     (merge {:field-error field-error}))))))
(defn- format-link
  "Describes one pagination link: its parsed query map, the page number
  it points at (nil when the URL carries no \"page\" parameter), and the
  raw URL itself."
  [link]
  (let [query-map (:query (url link))
        page-str (get query-map "page")]
    {:query query-map
     :page-number (some-> page-str Integer/parseInt)
     :link link}))
;; Turns the comma-split entries of a Link header into a map keyed by
;; rel name (e.g. {:next {...} :prev {...}}). Each entry looks like
;; `<url>; rel="next"`; entries missing either half are dropped (the
;; `when` yields nil, which `merge` ignores).
(defn- format-links
  [links]
  (apply merge
         (map (fn [link]
                (let [[ln rel] (clojure.string/split link #";")]
                  (when (and ln rel)
                    {(keyword (last (re-find #"rel=\"(.*)\"" rel)))
                     (format-link (last (re-find #"<(.*)>" link)))})))
              links)))
(defn parse-link-headers
  "Parses the link header into a usable data structure.
  Ex: [{:query {}
        :page-number 0
        :link \"http://...\"}]
  Returns nil when the request carries no link header.
  NOTE: This could be put into a cljc file and used on the client as well"
  [context]
  ;; The header arrives as a single comma-separated string; split it and
  ;; hand the pieces to format-links for per-entry parsing.
  (let [link-header (get-in context [:request :headers "link"])]
    (when link-header
      (-> link-header
          (clojure.string/split #",")
          (format-links)))))
(defn- join-pair
  "Renders one query pair as \"key=url-encoded-value\"."
  [k v]
  (format "%s=%s" (name k) (url-encode v)))
(defn- format-query
  "Serialises a query map into a URL query string. A string value yields
  one pair; a collection value yields one pair per element."
  [query]
  (let [pairs (map (fn [[k v]]
                     (if (string? v)
                       (join-pair k v)
                       (map (partial join-pair k) v)))
                   query)]
    (clojure.string/join "&" (flatten pairs))))
(defn format-url
  "Concatenates base and uri with the serialised query string."
  [base uri query]
  (let [query-string (format-query query)]
    (str base uri "?" query-string)))
;; Builds the pagination link set: `first` and `last` are always
;; present; `prev`/`next` are added only when the current page is not at
;; the corresponding boundary. `url-formatter` turns a query map (with
;; :page rewritten) into a full URL string.
(defn- generate-link-headers
  [current-page last-page query url-formatter]
  (cond-> [{:link (url-formatter (assoc query :page "0"))
            :rel "first"}
           {:link (url-formatter (assoc query :page (str last-page)))
            :rel "last"}]
    (> current-page 0)
    (conj {:link (url-formatter (assoc query :page (str (dec current-page))))
           :rel "prev"})
    (< current-page last-page)
    (conj {:link (url-formatter (assoc query :page (str (inc current-page))))
           :rel "next"})))
;; Renders the pagination links as a single Link header value, one
;; `<url>; rel="name"` entry per link, comma separated. URLs are built
;; against the request's Origin header and uri.
(defn- format-link-headers
  [{headers :headers
    uri :uri
    query :query-params} current-page last-page]
  (let [origin (get headers "origin")
        link-headers (generate-link-headers current-page
                                            last-page
                                            query
                                            (partial format-url origin uri))]
    (->> link-headers
         (map (fn [{:keys [link rel]}]
                (str "<" link ">; rel=\"" rel "\"")))
         (clojure.string/join ", "))))
(defn paginate-response
  "Wraps over a response, paginating the data passed to it. If
  the data-pred property is passed, data will be filtered / sorted
  prior to pagination. Sets the paginated page as the response :body
  and attaches a Link header with first/last/prev/next URLs."
  [{request :request
    response :response
    response-data :response-data
    page-number-string :page-number
    per-page :per-page
    data :data
    data-pred :data-pred
    :or {response-data {:status 200}
         data-pred identity}}]
  (let [page-number (Integer/parseInt (or page-number-string "0"))
        formatted-data (vec (data-pred data))
        total-records (count formatted-data)
        ;; Last page index (zero-based). Clamped to 0 so an empty
        ;; collection yields page 0 rather than -1, which previously
        ;; produced a bogus `last` link pointing at page "-1".
        total-pages (-> (/ total-records per-page)
                        (Math/ceil)
                        (int)
                        (dec)
                        (max 0))
        ;; Clamp the window to the data so out-of-range pages return an
        ;; empty page instead of throwing on subvec.
        start-record (Math/min (* page-number per-page) total-records)
        end-record (Math/min (+ start-record per-page) total-records)
        paginated-data (subvec formatted-data start-record end-record)]
    (-> response
        (merge response-data)
        (assoc :body paginated-data)
        (assoc-in [:headers "Link"] (format-link-headers request
                                                         page-number
                                                         total-pages)))))
| null | https://raw.githubusercontent.com/willowtreeapps/wombats-api/738e4cc8d7011998695ec85e663f650be71f60ae/src/wombats/handlers/helpers.clj | clojure | (ns wombats.handlers.helpers
(:require [wombats.constants :refer [errors]]
[cemerick.url :refer [url url-encode]]))
(defn wombat-error
"Throws an error that will be caught by the exception interceptor."
[{code :code
details :details
params :params
message :message
field-error :field-error
:or {code 1
details {}
params []
message nil
field-error nil}}]
(let [message (or message
(get errors code "Oops, looks like something went wrong."))]
(throw (ex-info "Wombat Error" (cond-> {:type :wombat-error
:message (->> params
(into [message])
(apply format))
:details details
:code code}
(not (nil? field-error))
(merge {:field-error field-error}))))))
(defn- format-link
[link]
(let [query (:query (url link))
page-number-string (get query "page")]
{:query query
:page-number (when page-number-string
(Integer/parseInt page-number-string))
:link link}))
(defn- format-links
[links]
(apply merge
(map (fn [link]
(let [[ln rel] (clojure.string/split link #";")]
(when (and ln rel)
{(keyword (last (re-find #"rel=\"(.*)\"" rel)))
(format-link (last (re-find #"<(.*)>" link)))})))
links)))
(defn parse-link-headers
"Parses the link header into a usable data structure.
Ex: [{:query {}
:page-number 0
:link \"http://...\"}]
NOTE: This could be used put into a cljc file and used on the client as well"
[context]
(let [link-header (get-in context [:request :headers "link"])]
(when link-header
(-> link-header
(clojure.string/split #",")
(format-links)))))
(defn- join-pair [k v] (str (name k) "=" (url-encode v)))
(defn- format-query
[query]
(->> query
(map
(fn [[k v]]
(if (string? v)
(join-pair k v)
(map #(join-pair k %) v))))
(flatten)
(clojure.string/join "&")))
(defn format-url
[base uri query]
(str base uri "?" (format-query query)))
(defn- generate-link-headers
[current-page last-page query url-formatter]
(cond-> [{:link (url-formatter (assoc query :page "0"))
:rel "first"}
{:link (url-formatter (assoc query :page (str last-page)))
:rel "last"}]
(> current-page 0)
(conj {:link (url-formatter (assoc query :page (str (dec current-page))))
:rel "prev"})
(< current-page last-page)
(conj {:link (url-formatter (assoc query :page (str (inc current-page))))
:rel "next"})))
(defn- format-link-headers
[{headers :headers
uri :uri
query :query-params} current-page last-page]
(let [origin (get headers "origin")
link-headers (generate-link-headers current-page
last-page
query
(partial format-url origin uri))]
(->> link-headers
(map (fn [{:keys [link rel]}]
(str "<" link ">; rel=\"" rel "\"")))
(clojure.string/join ", "))))
(defn paginate-response
"Wraps over a response, paginating the data passed to it. If
the data-pred propery is passed, data will be filtered / sorted
prior to pagination."
[{request :request
response :response
response-data :response-data
page-number-string :page-number
per-page :per-page
data :data
data-pred :data-pred
:or {response-data {:status 200}
data-pred identity}}]
(let [page-number (Integer/parseInt (or page-number-string "0"))
formatted-data (vec (data-pred data))
total-records (count formatted-data)
total-pages (-> (/ total-records per-page)
(Math/ceil)
(int)
(dec))
start-record (Math/min (* page-number per-page) total-records)
end-record (Math/min (+ start-record per-page) total-records)
paginated-data (subvec formatted-data start-record end-record)]
(-> response
(merge response-data)
(assoc :body paginated-data)
(assoc-in [:headers "Link"] (format-link-headers request
page-number
total-pages)))))
| |
813edc305e1dffa711f431454072e79c1af13089588963be5f20097abe1cc23b | deepmarker/ocaml-fastws | leak.ml | open Core
open Async
(* let uri = Uri.make ~scheme:"https" ~host:"echo.websocket.org" () *)
(* let uri = Uri.make ~scheme:"http" ~host:"demos.kaazing.com" ~path:"echo" () *)
let url = Uri.make ~scheme:"https" ~host:"ftx.com" ~path:"ws/" ()
(* This does not leak. *)
(* let rec inner = function
 *   | n when n < 0 -> invalid_arg "inner"
 *   | 0 -> Deferred.unit
 *   | n ->
 *     Fastws_async.connect uri >>= function
 *     | Error _ -> failwith "fastws error"
 *     | Ok { r; w; _ } ->
 *       Pipe.close_read r ;
 *       Pipe.close w ;
 *       Logs_async.app (fun m -> m "inner %d" n) >>= fun _ ->
 *       Clock_ns.after (Time_ns.Span.of_int_sec 3) >>= fun () ->
 *       inner (pred n) *)
(* [inner n] performs [n] connect/use/close cycles against the
   configured [url]: open a TLS connection, wrap it in a websocket via
   [Fastws_async.with_connection], log the iteration, then wait 3s and
   recurse.  Raises [Invalid_argument] on negative [n]. *)
let rec inner = function
  | 0 -> Deferred.unit
  | n when n > 0 ->
      Async_uri.with_connection url (fun {r; w; _} ->
          Fastws_async.with_connection url r w Fastws_async.of_frame_s
            Fastws_async.to_frame_s (fun _r _w ->
              Logs_async.app (fun m -> m "inner %d" n) ) )
      >>= fun _ ->
      Clock_ns.after (Time_ns.Span.of_int_sec 3) >>= fun () -> inner (pred n)
  | _ -> invalid_arg "inner"
(* This does not leak. *)
(* let rec inner = function
 *   | n when n < 0 -> invalid_arg "inner"
 *   | 0 -> Deferred.unit
 *   | n ->
 *     Fastws_async_raw.connect uri >>= function
 *     | Error _ -> failwith "fastws error"
 *     | Ok (r, w) ->
 *       Pipe.close_read r ;
 *       Pipe.close w ;
 *       Logs_async.app (fun m -> m "inner %d" n) >>= fun _ ->
 *       Clock_ns.after (Time_ns.Span.of_int_sec 3) >>= fun () ->
 *       inner (pred n) *)
(* Command-line entry point: [leak N] runs [inner N], i.e. N
   connect/close cycles; the log level is configurable via the flags
   added by [Logs_async_reporter.set_level_via_param]. *)
let cmd =
  Command.async ~summary:"Leak test"
    (let open Command.Let_syntax in
    [%map_open
      let () = Logs_async_reporter.set_level_via_param []
      and n = anon ("n" %: int) in
      fun () ->
        Logs.set_reporter (Logs_async_reporter.reporter ()) ;
        inner n])

let () = Command_unix.run cmd
| null | https://raw.githubusercontent.com/deepmarker/ocaml-fastws/b8ff3200dd3d516a905e531177ca5087ab0053ab/bin/leak.ml | ocaml | let uri = Uri.make ~scheme:"https" ~host:"echo.websocket.org" ()
let uri = Uri.make ~scheme:"http" ~host:"demos.kaazing.com" ~path:"echo" ()
This does not leak.
This does not leak. | open Core
open Async
let url = Uri.make ~scheme:"https" ~host:"ftx.com" ~path:"ws/" ()
let rec inner = function
* | n when n < 0 - > invalid_arg " inner "
* | 0 - > Deferred.unit
* | n - >
* Fastws_async.connect > > = function
* | Error _ - > failwith " error "
* | Ok { r ; w ; _ } - >
* Pipe.close_read r ;
* ;
* Logs_async.app ( fun m - > m " inner % d " n ) > > = fun _ - >
* ( Time_ns . Span.of_int_sec 3 ) > > = fun ( ) - >
* inner ( pred n )
* | n when n < 0 -> invalid_arg "inner"
* | 0 -> Deferred.unit
* | n ->
* Fastws_async.connect uri >>= function
* | Error _ -> failwith "fastws error"
* | Ok { r; w; _ } ->
* Pipe.close_read r ;
* Pipe.close w ;
* Logs_async.app (fun m -> m "inner %d" n) >>= fun _ ->
* Clock_ns.after (Time_ns.Span.of_int_sec 3) >>= fun () ->
* inner (pred n) *)
let rec inner = function
| 0 -> Deferred.unit
| n when n > 0 ->
Async_uri.with_connection url (fun {r; w; _} ->
Fastws_async.with_connection url r w Fastws_async.of_frame_s
Fastws_async.to_frame_s (fun _r _w ->
Logs_async.app (fun m -> m "inner %d" n) ) )
>>= fun _ ->
Clock_ns.after (Time_ns.Span.of_int_sec 3) >>= fun () -> inner (pred n)
| _ -> invalid_arg "inner"
let rec inner = function
* | n when n < 0 - > invalid_arg " inner "
* | 0 - > Deferred.unit
* | n - >
* > > = function
* | Error _ - > failwith " error "
* | Ok ( r , w ) - >
* Pipe.close_read r ;
* ;
* Logs_async.app ( fun m - > m " inner % d " n ) > > = fun _ - >
* ( Time_ns . Span.of_int_sec 3 ) > > = fun ( ) - >
* inner ( pred n )
* | n when n < 0 -> invalid_arg "inner"
* | 0 -> Deferred.unit
* | n ->
* Fastws_async_raw.connect uri >>= function
* | Error _ -> failwith "fastws error"
* | Ok (r, w) ->
* Pipe.close_read r ;
* Pipe.close w ;
* Logs_async.app (fun m -> m "inner %d" n) >>= fun _ ->
* Clock_ns.after (Time_ns.Span.of_int_sec 3) >>= fun () ->
* inner (pred n) *)
let cmd =
Command.async ~summary:"Leak test"
(let open Command.Let_syntax in
[%map_open
let () = Logs_async_reporter.set_level_via_param []
and n = anon ("n" %: int) in
fun () ->
Logs.set_reporter (Logs_async_reporter.reporter ()) ;
inner n])
let () = Command_unix.run cmd
|
85294f3fa9b77390d76d6783ed115adcf28cb59dcc2e6d741879127ddba69ccc | gfngfn/otfed | decodeAdvancedTableScheme.ml |
open Basic
open DecodeBasic
open DecodeOperation.Open
include GeneralTable(struct type t = unit end)
(* [make core ~offset ~length] wraps the given byte range of [core] as a
   table handle; the per-scheme payload is [unit] (no extra state). *)
let make core ~offset ~length =
  make_scheme core offset length ()
(* Handle onto one ScriptRecord: the offset of its Script table plus the
   table-wide FeatureList/LookupList offsets needed to resolve its
   language systems later. *)
type script = {
  script_source : t;
  script_tag : string;
  script_offset_Script : offset;
  script_offset_FeatureList : offset;
  script_offset_LookupList : offset;
}

(* Handle onto one LangSys table; [langsys_tag] is "DFLT" for a script's
   default language system (see [langsyses]). *)
type langsys = {
  langsys_source : t;
  langsys_tag : string;
  langsys_offset_LangSys : offset;
  langsys_offset_FeatureList : offset;
  langsys_offset_LookupList : offset;
}

(* Handle onto one Feature table together with the LookupList offset
   used when decoding its subtables. *)
type feature = {
  feature_source : t;
  feature_tag : string;
  feature_offset_Feature : offset;
  feature_offset_LookupList : offset;
}
(* Tag accessors for scripts, language systems, and features. *)
let get_script_tag { script_tag; _ } = script_tag

let get_langsys_tag { langsys_tag; _ } = langsys_tag

let get_feature_tag { feature_tag; _ } = feature_tag
(* Decodes one tag/offset record: a 4-byte tag followed by an offset
   field resolved relative to [offset_ScriptList] (the enclosing list's
   start). *)
let d_tag_and_offset_record (offset_ScriptList : offset) : (string * offset) decoder =
  let open DecodeOperation in
  d_bytes 4 >>= fun tag ->
  d_offset offset_ScriptList >>= fun offset ->
  return (tag, offset)
(* Decodes a count-prefixed list of tag/offset records, with offsets
   resolved relative to the list's own starting position. *)
let d_tag_and_offset_list : ((string * int) list) decoder =
  let open DecodeOperation in
  current >>= fun offset_ScriptList ->
  d_list (d_tag_and_offset_record offset_ScriptList)
(* [scripts gxxx] decodes the table header and its ScriptList, returning
   one [script] per ScriptRecord.  Fails with [UnknownTableVersion]
   unless the 32-bit version field is exactly 1.0 (0x00010000). *)
let scripts (gxxx : t) : (script list) ok =
  let offset_Gxxx = gxxx.offset in
  let dec =
    let open DecodeOperation in
    d_uint32 >>= fun version ->
    if version <> !%% 0x00010000L then
      err @@ Error.UnknownTableVersion(version)
    else
      (* The ScriptList is fetched through its offset; the FeatureList
         and LookupList offsets are stashed in every [script] so later
         stages can resolve them without re-reading the header. *)
      d_fetch offset_Gxxx d_tag_and_offset_list >>= fun scriptList ->
      d_offset offset_Gxxx >>= fun offset_FeatureList ->
      d_offset offset_Gxxx >>= fun offset_LookupList ->
      let scripts =
        scriptList |> List.map (fun (scriptTag, offset_Script) ->
          {
            script_source = gxxx;
            script_tag = scriptTag;
            script_offset_Script = offset_Script;
            script_offset_FeatureList = offset_FeatureList;
            script_offset_LookupList = offset_LookupList;
          }
        )
      in
      return scripts
  in
  dec |> DecodeOperation.run gxxx.core offset_Gxxx
(* [langsyses script] decodes the Script table behind [script],
   returning the optional default language system (tagged "DFLT")
   together with the explicitly tagged LangSys entries. *)
let langsyses (script : script) : (langsys option * langsys list) ok =
  let gxxx = script.script_source in
  let offset_Script = script.script_offset_Script in
  let offset_FeatureList = script.script_offset_FeatureList in
  let offset_LookupList = script.script_offset_LookupList in
  let dec =
    let open DecodeOperation in
    (* [d_offset_opt] yields [None] when no DefaultLangSys table is
       present (presumably a NULL offset -- confirm in DecodeOperation). *)
    d_offset_opt offset_Script >>= fun offset_DefaultLangSys_opt ->
    d_list (d_tag_and_offset_record offset_Script) >>= fun langSysList ->
    let default_langsys_opt =
      offset_DefaultLangSys_opt |> Option.map (fun offset_DefaultLangSys ->
        {
          langsys_source = gxxx;
          langsys_tag = "DFLT";
          langsys_offset_LangSys = offset_DefaultLangSys;
          langsys_offset_FeatureList = offset_FeatureList;
          langsys_offset_LookupList = offset_LookupList;
        }
      )
    in
    let langsyses =
      langSysList |> List.map (fun (langSysTag, offset_LangSys) ->
        {
          langsys_source = gxxx;
          langsys_tag = langSysTag;
          langsys_offset_LangSys = offset_LangSys;
          langsys_offset_FeatureList = offset_FeatureList;
          langsys_offset_LookupList = offset_LookupList;
        }
      )
    in
    return (default_langsys_opt, langsyses)
  in
  dec |> DecodeOperation.run gxxx.core offset_Script
(* Set of feature indices referenced by a LangSys table. *)
module FeatureIndexSet = Set.Make(Int)

(* [features langsys] decodes the LangSys table behind [langsys] and
   returns the optional required feature (absent when the index is
   0xFFFF) together with the features selected by the LangSys's feature
   indices. *)
let features (langsys : langsys) : (feature option * feature list) ok =
  let gxxx = langsys.langsys_source in
  let offset_LangSys = langsys.langsys_offset_LangSys in
  let offset_FeatureList = langsys.langsys_offset_FeatureList in
  let offset_LookupList = langsys.langsys_offset_LookupList in
  let decLangSys =
    let open DecodeOperation in
    (* The position is set to the beginning of a LangSys table [page 134]. *)
    d_uint16 >>= fun lookupOrder ->
    if lookupOrder <> 0 then
      err @@ Error.UnknownLookupOrder(lookupOrder)
    else
      d_uint16 >>= fun requiredFeatureIndex ->
      d_list d_uint16 >>= fun featureIndices ->
      return (requiredFeatureIndex, FeatureIndexSet.of_list featureIndices)
  in
  let decFeature (requiredFeatureIndex, featureIndexSet) =
    let open DecodeOperation in
    (* Keep only the FeatureRecords whose index the LangSys referenced. *)
    d_list_filtered
      (d_tag_and_offset_record offset_FeatureList)
      (fun i -> FeatureIndexSet.mem i featureIndexSet) >>= fun featureList ->
    let features =
      featureList |> List.map (fun (featureTag, offset_Feature) ->
        {
          feature_source = gxxx;
          feature_tag = featureTag;
          feature_offset_Feature = offset_Feature;
          feature_offset_LookupList = offset_LookupList;
        }
      )
    in
    begin
      match requiredFeatureIndex with
      | 0xFFFF ->
          (* 0xFFFF is the sentinel for "no required feature". *)
          return None
      | _ ->
          let dec =
            d_tag_and_offset_record offset_FeatureList >>= fun pair ->
            return @@ Some(pair)
          in
          pick (offset_FeatureList + 6 * requiredFeatureIndex) dec
          (* 6 is the size of FeatureRecord [page 135]. *)
    end >>= fun tag_and_offset_opt ->
    let required_feature_opt =
      tag_and_offset_opt |> Option.map (fun (tag, offset) ->
        {
          feature_source = gxxx;
          feature_tag = tag;
          feature_offset_Feature = offset;
          feature_offset_LookupList = offset_LookupList;
        }
      )
    in
    return (required_feature_opt, features)
  in
  let open ResultMonad in
  decLangSys |> DecodeOperation.run gxxx.core offset_LangSys >>= fun pair ->
  (decFeature pair) |> DecodeOperation.run gxxx.core offset_FeatureList
(* Set of lookup-list indices referenced by a Feature table. *)
module LookupListIndexSet = Set.Make(Int)

(* [subtables_scheme lookup feature] decodes, with the caller-supplied
   [lookup] decoder, every Lookup table that [feature]'s Feature table
   references through the LookupList. *)
let subtables_scheme : 'a. 'a decoder -> feature -> ('a list) ok =
fun lookup feature ->
  let gxxx = feature.feature_source in
  let offset_Feature = feature.feature_offset_Feature in
  let offset_LookupList = feature.feature_offset_LookupList in
  let decFeature =
    let open DecodeOperation in
    (* The position is set to the beginning of a Feature table. *)
    d_uint16 >>= fun _featureParams ->
    d_list d_uint16 >>= fun lookupListIndexList ->
    return @@ LookupListIndexSet.of_list lookupListIndexList
  in
  let decLookup lookupListIndexSet =
    let open DecodeOperation in
    (* Resolve only the LookupList entries this feature references, then
       run [lookup] at each resolved offset. *)
    d_list_filtered
      (d_offset offset_LookupList)
      (fun i -> LookupListIndexSet.mem i lookupListIndexSet) >>= fun offsets ->
    pick_each offsets lookup
  in
  let open ResultMonad in
  decFeature |> DecodeOperation.run gxxx.core offset_Feature >>= fun lookupListIndexSet ->
  (decLookup lookupListIndexSet) |> DecodeOperation.run gxxx.core offset_LookupList
| null | https://raw.githubusercontent.com/gfngfn/otfed/aa2624d29f2fea934c65308816fb6788f3bd818a/src/decodeAdvancedTableScheme.ml | ocaml | The position is set to the beginning of a Feature table. |
open Basic
open DecodeBasic
open DecodeOperation.Open
include GeneralTable(struct type t = unit end)
let make core ~offset ~length =
make_scheme core offset length ()
type script = {
script_source : t;
script_tag : string;
script_offset_Script : offset;
script_offset_FeatureList : offset;
script_offset_LookupList : offset;
}
type langsys = {
langsys_source : t;
langsys_tag : string;
langsys_offset_LangSys : offset;
langsys_offset_FeatureList : offset;
langsys_offset_LookupList : offset;
}
type feature = {
feature_source : t;
feature_tag : string;
feature_offset_Feature : offset;
feature_offset_LookupList : offset;
}
let get_script_tag script =
script.script_tag
let get_langsys_tag langsys =
langsys.langsys_tag
let get_feature_tag feature =
feature.feature_tag
let d_tag_and_offset_record (offset_ScriptList : offset) : (string * offset) decoder =
let open DecodeOperation in
d_bytes 4 >>= fun tag ->
d_offset offset_ScriptList >>= fun offset ->
return (tag, offset)
let d_tag_and_offset_list : ((string * int) list) decoder =
let open DecodeOperation in
current >>= fun offset_ScriptList ->
d_list (d_tag_and_offset_record offset_ScriptList)
let scripts (gxxx : t) : (script list) ok =
let offset_Gxxx = gxxx.offset in
let dec =
let open DecodeOperation in
d_uint32 >>= fun version ->
if version <> !%% 0x00010000L then
err @@ Error.UnknownTableVersion(version)
else
d_fetch offset_Gxxx d_tag_and_offset_list >>= fun scriptList ->
d_offset offset_Gxxx >>= fun offset_FeatureList ->
d_offset offset_Gxxx >>= fun offset_LookupList ->
let scripts =
scriptList |> List.map (fun (scriptTag, offset_Script) ->
{
script_source = gxxx;
script_tag = scriptTag;
script_offset_Script = offset_Script;
script_offset_FeatureList = offset_FeatureList;
script_offset_LookupList = offset_LookupList;
}
)
in
return scripts
in
dec |> DecodeOperation.run gxxx.core offset_Gxxx
let langsyses (script : script) : (langsys option * langsys list) ok =
let gxxx = script.script_source in
let offset_Script = script.script_offset_Script in
let offset_FeatureList = script.script_offset_FeatureList in
let offset_LookupList = script.script_offset_LookupList in
let dec =
let open DecodeOperation in
d_offset_opt offset_Script >>= fun offset_DefaultLangSys_opt ->
d_list (d_tag_and_offset_record offset_Script) >>= fun langSysList ->
let default_langsys_opt =
offset_DefaultLangSys_opt |> Option.map (fun offset_DefaultLangSys ->
{
langsys_source = gxxx;
langsys_tag = "DFLT";
langsys_offset_LangSys = offset_DefaultLangSys;
langsys_offset_FeatureList = offset_FeatureList;
langsys_offset_LookupList = offset_LookupList;
}
)
in
let langsyses =
langSysList |> List.map (fun (langSysTag, offset_LangSys) ->
{
langsys_source = gxxx;
langsys_tag = langSysTag;
langsys_offset_LangSys = offset_LangSys;
langsys_offset_FeatureList = offset_FeatureList;
langsys_offset_LookupList = offset_LookupList;
}
)
in
return (default_langsys_opt, langsyses)
in
dec |> DecodeOperation.run gxxx.core offset_Script
module FeatureIndexSet = Set.Make(Int)
let features (langsys : langsys) : (feature option * feature list) ok =
let gxxx = langsys.langsys_source in
let offset_LangSys = langsys.langsys_offset_LangSys in
let offset_FeatureList = langsys.langsys_offset_FeatureList in
let offset_LookupList = langsys.langsys_offset_LookupList in
let decLangSys =
let open DecodeOperation in
The position is set to the beginning of a LangSys table [ page 134 ] .
d_uint16 >>= fun lookupOrder ->
if lookupOrder <> 0 then
err @@ Error.UnknownLookupOrder(lookupOrder)
else
d_uint16 >>= fun requiredFeatureIndex ->
d_list d_uint16 >>= fun featureIndices ->
return (requiredFeatureIndex, FeatureIndexSet.of_list featureIndices)
in
let decFeature (requiredFeatureIndex, featureIndexSet) =
let open DecodeOperation in
d_list_filtered
(d_tag_and_offset_record offset_FeatureList)
(fun i -> FeatureIndexSet.mem i featureIndexSet) >>= fun featureList ->
let features =
featureList |> List.map (fun (featureTag, offset_Feature) ->
{
feature_source = gxxx;
feature_tag = featureTag;
feature_offset_Feature = offset_Feature;
feature_offset_LookupList = offset_LookupList;
}
)
in
begin
match requiredFeatureIndex with
| 0xFFFF ->
return None
| _ ->
let dec =
d_tag_and_offset_record offset_FeatureList >>= fun pair ->
return @@ Some(pair)
in
pick (offset_FeatureList + 6 * requiredFeatureIndex) dec
6 is the size of FeatureRecord [ page 135 ] .
end >>= fun tag_and_offset_opt ->
let required_feature_opt =
tag_and_offset_opt |> Option.map (fun (tag, offset) ->
{
feature_source = gxxx;
feature_tag = tag;
feature_offset_Feature = offset;
feature_offset_LookupList = offset_LookupList;
}
)
in
return (required_feature_opt, features)
in
let open ResultMonad in
decLangSys |> DecodeOperation.run gxxx.core offset_LangSys >>= fun pair ->
(decFeature pair) |> DecodeOperation.run gxxx.core offset_FeatureList
module LookupListIndexSet = Set.Make(Int)
let subtables_scheme : 'a. 'a decoder -> feature -> ('a list) ok =
fun lookup feature ->
let gxxx = feature.feature_source in
let offset_Feature = feature.feature_offset_Feature in
let offset_LookupList = feature.feature_offset_LookupList in
let decFeature =
let open DecodeOperation in
d_uint16 >>= fun _featureParams ->
d_list d_uint16 >>= fun lookupListIndexList ->
return @@ LookupListIndexSet.of_list lookupListIndexList
in
let decLookup lookupListIndexSet =
let open DecodeOperation in
d_list_filtered
(d_offset offset_LookupList)
(fun i -> LookupListIndexSet.mem i lookupListIndexSet) >>= fun offsets ->
pick_each offsets lookup
in
let open ResultMonad in
decFeature |> DecodeOperation.run gxxx.core offset_Feature >>= fun lookupListIndexSet ->
(decLookup lookupListIndexSet) |> DecodeOperation.run gxxx.core offset_LookupList
|
38635ce78c1b1fde3f3e5f351b47901def4f22d9c46450bb8394e2d30bce6737 | joedevivo/hpack | hpack_integer_tests.erl | -module(hpack_integer_tests).
-include_lib("eunit/include/eunit.hrl").
-compile([export_all]).
%% A zero value fits entirely in the prefix; any trailing octets pass
%% through untouched as the rest of the buffer.
decode_zero_test() ->
    ?assertEqual({0, <<>>}, hpack_integer:decode(<<0:5>>, 5)),
    ?assertEqual({0, <<1>>}, hpack_integer:decode(<<0:5,1:8>>, 5)),
    ok.
%% 62 is below the 6-bit prefix maximum (63), so it decodes from the
%% prefix alone.
decode_sixtytwo_test() ->
    ?assertEqual({62, <<>>}, hpack_integer:decode(<<62:6>>, 6)),
    ?assertEqual({62, <<1>>}, hpack_integer:decode(<<62:6,1:8>>, 6)),
    ok.
%% 64 exceeds the 6-bit prefix maximum, so it is represented as the
%% filled prefix (63) plus a continuation octet carrying the remainder
%% (RFC 7541, section 5.1).
decode_sixtyfour_test() ->
    ?assertEqual({64, <<>>}, hpack_integer:decode(<<63:6,1:8>>, 6)),
    ?assertEqual({64, <<1>>}, hpack_integer:decode(<<63:6,1:8,1:8>>, 6)),
    ok.
%% Encoding mirror of the decode cases: values below the prefix max use
%% the prefix only; values at or above it fill the prefix and append a
%% continuation octet.
encode_integer_test() ->
    ?assertEqual(<<62:6>>, hpack_integer:encode(62,6)),
    ?assertEqual(<<63:6,1:8>>, hpack_integer:encode(64,6)),
    ?assertEqual(<<63:6,0:8>>, hpack_integer:encode(63,6)),
    ok.
%% When the value equals the prefix maximum (2^N - 1), the prefix is
%% filled and a zero continuation octet must follow, for every prefix
%% width 1..8.
encode_prefix_max_equals_value_test() ->
    ?assertEqual(<< 1:1,0:8>>, hpack_integer:encode(  1,1)),
    ?assertEqual(<< 3:2,0:8>>, hpack_integer:encode(  3,2)),
    ?assertEqual(<< 7:3,0:8>>, hpack_integer:encode(  7,3)),
    ?assertEqual(<< 15:4,0:8>>, hpack_integer:encode( 15,4)),
    ?assertEqual(<< 31:5,0:8>>, hpack_integer:encode( 31,5)),
    ?assertEqual(<< 63:6,0:8>>, hpack_integer:encode( 63,6)),
    ?assertEqual(<<127:7,0:8>>, hpack_integer:encode(127,7)),
    ?assertEqual(<<255,0>>, hpack_integer:encode(255,8)),
    ok.
%% Zero always fits in the prefix, whatever its width, so no
%% continuation octet is emitted.
encode_zero_test() ->
    ?assertEqual(<<0:1>>, hpack_integer:encode(0,1)),
    ?assertEqual(<<0:2>>, hpack_integer:encode(0,2)),
    ?assertEqual(<<0:3>>, hpack_integer:encode(0,3)),
    ?assertEqual(<<0:4>>, hpack_integer:encode(0,4)),
    ?assertEqual(<<0:5>>, hpack_integer:encode(0,5)),
    ?assertEqual(<<0:6>>, hpack_integer:encode(0,6)),
    ?assertEqual(<<0:7>>, hpack_integer:encode(0,7)),
    ?assertEqual(<<0>>, hpack_integer:encode(0,8)),
    ok.
%% Multi-octet case with a 7-bit prefix: 267 = 127 (filled prefix)
%% + 140, and the remainder 140 is emitted in 7-bit groups, LSB first,
%% with the high bit as the continuation flag (140, then 1).
mplus7_test() ->
    ?assertEqual(<<127:7,140,1>>, hpack_integer:encode(267, 7)),
    ?assertEqual({267, <<>>}, hpack_integer:decode(<<127:7,140,1>>, 7)),
    ok.
| null | https://raw.githubusercontent.com/joedevivo/hpack/d61d06bf156b6c9517b3a5c3e571b87ec81d7f5d/test/hpack_integer_tests.erl | erlang | -module(hpack_integer_tests).
-include_lib("eunit/include/eunit.hrl").
-compile([export_all]).
decode_zero_test() ->
?assertEqual({0, <<>>}, hpack_integer:decode(<<0:5>>, 5)),
?assertEqual({0, <<1>>}, hpack_integer:decode(<<0:5,1:8>>, 5)),
ok.
decode_sixtytwo_test() ->
?assertEqual({62, <<>>}, hpack_integer:decode(<<62:6>>, 6)),
?assertEqual({62, <<1>>}, hpack_integer:decode(<<62:6,1:8>>, 6)),
ok.
decode_sixtyfour_test() ->
?assertEqual({64, <<>>}, hpack_integer:decode(<<63:6,1:8>>, 6)),
?assertEqual({64, <<1>>}, hpack_integer:decode(<<63:6,1:8,1:8>>, 6)),
ok.
encode_integer_test() ->
?assertEqual(<<62:6>>, hpack_integer:encode(62,6)),
?assertEqual(<<63:6,1:8>>, hpack_integer:encode(64,6)),
?assertEqual(<<63:6,0:8>>, hpack_integer:encode(63,6)),
ok.
encode_prefix_max_equals_value_test() ->
?assertEqual(<< 1:1,0:8>>, hpack_integer:encode( 1,1)),
?assertEqual(<< 3:2,0:8>>, hpack_integer:encode( 3,2)),
?assertEqual(<< 7:3,0:8>>, hpack_integer:encode( 7,3)),
?assertEqual(<< 15:4,0:8>>, hpack_integer:encode( 15,4)),
?assertEqual(<< 31:5,0:8>>, hpack_integer:encode( 31,5)),
?assertEqual(<< 63:6,0:8>>, hpack_integer:encode( 63,6)),
?assertEqual(<<127:7,0:8>>, hpack_integer:encode(127,7)),
?assertEqual(<<255,0>>, hpack_integer:encode(255,8)),
ok.
encode_zero_test() ->
?assertEqual(<<0:1>>, hpack_integer:encode(0,1)),
?assertEqual(<<0:2>>, hpack_integer:encode(0,2)),
?assertEqual(<<0:3>>, hpack_integer:encode(0,3)),
?assertEqual(<<0:4>>, hpack_integer:encode(0,4)),
?assertEqual(<<0:5>>, hpack_integer:encode(0,5)),
?assertEqual(<<0:6>>, hpack_integer:encode(0,6)),
?assertEqual(<<0:7>>, hpack_integer:encode(0,7)),
?assertEqual(<<0>>, hpack_integer:encode(0,8)),
ok.
mplus7_test() ->
?assertEqual(<<127:7,140,1>>, hpack_integer:encode(267, 7)),
?assertEqual({267, <<>>}, hpack_integer:decode(<<127:7,140,1>>, 7)),
ok.
| |
0c77eb756284a6be742bb2f50f7b692634a3dacdbe3142420cb3cdf46716d4e2 | input-output-hk/cardano-ledger | Constants.hs | # LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
module Test.Cardano.Ledger.Shelley.Generator.Constants (
Constants (..),
defaultConstants,
)
where
import Cardano.Ledger.BaseTypes (Version, natVersion)
import Cardano.Ledger.Coin (Coin (..))
import Data.Word (Word64)
data Constants = Constants
{ minNumGenInputs :: Int
-- ^ minimal number of transaction inputs to select
, maxNumGenInputs :: Int
-- ^ maximal number of transaction inputs to select
, frequencyRegKeyCert :: Int
-- ^ Relative frequency of generated credential registration certificates
, frequencyRegPoolCert :: Int
-- ^ Relative frequency of generated pool registration certificates
, frequencyDelegationCert :: Int
-- ^ Relative frequency of generated delegation certificates
, frequencyGenesisDelegationCert :: Int
-- ^ Relative frequency of generated genesis delegation certificates
, frequencyDeRegKeyCert :: Int
-- ^ Relative frequency of generated credential de-registration certificates
, frequencyRetirePoolCert :: Int
-- ^ Relative frequency of generated pool retirement certificates
, frequencyMIRCert :: Int
^ Relative frequency of generated MIR certificates
, frequencyScriptCredReg :: Int
-- ^ Relative frequency of script credentials in credential registration
-- certificates
, frequencyKeyCredReg :: Int
-- ^ Relative frequency of key credentials in credential registration
-- certificates
, frequencyScriptCredDeReg :: Int
-- ^ Relative frequency of script credentials in credential de-registration
-- certificates
, frequencyKeyCredDeReg :: Int
-- ^ Relative frequency of key credentials in credential de-registration
-- certificates
, frequencyScriptCredDelegation :: Int
-- ^ Relative frequency of script credentials in credential delegation
-- certificates
, frequencyKeyCredDelegation :: Int
-- ^ Relative frequency of key credentials in credential delegation
-- certificates
, frequencyTxUpdates :: Int
^ Relative frequency of Prototol / Application Updates in a transaction
, frequencyTxWithMetadata :: Int
^ Relative frequency of Metadata in a transaction
, minGenesisUTxOouts :: Int
-- ^ minimal number of genesis UTxO outputs
, maxGenesisUTxOouts :: Int
-- ^ maximal number of genesis UTxO outputs
, maxCertsPerTx :: Word64
-- ^ maximal number of certificates per transaction
, maxTxsPerBlock :: Word64
^ maximal number of Txs per block
, maxNumKeyPairs :: Word64
^ maximal numbers of generated
, minGenesisOutputVal :: Integer
-- ^ minimal coin value for generated genesis outputs
, maxGenesisOutputVal :: Integer
-- ^ maximal coin value for generated genesis outputs
, numBaseScripts :: Int
-- ^ Number of base scripts from which multi sig scripts are built.
, numSimpleScripts :: Int
-- ^ Number of simple scripts which appear in the choices, the remainder are compound (MofN, All, Any, etc.) scripts
, frequencyNoWithdrawals :: Int
-- ^ Relative frequency that a transaction does not include any reward withdrawals
, frequencyAFewWithdrawals :: Int
-- ^ Relative frequency that a transaction includes a small number of
-- reward withdrawals, bounded by 'maxAFewWithdrawals'.
, maxAFewWithdrawals :: Int
-- ^ Maximum number of reward withdrawals that counts as a small number.
, frequencyPotentiallyManyWithdrawals :: Int
-- ^ Relative frequency that a transaction includes any positive number of
-- reward withdrawals
, minSlotTrace :: Int
-- ^ Minimal slot for CHAIN trace generation.
, maxSlotTrace :: Int
^ Maximal slot for CHAIN trace generation .
, frequencyLowMaxEpoch :: Word64
^ Lower bound of the MaxEpoch protocol parameter
, maxMinFeeA :: Coin
, maxMinFeeB :: Coin
, numCoreNodes :: Word64
, minTreasury :: Integer
, maxTreasury :: Integer
, minReserves :: Integer
, maxReserves :: Integer
, minMajorPV :: Version
, maxMajorPV :: Version
, genTxStableUtxoSize :: Int
^ When generating Tx , we want the size to fluctuate around this point . If
-- it gets too small, we can't balance the fee, too large it gets too complicated.
, genTxUtxoIncrement :: Int
^ If we need to grow the when generating a Tx , how much should it grow by .
}
deriving (Show)
defaultConstants :: Constants
defaultConstants =
Constants
{ minNumGenInputs = 1
, maxNumGenInputs = 5
, frequencyRegKeyCert = 2
, frequencyRegPoolCert = 2
, frequencyDelegationCert = 3
, frequencyGenesisDelegationCert = 1
, frequencyDeRegKeyCert = 1
, frequencyRetirePoolCert = 1
, frequencyMIRCert = 1
, frequencyScriptCredReg = 1
, frequencyKeyCredReg = 2
, frequencyScriptCredDeReg = 1
, frequencyKeyCredDeReg = 2
, frequencyScriptCredDelegation = 1
, frequencyKeyCredDelegation = 2
, frequencyTxUpdates = 10
, frequencyTxWithMetadata = 10
, minGenesisUTxOouts = 10
, maxGenesisUTxOouts = 100
, maxCertsPerTx = 3
, maxTxsPerBlock = 10
, maxNumKeyPairs = 150
, minGenesisOutputVal = 1000000
, maxGenesisOutputVal = 100000000
, numBaseScripts = 3
, numSimpleScripts = 20
, frequencyNoWithdrawals = 75
, frequencyAFewWithdrawals = 20
, maxAFewWithdrawals = 10
, frequencyPotentiallyManyWithdrawals = 5
, minSlotTrace = 1000
, maxSlotTrace = 5000
, frequencyLowMaxEpoch = 200
, maxMinFeeA = Coin 1000
, maxMinFeeB = Coin 3
, numCoreNodes = 7
, minTreasury = 1000000
, maxTreasury = 10000000
, minReserves = 1000000
, maxReserves = 10000000
, minMajorPV = natVersion @2
, maxMajorPV = maxBound
, genTxStableUtxoSize = 100
, genTxUtxoIncrement = 3
}
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger/1c2f8aab38d7e22166209aba232e39b6aa5a55b2/eras/shelley/test-suite/src/Test/Cardano/Ledger/Shelley/Generator/Constants.hs | haskell | ^ minimal number of transaction inputs to select
^ maximal number of transaction inputs to select
^ Relative frequency of generated credential registration certificates
^ Relative frequency of generated pool registration certificates
^ Relative frequency of generated delegation certificates
^ Relative frequency of generated genesis delegation certificates
^ Relative frequency of generated credential de-registration certificates
^ Relative frequency of generated pool retirement certificates
^ Relative frequency of script credentials in credential registration
certificates
^ Relative frequency of key credentials in credential registration
certificates
^ Relative frequency of script credentials in credential de-registration
certificates
^ Relative frequency of key credentials in credential de-registration
certificates
^ Relative frequency of script credentials in credential delegation
certificates
^ Relative frequency of key credentials in credential delegation
certificates
^ minimal number of genesis UTxO outputs
^ maximal number of genesis UTxO outputs
^ maximal number of certificates per transaction
^ minimal coin value for generated genesis outputs
^ maximal coin value for generated genesis outputs
^ Number of base scripts from which multi sig scripts are built.
^ Number of simple scripts which appear in the choices, the remainder are compound (MofN, All, Any, etc.) scripts
^ Relative frequency that a transaction does not include any reward withdrawals
^ Relative frequency that a transaction includes a small number of
reward withdrawals, bounded by 'maxAFewWithdrawals'.
^ Maximum number of reward withdrawals that counts as a small number.
^ Relative frequency that a transaction includes any positive number of
reward withdrawals
^ Minimal slot for CHAIN trace generation.
it gets too small, we can't balance the fee, too large it gets too complicated. | # LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
module Test.Cardano.Ledger.Shelley.Generator.Constants (
Constants (..),
defaultConstants,
)
where
import Cardano.Ledger.BaseTypes (Version, natVersion)
import Cardano.Ledger.Coin (Coin (..))
import Data.Word (Word64)
data Constants = Constants
{ minNumGenInputs :: Int
, maxNumGenInputs :: Int
, frequencyRegKeyCert :: Int
, frequencyRegPoolCert :: Int
, frequencyDelegationCert :: Int
, frequencyGenesisDelegationCert :: Int
, frequencyDeRegKeyCert :: Int
, frequencyRetirePoolCert :: Int
, frequencyMIRCert :: Int
^ Relative frequency of generated MIR certificates
, frequencyScriptCredReg :: Int
, frequencyKeyCredReg :: Int
, frequencyScriptCredDeReg :: Int
, frequencyKeyCredDeReg :: Int
, frequencyScriptCredDelegation :: Int
, frequencyKeyCredDelegation :: Int
, frequencyTxUpdates :: Int
^ Relative frequency of Prototol / Application Updates in a transaction
, frequencyTxWithMetadata :: Int
^ Relative frequency of Metadata in a transaction
, minGenesisUTxOouts :: Int
, maxGenesisUTxOouts :: Int
, maxCertsPerTx :: Word64
, maxTxsPerBlock :: Word64
^ maximal number of Txs per block
, maxNumKeyPairs :: Word64
^ maximal numbers of generated
, minGenesisOutputVal :: Integer
, maxGenesisOutputVal :: Integer
, numBaseScripts :: Int
, numSimpleScripts :: Int
, frequencyNoWithdrawals :: Int
, frequencyAFewWithdrawals :: Int
, maxAFewWithdrawals :: Int
, frequencyPotentiallyManyWithdrawals :: Int
, minSlotTrace :: Int
, maxSlotTrace :: Int
^ Maximal slot for CHAIN trace generation .
, frequencyLowMaxEpoch :: Word64
^ Lower bound of the MaxEpoch protocol parameter
, maxMinFeeA :: Coin
, maxMinFeeB :: Coin
, numCoreNodes :: Word64
, minTreasury :: Integer
, maxTreasury :: Integer
, minReserves :: Integer
, maxReserves :: Integer
, minMajorPV :: Version
, maxMajorPV :: Version
, genTxStableUtxoSize :: Int
^ When generating Tx , we want the size to fluctuate around this point . If
, genTxUtxoIncrement :: Int
^ If we need to grow the when generating a Tx , how much should it grow by .
}
deriving (Show)
defaultConstants :: Constants
defaultConstants =
Constants
{ minNumGenInputs = 1
, maxNumGenInputs = 5
, frequencyRegKeyCert = 2
, frequencyRegPoolCert = 2
, frequencyDelegationCert = 3
, frequencyGenesisDelegationCert = 1
, frequencyDeRegKeyCert = 1
, frequencyRetirePoolCert = 1
, frequencyMIRCert = 1
, frequencyScriptCredReg = 1
, frequencyKeyCredReg = 2
, frequencyScriptCredDeReg = 1
, frequencyKeyCredDeReg = 2
, frequencyScriptCredDelegation = 1
, frequencyKeyCredDelegation = 2
, frequencyTxUpdates = 10
, frequencyTxWithMetadata = 10
, minGenesisUTxOouts = 10
, maxGenesisUTxOouts = 100
, maxCertsPerTx = 3
, maxTxsPerBlock = 10
, maxNumKeyPairs = 150
, minGenesisOutputVal = 1000000
, maxGenesisOutputVal = 100000000
, numBaseScripts = 3
, numSimpleScripts = 20
, frequencyNoWithdrawals = 75
, frequencyAFewWithdrawals = 20
, maxAFewWithdrawals = 10
, frequencyPotentiallyManyWithdrawals = 5
, minSlotTrace = 1000
, maxSlotTrace = 5000
, frequencyLowMaxEpoch = 200
, maxMinFeeA = Coin 1000
, maxMinFeeB = Coin 3
, numCoreNodes = 7
, minTreasury = 1000000
, maxTreasury = 10000000
, minReserves = 1000000
, maxReserves = 10000000
, minMajorPV = natVersion @2
, maxMajorPV = maxBound
, genTxStableUtxoSize = 100
, genTxUtxoIncrement = 3
}
|
f7dd9b612a5e87fdb9ab545bbd01488edf44e7e11096da3105ed48b4bd0eb5fe | qfpl/applied-fp-course | Types.hs | module Level06.DB.Types where
import Data.Text (Text)
import Data.Time (UTCTime)
import Database.SQLite.Simple.FromRow (FromRow (fromRow), field)
-- To try to avoid leaking various types and expected functionality around the
-- application, we create a stand alone type that will represent the data we
-- store in the database. In this instance, it is the raw types that make up a
-- comment.
data DBComment = DBComment
{ dbCommentId :: Int
, dbCommentTopic :: Text
, dbCommentComment :: Text
, dbCommentTime :: UTCTime
}
deriving Show
-- This type class instance comes from our DB package and tells the DB package
-- how to decode a single row from the database into a single representation of
-- our type. This technique of translating a result row to a type will differ
-- between different packages/databases.
instance FromRow DBComment where
fromRow = DBComment
field : : FromField a = > RowParser a
<$> field
<*> field
<*> field
<*> field
| null | https://raw.githubusercontent.com/qfpl/applied-fp-course/d5a94a9dcee677bc95a5184c2ed13329c9f07559/src/Level06/DB/Types.hs | haskell | To try to avoid leaking various types and expected functionality around the
application, we create a stand alone type that will represent the data we
store in the database. In this instance, it is the raw types that make up a
comment.
This type class instance comes from our DB package and tells the DB package
how to decode a single row from the database into a single representation of
our type. This technique of translating a result row to a type will differ
between different packages/databases. | module Level06.DB.Types where
import Data.Text (Text)
import Data.Time (UTCTime)
import Database.SQLite.Simple.FromRow (FromRow (fromRow), field)
data DBComment = DBComment
{ dbCommentId :: Int
, dbCommentTopic :: Text
, dbCommentComment :: Text
, dbCommentTime :: UTCTime
}
deriving Show
instance FromRow DBComment where
fromRow = DBComment
field : : FromField a = > RowParser a
<$> field
<*> field
<*> field
<*> field
|
2fe0a5fd6ea2103af863074ab53ee6f7674c9666894def47e1d3906b7e3b0526 | dyzsr/ocaml-selectml | set.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* NOTE: If this file is set.mli, do not edit it directly! Instead,
edit templates/set.template.mli and run tools/sync_stdlib_docs *)
* Sets over ordered types .
This module implements the set data structure , given a total ordering
function over the set elements . All operations over sets
are purely applicative ( no side - effects ) .
The implementation uses balanced binary trees , and is therefore
reasonably efficient : insertion and membership take time
logarithmic in the size of the set , for instance .
The { ! Make } functor constructs implementations for any type , given a
[ compare ] function .
For instance :
{ [
module IntPairs =
struct
type t = int * int
let compare ( x0,y0 ) ( ) =
match Stdlib.compare x0 x1 with
0 - > Stdlib.compare y0 y1
| c - > c
end
module = Set . Make(IntPairs )
let m = PairsSet.(empty | > add ( 2,3 ) | > add ( 5,7 ) | > add ( 11,13 ) )
] }
This creates a new module [ PairsSet ] , with a new type [ PairsSet.t ]
of sets of [ int * int ] .
This module implements the set data structure, given a total ordering
function over the set elements. All operations over sets
are purely applicative (no side-effects).
The implementation uses balanced binary trees, and is therefore
reasonably efficient: insertion and membership take time
logarithmic in the size of the set, for instance.
The {!Make} functor constructs implementations for any type, given a
[compare] function.
For instance:
{[
module IntPairs =
struct
type t = int * int
let compare (x0,y0) (x1,y1) =
match Stdlib.compare x0 x1 with
0 -> Stdlib.compare y0 y1
| c -> c
end
module PairsSet = Set.Make(IntPairs)
let m = PairsSet.(empty |> add (2,3) |> add (5,7) |> add (11,13))
]}
This creates a new module [PairsSet], with a new type [PairsSet.t]
of sets of [int * int].
*)
module type OrderedType =
sig
type t
(** The type of the set elements. *)
val compare : t -> t -> int
* A total ordering function over the set elements .
This is a two - argument function [ f ] such that
[ f e1 e2 ] is zero if the elements [ e1 ] and [ e2 ] are equal ,
[ f e1 e2 ] is strictly negative if [ e1 ] is smaller than [ e2 ] ,
and [ f e1 e2 ] is strictly positive if [ e1 ] is greater than [ e2 ] .
Example : a suitable ordering function is the generic structural
comparison function { ! Stdlib.compare } .
This is a two-argument function [f] such that
[f e1 e2] is zero if the elements [e1] and [e2] are equal,
[f e1 e2] is strictly negative if [e1] is smaller than [e2],
and [f e1 e2] is strictly positive if [e1] is greater than [e2].
Example: a suitable ordering function is the generic structural
comparison function {!Stdlib.compare}. *)
end
(** Input signature of the functor {!Make}. *)
module type S =
sig
type elt
(** The type of the set elements. *)
type t
(** The type of sets. *)
val empty: t
(** The empty set. *)
val is_empty: t -> bool
(** Test whether a set is empty or not. *)
val mem: elt -> t -> bool
(** [mem x s] tests whether [x] belongs to the set [s]. *)
val add: elt -> t -> t
* [ add x s ] returns a set containing all elements of [ s ] ,
plus [ x ] . If [ x ] was already in [ s ] , [ s ] is returned unchanged
( the result of the function is then physically equal to [ s ] ) .
@before 4.03 Physical equality was not ensured .
plus [x]. If [x] was already in [s], [s] is returned unchanged
(the result of the function is then physically equal to [s]).
@before 4.03 Physical equality was not ensured. *)
val singleton: elt -> t
* [ singleton x ] returns the one - element set containing only [ x ] .
val remove: elt -> t -> t
* [ remove x s ] returns a set containing all elements of [ s ] ,
except [ x ] . If [ x ] was not in [ s ] , [ s ] is returned unchanged
( the result of the function is then physically equal to [ s ] ) .
@before 4.03 Physical equality was not ensured .
except [x]. If [x] was not in [s], [s] is returned unchanged
(the result of the function is then physically equal to [s]).
@before 4.03 Physical equality was not ensured. *)
val union: t -> t -> t
(** Set union. *)
val inter: t -> t -> t
(** Set intersection. *)
val disjoint: t -> t -> bool
* Test if two sets are disjoint .
@since 4.08.0
@since 4.08.0 *)
val diff: t -> t -> t
(** Set difference: [diff s1 s2] contains the elements of [s1]
that are not in [s2]. *)
val compare: t -> t -> int
(** Total ordering between sets. Can be used as the ordering function
for doing sets of sets. *)
val equal: t -> t -> bool
(** [equal s1 s2] tests whether the sets [s1] and [s2] are
equal, that is, contain equal elements. *)
val subset: t -> t -> bool
(** [subset s1 s2] tests whether the set [s1] is a subset of
the set [s2]. *)
val iter: (elt -> unit) -> t -> unit
(** [iter f s] applies [f] in turn to all elements of [s].
The elements of [s] are presented to [f] in increasing order
with respect to the ordering over the type of the elements. *)
val map: (elt -> elt) -> t -> t
* [ map f s ] is the set whose elements are [ f a0],[f a1 ] ... [ f
aN ] , where [ a0],[a1] ... [aN ] are the elements of [ s ] .
The elements are passed to [ f ] in increasing order
with respect to the ordering over the type of the elements .
If no element of [ s ] is changed by [ f ] , [ s ] is returned
unchanged . ( If each output of [ f ] is physically equal to its
input , the returned set is physically equal to [ s ] . )
@since 4.04.0
aN], where [a0],[a1]...[aN] are the elements of [s].
The elements are passed to [f] in increasing order
with respect to the ordering over the type of the elements.
If no element of [s] is changed by [f], [s] is returned
unchanged. (If each output of [f] is physically equal to its
input, the returned set is physically equal to [s].)
@since 4.04.0 *)
val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a
(** [fold f s init] computes [(f xN ... (f x2 (f x1 init))...)],
where [x1 ... xN] are the elements of [s], in increasing order. *)
val for_all: (elt -> bool) -> t -> bool
(** [for_all f s] checks if all elements of the set
satisfy the predicate [f]. *)
val exists: (elt -> bool) -> t -> bool
* [ exists f s ] checks if at least one element of
the set satisfies the predicate [ f ] .
the set satisfies the predicate [f]. *)
val filter: (elt -> bool) -> t -> t
* [ filter f s ] returns the set of all elements in [ s ]
that satisfy predicate [ f ] . If [ f ] satisfies every element in [ s ] ,
[ s ] is returned unchanged ( the result of the function is then
physically equal to [ s ] ) .
@before 4.03 Physical equality was not ensured .
that satisfy predicate [f]. If [f] satisfies every element in [s],
[s] is returned unchanged (the result of the function is then
physically equal to [s]).
@before 4.03 Physical equality was not ensured.*)
val filter_map: (elt -> elt option) -> t -> t
* [ filter_map f s ] returns the set of all [ v ] such that
[ f x = Some v ] for some element [ x ] of [ s ] .
For example ,
{ [ filter_map ( fun n - > if n mod 2 = 0 then Some ( n / 2 ) else None ) s ] }
is the set of halves of the even elements of [ s ] .
If no element of [ s ] is changed or dropped by [ f ] ( if
[ f x = Some x ] for each element [ x ] ) , then
[ s ] is returned unchanged : the result of the function
is then physically equal to [ s ] .
@since 4.11.0
[f x = Some v] for some element [x] of [s].
For example,
{[filter_map (fun n -> if n mod 2 = 0 then Some (n / 2) else None) s]}
is the set of halves of the even elements of [s].
If no element of [s] is changed or dropped by [f] (if
[f x = Some x] for each element [x]), then
[s] is returned unchanged: the result of the function
is then physically equal to [s].
@since 4.11.0
*)
val partition: (elt -> bool) -> t -> t * t
(** [partition f s] returns a pair of sets [(s1, s2)], where
[s1] is the set of all the elements of [s] that satisfy the
predicate [f], and [s2] is the set of all the elements of
[s] that do not satisfy [f]. *)
val cardinal: t -> int
(** Return the number of elements of a set. *)
val elements: t -> elt list
* Return the list of all elements of the given set .
The returned list is sorted in increasing order with respect
to the ordering [ Ord.compare ] , where [ ] is the argument
given to { ! . Set . Make } .
The returned list is sorted in increasing order with respect
to the ordering [Ord.compare], where [Ord] is the argument
given to {!Stdlib.Set.Make}. *)
val min_elt: t -> elt
(** Return the smallest element of the given set
(with respect to the [Ord.compare] ordering), or raise
[Not_found] if the set is empty. *)
val min_elt_opt: t -> elt option
* Return the smallest element of the given set
( with respect to the [ Ord.compare ] ordering ) , or [ None ]
if the set is empty .
@since 4.05
(with respect to the [Ord.compare] ordering), or [None]
if the set is empty.
@since 4.05
*)
val max_elt: t -> elt
(** Same as {!min_elt}, but returns the largest element of the
given set. *)
val max_elt_opt: t -> elt option
* Same as { ! min_elt_opt } , but returns the largest element of the
given set .
@since 4.05
given set.
@since 4.05
*)
val choose: t -> elt
* Return one element of the given set , or raise [ Not_found ] if
the set is empty . Which element is chosen is unspecified ,
but equal elements will be chosen for equal sets .
the set is empty. Which element is chosen is unspecified,
but equal elements will be chosen for equal sets. *)
val choose_opt: t -> elt option
* Return one element of the given set , or [ None ] if
the set is empty . Which element is chosen is unspecified ,
but equal elements will be chosen for equal sets .
@since 4.05
the set is empty. Which element is chosen is unspecified,
but equal elements will be chosen for equal sets.
@since 4.05
*)
val split: elt -> t -> t * bool * t
(** [split x s] returns a triple [(l, present, r)], where
[l] is the set of elements of [s] that are
strictly less than [x];
[r] is the set of elements of [s] that are
strictly greater than [x];
[present] is [false] if [s] contains no element equal to [x],
or [true] if [s] contains an element equal to [x]. *)
val find: elt -> t -> elt
* [ find x s ] returns the element of [ s ] equal to [ x ] ( according
to [ Ord.compare ] ) , or raise [ Not_found ] if no such element
exists .
@since 4.01.0
to [Ord.compare]), or raise [Not_found] if no such element
exists.
@since 4.01.0 *)
val find_opt: elt -> t -> elt option
* [ find_opt x s ] returns the element of [ s ] equal to [ x ] ( according
to [ Ord.compare ] ) , or [ None ] if no such element
exists .
@since 4.05
to [Ord.compare]), or [None] if no such element
exists.
@since 4.05 *)
val find_first: (elt -> bool) -> t -> elt
* [ find_first f s ] , where [ f ] is a monotonically increasing function ,
returns the lowest element [ e ] of [ s ] such that [ f e ] ,
or raises [ Not_found ] if no such element exists .
For example , [ find_first ( fun e - > Ord.compare e x > = 0 ) s ] will return
the first element [ e ] of [ s ] where [ Ord.compare e x > = 0 ] ( intuitively :
[ e > = x ] ) , or raise [ Not_found ] if [ x ] is greater than any element of
[ s ] .
@since 4.05
returns the lowest element [e] of [s] such that [f e],
or raises [Not_found] if no such element exists.
For example, [find_first (fun e -> Ord.compare e x >= 0) s] will return
the first element [e] of [s] where [Ord.compare e x >= 0] (intuitively:
[e >= x]), or raise [Not_found] if [x] is greater than any element of
[s].
@since 4.05
*)
val find_first_opt: (elt -> bool) -> t -> elt option
* [ find_first_opt f s ] , where [ f ] is a monotonically increasing
function , returns an option containing the lowest element [ e ] of [ s ]
such that [ f e ] , or [ None ] if no such element exists .
@since 4.05
function, returns an option containing the lowest element [e] of [s]
such that [f e], or [None] if no such element exists.
@since 4.05
*)
val find_last: (elt -> bool) -> t -> elt
* [ find_last f s ] , where [ f ] is a monotonically decreasing function ,
returns the highest element [ e ] of [ s ] such that [ f e ] ,
or raises [ Not_found ] if no such element exists .
@since 4.05
returns the highest element [e] of [s] such that [f e],
or raises [Not_found] if no such element exists.
@since 4.05
*)
val find_last_opt: (elt -> bool) -> t -> elt option
* [ find_last_opt f s ] , where [ f ] is a monotonically decreasing
function , returns an option containing the highest element [ e ] of [ s ]
such that [ f e ] , or [ None ] if no such element exists .
@since 4.05
function, returns an option containing the highest element [e] of [s]
such that [f e], or [None] if no such element exists.
@since 4.05
*)
val of_list: elt list -> t
* [ of_list l ] creates a set from a list of elements .
This is usually more efficient than folding [ add ] over the list ,
except perhaps for lists with many duplicated elements .
@since 4.02.0
This is usually more efficient than folding [add] over the list,
except perhaps for lists with many duplicated elements.
@since 4.02.0 *)
(** {1 Iterators} *)
val to_seq_from : elt -> t -> elt Seq.t
* [ to_seq_from x s ] iterates on a subset of the elements of [ s ]
in ascending order , from [ x ] or above .
@since 4.07
in ascending order, from [x] or above.
@since 4.07 *)
val to_seq : t -> elt Seq.t
* Iterate on the whole set , in ascending order
@since 4.07
@since 4.07 *)
val to_rev_seq : t -> elt Seq.t
* Iterate on the whole set , in descending order
@since 4.12
@since 4.12 *)
val add_seq : elt Seq.t -> t -> t
* Add the given elements to the set , in order .
@since 4.07
@since 4.07 *)
val of_seq : elt Seq.t -> t
* Build a set from the given bindings
@since 4.07
@since 4.07 *)
end
(** Output signature of the functor {!Make}. *)
module Make (Ord : OrderedType) : S with type elt = Ord.t
(** Functor building an implementation of the set structure
given a totally ordered type. *)
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/stdlib/set.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
NOTE: If this file is set.mli, do not edit it directly! Instead,
edit templates/set.template.mli and run tools/sync_stdlib_docs
* The type of the set elements.
* Input signature of the functor {!Make}.
* The type of the set elements.
* The type of sets.
* The empty set.
* Test whether a set is empty or not.
* [mem x s] tests whether [x] belongs to the set [s].
* Set union.
* Set intersection.
* Set difference: [diff s1 s2] contains the elements of [s1]
that are not in [s2].
* Total ordering between sets. Can be used as the ordering function
for doing sets of sets.
* [equal s1 s2] tests whether the sets [s1] and [s2] are
equal, that is, contain equal elements.
* [subset s1 s2] tests whether the set [s1] is a subset of
the set [s2].
* [iter f s] applies [f] in turn to all elements of [s].
The elements of [s] are presented to [f] in increasing order
with respect to the ordering over the type of the elements.
* [fold f s init] computes [(f xN ... (f x2 (f x1 init))...)],
where [x1 ... xN] are the elements of [s], in increasing order.
* [for_all f s] checks if all elements of the set
satisfy the predicate [f].
* [partition f s] returns a pair of sets [(s1, s2)], where
[s1] is the set of all the elements of [s] that satisfy the
predicate [f], and [s2] is the set of all the elements of
[s] that do not satisfy [f].
* Return the number of elements of a set.
* Return the smallest element of the given set
(with respect to the [Ord.compare] ordering), or raise
[Not_found] if the set is empty.
* Same as {!min_elt}, but returns the largest element of the
given set.
* [split x s] returns a triple [(l, present, r)], where
[l] is the set of elements of [s] that are
strictly less than [x];
[r] is the set of elements of [s] that are
strictly greater than [x];
[present] is [false] if [s] contains no element equal to [x],
or [true] if [s] contains an element equal to [x].
* {1 Iterators}
* Output signature of the functor {!Make}.
* Functor building an implementation of the set structure
given a totally ordered type. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Sets over ordered types .
This module implements the set data structure , given a total ordering
function over the set elements . All operations over sets
are purely applicative ( no side - effects ) .
The implementation uses balanced binary trees , and is therefore
reasonably efficient : insertion and membership take time
logarithmic in the size of the set , for instance .
The { ! Make } functor constructs implementations for any type , given a
[ compare ] function .
For instance :
{ [
module IntPairs =
struct
type t = int * int
let compare ( x0,y0 ) ( ) =
match Stdlib.compare x0 x1 with
0 - > Stdlib.compare y0 y1
| c - > c
end
module = Set . Make(IntPairs )
let m = PairsSet.(empty | > add ( 2,3 ) | > add ( 5,7 ) | > add ( 11,13 ) )
] }
This creates a new module [ PairsSet ] , with a new type [ PairsSet.t ]
of sets of [ int * int ] .
This module implements the set data structure, given a total ordering
function over the set elements. All operations over sets
are purely applicative (no side-effects).
The implementation uses balanced binary trees, and is therefore
reasonably efficient: insertion and membership take time
logarithmic in the size of the set, for instance.
The {!Make} functor constructs implementations for any type, given a
[compare] function.
For instance:
{[
module IntPairs =
struct
type t = int * int
let compare (x0,y0) (x1,y1) =
match Stdlib.compare x0 x1 with
0 -> Stdlib.compare y0 y1
| c -> c
end
module PairsSet = Set.Make(IntPairs)
let m = PairsSet.(empty |> add (2,3) |> add (5,7) |> add (11,13))
]}
This creates a new module [PairsSet], with a new type [PairsSet.t]
of sets of [int * int].
*)
module type OrderedType =
sig
type t
val compare : t -> t -> int
* A total ordering function over the set elements .
This is a two - argument function [ f ] such that
[ f e1 e2 ] is zero if the elements [ e1 ] and [ e2 ] are equal ,
[ f e1 e2 ] is strictly negative if [ e1 ] is smaller than [ e2 ] ,
and [ f e1 e2 ] is strictly positive if [ e1 ] is greater than [ e2 ] .
Example : a suitable ordering function is the generic structural
comparison function { ! Stdlib.compare } .
This is a two-argument function [f] such that
[f e1 e2] is zero if the elements [e1] and [e2] are equal,
[f e1 e2] is strictly negative if [e1] is smaller than [e2],
and [f e1 e2] is strictly positive if [e1] is greater than [e2].
Example: a suitable ordering function is the generic structural
comparison function {!Stdlib.compare}. *)
end
module type S =
sig
type elt
type t
val empty: t
val is_empty: t -> bool
val mem: elt -> t -> bool
val add: elt -> t -> t
* [ add x s ] returns a set containing all elements of [ s ] ,
plus [ x ] . If [ x ] was already in [ s ] , [ s ] is returned unchanged
( the result of the function is then physically equal to [ s ] ) .
@before 4.03 Physical equality was not ensured .
plus [x]. If [x] was already in [s], [s] is returned unchanged
(the result of the function is then physically equal to [s]).
@before 4.03 Physical equality was not ensured. *)
val singleton: elt -> t
* [ singleton x ] returns the one - element set containing only [ x ] .
val remove: elt -> t -> t
* [ remove x s ] returns a set containing all elements of [ s ] ,
except [ x ] . If [ x ] was not in [ s ] , [ s ] is returned unchanged
( the result of the function is then physically equal to [ s ] ) .
@before 4.03 Physical equality was not ensured .
except [x]. If [x] was not in [s], [s] is returned unchanged
(the result of the function is then physically equal to [s]).
@before 4.03 Physical equality was not ensured. *)
val union: t -> t -> t
val inter: t -> t -> t
val disjoint: t -> t -> bool
* Test if two sets are disjoint .
@since 4.08.0
@since 4.08.0 *)
val diff: t -> t -> t
val compare: t -> t -> int
val equal: t -> t -> bool
val subset: t -> t -> bool
val iter: (elt -> unit) -> t -> unit
val map: (elt -> elt) -> t -> t
* [ map f s ] is the set whose elements are [ f a0],[f a1 ] ... [ f
aN ] , where [ a0],[a1] ... [aN ] are the elements of [ s ] .
The elements are passed to [ f ] in increasing order
with respect to the ordering over the type of the elements .
If no element of [ s ] is changed by [ f ] , [ s ] is returned
unchanged . ( If each output of [ f ] is physically equal to its
input , the returned set is physically equal to [ s ] . )
@since 4.04.0
aN], where [a0],[a1]...[aN] are the elements of [s].
The elements are passed to [f] in increasing order
with respect to the ordering over the type of the elements.
If no element of [s] is changed by [f], [s] is returned
unchanged. (If each output of [f] is physically equal to its
input, the returned set is physically equal to [s].)
@since 4.04.0 *)
val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a
val for_all: (elt -> bool) -> t -> bool
val exists: (elt -> bool) -> t -> bool
* [ exists f s ] checks if at least one element of
the set satisfies the predicate [ f ] .
the set satisfies the predicate [f]. *)
val filter: (elt -> bool) -> t -> t
* [ filter f s ] returns the set of all elements in [ s ]
that satisfy predicate [ f ] . If [ f ] satisfies every element in [ s ] ,
[ s ] is returned unchanged ( the result of the function is then
physically equal to [ s ] ) .
@before 4.03 Physical equality was not ensured .
that satisfy predicate [f]. If [f] satisfies every element in [s],
[s] is returned unchanged (the result of the function is then
physically equal to [s]).
@before 4.03 Physical equality was not ensured.*)
val filter_map: (elt -> elt option) -> t -> t
* [ filter_map f s ] returns the set of all [ v ] such that
[ f x = Some v ] for some element [ x ] of [ s ] .
For example ,
{ [ filter_map ( fun n - > if n mod 2 = 0 then Some ( n / 2 ) else None ) s ] }
is the set of halves of the even elements of [ s ] .
If no element of [ s ] is changed or dropped by [ f ] ( if
[ f x = Some x ] for each element [ x ] ) , then
[ s ] is returned unchanged : the result of the function
is then physically equal to [ s ] .
@since 4.11.0
[f x = Some v] for some element [x] of [s].
For example,
{[filter_map (fun n -> if n mod 2 = 0 then Some (n / 2) else None) s]}
is the set of halves of the even elements of [s].
If no element of [s] is changed or dropped by [f] (if
[f x = Some x] for each element [x]), then
[s] is returned unchanged: the result of the function
is then physically equal to [s].
@since 4.11.0
*)
val partition: (elt -> bool) -> t -> t * t
val cardinal: t -> int
val elements: t -> elt list
* Return the list of all elements of the given set .
The returned list is sorted in increasing order with respect
to the ordering [ Ord.compare ] , where [ ] is the argument
given to { ! . Set . Make } .
The returned list is sorted in increasing order with respect
to the ordering [Ord.compare], where [Ord] is the argument
given to {!Stdlib.Set.Make}. *)
val min_elt: t -> elt
val min_elt_opt: t -> elt option
* Return the smallest element of the given set
( with respect to the [ Ord.compare ] ordering ) , or [ None ]
if the set is empty .
@since 4.05
(with respect to the [Ord.compare] ordering), or [None]
if the set is empty.
@since 4.05
*)
val max_elt: t -> elt
val max_elt_opt: t -> elt option
* Same as { ! min_elt_opt } , but returns the largest element of the
given set .
@since 4.05
given set.
@since 4.05
*)
val choose: t -> elt
* Return one element of the given set , or raise [ Not_found ] if
the set is empty . Which element is chosen is unspecified ,
but equal elements will be chosen for equal sets .
the set is empty. Which element is chosen is unspecified,
but equal elements will be chosen for equal sets. *)
val choose_opt: t -> elt option
* Return one element of the given set , or [ None ] if
the set is empty . Which element is chosen is unspecified ,
but equal elements will be chosen for equal sets .
@since 4.05
the set is empty. Which element is chosen is unspecified,
but equal elements will be chosen for equal sets.
@since 4.05
*)
val split: elt -> t -> t * bool * t
val find: elt -> t -> elt
* [ find x s ] returns the element of [ s ] equal to [ x ] ( according
to [ Ord.compare ] ) , or raise [ Not_found ] if no such element
exists .
@since 4.01.0
to [Ord.compare]), or raise [Not_found] if no such element
exists.
@since 4.01.0 *)
val find_opt: elt -> t -> elt option
* [ find_opt x s ] returns the element of [ s ] equal to [ x ] ( according
to [ Ord.compare ] ) , or [ None ] if no such element
exists .
@since 4.05
to [Ord.compare]), or [None] if no such element
exists.
@since 4.05 *)
val find_first: (elt -> bool) -> t -> elt
* [ find_first f s ] , where [ f ] is a monotonically increasing function ,
returns the lowest element [ e ] of [ s ] such that [ f e ] ,
or raises [ Not_found ] if no such element exists .
For example , [ find_first ( fun e - > Ord.compare e x > = 0 ) s ] will return
the first element [ e ] of [ s ] where [ Ord.compare e x > = 0 ] ( intuitively :
[ e > = x ] ) , or raise [ Not_found ] if [ x ] is greater than any element of
[ s ] .
@since 4.05
returns the lowest element [e] of [s] such that [f e],
or raises [Not_found] if no such element exists.
For example, [find_first (fun e -> Ord.compare e x >= 0) s] will return
the first element [e] of [s] where [Ord.compare e x >= 0] (intuitively:
[e >= x]), or raise [Not_found] if [x] is greater than any element of
[s].
@since 4.05
*)
val find_first_opt: (elt -> bool) -> t -> elt option
* [ find_first_opt f s ] , where [ f ] is a monotonically increasing
function , returns an option containing the lowest element [ e ] of [ s ]
such that [ f e ] , or [ None ] if no such element exists .
@since 4.05
function, returns an option containing the lowest element [e] of [s]
such that [f e], or [None] if no such element exists.
@since 4.05
*)
val find_last: (elt -> bool) -> t -> elt
* [ find_last f s ] , where [ f ] is a monotonically decreasing function ,
returns the highest element [ e ] of [ s ] such that [ f e ] ,
or raises [ Not_found ] if no such element exists .
@since 4.05
returns the highest element [e] of [s] such that [f e],
or raises [Not_found] if no such element exists.
@since 4.05
*)
val find_last_opt: (elt -> bool) -> t -> elt option
* [ find_last_opt f s ] , where [ f ] is a monotonically decreasing
function , returns an option containing the highest element [ e ] of [ s ]
such that [ f e ] , or [ None ] if no such element exists .
@since 4.05
function, returns an option containing the highest element [e] of [s]
such that [f e], or [None] if no such element exists.
@since 4.05
*)
val of_list: elt list -> t
* [ of_list l ] creates a set from a list of elements .
This is usually more efficient than folding [ add ] over the list ,
except perhaps for lists with many duplicated elements .
@since 4.02.0
This is usually more efficient than folding [add] over the list,
except perhaps for lists with many duplicated elements.
@since 4.02.0 *)
val to_seq_from : elt -> t -> elt Seq.t
* [ to_seq_from x s ] iterates on a subset of the elements of [ s ]
in ascending order , from [ x ] or above .
@since 4.07
in ascending order, from [x] or above.
@since 4.07 *)
val to_seq : t -> elt Seq.t
* Iterate on the whole set , in ascending order
@since 4.07
@since 4.07 *)
val to_rev_seq : t -> elt Seq.t
* Iterate on the whole set , in descending order
@since 4.12
@since 4.12 *)
val add_seq : elt Seq.t -> t -> t
* Add the given elements to the set , in order .
@since 4.07
@since 4.07 *)
val of_seq : elt Seq.t -> t
* Build a set from the given bindings
@since 4.07
@since 4.07 *)
end
module Make (Ord : OrderedType) : S with type elt = Ord.t
|
89b2290818b593f17a9eef00e717b83066538e0e04d7be3a17156b31c22f4c1d | returntocorp/ocaml-tree-sitter-core | Codegen_util.ml | (*
Various reusable utilities involved in code generation.
*)
open Printf
let translate_ident =
let map =
Protect_ident.create ~reserved_dst:Protect_ident.ocaml_reserved ()
in
fun ident -> Protect_ident.add_translation map ident
let interleave sep l =
let rec loop = function
| [] -> []
| x :: xs -> sep :: x :: loop xs
in
match l with
| x :: xs -> x :: loop xs
| [] -> []
Like , with additional argument indicating the position
in the list .
in the list. *)
let fold_righti f xs init_acc =
let rec fold pos = function
| [] -> init_acc
| x :: xs ->
f pos x (fold (pos + 1) xs)
in
fold 0 xs
Create the list [ 0 ; 1 ; ... ; n-1 ]
let enum n =
Array.init n (fun i -> i) |> Array.to_list
let format_binding ~is_rec ~is_local pos binding =
let open Indent.Types in
let is_first = (pos = 0) in
let let_ =
if is_first then (
if is_rec then
"let rec"
else
"let"
)
else (
if is_rec then
"and"
else
"let"
)
in
let individual_in =
if is_local && not is_rec then
[Line "in"]
else
[]
in
(match binding with
| Line first_line :: rest ->
Line (sprintf "%s %s" let_ first_line) :: rest
| _ ->
Line let_ :: binding
) @ individual_in
Insert the correct ' let ' , ' let rec ' , ' and ' , ' in ' from a list of OCaml
bindings .
The first item must be a line without the ' let ' .
bindings.
The first item must be a line without the 'let'.
*)
let format_bindings ~is_rec ~is_local bindings =
let open Indent.Types in
match bindings with
| [] -> []
| bindings ->
let final_in, spacing =
if is_local && is_rec then
[Line "in"], []
else
[], [Line ""]
in
[
Inline (List.mapi (format_binding ~is_rec ~is_local) bindings
|> interleave spacing
|> List.flatten);
Inline final_in;
]
Tuareg - mode for emacs gets confused by comments like "
highlighting.
"(*"
This inserts a line-break after the slash.
*)
let make_editor_friendly_comment =
let regexp = Str.regexp "\\*)" in
fun s ->
Str.global_substitute regexp (fun _ -> "*\\\n )") s
let has_escape_characters s =
try
String.iter (function
| '"'
| '\'' -> raise Exit
| _ -> ()
) s;
false
with Exit -> true
(*
Prevent code injections from tree-sitter token nodes with funny names.
It's like sprintf "%S", but tries to not double-quote things that don't
need it.
*)
let comment s =
(if has_escape_characters s then
sprintf "%S" s
else
s
)
|> make_editor_friendly_comment
| null | https://raw.githubusercontent.com/returntocorp/ocaml-tree-sitter-core/28f750bb894ea4c0a7f6b911e568ab9d731cc0b5/src/gen/lib/Codegen_util.ml | ocaml |
Various reusable utilities involved in code generation.
Prevent code injections from tree-sitter token nodes with funny names.
It's like sprintf "%S", but tries to not double-quote things that don't
need it.
|
open Printf
let translate_ident =
let map =
Protect_ident.create ~reserved_dst:Protect_ident.ocaml_reserved ()
in
fun ident -> Protect_ident.add_translation map ident
let interleave sep l =
let rec loop = function
| [] -> []
| x :: xs -> sep :: x :: loop xs
in
match l with
| x :: xs -> x :: loop xs
| [] -> []
Like , with additional argument indicating the position
in the list .
in the list. *)
let fold_righti f xs init_acc =
let rec fold pos = function
| [] -> init_acc
| x :: xs ->
f pos x (fold (pos + 1) xs)
in
fold 0 xs
Create the list [ 0 ; 1 ; ... ; n-1 ]
let enum n =
Array.init n (fun i -> i) |> Array.to_list
let format_binding ~is_rec ~is_local pos binding =
let open Indent.Types in
let is_first = (pos = 0) in
let let_ =
if is_first then (
if is_rec then
"let rec"
else
"let"
)
else (
if is_rec then
"and"
else
"let"
)
in
let individual_in =
if is_local && not is_rec then
[Line "in"]
else
[]
in
(match binding with
| Line first_line :: rest ->
Line (sprintf "%s %s" let_ first_line) :: rest
| _ ->
Line let_ :: binding
) @ individual_in
Insert the correct ' let ' , ' let rec ' , ' and ' , ' in ' from a list of OCaml
bindings .
The first item must be a line without the ' let ' .
bindings.
The first item must be a line without the 'let'.
*)
let format_bindings ~is_rec ~is_local bindings =
let open Indent.Types in
match bindings with
| [] -> []
| bindings ->
let final_in, spacing =
if is_local && is_rec then
[Line "in"], []
else
[], [Line ""]
in
[
Inline (List.mapi (format_binding ~is_rec ~is_local) bindings
|> interleave spacing
|> List.flatten);
Inline final_in;
]
Tuareg - mode for emacs gets confused by comments like "
highlighting.
"(*"
This inserts a line-break after the slash.
*)
let make_editor_friendly_comment =
let regexp = Str.regexp "\\*)" in
fun s ->
Str.global_substitute regexp (fun _ -> "*\\\n )") s
let has_escape_characters s =
try
String.iter (function
| '"'
| '\'' -> raise Exit
| _ -> ()
) s;
false
with Exit -> true
let comment s =
(if has_escape_characters s then
sprintf "%S" s
else
s
)
|> make_editor_friendly_comment
|
6bb6bf06d1323ccb5869980b859543b724c557e0ca4c23a1394721ffbcd721a3 | b0-system/b0 | b0_cmd_scope.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open B0_std
open Result.Syntax
let is_vcs ~all find (_, dir) =
let* vcs = find ?dir:(Some dir) () in
match vcs with
| None -> Ok false
| Some vcs -> if all then Ok true else B00_vcs.is_dirty vcs
let get_scopes c root excludes k =
XXX should n't we rather save them in ` B0_def . Scopes ` ?
Log.if_error ~use:B0_driver.Exit.no_b0_file @@
let* b0_file = B0_driver.Conf.get_b0_file c in
Log.if_error' ~header:"" ~use:B0_driver.Exit.b0_file_error @@
let* s = Os.File.read b0_file in
let* src = B0_file.of_string ~file:b0_file s in
let* incs = match root with
| true -> Ok (B0_file.b0_includes src)
| false ->
let* e = B0_file.expand src in
Ok (B0_file.expanded_b0_includes e)
in
let inc_to_scope ((n, _), (p, _)) = n, Fpath.parent p in
let root = ("." (* XXX what should we use here ? *), Fpath.parent b0_file) in
let scopes = root :: List.sort compare (List.map inc_to_scope incs) in
k (List.filter (fun (n, _) -> not (List.mem n excludes)) scopes)
let exec_when cond c root excludes keep_going cmd =
let err (_, dir) e =
Log.err (fun m -> m "@[%a: %s@]" Fpath.pp dir e);
Ok B00_cli.Exit.some_error
in
get_scopes c root excludes @@ function scopes ->
let rec loop = function
| [] -> Ok B00_cli.Exit.ok
| (n, p as s) :: ss ->
match cond s with
| Error e -> err s e
| Ok false -> loop ss
| Ok true ->
Log.app begin fun m ->
m "@[%a: %a@]"
Fmt.(code string) n (Fmt.tty [`Faint] Fpath.pp) p
end;
match Os.Cmd.run ~cwd:p cmd with
| Error e when not keep_going -> err s e
| Error _ | Ok () -> Log.app (fun m -> m ""); loop ss
in
loop scopes
let list root excludes format path c =
get_scopes c root excludes @@ function scopes ->
let pp_scope = match path with
| true -> fun ppf (_, dir) -> Fpath.pp_unquoted ppf dir
| false ->
match format with
| `Short -> fun ppf (n, _) -> Fmt.(code string) ppf n
| `Normal | `Long ->
fun ppf (n, dir) ->
Fmt.pf ppf "@[%a %a@]" Fmt.(code string) n Fpath.pp_unquoted dir
in
Log.app (fun m -> m "@[<v>%a@]" Fmt.(list pp_scope) scopes);
Ok B00_cli.Exit.ok
let exec root excludes keep_going tool tool_args c =
let cmd = tool :: tool_args in
exec_when (fun _ -> Ok true) c root excludes keep_going (Cmd.list cmd)
let git root excludes all keep_going full_cmd subcmd subcmd_args c =
let cmd = subcmd :: subcmd_args in
let cmd = if full_cmd then Cmd.list cmd else Cmd.(atom "git" %% list cmd) in
exec_when (is_vcs ~all B00_vcs.Git.find) c root excludes keep_going cmd
let hg root excludes all keep_going full_cmd subcmd subcmd_args c=
let cmd = subcmd :: subcmd_args in
let cmd = if full_cmd then Cmd.list cmd else Cmd.(atom "hg" %% list cmd) in
exec_when (is_vcs ~all B00_vcs.Hg.find) c root excludes keep_going cmd
(* Command line interface *)
open Cmdliner
let root =
let doc = "Only consider scopes included by the root B0 file. Those \
recursively included by these are excluded."
in
Arg.(value & flag & info ["root"] ~doc)
let excludes =
let doc = "Exclude scope $(docv) from the request. Repeatable." in
Arg.(value & opt_all string [] & info ["x"; "exclude"] ~doc ~docv:"SCOPE")
let keep_going =
let doc = "Do not stop if a tool invocation exits with non zero." in
Arg.(value & flag & info ["k"; "keep-going"] ~doc)
let full_cmd =
let doc = "Specify a full command rather than a subcommand of the VCS." in
Arg.(value & flag & info ["c"; "full-cmd"] ~doc)
let tool =
let doc = "Invoke tool $(docv)." in
Arg.(required & pos 0 (some string) None & info [] ~doc ~docv:"TOOL")
let all =
let doc = "Apply command to all VCS scopes, not only those that are dirty." in
Arg.(value & flag & info ["a"; "all"] ~doc)
let vcs_subcmd =
let doc = "Invoke VCS subcommand $(docv)." in
Arg.(required & pos 0 (some string) None & info [] ~doc ~docv:"SUBCMD")
let tool_args =
let doc = "Argument for the tool. Start with a $(b,--) \
token otherwise options get interpreted by $(mname)."
in
Arg.(value & pos_right 0 string [] & info [] ~doc ~docv:"ARG")
let list_term =
let path =
let doc = "Only print the scope paths." in
Arg.(value & flag & info ["path"] ~doc)
in
Term.(const list $ root $ excludes $ B0_b0.Cli.format $ path)
let vcs_syn =
"$(mname) $(b,scope) $(tname) [$(i,OPTION)]… $(b,--) $(i,SUBCMD) [$(i,ARG)]…"
(* Commands *)
let exec =
let doc = "Execute a tool in scope directories" in
let synopsis = `P "$(mname) $(b,scope) $(tname) [$(i,OPTION)]… $(b,--) \
$(i,TOOL) [$(i,ARG)]…"
in
let descr = `P "$(tname) executes $(i,TOOL) with given arguments in the \
directory of each of the scopes. The process is stopped \
if $(i,TOOL) returns with a non zero exit code, use the \
option $(b,--keep-going) to prevent that."
in
B0_b0.Cli.subcmd_with_driver_conf "exec" ~doc ~synopsis ~descr
Term.(const exec $ root $ excludes $ keep_going $ tool $ tool_args)
let hg =
let doc = "Execute $(b,hg) in dirty Mercurial managed scopes" in
let synopsis = `P vcs_syn in
let descr = `P "$(tname) works exactly like $(b,b0 scope git) but with the \
Mercurial VCS, see $(mname) $(b,scope git --help) for
more information"
in
B0_b0.Cli.subcmd_with_driver_conf "hg" ~doc ~synopsis ~descr
Term.(const hg $ root $ excludes $ all $ keep_going $ full_cmd $
vcs_subcmd $ tool_args)
let git =
let doc = "Execute $(b,git) in dirty Git managed scopes" in
let synopsis = `P vcs_syn in
let descr = `Blocks [
`P "$(tname) executes the Git subcommand $(i,SUBCMD) \
with given arguments in the directory of each of the scopes
which are found to be managed by Git and dirty;
or all of them if $(b,--all) is specified.";
`P "If $(b,--full-cmd) is specified the positional arguments specify a
full command like $(b,scope exec) does, not a VCS subcommand.";
`P "The process is stopped if an execution returns with a non zero exit
code, use the option $(b,--keep-going) to prevent that." ]
in
B0_b0.Cli.subcmd_with_driver_conf "git" ~doc ~synopsis ~descr
Term.(const git $ root $ excludes $ all $ keep_going $ full_cmd $
vcs_subcmd $ tool_args)
let list =
let doc = "List scopes (default command)" in
let descr = `P "$(tname) lists scope names and their location. \
If $(b,--path) is specified only paths are listed."
in
let envs = B0_b0.Cli.pager_envs in
B0_b0.Cli.subcmd_with_driver_conf "list" ~doc ~descr ~envs list_term
let subs = [exec; hg; git; list;]
let cmd =
let doc = "Operate on B0 scopes" in
let descr =
`Blocks [
`P "$(tname) operates on scopes. The default command is $(tname) \
$(b,list).";
`P "$(tname) can fold over scope directories and bulk operate \
their VCSs (if applicable) when repositories are dirty. \
Typical usage:";
`P "$(b,> b0)"; `Noblank;
`P "Error: ..."; `Noblank;
`P "$(b,> ... # Fix errors)"; `Noblank;
`P "$(b,> b0)"; `Noblank;
`P "$(b,> b0 scope git -- status)"; `Noblank;
`P "$(b,> b0 scope git -- add -p)"; `Noblank;
`P "$(b,> b0 scope git -- commit -m 'Cope with changes!')"; `Noblank;
`P "$(b,> b0 scope git --all -- push)";
`P "To invoke arbitrary tools in scopes use $(b,b0 scope exec). Options
$(b,--root) and $(b,-x) allow to prune the list of scopes.";
]
in
let default = list_term in
B0_b0.Cli.cmd_group_with_driver_conf "scope" ~doc ~descr ~default subs
(*
let scope
c details path root excludes all full_cmd keep_going action action_args
=
match action with
| `List -> list c root excludes details path
| `Exec -> exec c root excludes keep_going action_args
| `Git -> git c root excludes all keep_going full_cmd action_args
| `Hg -> hg c root excludes all keep_going full_cmd action_args
let action =
let action = [ "list", `List; "exec", `Exec; "git", `Git; "hg", `Hg; ] in
let doc =
let alts = Arg.doc_alts_enum action in
Fmt.str "The action to perform. $(docv) must be one of %s." alts
in
let action = Arg.enum action in
Arg.(required & pos 0 (some action) None & info [] ~doc ~docv:"ACTION")
*)
---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/b0-system/b0/cbe12b8a55da6b50ab01ed058b339dbed3cfe894/tool-b0/b0_cmd_scope.ml | ocaml | XXX what should we use here ?
Command line interface
Commands
let scope
c details path root excludes all full_cmd keep_going action action_args
=
match action with
| `List -> list c root excludes details path
| `Exec -> exec c root excludes keep_going action_args
| `Git -> git c root excludes all keep_going full_cmd action_args
| `Hg -> hg c root excludes all keep_going full_cmd action_args
let action =
let action = [ "list", `List; "exec", `Exec; "git", `Git; "hg", `Hg; ] in
let doc =
let alts = Arg.doc_alts_enum action in
Fmt.str "The action to perform. $(docv) must be one of %s." alts
in
let action = Arg.enum action in
Arg.(required & pos 0 (some action) None & info [] ~doc ~docv:"ACTION")
| ---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open B0_std
open Result.Syntax
let is_vcs ~all find (_, dir) =
let* vcs = find ?dir:(Some dir) () in
match vcs with
| None -> Ok false
| Some vcs -> if all then Ok true else B00_vcs.is_dirty vcs
let get_scopes c root excludes k =
XXX should n't we rather save them in ` B0_def . Scopes ` ?
Log.if_error ~use:B0_driver.Exit.no_b0_file @@
let* b0_file = B0_driver.Conf.get_b0_file c in
Log.if_error' ~header:"" ~use:B0_driver.Exit.b0_file_error @@
let* s = Os.File.read b0_file in
let* src = B0_file.of_string ~file:b0_file s in
let* incs = match root with
| true -> Ok (B0_file.b0_includes src)
| false ->
let* e = B0_file.expand src in
Ok (B0_file.expanded_b0_includes e)
in
let inc_to_scope ((n, _), (p, _)) = n, Fpath.parent p in
let scopes = root :: List.sort compare (List.map inc_to_scope incs) in
k (List.filter (fun (n, _) -> not (List.mem n excludes)) scopes)
let exec_when cond c root excludes keep_going cmd =
let err (_, dir) e =
Log.err (fun m -> m "@[%a: %s@]" Fpath.pp dir e);
Ok B00_cli.Exit.some_error
in
get_scopes c root excludes @@ function scopes ->
let rec loop = function
| [] -> Ok B00_cli.Exit.ok
| (n, p as s) :: ss ->
match cond s with
| Error e -> err s e
| Ok false -> loop ss
| Ok true ->
Log.app begin fun m ->
m "@[%a: %a@]"
Fmt.(code string) n (Fmt.tty [`Faint] Fpath.pp) p
end;
match Os.Cmd.run ~cwd:p cmd with
| Error e when not keep_going -> err s e
| Error _ | Ok () -> Log.app (fun m -> m ""); loop ss
in
loop scopes
let list root excludes format path c =
get_scopes c root excludes @@ function scopes ->
let pp_scope = match path with
| true -> fun ppf (_, dir) -> Fpath.pp_unquoted ppf dir
| false ->
match format with
| `Short -> fun ppf (n, _) -> Fmt.(code string) ppf n
| `Normal | `Long ->
fun ppf (n, dir) ->
Fmt.pf ppf "@[%a %a@]" Fmt.(code string) n Fpath.pp_unquoted dir
in
Log.app (fun m -> m "@[<v>%a@]" Fmt.(list pp_scope) scopes);
Ok B00_cli.Exit.ok
let exec root excludes keep_going tool tool_args c =
let cmd = tool :: tool_args in
exec_when (fun _ -> Ok true) c root excludes keep_going (Cmd.list cmd)
let git root excludes all keep_going full_cmd subcmd subcmd_args c =
let cmd = subcmd :: subcmd_args in
let cmd = if full_cmd then Cmd.list cmd else Cmd.(atom "git" %% list cmd) in
exec_when (is_vcs ~all B00_vcs.Git.find) c root excludes keep_going cmd
let hg root excludes all keep_going full_cmd subcmd subcmd_args c=
let cmd = subcmd :: subcmd_args in
let cmd = if full_cmd then Cmd.list cmd else Cmd.(atom "hg" %% list cmd) in
exec_when (is_vcs ~all B00_vcs.Hg.find) c root excludes keep_going cmd
open Cmdliner
let root =
let doc = "Only consider scopes included by the root B0 file. Those \
recursively included by these are excluded."
in
Arg.(value & flag & info ["root"] ~doc)
let excludes =
let doc = "Exclude scope $(docv) from the request. Repeatable." in
Arg.(value & opt_all string [] & info ["x"; "exclude"] ~doc ~docv:"SCOPE")
let keep_going =
let doc = "Do not stop if a tool invocation exits with non zero." in
Arg.(value & flag & info ["k"; "keep-going"] ~doc)
let full_cmd =
let doc = "Specify a full command rather than a subcommand of the VCS." in
Arg.(value & flag & info ["c"; "full-cmd"] ~doc)
let tool =
let doc = "Invoke tool $(docv)." in
Arg.(required & pos 0 (some string) None & info [] ~doc ~docv:"TOOL")
let all =
let doc = "Apply command to all VCS scopes, not only those that are dirty." in
Arg.(value & flag & info ["a"; "all"] ~doc)
let vcs_subcmd =
let doc = "Invoke VCS subcommand $(docv)." in
Arg.(required & pos 0 (some string) None & info [] ~doc ~docv:"SUBCMD")
let tool_args =
let doc = "Argument for the tool. Start with a $(b,--) \
token otherwise options get interpreted by $(mname)."
in
Arg.(value & pos_right 0 string [] & info [] ~doc ~docv:"ARG")
let list_term =
let path =
let doc = "Only print the scope paths." in
Arg.(value & flag & info ["path"] ~doc)
in
Term.(const list $ root $ excludes $ B0_b0.Cli.format $ path)
let vcs_syn =
"$(mname) $(b,scope) $(tname) [$(i,OPTION)]… $(b,--) $(i,SUBCMD) [$(i,ARG)]…"
let exec =
let doc = "Execute a tool in scope directories" in
let synopsis = `P "$(mname) $(b,scope) $(tname) [$(i,OPTION)]… $(b,--) \
$(i,TOOL) [$(i,ARG)]…"
in
let descr = `P "$(tname) executes $(i,TOOL) with given arguments in the \
directory of each of the scopes. The process is stopped \
if $(i,TOOL) returns with a non zero exit code, use the \
option $(b,--keep-going) to prevent that."
in
B0_b0.Cli.subcmd_with_driver_conf "exec" ~doc ~synopsis ~descr
Term.(const exec $ root $ excludes $ keep_going $ tool $ tool_args)
let hg =
let doc = "Execute $(b,hg) in dirty Mercurial managed scopes" in
let synopsis = `P vcs_syn in
let descr = `P "$(tname) works exactly like $(b,b0 scope git) but with the \
Mercurial VCS, see $(mname) $(b,scope git --help) for
more information"
in
B0_b0.Cli.subcmd_with_driver_conf "hg" ~doc ~synopsis ~descr
Term.(const hg $ root $ excludes $ all $ keep_going $ full_cmd $
vcs_subcmd $ tool_args)
let git =
let doc = "Execute $(b,git) in dirty Git managed scopes" in
let synopsis = `P vcs_syn in
let descr = `Blocks [
`P "$(tname) executes the Git subcommand $(i,SUBCMD) \
with given arguments in the directory of each of the scopes
which are found to be managed by Git and dirty;
or all of them if $(b,--all) is specified.";
`P "If $(b,--full-cmd) is specified the positional arguments specify a
full command like $(b,scope exec) does, not a VCS subcommand.";
`P "The process is stopped if an execution returns with a non zero exit
code, use the option $(b,--keep-going) to prevent that." ]
in
B0_b0.Cli.subcmd_with_driver_conf "git" ~doc ~synopsis ~descr
Term.(const git $ root $ excludes $ all $ keep_going $ full_cmd $
vcs_subcmd $ tool_args)
let list =
let doc = "List scopes (default command)" in
let descr = `P "$(tname) lists scope names and their location. \
If $(b,--path) is specified only paths are listed."
in
let envs = B0_b0.Cli.pager_envs in
B0_b0.Cli.subcmd_with_driver_conf "list" ~doc ~descr ~envs list_term
let subs = [exec; hg; git; list;]
let cmd =
let doc = "Operate on B0 scopes" in
let descr =
`Blocks [
`P "$(tname) operates on scopes. The default command is $(tname) \
$(b,list).";
`P "$(tname) can fold over scope directories and bulk operate \
their VCSs (if applicable) when repositories are dirty. \
Typical usage:";
`P "$(b,> b0)"; `Noblank;
`P "Error: ..."; `Noblank;
`P "$(b,> ... # Fix errors)"; `Noblank;
`P "$(b,> b0)"; `Noblank;
`P "$(b,> b0 scope git -- status)"; `Noblank;
`P "$(b,> b0 scope git -- add -p)"; `Noblank;
`P "$(b,> b0 scope git -- commit -m 'Cope with changes!')"; `Noblank;
`P "$(b,> b0 scope git --all -- push)";
`P "To invoke arbitrary tools in scopes use $(b,b0 scope exec). Options
$(b,--root) and $(b,-x) allow to prune the list of scopes.";
]
in
let default = list_term in
B0_b0.Cli.cmd_group_with_driver_conf "scope" ~doc ~descr ~default subs
---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
b6be41d37af991a67c6b13c62cb0e2040be562581a11dea6b7c3b59bf334cd8c | GNOME/aisleriot | gold-mine.scm | ; AisleRiot - gold_mine.scm
Copyright ( C ) 1998 , 2003 < >
;
; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
You should have received a copy of the GNU General Public License
; along with this program. If not, see </>.
(use-modules (aisleriot interface) (aisleriot api))
(primitive-load-path "klondike")
(define deal-one #f)
(define deal-three #t)
(define kings-only #f)
(define max-redeal 0)
(define tableau '(6 7 8 9 10 11 12))
(define foundation '(2 3 4 5))
(define stock 0)
(define waste 1)
(define (new-game)
(initialize-playing-area)
(set-ace-low)
(make-standard-deck)
(shuffle-deck)
(add-normal-slot DECK 'stock)
(if deal-three
(add-partially-extended-slot '() right 3 'waste)
(add-normal-slot '() 'waste))
(add-blank-slot)
(add-normal-slot '() 'foundation)
(add-normal-slot '() 'foundation)
(add-normal-slot '() 'foundation)
(add-normal-slot '() 'foundation)
(add-carriage-return-slot)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(give-status-message)
(list 7 3.1)
)
(define (get-options)
'())
(define (apply-options options) #f)
(set-lambda! 'new-game new-game)
(set-lambda! 'get-options get-options)
(set-lambda! 'apply-options apply-options)
| null | https://raw.githubusercontent.com/GNOME/aisleriot/5ab7f90d8a196f1fcfe5a552cef4a4c1a4b5ac39/games/gold-mine.scm | scheme | AisleRiot - gold_mine.scm
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>. | Copyright ( C ) 1998 , 2003 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(use-modules (aisleriot interface) (aisleriot api))
(primitive-load-path "klondike")
(define deal-one #f)
(define deal-three #t)
(define kings-only #f)
(define max-redeal 0)
(define tableau '(6 7 8 9 10 11 12))
(define foundation '(2 3 4 5))
(define stock 0)
(define waste 1)
(define (new-game)
(initialize-playing-area)
(set-ace-low)
(make-standard-deck)
(shuffle-deck)
(add-normal-slot DECK 'stock)
(if deal-three
(add-partially-extended-slot '() right 3 'waste)
(add-normal-slot '() 'waste))
(add-blank-slot)
(add-normal-slot '() 'foundation)
(add-normal-slot '() 'foundation)
(add-normal-slot '() 'foundation)
(add-normal-slot '() 'foundation)
(add-carriage-return-slot)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(add-extended-slot '() down 'tableau)
(give-status-message)
(list 7 3.1)
)
(define (get-options)
'())
(define (apply-options options) #f)
(set-lambda! 'new-game new-game)
(set-lambda! 'get-options get-options)
(set-lambda! 'apply-options apply-options)
|
7256f8a9a5f3d7ace766fd1feab2f783e8dc5fab199ad01c4b3e6ea767d2ca65 | kwanghoon/bidi | Main.hs | module Main where
import AST
import Context
import NameGen
import Pretty
import Type
import Worklist
import System.Environment (getArgs, withArgs)
main :: IO ()
main =
do args <- getArgs
if "worklist" `elem` args
then runAlty
else runBidi
benchmark =
[
idnotype,
idunitnotype,
polyidunit,
eid,
idunit, -- Todo: alty bug??
idid,
ididunit
]
runBidi = mapM_ run benchmark
where
run prog =
let (polytype, context) = typesynthClosed prog
str_prog = pretty prog
str_context = pretty context
str_polytype = pretty polytype
in do putStrLn $ "Expr: " ++ str_prog
putStrLn $ "Type: " ++ str_polytype
putStrLn $ "Context: " ++ str_context
putStrLn ""
runAlty = mapM_ run benchmark
where
run prog =
do let ty = altyClosed prog
putStrLn $ "Expr : " ++ pretty prog
putStrLn $ "Type : " ++ pretty ty
| null | https://raw.githubusercontent.com/kwanghoon/bidi/b3a63dcf11995fe1c37dab2fd315522d66cd3e16/app/Main.hs | haskell | Todo: alty bug?? | module Main where
import AST
import Context
import NameGen
import Pretty
import Type
import Worklist
import System.Environment (getArgs, withArgs)
main :: IO ()
main =
do args <- getArgs
if "worklist" `elem` args
then runAlty
else runBidi
benchmark =
[
idnotype,
idunitnotype,
polyidunit,
eid,
idid,
ididunit
]
runBidi = mapM_ run benchmark
where
run prog =
let (polytype, context) = typesynthClosed prog
str_prog = pretty prog
str_context = pretty context
str_polytype = pretty polytype
in do putStrLn $ "Expr: " ++ str_prog
putStrLn $ "Type: " ++ str_polytype
putStrLn $ "Context: " ++ str_context
putStrLn ""
runAlty = mapM_ run benchmark
where
run prog =
do let ty = altyClosed prog
putStrLn $ "Expr : " ++ pretty prog
putStrLn $ "Type : " ++ pretty ty
|
1b063f1ccc798287ed0bb32078de01a5dcb864c9a380c820014560f19accb1b6 | AmpersandTarski/Ampersand | ShowMeatGrinder.hs | # LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
module Ampersand.FSpec.ShowMeatGrinder
( grind,
metaModel,
MetaModel (..),
)
where
import Ampersand.ADL1
import Ampersand.Basics
import Ampersand.Core.A2P_Converters
import Ampersand.Core.ParseTree
import Ampersand.FSpec.FSpec
import Ampersand.FSpec.Transformers
import qualified RIO.Set as Set
import qualified RIO.Text as T
data MetaModel = FormalAmpersand | PrototypeContext
deriving (Eq, Ord, Enum, Bounded, Show)
instance Named MetaModel where
name FormalAmpersand = "Formal Ampersand"
name PrototypeContext = "Prototype context"
-- | This produces the metamodel of either
" FormalAmpersand " or " PrototypeContext " as defined by their transformers .
metaModel :: MetaModel -> P_Context
metaModel mmLabel =
PCtx
{ ctx_nm = "MetaModel" <> T.pack (show mmLabel),
ctx_pos = [],
ctx_lang = Nothing,
ctx_markup = Nothing,
ctx_pats = [],
ctx_rs = [],
ctx_ds = map metarelation (transformers emptyFSpec),
ctx_cs = [],
ctx_ks = [],
ctx_rrules = [],
ctx_reprs = [],
ctx_vs = [],
ctx_gs = [],
ctx_ifcs = [],
ctx_ps = [],
ctx_pops = [],
ctx_metas = [],
ctx_enfs = []
}
where
transformers = case mmLabel of
FormalAmpersand -> transformersFormalAmpersand
PrototypeContext -> transformersPrototypeContext
-- | The 'grind' function lifts a model to the population of a metamodel.
-- The model is "ground" with respect to a metamodel defined in transformersFormalAmpersand,
The result is delivered as a P_Context , so it can be merged with other Ampersand results .
grind :: (FSpec -> [Transformer]) -> FSpec -> P_Context
grind transformers userFspec =
PCtx
{ ctx_nm = "Grinded_" <> name userFspec,
ctx_pos = [],
ctx_lang = Nothing,
ctx_markup = Nothing,
ctx_pats = [],
ctx_rs = [],
ctx_ds = map metarelation filtered,
ctx_cs = [],
ctx_ks = [],
ctx_rrules = [],
ctx_reprs = [],
ctx_vs = [],
ctx_gs = [],
ctx_ifcs = [],
ctx_ps = [],
ctx_pops = map transformer2pop filtered,
ctx_metas = [],
ctx_enfs = []
}
where
filtered :: [Transformer]
filtered = filter (not . null . tPairs) . transformers $ userFspec
metarelation :: Transformer -> P_Relation
metarelation tr =
P_Relation
{ dec_nm = tRel tr,
dec_sign =
P_Sign
(mkPConcept (tSrc tr))
(mkPConcept (tTrg tr)),
dec_prps = aProps2Pprops $ mults tr,
dec_defaults = [],
dec_pragma = Nothing,
dec_Mean = [],
pos = OriginUnknown
}
transformer2pop :: Transformer -> P_Population
transformer2pop tr =
P_RelPopu
{ p_src = Nothing,
p_tgt = Nothing,
TODO trace to origin
p_nmdr =
PNamedRel
TODO trace to origin
p_nrnm = tRel tr,
p_mbSign =
Just
( P_Sign
(mkPConcept (tSrc tr))
(mkPConcept (tTrg tr))
)
},
p_popps = tPairs tr
}
| null | https://raw.githubusercontent.com/AmpersandTarski/Ampersand/054079bd7f6ce40c73e30bd81ecdba5b6b36dba5/src/Ampersand/FSpec/ShowMeatGrinder.hs | haskell | | This produces the metamodel of either
| The 'grind' function lifts a model to the population of a metamodel.
The model is "ground" with respect to a metamodel defined in transformersFormalAmpersand, | # LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
module Ampersand.FSpec.ShowMeatGrinder
( grind,
metaModel,
MetaModel (..),
)
where
import Ampersand.ADL1
import Ampersand.Basics
import Ampersand.Core.A2P_Converters
import Ampersand.Core.ParseTree
import Ampersand.FSpec.FSpec
import Ampersand.FSpec.Transformers
import qualified RIO.Set as Set
import qualified RIO.Text as T
data MetaModel = FormalAmpersand | PrototypeContext
deriving (Eq, Ord, Enum, Bounded, Show)
instance Named MetaModel where
name FormalAmpersand = "Formal Ampersand"
name PrototypeContext = "Prototype context"
" FormalAmpersand " or " PrototypeContext " as defined by their transformers .
metaModel :: MetaModel -> P_Context
metaModel mmLabel =
PCtx
{ ctx_nm = "MetaModel" <> T.pack (show mmLabel),
ctx_pos = [],
ctx_lang = Nothing,
ctx_markup = Nothing,
ctx_pats = [],
ctx_rs = [],
ctx_ds = map metarelation (transformers emptyFSpec),
ctx_cs = [],
ctx_ks = [],
ctx_rrules = [],
ctx_reprs = [],
ctx_vs = [],
ctx_gs = [],
ctx_ifcs = [],
ctx_ps = [],
ctx_pops = [],
ctx_metas = [],
ctx_enfs = []
}
where
transformers = case mmLabel of
FormalAmpersand -> transformersFormalAmpersand
PrototypeContext -> transformersPrototypeContext
The result is delivered as a P_Context , so it can be merged with other Ampersand results .
grind :: (FSpec -> [Transformer]) -> FSpec -> P_Context
grind transformers userFspec =
PCtx
{ ctx_nm = "Grinded_" <> name userFspec,
ctx_pos = [],
ctx_lang = Nothing,
ctx_markup = Nothing,
ctx_pats = [],
ctx_rs = [],
ctx_ds = map metarelation filtered,
ctx_cs = [],
ctx_ks = [],
ctx_rrules = [],
ctx_reprs = [],
ctx_vs = [],
ctx_gs = [],
ctx_ifcs = [],
ctx_ps = [],
ctx_pops = map transformer2pop filtered,
ctx_metas = [],
ctx_enfs = []
}
where
filtered :: [Transformer]
filtered = filter (not . null . tPairs) . transformers $ userFspec
metarelation :: Transformer -> P_Relation
metarelation tr =
P_Relation
{ dec_nm = tRel tr,
dec_sign =
P_Sign
(mkPConcept (tSrc tr))
(mkPConcept (tTrg tr)),
dec_prps = aProps2Pprops $ mults tr,
dec_defaults = [],
dec_pragma = Nothing,
dec_Mean = [],
pos = OriginUnknown
}
transformer2pop :: Transformer -> P_Population
transformer2pop tr =
P_RelPopu
{ p_src = Nothing,
p_tgt = Nothing,
TODO trace to origin
p_nmdr =
PNamedRel
TODO trace to origin
p_nrnm = tRel tr,
p_mbSign =
Just
( P_Sign
(mkPConcept (tSrc tr))
(mkPConcept (tTrg tr))
)
},
p_popps = tPairs tr
}
|
7275d67c5e3ca999a545d3eee750a4ac33b0ced62dc84f973071ee910d729785 | funcool/promesa | exec_bulkhead_test.clj | (ns promesa.tests.exec-bulkhead-test
(:require
[promesa.core :as p]
[promesa.exec.bulkhead :as pbh]
[promesa.exec :as px]
[promesa.util :as pu]
[clojure.test :as t]))
(def ^:dynamic *executor* nil)
(t/use-fixtures :each (fn [next]
(binding [px/*default-executor* (px/forkjoin-executor)]
;; (prn "PRE" px/*default-executor*)
(next)
;; (prn "POST" px/*default-executor*)
(.shutdown ^java.util.concurrent.ExecutorService px/*default-executor*))))
(defn timing-fn
"Create a measurement checkpoint for time measurement of potentially
asynchronous flow."
[]
(let [p1 (System/nanoTime)]
#(- (System/nanoTime) p1)))
(defn waiting-fn
([] (waiting-fn 200))
([ms] #(do
(px/sleep ms)
(rand-int 100))))
;; (t/deftest basic-operations-submit
( let [ instance ( pbh / create { : permits 1 : type : executor } ) ]
( let [ res ( px / submit ! instance ( timing - fn ) ) ]
;; (t/is (p/promise? res))
( t / is ( < @res 10000000 ) ) ) ) )
(t/deftest operations-with-executor-bulkhead
(let [instance (pbh/create {:permits 1 :queue 1 :type :executor})
res1 (pu/try! (px/submit! instance (waiting-fn 1000)))
res2 (pu/try! (px/submit! instance (waiting-fn 200)))
res3 (pu/try! (px/submit! instance (waiting-fn 200)))
]
(t/is (p/promise? res1))
(t/is (p/promise? res2))
(t/is (instance? Throwable res3))
(t/is (p/pending? res1))
(t/is (p/pending? res2))
(t/is (pos? (deref res1 2000 -1)))
(t/is (pos? (deref res2 2000 -1)))
(let [data (ex-data res3)]
(t/is (= :bulkhead-error (:type data)))
(t/is (= :capacity-limit-reached (:code data))))))
(t/deftest operations-with-semaphore-bulkhead
(let [instance (pbh/create {:permits 1 :queue 1 :type :semaphore})
res1 (px/with-dispatch :thread
(px/submit! instance (waiting-fn 1000)))
res2 (px/with-dispatch :thread
(px/submit! instance (waiting-fn 200)))
res3 (px/with-dispatch :thread
(px/submit! instance (waiting-fn 200)))
]
(t/is (p/promise? res1))
(t/is (p/promise? res2))
(t/is (p/promise? res3))
(t/is (p/pending? res1))
(t/is (p/pending? res2))
(p/await res3)
(t/is (p/rejected? res3))
(t/is (pos? (deref res1 2000 -1)))
(t/is (pos? (deref res2 2000 -1)))
(t/is (thrown? java.util.concurrent.ExecutionException (deref res3)))
(let [data (ex-data (p/extract res3))]
(t/is (= :bulkhead-error (:type data)))
(t/is (= :capacity-limit-reached (:code data))))))
| null | https://raw.githubusercontent.com/funcool/promesa/5c3ba347f6be1e22d48e1dc70488b755f5e33920/test/promesa/tests/exec_bulkhead_test.clj | clojure | (prn "PRE" px/*default-executor*)
(prn "POST" px/*default-executor*)
(t/deftest basic-operations-submit
(t/is (p/promise? res)) | (ns promesa.tests.exec-bulkhead-test
(:require
[promesa.core :as p]
[promesa.exec.bulkhead :as pbh]
[promesa.exec :as px]
[promesa.util :as pu]
[clojure.test :as t]))
(def ^:dynamic *executor* nil)
(t/use-fixtures :each (fn [next]
(binding [px/*default-executor* (px/forkjoin-executor)]
(next)
(.shutdown ^java.util.concurrent.ExecutorService px/*default-executor*))))
(defn timing-fn
"Create a measurement checkpoint for time measurement of potentially
asynchronous flow."
[]
(let [p1 (System/nanoTime)]
#(- (System/nanoTime) p1)))
(defn waiting-fn
([] (waiting-fn 200))
([ms] #(do
(px/sleep ms)
(rand-int 100))))
( let [ instance ( pbh / create { : permits 1 : type : executor } ) ]
( let [ res ( px / submit ! instance ( timing - fn ) ) ]
( t / is ( < @res 10000000 ) ) ) ) )
(t/deftest operations-with-executor-bulkhead
(let [instance (pbh/create {:permits 1 :queue 1 :type :executor})
res1 (pu/try! (px/submit! instance (waiting-fn 1000)))
res2 (pu/try! (px/submit! instance (waiting-fn 200)))
res3 (pu/try! (px/submit! instance (waiting-fn 200)))
]
(t/is (p/promise? res1))
(t/is (p/promise? res2))
(t/is (instance? Throwable res3))
(t/is (p/pending? res1))
(t/is (p/pending? res2))
(t/is (pos? (deref res1 2000 -1)))
(t/is (pos? (deref res2 2000 -1)))
(let [data (ex-data res3)]
(t/is (= :bulkhead-error (:type data)))
(t/is (= :capacity-limit-reached (:code data))))))
(t/deftest operations-with-semaphore-bulkhead
(let [instance (pbh/create {:permits 1 :queue 1 :type :semaphore})
res1 (px/with-dispatch :thread
(px/submit! instance (waiting-fn 1000)))
res2 (px/with-dispatch :thread
(px/submit! instance (waiting-fn 200)))
res3 (px/with-dispatch :thread
(px/submit! instance (waiting-fn 200)))
]
(t/is (p/promise? res1))
(t/is (p/promise? res2))
(t/is (p/promise? res3))
(t/is (p/pending? res1))
(t/is (p/pending? res2))
(p/await res3)
(t/is (p/rejected? res3))
(t/is (pos? (deref res1 2000 -1)))
(t/is (pos? (deref res2 2000 -1)))
(t/is (thrown? java.util.concurrent.ExecutionException (deref res3)))
(let [data (ex-data (p/extract res3))]
(t/is (= :bulkhead-error (:type data)))
(t/is (= :capacity-limit-reached (:code data))))))
|
129a4f9da135deb4f8619657bf477bedbc59a22969a2eb34f6980ac14cec6cae | hasktorch/hasktorch | Main.hs | module Main where
import Torch
import Torch.NN.Recurrent.Cell.Elman
convTest = do
input : minibatch , channels , input width
input <- randnIO' [1, 2, 5]
-- weights: out channels, in channels, kernel width
let weights =
asTensor
( [ [[0, 1, 0], [0, 1, 0]],
[[0, 1, 0], [0, 0, 1]]
] ::
[[[Float]]]
)
let bias = zeros' [2] -- bias: out channels
let output = conv1d' weights bias 1 1 input
putStrLn "input"
print $ squeezeAll input
putStrLn "kernel"
print $ squeezeAll weights
putStrLn "output"
print $ squeezeAll output
convTest' input = do
weights : ( 2 output channels , 3 input channels , 3 width kernel )
let weights =
asTensor
( [ [[0, 1, 0], [0, 1, 0], [0, 1, 0]],
[[0, 1, 0], [0, 0, 1], [0, 1, 0]]
] ::
[[[Float]]]
)
let bias = zeros' [2] -- bias: out channels
let output = conv1d' weights bias 1 1 input
putStrLn "input"
print $ squeezeAll input
putStrLn "kernel"
print $ squeezeAll weights
putStrLn "output"
print $ squeezeAll output
pure output
embedTest :: IO Tensor
embedTest = do
let dic = asTensor ([[1, 2, 3], [4, 5, 6]] :: [[Float]])
let indices = asTensor ([0, 0, 1, 0, 1] :: [Int])
let x = embedding' dic indices
this results in 5 x 3 where
5 = input width , 3 = # channels
pure $ reshape [1, 3, 5] $ transpose2D x
rnnTest :: IO Tensor
rnnTest = do
let hx = zeros' [hsz]
let wih = zeros' [hsz, isz]
let whh = zeros' [hsz, hsz]
let bih = zeros' [hsz]
let bhh = zeros' [hsz]
input <- randnIO' [isz]
pure $ rnnReluCell wih whh bih bhh hx input
where
hsz = 5 -- hidden dimensions
isz = 3 -- input dimensions
lstmTest :: IO (Tensor, Tensor)
lstmTest = do
let hx = (zeros' [bsz, hsz], zeros' [bsz, hsz])
let wih = full' [hsz * 4, isz] (1.0 :: Float)
let whh = full' [hsz * 4, hsz] (1.0 :: Float)
let bih = full' [hsz * 4] (1.0 :: Float)
let bhh = full' [hsz * 4] (1.0 :: Float)
let input = full' [bsz, isz] (1.0 :: Float)
pure $ lstmCell wih whh bih bhh hx input
where
bsz = 3 -- batch size
hsz = 2 -- hidden dimensions
isz = 5 -- input dimensions
main = do
x <- embedTest
putStrLn "Embeddings Shape"
print $ shape x
-- Convolutions
putStrLn "\nConvolution"
outputs <- convTest' x
print outputs
RNN Cells
putStrLn "\nElman"
print =<< rnnTest
putStrLn "\nLSTM"
print =<< lstmTest
-- cosineSimilarity
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/c34996b0a401a5b1b98b5774e892fde88adaa079/experimental/untyped-nlp/Main.hs | haskell | weights: out channels, in channels, kernel width
bias: out channels
bias: out channels
hidden dimensions
input dimensions
batch size
hidden dimensions
input dimensions
Convolutions
cosineSimilarity | module Main where
import Torch
import Torch.NN.Recurrent.Cell.Elman
convTest = do
input : minibatch , channels , input width
input <- randnIO' [1, 2, 5]
let weights =
asTensor
( [ [[0, 1, 0], [0, 1, 0]],
[[0, 1, 0], [0, 0, 1]]
] ::
[[[Float]]]
)
let output = conv1d' weights bias 1 1 input
putStrLn "input"
print $ squeezeAll input
putStrLn "kernel"
print $ squeezeAll weights
putStrLn "output"
print $ squeezeAll output
convTest' input = do
weights : ( 2 output channels , 3 input channels , 3 width kernel )
let weights =
asTensor
( [ [[0, 1, 0], [0, 1, 0], [0, 1, 0]],
[[0, 1, 0], [0, 0, 1], [0, 1, 0]]
] ::
[[[Float]]]
)
let output = conv1d' weights bias 1 1 input
putStrLn "input"
print $ squeezeAll input
putStrLn "kernel"
print $ squeezeAll weights
putStrLn "output"
print $ squeezeAll output
pure output
embedTest :: IO Tensor
embedTest = do
let dic = asTensor ([[1, 2, 3], [4, 5, 6]] :: [[Float]])
let indices = asTensor ([0, 0, 1, 0, 1] :: [Int])
let x = embedding' dic indices
this results in 5 x 3 where
5 = input width , 3 = # channels
pure $ reshape [1, 3, 5] $ transpose2D x
rnnTest :: IO Tensor
rnnTest = do
let hx = zeros' [hsz]
let wih = zeros' [hsz, isz]
let whh = zeros' [hsz, hsz]
let bih = zeros' [hsz]
let bhh = zeros' [hsz]
input <- randnIO' [isz]
pure $ rnnReluCell wih whh bih bhh hx input
where
lstmTest :: IO (Tensor, Tensor)
lstmTest = do
let hx = (zeros' [bsz, hsz], zeros' [bsz, hsz])
let wih = full' [hsz * 4, isz] (1.0 :: Float)
let whh = full' [hsz * 4, hsz] (1.0 :: Float)
let bih = full' [hsz * 4] (1.0 :: Float)
let bhh = full' [hsz * 4] (1.0 :: Float)
let input = full' [bsz, isz] (1.0 :: Float)
pure $ lstmCell wih whh bih bhh hx input
where
main = do
x <- embedTest
putStrLn "Embeddings Shape"
print $ shape x
putStrLn "\nConvolution"
outputs <- convTest' x
print outputs
RNN Cells
putStrLn "\nElman"
print =<< rnnTest
putStrLn "\nLSTM"
print =<< lstmTest
|
36f70ff23f9ef904c3b6d6d037e747d255e9e34a000d500035044fb6722ebe05 | rtoy/ansi-cl-tests | ba-test-package.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Sat May 28 06:38:29 2005
;;;; Contains: Definition of BA-TEST package.
(in-package :cl-user)
(let* ((name :ba-test)
(pkg (find-package name)))
(unless pkg (setq pkg (make-package name :use '(:cl :regression-test
:cl-test))))
(let ((*package* pkg))
(shadow '(#:handler-case #:handler-bind))
(import '(common-lisp-user::compile-and-load) pkg)
(import '(cl-test::*universe* cl-test::*mini-universe*) pkg)
)
(let ((s (find-symbol "QUIT" "CL-USER")))
(when s (import s :ba-test))))
| null | https://raw.githubusercontent.com/rtoy/ansi-cl-tests/9708f3977220c46def29f43bb237e97d62033c1d/beyond-ansi/ba-test-package.lsp | lisp | -*- Mode: Lisp -*-
Contains: Definition of BA-TEST package. | Author :
Created : Sat May 28 06:38:29 2005
(in-package :cl-user)
(let* ((name :ba-test)
(pkg (find-package name)))
(unless pkg (setq pkg (make-package name :use '(:cl :regression-test
:cl-test))))
(let ((*package* pkg))
(shadow '(#:handler-case #:handler-bind))
(import '(common-lisp-user::compile-and-load) pkg)
(import '(cl-test::*universe* cl-test::*mini-universe*) pkg)
)
(let ((s (find-symbol "QUIT" "CL-USER")))
(when s (import s :ba-test))))
|
950919677418c3fb7a8e16f525ed02e5ff0fc9e2afdb53e57c3c53ece784ec69 | YoshikuniJujo/funpaala | safeRecip.hs | safeRecip 0 = Nothing
safeRecip x = Just (1 / x)
| null | https://raw.githubusercontent.com/YoshikuniJujo/funpaala/5366130826da0e6b1180992dfff94c4a634cda99/samples/05_function/safeRecip.hs | haskell | safeRecip 0 = Nothing
safeRecip x = Just (1 / x)
| |
fac7e201e8de98fb96f23c7863604c74ebd8115b103a43c9fdf37bc24e09dbad | DavidAlphaFox/RabbitMQ | rabbit_auth_mechanism_ssl_app.erl | The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
The Original Code is RabbitMQ .
%%
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2007 - 2014 GoPivotal , Inc. All rights reserved .
%%
-module(rabbit_auth_mechanism_ssl_app).
-behaviour(application).
-export([start/2, stop/1]).
Dummy supervisor - see comment at
-library-applications-without-processes-td2094473.html
-behaviour(supervisor).
-export([init/1]).
start(normal, []) ->
supervisor:start_link({local,?MODULE},?MODULE,[]).
stop(_State) ->
ok.
init([]) ->
{ok, {{one_for_one,3,10},[]}}.
| null | https://raw.githubusercontent.com/DavidAlphaFox/RabbitMQ/0a64e6f0464a9a4ce85c6baa52fb1c584689f49a/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl_app.erl | erlang | Version 1.1 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License
at /
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and
limitations under the License.
| The contents of this file are subject to the Mozilla Public License
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ .
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2007 - 2014 GoPivotal , Inc. All rights reserved .
-module(rabbit_auth_mechanism_ssl_app).
-behaviour(application).
-export([start/2, stop/1]).
Dummy supervisor - see comment at
-library-applications-without-processes-td2094473.html
-behaviour(supervisor).
-export([init/1]).
start(normal, []) ->
supervisor:start_link({local,?MODULE},?MODULE,[]).
stop(_State) ->
ok.
init([]) ->
{ok, {{one_for_one,3,10},[]}}.
|
2c49901afe32be0a83dc4000a2c610671de831f7a8b5225a66e3c66b493a1c4e | max-au/erlperf | erlperf_sup.erl | ( C ) 2019 - 2023 ,
@private
%%% Top-level supervisor. Always starts process group scope
%%% for `erlperf'. Depending on the configuration starts
%%% a number of jobs or a cluster-wide monitoring solution.
-module(erlperf_sup).
-author("").
-behaviour(supervisor).
-export([
start_link/0,
init/1
]).
-spec start_link() -> supervisor:startlink_ret().
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec init([]) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init([]) ->
SupFlags = #{strategy => rest_for_one, intensity => 2, period => 60},
ChildSpecs = [
%% start own pg scope, needed for cluster-wide operations
%% even if the node-wide monitoring is not running, the scope
%% needs to be up to send "job started" events for the cluster
#{
id => pg,
start => {pg, start_link, [erlperf]},
modules => [pg]
},
%% monitoring
#{
id => erlperf_monitor,
start => {erlperf_monitor, start_link, []},
modules => [erlperf_monitor]
},
%% supervisor for statically started jobs
#{
id => erlperf_job_sup,
start => {erlperf_job_sup, start_link, []},
type => supervisor,
modules => [erlperf_job_sup]
}],
{ok, {SupFlags, ChildSpecs}}.
| null | https://raw.githubusercontent.com/max-au/erlperf/430310d3141d668c402a9900c9d19486ceb27493/src/erlperf_sup.erl | erlang | Top-level supervisor. Always starts process group scope
for `erlperf'. Depending on the configuration starts
a number of jobs or a cluster-wide monitoring solution.
start own pg scope, needed for cluster-wide operations
even if the node-wide monitoring is not running, the scope
needs to be up to send "job started" events for the cluster
monitoring
supervisor for statically started jobs | ( C ) 2019 - 2023 ,
@private
-module(erlperf_sup).
-author("").
-behaviour(supervisor).
-export([
start_link/0,
init/1
]).
-spec start_link() -> supervisor:startlink_ret().
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec init([]) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init([]) ->
SupFlags = #{strategy => rest_for_one, intensity => 2, period => 60},
ChildSpecs = [
#{
id => pg,
start => {pg, start_link, [erlperf]},
modules => [pg]
},
#{
id => erlperf_monitor,
start => {erlperf_monitor, start_link, []},
modules => [erlperf_monitor]
},
#{
id => erlperf_job_sup,
start => {erlperf_job_sup, start_link, []},
type => supervisor,
modules => [erlperf_job_sup]
}],
{ok, {SupFlags, ChildSpecs}}.
|
fac435b3f97f7f662e7243a90ac781177ba7f19e3ae8c40b66bdf6564978f131 | jaspervdj/psqueues | OrdPSQ.hs | | An ' OrdPSQ ' uses the ' ' instance of the key type to build a priority
-- search queue.
--
It is based on work .
--
* , R. , A Simple Implementation Technique for Priority Search Queues ,
ICFP 2001 , pp . 110 - 121
--
-- <>
--
-- This means it is similar to the
< -1.1 PSQueue > package but
-- our benchmarks showed it perform quite a bit faster.
{-# LANGUAGE Safe #-}
# LANGUAGE ScopedTypeVariables #
module Data.OrdPSQ
( -- * Type
OrdPSQ
-- * Query
, null
, size
, member
, lookup
, findMin
-- * Construction
, empty
, singleton
-- * Insertion
, insert
-- * Delete/Update
, delete
, deleteMin
, alter
, alterMin
-- * Conversion
, fromList
, toList
, toAscList
, keys
-- * Views
, insertView
, deleteView
, minView
, atMostView
*
, map
, unsafeMapMonotonic
, fold'
-- * Validity check
, valid
) where
import Prelude hiding (foldr, lookup, map, null)
import Data.OrdPSQ.Internal
| null | https://raw.githubusercontent.com/jaspervdj/psqueues/f6c8e112a7e6bc3e75303d87473f72e3c34822ec/src/Data/OrdPSQ.hs | haskell | search queue.
<>
This means it is similar to the
our benchmarks showed it perform quite a bit faster.
# LANGUAGE Safe #
* Type
* Query
* Construction
* Insertion
* Delete/Update
* Conversion
* Views
* Validity check | | An ' OrdPSQ ' uses the ' ' instance of the key type to build a priority
It is based on work .
* , R. , A Simple Implementation Technique for Priority Search Queues ,
ICFP 2001 , pp . 110 - 121
< -1.1 PSQueue > package but
# LANGUAGE ScopedTypeVariables #
module Data.OrdPSQ
OrdPSQ
, null
, size
, member
, lookup
, findMin
, empty
, singleton
, insert
, delete
, deleteMin
, alter
, alterMin
, fromList
, toList
, toAscList
, keys
, insertView
, deleteView
, minView
, atMostView
*
, map
, unsafeMapMonotonic
, fold'
, valid
) where
import Prelude hiding (foldr, lookup, map, null)
import Data.OrdPSQ.Internal
|
988c987d369817eaac5c21f4af3e54631f4bc371f69316d9c0ae3f48dcea1ac9 | PJK/haskell-pattern-matching | IntOps.hs | module IntOps where
Not exhaustive and the first clause is redundant because it 's always false
f :: Int -> Int
f x | 2 `div` 1 == 1 = 1
Second is redundant
g :: Int -> Int
g x | 2 `rem` 3 == 2 = 1
| otherwise = 2
-- Same but with infix
h :: Int -> Int
h x | 2 `rem` 3 == 2 = 1
| otherwise = 2
| null | https://raw.githubusercontent.com/PJK/haskell-pattern-matching/7ef4a45731beba92ee01614ae563df65be36e8ba/data/exact/IntOps.hs | haskell | Same but with infix | module IntOps where
Not exhaustive and the first clause is redundant because it 's always false
f :: Int -> Int
f x | 2 `div` 1 == 1 = 1
Second is redundant
g :: Int -> Int
g x | 2 `rem` 3 == 2 = 1
| otherwise = 2
h :: Int -> Int
h x | 2 `rem` 3 == 2 = 1
| otherwise = 2
|
5e79e5529b2bddc087937a2502b6143640db259ddc88833cc5ca44ebfe141234 | viercc/kitchen-sink-hs | Heap.hs | # OPTIONS_GHC -Wall #
module Heap where
import Prelude hiding (repeat, zipWith, (!!))
import Data.Bits
data Heap a = Heap a (Heap a) (Heap a)
deriving (Show)
instance Functor Heap where
fmap f = go
where go (Heap a al ar) = Heap (f a) (go al) (go ar)
instance Applicative Heap where
pure = repeat
(<*>) = zipWith id
(<*) = const
(*>) = const id
instance Monad Heap where
return = pure
Heap a l r >>= f = Heap b l' r'
where
Heap b _ _ = f a
Heap _ l' _ = l >>= f
Heap _ _ r' = r >>= f
repeat :: a -> Heap a
repeat a = let as = Heap a as as in as
zipWith :: (a -> b -> c) -> Heap a -> Heap b -> Heap c
zipWith f = go
where go (Heap a as1 as2) (Heap b bs1 bs2) =
Heap (f a b) (go as1 bs1) (go as2 bs2)
genericIndex :: (Integral b, Bits b) => Heap a -> b -> a
genericIndex (Heap zero neg pos) n
| n < 0 = at' neg (negate n)
| n == 0 = zero
| otherwise = at' pos n
where
at' (Heap a _ _) 1 = a
at' (Heap _ l r) k =
let next = if k .&. 1 == 0 then l else r
in at' next (k `unsafeShiftR` 1)
(!!) :: Heap a -> Int -> a
(!!) = genericIndex
integerTable :: Heap Integer
integerTable = Heap 0 (fmap negate natTable) natTable
natTable :: Heap Integer
natTable = Heap 1 (fmap l natTable) (fmap r natTable)
where
l n = n `unsafeShiftL` 1
r n = (n `unsafeShiftL` 1) .|. 1
| null | https://raw.githubusercontent.com/viercc/kitchen-sink-hs/5038b17a39e4e6f19e6fb4779a7c8aaddf64d922/experiment/src/Heap.hs | haskell | # OPTIONS_GHC -Wall #
module Heap where
import Prelude hiding (repeat, zipWith, (!!))
import Data.Bits
data Heap a = Heap a (Heap a) (Heap a)
deriving (Show)
instance Functor Heap where
fmap f = go
where go (Heap a al ar) = Heap (f a) (go al) (go ar)
instance Applicative Heap where
pure = repeat
(<*>) = zipWith id
(<*) = const
(*>) = const id
instance Monad Heap where
return = pure
Heap a l r >>= f = Heap b l' r'
where
Heap b _ _ = f a
Heap _ l' _ = l >>= f
Heap _ _ r' = r >>= f
repeat :: a -> Heap a
repeat a = let as = Heap a as as in as
zipWith :: (a -> b -> c) -> Heap a -> Heap b -> Heap c
zipWith f = go
where go (Heap a as1 as2) (Heap b bs1 bs2) =
Heap (f a b) (go as1 bs1) (go as2 bs2)
genericIndex :: (Integral b, Bits b) => Heap a -> b -> a
genericIndex (Heap zero neg pos) n
| n < 0 = at' neg (negate n)
| n == 0 = zero
| otherwise = at' pos n
where
at' (Heap a _ _) 1 = a
at' (Heap _ l r) k =
let next = if k .&. 1 == 0 then l else r
in at' next (k `unsafeShiftR` 1)
(!!) :: Heap a -> Int -> a
(!!) = genericIndex
integerTable :: Heap Integer
integerTable = Heap 0 (fmap negate natTable) natTable
natTable :: Heap Integer
natTable = Heap 1 (fmap l natTable) (fmap r natTable)
where
l n = n `unsafeShiftL` 1
r n = (n `unsafeShiftL` 1) .|. 1
| |
020d31ed9b3d158ef28bbac5fb423aa94ec5e96d3660e74196645cad90820733 | scsibug/feedparser-clj | core.clj | (ns feedparser-clj.test.core
(:import [com.sun.syndication.io SyndFeedInput XmlReader]
[java.net URL]
[java.io InputStreamReader]
[com.sun.syndication.feed.synd SyndFeed])
(:require [feedparser-clj.core :refer :all :reload true]
[clojure.test :refer :all]))
(defn load-feed-fixture [name]
(str (clojure.java.io/resource (format "fixtures/%s" name))))
(deftest parse-test
(let [pf (parse-feed (load-feed-fixture "gonzih-blog.xml"))]
(testing :feed
(is (= (-> pf :author) " (Max Gonzih)"))
(is (= (-> pf :categories) []))
(is (= (-> pf :contributors) []))
(is (= (-> pf :entry-links) []))
(is (= (-> pf :image) nil))
(is (= (-> pf :copyright) "This work is licensed under a Creative Commons Attribution 4.0 International License."))
(is (= (-> pf :description) "Recent content on Max Gonzih"))
(is (= (-> pf :encoding) nil))
(is (= (-> pf :feed-type) "rss_2.0"))
(is (= (-> pf :language) "en-us"))
(is (= (-> pf :link) ""))
(is (= (-> pf :published-date) #inst "2015-12-11T00:00:00.000-00:00"))
(is (= (-> pf :title) "Max Gonzih"))
(is (= (-> pf :uri) nil)))
(testing :entry
(is (= (-> pf :entries count) 15))
(let [entry (-> pf :entries first)]
(is (= (:authors entry) []))
(is (= (:categories entry) []))
(is (= (:contributors entry) []))
(is (= (:enclosures entry) []))
(is (= (:contents entry) []))
(is (= "text/html" (:type (:description entry))))
(is (re-find #"Collection of tweaks that I gathered after installing Arch.*" (:value (:description entry))))
(is (= (:author entry) " (Max Gonzih)"))
(is (= (:link entry) "-linux-on-lenovo-ideapad-y700-15/"))
(is (= (:published-date entry) #inst "2015-12-11T00:00:00.000-00:00"))
(is (= (:title entry) "Arch Linux on Lenovo IdeaPad Y700 15\""))
(is (= (:updated-date entry) nil))
(is (= (:uri entry) "-linux-on-lenovo-ideapad-y700-15/"))))))
| null | https://raw.githubusercontent.com/scsibug/feedparser-clj/6dd3aa07ee9e41c2a723892b90756966dc1d4e62/test/feedparser_clj/test/core.clj | clojure | (ns feedparser-clj.test.core
(:import [com.sun.syndication.io SyndFeedInput XmlReader]
[java.net URL]
[java.io InputStreamReader]
[com.sun.syndication.feed.synd SyndFeed])
(:require [feedparser-clj.core :refer :all :reload true]
[clojure.test :refer :all]))
(defn load-feed-fixture [name]
(str (clojure.java.io/resource (format "fixtures/%s" name))))
(deftest parse-test
(let [pf (parse-feed (load-feed-fixture "gonzih-blog.xml"))]
(testing :feed
(is (= (-> pf :author) " (Max Gonzih)"))
(is (= (-> pf :categories) []))
(is (= (-> pf :contributors) []))
(is (= (-> pf :entry-links) []))
(is (= (-> pf :image) nil))
(is (= (-> pf :copyright) "This work is licensed under a Creative Commons Attribution 4.0 International License."))
(is (= (-> pf :description) "Recent content on Max Gonzih"))
(is (= (-> pf :encoding) nil))
(is (= (-> pf :feed-type) "rss_2.0"))
(is (= (-> pf :language) "en-us"))
(is (= (-> pf :link) ""))
(is (= (-> pf :published-date) #inst "2015-12-11T00:00:00.000-00:00"))
(is (= (-> pf :title) "Max Gonzih"))
(is (= (-> pf :uri) nil)))
(testing :entry
(is (= (-> pf :entries count) 15))
(let [entry (-> pf :entries first)]
(is (= (:authors entry) []))
(is (= (:categories entry) []))
(is (= (:contributors entry) []))
(is (= (:enclosures entry) []))
(is (= (:contents entry) []))
(is (= "text/html" (:type (:description entry))))
(is (re-find #"Collection of tweaks that I gathered after installing Arch.*" (:value (:description entry))))
(is (= (:author entry) " (Max Gonzih)"))
(is (= (:link entry) "-linux-on-lenovo-ideapad-y700-15/"))
(is (= (:published-date entry) #inst "2015-12-11T00:00:00.000-00:00"))
(is (= (:title entry) "Arch Linux on Lenovo IdeaPad Y700 15\""))
(is (= (:updated-date entry) nil))
(is (= (:uri entry) "-linux-on-lenovo-ideapad-y700-15/"))))))
| |
e90ec55fda19b5acbaf7471dbb165252b963c199797057cf887d4c0005531bfe | spurious/sagittarius-scheme-mirror | generators-and-accumulators.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;;;
;;; srfi/%3a158/generators-and-accumulators.scm - Generators and Accumulators
;;;
Copyright ( c ) 2017 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
(library (srfi :158 generators-and-accumulators)
(export generator circular-generator make-iota-generator
make-range-generator make-coroutine-generator
list->generator vector->generator
reverse-vector->generator string->generator
bytevector->generator
make-for-each-generator make-unfold-generator
gcons* gappend gcombine gfilter gremove
gtake gdrop gtake-while gdrop-while
gflatten ggroup gmerge gmap gstate-filter
gdelete gdelete-neighbor-dups gindex gselect gpath
generator->list generator->reverse-list
generator->vector generator->vector! generator->string
generator-fold generator-map->list generator-for-each generator-find
generator-count generator-any generator-every generator-unfold
make-accumulator count-accumulator list-accumulator
reverse-list-accumulator vector-accumulator
reverse-vector-accumulator vector-accumulator!
string-accumulator bytevector-accumulator bytevector-accumulator!
sum-accumulator product-accumulator)
(import (rnrs)
(srfi :121 generators)
(srfi :133 vectors)
(sagittarius)
(sagittarius generators))
(define (ggroup gen k :optional padding)
(if (undefined? padding)
(gslices gen k)
(gslices gen k #t padding)))
(define (generator-map->list proc gen . opts)
(generator->list (apply gmap proc gen opts)))
(define (make-accumulator kons knil finalizer)
(lambda (v)
(if (eof-object? v)
(finalizer knil)
(set! knil (kons v knil)))))
(define (count-accumulator)
(make-accumulator (lambda (_ count) (+ 1 count)) 0 values))
(define (list-accumulator) (make-accumulator cons '() reverse!))
(define (reverse-list-accumulator) (make-accumulator cons '() values))
(define (vector-accumulator) (make-accumulator cons '() reverse-list->vector))
(define (reverse-vector-accumulator) (make-accumulator cons '() list->vector))
(define (vector-accumulator! vector at)
(make-accumulator (lambda (v index)
(vector-set! vector index v)
(+ index 1))
at
(lambda (_) vector)))
(define (string-accumulator)
(let-values (((out extract) (open-string-output-port)))
(make-accumulator (lambda (c p) (put-char p c) p)
out
(lambda (_) (extract)))))
(define (bytevector-accumulator)
(let-values (((out extract) (open-bytevector-output-port)))
(make-accumulator (lambda (c p) (put-u8 p c) p)
out
(lambda (_) (extract)))))
(define (bytevector-accumulator! bv at)
(make-accumulator (lambda (v index)
(bytevector-u8-set! bv index v)
(+ index 1))
at
(lambda (_) bv)))
(define (sum-accumulator) (make-accumulator + 0 values))
(define (product-accumulator) (make-accumulator * 1 values))
)
| null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/srfi/%253a158/generators-and-accumulators.scm | scheme | coding : utf-8 ; -*-
srfi/%3a158/generators-and-accumulators.scm - Generators and Accumulators
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Copyright ( c ) 2017 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
(library (srfi :158 generators-and-accumulators)
(export generator circular-generator make-iota-generator
make-range-generator make-coroutine-generator
list->generator vector->generator
reverse-vector->generator string->generator
bytevector->generator
make-for-each-generator make-unfold-generator
gcons* gappend gcombine gfilter gremove
gtake gdrop gtake-while gdrop-while
gflatten ggroup gmerge gmap gstate-filter
gdelete gdelete-neighbor-dups gindex gselect gpath
generator->list generator->reverse-list
generator->vector generator->vector! generator->string
generator-fold generator-map->list generator-for-each generator-find
generator-count generator-any generator-every generator-unfold
make-accumulator count-accumulator list-accumulator
reverse-list-accumulator vector-accumulator
reverse-vector-accumulator vector-accumulator!
string-accumulator bytevector-accumulator bytevector-accumulator!
sum-accumulator product-accumulator)
(import (rnrs)
(srfi :121 generators)
(srfi :133 vectors)
(sagittarius)
(sagittarius generators))
(define (ggroup gen k :optional padding)
(if (undefined? padding)
(gslices gen k)
(gslices gen k #t padding)))
(define (generator-map->list proc gen . opts)
(generator->list (apply gmap proc gen opts)))
(define (make-accumulator kons knil finalizer)
(lambda (v)
(if (eof-object? v)
(finalizer knil)
(set! knil (kons v knil)))))
(define (count-accumulator)
(make-accumulator (lambda (_ count) (+ 1 count)) 0 values))
(define (list-accumulator) (make-accumulator cons '() reverse!))
(define (reverse-list-accumulator) (make-accumulator cons '() values))
(define (vector-accumulator) (make-accumulator cons '() reverse-list->vector))
(define (reverse-vector-accumulator) (make-accumulator cons '() list->vector))
(define (vector-accumulator! vector at)
(make-accumulator (lambda (v index)
(vector-set! vector index v)
(+ index 1))
at
(lambda (_) vector)))
(define (string-accumulator)
(let-values (((out extract) (open-string-output-port)))
(make-accumulator (lambda (c p) (put-char p c) p)
out
(lambda (_) (extract)))))
(define (bytevector-accumulator)
(let-values (((out extract) (open-bytevector-output-port)))
(make-accumulator (lambda (c p) (put-u8 p c) p)
out
(lambda (_) (extract)))))
(define (bytevector-accumulator! bv at)
(make-accumulator (lambda (v index)
(bytevector-u8-set! bv index v)
(+ index 1))
at
(lambda (_) bv)))
(define (sum-accumulator) (make-accumulator + 0 values))
(define (product-accumulator) (make-accumulator * 1 values))
)
|
24793959054573d773aa624c12a55ce0af58d2a94a4281017cb24da6e4681298 | Bogdanp/koyo | dispatch.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/parse)
racket/function
web-server/dispatch
web-server/dispatchers/dispatch)
(provide
dispatch-rules+roles)
(begin-for-syntax
(define-syntax-class dispatch-fun
(pattern fun:id
#:attr name #''fun)
(pattern (fun:dispatch-fun e ...)
#:attr name (attribute fun.name))))
(define-syntax (dispatch-rules+roles stx)
(syntax-parse stx
#:literals (else)
[(_
[pat
(~alt (~optional (~seq #:method method) #:defaults ([method #'"get"]))
(~optional (~seq #:roles (role:id ...)) #:defaults ([(role 1) null]))
(~optional (~seq #:name name) #:defaults ([name #'#f]))) ...
fun:dispatch-fun]
...
[else else-fun])
(with-syntax ([(fun-reverse-name ...)
(map (lambda (given-name default-name)
(if (syntax->datum given-name)
given-name
default-name))
(syntax->list #'(name ...))
(syntax->list #'(fun.name ...)))])
(syntax/loc stx
(let-values ([(dispatch _)
(dispatch-rules
[pat #:method method fun] ...
[else else-fun])]
[(reverse-uri)
(dispatch-url
[pat fun-reverse-name] ...)]
[(roles)
(dispatch-case
[pat #:method method (const '(role ...))] ...
[else (const null)])])
(values dispatch reverse-uri roles))))]
[(_
[pat
(~alt (~optional (~seq #:method method) #:defaults ([method #'"get"]))
(~optional (~seq #:roles (role:id ...)) #:defaults ([(role 1) null]))
(~optional (~seq #:name name) #:defaults ([name #'#f]))) ...
fun] ...)
(syntax/loc stx
(dispatch-rules+roles
[pat #:method method #:roles (role ...) #:name name fun] ...
[else (lambda (req) (next-dispatcher))]))]))
| null | https://raw.githubusercontent.com/Bogdanp/koyo/93f3fd06ee596a62bb0b286cb6290a800e911154/koyo-lib/koyo/dispatch.rkt | racket | #lang racket/base
(require (for-syntax racket/base
syntax/parse)
racket/function
web-server/dispatch
web-server/dispatchers/dispatch)
(provide
dispatch-rules+roles)
(begin-for-syntax
(define-syntax-class dispatch-fun
(pattern fun:id
#:attr name #''fun)
(pattern (fun:dispatch-fun e ...)
#:attr name (attribute fun.name))))
(define-syntax (dispatch-rules+roles stx)
(syntax-parse stx
#:literals (else)
[(_
[pat
(~alt (~optional (~seq #:method method) #:defaults ([method #'"get"]))
(~optional (~seq #:roles (role:id ...)) #:defaults ([(role 1) null]))
(~optional (~seq #:name name) #:defaults ([name #'#f]))) ...
fun:dispatch-fun]
...
[else else-fun])
(with-syntax ([(fun-reverse-name ...)
(map (lambda (given-name default-name)
(if (syntax->datum given-name)
given-name
default-name))
(syntax->list #'(name ...))
(syntax->list #'(fun.name ...)))])
(syntax/loc stx
(let-values ([(dispatch _)
(dispatch-rules
[pat #:method method fun] ...
[else else-fun])]
[(reverse-uri)
(dispatch-url
[pat fun-reverse-name] ...)]
[(roles)
(dispatch-case
[pat #:method method (const '(role ...))] ...
[else (const null)])])
(values dispatch reverse-uri roles))))]
[(_
[pat
(~alt (~optional (~seq #:method method) #:defaults ([method #'"get"]))
(~optional (~seq #:roles (role:id ...)) #:defaults ([(role 1) null]))
(~optional (~seq #:name name) #:defaults ([name #'#f]))) ...
fun] ...)
(syntax/loc stx
(dispatch-rules+roles
[pat #:method method #:roles (role ...) #:name name fun] ...
[else (lambda (req) (next-dispatcher))]))]))
| |
0685ca5a41584ead627ceed2f7fd456d483f3fb1fedd7f2ee041c8f1db69af40 | Chris00/ocaml-cairo | textextents.ml | (* This file is part of the tutorial
/
*)
open Cairo
let () =
let text = "joy" in
let surface = Cairo.Image.create Cairo.Image.ARGB32 ~w:600 ~h:600 in
let cr = Cairo.create surface in
Examples are in 1.0 x 1.0 coordinate space
Cairo.scale cr 600. 600.;
Cairo.set_font_size cr 0.5;
(* Drawing code goes here *)
Cairo.set_source_rgb cr 0.0 0.0 0.0;
Cairo.select_font_face cr "Georgia" ~weight:Bold;
let ux, uy = Cairo.device_to_user_distance cr 1. 1. in
let px = max ux uy in
let fe = Cairo.font_extents cr in
let te = Cairo.text_extents cr text in
(* The position of the text will be (x, y) *)
let x = 0.5 -. te.x_bearing -. te.width /. 2.
and y = 0.5 -. fe.descent +. fe.baseline /. 2. in
(* baseline, descent, ascent, height (in dashed green) *)
Cairo.set_line_width cr (4. *. px);
Cairo.set_dash cr [| 9. *. px |];
Cairo.set_source_rgba cr 0. 0.6 0. 0.5;
let horizontal_line y =
Cairo.move_to cr (x +. te.x_bearing) y;
Cairo.rel_line_to cr te.width 0. in
horizontal_line y;
horizontal_line (y +. fe.descent);
horizontal_line (y -. fe.ascent);
horizontal_line (y -. fe.baseline);
Cairo.stroke cr;
(* extents: width & height (in dashed blue) *)
Cairo.set_source_rgba cr 0. 0. 0.75 0.5;
Cairo.set_line_width cr px;
Cairo.set_dash cr [| 3. *. px |];
Cairo.rectangle cr (x +. te.x_bearing) (y +. te.y_bearing)
~w:te.width ~h:te.height;
Cairo.stroke cr;
(* text *)
Cairo.move_to cr x y;
Cairo.set_source_rgb cr 0. 0. 0.;
Cairo.show_text cr text;
(* bearing (solid blue line) *)
Cairo.set_dash cr [| |];
Cairo.set_line_width cr (2. *. px);
Cairo.set_source_rgba cr 0. 0. 0.75 0.5;
Cairo.move_to cr x y;
Cairo.rel_line_to cr te.x_bearing te.y_bearing;
Cairo.stroke cr;
(* text's advance (blue dot) *)
Cairo.set_source_rgba cr 0. 0. 0.75 0.5;
let two_pi = 8. *. atan 1. in
Cairo.arc cr (x +. te.x_advance) (y +. te.y_advance) ~r:(6. *. px)
~a1:0. ~a2:two_pi;
Cairo.fill cr;
(* reference point (x,y) (red dot) *)
Cairo.arc cr x y ~r:(6. *. px) ~a1:0. ~a2:two_pi;
Cairo.set_source_rgba cr 0.75 0. 0. 0.5;
Cairo.fill cr;
(* Write output *)
Cairo.PNG.write surface "textextents.png"
| null | https://raw.githubusercontent.com/Chris00/ocaml-cairo/202674a8d0c533b689ceacdb523ca167611e1b4c/examples/textextents.ml | ocaml | This file is part of the tutorial
/
Drawing code goes here
The position of the text will be (x, y)
baseline, descent, ascent, height (in dashed green)
extents: width & height (in dashed blue)
text
bearing (solid blue line)
text's advance (blue dot)
reference point (x,y) (red dot)
Write output |
open Cairo
let () =
let text = "joy" in
let surface = Cairo.Image.create Cairo.Image.ARGB32 ~w:600 ~h:600 in
let cr = Cairo.create surface in
Examples are in 1.0 x 1.0 coordinate space
Cairo.scale cr 600. 600.;
Cairo.set_font_size cr 0.5;
Cairo.set_source_rgb cr 0.0 0.0 0.0;
Cairo.select_font_face cr "Georgia" ~weight:Bold;
let ux, uy = Cairo.device_to_user_distance cr 1. 1. in
let px = max ux uy in
let fe = Cairo.font_extents cr in
let te = Cairo.text_extents cr text in
let x = 0.5 -. te.x_bearing -. te.width /. 2.
and y = 0.5 -. fe.descent +. fe.baseline /. 2. in
Cairo.set_line_width cr (4. *. px);
Cairo.set_dash cr [| 9. *. px |];
Cairo.set_source_rgba cr 0. 0.6 0. 0.5;
let horizontal_line y =
Cairo.move_to cr (x +. te.x_bearing) y;
Cairo.rel_line_to cr te.width 0. in
horizontal_line y;
horizontal_line (y +. fe.descent);
horizontal_line (y -. fe.ascent);
horizontal_line (y -. fe.baseline);
Cairo.stroke cr;
Cairo.set_source_rgba cr 0. 0. 0.75 0.5;
Cairo.set_line_width cr px;
Cairo.set_dash cr [| 3. *. px |];
Cairo.rectangle cr (x +. te.x_bearing) (y +. te.y_bearing)
~w:te.width ~h:te.height;
Cairo.stroke cr;
Cairo.move_to cr x y;
Cairo.set_source_rgb cr 0. 0. 0.;
Cairo.show_text cr text;
Cairo.set_dash cr [| |];
Cairo.set_line_width cr (2. *. px);
Cairo.set_source_rgba cr 0. 0. 0.75 0.5;
Cairo.move_to cr x y;
Cairo.rel_line_to cr te.x_bearing te.y_bearing;
Cairo.stroke cr;
Cairo.set_source_rgba cr 0. 0. 0.75 0.5;
let two_pi = 8. *. atan 1. in
Cairo.arc cr (x +. te.x_advance) (y +. te.y_advance) ~r:(6. *. px)
~a1:0. ~a2:two_pi;
Cairo.fill cr;
Cairo.arc cr x y ~r:(6. *. px) ~a1:0. ~a2:two_pi;
Cairo.set_source_rgba cr 0.75 0. 0. 0.5;
Cairo.fill cr;
Cairo.PNG.write surface "textextents.png"
|
47349dd00de9437f76828cda28e54eb9c901954cf51f5dd1bfca83a71c1567cd | HealthSamurai/pg3 | utils.clj | (ns pg3.utils
(:require [k8s.core :as k8s]
[pg3.naming :as naming]
[clojure.string :as str]
[unifn.core :as u]))
(defn find-pginstances-by-role [instances role]
(filter #(= role (get-in % [:spec :role])) instances))
(defn find-pginstance-by-role [instances role]
(first (find-pginstances-by-role instances role)))
(defn pginstances [namespace service-name]
(let [pginstances (:items (k8s/query {:kind naming/instance-resource-kind
:ns namespace
:apiVersion naming/api}
{:labelSelector
(format "service in (%s)" service-name)}))]
{:master (find-pginstance-by-role pginstances "master")
:replicas (find-pginstances-by-role pginstances "replica")
:all pginstances}))
(defmethod u/*fn ::cluster-active? [{pginstances ::pginstances}]
(let [{all-pgi :all} pginstances
phases (mapv #(get-in % [:status :phase]) all-pgi)]
(when-not (apply = (conj phases "active"))
{::u/status :stop})))
(defmethod u/*fn ::success [{message ::message}]
{::u/status :success
::u/message message})
(defn read-int [s]
(Integer/parseInt (str/trim s)))
(defn resource-ok? [resource]
(every? (partial = "True") (map :status (get-in resource [:status :conditions]))))
(defn resource-errors [resource]
(->> resource
((comp :conditions :status))
(filter (fn [c] (= (:status c) "False")))
(mapv :message)))
(defn parse-period [period]
(let [n (read-int (str/join "" (butlast period)))
t (last period)]
(case t
\s (* n 1000)
\m (* n 1000 60)
\h (* n 1000 60 60)
(throw (Exception. (str "Not supported type: " t))))))
(defn date->string [date]
(let [tz (java.util.TimeZone/getTimeZone "UTC")
df (doto (java.text.SimpleDateFormat. "yyyy-MM-dd'T'HH:mm:ss'Z'")
(.setTimeZone tz))]
(.format df date)))
(defn string->date [s]
(let [tz (java.util.TimeZone/getTimeZone "UTC")
df (doto (java.text.SimpleDateFormat. "yyyy-MM-dd'T'HH:mm:ss'Z'")
(.setTimeZone tz))]
(.parse df s)))
(defn since [last-update]
(let [last-update (or (and last-update (string->date last-update))
(java.util.Date.))]
(- (.getTime (java.util.Date.)) (.getTime last-update))))
(defn duration [resource]
(let [last-update (get-in resource [:status :lastUpdate])
last-update (or (and last-update (string->date last-update))
(java.util.Date.))]
(- (.getTime (java.util.Date.)) (.getTime last-update))))
(defn now-string []
(date->string (java.util.Date.)))
(defn timestamp-string []
(str (.getTime (java.util.Date.))))
| null | https://raw.githubusercontent.com/HealthSamurai/pg3/3fc680c9849b00f08ccb18dc64b72dc19b04fd2a/src/pg3/utils.clj | clojure | (ns pg3.utils
(:require [k8s.core :as k8s]
[pg3.naming :as naming]
[clojure.string :as str]
[unifn.core :as u]))
(defn find-pginstances-by-role [instances role]
(filter #(= role (get-in % [:spec :role])) instances))
(defn find-pginstance-by-role [instances role]
(first (find-pginstances-by-role instances role)))
(defn pginstances [namespace service-name]
(let [pginstances (:items (k8s/query {:kind naming/instance-resource-kind
:ns namespace
:apiVersion naming/api}
{:labelSelector
(format "service in (%s)" service-name)}))]
{:master (find-pginstance-by-role pginstances "master")
:replicas (find-pginstances-by-role pginstances "replica")
:all pginstances}))
(defmethod u/*fn ::cluster-active? [{pginstances ::pginstances}]
(let [{all-pgi :all} pginstances
phases (mapv #(get-in % [:status :phase]) all-pgi)]
(when-not (apply = (conj phases "active"))
{::u/status :stop})))
(defmethod u/*fn ::success [{message ::message}]
{::u/status :success
::u/message message})
(defn read-int [s]
(Integer/parseInt (str/trim s)))
(defn resource-ok? [resource]
(every? (partial = "True") (map :status (get-in resource [:status :conditions]))))
(defn resource-errors [resource]
(->> resource
((comp :conditions :status))
(filter (fn [c] (= (:status c) "False")))
(mapv :message)))
(defn parse-period [period]
(let [n (read-int (str/join "" (butlast period)))
t (last period)]
(case t
\s (* n 1000)
\m (* n 1000 60)
\h (* n 1000 60 60)
(throw (Exception. (str "Not supported type: " t))))))
(defn date->string [date]
(let [tz (java.util.TimeZone/getTimeZone "UTC")
df (doto (java.text.SimpleDateFormat. "yyyy-MM-dd'T'HH:mm:ss'Z'")
(.setTimeZone tz))]
(.format df date)))
(defn string->date [s]
(let [tz (java.util.TimeZone/getTimeZone "UTC")
df (doto (java.text.SimpleDateFormat. "yyyy-MM-dd'T'HH:mm:ss'Z'")
(.setTimeZone tz))]
(.parse df s)))
(defn since [last-update]
(let [last-update (or (and last-update (string->date last-update))
(java.util.Date.))]
(- (.getTime (java.util.Date.)) (.getTime last-update))))
(defn duration [resource]
(let [last-update (get-in resource [:status :lastUpdate])
last-update (or (and last-update (string->date last-update))
(java.util.Date.))]
(- (.getTime (java.util.Date.)) (.getTime last-update))))
(defn now-string []
(date->string (java.util.Date.)))
(defn timestamp-string []
(str (.getTime (java.util.Date.))))
| |
2ce2c31a2cd635c9b527f0cce059be03014ec4e18887a46c534c5404f1792065 | soulomoon/SICP | Exercise 1.35.scm | ; Exercise 1.35: Show that the golden ratio φφ (1.2.2) is a fixed point of the transformation x↦1+1/xx↦1+1/x, and use this fact to compute φφ by means of the fixed-point procedure.
#lang planet neil/sicp
(define tolerence 0.00001)
(define (fixedPoint f first_guess)
(define (closeEnough x y)
(let ((dis (abs (- x y))))
(if (< dis tolerence)
true
false
)
)
)
(define (try old_guess)
(let ((new_guess (f old_guess)))
(if (closeEnough new_guess old_guess)
new_guess
(try new_guess))
)
)
(try first_guess)
)
(define (golden_ratio_approximation x)
(+ 1 (/ 1.0 x)))
(fixedPoint golden_ratio_approximation 2)
(fixedPoint golden_ratio_approximation 112)
1.6180327868852458
1.61803281074865
| null | https://raw.githubusercontent.com/soulomoon/SICP/1c6cbf5ecf6397eaeb990738a938d48c193af1bb/Chapter1/Exercise%201.35.scm | scheme | Exercise 1.35: Show that the golden ratio φφ (1.2.2) is a fixed point of the transformation x↦1+1/xx↦1+1/x, and use this fact to compute φφ by means of the fixed-point procedure. | #lang planet neil/sicp
(define tolerence 0.00001)
(define (fixedPoint f first_guess)
(define (closeEnough x y)
(let ((dis (abs (- x y))))
(if (< dis tolerence)
true
false
)
)
)
(define (try old_guess)
(let ((new_guess (f old_guess)))
(if (closeEnough new_guess old_guess)
new_guess
(try new_guess))
)
)
(try first_guess)
)
(define (golden_ratio_approximation x)
(+ 1 (/ 1.0 x)))
(fixedPoint golden_ratio_approximation 2)
(fixedPoint golden_ratio_approximation 112)
1.6180327868852458
1.61803281074865
|
794d5edeb8a66a9c86f911e21b44c40233887b9d720a774ee980ce76804622f5 | aryx/xix | file.ml | Copyright 2016 , see copyright.txt
open Common
(* could be moved in graph.ml *)
(* opti? time cache, and flag to skip cache if want refresh *)
let timeof file =
try
(* bugfix: use stat, not lstat, to get the time of what is pointed
* by the symlink, not the symlink
*)
let stat = Unix.stat file in
Some (stat.Unix.st_mtime)
with Unix.Unix_error (_, _, _) -> None
let str_of_time timeopt =
match timeopt with
| None -> "0"
| Some t -> spf "%.1f" t
| null | https://raw.githubusercontent.com/aryx/xix/60ce1bd9a3f923e0e8bb2192f8938a9aa49c739c/mk/file.ml | ocaml | could be moved in graph.ml
opti? time cache, and flag to skip cache if want refresh
bugfix: use stat, not lstat, to get the time of what is pointed
* by the symlink, not the symlink
| Copyright 2016 , see copyright.txt
open Common
let timeof file =
try
let stat = Unix.stat file in
Some (stat.Unix.st_mtime)
with Unix.Unix_error (_, _, _) -> None
let str_of_time timeopt =
match timeopt with
| None -> "0"
| Some t -> spf "%.1f" t
|
09dbd325015d3ecbcaaf8cee0fa75ff49a8a43dcf30a97b38c15d4139bc10eaa | pink-gorilla/pinkie | pinkie_render.cljs | (ns pinkie.pinkie-render
(:require
[pinkie.error :refer [error-boundary]]
[pinkie.pinkie :refer [tag-inject convert-style-as-strings-to-map convert-render-as]]
[pinkie.text]))
(defn reagent-inject [{:keys [map-keywords fix-style]
:or {map-keywords true
fix-style true}} component]
(let [;_ (info "map-keywords: " map-keywords "widget: " widget " reagent component: " component)
;_ (info "meta data: " (meta component))
component (convert-render-as component)
;_ (println "after convert-render-as " component)
component (if map-keywords (tag-inject component) component)
component (if fix-style (convert-style-as-strings-to-map component) component)
;_ (info "inject result: " component)
]
[:div.reagent-output component]))
(defn ^{:category :pinkie}
pinkie-render
"renders a reagent hiccup vector that can contain pinkie components.
wrapped with react error boundary.
[:p/pinkie {:map-keywords true
:fix-style false}
[:p/vega spec]]
"
([pinkie-spec]
(pinkie-render {:map-keywords true
:fix-style true}
pinkie-spec))
([options pinkie-spec]
[error-boundary
[reagent-inject options pinkie-spec]]))
( defn ^{:category : pinkie
; :hidden true}
; components
; "displays pinkie component table as text.
; useful for debugging as it has no dependencies."
; []
; [pinkie.text/text (component-list->str)])
| null | https://raw.githubusercontent.com/pink-gorilla/pinkie/f129cf6eb21d3822ef677bbbc0ef67bf785b197e/src/pinkie/pinkie_render.cljs | clojure | _ (info "map-keywords: " map-keywords "widget: " widget " reagent component: " component)
_ (info "meta data: " (meta component))
_ (println "after convert-render-as " component)
_ (info "inject result: " component)
:hidden true}
components
"displays pinkie component table as text.
useful for debugging as it has no dependencies."
[]
[pinkie.text/text (component-list->str)]) | (ns pinkie.pinkie-render
(:require
[pinkie.error :refer [error-boundary]]
[pinkie.pinkie :refer [tag-inject convert-style-as-strings-to-map convert-render-as]]
[pinkie.text]))
(defn reagent-inject [{:keys [map-keywords fix-style]
:or {map-keywords true
fix-style true}} component]
component (convert-render-as component)
component (if map-keywords (tag-inject component) component)
component (if fix-style (convert-style-as-strings-to-map component) component)
]
[:div.reagent-output component]))
(defn ^{:category :pinkie}
pinkie-render
"renders a reagent hiccup vector that can contain pinkie components.
wrapped with react error boundary.
[:p/pinkie {:map-keywords true
:fix-style false}
[:p/vega spec]]
"
([pinkie-spec]
(pinkie-render {:map-keywords true
:fix-style true}
pinkie-spec))
([options pinkie-spec]
[error-boundary
[reagent-inject options pinkie-spec]]))
( defn ^{:category : pinkie
|
cd9b79d835d7e672ad8fd94eaf801b5457c96ec2556dbac41eebe54ffc0b38bc | jyp/prettiest | Compact.hs | # LANGUAGE TupleSections #
{-# LANGUAGE OverloadedStrings #-}
-- | Compact pretty-printer.
--
-- == Examples
--
Assume that we want to pretty print S - Expressions , which can either be atom or a list of S - Expressions .
--
> > > data SExpr = SExpr [ SExpr ] | Atom String deriving Show
> > > let pretty : : SExpr - > Doc ( ) ; pretty ( Atom s ) = text s ; pretty ( SExpr xs ) = text " ( " < > ( sep $ map pretty xs ) < > text " ) "
--
Using the above representation , the S - Expression @(a b c d)@ has the following encoding :
--
> > > let abcd = SExpr [ Atom " a",Atom " b",Atom " c",Atom " d " ]
--
The legible layouts of the @abcd@ S - Expression defined above would be either
--
-- >>> putStrLn $ render $ pretty abcd
-- (a b c d)
--
-- or
--
> > > putStrLn $ renderWith defaultOptions { optsPageWidth = 5 } $ pretty abcd
-- (a
-- b
-- c
-- d)
--
The @testData@ S - Expression is specially crafted to
demonstrate general shortcomings of both Hughes and libraries .
--
> > > let = SExpr [ abcd , abcd , abcd , abcd ]
> > > let testData = SExpr [ SExpr [ Atom " abcde " , ] , SExpr [ Atom " abcdefgh " , ] ]
-- >>> putStrLn $ render $ pretty testData
-- ((abcde ((a b c d) (a b c d) (a b c d) (a b c d)))
( abcdefgh ( ( a b c d ) ( a b c d ) ( a b c d ) ( a b c d ) ) ) )
--
on 20 - column - wide page
--
> > > putStrLn $ renderWith defaultOptions { optsPageWidth = 20 } $ pretty testData
-- ((abcde ((a b c d)
-- (a b c d)
-- (a b c d)
-- (a b c d)))
( abcdefgh
-- ((a b c d)
-- (a b c d)
-- (a b c d)
-- (a b c d))))
--
Yet , neither Hughes ' nor 's library can deliver those results .
--
-- === Annotations
--
For example we can annotate every /car/ element of S - Expressions ,
-- and in the rendering phase emphasise them by rendering them in uppercase.
--
> > > let pretty ' : : SExpr - > Doc Any ; pretty ' ( Atom s ) = text s ; pretty ' ( SExpr [ ] ) = text " ( ) " ; pretty ' ( SExpr ( x : xs ) ) = text " ( " < > ( sep $ annotate ( Any True ) ( pretty ' x ) : map pretty ' xs ) < > text " ) "
> > > let render ' = renderWith defaultOptions { optsAnnotate = \a x - > if a = = Any True then map toUpper x else x }
> > > render ' $ pretty ' testData
-- ((ABCDE ((A B C D) (A B C D) (A B C D) (A B C D)))
( ABCDEFGH ( ( A B C D ) ( A b c d ) ( A b c d ) ( A b c d ) ) ) )
--
module Text.PrettyPrint.Compact (
-- * Documents
Doc,
-- * Basic combinators
module Data.Monoid, text, flush, char,
hang, hangWith, encloseSep, list, tupled, semiBraces,
-- * Operators
(<+>), ($$), (</>), (<//>), (<$$>),
-- * List combinators
hsep, sep, hcat, vcat, cat, punctuate,
-- * Fill combiantors
-- fillSep, fillCat,
-- * Bracketing combinators
enclose, squotes, dquotes, parens, angles, braces, brackets,
-- * Character documents
lparen, rparen, langle, rangle, lbrace, rbrace, lbracket, rbracket,
squote, dquote, semi, colon, comma, space, dot, backslash, equals,
-- * Primitive type documents
string, int, integer, float, double, rational,
bool,
-- * Rendering
renderWith,
render,
Options(..),
defaultOptions,
-- * Annotations
annotate,
-- * Undocumented
-- column, nesting, width
) where
import Data.Monoid
import Text.PrettyPrint.Compact.Core as Text.PrettyPrint.Compact
-- | Render the 'Doc' into 'String' omitting all annotations.
render :: Annotation a => Doc a -> String
render = renderWith defaultOptions
defaultOptions :: Options a String
defaultOptions = Options
{ optsAnnotate = \_ s -> s
, optsPageWidth = 80
}
-- | The document @(list xs)@ comma separates the documents @xs@ and
-- encloses them in square brackets. The documents are rendered
-- horizontally if that fits the page. Otherwise they are aligned
-- vertically. All comma separators are put in front of the elements.
list :: Annotation a => [Doc a] -> Doc a
list = encloseSep lbracket rbracket comma
-- | The document @(tupled xs)@ comma separates the documents @xs@ and
-- encloses them in parenthesis. The documents are rendered
-- horizontally if that fits the page. Otherwise they are aligned
-- vertically. All comma separators are put in front of the elements.
tupled :: Annotation a => [Doc a] -> Doc a
tupled = encloseSep lparen rparen comma
-- | The document @(semiBraces xs)@ separates the documents @xs@ with
-- semi colons and encloses them in braces. The documents are rendered
-- horizontally if that fits the page. Otherwise they are aligned
-- vertically. All semi colons are put in front of the elements.
semiBraces :: Annotation a => [Doc a] -> Doc a
semiBraces = encloseSep lbrace rbrace semi
-- | The document @(enclosure l r sep xs)@ concatenates the documents
-- @xs@ separated by @sep@ and encloses the resulting document by @l@
and @r@. The documents are rendered horizontally if that fits the
-- page. Otherwise they are aligned vertically. All separators are put
-- in front of the elements. For example, the combinator 'list' can be
-- defined with @enclosure@:
--
-- > list xs = enclosure lbracket rbracket comma xs
-- > test = text "list" <+> (list (map int [10,200,3000]))
--
Which is layed out with a page width of 20 as :
--
-- @
list [ 10,200,3000 ]
-- @
--
But when the page width is 15 , it is layed out as :
--
-- @
list [ 10
, 200
, 3000 ]
-- @
encloseSep :: Annotation a => Doc a -> Doc a -> Doc a -> [Doc a] -> Doc a
encloseSep left right separator ds
= (<> right) $ case ds of
[] -> left
[d] -> left <> d
(d:ds') -> cat (left <> d:map (separator <>) ds')
-----------------------------------------------------------
-- punctuate p [d1,d2,...,dn] => [d1 <> p,d2 <> p, ... ,dn]
-----------------------------------------------------------
-- | @(punctuate p xs)@ concatenates all documents in @xs@ with
-- document @p@ except for the last document.
--
-- > someText = map text ["words","in","a","tuple"]
-- > test = parens (align (cat (punctuate comma someText)))
--
This is layed out on a page width of 20 as :
--
-- @
-- (words,in,a,tuple)
-- @
--
But when the page width is 15 , it is layed out as :
--
-- @
-- (words,
-- in,
-- a,
-- tuple)
-- @
--
-- (If you want put the commas in front of their elements instead of
-- at the end, you should use 'tupled' or, in general, 'encloseSep'.)
punctuate :: Annotation a => Doc a -> [Doc a] -> [Doc a]
punctuate _p [] = []
punctuate _p [d] = [d]
punctuate p (d:ds) = (d <> p) : punctuate p ds
-----------------------------------------------------------
-- high-level combinators
-----------------------------------------------------------
| The document @(sep xs)@ concatenates all documents @xs@ either
-- horizontally with @(\<+\>)@, if it fits the page, or vertically
with @(\<$$\>)@. Documents on the left of horizontal concatenation
-- must fit on a single line.
--
sep :: Annotation a => [Doc a] -> Doc a
sep xs = groupingBy " " (map (0,) xs)
-- -- | The document @(fillSep xs)@ concatenates documents @xs@
-- -- horizontally with @(\<+\>)@ as long as its fits the page, than
-- inserts a @line@ and continues doing that for all documents in
-- -- @xs@.
-- --
-- -- > fillSep xs = foldr (\<\/\>) empty xs
-- fillSep :: Annotation a => [Doc a] -> Doc a
-- fillSep = foldDoc (</>)
| The document @(hsep xs)@ concatenates all documents @xs@
horizontally with @(\<+\>)@.
hsep :: Annotation a => [Doc a] -> Doc a
hsep = foldDoc (<+>)
-- | The document @(cat xs)@ concatenates all documents @xs@ either
horizontally with , if it fits the page , or vertically with
-- @(\<$$\>)@.
--
cat :: Annotation a => [Doc a] -> Doc a
cat xs = groupingBy "" (map (0,) xs)
-- -- | The document @(fillCat xs)@ concatenates documents @xs@
-- horizontally with as long as its fits the page , than inserts
-- a @linebreak@ and continues doing that for all documents in @xs@.
-- --
-- > fillCat xs = foldr ( \<\/\/\ > ) empty xs
-- fillCat :: Annotation a => [Doc a] -> Doc a
-- fillCat = foldDoc (<//>)
-- | The document @(hcat xs)@ concatenates all documents @xs@
-- horizontally with @(\<\>)@.
hcat :: Annotation a => [Doc a] -> Doc a
hcat = foldDoc (<>)
-- | The document @(vcat xs)@ concatenates all documents @xs@
-- vertically with @($$)@.
vcat :: Annotation a => [Doc a] -> Doc a
vcat = foldDoc ($$)
foldDoc :: Annotation a => (Doc a -> Doc a -> Doc a) -> [Doc a] -> Doc a
foldDoc _ [] = mempty
foldDoc f ds = foldr1 f ds
-- | The document @(x \<+\> y)@ concatenates document @x@ and @y@ with a
@space@ in between . ( infixr 6 )
(<+>) :: Annotation a => Doc a -> Doc a -> Doc a
x <+> y = x <> space <> y
-- | The document @(x \<\/\> y)@ puts @x@ and @y@ either next to each other
( with a @space@ in between ) or underneath each other . ( infixr 5 )
(</>) :: Annotation a => Doc a -> Doc a -> Doc a
x </> y = hang 0 x y
-- | The document @(x \<\/\/\> y)@ puts @x@ and @y@ either right next
-- to each other (if @x@ fits on a single line) or underneath each
other . ( infixr 5 )
(<//>) :: Annotation a => Doc a -> Doc a -> Doc a
x <//> y = hangWith "" 0 x y
-- | The document @(x \<$$\> y)@ concatenates document @x@ and @y@ with
a linebreak in between . ( infixr 5 )
(<$$>) :: Annotation a => Doc a -> Doc a -> Doc a
(<$$>) = ($$)
-- | Document @(squotes x)@ encloses document @x@ with single quotes
-- \"'\".
squotes :: Annotation a => Doc a -> Doc a
squotes = enclose squote squote
-- | Document @(dquotes x)@ encloses document @x@ with double quotes
-- '\"'.
dquotes :: Annotation a => Doc a -> Doc a
dquotes = enclose dquote dquote
| Document @(braces x)@ encloses document @x@ in braces ,
-- \"}\".
braces :: Annotation a => Doc a -> Doc a
braces = enclose lbrace rbrace
-- | Document @(parens x)@ encloses document @x@ in parenthesis, \"(\"
-- and \")\".
parens :: Annotation a => Doc a -> Doc a
parens = enclose lparen rparen
-- | Document @(angles x)@ encloses document @x@ in angles, \"\<\" and
-- \"\>\".
angles :: Annotation a => Doc a -> Doc a
angles = enclose langle rangle
-- | Document @(brackets x)@ encloses document @x@ in square brackets,
\"[\ " and \"]\ " .
brackets :: Annotation a => Doc a -> Doc a
brackets = enclose lbracket rbracket
-- | The document @(enclose l r x)@ encloses document @x@ between
documents @l@ and @r@ using @(\<\>)@.
enclose :: Annotation a => Doc a -> Doc a -> Doc a -> Doc a
enclose l r x = l <> x <> r
char :: Annotation a => Char -> Doc a
char x = text [x]
-- | The document @lparen@ contains a left parenthesis, \"(\".
lparen :: Annotation a => Doc a
lparen = char '('
-- | The document @rparen@ contains a right parenthesis, \")\".
rparen :: Annotation a => Doc a
rparen = char ')'
-- | The document @langle@ contains a left angle, \"\<\".
langle :: Annotation a => Doc a
langle = char '<'
| The document @rangle@ contains a right angle , " .
rangle :: Annotation a => Doc a
rangle = char '>'
| The document @lbrace@ contains a left brace , .
lbrace :: Annotation a => Doc a
lbrace = char '{'
-- | The document @rbrace@ contains a right brace, \"}\".
rbrace :: Annotation a => Doc a
rbrace = char '}'
| The document @lbracket@ contains a left square bracket , \"[\ " .
lbracket :: Annotation a => Doc a
lbracket = char '['
-- | The document @rbracket@ contains a right square bracket, \"]\".
rbracket :: Annotation a => Doc a
rbracket = char ']'
| The document @squote@ contains a single quote , \"'\ " .
squote :: Annotation a => Doc a
squote = char '\''
-- | The document @dquote@ contains a double quote, '\"'.
dquote :: Annotation a => Doc a
dquote = char '"'
| The document @semi@ contains a semi colon , \";\ " .
semi :: Annotation a => Doc a
semi = char ';'
-- | The document @colon@ contains a colon, \":\".
colon :: Annotation a => Doc a
colon = char ':'
| The document @comma@ contains a comma , \",\ " .
comma :: Annotation a => Doc a
comma = char ','
-- | The document @dot@ contains a single dot, \".\".
dot :: Annotation a => Doc a
dot = char '.'
| The document @backslash@ contains a back slash , \"\\\ " .
backslash :: Annotation a => Doc a
backslash = char '\\'
-- | The document @equals@ contains an equal sign, \"=\".
equals :: Annotation a => Doc a
equals = char '='
-----------------------------------------------------------
-- Combinators for prelude types
-----------------------------------------------------------
-- string is like "text" but replaces '\n' by "line"
-- | The document @(string s)@ concatenates all characters in @s@
using @line@ for newline characters and @char@ for all other
-- characters. It is used instead of 'text' whenever the text contains
-- newline characters.
string :: Annotation a => String -> Doc a
string = vcat . map text . lines
bool :: Annotation a => Bool -> Doc a
bool b = text (show b)
| The document @(int i)@ shows the literal integer @i@ using
-- 'text'.
int :: Annotation a => Int -> Doc a
int i = text (show i)
| The document @(integer i)@ shows the literal integer @i@ using
-- 'text'.
integer :: Annotation a => Integer -> Doc a
integer i = text (show i)
| The document @(float f)@ shows the literal float @f@ using
-- 'text'.
float :: Annotation a => Float -> Doc a
float f = text (show f)
-- | The document @(double d)@ shows the literal double @d@ using
-- 'text'.
double :: Annotation a => Double -> Doc a
double d = text (show d)
-- | The document @(rational r)@ shows the literal rational @r@ using
-- 'text'.
rational :: Annotation a => Rational -> Doc a
rational r = text (show r)
-- | The hang combinator implements hanging indentation. The document
-- @(hang i x y)@ either @x@ and @y@ concatenated with @\<+\>@ or @y@
below @x@ with an additional indentation of
hang :: Annotation a => Int -> Doc a -> Doc a -> Doc a
hang = hangWith " "
-- | The hang combinator implements hanging indentation. The document
@(hang separator i x y)@ either @x@ and @y@ concatenated with >
-- text separator \<\>@ or @y@ below @x@ with an additional
indentation of
hangWith :: Annotation a => String -> Int -> Doc a -> Doc a -> Doc a
hangWith separator n x y = groupingBy separator [(0,x), (n,y)]
space :: Annotation a => Doc a
space = text " "
-- $setup
> > > import Data . Monoid
> > > import Data .
| null | https://raw.githubusercontent.com/jyp/prettiest/e5ce6cd6b4da71860c3d97da84bed4a827fa00ef/Text/PrettyPrint/Compact.hs | haskell | # LANGUAGE OverloadedStrings #
| Compact pretty-printer.
== Examples
>>> putStrLn $ render $ pretty abcd
(a b c d)
or
(a
b
c
d)
>>> putStrLn $ render $ pretty testData
((abcde ((a b c d) (a b c d) (a b c d) (a b c d)))
((abcde ((a b c d)
(a b c d)
(a b c d)
(a b c d)))
((a b c d)
(a b c d)
(a b c d)
(a b c d))))
=== Annotations
and in the rendering phase emphasise them by rendering them in uppercase.
((ABCDE ((A B C D) (A B C D) (A B C D) (A B C D)))
* Documents
* Basic combinators
* Operators
* List combinators
* Fill combiantors
fillSep, fillCat,
* Bracketing combinators
* Character documents
* Primitive type documents
* Rendering
* Annotations
* Undocumented
column, nesting, width
| Render the 'Doc' into 'String' omitting all annotations.
| The document @(list xs)@ comma separates the documents @xs@ and
encloses them in square brackets. The documents are rendered
horizontally if that fits the page. Otherwise they are aligned
vertically. All comma separators are put in front of the elements.
| The document @(tupled xs)@ comma separates the documents @xs@ and
encloses them in parenthesis. The documents are rendered
horizontally if that fits the page. Otherwise they are aligned
vertically. All comma separators are put in front of the elements.
| The document @(semiBraces xs)@ separates the documents @xs@ with
semi colons and encloses them in braces. The documents are rendered
horizontally if that fits the page. Otherwise they are aligned
vertically. All semi colons are put in front of the elements.
| The document @(enclosure l r sep xs)@ concatenates the documents
@xs@ separated by @sep@ and encloses the resulting document by @l@
page. Otherwise they are aligned vertically. All separators are put
in front of the elements. For example, the combinator 'list' can be
defined with @enclosure@:
> list xs = enclosure lbracket rbracket comma xs
> test = text "list" <+> (list (map int [10,200,3000]))
@
@
@
@
---------------------------------------------------------
punctuate p [d1,d2,...,dn] => [d1 <> p,d2 <> p, ... ,dn]
---------------------------------------------------------
| @(punctuate p xs)@ concatenates all documents in @xs@ with
document @p@ except for the last document.
> someText = map text ["words","in","a","tuple"]
> test = parens (align (cat (punctuate comma someText)))
@
(words,in,a,tuple)
@
@
(words,
in,
a,
tuple)
@
(If you want put the commas in front of their elements instead of
at the end, you should use 'tupled' or, in general, 'encloseSep'.)
---------------------------------------------------------
high-level combinators
---------------------------------------------------------
horizontally with @(\<+\>)@, if it fits the page, or vertically
must fit on a single line.
-- | The document @(fillSep xs)@ concatenates documents @xs@
-- horizontally with @(\<+\>)@ as long as its fits the page, than
inserts a @line@ and continues doing that for all documents in
-- @xs@.
--
-- > fillSep xs = foldr (\<\/\>) empty xs
fillSep :: Annotation a => [Doc a] -> Doc a
fillSep = foldDoc (</>)
| The document @(cat xs)@ concatenates all documents @xs@ either
@(\<$$\>)@.
-- | The document @(fillCat xs)@ concatenates documents @xs@
horizontally with as long as its fits the page , than inserts
a @linebreak@ and continues doing that for all documents in @xs@.
--
> fillCat xs = foldr ( \<\/\/\ > ) empty xs
fillCat :: Annotation a => [Doc a] -> Doc a
fillCat = foldDoc (<//>)
| The document @(hcat xs)@ concatenates all documents @xs@
horizontally with @(\<\>)@.
| The document @(vcat xs)@ concatenates all documents @xs@
vertically with @($$)@.
| The document @(x \<+\> y)@ concatenates document @x@ and @y@ with a
| The document @(x \<\/\> y)@ puts @x@ and @y@ either next to each other
| The document @(x \<\/\/\> y)@ puts @x@ and @y@ either right next
to each other (if @x@ fits on a single line) or underneath each
| The document @(x \<$$\> y)@ concatenates document @x@ and @y@ with
| Document @(squotes x)@ encloses document @x@ with single quotes
\"'\".
| Document @(dquotes x)@ encloses document @x@ with double quotes
'\"'.
\"}\".
| Document @(parens x)@ encloses document @x@ in parenthesis, \"(\"
and \")\".
| Document @(angles x)@ encloses document @x@ in angles, \"\<\" and
\"\>\".
| Document @(brackets x)@ encloses document @x@ in square brackets,
| The document @(enclose l r x)@ encloses document @x@ between
| The document @lparen@ contains a left parenthesis, \"(\".
| The document @rparen@ contains a right parenthesis, \")\".
| The document @langle@ contains a left angle, \"\<\".
| The document @rbrace@ contains a right brace, \"}\".
| The document @rbracket@ contains a right square bracket, \"]\".
| The document @dquote@ contains a double quote, '\"'.
| The document @colon@ contains a colon, \":\".
| The document @dot@ contains a single dot, \".\".
| The document @equals@ contains an equal sign, \"=\".
---------------------------------------------------------
Combinators for prelude types
---------------------------------------------------------
string is like "text" but replaces '\n' by "line"
| The document @(string s)@ concatenates all characters in @s@
characters. It is used instead of 'text' whenever the text contains
newline characters.
'text'.
'text'.
'text'.
| The document @(double d)@ shows the literal double @d@ using
'text'.
| The document @(rational r)@ shows the literal rational @r@ using
'text'.
| The hang combinator implements hanging indentation. The document
@(hang i x y)@ either @x@ and @y@ concatenated with @\<+\>@ or @y@
| The hang combinator implements hanging indentation. The document
text separator \<\>@ or @y@ below @x@ with an additional
$setup | # LANGUAGE TupleSections #
Assume that we want to pretty print S - Expressions , which can either be atom or a list of S - Expressions .
> > > data SExpr = SExpr [ SExpr ] | Atom String deriving Show
> > > let pretty : : SExpr - > Doc ( ) ; pretty ( Atom s ) = text s ; pretty ( SExpr xs ) = text " ( " < > ( sep $ map pretty xs ) < > text " ) "
Using the above representation , the S - Expression @(a b c d)@ has the following encoding :
> > > let abcd = SExpr [ Atom " a",Atom " b",Atom " c",Atom " d " ]
The legible layouts of the @abcd@ S - Expression defined above would be either
> > > putStrLn $ renderWith defaultOptions { optsPageWidth = 5 } $ pretty abcd
The @testData@ S - Expression is specially crafted to
demonstrate general shortcomings of both Hughes and libraries .
> > > let = SExpr [ abcd , abcd , abcd , abcd ]
> > > let testData = SExpr [ SExpr [ Atom " abcde " , ] , SExpr [ Atom " abcdefgh " , ] ]
( abcdefgh ( ( a b c d ) ( a b c d ) ( a b c d ) ( a b c d ) ) ) )
on 20 - column - wide page
> > > putStrLn $ renderWith defaultOptions { optsPageWidth = 20 } $ pretty testData
( abcdefgh
Yet , neither Hughes ' nor 's library can deliver those results .
For example we can annotate every /car/ element of S - Expressions ,
> > > let pretty ' : : SExpr - > Doc Any ; pretty ' ( Atom s ) = text s ; pretty ' ( SExpr [ ] ) = text " ( ) " ; pretty ' ( SExpr ( x : xs ) ) = text " ( " < > ( sep $ annotate ( Any True ) ( pretty ' x ) : map pretty ' xs ) < > text " ) "
> > > let render ' = renderWith defaultOptions { optsAnnotate = \a x - > if a = = Any True then map toUpper x else x }
> > > render ' $ pretty ' testData
( ABCDEFGH ( ( A B C D ) ( A b c d ) ( A b c d ) ( A b c d ) ) ) )
module Text.PrettyPrint.Compact (
Doc,
module Data.Monoid, text, flush, char,
hang, hangWith, encloseSep, list, tupled, semiBraces,
(<+>), ($$), (</>), (<//>), (<$$>),
hsep, sep, hcat, vcat, cat, punctuate,
enclose, squotes, dquotes, parens, angles, braces, brackets,
lparen, rparen, langle, rangle, lbrace, rbrace, lbracket, rbracket,
squote, dquote, semi, colon, comma, space, dot, backslash, equals,
string, int, integer, float, double, rational,
bool,
renderWith,
render,
Options(..),
defaultOptions,
annotate,
) where
import Data.Monoid
import Text.PrettyPrint.Compact.Core as Text.PrettyPrint.Compact
render :: Annotation a => Doc a -> String
render = renderWith defaultOptions
defaultOptions :: Options a String
defaultOptions = Options
{ optsAnnotate = \_ s -> s
, optsPageWidth = 80
}
list :: Annotation a => [Doc a] -> Doc a
list = encloseSep lbracket rbracket comma
tupled :: Annotation a => [Doc a] -> Doc a
tupled = encloseSep lparen rparen comma
semiBraces :: Annotation a => [Doc a] -> Doc a
semiBraces = encloseSep lbrace rbrace semi
and @r@. The documents are rendered horizontally if that fits the
Which is layed out with a page width of 20 as :
list [ 10,200,3000 ]
But when the page width is 15 , it is layed out as :
list [ 10
, 200
, 3000 ]
encloseSep :: Annotation a => Doc a -> Doc a -> Doc a -> [Doc a] -> Doc a
encloseSep left right separator ds
= (<> right) $ case ds of
[] -> left
[d] -> left <> d
(d:ds') -> cat (left <> d:map (separator <>) ds')
This is layed out on a page width of 20 as :
But when the page width is 15 , it is layed out as :
punctuate :: Annotation a => Doc a -> [Doc a] -> [Doc a]
punctuate _p [] = []
punctuate _p [d] = [d]
punctuate p (d:ds) = (d <> p) : punctuate p ds
| The document @(sep xs)@ concatenates all documents @xs@ either
with @(\<$$\>)@. Documents on the left of horizontal concatenation
sep :: Annotation a => [Doc a] -> Doc a
sep xs = groupingBy " " (map (0,) xs)
| The document @(hsep xs)@ concatenates all documents @xs@
horizontally with @(\<+\>)@.
hsep :: Annotation a => [Doc a] -> Doc a
hsep = foldDoc (<+>)
horizontally with , if it fits the page , or vertically with
cat :: Annotation a => [Doc a] -> Doc a
cat xs = groupingBy "" (map (0,) xs)
hcat :: Annotation a => [Doc a] -> Doc a
hcat = foldDoc (<>)
vcat :: Annotation a => [Doc a] -> Doc a
vcat = foldDoc ($$)
foldDoc :: Annotation a => (Doc a -> Doc a -> Doc a) -> [Doc a] -> Doc a
foldDoc _ [] = mempty
foldDoc f ds = foldr1 f ds
@space@ in between . ( infixr 6 )
(<+>) :: Annotation a => Doc a -> Doc a -> Doc a
x <+> y = x <> space <> y
( with a @space@ in between ) or underneath each other . ( infixr 5 )
(</>) :: Annotation a => Doc a -> Doc a -> Doc a
x </> y = hang 0 x y
other . ( infixr 5 )
(<//>) :: Annotation a => Doc a -> Doc a -> Doc a
x <//> y = hangWith "" 0 x y
a linebreak in between . ( infixr 5 )
(<$$>) :: Annotation a => Doc a -> Doc a -> Doc a
(<$$>) = ($$)
squotes :: Annotation a => Doc a -> Doc a
squotes = enclose squote squote
dquotes :: Annotation a => Doc a -> Doc a
dquotes = enclose dquote dquote
| Document @(braces x)@ encloses document @x@ in braces ,
braces :: Annotation a => Doc a -> Doc a
braces = enclose lbrace rbrace
parens :: Annotation a => Doc a -> Doc a
parens = enclose lparen rparen
angles :: Annotation a => Doc a -> Doc a
angles = enclose langle rangle
\"[\ " and \"]\ " .
brackets :: Annotation a => Doc a -> Doc a
brackets = enclose lbracket rbracket
documents @l@ and @r@ using @(\<\>)@.
enclose :: Annotation a => Doc a -> Doc a -> Doc a -> Doc a
enclose l r x = l <> x <> r
char :: Annotation a => Char -> Doc a
char x = text [x]
lparen :: Annotation a => Doc a
lparen = char '('
rparen :: Annotation a => Doc a
rparen = char ')'
langle :: Annotation a => Doc a
langle = char '<'
| The document @rangle@ contains a right angle , " .
rangle :: Annotation a => Doc a
rangle = char '>'
| The document @lbrace@ contains a left brace , .
lbrace :: Annotation a => Doc a
lbrace = char '{'
rbrace :: Annotation a => Doc a
rbrace = char '}'
| The document @lbracket@ contains a left square bracket , \"[\ " .
lbracket :: Annotation a => Doc a
lbracket = char '['
rbracket :: Annotation a => Doc a
rbracket = char ']'
| The document @squote@ contains a single quote , \"'\ " .
squote :: Annotation a => Doc a
squote = char '\''
dquote :: Annotation a => Doc a
dquote = char '"'
| The document @semi@ contains a semi colon , \";\ " .
semi :: Annotation a => Doc a
semi = char ';'
colon :: Annotation a => Doc a
colon = char ':'
| The document @comma@ contains a comma , \",\ " .
comma :: Annotation a => Doc a
comma = char ','
dot :: Annotation a => Doc a
dot = char '.'
| The document @backslash@ contains a back slash , \"\\\ " .
backslash :: Annotation a => Doc a
backslash = char '\\'
equals :: Annotation a => Doc a
equals = char '='
using @line@ for newline characters and @char@ for all other
string :: Annotation a => String -> Doc a
string = vcat . map text . lines
bool :: Annotation a => Bool -> Doc a
bool b = text (show b)
| The document @(int i)@ shows the literal integer @i@ using
int :: Annotation a => Int -> Doc a
int i = text (show i)
| The document @(integer i)@ shows the literal integer @i@ using
integer :: Annotation a => Integer -> Doc a
integer i = text (show i)
| The document @(float f)@ shows the literal float @f@ using
float :: Annotation a => Float -> Doc a
float f = text (show f)
double :: Annotation a => Double -> Doc a
double d = text (show d)
rational :: Annotation a => Rational -> Doc a
rational r = text (show r)
below @x@ with an additional indentation of
hang :: Annotation a => Int -> Doc a -> Doc a -> Doc a
hang = hangWith " "
@(hang separator i x y)@ either @x@ and @y@ concatenated with >
indentation of
hangWith :: Annotation a => String -> Int -> Doc a -> Doc a -> Doc a
hangWith separator n x y = groupingBy separator [(0,x), (n,y)]
space :: Annotation a => Doc a
space = text " "
> > > import Data . Monoid
> > > import Data .
|
ff90ea7682a166efef35d4f44d753ccf3dfaed19d07e9decf603a3fb1754bf0c | luchiniatwork/cambada | native_image.clj | (ns cambada.native-image
(:require [cambada.cli :as cli]
[cambada.compile :as compile]
[cambada.jar-utils :as jar-utils]
[cambada.utils :as utils]
[clojure.java.io :as io]
[clojure.java.shell :as shell]
[clojure.tools.deps.alpha :as tools.deps]
[clojure.string :as string])
(:import [java.io File]
[java.nio.file Files Paths]
[java.nio.file.attribute FileAttribute]))
(def cli-options
(concat [["-m" "--main NS_NAME" "The namespace with the -main function"]
[nil "--image-name NAME" "The name of the image to be created"
:default (utils/directory-name)]
[nil "--graalvm-home PATH" "Path of the GraalVM home (defaults to GRAALVM_HOME)"
:default (System/getenv "GRAALVM_HOME")]
["-O" "--graalvm-opt OPT" "Opt to the GraalVM compiler. Can be specified multiple times"
:default []
:default-desc ""
:assoc-fn (fn [m k v]
(let [opts (get m k)]
(assoc m k (conj opts v))))]]
compile/cli-options))
(defn ^:private make-classpath
[{:keys [deps-map out] :as task}]
(tools.deps/make-classpath
(tools.deps/resolve-deps deps-map nil)
(conj (:paths deps-map) (utils/compiled-classes-path out))
{:extra-paths (:extra-paths deps-map)}))
(defn ^:private graalvm-opts [coll-from-task]
(map #(str "-" %) coll-from-task))
(defn ^:private shell-native-image
[bin all-args]
(let [{:keys [out err]} (apply shell/sh bin all-args)]
(some-> err not-empty cli/abort)
(some-> out not-empty cli/info)))
(defn ^:private build-native-image
[{:keys [main graalvm-opt] :as task} bin image-file]
(let [cp (make-classpath task)
base-args ["-cp" cp
"-H:+ReportExceptionStackTraces"
"-J-Dclojure.spec.skip-macros=true"
"-J-Dclojure.compiler.direct-linking=true"
#_"-H:ReflectionConfigurationFiles=reflection.json"
"--initialize-at-run-time=java.lang.Math\\$RandomNumberGeneratorHolder"
"--initialize-at-build-time"
"-H:Log=registerResource:"
"-H:EnableURLProtocols=http,https"
"--enable-all-security-services"
"-H:+JNI"
"--no-fallback"
"--no-server"
"-J-Xmx3g"
(format "-H:Name=%s" image-file)]
all-args (cond-> base-args
graalvm-opt (concat (graalvm-opts graalvm-opt))
true vec
:always (conj main))]
(shell-native-image bin all-args)))
(defn get-native-image-bin [graalvm-home]
(let [out (io/file graalvm-home "bin/native-image")]
(if-not (.exists out)
(cli/abort (->> ["Can't find GraalVM's native-image."
"Make sure it's installed and --graalvm-home is used correctly."]
(string/join " ")))
(.getAbsolutePath out))))
(defn apply! [{:keys [graalvm-home out image-name] :as task}]
(compile/apply! task)
(let [bin (get-native-image-bin graalvm-home)
image-file (.getPath (io/file out image-name))]
(cli/info "Creating" image-file)
(build-native-image task bin image-file)))
(defn -main [& args]
(let [{:keys [help] :as task} (cli/args->task args cli-options)]
(cli/runner
{:help? help
:task task
:entrypoint-main
"cambada.native-image"
:entrypoint-description
"Uses GraalVM's native-image build to generate a self-hosted image."
:apply-fn apply!})))
| null | https://raw.githubusercontent.com/luchiniatwork/cambada/99654689f8b6656c615543b3ed47d9b3a3c5773f/src/cambada/native_image.clj | clojure | (ns cambada.native-image
(:require [cambada.cli :as cli]
[cambada.compile :as compile]
[cambada.jar-utils :as jar-utils]
[cambada.utils :as utils]
[clojure.java.io :as io]
[clojure.java.shell :as shell]
[clojure.tools.deps.alpha :as tools.deps]
[clojure.string :as string])
(:import [java.io File]
[java.nio.file Files Paths]
[java.nio.file.attribute FileAttribute]))
(def cli-options
(concat [["-m" "--main NS_NAME" "The namespace with the -main function"]
[nil "--image-name NAME" "The name of the image to be created"
:default (utils/directory-name)]
[nil "--graalvm-home PATH" "Path of the GraalVM home (defaults to GRAALVM_HOME)"
:default (System/getenv "GRAALVM_HOME")]
["-O" "--graalvm-opt OPT" "Opt to the GraalVM compiler. Can be specified multiple times"
:default []
:default-desc ""
:assoc-fn (fn [m k v]
(let [opts (get m k)]
(assoc m k (conj opts v))))]]
compile/cli-options))
(defn ^:private make-classpath
[{:keys [deps-map out] :as task}]
(tools.deps/make-classpath
(tools.deps/resolve-deps deps-map nil)
(conj (:paths deps-map) (utils/compiled-classes-path out))
{:extra-paths (:extra-paths deps-map)}))
(defn ^:private graalvm-opts [coll-from-task]
(map #(str "-" %) coll-from-task))
(defn ^:private shell-native-image
[bin all-args]
(let [{:keys [out err]} (apply shell/sh bin all-args)]
(some-> err not-empty cli/abort)
(some-> out not-empty cli/info)))
(defn ^:private build-native-image
[{:keys [main graalvm-opt] :as task} bin image-file]
(let [cp (make-classpath task)
base-args ["-cp" cp
"-H:+ReportExceptionStackTraces"
"-J-Dclojure.spec.skip-macros=true"
"-J-Dclojure.compiler.direct-linking=true"
#_"-H:ReflectionConfigurationFiles=reflection.json"
"--initialize-at-run-time=java.lang.Math\\$RandomNumberGeneratorHolder"
"--initialize-at-build-time"
"-H:Log=registerResource:"
"-H:EnableURLProtocols=http,https"
"--enable-all-security-services"
"-H:+JNI"
"--no-fallback"
"--no-server"
"-J-Xmx3g"
(format "-H:Name=%s" image-file)]
all-args (cond-> base-args
graalvm-opt (concat (graalvm-opts graalvm-opt))
true vec
:always (conj main))]
(shell-native-image bin all-args)))
(defn get-native-image-bin [graalvm-home]
(let [out (io/file graalvm-home "bin/native-image")]
(if-not (.exists out)
(cli/abort (->> ["Can't find GraalVM's native-image."
"Make sure it's installed and --graalvm-home is used correctly."]
(string/join " ")))
(.getAbsolutePath out))))
(defn apply! [{:keys [graalvm-home out image-name] :as task}]
(compile/apply! task)
(let [bin (get-native-image-bin graalvm-home)
image-file (.getPath (io/file out image-name))]
(cli/info "Creating" image-file)
(build-native-image task bin image-file)))
(defn -main [& args]
(let [{:keys [help] :as task} (cli/args->task args cli-options)]
(cli/runner
{:help? help
:task task
:entrypoint-main
"cambada.native-image"
:entrypoint-description
"Uses GraalVM's native-image build to generate a self-hosted image."
:apply-fn apply!})))
| |
0494f823f381c39c9869626481837f493977cb74df942fca376fafc895e6db7b | metaocaml/ber-metaocaml | match_failure.ml | (* TEST
*)
(**
Test that value match failure in a match block raises Match_failure.
*)
let return_some_3 () = Some (1 + 2)
;;
let test_match_partial_match =
try
let _ = (match return_some_3 () with
| Some x when x < 3 -> "Some x"
| exception Failure _ -> "failure"
| exception Invalid_argument _ -> "invalid argument"
| None -> "None"
) [@ocaml.warning "-8"] in
assert false
with
Match_failure _ ->
print_endline "match failure, as expected"
;;
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/match-exception/match_failure.ml | ocaml | TEST
*
Test that value match failure in a match block raises Match_failure.
|
let return_some_3 () = Some (1 + 2)
;;
let test_match_partial_match =
try
let _ = (match return_some_3 () with
| Some x when x < 3 -> "Some x"
| exception Failure _ -> "failure"
| exception Invalid_argument _ -> "invalid argument"
| None -> "None"
) [@ocaml.warning "-8"] in
assert false
with
Match_failure _ ->
print_endline "match failure, as expected"
;;
|
4d53c3f25a043c3d80685f2a4bf731675cf2f4e96215786c7de601556008f5c3 | takikawa/racket-ppa | info.rkt | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define build-deps (quote ("errortrace-doc" "macro-debugger" "profile-doc" "readline-doc" "macro-debugger-text-lib" "profile-lib" "readline-lib" "xrepl-lib" "racket-doc"))) (define deps (quote ("base" "sandbox-lib" "scribble-lib"))) (define update-implies (quote ("xrepl-lib"))) (define pkg-desc "documentation part of \"xrepl\"") (define pkg-authors (quote (eli))) (define license (quote (Apache-2.0 OR MIT)))))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/26d6ae74a1b19258c9789b7c14c074d867a4b56b/share/pkgs/xrepl-doc/info.rkt | racket | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define build-deps (quote ("errortrace-doc" "macro-debugger" "profile-doc" "readline-doc" "macro-debugger-text-lib" "profile-lib" "readline-lib" "xrepl-lib" "racket-doc"))) (define deps (quote ("base" "sandbox-lib" "scribble-lib"))) (define update-implies (quote ("xrepl-lib"))) (define pkg-desc "documentation part of \"xrepl\"") (define pkg-authors (quote (eli))) (define license (quote (Apache-2.0 OR MIT)))))
| |
a73843729dcda17ca507b7dc04976128af3523efc7150f6b5d8eaab2f661402e | byteally/dbrecord | Main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
import DBRecord.Postgres.Internal.Reify
import Database.PostgreSQL.Simple
import Data.Function
main = do
let hints = defHints
getPostgresDbSchemaInfo localConnectInfo
where localConnectInfo =
defaultConnectInfo { connectHost = "localhost"
, connectPassword = "postgres"
, connectDatabase = "dbrecord_test"
, connectUser = "postgres"
}
| null | https://raw.githubusercontent.com/byteally/dbrecord/991efe9a293532ee9242b3e9a26434cf16f5b2a0/dbrecord-postgres-simple/reify-test/Main.hs | haskell | # LANGUAGE OverloadedStrings # | module Main where
import DBRecord.Postgres.Internal.Reify
import Database.PostgreSQL.Simple
import Data.Function
main = do
let hints = defHints
getPostgresDbSchemaInfo localConnectInfo
where localConnectInfo =
defaultConnectInfo { connectHost = "localhost"
, connectPassword = "postgres"
, connectDatabase = "dbrecord_test"
, connectUser = "postgres"
}
|
58eda2830d8be5e2644a7e4fc6fba1ad2c95f39bc63e0d8b067f3944a6bfadc2 | ds-wizard/engine-backend | List_GET.hs | module Wizard.Specs.API.Questionnaire.Version.List_GET (
list_GET,
) where
import Data.Aeson (encode)
import qualified Data.ByteString.Char8 as BS
import qualified Data.UUID as U
import Network.HTTP.Types
import Network.Wai (Application)
import Test.Hspec
import Test.Hspec.Wai hiding (shouldRespondWith)
import Test.Hspec.Wai.Matcher
import Shared.Api.Resource.Error.ErrorJM ()
import Shared.Localization.Messages.Public
import Shared.Model.Error.Error
import Wizard.Database.DAO.Questionnaire.QuestionnaireDAO
import qualified Wizard.Database.Migration.Development.DocumentTemplate.DocumentTemplateMigration as TML
import Wizard.Database.Migration.Development.Questionnaire.Data.QuestionnaireVersions
import Wizard.Database.Migration.Development.Questionnaire.Data.Questionnaires
import qualified Wizard.Database.Migration.Development.Questionnaire.QuestionnaireMigration as QTN
import qualified Wizard.Database.Migration.Development.User.UserMigration as U
import Wizard.Localization.Messages.Public
import Wizard.Model.Context.AppContext
import Wizard.Model.Questionnaire.Questionnaire
import SharedTest.Specs.API.Common
import Wizard.Specs.API.Common
import Wizard.Specs.Common
-- ------------------------------------------------------------------------
-- GET /questionnaires/{qtnUuid}/versions
-- ------------------------------------------------------------------------
list_GET :: AppContext -> SpecWith ((), Application)
list_GET appContext =
describe "GET /questionnaires/{qtnUuid}/versions" $ do
test_200 appContext
test_403 appContext
test_404 appContext
-- ----------------------------------------------------
-- ----------------------------------------------------
-- ----------------------------------------------------
reqMethod = methodGet
reqUrlT qtnUuid = BS.pack $ "/questionnaires/" ++ U.toString qtnUuid ++ "/versions"
reqHeadersT authHeader = authHeader
reqBody = ""
-- ----------------------------------------------------
-- ----------------------------------------------------
-- ----------------------------------------------------
test_200 appContext = do
create_test_200
"HTTP 200 OK (Owner, Private)"
appContext
questionnaire1
questionnaire1Ctn
[reqAuthHeader]
[qtn1AlbertEditPermRecordDto]
create_test_200
"HTTP 200 OK (Non-Owner, VisibleView)"
appContext
questionnaire2
questionnaire2Ctn
[reqNonAdminAuthHeader]
[qtn1AlbertEditPermRecordDto]
create_test_200
"HTTP 200 OK (Anonymous, VisibleView, Sharing)"
appContext
questionnaire7
questionnaire7Ctn
[]
[qtn1AlbertEditPermRecordDto]
create_test_200
"HTTP 200 OK (Non-Owner, VisibleEdit)"
appContext
questionnaire3
questionnaire3Ctn
[reqNonAdminAuthHeader]
[]
create_test_200 "HTTP 200 OK (Anonymous, Public, Sharing)" appContext questionnaire10 questionnaire10Ctn [] []
create_test_200 title appContext qtn qtnCtn authHeader permissions =
it title $
-- GIVEN: Prepare request
do
let reqUrl = reqUrlT qtn.uuid
let reqHeaders = reqHeadersT authHeader
-- AND: Prepare expectation
let expStatus = 200
let expHeaders = resCtHeader : resCorsHeaders
let expDto = qVersionsDto
let expBody = encode expDto
-- AND: Run migrations
runInContextIO U.runMigration appContext
runInContextIO TML.runMigration appContext
runInContextIO QTN.runMigration appContext
runInContextIO (insertQuestionnaire questionnaire7) appContext
runInContextIO (insertQuestionnaire questionnaire10) appContext
-- WHEN: Call API
response <- request reqMethod reqUrl reqHeaders reqBody
-- THEN: Compare response with expectation
let responseMatcher =
ResponseMatcher {matchHeaders = expHeaders, matchStatus = expStatus, matchBody = bodyEquals expBody}
response `shouldRespondWith` responseMatcher
-- ----------------------------------------------------
-- ----------------------------------------------------
-- ----------------------------------------------------
test_403 appContext = do
create_test_403
"HTTP 403 FORBIDDEN (Non-Owner, Private)"
appContext
questionnaire1
[reqNonAdminAuthHeader]
(_ERROR_VALIDATION__FORBIDDEN "View Questionnaire")
create_test_403
"HTTP 403 FORBIDDEN (Anonymous, VisibleView)"
appContext
questionnaire2
[]
_ERROR_SERVICE_USER__MISSING_USER
create_test_403
"HTTP 403 FORBIDDEN (Anonymous, Public)"
appContext
questionnaire3
[]
_ERROR_SERVICE_USER__MISSING_USER
create_test_403 title appContext qtn authHeader errorMessage =
it title $
-- GIVEN: Prepare request
do
let reqUrl = reqUrlT qtn.uuid
let reqHeaders = reqHeadersT authHeader
-- AND: Prepare expectation
let expStatus = 403
let expHeaders = resCtHeader : resCorsHeaders
let expDto = ForbiddenError errorMessage
let expBody = encode expDto
-- AND: Run migrations
runInContextIO U.runMigration appContext
runInContextIO TML.runMigration appContext
runInContextIO QTN.runMigration appContext
-- WHEN: Call API
response <- request reqMethod reqUrl reqHeaders reqBody
-- THEN: Compare response with expectation
let responseMatcher =
ResponseMatcher {matchHeaders = expHeaders, matchStatus = expStatus, matchBody = bodyEquals expBody}
response `shouldRespondWith` responseMatcher
-- ----------------------------------------------------
-- ----------------------------------------------------
-- ----------------------------------------------------
test_404 appContext =
createNotFoundTest'
reqMethod
"/questionnaires/f08ead5f-746d-411b-aee6-77ea3d24016a/versions"
[reqHeadersT reqAuthHeader]
reqBody
"questionnaire"
[("uuid", "f08ead5f-746d-411b-aee6-77ea3d24016a")]
| null | https://raw.githubusercontent.com/ds-wizard/engine-backend/d392b751192a646064305d3534c57becaa229f28/engine-wizard/test/Wizard/Specs/API/Questionnaire/Version/List_GET.hs | haskell | ------------------------------------------------------------------------
GET /questionnaires/{qtnUuid}/versions
------------------------------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
GIVEN: Prepare request
AND: Prepare expectation
AND: Run migrations
WHEN: Call API
THEN: Compare response with expectation
----------------------------------------------------
----------------------------------------------------
----------------------------------------------------
GIVEN: Prepare request
AND: Prepare expectation
AND: Run migrations
WHEN: Call API
THEN: Compare response with expectation
----------------------------------------------------
----------------------------------------------------
---------------------------------------------------- | module Wizard.Specs.API.Questionnaire.Version.List_GET (
list_GET,
) where
import Data.Aeson (encode)
import qualified Data.ByteString.Char8 as BS
import qualified Data.UUID as U
import Network.HTTP.Types
import Network.Wai (Application)
import Test.Hspec
import Test.Hspec.Wai hiding (shouldRespondWith)
import Test.Hspec.Wai.Matcher
import Shared.Api.Resource.Error.ErrorJM ()
import Shared.Localization.Messages.Public
import Shared.Model.Error.Error
import Wizard.Database.DAO.Questionnaire.QuestionnaireDAO
import qualified Wizard.Database.Migration.Development.DocumentTemplate.DocumentTemplateMigration as TML
import Wizard.Database.Migration.Development.Questionnaire.Data.QuestionnaireVersions
import Wizard.Database.Migration.Development.Questionnaire.Data.Questionnaires
import qualified Wizard.Database.Migration.Development.Questionnaire.QuestionnaireMigration as QTN
import qualified Wizard.Database.Migration.Development.User.UserMigration as U
import Wizard.Localization.Messages.Public
import Wizard.Model.Context.AppContext
import Wizard.Model.Questionnaire.Questionnaire
import SharedTest.Specs.API.Common
import Wizard.Specs.API.Common
import Wizard.Specs.Common
list_GET :: AppContext -> SpecWith ((), Application)
list_GET appContext =
describe "GET /questionnaires/{qtnUuid}/versions" $ do
test_200 appContext
test_403 appContext
test_404 appContext
reqMethod = methodGet
reqUrlT qtnUuid = BS.pack $ "/questionnaires/" ++ U.toString qtnUuid ++ "/versions"
reqHeadersT authHeader = authHeader
reqBody = ""
test_200 appContext = do
create_test_200
"HTTP 200 OK (Owner, Private)"
appContext
questionnaire1
questionnaire1Ctn
[reqAuthHeader]
[qtn1AlbertEditPermRecordDto]
create_test_200
"HTTP 200 OK (Non-Owner, VisibleView)"
appContext
questionnaire2
questionnaire2Ctn
[reqNonAdminAuthHeader]
[qtn1AlbertEditPermRecordDto]
create_test_200
"HTTP 200 OK (Anonymous, VisibleView, Sharing)"
appContext
questionnaire7
questionnaire7Ctn
[]
[qtn1AlbertEditPermRecordDto]
create_test_200
"HTTP 200 OK (Non-Owner, VisibleEdit)"
appContext
questionnaire3
questionnaire3Ctn
[reqNonAdminAuthHeader]
[]
create_test_200 "HTTP 200 OK (Anonymous, Public, Sharing)" appContext questionnaire10 questionnaire10Ctn [] []
create_test_200 title appContext qtn qtnCtn authHeader permissions =
it title $
do
let reqUrl = reqUrlT qtn.uuid
let reqHeaders = reqHeadersT authHeader
let expStatus = 200
let expHeaders = resCtHeader : resCorsHeaders
let expDto = qVersionsDto
let expBody = encode expDto
runInContextIO U.runMigration appContext
runInContextIO TML.runMigration appContext
runInContextIO QTN.runMigration appContext
runInContextIO (insertQuestionnaire questionnaire7) appContext
runInContextIO (insertQuestionnaire questionnaire10) appContext
response <- request reqMethod reqUrl reqHeaders reqBody
let responseMatcher =
ResponseMatcher {matchHeaders = expHeaders, matchStatus = expStatus, matchBody = bodyEquals expBody}
response `shouldRespondWith` responseMatcher
test_403 appContext = do
create_test_403
"HTTP 403 FORBIDDEN (Non-Owner, Private)"
appContext
questionnaire1
[reqNonAdminAuthHeader]
(_ERROR_VALIDATION__FORBIDDEN "View Questionnaire")
create_test_403
"HTTP 403 FORBIDDEN (Anonymous, VisibleView)"
appContext
questionnaire2
[]
_ERROR_SERVICE_USER__MISSING_USER
create_test_403
"HTTP 403 FORBIDDEN (Anonymous, Public)"
appContext
questionnaire3
[]
_ERROR_SERVICE_USER__MISSING_USER
create_test_403 title appContext qtn authHeader errorMessage =
it title $
do
let reqUrl = reqUrlT qtn.uuid
let reqHeaders = reqHeadersT authHeader
let expStatus = 403
let expHeaders = resCtHeader : resCorsHeaders
let expDto = ForbiddenError errorMessage
let expBody = encode expDto
runInContextIO U.runMigration appContext
runInContextIO TML.runMigration appContext
runInContextIO QTN.runMigration appContext
response <- request reqMethod reqUrl reqHeaders reqBody
let responseMatcher =
ResponseMatcher {matchHeaders = expHeaders, matchStatus = expStatus, matchBody = bodyEquals expBody}
response `shouldRespondWith` responseMatcher
test_404 appContext =
createNotFoundTest'
reqMethod
"/questionnaires/f08ead5f-746d-411b-aee6-77ea3d24016a/versions"
[reqHeadersT reqAuthHeader]
reqBody
"questionnaire"
[("uuid", "f08ead5f-746d-411b-aee6-77ea3d24016a")]
|
e05c3dabc6878c8a9e27cbbb38a8c2fd9b29454247b1b0a317a5cd02f69f0fb5 | ku-fpg/blank-canvas | Utils.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
module Graphics.Blank.Utils where
import qualified Data.ByteString as B
import Data.ByteString.Base64
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Graphics.Blank.Canvas
import Graphics.Blank.Generated
import Graphics.Blank.JavaScript
import Graphics.Blank.Types
import Prelude.Compat
-- | Clear the screen. Restores the default transformation matrix.
clearCanvas :: Canvas ()
clearCanvas = do
setTransform (1, 0, 0, 1, 0, 0)
me <- myCanvasContext
clearRect (0,0,width me,height me)
-- | Wrap a canvas computation in 'save' / 'restore'.
saveRestore :: Canvas a -> Canvas a
saveRestore m = do
save ()
r <- m
restore ()
return r
infixr 0 #
| The @#@-operator is the analog to the @.@-operator
in JavaScript . Example :
--
> grd # addColorStop(0 , " # 8ED6FF " ) ;
--
This can be seen as equivalent of @grd.addColorStop(0 , " # 8ED6FF")@.
(#) :: a -> (a -> b) -> b
(#) obj act = act obj
-- | Read a file, and generate a data URL.
--
-- > url <- readDataURL "image/png" "image/foo.png"
--
readDataURL :: Text -> FilePath -> IO URL
readDataURL mime_type filePath = do
dat <- B.readFile filePath
return $ URL $ "data:" <> mime_type <> ";base64," <> decodeUtf8 (encode dat)
-- | Find the MIME type for a data URL.
--
> > dataURLMimeType " data : image / png;base64,iVBORw ... "
-- > "image/png"
dataURLMimeType :: Text -> Text
dataURLMimeType txt
| dat /= "data" = error "dataURLMimeType: no 'data:'"
| not (Text.null rest0) && not (Text.null rest2) = mime_type
| otherwise = error "dataURLMimeType: bad parse"
where
(dat,rest0) = Text.span (/= ':') txt
rest1 = case Text.uncons rest0 of
Just (_,rest1') -> rest1'
Nothing -> "dataURLMimeType: Unexpected empty Text"
(mime_type,rest2) = Text.span (/= ';') rest1
-- | Write a data URL to a given file.
writeDataURL :: FilePath -> Text -> IO ()
writeDataURL fileName
= B.writeFile fileName
. decodeLenient
. encodeUtf8
. Text.tail
. Text.dropWhile (/= ',')
-- | Draws an image onto the canvas at the given x- and y-coordinates.
drawImageAt :: Image image => (image, Double, Double) -> Canvas ()
drawImageAt (img, dx, dy) = drawImage (img, [dx, dy])
| Acts like ' drawImageAt ' , but with two extra ' Double ' arguments . The third and fourth
-- 'Double's specify the width and height of the image, respectively.
drawImageSize :: Image image => (image, Double, Double, Double, Double) -> Canvas ()
drawImageSize (img, dx, dy, dw, dh) = drawImage (img, [dx, dy, dw, dh])
| Acts like ' drawImageSize ' , but with four extra ' Double ' arguments before the arguments
of ' drawImageSize ' . The first and second ' Double 's specify the x- and y - coordinates at
which the image begins to crop . The third and fourth ' Double 's specify the width and
-- height of the cropped image.
--
-- @
-- 'drawImageCrop' img 0 0 dw dh dx dy dw dh = 'drawImageSize' = dx dy dw dh
-- @
drawImageCrop :: Image image => (image, Double, Double, Double, Double, Double, Double, Double, Double) -> Canvas ()
drawImageCrop (img, sx, sy, sw, sh, dx, dy, dw, dh)
= drawImage (img, [sx, sy, sw, sh, dx, dy, dw, dh])
| Writes ' ImageData ' to the canvas at the given x- and y - coordinates .
putImageDataAt :: (ImageData, Double, Double) -> Canvas ()
putImageDataAt (imgData, dx, dy) = putImageData (imgData, [dx, dy])
| Acts like ' putImageDataAt ' , but with four extra ' Double ' arguments that specify
which region of the ' ImageData ' ( the dirty rectangle ) should be drawn . The third
and fourth ' Double 's specify the dirty rectangle 's x- and y- coordinates , and the
fifth and sixth ' Double 's specify the dirty rectangle 's width and height .
--
-- @
-- 'putImageDataDirty' imgData dx dy 0 0 w h = 'putImageDataAt' imgData dx dy
-- where (w, h) = case imgData of ImageData w' h' _ -> (w', h')
-- @
putImageDataDirty :: (ImageData, Double, Double, Double, Double, Double, Double) -> Canvas ()
putImageDataDirty (imgData, dx, dy, dirtyX, dirtyY, dirtyWidth, dirtyHeight)
= putImageData (imgData, [dx, dy, dirtyX, dirtyY, dirtyWidth, dirtyHeight])
| null | https://raw.githubusercontent.com/ku-fpg/blank-canvas/c6e8342d60ddbe09af2dc97ae6400e8c848f6266/Graphics/Blank/Utils.hs | haskell | # LANGUAGE OverloadedStrings #
| Clear the screen. Restores the default transformation matrix.
| Wrap a canvas computation in 'save' / 'restore'.
| Read a file, and generate a data URL.
> url <- readDataURL "image/png" "image/foo.png"
| Find the MIME type for a data URL.
> "image/png"
| Write a data URL to a given file.
| Draws an image onto the canvas at the given x- and y-coordinates.
'Double's specify the width and height of the image, respectively.
height of the cropped image.
@
'drawImageCrop' img 0 0 dw dh dx dy dw dh = 'drawImageSize' = dx dy dw dh
@
@
'putImageDataDirty' imgData dx dy 0 0 w h = 'putImageDataAt' imgData dx dy
where (w, h) = case imgData of ImageData w' h' _ -> (w', h')
@ | # LANGUAGE NoImplicitPrelude #
module Graphics.Blank.Utils where
import qualified Data.ByteString as B
import Data.ByteString.Base64
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Graphics.Blank.Canvas
import Graphics.Blank.Generated
import Graphics.Blank.JavaScript
import Graphics.Blank.Types
import Prelude.Compat
clearCanvas :: Canvas ()
clearCanvas = do
setTransform (1, 0, 0, 1, 0, 0)
me <- myCanvasContext
clearRect (0,0,width me,height me)
saveRestore :: Canvas a -> Canvas a
saveRestore m = do
save ()
r <- m
restore ()
return r
infixr 0 #
| The @#@-operator is the analog to the @.@-operator
in JavaScript . Example :
> grd # addColorStop(0 , " # 8ED6FF " ) ;
This can be seen as equivalent of @grd.addColorStop(0 , " # 8ED6FF")@.
(#) :: a -> (a -> b) -> b
(#) obj act = act obj
readDataURL :: Text -> FilePath -> IO URL
readDataURL mime_type filePath = do
dat <- B.readFile filePath
return $ URL $ "data:" <> mime_type <> ";base64," <> decodeUtf8 (encode dat)
> > dataURLMimeType " data : image / png;base64,iVBORw ... "
dataURLMimeType :: Text -> Text
dataURLMimeType txt
| dat /= "data" = error "dataURLMimeType: no 'data:'"
| not (Text.null rest0) && not (Text.null rest2) = mime_type
| otherwise = error "dataURLMimeType: bad parse"
where
(dat,rest0) = Text.span (/= ':') txt
rest1 = case Text.uncons rest0 of
Just (_,rest1') -> rest1'
Nothing -> "dataURLMimeType: Unexpected empty Text"
(mime_type,rest2) = Text.span (/= ';') rest1
writeDataURL :: FilePath -> Text -> IO ()
writeDataURL fileName
= B.writeFile fileName
. decodeLenient
. encodeUtf8
. Text.tail
. Text.dropWhile (/= ',')
drawImageAt :: Image image => (image, Double, Double) -> Canvas ()
drawImageAt (img, dx, dy) = drawImage (img, [dx, dy])
| Acts like ' drawImageAt ' , but with two extra ' Double ' arguments . The third and fourth
drawImageSize :: Image image => (image, Double, Double, Double, Double) -> Canvas ()
drawImageSize (img, dx, dy, dw, dh) = drawImage (img, [dx, dy, dw, dh])
| Acts like ' drawImageSize ' , but with four extra ' Double ' arguments before the arguments
of ' drawImageSize ' . The first and second ' Double 's specify the x- and y - coordinates at
which the image begins to crop . The third and fourth ' Double 's specify the width and
drawImageCrop :: Image image => (image, Double, Double, Double, Double, Double, Double, Double, Double) -> Canvas ()
drawImageCrop (img, sx, sy, sw, sh, dx, dy, dw, dh)
= drawImage (img, [sx, sy, sw, sh, dx, dy, dw, dh])
| Writes ' ImageData ' to the canvas at the given x- and y - coordinates .
putImageDataAt :: (ImageData, Double, Double) -> Canvas ()
putImageDataAt (imgData, dx, dy) = putImageData (imgData, [dx, dy])
| Acts like ' putImageDataAt ' , but with four extra ' Double ' arguments that specify
which region of the ' ImageData ' ( the dirty rectangle ) should be drawn . The third
and fourth ' Double 's specify the dirty rectangle 's x- and y- coordinates , and the
fifth and sixth ' Double 's specify the dirty rectangle 's width and height .
putImageDataDirty :: (ImageData, Double, Double, Double, Double, Double, Double) -> Canvas ()
putImageDataDirty (imgData, dx, dy, dirtyX, dirtyY, dirtyWidth, dirtyHeight)
= putImageData (imgData, [dx, dy, dirtyX, dirtyY, dirtyWidth, dirtyHeight])
|
28f11453a8e970791842b254d8bae95eb68d8b39cb27cabc1300a3201d6ed3b5 | Opetushallitus/ataru | render_field_schema.cljs | (ns ataru.hakija.schema.render-field-schema
(:require [schema.core :as s]))
(s/defschema RenderFieldArgs
{:field-descriptor s/Any
:render-field s/Any
:idx (s/maybe s/Int)})
| null | https://raw.githubusercontent.com/Opetushallitus/ataru/2d8ef1d3f972621e301a3818567d4e11219d2e82/src/cljs/ataru/hakija/schema/render_field_schema.cljs | clojure | (ns ataru.hakija.schema.render-field-schema
(:require [schema.core :as s]))
(s/defschema RenderFieldArgs
{:field-descriptor s/Any
:render-field s/Any
:idx (s/maybe s/Int)})
| |
2d192d7781cbd907e61b34a45f6c0968d43e4e7412d2633571b592a4c7ea9b19 | plumatic/grab-bag | config.clj | {:service {:type "test-service"
:jvm-opts "-Xmx2g -Xms2g"}
:machine {:tags {:owner "grabbag-corp"}}
:parameters {:foo 1
:swank-port 6666
:forward-ports {6666 :swank-port}}
:envs {:publisher {:env :stage
:machine {:groups ["woven" "grabbag-test"]}
:parameters {:service-type :publisher}}
:subscriber {:env :stage
:machine {:replicated? true
:groups ["woven" "grabbag-test-subscriber"]}
:parameters {:service-type :subscriber
:publisher-timeout [10 :years]}}}}
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/service/test-service/src/test_service/config.clj | clojure | {:service {:type "test-service"
:jvm-opts "-Xmx2g -Xms2g"}
:machine {:tags {:owner "grabbag-corp"}}
:parameters {:foo 1
:swank-port 6666
:forward-ports {6666 :swank-port}}
:envs {:publisher {:env :stage
:machine {:groups ["woven" "grabbag-test"]}
:parameters {:service-type :publisher}}
:subscriber {:env :stage
:machine {:replicated? true
:groups ["woven" "grabbag-test-subscriber"]}
:parameters {:service-type :subscriber
:publisher-timeout [10 :years]}}}}
| |
9161ce3c0f09a2d7ba957d635d6f60fa8e85ae22e197c5003a29addfa4a258ba | elnewfie/lslforge | DOMCombinators.hs | # OPTIONS_GHC -XNoMonomorphismRestriction #
module Language.Lsl.Internal.DOMCombinators where
import Control.Monad.State
import Control.Monad.Except
import Data.Maybe
import Language.Lsl.Internal.DOMProcessing
import Text.XML.HaXml(Attribute,AttValue(..),Document(..),Element(..),Content(..),Reference(..),xmlParse,info)
import Text.XML.HaXml.Posn(Posn(..),noPos)
import Language.Lsl.Internal.Util(readM)
type ContentAcceptor a = [Content Posn] -> Either String a
type ContentFinder a = StateT [Content Posn] (Either String) a
type ElementAcceptor a = Posn -> Element Posn -> Either String a
type ElementTester a = Posn -> Element Posn -> Either String (Maybe a)
type AttributeAcceptor a = Posn -> [Attribute] -> Either String a
type AttributeFinder a = StateT (Posn,[Attribute]) (Either String) a
type AttributeTester a = Posn -> Attribute -> Either String (Maybe a)
type AttributesTester a = Posn -> [Attribute] -> Either String (Maybe a)
el :: String -> (b -> a) -> ContentAcceptor b -> ElementTester a
el tag f cf p (Elem name _ cs) | tag /= unqualifiedQName name = Right Nothing
| otherwise = case cf cs of
Left s -> Left ("at " ++ show p ++ ": " ++ s)
Right v -> Right (Just (f v))
elWith :: String -> (a -> b -> c) -> AttributeAcceptor (Maybe a) -> ContentAcceptor b -> ElementTester c
elWith tag f af cf p (Elem name attrs cs) | tag /= unqualifiedQName name = Right Nothing
| otherwise = do
av <- af p attrs
case av of
Nothing -> Right Nothing
Just av -> do
cv <- cf cs
return (Just (f av cv))
liftElemTester :: (Posn -> (Element Posn) -> Either String (Maybe a)) -> (Content Posn -> Either String (Maybe a))
liftElemTester ef (CElem e pos) = case ef pos e of
Left s -> Left ("at " ++ show pos ++ ": " ++ s)
Right v -> Right v
canHaveElem :: ElementTester a -> ContentFinder (Maybe a)
canHaveElem ef = get >>= \ cs ->
mapM (\ c -> (lift . liftElemTester ef) c >>= return . (,) c) [ e | e@(CElem _ _) <- cs ]
>>= (\ vs -> case span (isNothing . snd) vs of
(bs,[]) -> put (map fst bs) >> return Nothing
(bs,c:cs) -> put (map fst (bs ++ cs)) >> return (snd c))
mustHaveElem :: ElementTester a -> ContentFinder a
mustHaveElem ef = get >>= \ cs ->
mapM (\ c -> (lift . liftElemTester ef) c >>= return . (,) c) [ e | e@(CElem _ _) <- cs ]
>>= (\ vs -> case span (isNothing . snd) vs of
(bs,[]) -> throwError ("element not found")
(bs,c:cs) -> put (map fst (bs ++ cs)) >> return (fromJust $ snd c))
mustHave :: String -> ContentAcceptor a -> ContentFinder a
mustHave s ca = catchError (mustHaveElem (el s id ca)) (\ e -> throwError (e ++ " (" ++ s ++ ")"))
canHave :: String -> ContentAcceptor a -> ContentFinder (Maybe a)
canHave s ca = canHaveElem (el s id ca)
comprises :: ContentFinder a -> ContentAcceptor a
comprises cf cs = case runStateT cf cs of
Left s -> throwError s
Right (v,cs') -> empty cs' >> return v
many :: ElementTester a -> ContentAcceptor [a]
many et cs = case runStateT go cs of
Left s -> throwError ("many: " ++ s)
Right (v,cs') -> empty cs' >> return v
where go = do
isEmpty <- get >>= return . null
if isEmpty then return []
else do
v <- mustHaveElem et
vs <- go
return (v:vs)
attContent :: AttValue -> String
attContent (AttValue xs) = foldl (flip (flip (++) . either id refToString)) [] xs
refToString (RefEntity s) = refEntityString s
refToString (RefChar i) = [toEnum i]
attrIs :: String -> String -> AttributeTester ()
attrIs k v _ (nm,attv) | v == attContent attv && k == unqualifiedQName nm = return (Just ())
| otherwise = return Nothing
hasAttr :: AttributeTester a -> AttributeFinder (Maybe a)
hasAttr at = get >>= \ (pos,attrs) -> mapM (lift . at pos) attrs >>= return . zip attrs >>= (\ ps -> case span (isNothing . snd) ps of
(bs,[]) -> return Nothing
(bs,c:cs) -> put (pos,map fst (bs ++ cs)) >> return (snd c))
thisAttr :: String -> String -> AttributesTester ()
thisAttr k v p atts = case runStateT (hasAttr (attrIs k v)) (p,atts) of
Left s -> throwError ("at " ++ show p ++ ": " ++ s)
Right (Nothing,(_,l)) -> return Nothing
Right (v,(_,[])) -> return v
_ -> throwError ("at " ++ show p ++ ": unexpected attributes")
infixr 1 <|>
(<|>) :: ElementTester a -> ElementTester a -> ElementTester a
(<|>) l r p e = case l p e of
Left s -> throwError ("at: " ++ show p ++ s)
Right Nothing -> r p e
Right v -> return v
nope :: ElementTester a
nope _ _ = return Nothing
choice :: [ElementTester a] -> ElementTester a
choice = foldl (<|>) nope
boolContent cs = simpleContent cs >>= (\ v -> case v of
"true" -> Right True
"false" -> Right False
s -> Left ("unrecognized bool " ++ s))
readableContent :: Read a => ContentAcceptor a
readableContent cs = simpleContent cs >>= readM
refEntityString "lt" = "<"
refEntityString "gt" = ">"
refEntityString "amp" = "&"
refEntityString "quot" = "\""
refEntityString "apos" = "'"
refEntityString _ = "?"
simpleContent :: ContentAcceptor String
simpleContent cs = mapM processContentItem cs >>= return . concat
where
processContentItem (CElem (Elem name _ _) _) = Left ("unexpected content element (" ++ show name ++ ")")
processContentItem (CString _ s _) = Right s
processContentItem (CRef (RefEntity s) _) = Right $ refEntityString s
processContentItem (CRef (RefChar i) _) = Right $ [toEnum i]
processContentItem (CMisc _ _) = Right "unexpected content"
empty :: ContentAcceptor ()
empty [] = Right ()
empty (c:_) = Left ("unexpected content at" ++ show (info c))
----------------------
data Foo = Bar { x :: Int, y :: String, z :: Maybe Double }
| Baz { q :: String, r :: Int }
deriving Show
bar :: ContentAcceptor Foo
bar = comprises $ do
x <- mustHave "x" readableContent
y <- mustHave "y" simpleContent
z <- canHave "z" readableContent
return (Bar x y z)
baz = comprises $ do
q <- mustHave "q" simpleContent
r <- mustHave "r" readableContent
return (Baz q r)
fooE = el "BarFoo" id bar
<|> el "BazFoo" id baz
fooAs :: String -> ElementTester Foo
fooAs s = elWith s (const id) (thisAttr "class" "BarFoo") bar
<|> elWith s (const id) (thisAttr "class" "BazFoo") baz
data Zzz = Zzz { content :: [Foo], bleah :: Foo } deriving Show
zzzE = el "Zzz" id $ comprises (mustHave "content" (many fooE) >>= \ cs -> mustHaveElem (fooAs "bleah") >>= \ b -> return $ Zzz cs b)
parse :: ElementAcceptor a -> String -> Either String a
parse eaf s = eaf noPos el
where Document _ _ el _ = xmlParse "" s
| null | https://raw.githubusercontent.com/elnewfie/lslforge/27eb84231c53fffba6bdb0db67bde81c1c12dbb9/lslforge/haskell/src/Language/Lsl/Internal/DOMCombinators.hs | haskell | -------------------- | # OPTIONS_GHC -XNoMonomorphismRestriction #
module Language.Lsl.Internal.DOMCombinators where
import Control.Monad.State
import Control.Monad.Except
import Data.Maybe
import Language.Lsl.Internal.DOMProcessing
import Text.XML.HaXml(Attribute,AttValue(..),Document(..),Element(..),Content(..),Reference(..),xmlParse,info)
import Text.XML.HaXml.Posn(Posn(..),noPos)
import Language.Lsl.Internal.Util(readM)
type ContentAcceptor a = [Content Posn] -> Either String a
type ContentFinder a = StateT [Content Posn] (Either String) a
type ElementAcceptor a = Posn -> Element Posn -> Either String a
type ElementTester a = Posn -> Element Posn -> Either String (Maybe a)
type AttributeAcceptor a = Posn -> [Attribute] -> Either String a
type AttributeFinder a = StateT (Posn,[Attribute]) (Either String) a
type AttributeTester a = Posn -> Attribute -> Either String (Maybe a)
type AttributesTester a = Posn -> [Attribute] -> Either String (Maybe a)
el :: String -> (b -> a) -> ContentAcceptor b -> ElementTester a
el tag f cf p (Elem name _ cs) | tag /= unqualifiedQName name = Right Nothing
| otherwise = case cf cs of
Left s -> Left ("at " ++ show p ++ ": " ++ s)
Right v -> Right (Just (f v))
elWith :: String -> (a -> b -> c) -> AttributeAcceptor (Maybe a) -> ContentAcceptor b -> ElementTester c
elWith tag f af cf p (Elem name attrs cs) | tag /= unqualifiedQName name = Right Nothing
| otherwise = do
av <- af p attrs
case av of
Nothing -> Right Nothing
Just av -> do
cv <- cf cs
return (Just (f av cv))
liftElemTester :: (Posn -> (Element Posn) -> Either String (Maybe a)) -> (Content Posn -> Either String (Maybe a))
liftElemTester ef (CElem e pos) = case ef pos e of
Left s -> Left ("at " ++ show pos ++ ": " ++ s)
Right v -> Right v
canHaveElem :: ElementTester a -> ContentFinder (Maybe a)
canHaveElem ef = get >>= \ cs ->
mapM (\ c -> (lift . liftElemTester ef) c >>= return . (,) c) [ e | e@(CElem _ _) <- cs ]
>>= (\ vs -> case span (isNothing . snd) vs of
(bs,[]) -> put (map fst bs) >> return Nothing
(bs,c:cs) -> put (map fst (bs ++ cs)) >> return (snd c))
mustHaveElem :: ElementTester a -> ContentFinder a
mustHaveElem ef = get >>= \ cs ->
mapM (\ c -> (lift . liftElemTester ef) c >>= return . (,) c) [ e | e@(CElem _ _) <- cs ]
>>= (\ vs -> case span (isNothing . snd) vs of
(bs,[]) -> throwError ("element not found")
(bs,c:cs) -> put (map fst (bs ++ cs)) >> return (fromJust $ snd c))
mustHave :: String -> ContentAcceptor a -> ContentFinder a
mustHave s ca = catchError (mustHaveElem (el s id ca)) (\ e -> throwError (e ++ " (" ++ s ++ ")"))
canHave :: String -> ContentAcceptor a -> ContentFinder (Maybe a)
canHave s ca = canHaveElem (el s id ca)
comprises :: ContentFinder a -> ContentAcceptor a
comprises cf cs = case runStateT cf cs of
Left s -> throwError s
Right (v,cs') -> empty cs' >> return v
many :: ElementTester a -> ContentAcceptor [a]
many et cs = case runStateT go cs of
Left s -> throwError ("many: " ++ s)
Right (v,cs') -> empty cs' >> return v
where go = do
isEmpty <- get >>= return . null
if isEmpty then return []
else do
v <- mustHaveElem et
vs <- go
return (v:vs)
attContent :: AttValue -> String
attContent (AttValue xs) = foldl (flip (flip (++) . either id refToString)) [] xs
refToString (RefEntity s) = refEntityString s
refToString (RefChar i) = [toEnum i]
attrIs :: String -> String -> AttributeTester ()
attrIs k v _ (nm,attv) | v == attContent attv && k == unqualifiedQName nm = return (Just ())
| otherwise = return Nothing
hasAttr :: AttributeTester a -> AttributeFinder (Maybe a)
hasAttr at = get >>= \ (pos,attrs) -> mapM (lift . at pos) attrs >>= return . zip attrs >>= (\ ps -> case span (isNothing . snd) ps of
(bs,[]) -> return Nothing
(bs,c:cs) -> put (pos,map fst (bs ++ cs)) >> return (snd c))
thisAttr :: String -> String -> AttributesTester ()
thisAttr k v p atts = case runStateT (hasAttr (attrIs k v)) (p,atts) of
Left s -> throwError ("at " ++ show p ++ ": " ++ s)
Right (Nothing,(_,l)) -> return Nothing
Right (v,(_,[])) -> return v
_ -> throwError ("at " ++ show p ++ ": unexpected attributes")
infixr 1 <|>
(<|>) :: ElementTester a -> ElementTester a -> ElementTester a
(<|>) l r p e = case l p e of
Left s -> throwError ("at: " ++ show p ++ s)
Right Nothing -> r p e
Right v -> return v
nope :: ElementTester a
nope _ _ = return Nothing
choice :: [ElementTester a] -> ElementTester a
choice = foldl (<|>) nope
boolContent cs = simpleContent cs >>= (\ v -> case v of
"true" -> Right True
"false" -> Right False
s -> Left ("unrecognized bool " ++ s))
readableContent :: Read a => ContentAcceptor a
readableContent cs = simpleContent cs >>= readM
refEntityString "lt" = "<"
refEntityString "gt" = ">"
refEntityString "amp" = "&"
refEntityString "quot" = "\""
refEntityString "apos" = "'"
refEntityString _ = "?"
simpleContent :: ContentAcceptor String
simpleContent cs = mapM processContentItem cs >>= return . concat
where
processContentItem (CElem (Elem name _ _) _) = Left ("unexpected content element (" ++ show name ++ ")")
processContentItem (CString _ s _) = Right s
processContentItem (CRef (RefEntity s) _) = Right $ refEntityString s
processContentItem (CRef (RefChar i) _) = Right $ [toEnum i]
processContentItem (CMisc _ _) = Right "unexpected content"
empty :: ContentAcceptor ()
empty [] = Right ()
empty (c:_) = Left ("unexpected content at" ++ show (info c))
data Foo = Bar { x :: Int, y :: String, z :: Maybe Double }
| Baz { q :: String, r :: Int }
deriving Show
bar :: ContentAcceptor Foo
bar = comprises $ do
x <- mustHave "x" readableContent
y <- mustHave "y" simpleContent
z <- canHave "z" readableContent
return (Bar x y z)
baz = comprises $ do
q <- mustHave "q" simpleContent
r <- mustHave "r" readableContent
return (Baz q r)
fooE = el "BarFoo" id bar
<|> el "BazFoo" id baz
fooAs :: String -> ElementTester Foo
fooAs s = elWith s (const id) (thisAttr "class" "BarFoo") bar
<|> elWith s (const id) (thisAttr "class" "BazFoo") baz
data Zzz = Zzz { content :: [Foo], bleah :: Foo } deriving Show
zzzE = el "Zzz" id $ comprises (mustHave "content" (many fooE) >>= \ cs -> mustHaveElem (fooAs "bleah") >>= \ b -> return $ Zzz cs b)
parse :: ElementAcceptor a -> String -> Either String a
parse eaf s = eaf noPos el
where Document _ _ el _ = xmlParse "" s
|
9753e5812da26a2d952bc7c72fe356e076b6c4475bee62760f62b100634fff23 | clojurewerkz/ogre | local_test.clj | (ns clojurewerkz.ogre.suite.local-test
(:refer-clojure :exclude [and count drop filter group-by key key identity iterate loop map max min next not or range repeat reverse sort shuffle])
(:require [clojurewerkz.ogre.core :refer :all])
(:import (org.apache.tinkerpop.gremlin.structure T)))
(defn get_g_V_localXoutE_countX
"g.V().local(outE().count())"
[g]
(traverse g (V)
(local (__ (outE) (count)))))
(defn get_g_VX4X_localXbothE_limitX2XX_otherV_name
"g.V(v4Id).local(bothE().limit(2)).otherV().values('name')"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE) (limit 2)))
(otherV)
(values :name)))
(defn get_g_V_localXpropertiesXlocationX_order_byXvalueX_limitX2XX_value
"g.V().local(properties('location').order().by(T.value, Order.incr).range(0, 2)).value()"
[g]
(traverse g (V)
(local (__ (properties :location) (order) (by (T/value) (sort :incr)) (range 0 2)))
(value)))
(defn get_g_V_hasXlabel_personX_asXaX_localXoutXcreatedX_asXbXX_selectXa_bX_byXnameX_byXidX
"g.V().has(T.label, 'person').as('a').local(out('created').as('b')).select('a', 'b').by('name').by(T.id)"
[g]
(traverse g (V)
(has T/label :person) (as :a)
(local (__ (out :created) (as :b)))
(select :a :b)
(by :name)
(by T/id)))
(defn get_g_VX1X_localXoutEXknowsX_limitX1XX_inV_name
"g.V(v1Id).local(outE('knows').limit(1)).inV().values('name')"
[g v1Id]
(traverse g (V v1Id)
(local (__ (outE :knows) (limit 1)))
(inV)
(values :name)))
(defn get_g_V_localXbothEXcreatedX_limitX1XX_otherV_name
"g.V().local(bothE('created').limit(1)).otherV().values('name')"
[g]
(traverse g (V)
(local (__ (bothE :created) (limit 1)))
(otherV)
(values :name)))
(defn get_g_VX4X_localXbothEX1_createdX_limitX1XX
"g.V(v4Id).local(bothE('created').limit(1))"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE :created) (limit 1)))))
(defn get_g_VX4X_localXbothEXknows_createdX_limitX1XX
"g.V(v4Id).local(bothE('knows', 'created').limit(1))"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE :knows :created) (limit 1)))))
(defn get_g_VX4X_localXbothE_limitX1XX_otherV_name
"g.V(v4Id).local(bothE().limit(1)).otherV().values('name')"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE) (limit 1)))
(otherV)
(values :name)))
(defn get_g_V_localXinEXknowsX_limitX2XX_outV_name
"g.V().local(inE('knows').limit(2)).outV().values('name')"
[g]
(traverse g (V)
(local (__ (inE :knows) (limit 2)))
(outV)
(values :name)))
(defn get_g_V_localXmatchXproject__created_person__person_name_nameX_selectXname_projectX_by_byXnameX
"g.V().local(__.match(as('project').in('created').as('person'),
as('person').values('name').as('name')))
.<String>select('name', 'project').by().by('name');"
[g]
(traverse g (V)
(local (__ (match (__ (as :project) (in :created) (as :person))
(__ (as :person) (values :name) (as :name)))))
(select :name :project)
(by) (by :name)))
| null | https://raw.githubusercontent.com/clojurewerkz/ogre/cfc5648881d509a55f8a951e01d7b2a166e71d17/test/clojure/clojurewerkz/ogre/suite/local_test.clj | clojure | " | (ns clojurewerkz.ogre.suite.local-test
(:refer-clojure :exclude [and count drop filter group-by key key identity iterate loop map max min next not or range repeat reverse sort shuffle])
(:require [clojurewerkz.ogre.core :refer :all])
(:import (org.apache.tinkerpop.gremlin.structure T)))
(defn get_g_V_localXoutE_countX
"g.V().local(outE().count())"
[g]
(traverse g (V)
(local (__ (outE) (count)))))
(defn get_g_VX4X_localXbothE_limitX2XX_otherV_name
"g.V(v4Id).local(bothE().limit(2)).otherV().values('name')"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE) (limit 2)))
(otherV)
(values :name)))
(defn get_g_V_localXpropertiesXlocationX_order_byXvalueX_limitX2XX_value
"g.V().local(properties('location').order().by(T.value, Order.incr).range(0, 2)).value()"
[g]
(traverse g (V)
(local (__ (properties :location) (order) (by (T/value) (sort :incr)) (range 0 2)))
(value)))
(defn get_g_V_hasXlabel_personX_asXaX_localXoutXcreatedX_asXbXX_selectXa_bX_byXnameX_byXidX
"g.V().has(T.label, 'person').as('a').local(out('created').as('b')).select('a', 'b').by('name').by(T.id)"
[g]
(traverse g (V)
(has T/label :person) (as :a)
(local (__ (out :created) (as :b)))
(select :a :b)
(by :name)
(by T/id)))
(defn get_g_VX1X_localXoutEXknowsX_limitX1XX_inV_name
"g.V(v1Id).local(outE('knows').limit(1)).inV().values('name')"
[g v1Id]
(traverse g (V v1Id)
(local (__ (outE :knows) (limit 1)))
(inV)
(values :name)))
(defn get_g_V_localXbothEXcreatedX_limitX1XX_otherV_name
"g.V().local(bothE('created').limit(1)).otherV().values('name')"
[g]
(traverse g (V)
(local (__ (bothE :created) (limit 1)))
(otherV)
(values :name)))
(defn get_g_VX4X_localXbothEX1_createdX_limitX1XX
"g.V(v4Id).local(bothE('created').limit(1))"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE :created) (limit 1)))))
(defn get_g_VX4X_localXbothEXknows_createdX_limitX1XX
"g.V(v4Id).local(bothE('knows', 'created').limit(1))"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE :knows :created) (limit 1)))))
(defn get_g_VX4X_localXbothE_limitX1XX_otherV_name
"g.V(v4Id).local(bothE().limit(1)).otherV().values('name')"
[g v4Id]
(traverse g (V v4Id)
(local (__ (bothE) (limit 1)))
(otherV)
(values :name)))
(defn get_g_V_localXinEXknowsX_limitX2XX_outV_name
"g.V().local(inE('knows').limit(2)).outV().values('name')"
[g]
(traverse g (V)
(local (__ (inE :knows) (limit 2)))
(outV)
(values :name)))
(defn get_g_V_localXmatchXproject__created_person__person_name_nameX_selectXname_projectX_by_byXnameX
"g.V().local(__.match(as('project').in('created').as('person'),
as('person').values('name').as('name')))
[g]
(traverse g (V)
(local (__ (match (__ (as :project) (in :created) (as :person))
(__ (as :person) (values :name) (as :name)))))
(select :name :project)
(by) (by :name)))
|
2be5b19e43c72aef7aabfff20f1e61d12b451de2ae1f53455c153ad48fe40650 | iijlab/direct-hs | sample.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
-- | Sample application of wss-client.
-- A simple command like wscat.
import Control.Monad (forever, unless)
import Control.Concurrent (forkIO, killThread)
import qualified Data.ByteString.Char8 as B
import System.Environment (getArgs)
import qualified Network.WebSockets.Client as WS
main :: IO ()
main = do
url <- head <$> getArgs
WS.withConnection url $ \conn -> do
tid <- forkIO $ forever $ do
msg <- WS.receiveData conn
B.putStrLn msg
let loop = do
line <- B.getLine
unless (B.null line || line == "\r")
$ WS.sendTextData conn line
>> loop
loop
WS.sendClose conn $ B.pack "Bye!"
killThread tid
| null | https://raw.githubusercontent.com/iijlab/direct-hs/2422fd6fe008109e8dfb74f31d65b0d5a0330788/wss-client/app/sample.hs | haskell | # LANGUAGE OverloadedStrings #
| Sample application of wss-client.
A simple command like wscat. |
module Main where
import Control.Monad (forever, unless)
import Control.Concurrent (forkIO, killThread)
import qualified Data.ByteString.Char8 as B
import System.Environment (getArgs)
import qualified Network.WebSockets.Client as WS
main :: IO ()
main = do
url <- head <$> getArgs
WS.withConnection url $ \conn -> do
tid <- forkIO $ forever $ do
msg <- WS.receiveData conn
B.putStrLn msg
let loop = do
line <- B.getLine
unless (B.null line || line == "\r")
$ WS.sendTextData conn line
>> loop
loop
WS.sendClose conn $ B.pack "Bye!"
killThread tid
|
de2e21d02dbcc33708765afd1557fb835239a73f1a8b464d67075396642b969b | bytekid/mkbtt | nodeTermIndex.mli | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
(*** TYPES **************************************************************)
type entry = int * bool
type t = entry World.M.ACDiscTree.t
(*** VALUES *************************************************************)
val make : t World.Monad.t
val insert : U.Term.t * entry -> unit World.Monad.t
delete : U.Term.t * Types.NodeTermIndex.entry - > unit World . Monad.t
val variants :
U.Term.t -> entry list World.Monad.t
val variants_in :
t -> U.Term.t -> entry list World.Monad.t
val encompassments :
(U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
val encompassments_in :
t -> (U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
val encompassments_below_root :
(U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
val empty : t World.Monad.t
val get_index : t World.Monad.t
val indexing_required :
U.Term.t -> U.Term.t -> (int * bool) -> bool World.Monad.t
val insert_one :
t -> U.Term.t * entry -> t World.Monad.t
val add_node : int -> unit World.Monad.t
val single_index : int -> t World.Monad.t
val add_s_node : int -> unit World.Monad.t
val s_encompassments :
(U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
remove_node : bool - > int - > unit World . Monad.t
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mascott/src/nodeTermIndex.mli | ocaml | ** TYPES *************************************************************
** VALUES ************************************************************ | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
type entry = int * bool
type t = entry World.M.ACDiscTree.t
val make : t World.Monad.t
val insert : U.Term.t * entry -> unit World.Monad.t
delete : U.Term.t * Types.NodeTermIndex.entry - > unit World . Monad.t
val variants :
U.Term.t -> entry list World.Monad.t
val variants_in :
t -> U.Term.t -> entry list World.Monad.t
val encompassments :
(U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
val encompassments_in :
t -> (U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
val encompassments_below_root :
(U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
val empty : t World.Monad.t
val get_index : t World.Monad.t
val indexing_required :
U.Term.t -> U.Term.t -> (int * bool) -> bool World.Monad.t
val insert_one :
t -> U.Term.t * entry -> t World.Monad.t
val add_node : int -> unit World.Monad.t
val single_index : int -> t World.Monad.t
val add_s_node : int -> unit World.Monad.t
val s_encompassments :
(U.Term.t * ACPosition.t list) -> (entry * ACPosition.t) list World.Monad.t
remove_node : bool - > int - > unit World . Monad.t
|
53e99e954b494e7d9b3f87dd7ad2cb143e4b3886c277d09ea0e6be3973161504 | rd--/hsc3 | localIn.help.hs | -- localIn ; ping pong ; warning=feedback
let z = soundIn 0
a1 = localIn 2 ar 0 + mce [z,0]
a2 = delayN a1 0.2 0.2
a3 = mceEdit reverse a2 * 0.8
in mrg [z + a2,localOut a3]
localIn ; tape - delay ; ; warning = feedback
let rotate2_mce z p =
case mceChannels z of
[l,r] -> rotate2 l r p
_ -> error "rotate2_mce"
tape_delay dt fb z =
let a = amplitude kr (mix z) 0.01 0.01
z' = z * (a >** 0.02)
l0 = localIn 2 ar 0
l1 = onePole l0 0.4
l2 = onePole l1 (-0.08)
l3 = rotate2_mce l2 0.2
l4 = delayN l3 dt dt
l5 = leakDC l4 0.995
l6 = softClip ((l5 + z') * fb)
in mrg2 (l6 * 0.1) (localOut l6)
y = mouseY kr 0.75 1.25 Linear 0.2
in tape_delay 0.25 y (soundIn 0)
| null | https://raw.githubusercontent.com/rd--/hsc3/60cb422f0e2049f00b7e15076b2667b85ad8f638/Help/Ugen/localIn.help.hs | haskell | localIn ; ping pong ; warning=feedback | let z = soundIn 0
a1 = localIn 2 ar 0 + mce [z,0]
a2 = delayN a1 0.2 0.2
a3 = mceEdit reverse a2 * 0.8
in mrg [z + a2,localOut a3]
localIn ; tape - delay ; ; warning = feedback
let rotate2_mce z p =
case mceChannels z of
[l,r] -> rotate2 l r p
_ -> error "rotate2_mce"
tape_delay dt fb z =
let a = amplitude kr (mix z) 0.01 0.01
z' = z * (a >** 0.02)
l0 = localIn 2 ar 0
l1 = onePole l0 0.4
l2 = onePole l1 (-0.08)
l3 = rotate2_mce l2 0.2
l4 = delayN l3 dt dt
l5 = leakDC l4 0.995
l6 = softClip ((l5 + z') * fb)
in mrg2 (l6 * 0.1) (localOut l6)
y = mouseY kr 0.75 1.25 Linear 0.2
in tape_delay 0.25 y (soundIn 0)
|
f5c2fa85dce318d70bc9cb0a1e45b7be0e517c5b23e9ec41d9f3a88473e5425d | chrondb/chrondb | main_test.clj | (ns counter.main-test
(:require [chrondb.api-v1 :as api-v1]
[chrondb.config :as config]
[chrondb.func :as func]
[clojure.pprint :as pp]
[clojure.test :refer [deftest is]]
[counter.main :as counter]
[io.pedestal.http :as http]
[io.pedestal.test :refer [response-for]]))
(set! *warn-on-reflection* true)
(deftest counter-v0
#_(func/delete-database config/chrondb-local-git-dir)
(let [chronn (func/path->repo config/chrondb-local-git-dir
:branch-name config/chrondb-local-repo-branch)
service-fn (-> {::counter/chronn chronn}
counter/app-v0
http/default-interceptors
http/dev-interceptors
http/create-servlet
::http/service-fn)]
(is (= {:status 200, :body "{}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :post "/")
(select-keys [:status :body])
(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
(doto pp/pprint))))))
(deftest counter-v1
(let [chronn (-> "chrondb:file-v1"
(doto api-v1/delete-database
api-v1/create-database)
api-v1/connect)
service-fn (-> {::counter/chronn chronn}
counter/app-v1
http/default-interceptors
http/dev-interceptors
http/create-servlet
::http/service-fn)]
(is (= {:status 200, :body "{}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :post "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":2}"}
(-> (response-for service-fn :post "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":2}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
#_(doto pp/pprint))))))
| null | https://raw.githubusercontent.com/chrondb/chrondb/6542716a5dbd7898d33a88e9fbb7e984d8059b06/samples/counter/test/counter/main_test.clj | clojure | (ns counter.main-test
(:require [chrondb.api-v1 :as api-v1]
[chrondb.config :as config]
[chrondb.func :as func]
[clojure.pprint :as pp]
[clojure.test :refer [deftest is]]
[counter.main :as counter]
[io.pedestal.http :as http]
[io.pedestal.test :refer [response-for]]))
(set! *warn-on-reflection* true)
(deftest counter-v0
#_(func/delete-database config/chrondb-local-git-dir)
(let [chronn (func/path->repo config/chrondb-local-git-dir
:branch-name config/chrondb-local-repo-branch)
service-fn (-> {::counter/chronn chronn}
counter/app-v0
http/default-interceptors
http/dev-interceptors
http/create-servlet
::http/service-fn)]
(is (= {:status 200, :body "{}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :post "/")
(select-keys [:status :body])
(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
(doto pp/pprint))))))
(deftest counter-v1
(let [chronn (-> "chrondb:file-v1"
(doto api-v1/delete-database
api-v1/create-database)
api-v1/connect)
service-fn (-> {::counter/chronn chronn}
counter/app-v1
http/default-interceptors
http/dev-interceptors
http/create-servlet
::http/service-fn)]
(is (= {:status 200, :body "{}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :post "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":1}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":2}"}
(-> (response-for service-fn :post "/")
(select-keys [:status :body])
#_(doto pp/pprint))))
(is (= {:status 200, :body "{\"n\":2}"}
(-> (response-for service-fn :get "/")
(select-keys [:status :body])
#_(doto pp/pprint))))))
| |
3035f04a6b1bf579aeea5804d4c40299bd1915de6e4fd90e134265a4b1104831 | philnguyen/soft-contract | data.rkt | #lang racket/base
(require racket/contract)
(provide
label/c
node/c
suffix-tree/c
(contract-out
[struct label ([datum (vectorof (or/c char? symbol?))] [i exact-nonnegative-integer?] [j exact-nonnegative-integer?])]
[struct suffix-tree ([root node/c])]
[struct node ([up-label label/c] [parent (or/c not node/c)] [children {listof node/c}] [suffix-link (or/c not node/c)])]))
(define-struct label (datum i j) #:mutable)
;; A suffix tree consists of a root node.
(define-struct suffix-tree (root))
;; up-label: label
;; parent: (union #f node)
children : ( listof node )
;; suffix-link: (union #f node)
(define-struct node (up-label parent children suffix-link) #:mutable)
(define label/c (struct/c label (vectorof (or/c char? symbol?)) exact-nonnegative-integer? exact-nonnegative-integer?))
(define node/c (struct/c node
label/c
(or/c not (recursive-contract node/c #:chaperone))
(listof (recursive-contract node/c #:chaperone))
(or/c not (recursive-contract node/c #:chaperone))))
(define suffix-tree/c (struct/c suffix-tree node/c))
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/gradual-typing-benchmarks/suffixtree/data.rkt | racket | A suffix tree consists of a root node.
up-label: label
parent: (union #f node)
suffix-link: (union #f node) | #lang racket/base
(require racket/contract)
(provide
label/c
node/c
suffix-tree/c
(contract-out
[struct label ([datum (vectorof (or/c char? symbol?))] [i exact-nonnegative-integer?] [j exact-nonnegative-integer?])]
[struct suffix-tree ([root node/c])]
[struct node ([up-label label/c] [parent (or/c not node/c)] [children {listof node/c}] [suffix-link (or/c not node/c)])]))
(define-struct label (datum i j) #:mutable)
(define-struct suffix-tree (root))
children : ( listof node )
(define-struct node (up-label parent children suffix-link) #:mutable)
(define label/c (struct/c label (vectorof (or/c char? symbol?)) exact-nonnegative-integer? exact-nonnegative-integer?))
(define node/c (struct/c node
label/c
(or/c not (recursive-contract node/c #:chaperone))
(listof (recursive-contract node/c #:chaperone))
(or/c not (recursive-contract node/c #:chaperone))))
(define suffix-tree/c (struct/c suffix-tree node/c))
|
d2099293fe6dde66a1abe4d9dbef43bcab389e34d6d0a078d6d69908fa493584 | jordanthayer/ocaml-search | iterative_deepening_search.ml | *
Iterative deepening search
- July 2009
Iterative deepening search
Jordan Thayer - July 2009 *)
type 'a node = {
data : 'a;
g : float;
depth: int;
}
let wrap f =
(** takes a function to be applied to the data payload
such as the goal-test or the domain heuristic and
wraps it so that it can be applied to the entire
node *)
(fun n -> f n.data)
let unwrap_sol s =
(** Unwraps a solution which is in the form of a search node and presents
it in the format the domain expects it, which is domain data followed
by cost *)
match s with
Limit.Nothing -> None
| Limit.Incumbent (q,n) -> Some (n.data, n.g)
let better_p a b =
(** Sorts nodes solely on total cost information *)
a.g <= b.g
let make_expand expand =
(** Takes the domain expand function and a heuristic calculator
and creates an expand function which returns search nodes. *)
(fun n ->
List.map (fun (d, g) -> { data = d;
g = g;
depth = n.depth + 1;}) (expand n.data n.g))
let this_bound = ref 1
let expansions = ref 0
let iter_no = ref 0
let reset_bound vl =
Iterative_deepening.output_col_hdr ();
this_bound := vl;
iter_no := 0;
expansions := 0
let see_expansion depth n children =
incr expansions
let next_bound _ n =
Iterative_deepening.output_row !iter_no (float !this_bound) !expansions;
incr iter_no;
this_bound := !this_bound + 1;
expansions := 0
let check_bound n = n.depth <= !this_bound
let no_dups sface =
(** Performs an A* search from the initial state to a goal,
for domains with no duplicates. *)
let search_interface = Search_interface.make
~node_expand:(make_expand sface.Search_interface.domain_expand)
~goal_p:(wrap sface.Search_interface.goal_p)
~halt_on:sface.Search_interface.halt_on
~hash:sface.Search_interface.hash
~equals:sface.Search_interface.equals
sface.Search_interface.domain
{ data = sface.Search_interface.initial;
g = 0.;
depth = 0;}
better_p
(Limit.make_default_logger (fun n -> n.g)
(wrap sface.Search_interface.get_sol_length))
in
reset_bound 1;
Limit.unwrap_sol5 unwrap_sol
(Iterative_deepening.no_dups
search_interface
better_p
see_expansion
check_bound
next_bound)
let dups sface =
(** Performs an A* search from the initial state to a goal,
for domains with no duplicates. *)
let search_interface = Search_interface.make
~node_expand:(make_expand sface.Search_interface.domain_expand)
~goal_p:(wrap sface.Search_interface.goal_p)
~halt_on:sface.Search_interface.halt_on
~hash:sface.Search_interface.hash
~equals:sface.Search_interface.equals
sface.Search_interface.domain
{ data = sface.Search_interface.initial;
g = 0.;
depth = 0;}
better_p
(Limit.make_default_logger (fun n -> n.g)
(wrap sface.Search_interface.get_sol_length))
in
reset_bound 1;
Limit.unwrap_sol6 unwrap_sol
(Iterative_deepening.dups
search_interface
better_p
see_expansion
check_bound
next_bound)
EOF
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/search/iterative/iterative_deepening_search.ml | ocaml | * takes a function to be applied to the data payload
such as the goal-test or the domain heuristic and
wraps it so that it can be applied to the entire
node
* Unwraps a solution which is in the form of a search node and presents
it in the format the domain expects it, which is domain data followed
by cost
* Sorts nodes solely on total cost information
* Takes the domain expand function and a heuristic calculator
and creates an expand function which returns search nodes.
* Performs an A* search from the initial state to a goal,
for domains with no duplicates.
* Performs an A* search from the initial state to a goal,
for domains with no duplicates. | *
Iterative deepening search
- July 2009
Iterative deepening search
Jordan Thayer - July 2009 *)
type 'a node = {
data : 'a;
g : float;
depth: int;
}
let wrap f =
(fun n -> f n.data)
let unwrap_sol s =
match s with
Limit.Nothing -> None
| Limit.Incumbent (q,n) -> Some (n.data, n.g)
let better_p a b =
a.g <= b.g
let make_expand expand =
(fun n ->
List.map (fun (d, g) -> { data = d;
g = g;
depth = n.depth + 1;}) (expand n.data n.g))
let this_bound = ref 1
let expansions = ref 0
let iter_no = ref 0
let reset_bound vl =
Iterative_deepening.output_col_hdr ();
this_bound := vl;
iter_no := 0;
expansions := 0
let see_expansion depth n children =
incr expansions
let next_bound _ n =
Iterative_deepening.output_row !iter_no (float !this_bound) !expansions;
incr iter_no;
this_bound := !this_bound + 1;
expansions := 0
let check_bound n = n.depth <= !this_bound
let no_dups sface =
let search_interface = Search_interface.make
~node_expand:(make_expand sface.Search_interface.domain_expand)
~goal_p:(wrap sface.Search_interface.goal_p)
~halt_on:sface.Search_interface.halt_on
~hash:sface.Search_interface.hash
~equals:sface.Search_interface.equals
sface.Search_interface.domain
{ data = sface.Search_interface.initial;
g = 0.;
depth = 0;}
better_p
(Limit.make_default_logger (fun n -> n.g)
(wrap sface.Search_interface.get_sol_length))
in
reset_bound 1;
Limit.unwrap_sol5 unwrap_sol
(Iterative_deepening.no_dups
search_interface
better_p
see_expansion
check_bound
next_bound)
let dups sface =
let search_interface = Search_interface.make
~node_expand:(make_expand sface.Search_interface.domain_expand)
~goal_p:(wrap sface.Search_interface.goal_p)
~halt_on:sface.Search_interface.halt_on
~hash:sface.Search_interface.hash
~equals:sface.Search_interface.equals
sface.Search_interface.domain
{ data = sface.Search_interface.initial;
g = 0.;
depth = 0;}
better_p
(Limit.make_default_logger (fun n -> n.g)
(wrap sface.Search_interface.get_sol_length))
in
reset_bound 1;
Limit.unwrap_sol6 unwrap_sol
(Iterative_deepening.dups
search_interface
better_p
see_expansion
check_bound
next_bound)
EOF
|
2027845474021618d3ce5f7fcbe8fe54a78be89ee1ed355fe733729fa626e61a | planetfederal/signal | response.clj | Copyright 2016 - 2018 Boundless ,
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns signal.components.http.response
(:require [camel-snake-kebab.core :refer :all]
[camel-snake-kebab.extras :refer [transform-keys]]
[clojure.tools.logging :as log]))
(defn is-error? [status]
(< 300 status))
(defn make-response
"Creates a response map using the status, body, and headers."
[status body & {:as headers}]
(let [res-body (assoc {}
:result (if (is-error? status) nil body)
:error (if (is-error? status) body nil))]
(let [res {:status status :body res-body :headers headers}]
(log/trace "Returning response" res)
res)))
(def ok (partial make-response 200))
(def created (partial make-response 201))
(def accepted (partial make-response 202))
(def bad-request (partial make-response 400))
(def unauthorized (partial make-response 401))
(def forbidden (partial make-response 403))
(def not-found (partial make-response 404))
(def conflict (partial make-response 409))
(def error (partial make-response 500))
(def unavailable (partial make-response 503))
| null | https://raw.githubusercontent.com/planetfederal/signal/e3eae56c753f0a56614ba8522278057ab2358c96/src/signal/components/http/response.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2016 - 2018 Boundless ,
distributed under the License is distributed on an " AS IS " BASIS ,
(ns signal.components.http.response
(:require [camel-snake-kebab.core :refer :all]
[camel-snake-kebab.extras :refer [transform-keys]]
[clojure.tools.logging :as log]))
(defn is-error? [status]
(< 300 status))
(defn make-response
"Creates a response map using the status, body, and headers."
[status body & {:as headers}]
(let [res-body (assoc {}
:result (if (is-error? status) nil body)
:error (if (is-error? status) body nil))]
(let [res {:status status :body res-body :headers headers}]
(log/trace "Returning response" res)
res)))
(def ok (partial make-response 200))
(def created (partial make-response 201))
(def accepted (partial make-response 202))
(def bad-request (partial make-response 400))
(def unauthorized (partial make-response 401))
(def forbidden (partial make-response 403))
(def not-found (partial make-response 404))
(def conflict (partial make-response 409))
(def error (partial make-response 500))
(def unavailable (partial make-response 503))
|
fa2979824c5761d8c5727b1f72f2ab97c3af68a42623ab0017d8ef994fe980f3 | mivoq/hunpos | main_ocaml.ml | let _ = Hello.hello_world ();
Gc.full_major ();
;;
| null | https://raw.githubusercontent.com/mivoq/hunpos/0f0f775039fa749e67711c07ac681a16c0979349/ocaml-cmake/examples/hello_world_lib/main_ocaml.ml | ocaml | let _ = Hello.hello_world ();
Gc.full_major ();
;;
| |
b716364a340a45be1988dd065a1e8434d2c8655c8bf220e6a4c286a358d9f446 | gonimo/gonimo | GonimoFront.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecursiveDo #-}
# LANGUAGE ScopedTypeVariables #
import qualified Gonimo.Client.Host.Impl as Gonimo
import qualified Gonimo.Client.Main as Gonimo
import qualified GHCJS.DOM.Types as JS
main :: IO ()
main = Gonimo.main =<< Gonimo.makeEmptyHostVars
| null | https://raw.githubusercontent.com/gonimo/gonimo/f4072db9e56f0c853a9f07e048e254eaa671283b/front-ghcjs/app/GonimoFront.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
# LANGUAGE RecursiveDo # | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
import qualified Gonimo.Client.Host.Impl as Gonimo
import qualified Gonimo.Client.Main as Gonimo
import qualified GHCJS.DOM.Types as JS
main :: IO ()
main = Gonimo.main =<< Gonimo.makeEmptyHostVars
|
58ac4fafdf417d5af7b936311a8a2c0d9516d4ebea2b11c0b3d59379abee144c | alvatar/spheres | make-lib.scm | Copyright ( c ) 2007 - 2011 ,
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:
;; * Redistributions of source code must retain the above copyright
;; notice, this list of conditions and the following disclaimer.
;; * Redistributions in binary form must reproduce the above copyright
;; notice, this list of conditions and the following disclaimer in the
;; documentation and/or other materials provided with the distribution.
;; * Neither the name of the <organization> nor the
;; names of its contributors may be used to endorse or promote products
;; derived from this software without specific prior written permission.
;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL < COPYRIGHT HOLDER > BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
;; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(module __unicode_makelib
(import utf)
(eval (export-all)))
| null | https://raw.githubusercontent.com/alvatar/spheres/568836f234a469ef70c69f4a2d9b56d41c3fc5bd/doc/string/unicode/bigloo-unicode/make-lib.scm | scheme | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Copyright ( c ) 2007 - 2011 ,
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
DISCLAIMED . IN NO EVENT SHALL < COPYRIGHT HOLDER > BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
(module __unicode_makelib
(import utf)
(eval (export-all)))
|
a4e4bad691559314d30b00fb712eb6b89e8a8fed414f4e784473770a23e4979d | d-cent/mooncake | activity.clj | (ns mooncake.test.domain.activity
(:require [midje.sweet :refer :all]
[mooncake.domain.activity :as activity]))
(fact "about activity->default-action-text"
(let [activity-1 {:type "Create"
:published "2015-09-06T11:05:53+03:00"
:object {:type "UnrecognisedType"
:name "SAMPLE TITLE"
:content "SAMPLE CONTENT"}}]
(activity/activity->default-action-text activity-1) => "- UnrecognisedType - Create"))
| null | https://raw.githubusercontent.com/d-cent/mooncake/eb16b7239e7580a73b98f7cdacb324ab4e301f9c/test/mooncake/test/domain/activity.clj | clojure | (ns mooncake.test.domain.activity
(:require [midje.sweet :refer :all]
[mooncake.domain.activity :as activity]))
(fact "about activity->default-action-text"
(let [activity-1 {:type "Create"
:published "2015-09-06T11:05:53+03:00"
:object {:type "UnrecognisedType"
:name "SAMPLE TITLE"
:content "SAMPLE CONTENT"}}]
(activity/activity->default-action-text activity-1) => "- UnrecognisedType - Create"))
| |
b172acd71e280abedbed4218d4a18f17be36238672906f43315f9c500ce7d57f | Jell/euroclojure-2016 | diagram_rails.cljs | (ns euroclojure.diagram-rails
(:require-macros [euroclojure.inline :refer [inline-file]]))
(defn slide []
[:div.slide
[:div.diagram
{:dangerouslySetInnerHTML
{:__html
(inline-file "resources/public/images/RailsProcess.svg")}}]])
| null | https://raw.githubusercontent.com/Jell/euroclojure-2016/a8ca883e8480a4616ede19995aaacd4a495608af/src/euroclojure/diagram_rails.cljs | clojure | (ns euroclojure.diagram-rails
(:require-macros [euroclojure.inline :refer [inline-file]]))
(defn slide []
[:div.slide
[:div.diagram
{:dangerouslySetInnerHTML
{:__html
(inline-file "resources/public/images/RailsProcess.svg")}}]])
| |
b08fa7023816dd95f704dc06ee775e81d7fb31576514c1baa4ccf37d7cd98823 | smaccoun/polysemy-servant | DB.hs | module Effects.DB where
import AppBase
import Polysemy
import Data.Pool (Pool)
import Database.Persist.Sql (SqlBackend, toSqlKey)
import qualified Database.Persist.Sql as P
import Database.Persist
type CommonRecordConstraint record = (PersistQueryRead SqlBackend, PersistEntityBackend record ~ BaseBackend SqlBackend, PersistEntity record, ToBackendKey SqlBackend record)
data Db m a where
RunSql :: forall a m. ReaderT SqlBackend IO a -> Db m a
GetEntities :: (CommonRecordConstraint record) => Proxy record -> [Filter record] -> [SelectOpt record] -> Db m [Entity record]
GetEntityById :: CommonRecordConstraint record => EntityField record (Key record) -> Int64 -> Db m (Maybe (P.Entity record))
InsertEntities :: CommonRecordConstraint record => [record] -> Db m [Key record]
ReplaceEntity :: CommonRecordConstraint record => Int64 -> record -> Db m ()
makeSem ''Db
runDbIO :: forall r a. Member (Lift IO) r
=> Pool SqlBackend
-> Sem (Db ': r) a
-> Sem r a
runDbIO pool' = interpret $ \case
RunSql sql' -> runQ sql'
GetEntities _ withMatchingFilters andSelectOptions -> runQ $
selectList withMatchingFilters andSelectOptions
GetEntityById recordIdCon idVal -> runQ $
selectFirst [recordIdCon ==. P.toSqlKey idVal] []
InsertEntities vals' -> runQ $ insertMany vals'
ReplaceEntity key' newRecord -> runQ $ replace (toSqlKey key') newRecord
where
runQ :: ReaderT SqlBackend IO v -> Sem r v
runQ q = sendM $ runPool q
runPool :: ReaderT SqlBackend IO v -> IO v
runPool = runSqlIO pool'
runSqlIO :: Pool SqlBackend -> ReaderT SqlBackend IO a -> IO a
runSqlIO pool' sql' = P.runSqlPool sql' pool'
| null | https://raw.githubusercontent.com/smaccoun/polysemy-servant/93f7977d6c03995d03674e9e5e0c24f02458b182/src/Effects/DB.hs | haskell | module Effects.DB where
import AppBase
import Polysemy
import Data.Pool (Pool)
import Database.Persist.Sql (SqlBackend, toSqlKey)
import qualified Database.Persist.Sql as P
import Database.Persist
type CommonRecordConstraint record = (PersistQueryRead SqlBackend, PersistEntityBackend record ~ BaseBackend SqlBackend, PersistEntity record, ToBackendKey SqlBackend record)
data Db m a where
RunSql :: forall a m. ReaderT SqlBackend IO a -> Db m a
GetEntities :: (CommonRecordConstraint record) => Proxy record -> [Filter record] -> [SelectOpt record] -> Db m [Entity record]
GetEntityById :: CommonRecordConstraint record => EntityField record (Key record) -> Int64 -> Db m (Maybe (P.Entity record))
InsertEntities :: CommonRecordConstraint record => [record] -> Db m [Key record]
ReplaceEntity :: CommonRecordConstraint record => Int64 -> record -> Db m ()
makeSem ''Db
runDbIO :: forall r a. Member (Lift IO) r
=> Pool SqlBackend
-> Sem (Db ': r) a
-> Sem r a
runDbIO pool' = interpret $ \case
RunSql sql' -> runQ sql'
GetEntities _ withMatchingFilters andSelectOptions -> runQ $
selectList withMatchingFilters andSelectOptions
GetEntityById recordIdCon idVal -> runQ $
selectFirst [recordIdCon ==. P.toSqlKey idVal] []
InsertEntities vals' -> runQ $ insertMany vals'
ReplaceEntity key' newRecord -> runQ $ replace (toSqlKey key') newRecord
where
runQ :: ReaderT SqlBackend IO v -> Sem r v
runQ q = sendM $ runPool q
runPool :: ReaderT SqlBackend IO v -> IO v
runPool = runSqlIO pool'
runSqlIO :: Pool SqlBackend -> ReaderT SqlBackend IO a -> IO a
runSqlIO pool' sql' = P.runSqlPool sql' pool'
| |
370d664bd8f6369c5eeace896946406a17ac2e4d61c550df81b9848a99f5678f | nasa/Common-Metadata-Repository | project.clj | (defproject gov.nasa.earthdata/cmr-authz "0.1.3"
:description "An authorization utility library for CMR services"
:url "-exchange/authz"
:license {:name "Apache License, Version 2.0"
:url "-2.0"}
:dependencies [[cheshire "5.8.1"]
[clojusc/trifl "0.4.2"]
[clojusc/twig "0.4.0"]
[com.stuartsierra/component "0.3.2"]
[gov.nasa.earthdata/cmr-exchange-common "0.3.3"]
[gov.nasa.earthdata/cmr-http-kit "0.2.0"]
[http-kit "2.5.3"]
[metosin/reitit-ring "0.2.7"]
[org.clojure/clojure "1.9.0"]
[org.clojure/core.cache "0.7.1"]
[org.clojure/data.xml "0.2.0-alpha5"]
[tolitius/xml-in "0.1.0"]]
:profiles {:ubercompile {:aot :all
:source-paths ["test"]}
:security {:plugins [[com.livingsocial/lein-dependency-check "1.1.1"]]
:dependency-check {:output-format [:all]
:suppression-file "resources/security/suppression.xml"}
:source-paths ^:replace ["src"]
:exclusions [
The following are excluded due to their being flagged as a CVE
[com.google.protobuf/protobuf-java]
[com.google.javascript/closure-compiler-unshaded]]}
:lint {:source-paths ^:replace ["src"]
:test-paths ^:replace []
:plugins [[jonase/eastwood "0.3.3"]
[lein-ancient "0.6.15"]
[lein-kibit "0.1.8"]]}
:test {:dependencies [[clojusc/ltest "0.3.0"]]
:plugins [[lein-ltest "0.3.0"]]
:test-selectors {:unit #(not (or (:integration %) (:system %)))
:integration :integration
:system :system
:default (complement :system)}}}
:aliases {
;; Dev & Testing Aliases
"repl" ["do"
["clean"]
["repl"]]
"ubercompile" ["with-profile" "+ubercompile,+security" "compile"]
"check-vers" ["with-profile" "+lint" "ancient" "check" ":all"]
"check-jars" ["with-profile" "+lint" "do"
["deps" ":tree"]
["deps" ":plugin-tree"]]
"check-deps" ["do"
["check-jars"]
["check-vers"]]
"ltest" ["with-profile" "+test,+system,+security" "ltest"]
Linting
"kibit" ["with-profile" "+lint" "kibit"]
"eastwood" ["with-profile" "+lint" "eastwood" "{:namespaces [:source-paths]}"]
"lint" ["do"
["kibit"]]
;["eastwood"]
Security
"check-sec" ["with-profile" "+security" "do"
["clean"]
["dependency-check"]]
;; Build tasks
"build-jar" ["with-profile" "+security" "jar"]
"build-uberjar" ["with-profile" "+security" "uberjar"]
"build-lite" ["do"
["ltest" ":unit"]]
"build" ["do"
["clean"]
["check-vers"]
["check-sec"]
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
"build-full" ["do"
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
;; Publishing
"publish" ["with-profile" "+security" "do"
["clean"]
["build-jar"]
["deploy" "clojars"]]})
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/63001cf021d32d61030b1dcadd8b253e4a221662/other/cmr-exchange/authz/project.clj | clojure | Dev & Testing Aliases
["eastwood"]
Build tasks
Publishing | (defproject gov.nasa.earthdata/cmr-authz "0.1.3"
:description "An authorization utility library for CMR services"
:url "-exchange/authz"
:license {:name "Apache License, Version 2.0"
:url "-2.0"}
:dependencies [[cheshire "5.8.1"]
[clojusc/trifl "0.4.2"]
[clojusc/twig "0.4.0"]
[com.stuartsierra/component "0.3.2"]
[gov.nasa.earthdata/cmr-exchange-common "0.3.3"]
[gov.nasa.earthdata/cmr-http-kit "0.2.0"]
[http-kit "2.5.3"]
[metosin/reitit-ring "0.2.7"]
[org.clojure/clojure "1.9.0"]
[org.clojure/core.cache "0.7.1"]
[org.clojure/data.xml "0.2.0-alpha5"]
[tolitius/xml-in "0.1.0"]]
:profiles {:ubercompile {:aot :all
:source-paths ["test"]}
:security {:plugins [[com.livingsocial/lein-dependency-check "1.1.1"]]
:dependency-check {:output-format [:all]
:suppression-file "resources/security/suppression.xml"}
:source-paths ^:replace ["src"]
:exclusions [
The following are excluded due to their being flagged as a CVE
[com.google.protobuf/protobuf-java]
[com.google.javascript/closure-compiler-unshaded]]}
:lint {:source-paths ^:replace ["src"]
:test-paths ^:replace []
:plugins [[jonase/eastwood "0.3.3"]
[lein-ancient "0.6.15"]
[lein-kibit "0.1.8"]]}
:test {:dependencies [[clojusc/ltest "0.3.0"]]
:plugins [[lein-ltest "0.3.0"]]
:test-selectors {:unit #(not (or (:integration %) (:system %)))
:integration :integration
:system :system
:default (complement :system)}}}
:aliases {
"repl" ["do"
["clean"]
["repl"]]
"ubercompile" ["with-profile" "+ubercompile,+security" "compile"]
"check-vers" ["with-profile" "+lint" "ancient" "check" ":all"]
"check-jars" ["with-profile" "+lint" "do"
["deps" ":tree"]
["deps" ":plugin-tree"]]
"check-deps" ["do"
["check-jars"]
["check-vers"]]
"ltest" ["with-profile" "+test,+system,+security" "ltest"]
Linting
"kibit" ["with-profile" "+lint" "kibit"]
"eastwood" ["with-profile" "+lint" "eastwood" "{:namespaces [:source-paths]}"]
"lint" ["do"
["kibit"]]
Security
"check-sec" ["with-profile" "+security" "do"
["clean"]
["dependency-check"]]
"build-jar" ["with-profile" "+security" "jar"]
"build-uberjar" ["with-profile" "+security" "uberjar"]
"build-lite" ["do"
["ltest" ":unit"]]
"build" ["do"
["clean"]
["check-vers"]
["check-sec"]
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
"build-full" ["do"
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
"publish" ["with-profile" "+security" "do"
["clean"]
["build-jar"]
["deploy" "clojars"]]})
|
d1a2ed69b9d732be50ed31be57207242a0d69d299e2b9a1a6c37294a01c65b22 | falsetru/htdp | 39.1.3.scm | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-advanced-reader.ss" "lang")((modname 39.1.3) (read-case-sensitive #t) (teachpacks ((lib "gui.ss" "teachpack" "htdp") (lib "draw.ss" "teachpack" "htdp") (lib "guess.ss" "teachpack" "htdp"))) (htdp-settings #(#t constructor repeating-decimal #t #t none #f ((lib "gui.ss" "teachpack" "htdp") (lib "draw.ss" "teachpack" "htdp") (lib "guess.ss" "teachpack" "htdp")))))
;; the positions of the bulbs
(define BULB-RADIUS 20)
(define BULB-DISTANCE 10)
(define Y-RED (+ BULB-DISTANCE BULB-RADIUS))
(define Y-YELLOW (+ Y-RED BULB-DISTANCE (* 2 BULB-RADIUS)))
(define Y-GREEN (+ Y-YELLOW BULB-DISTANCE (* 2 BULB-RADIUS)))
(define WIDTH 50)
(define RADIUS 20)
(define DISTANCE-BETWEEN-BULBS 10)
(define HEIGHT
(+ DISTANCE-BETWEEN-BULBS
(* 2 RADIUS)
DISTANCE-BETWEEN-BULBS
(* 2 RADIUS)
DISTANCE-BETWEEN-BULBS
(* 2 RADIUS)
DISTANCE-BETWEEN-BULBS))
(define (clear-bulb x color)
(cond [(symbol=? color 'red)
(and
(clear-solid-disk (make-posn x Y-RED) BULB-RADIUS 'red)
(draw-circle (make-posn x Y-RED) BULB-RADIUS 'red))]
[(symbol=? color 'yellow)
(and
(clear-solid-disk (make-posn x Y-YELLOW) BULB-RADIUS 'yellow)
(draw-circle (make-posn x Y-YELLOW) BULB-RADIUS 'yellow))]
[(symbol=? color 'green)
(and
(clear-solid-disk (make-posn x Y-GREEN) BULB-RADIUS 'green)
(draw-circle (make-posn x Y-GREEN) BULB-RADIUS 'green))]))
(define (draw-bulb x color)
(cond [(symbol=? color 'red)
(and
(clear-circle (make-posn x Y-RED) BULB-RADIUS 'red)
(draw-solid-disk (make-posn x Y-RED) BULB-RADIUS 'red))]
[(symbol=? color 'yellow)
(and
(clear-circle (make-posn x Y-YELLOW) BULB-RADIUS 'yellow)
(draw-solid-disk (make-posn x Y-YELLOW) BULB-RADIUS 'yellow))]
[(symbol=? color 'green)
(and
(clear-circle (make-posn x Y-GREEN) BULB-RADIUS 'green)
(draw-solid-disk (make-posn x Y-GREEN) BULB-RADIUS 'green))]))
;; View:
;; draw-light : TL-color number -> true
;; to (re)draw the traffic light on the canvas
(define (draw-light current-color x-posn)
(local ((define left-border-x (- x-posn (/ WIDTH 2)))
(define right-border-x (+ left-border-x WIDTH)))
(begin
(clear-solid-line (make-posn left-border-x 0)
(make-posn left-border-x HEIGHT))
(clear-solid-line (make-posn right-border-x 0)
(make-posn right-border-x HEIGHT))
(draw-solid-line (make-posn left-border-x 0)
(make-posn left-border-x HEIGHT))
(draw-solid-line (make-posn right-border-x 0)
(make-posn right-border-x HEIGHT))
(for-each
(lambda (c)
(if (symbol=? c current-color)
(begin (clear-bulb x-posn c) (draw-bulb x-posn c))
(begin (draw-bulb x-posn c) (clear-bulb x-posn c))))
'(red green yellow))
true)
))
;; Model:
;; make-traffic-light : symbol number -> ( -> true)
;; to create a red light with (make-posn x-posn 0) as the upper-left corner
;; effect: draw the traffic light on the canvas
(define (make-traffic-light street x-posn)
(local (;; current-color : TL-color
;; to keep track of the current color of the traffic light
(define current-color 'red)
;; init-traffic-light : -> true
;; to (re)set current-color to red and to (re)create the view
(define (init-traffic-light)
(begin
(set! current-color 'red)
(draw-light current-color x-posn)))
;; next : -> true
;; effect: to change current-color from 'green to 'yellow,
;; 'yellow to 'red, and 'red to 'green
(define (next)
(begin
(set! current-color (next-color current-color))
(draw-light current-color x-posn)))
;; next-color : TL-color -> TL-color
;; to compute the successor of current-color based on the traffic laws
(define (next-color current-color)
(cond
[(symbol=? 'green current-color) 'yellow]
[(symbol=? 'yellow current-color) 'red]
[(symbol=? 'red current-color) 'green])))
(begin
Initialize and produce next
(init-traffic-light)
next)))
(start 300 300)
;; lights : (listof traffic-light)
to manage the lights along Sunrise
(define lights
(list (make-traffic-light 'sunrise@rice 50)
(make-traffic-light 'sunrise@cmu 150)))
((second lights))
(andmap (lambda (a-light) (a-light)) lights) | null | https://raw.githubusercontent.com/falsetru/htdp/4cdad3b999f19b89ff4fa7561839cbcbaad274df/39/39.1.3.scm | scheme | about the language level of this file in a form that our tools can easily process.
the positions of the bulbs
View:
draw-light : TL-color number -> true
to (re)draw the traffic light on the canvas
Model:
make-traffic-light : symbol number -> ( -> true)
to create a red light with (make-posn x-posn 0) as the upper-left corner
effect: draw the traffic light on the canvas
current-color : TL-color
to keep track of the current color of the traffic light
init-traffic-light : -> true
to (re)set current-color to red and to (re)create the view
next : -> true
effect: to change current-color from 'green to 'yellow,
'yellow to 'red, and 'red to 'green
next-color : TL-color -> TL-color
to compute the successor of current-color based on the traffic laws
lights : (listof traffic-light) | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-advanced-reader.ss" "lang")((modname 39.1.3) (read-case-sensitive #t) (teachpacks ((lib "gui.ss" "teachpack" "htdp") (lib "draw.ss" "teachpack" "htdp") (lib "guess.ss" "teachpack" "htdp"))) (htdp-settings #(#t constructor repeating-decimal #t #t none #f ((lib "gui.ss" "teachpack" "htdp") (lib "draw.ss" "teachpack" "htdp") (lib "guess.ss" "teachpack" "htdp")))))
(define BULB-RADIUS 20)
(define BULB-DISTANCE 10)
(define Y-RED (+ BULB-DISTANCE BULB-RADIUS))
(define Y-YELLOW (+ Y-RED BULB-DISTANCE (* 2 BULB-RADIUS)))
(define Y-GREEN (+ Y-YELLOW BULB-DISTANCE (* 2 BULB-RADIUS)))
(define WIDTH 50)
(define RADIUS 20)
(define DISTANCE-BETWEEN-BULBS 10)
(define HEIGHT
(+ DISTANCE-BETWEEN-BULBS
(* 2 RADIUS)
DISTANCE-BETWEEN-BULBS
(* 2 RADIUS)
DISTANCE-BETWEEN-BULBS
(* 2 RADIUS)
DISTANCE-BETWEEN-BULBS))
(define (clear-bulb x color)
(cond [(symbol=? color 'red)
(and
(clear-solid-disk (make-posn x Y-RED) BULB-RADIUS 'red)
(draw-circle (make-posn x Y-RED) BULB-RADIUS 'red))]
[(symbol=? color 'yellow)
(and
(clear-solid-disk (make-posn x Y-YELLOW) BULB-RADIUS 'yellow)
(draw-circle (make-posn x Y-YELLOW) BULB-RADIUS 'yellow))]
[(symbol=? color 'green)
(and
(clear-solid-disk (make-posn x Y-GREEN) BULB-RADIUS 'green)
(draw-circle (make-posn x Y-GREEN) BULB-RADIUS 'green))]))
(define (draw-bulb x color)
(cond [(symbol=? color 'red)
(and
(clear-circle (make-posn x Y-RED) BULB-RADIUS 'red)
(draw-solid-disk (make-posn x Y-RED) BULB-RADIUS 'red))]
[(symbol=? color 'yellow)
(and
(clear-circle (make-posn x Y-YELLOW) BULB-RADIUS 'yellow)
(draw-solid-disk (make-posn x Y-YELLOW) BULB-RADIUS 'yellow))]
[(symbol=? color 'green)
(and
(clear-circle (make-posn x Y-GREEN) BULB-RADIUS 'green)
(draw-solid-disk (make-posn x Y-GREEN) BULB-RADIUS 'green))]))
(define (draw-light current-color x-posn)
(local ((define left-border-x (- x-posn (/ WIDTH 2)))
(define right-border-x (+ left-border-x WIDTH)))
(begin
(clear-solid-line (make-posn left-border-x 0)
(make-posn left-border-x HEIGHT))
(clear-solid-line (make-posn right-border-x 0)
(make-posn right-border-x HEIGHT))
(draw-solid-line (make-posn left-border-x 0)
(make-posn left-border-x HEIGHT))
(draw-solid-line (make-posn right-border-x 0)
(make-posn right-border-x HEIGHT))
(for-each
(lambda (c)
(if (symbol=? c current-color)
(begin (clear-bulb x-posn c) (draw-bulb x-posn c))
(begin (draw-bulb x-posn c) (clear-bulb x-posn c))))
'(red green yellow))
true)
))
(define (make-traffic-light street x-posn)
(define current-color 'red)
(define (init-traffic-light)
(begin
(set! current-color 'red)
(draw-light current-color x-posn)))
(define (next)
(begin
(set! current-color (next-color current-color))
(draw-light current-color x-posn)))
(define (next-color current-color)
(cond
[(symbol=? 'green current-color) 'yellow]
[(symbol=? 'yellow current-color) 'red]
[(symbol=? 'red current-color) 'green])))
(begin
Initialize and produce next
(init-traffic-light)
next)))
(start 300 300)
to manage the lights along Sunrise
(define lights
(list (make-traffic-light 'sunrise@rice 50)
(make-traffic-light 'sunrise@cmu 150)))
((second lights))
(andmap (lambda (a-light) (a-light)) lights) |
279bc75216659c4570f4b64f5402e82e0e1ae271fce986945d7898bb6f34cef5 | backtracking/mlpost | fonts.mli | open Dvi_util
(** Load fonts and extract information *)
type type1 = private {
glyphs_tag : int;
(* unique tag *)
glyphs_ft : Mlpost_ft.t;
  (* the file, pfb or pfa, which defines the glyphs *)
glyphs_enc : int -> int;
(* the conversion of the charactersx between tex and the font *)
slant : float option;
extend : float option;
glyphs_ratio_cm : float;
}
type vf = private {
vf_design_size : float;
vf_font_map : Dvi_util.font_def Dvi_util.Int32Map.t;
(* Font on which the virtual font is defined *)
vf_chars : Dvi.command list Int32H.t;
(* The dvi command which define each character*)
}
type glyphs = Type1 of type1 | VirtualFont of vf
type t
(** the type of a loaded font *)
val load_font : font_def -> float -> t
(** [load_font def f] loads font [def] scaled by [f] *)
val metric : t -> Tfm.t
(** Obtain the font metric *)
val tex_name : t -> string
(** get the name of the font as used by TeX *)
val ratio_cm : t -> float
(** The font ratio, in cm *)
val glyphs : t -> glyphs
val char_width : t -> int -> float
val char_height : t -> int -> float
val char_depth : t -> int -> float
(** get information about the [i]th char of the font *)
val char_dims : t -> int -> float * float * float
(** width, height, depth of the [i]th char *)
val scale : t -> float -> float
(** [scale t f] scale the given float [f] by [ratio_cm t] *)
val design_size : t -> float
(** the design size of the font *)
| null | https://raw.githubusercontent.com/backtracking/mlpost/bd4305289fd64d531b9f42d64dd641d72ab82fd5/src/fonts.mli | ocaml | * Load fonts and extract information
unique tag
the conversion of the charactersx between tex and the font
Font on which the virtual font is defined
The dvi command which define each character
* the type of a loaded font
* [load_font def f] loads font [def] scaled by [f]
* Obtain the font metric
* get the name of the font as used by TeX
* The font ratio, in cm
* get information about the [i]th char of the font
* width, height, depth of the [i]th char
* [scale t f] scale the given float [f] by [ratio_cm t]
* the design size of the font | open Dvi_util
(* Interface for loaded fonts: Type1 glyph data and virtual fonts. *)

type type1 = private {
  glyphs_tag : int;  (** unique tag *)
  glyphs_ft : Mlpost_ft.t;
      (** the file, pfb or pfa, which defines the glyphs *)
  glyphs_enc : int -> int;
      (** the conversion of the characters between tex and the font *)
  slant : float option;
  extend : float option;
  glyphs_ratio_cm : float;
}

type vf = private {
  vf_design_size : float;
  vf_font_map : Dvi_util.font_def Dvi_util.Int32Map.t;
      (** fonts on which the virtual font is defined *)
  vf_chars : Dvi.command list Int32H.t;
      (** the dvi commands which define each character *)
}

type glyphs = Type1 of type1 | VirtualFont of vf

type t
(** the type of a loaded font *)

val load_font : font_def -> float -> t
(** [load_font def f] loads font [def] scaled by [f] *)

val metric : t -> Tfm.t
(** Obtain the font metric *)

val tex_name : t -> string
(** get the name of the font as used by TeX *)

val ratio_cm : t -> float
(** The font ratio, in cm *)

val glyphs : t -> glyphs

val char_width : t -> int -> float
val char_height : t -> int -> float
val char_depth : t -> int -> float
(** get information about the [i]th char of the font *)

val char_dims : t -> int -> float * float * float
(** width, height, depth of the [i]th char *)

val scale : t -> float -> float
(** [scale t f] scales the given float [f] by [ratio_cm t] *)

val design_size : t -> float
(** the design size of the font *)
|
bf070d191b3e672efcfbb00c1b03f935179aba41c5ebc522b2552f43aa7986c1 | mpickering/apply-refact | Pragma10.hs | # LANGUAGE DataKinds #
# LANGUAGE GADTs , DataKinds #
| null | https://raw.githubusercontent.com/mpickering/apply-refact/a4343ea0f4f9d8c2e16d6b16b9068f321ba4f272/tests/examples/Pragma10.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE GADTs , DataKinds #
| |
516c440a92a572e1141c5736d990ea489d4f839e0e1b89397191e3f0850c0b92 | keybase/colorbase | db.clj | (ns colorbase.db
(:require [colorbase.config :refer [config]]
[colorbase.util :as util]
[hugsql.core :as hugsql]))
(defn result-one-snake->kebab
  "HugSQL adapter hook: fetch the single result row via
  hugsql.adapter/result-one, then rewrite its snake_case keyword keys
  into kebab-case."
  [this result options]
  (->> (hugsql.adapter/result-one this result options)
       (util/map-keys util/keyword-snake->kebab)))
(defn result-many-snake->kebab
  "HugSQL adapter hook: fetch all result rows via
  hugsql.adapter/result-many, lazily rewriting each row's snake_case
  keyword keys into kebab-case."
  [this result options]
  (for [row (hugsql.adapter/result-many this result options)]
    (util/map-keys util/keyword-snake->kebab row)))
(defmethod hugsql/hugsql-result-fn :1 [sym] 'colorbase.db/result-one-snake->kebab)
(defmethod hugsql/hugsql-result-fn :one [sym] 'colorbase.db/result-one-snake->kebab)
(defmethod hugsql/hugsql-result-fn :* [sym] 'colorbase.db/result-many-snake->kebab)
(defmethod hugsql/hugsql-result-fn :many [sym] 'colorbase.db/result-many-snake->kebab)
(def sqlite-connection
{:classname "org.sqlite.JDBC"
:subprotocol "sqlite"
:subname (:database-path config)})
(def cmd (util/map-values
(fn [fn-info] (partial (:fn fn-info) sqlite-connection))
(hugsql/map-of-db-fns "colorbase/sql/schema.sql")))
| null | https://raw.githubusercontent.com/keybase/colorbase/f4b4a87e98c91b6b184db8d84ccd523ac9842a15/src/colorbase/db.clj | clojure | (ns colorbase.db
(:require [colorbase.config :refer [config]]
[colorbase.util :as util]
[hugsql.core :as hugsql]))
(defn result-one-snake->kebab [this result options]
(let [row (hugsql.adapter/result-one this result options)]
(util/map-keys util/keyword-snake->kebab row)))
(defn result-many-snake->kebab [this result options]
(let [rows (hugsql.adapter/result-many this result options)]
(map (partial util/map-keys util/keyword-snake->kebab) rows)))
(defmethod hugsql/hugsql-result-fn :1 [sym] 'colorbase.db/result-one-snake->kebab)
(defmethod hugsql/hugsql-result-fn :one [sym] 'colorbase.db/result-one-snake->kebab)
(defmethod hugsql/hugsql-result-fn :* [sym] 'colorbase.db/result-many-snake->kebab)
(defmethod hugsql/hugsql-result-fn :many [sym] 'colorbase.db/result-many-snake->kebab)
(def sqlite-connection
{:classname "org.sqlite.JDBC"
:subprotocol "sqlite"
:subname (:database-path config)})
(def cmd (util/map-values
(fn [fn-info] (partial (:fn fn-info) sqlite-connection))
(hugsql/map-of-db-fns "colorbase/sql/schema.sql")))
| |
99a638be3d68d180341006029b2cdc91204e00e9f2f4523f8abd5a22f61f290a | WFP-VAM/RAMResourcesScripts | LCS-EN-indicator.sps |
SPSS Syntax to compute LCS essential needs (LCS_EN)
Please refer to Github for additonal scripts in R and STATA data analysis tools
***Livelihood Coping ***
***define value labels
Value labels
LcsEN_stress_DomAsset
LcsEN_stress_CrdtFood
LcsEN_stress_Saving
LcsEN_stress_BorrowCash
LcsEN_crisis_ProdAsset
LcsEN_crisis_HealthEdu
LcsEN_crisis_OutSchool
LcsEN_em_IllegalAct
LcsEN_em_Begged
LcsEN_em_ResAsset
10 ‘No, because I did not need to’
20 ‘No because I already sold those assets or have engaged in this activity within the last 12 months and cannot continue to do it’
30 ‘Yes’
9999 ‘Not applicable (don’t have children/ these assets)’.
***stress strategies*** (must have 4 stress strategies to calculate LCS-EN, if you have more then use the most frequently applied strategies)
Variable labels
LcsEN_stress_DomAsset ‘Sold household assets/goods (radio, furniture, refrigerator, television, jewellery etc.)’
LcsEN_stress_CrdtFood ‘Purchased food or other essential items on credit’
LcsEN_stress_Saving ‘Spent savings’
LcsEN_stress_BorrowCash ‘Borrowed money’.
Do if (LcsEN_stress_DomAsset = 20) | (LcsEN_stress_DomAsset = 30) | (LcsEN_stress_CrdtFood = 20) | (LcsEN_stress_CrdtFood = 30) | (LcsEN_stress_Saving =20) | (LcsEN_stress_Saving =30) | (LcsEN_stress_BorrowCash =20) | (LcsEN_stress_BorrowCash=30).
Compute stress_coping_EN =1.
Else.
Compute stress_coping_EN =0.
End if.
EXECUTE.
***crisis strategies***(must have 3 crisis strategies to calculate LCS-EN, if you have more then use the most frequently applied strategies)
Variable labels
LcsEN_crisis_ProdAsset ‘Sold productive assets or means of transport (sewing machine, wheelbarrow, bicycle, car, etc.)’
LcsEN_crisis_HealthEdu ‘Reduced expenses on health (including drugs) or education’
LcsEN_crisis_OutSchool ‘Withdrew children from school.’
Do if (LcsEN_crisis_ProdAsset = 20) | (LcsEN_crisis_ProdAsset =30) | (LcsEN_crisis_HealthEdu =20) | (LcsEN_crisis_HealthEdu=30) | (LcsEN_crisis_OutSchool =20) | (LcsEN_crisis_OutSchool =30).
Compute crisis_coping_EN =1.
Else.
Compute crisis_coping_EN =0.
End if.
EXECUTE.
***emergency strategies ***(must have 3 emergency strategies to calculate LCS, if you have more then use the most frequently applied strategies)
Variable labels
LcsEN_em_ResAsset ‘Mortgaged/Sold house or land’
LcsEN_em_Begged ‘Begged and/or scavenged (asked strangers for money/food)’
LcsEN_em_IllegalAct ‘Had to engage in illegal income activities (theft, prostitution)’.
Do if (LcsEN_em_ResAsset = 20) | (LcsEN_em_ResAsset = 30) | (LcsEN_em_Begged = 20) | (LcsEN_em_Begged =30) | (LcsEN_em_IllegalAct = 20) | (LcsEN_em_IllegalAct = 30).
Compute emergency_coping_EN =1.
Else.
Compute emergency_coping_EN = 0.
End if.
EXECUTE.
*** label new variable
variable labels stress_coping_EN 'Did the HH engage in stress coping strategies?'.
variable labels crisis_coping_EN 'Did the HH engage in crisis coping strategies?'.
variable labels emergency_coping_EN 'Did the HH engage in emergency coping strategies?'.
*** recode variables to compute one variable with coping behavior
recode stress_coping_EN (0=0) (1=2).
recode crisis_coping_EN (0=0) (1=3).
recode emergency_coping_EN (0=0) (1=4).
COMPUTE Max_coping_behaviourEN=MAX(stress_coping_EN, crisis_coping_EN, emergency_coping_EN).
RECODE Max_coping_behaviourEN (0=1).
Value labels Max_coping_behaviourEN 1 'HH not adopting coping strategies' 2 'Stress coping strategies ' 3 'Crisis coping strategies ' 4 'Emergencies coping strategies'.
Variable Labels Max_coping_behaviourEN 'Summary of asset depletion'.
EXECUTE.
Frequencies Max_coping_behaviourEN.
***calculate LCS-FS indicator using the LCS-EN module to be able to calculate CARI
***define value labels
Value labels EnAccessRsn
1 ‘To buy food’
2 ‘To pay for rent’
3 ‘To pay school, education costs’
4 ‘To cover health expenses‘
5 ‘To buy essential non-food items (clothes, small furniture...)’
6 ‘To access water or sanitation facilities’
7 ‘To access essential dwelling services (electricity, energy, waste disposal…)’
8 ‘To pay for existing debts’
999 ‘Other, specify’.
*Create a multi-response dataset for reasons selected for applying livelihood coping strategies
MRSETS
/MDGROUP NAME=$ReasonsforCoping CATEGORYLABELS=VARLABELS VARIABLES= LhCSIEnAccess/1 LhCSIEnAccess/2 LhCSIEnAccess/3 LhCSIEnAccess/4 LhCSIEnAccess/5 LhCSIEnAccess/6 LhCSIEnAccess/7 LhCSIEnAccess/8 LhCSIEnAccess/999
VALUE=1
/DISPLAY NAME=[$ReasonsforCoping].
*Customs table to check frequencies for each reason by column percentages and table column percentages
CTABLES
/VLABELS VARIABLES=$ReasonsforCoping DISPLAY=LABEL
/TABLE $ReasonsforCoping [COLPCT.RESPONSES.COUNT PCT40.1, COLPCT.COUNT PCT40.1]
/CATEGORIES VARIABLES=$ReasonsforCoping EMPTY=INCLUDE
/CRITERIA CILEVEL=95.
***********************************Calculating LCS-FS using the LCS-EN module************************************
***Important note: If ‘to buy food’ is not among the reasons selected for applying livelihood coping strategies then these case/households should be considered under ‘not coping’ when computing CARI.
/*If the design of this question provides responses in a single cell then the analyst should manually split the responses in excel prior to running this syntax
***define value labels
Value labels EnAccessRsn
LhCSIEnAccess/1 = To buy food
If (LhCSIEnAccess/1=0) Max_coping_behaviourEN =1.
*rename variable in order to continue with the CARI syntax.
Rename variable (Max_coping_behaviourEN=Max_coping_behaviour).
Frequencies Max_coping_behaviour.
| null | https://raw.githubusercontent.com/WFP-VAM/RAMResourcesScripts/236e1a630f053d27161c73dfe01a9ef04b7ebdcc/Indicators/Livelihood-Coping-Strategies-EN/LCS-EN-indicator.sps | scheme |
SPSS Syntax to compute LCS essential needs (LCS_EN)
Please refer to Github for additonal scripts in R and STATA data analysis tools
***Livelihood Coping ***
***define value labels
Value labels
LcsEN_stress_DomAsset
LcsEN_stress_CrdtFood
LcsEN_stress_Saving
LcsEN_stress_BorrowCash
LcsEN_crisis_ProdAsset
LcsEN_crisis_HealthEdu
LcsEN_crisis_OutSchool
LcsEN_em_IllegalAct
LcsEN_em_Begged
LcsEN_em_ResAsset
10 ‘No, because I did not need to’
20 ‘No because I already sold those assets or have engaged in this activity within the last 12 months and cannot continue to do it’
30 ‘Yes’
9999 ‘Not applicable (don’t have children/ these assets)’.
***stress strategies*** (must have 4 stress strategies to calculate LCS-EN, if you have more then use the most frequently applied strategies)
Variable labels
LcsEN_stress_DomAsset ‘Sold household assets/goods (radio, furniture, refrigerator, television, jewellery etc.)’
LcsEN_stress_CrdtFood ‘Purchased food or other essential items on credit’
LcsEN_stress_Saving ‘Spent savings’
LcsEN_stress_BorrowCash ‘Borrowed money’.
Do if (LcsEN_stress_DomAsset = 20) | (LcsEN_stress_DomAsset = 30) | (LcsEN_stress_CrdtFood = 20) | (LcsEN_stress_CrdtFood = 30) | (LcsEN_stress_Saving =20) | (LcsEN_stress_Saving =30) | (LcsEN_stress_BorrowCash =20) | (LcsEN_stress_BorrowCash=30).
Compute stress_coping_EN =1.
Else.
Compute stress_coping_EN =0.
End if.
EXECUTE.
***crisis strategies***(must have 3 crisis strategies to calculate LCS-EN, if you have more then use the most frequently applied strategies)
Variable labels
LcsEN_crisis_ProdAsset ‘Sold productive assets or means of transport (sewing machine, wheelbarrow, bicycle, car, etc.)’
LcsEN_crisis_HealthEdu ‘Reduced expenses on health (including drugs) or education’
LcsEN_crisis_OutSchool ‘Withdrew children from school.’
Do if (LcsEN_crisis_ProdAsset = 20) | (LcsEN_crisis_ProdAsset =30) | (LcsEN_crisis_HealthEdu =20) | (LcsEN_crisis_HealthEdu=30) | (LcsEN_crisis_OutSchool =20) | (LcsEN_crisis_OutSchool =30).
Compute crisis_coping_EN =1.
Else.
Compute crisis_coping_EN =0.
End if.
EXECUTE.
***emergency strategies ***(must have 3 emergency strategies to calculate LCS, if you have more then use the most frequently applied strategies)
Variable labels
LcsEN_em_ResAsset ‘Mortgaged/Sold house or land’
LcsEN_em_Begged ‘Begged and/or scavenged (asked strangers for money/food)’
LcsEN_em_IllegalAct ‘Had to engage in illegal income activities (theft, prostitution)’.
Do if (LcsEN_em_ResAsset = 20) | (LcsEN_em_ResAsset = 30) | (LcsEN_em_Begged = 20) | (LcsEN_em_Begged =30) | (LcsEN_em_IllegalAct = 20) | (LcsEN_em_IllegalAct = 30).
Compute emergency_coping_EN =1.
Else.
Compute emergency_coping_EN = 0.
End if.
EXECUTE.
*** label new variable
variable labels stress_coping_EN 'Did the HH engage in stress coping strategies?'.
variable labels crisis_coping_EN 'Did the HH engage in crisis coping strategies?'.
variable labels emergency_coping_EN 'Did the HH engage in emergency coping strategies?'.
*** recode variables to compute one variable with coping behavior
recode stress_coping_EN (0=0) (1=2).
recode crisis_coping_EN (0=0) (1=3).
recode emergency_coping_EN (0=0) (1=4).
COMPUTE Max_coping_behaviourEN=MAX(stress_coping_EN, crisis_coping_EN, emergency_coping_EN).
RECODE Max_coping_behaviourEN (0=1).
Value labels Max_coping_behaviourEN 1 'HH not adopting coping strategies' 2 'Stress coping strategies ' 3 'Crisis coping strategies ' 4 'Emergencies coping strategies'.
Variable Labels Max_coping_behaviourEN 'Summary of asset depletion'.
EXECUTE.
Frequencies Max_coping_behaviourEN.
***calculate LCS-FS indicator using the LCS-EN module to be able to calculate CARI
***define value labels
Value labels EnAccessRsn
1 ‘To buy food’
2 ‘To pay for rent’
3 ‘To pay school, education costs’
4 ‘To cover health expenses‘
5 ‘To buy essential non-food items (clothes, small furniture...)’
6 ‘To access water or sanitation facilities’
7 ‘To access essential dwelling services (electricity, energy, waste disposal…)’
8 ‘To pay for existing debts’
999 ‘Other, specify’.
*Create a multi-response dataset for reasons selected for applying livelihood coping strategies
MRSETS
/MDGROUP NAME=$ReasonsforCoping CATEGORYLABELS=VARLABELS VARIABLES= LhCSIEnAccess/1 LhCSIEnAccess/2 LhCSIEnAccess/3 LhCSIEnAccess/4 LhCSIEnAccess/5 LhCSIEnAccess/6 LhCSIEnAccess/7 LhCSIEnAccess/8 LhCSIEnAccess/999
VALUE=1
/DISPLAY NAME=[$ReasonsforCoping].
*Customs table to check frequencies for each reason by column percentages and table column percentages
CTABLES
/VLABELS VARIABLES=$ReasonsforCoping DISPLAY=LABEL
/TABLE $ReasonsforCoping [COLPCT.RESPONSES.COUNT PCT40.1, COLPCT.COUNT PCT40.1]
/CATEGORIES VARIABLES=$ReasonsforCoping EMPTY=INCLUDE
/CRITERIA CILEVEL=95.
***********************************Calculating LCS-FS using the LCS-EN module************************************
***Important note: If ‘to buy food’ is not among the reasons selected for applying livelihood coping strategies then these case/households should be considered under ‘not coping’ when computing CARI.
/*If the design of this question provides responses in a single cell then the analyst should manually split the responses in excel prior to running this syntax
***define value labels
Value labels EnAccessRsn
LhCSIEnAccess/1 = To buy food
If (LhCSIEnAccess/1=0) Max_coping_behaviourEN =1.
*rename variable in order to continue with the CARI syntax.
Rename variable (Max_coping_behaviourEN=Max_coping_behaviour).
Frequencies Max_coping_behaviour.
| |
c621a27a36cec258a46a1c1563c84a15bf7d95b3ef304555aae73be4fb16641d | apache/couchdb-erlfdb | erlfdb_key.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(erlfdb_key).
-export([
to_selector/1,
last_less_than/1,
last_less_or_equal/1,
first_greater_than/1,
first_greater_or_equal/1,
strinc/1
]).
%% Normalize a key argument into a key-selector form.  A bare binary key
%% becomes a {Key, gteq} selector; 2- and 3-tuples whose first element
%% is a binary are passed through unchanged; anything else raises
%% {invalid_key_selector, Arg}.
to_selector(Arg) ->
    case Arg of
        <<_/binary>> = Key -> {Key, gteq};
        {<<_/binary>>, _} = Sel -> Sel;
        {<<_/binary>>, _, _} = Sel -> Sel;
        Else -> erlang:error({invalid_key_selector, Else})
    end.
%% Selector for the last key strictly less than Key.
last_less_than(Key) when is_binary(Key) ->
    {Key, lt}.

%% Selector for the last key less than or equal to Key.
last_less_or_equal(Key) when is_binary(Key) ->
    {Key, lteq}.

%% Selector for the first key strictly greater than Key.
first_greater_than(Key) when is_binary(Key) ->
    {Key, gt}.

%% Selector for the first key greater than or equal to Key.
first_greater_or_equal(Key) when is_binary(Key) ->
    {Key, gteq}.
%% "Strictly increment" a key: return the first binary that sorts after
%% every key having Key as a prefix.  Trailing 16#FF bytes are stripped
%% (via rstrip_ff, which errors if nothing remains), then the final
%% remaining byte is incremented — it is at most 16#FE, so no overflow.
strinc(Key) when is_binary(Key) ->
    Stripped = rstrip_ff(Key),
    HeadLen = byte_size(Stripped) - 1,
    <<Head:HeadLen/binary, Last:8>> = Stripped,
    <<Head/binary, (Last + 1):8>>.
%% Drop every trailing 16#FF byte from Key.  A key that is empty or
%% consists solely of 16#FF bytes cannot be strictly incremented, so
%% raise an error in that case.
rstrip_ff(<<>>) ->
    erlang:error("Key must contain at least one byte not equal to 0xFF");
rstrip_ff(Key) ->
    Len = byte_size(Key),
    case binary:last(Key) of
        16#FF -> rstrip_ff(binary:part(Key, 0, Len - 1));
        _Other -> Key
    end.
| null | https://raw.githubusercontent.com/apache/couchdb-erlfdb/0fa8eac025253c56ddacfe0e0420d4d3ac5e9768/src/erlfdb_key.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(erlfdb_key).
-export([
to_selector/1,
last_less_than/1,
last_less_or_equal/1,
first_greater_than/1,
first_greater_or_equal/1,
strinc/1
]).
to_selector(<<_/binary>> = Key) ->
{Key, gteq};
to_selector({<<_/binary>>, _} = Sel) ->
Sel;
to_selector({<<_/binary>>, _, _} = Sel) ->
Sel;
to_selector(Else) ->
erlang:error({invalid_key_selector, Else}).
last_less_than(Key) when is_binary(Key) ->
{Key, lt}.
last_less_or_equal(Key) when is_binary(Key) ->
{Key, lteq}.
first_greater_than(Key) when is_binary(Key) ->
{Key, gt}.
first_greater_or_equal(Key) when is_binary(Key) ->
{Key, gteq}.
strinc(Key) when is_binary(Key) ->
Prefix = rstrip_ff(Key),
PrefixLen = size(Prefix),
Head = binary:part(Prefix, {0, PrefixLen - 1}),
Tail = binary:at(Prefix, PrefixLen - 1),
<<Head/binary, (Tail + 1)>>.
rstrip_ff(<<>>) ->
erlang:error("Key must contain at least one byte not equal to 0xFF");
rstrip_ff(Key) ->
KeyLen = size(Key),
case binary:at(Key, KeyLen - 1) of
16#FF -> rstrip_ff(binary:part(Key, {0, KeyLen - 1}));
_ -> Key
end.
|
a7b9979f0f5816c2ef30eeca6f830078f0cb01530cd990f2d17aac5ff77858e5 | Hendrick/ring-okta | session.clj | (ns ring.ring-okta.session
(:require [ring.ring-okta.saml :as saml]
[clojure.core.incubator :refer [dissoc-in]]
[ring.util.response :as response]))
(defn login
  "Ring handler for an Okta SAML POST.  Hands the request params and the
  Okta config location to saml/respond-to-okta-post (semantics live in
  ring.ring-okta.saml), then issues a 303 redirect to the returned
  :redirect-url with the :authenticated-user-email stored in the session
  under :okta/user."
  [{:keys [params okta-config-location]}]
  (let [okta-response (saml/respond-to-okta-post params okta-config-location)]
    (assoc-in
     (response/redirect-after-post (:redirect-url okta-response))
     [:session :okta/user]
     (:authenticated-user-email okta-response))))
(defn logout
  "Removes the Okta user entry from the request's session map.
  NOTE(review): uses clojure.core.incubator's dissoc-in, which also
  prunes a containing map that becomes empty — an update-in/dissoc
  rewrite would not be equivalent."
  [request]
  (dissoc-in request [:session :okta/user]))
| null | https://raw.githubusercontent.com/Hendrick/ring-okta/bad2278d2ef7287da108684de8e9f5206d74d510/src/ring/ring_okta/session.clj | clojure | (ns ring.ring-okta.session
(:require [ring.ring-okta.saml :as saml]
[clojure.core.incubator :refer [dissoc-in]]
[ring.util.response :as response]))
(defn login [{:keys [params okta-config-location]}]
(let [okta-response (saml/respond-to-okta-post params okta-config-location)]
(assoc-in
(response/redirect-after-post (:redirect-url okta-response))
[:session :okta/user]
(:authenticated-user-email okta-response))))
(defn logout [request]
(dissoc-in request [:session :okta/user]))
| |
bde69650216764b31e1bce2871a84eb724facb59eb334073fb05195850762880 | marick/fp-oo | building-data.clj | (def fap
(fn []
(fn [lookup-key] nil)))
(def fassoc
  ;; Functional-map assoc: given a lookup fn, a key and a value, build a
  ;; new lookup fn that answers `v` for `k` and otherwise delegates to
  ;; the original fn (so later associations shadow earlier ones).
  (fn [fmap k v]
    (fn [lookup-key]
      (if (= k lookup-key)
        v
        (fmap lookup-key)))))
| null | https://raw.githubusercontent.com/marick/fp-oo/434937826d794d6fe02b3e9a62cf5b4fbc314412/old-code/sources/building-data.clj | clojure | (def fap
(fn []
(fn [lookup-key] nil)))
(def fassoc
(fn [fap new-key value]
(fn [lookup-key]
(if (= lookup-key new-key)
value
(fap lookup-key)))))
| |
0526b2251987ab366fd602c441aeeadb1b5b07ca5e46dc0bcc87ddc198741c24 | sealchain-project/sealchain | Binary.hs | module Pos.Infra.Binary () where
import Pos.Infra.Binary.DHTModel ()
| null | https://raw.githubusercontent.com/sealchain-project/sealchain/e97b4bac865fb147979cb14723a12c716a62e51e/infra/src/Pos/Infra/Binary.hs | haskell | module Pos.Infra.Binary () where
import Pos.Infra.Binary.DHTModel ()
| |
26e353e5717999748999e2beb338ea0a89af9329155cbcd63621ee88926716f7 | mattmundell/nightshade | ambiguous-files.lisp | ;;; Tests of lisp:ambiguous-files.
(in-package "LISP")
(import '(deftest:deftest deftest:with-test-dir
deftest:with-test-search-list))
(defmacro files= (list1 list2 list2-dir)
"Return true if $list1 lists the same set of files as those in $list2
merged with $dir."
`(equal (sort (mapcar #'namestring ,list1) #'string<)
(sort (mapcar (lambda (ele)
(namestring (merge-pathnames ele ,list2-dir)))
,list2)
#'string<)))
Lone directory .
(deftest ambiguous-files (() ambiguous-files-1)
"Test `ambiguous-files' with the absolute complete name of a lone empty
dir, in directory name form (with the trailing slash)."
(with-test-dir (dir "a/")
(ambiguous-files (merge-pathnames "a/" dir))))
(deftest ambiguous-files (() ambiguous-files-2)
"Test `ambiguous-files' with a relative complete name of a lone empty
dir, in directory name form (with the trailing slash)."
(with-test-dir (dir "a/")
(in-directory dir (ambiguous-files "a/"))))
(deftest ambiguous-files (t ambiguous-files-3)
"Test `ambiguous-files' with the absolute complete name of a lone dir, in
file name form."
(with-test-dir (dir "bbbbbbb/")
(files= (ambiguous-files (merge-pathnames "bbbbbbb" dir))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-4)
"Test `ambiguous-files' with the relative complete name of a lone dir, in
file name form."
(with-test-dir (dir "bbbbbbb/")
(files= (in-directory dir (ambiguous-files "bbbbbbb"))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-5)
"Test `ambiguous-files' with the absolute partial name of a lone dir, in
file name form."
(with-test-dir (dir "abcdef/")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-6)
"Test `ambiguous-files' with the relative partial name of a lone dir, in
file name form."
(with-test-dir (dir "abcdef/")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
Lone file .
(deftest ambiguous-files (t ambiguous-files-11)
"Test `ambiguous-files' with the absolute complete name of a lone file."
(with-test-dir (dir "a")
(files= (ambiguous-files (merge-pathnames "a" dir))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-12)
"Test `ambiguous-files' with a relative complete name of a lone file."
(with-test-dir (dir "a")
(files= (in-directory dir (ambiguous-files "a"))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-13)
"Test `ambiguous-files' with the absolute partial name of a lone file."
(with-test-dir (dir "abcdef")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-14)
"Test `ambiguous-files' with the relative partial name of a lone file."
(with-test-dir (dir "abcdef")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
;;;; Directory with siblings.
(deftest ambiguous-files (() ambiguous-files-21)
"Test `ambiguous-files' with the absolute complete unique name of a
directory with siblings, in directory name form (with the trailing
slash)."
(with-test-dir (dir "a/" "b/" "c" ".a")
(ambiguous-files (merge-pathnames "a/" dir))))
(deftest ambiguous-files (() ambiguous-files-22)
"Test `ambiguous-files' with a relative complete unique name of a directory
with siblings, in directory name form (with the trailing slash)."
(with-test-dir (dir "a/" "ba/" "b" ".b")
(in-directory dir (ambiguous-files "a/"))))
(deftest ambiguous-files (t ambiguous-files-23)
"Test `ambiguous-files' with the absolute complete unique name of a
directory with siblings, in file name form."
(with-test-dir (dir "bbbbbbb/" "bb" "ccccccc/")
(files= (ambiguous-files (merge-pathnames "bbbbbbb" dir))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-24)
"Test `ambiguous-files' with the relative complete unique name of a
directory with siblings, in file name form."
(with-test-dir (dir "bbbbbbb/" "c" "d.c")
(files= (in-directory dir (ambiguous-files "bbbbbbb"))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-25)
"Test `ambiguous-files' with the absolute partial unique name of a dir with
siblings, in file name form."
(with-test-dir (dir "abcdef/" ".a" "bcdef/" "A")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-26)
"Test `ambiguous-files' with the relative partial unique name of a dir with
siblings, in file name form."
(with-test-dir (dir "abcdef/" "abCDEF/" "aabcdef")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-27)
"Test `ambiguous-files' with an absolute partial ambiguous name of a dir
with siblings, in file name form."
(with-test-dir (dir "abcdef/" ".a" "bcdef/" "A" "abcd" "abacus")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef" "abcd")
dir)))
(deftest ambiguous-files (t ambiguous-files-28)
"Test `ambiguous-files' with a relative partial ambiguous name of a
directory with siblings, in file name form."
(with-test-dir (dir "abcdef/" "abCDEF/" "aabcdef")
(files= (in-directory dir (ambiguous-files "ab"))
'("abcdef" "abCDEF")
dir)))
;;;; File with siblings.
(deftest ambiguous-files (t ambiguous-files-31)
"Test `ambiguous-files' with the absolute unique complete name of a file
with siblings."
(with-test-dir (dir "a" "b" "dir/")
(files= (ambiguous-files (merge-pathnames "a" dir))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-32)
"Test `ambiguous-files' with a relative unique complete name of a file with
siblings."
(with-test-dir (dir "a" "bbb")
(files= (in-directory dir (ambiguous-files "a"))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-33)
"Test `ambiguous-files' with the absolute partial name of a file with
siblings."
(with-test-dir (dir "abcdef" "cde")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-36)
"Test `ambiguous-files' with the relative partial name of a file with
siblings."
(with-test-dir (dir "abcdef" "ab" "A" "a")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-37)
"Test `ambiguous-files' with an absolute partial ambiguous name of a file
with siblings."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef.c" "abc")
dir)))
(deftest ambiguous-files (t ambiguous-files-38)
"Test `ambiguous-files' with a relative partial ambiguous name of a file
with siblings."
(with-test-dir (dir "abcdef.c" "abcDEF/" "aabcdef")
(files= (in-directory dir (ambiguous-files "ab"))
'("abcdef.c" "abcDEF")
dir)))
;;;; Failure.
(deftest ambiguous-files (() ambiguous-files-40)
"Test `ambiguous-files' with an absolute failing file name in a directory
of many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(ambiguous-files (merge-pathnames "abcx" dir))))
(deftest ambiguous-files (() ambiguous-files-41)
"Test `ambiguous-files' with a relative failing file name in a directory of
many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(in-directory dir (ambiguous-files "abcx"))))
(deftest ambiguous-files (() ambiguous-files-42)
"Test `ambiguous-files' with an absolute failing file name in a directory
of many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(ambiguous-files (merge-pathnames "xXx/" dir))))
(deftest ambiguous-files (() ambiguous-files-43)
"Test `ambiguous-files' with a relative failing file name in a directory
of many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(in-directory dir (ambiguous-files "xXx/"))))
;;;; Symlink to file.
(deftest ambiguous-files (t ambiguous-files-50)
"Test `ambiguous-files' with the absolute complete unique name of a file
symlink with siblings."
(with-test-dir (dir "a" ("l" "a") "b" "dir/")
(files= (ambiguous-files (merge-pathnames "l" dir))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-51)
"Test `ambiguous-files' with a relative complete unique name of a file
symlink with siblings."
(with-test-dir (dir "a" ("l" "a") "bbb")
(files= (in-directory dir (ambiguous-files "l"))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-52)
"Test `ambiguous-files' with the absolute partial name of a file symlink
with siblings."
(with-test-dir (dir "abcdef" (".link" "abcdef") "cde")
(files= (ambiguous-files (merge-pathnames ".li" dir))
'(".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-53)
"Test `ambiguous-files' with the relative partial name of a file symlink
which has siblings."
(with-test-dir (dir "abcdef" "ab" "A" ("l.A" "A") "a")
(files= (in-directory dir (ambiguous-files "l"))
'("l.A")
dir)))
(deftest ambiguous-files (t ambiguous-files-54)
"Test `ambiguous-files' with an absolute partial ambiguous unique name of a
file symlink with siblings."
(with-test-dir (dir "abc.c" ("l" "abc.c") "abc.b" ("link" "abc.b"))
(files= (ambiguous-files (merge-pathnames "l" dir))
'("l" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-55)
"Test `ambiguous-files' with a relative partial ambiguous name of a file
symlink with siblings."
(with-test-dir (dir "abc.c" (".li" "abc.c") "abc.b" (".link" "abc.b"))
(files= (in-directory dir
(ambiguous-files ".li"))
'(".li" ".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-56)
"Test `ambiguous-files' with an absolute partial ambiguous name of a broken
file symlink with siblings."
(with-test-dir (dir "abc.c" ("li" "abc") "abc.b" ("link" "abc.b"))
(files= (ambiguous-files (merge-pathnames "li" dir))
'("li" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-57)
"Test `ambiguous-files' with a relative partial ambiguous name of a broken
file symlink with siblings."
(with-test-dir (dir "abc.c" (".linky" "abc") "abc.b" (".link" "abc.b"))
(files= (in-directory dir
(ambiguous-files ".li"))
'(".linky" ".link")
dir)))
;;;; Symlink to directory.
(deftest ambiguous-files (t ambiguous-files-60)
"Test `ambiguous-files' with the absolute complete unique name of a
directory symlink with siblings."
(with-test-dir (dir "a/" ("l" "a/") "b" "dir/")
(files= (ambiguous-files (merge-pathnames "l" dir))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-61)
"Test `ambiguous-files' with a relative complete unique name of a directory
symlink with siblings."
(with-test-dir (dir "a/" ("l" "a/") "bbb")
(files= (in-directory dir (ambiguous-files "l"))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-62)
"Test `ambiguous-files' with the absolute partial name of a directory
symlink with siblings."
(with-test-dir (dir "abcdef/" (".link" "abcdef/") "cde")
(files= (ambiguous-files (merge-pathnames ".li" dir))
'(".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-63)
"Test `ambiguous-files' with the relative partial name of a directory
symlink which has siblings."
(with-test-dir (dir "abcdef" "ab" "A/" ("l.A" "A/") "a")
(files= (in-directory dir (ambiguous-files "l"))
'("l.A")
dir)))
(deftest ambiguous-files (t ambiguous-files-64)
"Test `ambiguous-files' with an absolute partial ambiguous name of a
directory symlink with siblings."
(with-test-dir (dir "abc.c/" ("lin" "abc.c/") "abc.b/" ("link" "abc.b/"))
(files= (ambiguous-files (merge-pathnames "li" dir))
'("lin" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-65)
"Test `ambiguous-files' with a relative partial ambiguous name of a
directory symlink with siblings."
(with-test-dir (dir "abc.c/" (".li" "abc.c/") "abc.b/" (".link" "abc.b/"))
(files= (in-directory dir
(ambiguous-files ".li"))
'(".li" ".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-66)
"Test `ambiguous-files' with an absolute partial ambiguous name of a broken
directory symlink with siblings."
(with-test-dir (dir "abc.c" ("li" "abc/") "abc.b" ("link" "abc.b"))
(files= (ambiguous-files (merge-pathnames "li" dir))
'("li" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-67)
"Test `ambiguous-files' with a relative partial ambiguous name of a broken
directory symlink with siblings."
(with-test-dir (dir "abc.c/" (".li" "abc/") "abcd" (".link" "abc.b/"))
(files= (in-directory dir
(ambiguous-files ".l"))
'(".li" ".link")
dir)))
;;;; Hidden file.
(deftest ambiguous-files (t ambiguous-files-70)
"Test `ambiguous-files' with the absolute complete unique name of a hidden
file with siblings."
(with-test-dir (dir ".a" ("l" "a") "b" "dir/")
(files= (ambiguous-files (merge-pathnames ".a" dir))
'(".a")
dir)))
(deftest ambiguous-files (t ambiguous-files-71)
"Test `ambiguous-files' with a relative complete unique name of a hidden
file with siblings."
(with-test-dir (dir ".a" ("l" ".a") "bbb")
(files= (in-directory dir (ambiguous-files ".a"))
'(".a")
dir)))
(deftest ambiguous-files (t ambiguous-files-72)
"Test `ambiguous-files' with the absolute partial name of a hidden file
with siblings."
(with-test-dir (dir ".abcdef" (".link" "abcdef") "cde")
(files= (ambiguous-files (merge-pathnames ".ab" dir))
'(".abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-73)
"Test `ambiguous-files' with the relative partial unique name of a hidden
file which has siblings."
(with-test-dir (dir ".abcdef" "ab" "A" ("l.A" "A") "a")
(files= (in-directory dir (ambiguous-files ".abc"))
'(".abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-74)
"Test `ambiguous-files' with an absolute partial ambiguous name of a hidden
file with siblings."
(with-test-dir (dir ".abc.c" ("li" ".abc.c") ".abc.b" ("link" ".abc.b"))
(files= (ambiguous-files (merge-pathnames ".abc" dir))
'(".abc.c" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-75)
"Test `ambiguous-files' with a relative partial ambiguous name of a hidden
file symlink with siblings."
(with-test-dir (dir ".abc.c" (".li" ".abc.c") ".abc.b" (".link" ".abc.b"))
(files= (in-directory dir (ambiguous-files ".ab"))
'(".abc.c" ".abc.b")
dir)))
;;;; Backup file.
(deftest ambiguous-files (t ambiguous-files-80)
"Test `ambiguous-files' with the absolute complete unique name of a backup
file with siblings."
(with-test-dir (dir "a~" ("l" "a~") "b" "dir/")
(files= (ambiguous-files (merge-pathnames "a~" dir))
'("a~")
dir)))
(deftest ambiguous-files (t ambiguous-files-81)
"Test `ambiguous-files' with a relative complete unique name of a backup
file with siblings."
(with-test-dir (dir "a.BAK" ("l" ".a") "bbb")
(files= (in-directory dir (ambiguous-files "a.BAK"))
'("a.BAK")
dir)))
(deftest ambiguous-files (t ambiguous-files-82)
"Test `ambiguous-files' with the absolute partial name of a backup file
with siblings."
(with-test-dir (dir ".abcdef.BAK" (".link" "abcdef.BAK") "cde")
(files= (ambiguous-files (merge-pathnames ".ab" dir))
'(".abcdef.BAK")
dir)))
(deftest ambiguous-files (t ambiguous-files-83)
"Test `ambiguous-files' with the relative partial unique name of a backup
file which has siblings."
(with-test-dir (dir ".abcdef" "ab.CKP" "A" ("l.A" "A") "a")
(files= (in-directory dir (ambiguous-files "ab"))
'("ab.CKP")
dir)))
(deftest ambiguous-files (t ambiguous-files-84)
"Test `ambiguous-files' with an absolute partial ambiguous name of a backup
file with siblings."
(with-test-dir (dir ".abc.c~" ("li" ".abc.c~") ".abc.b"
("link" ".abc.b"))
(files= (ambiguous-files (merge-pathnames ".abc" dir))
'(".abc.c~" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-85)
"Test `ambiguous-files' with a relative partial ambiguous name of a backup
file symlink with siblings."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
".abc.b" (".link" ".abc.b"))
(files= (in-directory dir (ambiguous-files ".ab"))
'(".abc.c.BAK" ".abc.b")
dir)))
;;;; Mixed.
(deftest ambiguous-files (t ambiguous-files-90)
"Test `ambiguous-files' with an absolute ambiguous name in a directory of
many entities."
(with-test-dir (dir "abc/" "bc" "ab.c" ("l" "ab.c") "ccc/z/"
".abc" "bc.BAK")
(files= (ambiguous-files (merge-pathnames "a" dir))
'("ab.c" "abc")
dir)))
(deftest ambiguous-files (t ambiguous-files-91)
"Test `ambiguous-files' with a relative ambiguous name in a directory of
many entities."
(with-test-dir (dir "abc/" "bc" "ab.c" ("l" "ab.c") "ccc/z/"
".abc" "bc.BAK")
(files= (in-directory dir (ambiguous-files "a"))
'("ab.c" "abc")
dir)))
;;;; Search list.
(deftest ambiguous-files (t ambiguous-files-100)
"Test `ambiguous-files' with a partial ambiguous name including a search
list."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
".abc.b" (".link" ".abc.b"))
(files= (with-test-search-list ("a" dir)
(ambiguous-files "a:.ab"))
'(".abc.c.BAK" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-101)
"Test `ambiguous-files' with a search list bound to a directory of
files."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
"abc.b" ("link" "abc.b"))
(files= (with-test-search-list ("a" dir)
(ambiguous-files "a:"))
'(".abc.c.BAK" ".li" "abc.b" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-102)
"Test `ambiguous-files' with a search list full of hidden files."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
".abc.b" (".link" ".abc.b"))
(files= (with-test-search-list ("a" dir)
(ambiguous-files "a:"))
'(".abc.c.BAK" ".li" ".abc.b" ".link")
dir)))
(deftest ambiguous-files (() ambiguous-files-103)
"Test `ambiguous-files' with a search list bound to an empty directory."
(with-test-dir (dir)
(with-test-search-list ("a" dir)
(ambiguous-files "a:"))))
;;;; Ancestor directory.
(deftest ambiguous-files (t ambiguous-files-110)
"Test `ambiguous-files' on a subdirectory."
(with-test-dir (dir "a/" "a/b/" "a/b/c/" "a/b/c/f")
(files= (in-directory dir (ambiguous-files "a/b"))
'("b")
(merge-pathnames "a/" dir))))
(deftest ambiguous-files (t ambiguous-files-111)
"Test `ambiguous-files' on a parent directory."
(with-test-dir (dir "a/b/c/" "a/b/c/f")
(files= (in-directory dir
(in-directory "a/b/c/"
;; Inside a is b.
(ambiguous-files "../../b")))
'("b/c/../../b")
(merge-pathnames "a/" dir))))
;;;; Errors.
(deftest ambiguous-files (t ambiguous-files-120)
"Test `ambiguous-files' with wildcards."
(let (ret)
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
"abc.b" (".link" "abc.b"))
(handler-case
(multiple-value-list (in-directory dir (ambiguous-files "*.*")))
(error () (setq ret t))))
ret))
| null | https://raw.githubusercontent.com/mattmundell/nightshade/d8abd7bd3424b95b70bed599e0cfe033e15299e0/src/tests/code/filesys.lisp/ambiguous-files.lisp | lisp | Tests of lisp:ambiguous-files.
Directory with siblings.
File with siblings.
Failure.
Symlink to file.
Symlink to directory.
Hidden file.
Backup file.
Mixed.
Search list.
Ancestor directory.
Inside a is b.
Errors. |
(in-package "LISP")
(import '(deftest:deftest deftest:with-test-dir
deftest:with-test-search-list))
(defmacro files= (list1 list2 list2-dir)
"Return true if $list1 lists the same set of files as those in $list2
merged with $dir."
`(equal (sort (mapcar #'namestring ,list1) #'string<)
(sort (mapcar (lambda (ele)
(namestring (merge-pathnames ele ,list2-dir)))
,list2)
#'string<)))
Lone directory .
(deftest ambiguous-files (() ambiguous-files-1)
"Test `ambiguous-files' with the absolute complete name of a lone empty
dir, in directory name form (with the trailing slash)."
(with-test-dir (dir "a/")
(ambiguous-files (merge-pathnames "a/" dir))))
(deftest ambiguous-files (() ambiguous-files-2)
"Test `ambiguous-files' with a relative complete name of a lone empty
dir, in directory name form (with the trailing slash)."
(with-test-dir (dir "a/")
(in-directory dir (ambiguous-files "a/"))))
(deftest ambiguous-files (t ambiguous-files-3)
"Test `ambiguous-files' with the absolute complete name of a lone dir, in
file name form."
(with-test-dir (dir "bbbbbbb/")
(files= (ambiguous-files (merge-pathnames "bbbbbbb" dir))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-4)
"Test `ambiguous-files' with the relative complete name of a lone dir, in
file name form."
(with-test-dir (dir "bbbbbbb/")
(files= (in-directory dir (ambiguous-files "bbbbbbb"))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-5)
"Test `ambiguous-files' with the absolute partial name of a lone dir, in
file name form."
(with-test-dir (dir "abcdef/")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-6)
"Test `ambiguous-files' with the relative partial name of a lone dir, in
file name form."
(with-test-dir (dir "abcdef/")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
Lone file .
(deftest ambiguous-files (t ambiguous-files-11)
"Test `ambiguous-files' with the absolute complete name of a lone file."
(with-test-dir (dir "a")
(files= (ambiguous-files (merge-pathnames "a" dir))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-12)
"Test `ambiguous-files' with a relative complete name of a lone file."
(with-test-dir (dir "a")
(files= (in-directory dir (ambiguous-files "a"))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-13)
"Test `ambiguous-files' with the absolute partial name of a lone file."
(with-test-dir (dir "abcdef")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-14)
"Test `ambiguous-files' with the relative partial name of a lone file."
(with-test-dir (dir "abcdef")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
(deftest ambiguous-files (() ambiguous-files-21)
"Test `ambiguous-files' with the absolute complete unique name of a
directory with siblings, in directory name form (with the trailing
slash)."
(with-test-dir (dir "a/" "b/" "c" ".a")
(ambiguous-files (merge-pathnames "a/" dir))))
(deftest ambiguous-files (() ambiguous-files-22)
"Test `ambiguous-files' with a relative complete unique name of a directory
with siblings, in directory name form (with the trailing slash)."
(with-test-dir (dir "a/" "ba/" "b" ".b")
(in-directory dir (ambiguous-files "a/"))))
(deftest ambiguous-files (t ambiguous-files-23)
"Test `ambiguous-files' with the absolute complete unique name of a
directory with siblings, in file name form."
(with-test-dir (dir "bbbbbbb/" "bb" "ccccccc/")
(files= (ambiguous-files (merge-pathnames "bbbbbbb" dir))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-24)
"Test `ambiguous-files' with the relative complete unique name of a
directory with siblings, in file name form."
(with-test-dir (dir "bbbbbbb/" "c" "d.c")
(files= (in-directory dir (ambiguous-files "bbbbbbb"))
'("bbbbbbb")
dir)))
(deftest ambiguous-files (t ambiguous-files-25)
"Test `ambiguous-files' with the absolute partial unique name of a dir with
siblings, in file name form."
(with-test-dir (dir "abcdef/" ".a" "bcdef/" "A")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-26)
"Test `ambiguous-files' with the relative partial unique name of a dir with
siblings, in file name form."
(with-test-dir (dir "abcdef/" "abCDEF/" "aabcdef")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-27)
"Test `ambiguous-files' with an absolute partial ambiguous name of a dir
with siblings, in file name form."
(with-test-dir (dir "abcdef/" ".a" "bcdef/" "A" "abcd" "abacus")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef" "abcd")
dir)))
(deftest ambiguous-files (t ambiguous-files-28)
"Test `ambiguous-files' with a relative partial ambiguous name of a
directory with siblings, in file name form."
(with-test-dir (dir "abcdef/" "abCDEF/" "aabcdef")
(files= (in-directory dir (ambiguous-files "ab"))
'("abcdef" "abCDEF")
dir)))
(deftest ambiguous-files (t ambiguous-files-31)
"Test `ambiguous-files' with the absolute unique complete name of a file
with siblings."
(with-test-dir (dir "a" "b" "dir/")
(files= (ambiguous-files (merge-pathnames "a" dir))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-32)
"Test `ambiguous-files' with a relative unique complete name of a file with
siblings."
(with-test-dir (dir "a" "bbb")
(files= (in-directory dir (ambiguous-files "a"))
'("a")
dir)))
(deftest ambiguous-files (t ambiguous-files-33)
"Test `ambiguous-files' with the absolute partial name of a file with
siblings."
(with-test-dir (dir "abcdef" "cde")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-36)
"Test `ambiguous-files' with the relative partial name of a file with
siblings."
(with-test-dir (dir "abcdef" "ab" "A" "a")
(files= (in-directory dir (ambiguous-files "abc"))
'("abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-37)
"Test `ambiguous-files' with an absolute partial ambiguous name of a file
with siblings."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(files= (ambiguous-files (merge-pathnames "abc" dir))
'("abcdef.c" "abc")
dir)))
(deftest ambiguous-files (t ambiguous-files-38)
"Test `ambiguous-files' with a relative partial ambiguous name of a file
with siblings."
(with-test-dir (dir "abcdef.c" "abcDEF/" "aabcdef")
(files= (in-directory dir (ambiguous-files "ab"))
'("abcdef.c" "abcDEF")
dir)))
(deftest ambiguous-files (() ambiguous-files-40)
"Test `ambiguous-files' with an absolute failing file name in a directory
of many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(ambiguous-files (merge-pathnames "abcx" dir))))
(deftest ambiguous-files (() ambiguous-files-41)
"Test `ambiguous-files' with a relative failing file name in a directory of
many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(in-directory dir (ambiguous-files "abcx"))))
(deftest ambiguous-files (() ambiguous-files-42)
"Test `ambiguous-files' with an absolute failing file name in a directory
of many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(ambiguous-files (merge-pathnames "xXx/" dir))))
(deftest ambiguous-files (() ambiguous-files-43)
"Test `ambiguous-files' with a relative failing file name in a directory
of many files."
(with-test-dir (dir "abcdef.c" ".a" "bcdef/" "A.bcd" "abc" "abacus")
(in-directory dir (ambiguous-files "xXx/"))))
(deftest ambiguous-files (t ambiguous-files-50)
"Test `ambiguous-files' with the absolute complete unique name of a file
symlink with siblings."
(with-test-dir (dir "a" ("l" "a") "b" "dir/")
(files= (ambiguous-files (merge-pathnames "l" dir))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-51)
"Test `ambiguous-files' with a relative complete unique name of a file
symlink with siblings."
(with-test-dir (dir "a" ("l" "a") "bbb")
(files= (in-directory dir (ambiguous-files "l"))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-52)
"Test `ambiguous-files' with the absolute partial name of a file symlink
with siblings."
(with-test-dir (dir "abcdef" (".link" "abcdef") "cde")
(files= (ambiguous-files (merge-pathnames ".li" dir))
'(".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-53)
"Test `ambiguous-files' with the relative partial name of a file symlink
which has siblings."
(with-test-dir (dir "abcdef" "ab" "A" ("l.A" "A") "a")
(files= (in-directory dir (ambiguous-files "l"))
'("l.A")
dir)))
(deftest ambiguous-files (t ambiguous-files-54)
"Test `ambiguous-files' with an absolute partial ambiguous unique name of a
file symlink with siblings."
(with-test-dir (dir "abc.c" ("l" "abc.c") "abc.b" ("link" "abc.b"))
(files= (ambiguous-files (merge-pathnames "l" dir))
'("l" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-55)
"Test `ambiguous-files' with a relative partial ambiguous name of a file
symlink with siblings."
(with-test-dir (dir "abc.c" (".li" "abc.c") "abc.b" (".link" "abc.b"))
(files= (in-directory dir
(ambiguous-files ".li"))
'(".li" ".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-56)
"Test `ambiguous-files' with an absolute partial ambiguous name of a broken
file symlink with siblings."
(with-test-dir (dir "abc.c" ("li" "abc") "abc.b" ("link" "abc.b"))
(files= (ambiguous-files (merge-pathnames "li" dir))
'("li" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-57)
"Test `ambiguous-files' with a relative partial ambiguous name of a broken
file symlink with siblings."
(with-test-dir (dir "abc.c" (".linky" "abc") "abc.b" (".link" "abc.b"))
(files= (in-directory dir
(ambiguous-files ".li"))
'(".linky" ".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-60)
"Test `ambiguous-files' with the absolute complete unique name of a
directory symlink with siblings."
(with-test-dir (dir "a/" ("l" "a/") "b" "dir/")
(files= (ambiguous-files (merge-pathnames "l" dir))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-61)
"Test `ambiguous-files' with a relative complete unique name of a directory
symlink with siblings."
(with-test-dir (dir "a/" ("l" "a/") "bbb")
(files= (in-directory dir (ambiguous-files "l"))
'("l")
dir)))
(deftest ambiguous-files (t ambiguous-files-62)
"Test `ambiguous-files' with the absolute partial name of a directory
symlink with siblings."
(with-test-dir (dir "abcdef/" (".link" "abcdef/") "cde")
(files= (ambiguous-files (merge-pathnames ".li" dir))
'(".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-63)
"Test `ambiguous-files' with the relative partial name of a directory
symlink which has siblings."
(with-test-dir (dir "abcdef" "ab" "A/" ("l.A" "A/") "a")
(files= (in-directory dir (ambiguous-files "l"))
'("l.A")
dir)))
(deftest ambiguous-files (t ambiguous-files-64)
"Test `ambiguous-files' with an absolute partial ambiguous name of a
directory symlink with siblings."
(with-test-dir (dir "abc.c/" ("lin" "abc.c/") "abc.b/" ("link" "abc.b/"))
(files= (ambiguous-files (merge-pathnames "li" dir))
'("lin" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-65)
"Test `ambiguous-files' with a relative partial ambiguous name of a
directory symlink with siblings."
(with-test-dir (dir "abc.c/" (".li" "abc.c/") "abc.b/" (".link" "abc.b/"))
(files= (in-directory dir
(ambiguous-files ".li"))
'(".li" ".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-66)
"Test `ambiguous-files' with an absolute partial ambiguous name of a broken
directory symlink with siblings."
(with-test-dir (dir "abc.c" ("li" "abc/") "abc.b" ("link" "abc.b"))
(files= (ambiguous-files (merge-pathnames "li" dir))
'("li" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-67)
"Test `ambiguous-files' with a relative partial ambiguous name of a broken
directory symlink with siblings."
(with-test-dir (dir "abc.c/" (".li" "abc/") "abcd" (".link" "abc.b/"))
(files= (in-directory dir
(ambiguous-files ".l"))
'(".li" ".link")
dir)))
(deftest ambiguous-files (t ambiguous-files-70)
"Test `ambiguous-files' with the absolute complete unique name of a hidden
file with siblings."
(with-test-dir (dir ".a" ("l" "a") "b" "dir/")
(files= (ambiguous-files (merge-pathnames ".a" dir))
'(".a")
dir)))
(deftest ambiguous-files (t ambiguous-files-71)
"Test `ambiguous-files' with a relative complete unique name of a hidden
file with siblings."
(with-test-dir (dir ".a" ("l" ".a") "bbb")
(files= (in-directory dir (ambiguous-files ".a"))
'(".a")
dir)))
(deftest ambiguous-files (t ambiguous-files-72)
"Test `ambiguous-files' with the absolute partial name of a hidden file
with siblings."
(with-test-dir (dir ".abcdef" (".link" "abcdef") "cde")
(files= (ambiguous-files (merge-pathnames ".ab" dir))
'(".abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-73)
"Test `ambiguous-files' with the relative partial unique name of a hidden
file which has siblings."
(with-test-dir (dir ".abcdef" "ab" "A" ("l.A" "A") "a")
(files= (in-directory dir (ambiguous-files ".abc"))
'(".abcdef")
dir)))
(deftest ambiguous-files (t ambiguous-files-74)
"Test `ambiguous-files' with an absolute partial ambiguous name of a hidden
file with siblings."
(with-test-dir (dir ".abc.c" ("li" ".abc.c") ".abc.b" ("link" ".abc.b"))
(files= (ambiguous-files (merge-pathnames ".abc" dir))
'(".abc.c" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-75)
"Test `ambiguous-files' with a relative partial ambiguous name of a hidden
file symlink with siblings."
(with-test-dir (dir ".abc.c" (".li" ".abc.c") ".abc.b" (".link" ".abc.b"))
(files= (in-directory dir (ambiguous-files ".ab"))
'(".abc.c" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-80)
"Test `ambiguous-files' with the absolute complete unique name of a backup
file with siblings."
(with-test-dir (dir "a~" ("l" "a~") "b" "dir/")
(files= (ambiguous-files (merge-pathnames "a~" dir))
'("a~")
dir)))
(deftest ambiguous-files (t ambiguous-files-81)
"Test `ambiguous-files' with a relative complete unique name of a backup
file with siblings."
(with-test-dir (dir "a.BAK" ("l" ".a") "bbb")
(files= (in-directory dir (ambiguous-files "a.BAK"))
'("a.BAK")
dir)))
(deftest ambiguous-files (t ambiguous-files-82)
"Test `ambiguous-files' with the absolute partial name of a backup file
with siblings."
(with-test-dir (dir ".abcdef.BAK" (".link" "abcdef.BAK") "cde")
(files= (ambiguous-files (merge-pathnames ".ab" dir))
'(".abcdef.BAK")
dir)))
(deftest ambiguous-files (t ambiguous-files-83)
"Test `ambiguous-files' with the relative partial unique name of a backup
file which has siblings."
(with-test-dir (dir ".abcdef" "ab.CKP" "A" ("l.A" "A") "a")
(files= (in-directory dir (ambiguous-files "ab"))
'("ab.CKP")
dir)))
(deftest ambiguous-files (t ambiguous-files-84)
"Test `ambiguous-files' with an absolute partial ambiguous name of a backup
file with siblings."
(with-test-dir (dir ".abc.c~" ("li" ".abc.c~") ".abc.b"
("link" ".abc.b"))
(files= (ambiguous-files (merge-pathnames ".abc" dir))
'(".abc.c~" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-85)
"Test `ambiguous-files' with a relative partial ambiguous name of a backup
file symlink with siblings."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
".abc.b" (".link" ".abc.b"))
(files= (in-directory dir (ambiguous-files ".ab"))
'(".abc.c.BAK" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-90)
"Test `ambiguous-files' with an absolute ambiguous name in a directory of
many entities."
(with-test-dir (dir "abc/" "bc" "ab.c" ("l" "ab.c") "ccc/z/"
".abc" "bc.BAK")
(files= (ambiguous-files (merge-pathnames "a" dir))
'("ab.c" "abc")
dir)))
(deftest ambiguous-files (t ambiguous-files-91)
"Test `ambiguous-files' with a relative ambiguous name in a directory of
many entities."
(with-test-dir (dir "abc/" "bc" "ab.c" ("l" "ab.c") "ccc/z/"
".abc" "bc.BAK")
(files= (in-directory dir (ambiguous-files "a"))
'("ab.c" "abc")
dir)))
(deftest ambiguous-files (t ambiguous-files-100)
"Test `ambiguous-files' with a partial ambiguous name including a search
list."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
".abc.b" (".link" ".abc.b"))
(files= (with-test-search-list ("a" dir)
(ambiguous-files "a:.ab"))
'(".abc.c.BAK" ".abc.b")
dir)))
(deftest ambiguous-files (t ambiguous-files-101)
"Test `ambiguous-files' with a search list bound to a directory of
files."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
"abc.b" ("link" "abc.b"))
(files= (with-test-search-list ("a" dir)
(ambiguous-files "a:"))
'(".abc.c.BAK" ".li" "abc.b" "link")
dir)))
(deftest ambiguous-files (t ambiguous-files-102)
"Test `ambiguous-files' with a search list full of hidden files."
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
".abc.b" (".link" ".abc.b"))
(files= (with-test-search-list ("a" dir)
(ambiguous-files "a:"))
'(".abc.c.BAK" ".li" ".abc.b" ".link")
dir)))
(deftest ambiguous-files (() ambiguous-files-103)
"Test `ambiguous-files' with a search list bound to an empty directory."
(with-test-dir (dir)
(with-test-search-list ("a" dir)
(ambiguous-files "a:"))))
(deftest ambiguous-files (t ambiguous-files-110)
"Test `ambiguous-files' on a subdirectory."
(with-test-dir (dir "a/" "a/b/" "a/b/c/" "a/b/c/f")
(files= (in-directory dir (ambiguous-files "a/b"))
'("b")
(merge-pathnames "a/" dir))))
(deftest ambiguous-files (t ambiguous-files-111)
"Test `ambiguous-files' on a parent directory."
(with-test-dir (dir "a/b/c/" "a/b/c/f")
(files= (in-directory dir
(in-directory "a/b/c/"
(ambiguous-files "../../b")))
'("b/c/../../b")
(merge-pathnames "a/" dir))))
(deftest ambiguous-files (t ambiguous-files-120)
"Test `ambiguous-files' with wildcards."
(let (ret)
(with-test-dir (dir ".abc.c.BAK" (".li" ".abc.c.BAK")
"abc.b" (".link" "abc.b"))
(handler-case
(multiple-value-list (in-directory dir (ambiguous-files "*.*")))
(error () (setq ret t))))
ret))
|
8806a19eec34f167854373203b2cc916c4763cd4992b783b29e6329c0617a17a | tek/proteome | InitTest.hs | module Proteome.Test.InitTest where
import Polysemy.Test (UnitTest, (===))
import Ribosome.Api (nvimCommand, vimGetVar)
import qualified Ribosome.Settings as Settings
import Ribosome.Test (testError)
import Proteome.Data.ProjectName (ProjectName (ProjectName))
import Proteome.Data.ProjectType (ProjectType (ProjectType))
import Proteome.Init (projectConfig, resolveAndInitMain)
import qualified Proteome.Settings as Settings
import Proteome.Test.Run (proteomeTest)
test_init :: UnitTest
test_init =
proteomeTest do
nvimCommand "autocmd User ProteomeProject let g:success = 13"
testError resolveAndInitMain
projectConfig
tpe <- Settings.get Settings.mainType
name <- Settings.get Settings.mainName
ProjectName "flagellum" === name
ProjectType "haskell" === tpe
((13 :: Int) ===) =<< vimGetVar "success"
| null | https://raw.githubusercontent.com/tek/proteome/019928432bd5f5ba87d35eab19e341a5c98b1bba/packages/proteome/test/Proteome/Test/InitTest.hs | haskell | module Proteome.Test.InitTest where
import Polysemy.Test (UnitTest, (===))
import Ribosome.Api (nvimCommand, vimGetVar)
import qualified Ribosome.Settings as Settings
import Ribosome.Test (testError)
import Proteome.Data.ProjectName (ProjectName (ProjectName))
import Proteome.Data.ProjectType (ProjectType (ProjectType))
import Proteome.Init (projectConfig, resolveAndInitMain)
import qualified Proteome.Settings as Settings
import Proteome.Test.Run (proteomeTest)
test_init :: UnitTest
test_init =
proteomeTest do
nvimCommand "autocmd User ProteomeProject let g:success = 13"
testError resolveAndInitMain
projectConfig
tpe <- Settings.get Settings.mainType
name <- Settings.get Settings.mainName
ProjectName "flagellum" === name
ProjectType "haskell" === tpe
((13 :: Int) ===) =<< vimGetVar "success"
| |
47186c7637f3d9ba6c34b5f89c57ac9f56ef42fab756072fb52e7409b3d22054 | hackinghat/cl-mysql | package.lisp | ;;;; -*- Mode: Lisp -*-
$ Id$
;;;;
Copyright ( c ) 2009 < >
;;;;
;;;; Permission is hereby granted, free of charge, to any person obtaining
;;;; a copy of this software and associated documentation files (the
" Software " ) , to deal in the Software without restriction , including
;;;; without limitation the rights to use, copy, modify, merge, publish,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
;;;; the following conditions:
;;;;
;;;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;;;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
;;;; OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
;;;; WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
;;;;
(defpackage com.hackinghat.cl-mysql
(:use :cl)
(:nicknames "CL-MYSQL")
(:shadowing-import-from "CL-MYSQL-SYSTEM"
#:connect #:query #:use #:disconnect #:ping #:option
#:client-version #:server-version
#:list-dbs #:list-tables #:list-processes #:list-fields
#:escape-string #:next-result-set #:next-row #:*type-map*
#:nth-row #:with-rows #:result-set-fields #:process-result-set
#:opt-connect-timeout #:opt-compress #:opt-named-pipe
#:init-command #:read-default-file #:read-default-group
#:+client-compress+ #:+client-found-rows+ #:+client-ignore-sigpipe+
#:+client-ignore-space+ #:+client-interactive+ #:+client-local-files+
#:+client-multi-statements+ #:+client-multi-results+ #:+client-no-schema+
#:+client-ssl+ #:+client-remember-options+
#:set-charset-dir #:set-charset-name #:opt-local-infile
#:opt-protocol #:shared-memory-base-name #:opt-read-timeout
#:opt-write-timeout #:opt-use-result
#:opt-use-remote-connection #:opt-use-embedded-connection
#:opt-guess-connection #:set-client-ip #:secure-auth
#:report-data-truncation #:opt-reconnect
#:opt-ssl-verify-server-cert)
(:export #:connect #:query #:use #:disconnect #:ping #:option
#:client-version #:server-version
#:list-dbs #:list-tables #:list-processes #:list-fields
#:escape-string #:next-result-set #:next-row #:*type-map*
#:nth-row #:with-rows #:result-set-fields #:process-result-set
#:+client-compress+ #:+client-found-rows+ #:+client-ignore-sigpipe+
#:+client-ignore-space+ #:+client-interactive+ #:+client-local-files+
#:+client-multi-statements+ #:+client-multi-results+ #:+client-no-schema+
#:+client-ssl+ #:+client-remember-options+
#:opt-connect-timeout #:opt-compress #:opt-named-pipe
#:init-command #:read-default-file #:read-default-group
#:set-charset-dir #:set-charset-name #:opt-local-infile
#:opt-protocol #:shared-memory-base-name #:opt-read-timeout
#:opt-write-timeout #:opt-use-result
#:opt-use-remote-connection #:opt-use-embedded-connection
#:opt-guess-connection #:set-client-ip #:secure-auth
#:report-data-truncation #:opt-reconnect
#:opt-ssl-verify-server-cert))
| null | https://raw.githubusercontent.com/hackinghat/cl-mysql/3fbf6e1421484f64c5bcf2ff3c4b96c6f0414f09/package.lisp | lisp | -*- Mode: Lisp -*-
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
| $ Id$
Copyright ( c ) 2009 < >
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
(defpackage com.hackinghat.cl-mysql
(:use :cl)
(:nicknames "CL-MYSQL")
(:shadowing-import-from "CL-MYSQL-SYSTEM"
#:connect #:query #:use #:disconnect #:ping #:option
#:client-version #:server-version
#:list-dbs #:list-tables #:list-processes #:list-fields
#:escape-string #:next-result-set #:next-row #:*type-map*
#:nth-row #:with-rows #:result-set-fields #:process-result-set
#:opt-connect-timeout #:opt-compress #:opt-named-pipe
#:init-command #:read-default-file #:read-default-group
#:+client-compress+ #:+client-found-rows+ #:+client-ignore-sigpipe+
#:+client-ignore-space+ #:+client-interactive+ #:+client-local-files+
#:+client-multi-statements+ #:+client-multi-results+ #:+client-no-schema+
#:+client-ssl+ #:+client-remember-options+
#:set-charset-dir #:set-charset-name #:opt-local-infile
#:opt-protocol #:shared-memory-base-name #:opt-read-timeout
#:opt-write-timeout #:opt-use-result
#:opt-use-remote-connection #:opt-use-embedded-connection
#:opt-guess-connection #:set-client-ip #:secure-auth
#:report-data-truncation #:opt-reconnect
#:opt-ssl-verify-server-cert)
(:export #:connect #:query #:use #:disconnect #:ping #:option
#:client-version #:server-version
#:list-dbs #:list-tables #:list-processes #:list-fields
#:escape-string #:next-result-set #:next-row #:*type-map*
#:nth-row #:with-rows #:result-set-fields #:process-result-set
#:+client-compress+ #:+client-found-rows+ #:+client-ignore-sigpipe+
#:+client-ignore-space+ #:+client-interactive+ #:+client-local-files+
#:+client-multi-statements+ #:+client-multi-results+ #:+client-no-schema+
#:+client-ssl+ #:+client-remember-options+
#:opt-connect-timeout #:opt-compress #:opt-named-pipe
#:init-command #:read-default-file #:read-default-group
#:set-charset-dir #:set-charset-name #:opt-local-infile
#:opt-protocol #:shared-memory-base-name #:opt-read-timeout
#:opt-write-timeout #:opt-use-result
#:opt-use-remote-connection #:opt-use-embedded-connection
#:opt-guess-connection #:set-client-ip #:secure-auth
#:report-data-truncation #:opt-reconnect
#:opt-ssl-verify-server-cert))
|
15d0f6da8ed50547d1f0e874202beeede35877985676ad36d8f99c04174c3e57 | staples-sparx/kits | homeless.clj | (ns ^{:doc "Unfortunate, uncategorized utility functions and macros.
Please help one of these poor souls find a home in a
focused, Single-Responsibility namespace instead :("}
kits.homeless
(:require
[clojure.pprint :as pprint]
[clojure.set :as set]
[clojure.string :as str])
(:import
clojure.lang.Var
java.net.MalformedURLException
java.sql.SQLException
(java.util.concurrent Future TimeoutException)))
(set! *warn-on-reflection* false)
(defmacro ignore-exceptions
"Evaluate body, but return nil if any exceptions are thrown."
[& body]
`(try
~@body
(catch Exception e# nil)))
(defmacro defn-cond
"Variant of defn that allows for multiple alternative
implementations in the body, one of which is used based on a
matching predicate, e.g.,
(defn-cond test [a b]
(re-find #\"^1.2\" (clojure-version))
(* a b)
:else
(+ a b))
would define `test` one way under Clojure 1.2, and differently on
other versions."
[name & fdecl]
(let [[m fdecl] (if (string? (first fdecl))
[{:doc (first fdecl)} (rest fdecl)]
[{} fdecl])
[args & clauses] fdecl
m (conj {:arglists (list 'list (list 'quote args))} m)]
(list 'def
(with-meta name m)
(list*
(reduce (fn [acc [pred body]]
(conj acc pred `(fn [~@args] ~body)))
['cond]
(partition 2 clauses))))))
(defmacro def-many-methods
"Creates multiple multimethods with different dispatch values, but the same implementation."
[name dispatch-values & body]
`(doseq [dispatch-val# ~dispatch-values]
(defmethod ~name dispatch-val# ~@body)))
(defn time-elapsed*
"Returns time elapsed in millis."
[f]
(let [start (System/nanoTime)]
(f)
(/ (double (- (System/nanoTime) start)) 1000000.0)))
(defmacro time-elapsed
"Returns time elapsed in millis."
[& body]
`(time-elapsed* (fn [] ~@body)))
(defn raise
"Raise a RuntimeException with specified message."
[& msg]
(throw (RuntimeException. ^String (apply str msg))))
(defn print-vals
"Print the specified args, and return the value of the last arg."
[& args]
(apply println
(cons "*** "
(map #(if (string? %) % (with-out-str (pprint/pprint %)))
args)))
(last args))
(defn parse-int
"Parse integer value from string `s`"
[s]
(ignore-exceptions (Integer/parseInt s)))
(defn parse-long
"Parse long integer value from string `s`"
[s]
(ignore-exceptions (Long/parseLong s)))
(defn parse-short
"Parse short integer value from string `s`"
[s]
(ignore-exceptions (Short/parseShort s)))
(defn parse-float
"Parse floating point value from string `s`"
[s]
(ignore-exceptions (Float/parseFloat s)))
(defn parse-double
"Parse double precision number from string `s`"
[s]
(ignore-exceptions (Double/parseDouble s)))
(defn read-string-safely [s] (when s (read-string s)))
(defn parse-number
"Parse a number from string `s`, optionally passing a default value
to return."
([s]
(parse-number s nil))
([s default]
(cond
(number? s) s
(empty? s) default
:else (read-string-safely s))))
(defn rand-int*
"Return a random integer between min (inclusive) and max (exclusive)."
[min max]
(+ min (rand-int (- max min))))
(defn- time-ns
"Current value of the most precise available system timer, in
nanoseconds. This is NOT a guaranteed absolute time like time-ms and
doesn't work the same across all JVM architectures. Use this for
measuring TIME INTERVALS ONLY. See javadoc for System.nanoTime() for
more details."
[]
(System/nanoTime))
(defn- time-us
"Number of micro-seconds since epoch. This is NOT a guaranteed
absolute time like time-ms and doesn't work the same across all
architectures. Use this for measuring TIME INTERVALS ONLY. See
javadoc for System.nanoTime() for more details."
[]
(long (/ (time-ns) 1000)))
(defn time-ms
"Number of milli-seconds since epoch."
[]
(System/currentTimeMillis))
(defn value-and-elapsed-time
"Return the value of `thunk` and time taken to evaluate in
microseconds."
[thunk]
(let [start (time-us)
value (thunk)]
[value (- (time-us) start)]))
(defmacro bind-value-and-elapsed-time
"Binds [value elapsed-time-us] from evaluating `expr` and invokes
`body`."
[bindings expr & body]
`(let [~bindings (value-and-elapsed-time (fn [] ~expr))]
~@body))
(defn-cond call-with-timeout
"Evaluate the function `f` but throw a RuntimeException if it takes
longer than `timeout` milliseconds."
[timeout-ms f]
(re-find #"^1.2" (clojure-version))
(let [^Future fut (future-call f)]
(try
(.get fut
timeout-ms
java.util.concurrent.TimeUnit/MILLISECONDS)
(catch TimeoutException ex
(future-cancel fut)
(throw (RuntimeException. "Evaluation timeout")))))
:else
(let [ex (RuntimeException. "Evaluation timeout")
fut (future-call f)
r (deref fut timeout-ms ex)]
(if (= ex r)
(do
(future-cancel fut)
(throw ex))
r)))
(defmacro with-timeout
"Evaluate `body` but throw a RuntimeException if it takes longer
than `timeout` milliseconds."
[timeout & body]
`(call-with-timeout ~timeout (bound-fn [] ~@body)))
(defmacro periodic-fn
"creates a fn that executes 'body' every 'period' calls"
[args [var period] & body]
`(let [call-count# (atom 0)]
(fn [~@args]
(swap! call-count# inc)
(when (zero? (mod @call-count# ~period))
(let [~var @call-count#]
~@body)))))
(defn wrap-periodic
"Returns a fn which wraps f, that executes `f` once every `period` calls."
[f period]
(let [count (atom 0)]
(fn [& args]
(swap! count inc)
(when (zero? (mod @count period))
(apply f args)))))
(defn safe-sleep
"Sleep for `millis` milliseconds."
[millis]
(try (Thread/sleep millis)
(catch InterruptedException e
(.interrupt ^Thread (Thread/currentThread)))))
(defn random-sleep
"Sleep between 'min-millis' and 'max-millis' milliseconds"
[min-millis max-millis]
(let [range (- max-millis min-millis)
millis (+ min-millis (rand-int range))]
(safe-sleep millis)))
TODO : can we move fns like these into a meaningful namespace ?
;; they all deal with changing call semantics. -sd
;; wait-until, attempt-until, wrap-periodic, periodic-fn
;; with-timeout, call-with-timeout, etc.
(defn wait-until [done-fn? & {:keys [ms-per-loop timeout]
:or {ms-per-loop 1000 timeout 10000}}]
(loop [elapsed (long 0)]
(when-not (or (>= elapsed timeout) (done-fn?))
(Thread/sleep ms-per-loop)
(recur (long (+ elapsed ms-per-loop))))))
(defn attempt-until [f done?-fn & {:keys [ms-per-loop timeout]
:or {ms-per-loop 1000
timeout 10000}}]
(loop [elapsed (long 0)
result (f)]
(if (or (done?-fn result)
(>= elapsed timeout))
result
(do
(Thread/sleep ms-per-loop)
(recur (long (+ elapsed ms-per-loop)) (f))))))
(defn boolean? [x]
(or (true? x) (false? x)))
(defn wrap-trapping-errors
"Wraps the fn `f` to trap any Throwable, and return `default` in
that case."
[f & [default]]
(fn [& args]
(try
(apply f args)
(catch Throwable e default))))
(defn pos-integer?
"Return true if `x` is a positive integer value."
[x]
(every? #(% x) [pos? integer?]))
(defn zero-or-pos-integer?
"Return true if `x` is zero or positive integer value."
[x]
(or (zero? x) (pos-integer? x)))
(defn to-url
"Returns a java.net.URL instance or nil if URL failed to parse"
[^String s]
(when s
(try
(java.net.URL. s)
(catch MalformedURLException e
nil))))
(defn url? [s]
(boolean (to-url s)))
(def ^{:private true :const true} valid-ip-address-v4-re
#"^([01]?\d\d?|2[0-4]\d|25[0-5])\.([01]?\d\d?|2[0-4]\d|25[0-5])\.([01]?\d\d?|2[0-4]\d|25[0-5])\.([01]?\d\d?|2[0-4]\d|25[0-5])$")
(defn ip-address-v4?
"Test if the string `s` is a valid dotted IPv4 address."
[s]
(when s
(boolean
(re-matches valid-ip-address-v4-re s))))
(defn str->boolean
"Boolean value for the specified string, per the following rules:
\"true\" => true
\"false\" => false
\"foobar\" => true
nil or \"\" => false"
[^String s]
(if (not-empty s) (not= "false" (.toLowerCase s)) false))
(defn fprint
"Same as print but explicitly flushes *out*."
[& more]
(apply print more)
(flush))
(defn fprintln
"Same as println but explicitly flushes *out*."
[& more]
(apply println more)
(flush))
;; progress reporting
(def ^:dynamic *print-progress* true)
(defn make-default-progress-reporter
"A basic progress reporter function which can be used with
`with-progress-reporting`."
[{:keys [iters-per-row num-columns row-handler row-fmt no-summary]}]
(let [iters-per-row (or iters-per-row 1000)
num-columns (or num-columns 60)
iters-per-dot (int (/ iters-per-row num-columns))
row-handler (fn [i]
(if row-handler
(str " " (row-handler i))
""))
row-fmt (or row-fmt "%,8d rows%s")]
(fn [i final?]
(cond
final?
(when-not no-summary
(fprintln (format row-fmt i (row-handler i))))
(zero? (mod i iters-per-row))
(fprintln (format row-fmt i (row-handler i)))
(zero? (mod i iters-per-dot))
(fprint ".")))))
(defmacro with-progress-reporting
"Bind a `reportfn` function, and evaluate `body` wherein
calling (report!) will invoke the report function with the current
state of the iteration."
[opts & body]
`(let [iter# (atom 0)
opts# (or ~opts {})
reporter# (or (:reporter opts#)
(make-default-progress-reporter opts#))]
(letfn [(report# [& [fin?#]]
(when *print-progress*
(when-not fin?# (swap! iter# inc))
(reporter# @iter# (boolean fin?#))))]
(let [~'report! report#
val# (do ~@body)]
(report# true)
val#))))
(defn ipv4-dotted-to-integer
"Convert a dotted notation IPv4 address string to a 32-bit integer.
(ipv4-dotted-to-integer \"127.0.0.1\")
=> 2130706433"
[dotted]
(let [[b1 b2 b3 b4] (map #(or (parse-int %)
(raise (format "Invalid IP address: %s" dotted)))
(str/split dotted #"\."))]
(bit-or (bit-or (bit-or (bit-shift-left b1 24)
(bit-shift-left b2 16))
(bit-shift-left b3 8))
b4)))
(defn ipv4-integer-to-dotted
"Convert a 32-bit integer into a dotted notation IPv4 address string.
(ipv4-integer-to-dotted (ipv4-dotted-to-integer \"127.0.0.1\"))
=> \"127.0.0.1\""
[ip]
(format "%d.%d.%d.%d"
(bit-and (bit-shift-right ip 24) 0xff)
(bit-and (bit-shift-right ip 16) 0xff)
(bit-and (bit-shift-right ip 8) 0xff)
(bit-and ip 0xff)))
(defn uuid
"Return a UUID string."
[]
(str (java.util.UUID/randomUUID)))
(defmacro do-all-return-first
"Evaluate expr1 and exprs and return the value of expr1."
[expr1 & exprs]
`(let [v# ~expr1]
~@exprs
v#))
(defn parse-url
"Parse the url spec into a map with keys {:scheme, :host, etc.}"
[^String spec]
(when (seq spec)
(try
(let [[scheme comps] (if (re-find #".*://" spec)
(str/split spec #"://")
["file" spec])
[raw-host raw-path] (let [[h & r] (str/split comps #"/")]
[h (str "/" (str/join "/" r))])
comps (str/split raw-host #"@")
host (last comps)
[username password] (if (< 1 (count comps))
(str/split (first comps) #":")
[nil nil])
[path & [query]] (str/split raw-path #"\?")]
(into {}
(filter val
{:scheme scheme
:username (not-empty username)
:password (not-empty password)
:host (not-empty host)
:path (not-empty path)
:query (not-empty query)})))
(catch Exception ex
nil))))
(defn print-error
"Println to *err*"
[& args]
(binding [*out* *err*]
(apply println args)))
(defn safe-sleep
"Sleep for `millis` milliseconds."
[millis]
(try (Thread/sleep millis)
(catch InterruptedException e
(.interrupt ^Thread (Thread/currentThread)))))
(defn timestamp? [n]
(and (integer? n)
(>= n 0)
(<= n Long/MAX_VALUE)))
(defn stacktrace->str [e]
(map #(str % "\n") (.getStackTrace ^Exception e)))
(defn incremental-name-with-prefix [prefix]
(let [cnt (atom -1)]
(fn []
(swap! cnt inc)
(str prefix "-" @cnt))))
(defn retrying-fn
"Take a no-arg function f and max num retries, returns a new no-arg
function that will call f again if calling f throws a Throwable."
[f {:keys [max-times retry-handler fail-handler swallow-exceptions?] :as options}]
(fn this
([]
(this max-times))
([retry-count]
(try
(f)
(catch Throwable t
(if (zero? retry-count)
(do
(when fail-handler
(fail-handler options t))
(when-not swallow-exceptions?
(throw t)))
(do
(when retry-handler
(retry-handler options t retry-count))
(this (dec retry-count)))))))))
(def valid-with-retries-arg-set #{:max-times
:retry-handler
:fail-handler
:swallow-exceptions?})
(defmacro with-retries
"options can either be a map, or a number (which represents max-times)"
[max-times & body]
(let [opts (if (map? max-times)
max-times
{:max-times max-times})
arg-diff (set/difference (set (keys opts)) valid-with-retries-arg-set)]
(assert (= #{} arg-diff) (str "Valid args: " (vec valid-with-retries-arg-set)))
`((retrying-fn
(fn [] ~@body) ~opts))))
(defn make-comparator
"Similar to clojure.core/comparator but optionally accepts a
`key-fn` arg which is applied to each arg of the `pred-fn`, e.g.,
((make-comparator < :key-fn :id) {:name \"foo\" :id 2} {:name \"bar\" :id 1})
=> 1"
[pred-fn & {:keys [key-fn]}]
(let [key-fn (or key-fn identity)]
(comparator
(fn [a b]
(pred-fn (key-fn a) (key-fn b))))))
(defn average
"If nums is empty returns nil.
This is optimized for speed to loop over the nums only once."
[& nums]
(let [[sum cnt] (reduce (fn [[sum* cnt*] n]
[(+ sum* n) (inc cnt*)])
[0 0]
nums)]
(when-not (zero? cnt)
(/ sum cnt))))
(defn long? [x]
(instance? Long x))
(defn blank->nil [x]
(if (= x "")
nil
x))
(defn div [x by-y]
(when-not (zero? by-y)
(double (/ x by-y))))
(defn ensure-long [x]
(if (integer? x)
(long x)
(Long/parseLong x)))
(defmacro when-before-clojure-1-3 [& body]
(when (and (= 1 (:major *clojure-version*))
(< (:minor *clojure-version*) 3))
`(do ~@body)))
(defmacro when-after-clojure-1-2 [& body]
(when (and (pos? (:major *clojure-version*))
(> (:minor *clojure-version*) 2))
`(do ~@body)))
(defmacro when-before-clojure-1-5 [& body]
(when (and (= 1 (:major *clojure-version*))
(< (:minor *clojure-version*) 5))
`(do ~@body)))
Copied out of Clojure 1.3 +
(when-before-clojure-1-3
(defn some-fn
"Takes a set of predicates and returns a function f that returns the first logical true value
returned by one of its composing predicates against any of its arguments, else it returns
logical false. Note that f is short-circuiting in that it will stop execution on the first
argument that triggers a logical true result against the original predicates."
{:added "1.3"}
([p]
(fn sp1
([] nil)
([x] (p x))
([x y] (or (p x) (p y)))
([x y z] (or (p x) (p y) (p z)))
([x y z & args] (or (sp1 x y z)
(some p args)))))
([p1 p2]
(fn sp2
([] nil)
([x] (or (p1 x) (p2 x)))
([x y] (or (p1 x) (p1 y) (p2 x) (p2 y)))
([x y z] (or (p1 x) (p1 y) (p1 z) (p2 x) (p2 y) (p2 z)))
([x y z & args] (or (sp2 x y z)
(some #(or (p1 %) (p2 %)) args)))))
([p1 p2 p3]
(fn sp3
([] nil)
([x] (or (p1 x) (p2 x) (p3 x)))
([x y] (or (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y)))
([x y z] (or (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y) (p1 z) (p2 z) (p3 z)))
([x y z & args] (or (sp3 x y z)
(some #(or (p1 %) (p2 %) (p3 %)) args)))))
([p1 p2 p3 & ps]
(let [ps (list* p1 p2 p3 ps)]
(fn spn
([] nil)
([x] (some #(% x) ps))
([x y] (some #(or (% x) (% y)) ps))
([x y z] (some #(or (% x) (% y) (% z)) ps))
([x y z & args] (or (spn x y z)
(some #(some % args) ps))))))))
(when-before-clojure-1-3
(defn every-pred
"Takes a set of predicates and returns a function f that returns true if all of its
composing predicates return a logical true value against all of its arguments, else it returns
false. Note that f is short-circuiting in that it will stop execution on the first
argument that triggers a logical false result against the original predicates."
([p]
(fn ep1
([] true)
([x] (boolean (p x)))
([x y] (boolean (and (p x) (p y))))
([x y z] (boolean (and (p x) (p y) (p z))))
([x y z & args] (boolean (and (ep1 x y z)
(every? p args))))))
([p1 p2]
(fn ep2
([] true)
([x] (boolean (and (p1 x) (p2 x))))
([x y] (boolean (and (p1 x) (p1 y) (p2 x) (p2 y))))
([x y z] (boolean (and (p1 x) (p1 y) (p1 z) (p2 x) (p2 y) (p2 z))))
([x y z & args] (boolean (and (ep2 x y z)
(every? #(and (p1 %) (p2 %)) args))))))
([p1 p2 p3]
(fn ep3
([] true)
([x] (boolean (and (p1 x) (p2 x) (p3 x))))
([x y] (boolean (and (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y))))
([x y z] (boolean (and (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y) (p1 z) (p2 z) (p3 z))))
([x y z & args] (boolean (and (ep3 x y z)
(every? #(and (p1 %) (p2 %) (p3 %)) args))))))
([p1 p2 p3 & ps]
(let [ps (list* p1 p2 p3 ps)]
(fn epn
([] true)
([x] (every? #(% x) ps))
([x y] (every? #(and (% x) (% y)) ps))
([x y z] (every? #(and (% x) (% y) (% z)) ps))
([x y z & args] (boolean (and (epn x y z)
(every? #(every? % args) ps)))))))))
Copied out of Clojure 1.5 +
(when-before-clojure-1-5
(defmacro cond->
"Takes an expression and a set of test/form pairs. Threads expr (via ->)
through each form for which the corresponding test
expression is true. Note that, unlike cond branching, cond-> threading does
not short circuit after the first true test expression."
{:added "1.5"}
[expr & clauses]
(assert (even? (count clauses)))
(let [g (gensym)
pstep (fn [[test step]] `(if ~test (-> ~g ~step) ~g))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep (partition 2 clauses)))]
~g)))
(defmacro cond->>
"Takes an expression and a set of test/form pairs. Threads expr (via ->>)
through each form for which the corresponding test expression
is true. Note that, unlike cond branching, cond->> threading does not short circuit
after the first true test expression."
{:added "1.5"}
[expr & clauses]
(assert (even? (count clauses)))
(let [g (gensym)
pstep (fn [[test step]] `(if ~test (->> ~g ~step) ~g))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep (partition 2 clauses)))]
~g)))
(defmacro as->
"Binds name to expr, evaluates the first form in the lexical context
of that binding, then binds name to that result, repeating for each
successive form, returning the result of the last form."
{:added "1.5"}
[expr name & forms]
`(let [~name ~expr
~@(interleave (repeat name) forms)]
~name))
(defmacro some->
"When expr is not nil, threads it into the first form (via ->),
and when that result is not nil, through the next etc"
{:added "1.5"}
[expr & forms]
(let [g (gensym)
pstep (fn [step] `(if (nil? ~g) nil (-> ~g ~step)))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep forms))]
~g)))
(defmacro some->>
"When expr is not nil, threads it into the first form (via ->>),
and when that result is not nil, through the next etc"
{:added "1.5"}
[expr & forms]
(let [g (gensym)
pstep (fn [step] `(if (nil? ~g) nil (->> ~g ~step)))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep forms))]
~g))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn parse-cents
"Parses a string like '1.99', which represents a dollar value into a
Long representing the number of cents, in this case 199"
[s]
(some-> s
Double/parseDouble
(* 100)
long))
(defn cents->dollar-str [cents]
(format "%.2f" (/ cents 100.0)))
(defn single-destructuring-arg->form+name
"Turns any one binding arg (which may be a destructuring binding) into a vector
where the left elem is the arg with a possible :as added to it.
And the rght side is the symbol referring to the arg itself."
[arg-form]
(let [as-symbol (gensym 'symbol-for-destructured-arg)
snd-to-last-is-as? #(= :as (second (reverse %)))]
(cond (and (vector? arg-form) (snd-to-last-is-as? arg-form))
[arg-form (last arg-form)]
(vector? arg-form)
[(-> arg-form (conj :as) (conj as-symbol)) as-symbol]
(and (map? arg-form) (contains? arg-form :as))
[arg-form (:as arg-form)]
(map? arg-form)
[(assoc arg-form :as as-symbol) as-symbol]
:else
[arg-form arg-form])))
(defn rand-int* [min max]
(+ min (rand-int (- max min))))
(defn rand-long [n]
(long (rand-int n)))
(defn rand-long* [min max]
(+ min (rand-long (- max min))))
(defn random-sleep
"Sleep between 'min-millis' and 'max-millis' milliseconds"
[min-millis max-millis]
(let [range (- max-millis min-millis)
millis (+ min-millis (rand-int range))]
(safe-sleep millis)))
(defn read-string-securely [s]
(binding [*read-eval* false]
(read-string-safely s)))
(defmacro defn-kw
"A form of defn where the last arg is assumed to be keywords args, i.e.
(defn-kw f
\"optional doc-string here.\"
[a b & {:keys [c d]}]
(+ a b c d))
Has built-in assertion that you have not accidentally passed in keys that
were not listed in the key destructuring."
[& args]
{:arglists '([name arg-vec & body]
[name doc-string arg-vec & body])}
(let [[name doc-string arg-vec & body] (if (string? (second args))
args
(concat [(first args) nil] (rest args)))
_ (assert (map? (peek arg-vec))
(str "defn-kw expects the final element of the arg list, "
arg-vec
", to be a map destructuring."))
_ (assert (= '& (last (butlast arg-vec)))
(str "defn-kw expects the second to last element of the arg list, "
arg-vec
", to be an '&"))
keys-or-strs (cond (contains? (peek arg-vec) :keys) :keys
(contains? (peek arg-vec) :strs) :strs
:else (throw (AssertionError. "defn-kw expects the map destructuring to have a :keys or :strs key.")))
f (case keys-or-strs :keys keyword :strs str)
valid-key-set (set (map f (get (peek arg-vec) keys-or-strs)))
[kw-args-binding-with-as kw-args-map-sym] (single-destructuring-arg->form+name (peek arg-vec))
new-arg-vec (vec (concat (drop-last 2 arg-vec) ['& kw-args-binding-with-as]))]
`(defn ~(vary-meta name assoc :doc doc-string)
~new-arg-vec
(when-not (empty? ~kw-args-map-sym)
(let [actual-key-set# (set (keys ~kw-args-map-sym))
extra-keys# (set/difference actual-key-set# ~valid-key-set)]
(assert (empty? extra-keys#)
(str "Was passed these keyword args " extra-keys#
" which were not listed in the arg list " '~arg-vec))))
~@body)))
(defn apply-kw
"Like apply, but f take kw-args. The last arg to apply-kw is
a map of the kw-args to pass to f.
EXPECTS: {:pre [(map? (last args))]}"
[f & args]
(apply f (apply concat
(butlast args) (last args))))
(defn ->binary [result]
(if result 1 0))
(defn count-occurences [coll search-terms]
(->> coll
(map (fn [string-to-match]
(->> search-terms
(filter #(try
(re-find (re-pattern (str "(?i)" %)) string-to-match)
(catch Exception _
nil)))
count)))
(apply +)))
(defn- merge-meta!
"Destructively merge metadata from a source object into a target."
[source ^Var target]
(.setMeta target
(merge (meta source)
(select-keys (meta target) [:name :ns]))))
(defn- immigrate-one [sym ^Var v]
(merge-meta! v (if (.isBound v)
(intern *ns* sym (var-get v))
(intern *ns* sym))))
(defn immigrate
"Add all the public vars in a list of namespaces to the current
namespace.
Ex.
(immigrate ['criterium.core :except ['report 'warn]]
'print.foo
'gui.diff)"
[& namespaces]
(doseq [ns namespaces]
(require ns)
(if (sequential? ns)
(let [[ns _except_ var-exclusions] ns
var-exclusion-set (set var-exclusions)]
(doseq [[sym v] (ns-publics (find-ns ns))
:when (not (contains? var-exclusion-set sym))]
(immigrate-one sym v)))
(doseq [[sym v] (ns-publics (find-ns ns))]
(immigrate-one sym v)))))
(defmacro timebomb-comment
"Used to comment things out that we want to force ourselves to come
back to by a certain point in time in order to resolve later."
[timestamp-long & body]
`(if (<= ~timestamp-long (System/currentTimeMillis))
(throw (Exception. "Timebomb comment has passed its due date."))
(comment ~@body)))
(defn exception->map [^Throwable e]
(merge
{:class (str (class e))
:message (.getMessage e)
:stacktrace (mapv str (.getStackTrace e))}
(when (.getCause e)
{:cause (exception->map (.getCause e))})
(if (instance? SQLException e)
(if-let [ne (.getNextException ^SQLException e)]
{:next-exception (exception->map ne)}))))
(defn name-generator [prefix]
(let [cnt (atom -1)]
(fn [& args]
(swap! cnt inc)
(str prefix "-" @cnt))))
(defn trap-nil [x default]
(if-not (nil? x) x default))
(defn within? [max-difference x y]
(<= (Math/abs ^double (double (- x y)))
max-difference))
(defmacro when-resolvable [sym & body]
(try
(when (resolve sym)
(list* 'do body))
(catch ClassNotFoundException _#)))
(defmacro when-not-resolvable [sym & body]
(try
(when-not (resolve sym)
(list* 'do body))
(catch ClassNotFoundException _#)))
(defn approximately-equal?
"true if x is within epsilon of y. Default epsilon of 0.0001"
([x y]
(approximately-equal? x y 0.0001))
([x y epsilon]
(<= (Math/abs (- x y))
epsilon)))
| null | https://raw.githubusercontent.com/staples-sparx/kits/66ae99bce83e8fd1248cc5c0da1f23673f221073/src/clojure/kits/homeless.clj | clojure | they all deal with changing call semantics. -sd
wait-until, attempt-until, wrap-periodic, periodic-fn
with-timeout, call-with-timeout, etc.
progress reporting
| (ns ^{:doc "Unfortunate, uncategorized utility functions and macros.
Please help one of these poor souls find a home in a
focused, Single-Responsibility namespace instead :("}
kits.homeless
(:require
[clojure.pprint :as pprint]
[clojure.set :as set]
[clojure.string :as str])
(:import
clojure.lang.Var
java.net.MalformedURLException
java.sql.SQLException
(java.util.concurrent Future TimeoutException)))
(set! *warn-on-reflection* false)
(defmacro ignore-exceptions
"Evaluate body, but return nil if any exceptions are thrown."
[& body]
`(try
~@body
(catch Exception e# nil)))
(defmacro defn-cond
"Variant of defn that allows for multiple alternative
implementations in the body, one of which is used based on a
matching predicate, e.g.,
(defn-cond test [a b]
(re-find #\"^1.2\" (clojure-version))
(* a b)
:else
(+ a b))
would define `test` one way under Clojure 1.2, and differently on
other versions."
[name & fdecl]
(let [[m fdecl] (if (string? (first fdecl))
[{:doc (first fdecl)} (rest fdecl)]
[{} fdecl])
[args & clauses] fdecl
m (conj {:arglists (list 'list (list 'quote args))} m)]
(list 'def
(with-meta name m)
(list*
(reduce (fn [acc [pred body]]
(conj acc pred `(fn [~@args] ~body)))
['cond]
(partition 2 clauses))))))
(defmacro def-many-methods
"Creates multiple multimethods with different dispatch values, but the same implementation."
[name dispatch-values & body]
`(doseq [dispatch-val# ~dispatch-values]
(defmethod ~name dispatch-val# ~@body)))
(defn time-elapsed*
"Returns time elapsed in millis."
[f]
(let [start (System/nanoTime)]
(f)
(/ (double (- (System/nanoTime) start)) 1000000.0)))
(defmacro time-elapsed
"Returns time elapsed in millis."
[& body]
`(time-elapsed* (fn [] ~@body)))
(defn raise
"Raise a RuntimeException with specified message."
[& msg]
(throw (RuntimeException. ^String (apply str msg))))
(defn print-vals
"Print the specified args, and return the value of the last arg."
[& args]
(apply println
(cons "*** "
(map #(if (string? %) % (with-out-str (pprint/pprint %)))
args)))
(last args))
(defn parse-int
"Parse integer value from string `s`"
[s]
(ignore-exceptions (Integer/parseInt s)))
(defn parse-long
"Parse long integer value from string `s`"
[s]
(ignore-exceptions (Long/parseLong s)))
(defn parse-short
"Parse short integer value from string `s`"
[s]
(ignore-exceptions (Short/parseShort s)))
(defn parse-float
"Parse floating point value from string `s`"
[s]
(ignore-exceptions (Float/parseFloat s)))
(defn parse-double
"Parse double precision number from string `s`"
[s]
(ignore-exceptions (Double/parseDouble s)))
(defn read-string-safely [s] (when s (read-string s)))
(defn parse-number
  "Parse a number from string `s`, optionally passing a default value
  to return. Numbers pass through untouched; nil/\"\" yield `default`.
  NOTE(review): falls back to the reader, which may yield non-number
  values (e.g. symbols) for arbitrary strings -- callers should validate."
  ([s]
   (parse-number s nil))
  ([s default]
   (cond
     (number? s) s                       ;; already numeric -- return as-is
     (empty? s) default                  ;; nil or "" -> caller's default
     :else (read-string-safely s))))     ;; delegate to the (nil-safe) reader
(defn rand-int*
  "Return a random integer between min (inclusive) and max (exclusive)."
  [min max]
  (let [span (- max min)]
    (+ min (rand-int span))))
(defn- time-ns
"Current value of the most precise available system timer, in
nanoseconds. This is NOT a guaranteed absolute time like time-ms and
doesn't work the same across all JVM architectures. Use this for
measuring TIME INTERVALS ONLY. See javadoc for System.nanoTime() for
more details."
[]
(System/nanoTime))
(defn- time-us
"Number of micro-seconds since epoch. This is NOT a guaranteed
absolute time like time-ms and doesn't work the same across all
architectures. Use this for measuring TIME INTERVALS ONLY. See
javadoc for System.nanoTime() for more details."
[]
(long (/ (time-ns) 1000)))
(defn time-ms
"Number of milli-seconds since epoch."
[]
(System/currentTimeMillis))
(defn value-and-elapsed-time
"Return the value of `thunk` and time taken to evaluate in
microseconds."
[thunk]
(let [start (time-us)
value (thunk)]
[value (- (time-us) start)]))
(defmacro bind-value-and-elapsed-time
"Binds [value elapsed-time-us] from evaluating `expr` and invokes
`body`."
[bindings expr & body]
`(let [~bindings (value-and-elapsed-time (fn [] ~expr))]
~@body))
(defn-cond call-with-timeout
"Evaluate the function `f` but throw a RuntimeException if it takes
longer than `timeout` milliseconds."
[timeout-ms f]
(re-find #"^1.2" (clojure-version))
(let [^Future fut (future-call f)]
(try
(.get fut
timeout-ms
java.util.concurrent.TimeUnit/MILLISECONDS)
(catch TimeoutException ex
(future-cancel fut)
(throw (RuntimeException. "Evaluation timeout")))))
:else
(let [ex (RuntimeException. "Evaluation timeout")
fut (future-call f)
r (deref fut timeout-ms ex)]
(if (= ex r)
(do
(future-cancel fut)
(throw ex))
r)))
(defmacro with-timeout
"Evaluate `body` but throw a RuntimeException if it takes longer
than `timeout` milliseconds."
[timeout & body]
`(call-with-timeout ~timeout (bound-fn [] ~@body)))
(defmacro periodic-fn
"creates a fn that executes 'body' every 'period' calls"
[args [var period] & body]
`(let [call-count# (atom 0)]
(fn [~@args]
(swap! call-count# inc)
(when (zero? (mod @call-count# ~period))
(let [~var @call-count#]
~@body)))))
(defn wrap-periodic
"Returns a fn which wraps f, that executes `f` once every `period` calls."
[f period]
(let [count (atom 0)]
(fn [& args]
(swap! count inc)
(when (zero? (mod @count period))
(apply f args)))))
(defn safe-sleep
"Sleep for `millis` milliseconds."
[millis]
(try (Thread/sleep millis)
(catch InterruptedException e
(.interrupt ^Thread (Thread/currentThread)))))
(defn random-sleep
"Sleep between 'min-millis' and 'max-millis' milliseconds"
[min-millis max-millis]
(let [range (- max-millis min-millis)
millis (+ min-millis (rand-int range))]
(safe-sleep millis)))
TODO : can we move fns like these into a meaningful namespace ?
(defn wait-until [done-fn? & {:keys [ms-per-loop timeout]
:or {ms-per-loop 1000 timeout 10000}}]
(loop [elapsed (long 0)]
(when-not (or (>= elapsed timeout) (done-fn?))
(Thread/sleep ms-per-loop)
(recur (long (+ elapsed ms-per-loop))))))
(defn attempt-until [f done?-fn & {:keys [ms-per-loop timeout]
:or {ms-per-loop 1000
timeout 10000}}]
(loop [elapsed (long 0)
result (f)]
(if (or (done?-fn result)
(>= elapsed timeout))
result
(do
(Thread/sleep ms-per-loop)
(recur (long (+ elapsed ms-per-loop)) (f))))))
(defn boolean? [x]
(or (true? x) (false? x)))
(defn wrap-trapping-errors
"Wraps the fn `f` to trap any Throwable, and return `default` in
that case."
[f & [default]]
(fn [& args]
(try
(apply f args)
(catch Throwable e default))))
(defn pos-integer?
  "Return true if `x` is a positive integer value.
  Non-numeric inputs return false. (The previous implementation ran
  `pos?` before `integer?`, so a non-number such as a string threw a
  ClassCastException instead of returning false.)"
  [x]
  (and (integer? x) (pos? x)))
(defn zero-or-pos-integer?
  "Return true if `x` is zero or a positive integer value.
  Non-numeric inputs return false. (The previous implementation called
  `zero?` unconditionally, which throws on non-numbers.)"
  [x]
  (and (number? x)
       (or (zero? x)
           (and (integer? x) (pos? x)))))
(defn to-url
"Returns a java.net.URL instance or nil if URL failed to parse"
[^String s]
(when s
(try
(java.net.URL. s)
(catch MalformedURLException e
nil))))
(defn url? [s]
(boolean (to-url s)))
(def ^{:private true :const true} valid-ip-address-v4-re
#"^([01]?\d\d?|2[0-4]\d|25[0-5])\.([01]?\d\d?|2[0-4]\d|25[0-5])\.([01]?\d\d?|2[0-4]\d|25[0-5])\.([01]?\d\d?|2[0-4]\d|25[0-5])$")
(defn ip-address-v4?
"Test if the string `s` is a valid dotted IPv4 address."
[s]
(when s
(boolean
(re-matches valid-ip-address-v4-re s))))
(defn str->boolean
  "Boolean value for the specified string, per the following rules:
  \"true\" => true
  \"false\" => false
  \"foobar\" => true
  nil or \"\" => false"
  [^String s]
  (boolean
   (when (seq s)
     (not= "false" (.toLowerCase s)))))
(defn fprint
"Same as print but explicitly flushes *out*."
[& more]
(apply print more)
(flush))
(defn fprintln
"Same as println but explicitly flushes *out*."
[& more]
(apply println more)
(flush))
(def ^:dynamic *print-progress* true)
(defn make-default-progress-reporter
"A basic progress reporter function which can be used with
`with-progress-reporting`."
[{:keys [iters-per-row num-columns row-handler row-fmt no-summary]}]
(let [iters-per-row (or iters-per-row 1000)
num-columns (or num-columns 60)
iters-per-dot (int (/ iters-per-row num-columns))
row-handler (fn [i]
(if row-handler
(str " " (row-handler i))
""))
row-fmt (or row-fmt "%,8d rows%s")]
(fn [i final?]
(cond
final?
(when-not no-summary
(fprintln (format row-fmt i (row-handler i))))
(zero? (mod i iters-per-row))
(fprintln (format row-fmt i (row-handler i)))
(zero? (mod i iters-per-dot))
(fprint ".")))))
(defmacro with-progress-reporting
"Bind a `reportfn` function, and evaluate `body` wherein
calling (report!) will invoke the report function with the current
state of the iteration."
[opts & body]
`(let [iter# (atom 0)
opts# (or ~opts {})
reporter# (or (:reporter opts#)
(make-default-progress-reporter opts#))]
(letfn [(report# [& [fin?#]]
(when *print-progress*
(when-not fin?# (swap! iter# inc))
(reporter# @iter# (boolean fin?#))))]
(let [~'report! report#
val# (do ~@body)]
(report# true)
val#))))
(defn ipv4-dotted-to-integer
  "Convert a dotted notation IPv4 address string to a 32-bit integer.
  (ipv4-dotted-to-integer \"127.0.0.1\")
  => 2130706433"
  [dotted]
  (let [parse-octet (fn [part]
                      (or (parse-int part)
                          (raise (format "Invalid IP address: %s" dotted))))
        [b1 b2 b3 b4] (map parse-octet (str/split dotted #"\."))]
    ;; variadic bit-or is equivalent to the nested form
    (bit-or (bit-shift-left b1 24)
            (bit-shift-left b2 16)
            (bit-shift-left b3 8)
            b4)))
(defn ipv4-integer-to-dotted
  "Convert a 32-bit integer into a dotted notation IPv4 address string.
  (ipv4-integer-to-dotted (ipv4-dotted-to-integer \"127.0.0.1\"))
  => \"127.0.0.1\""
  [ip]
  (let [octet (fn [shift]
                (bit-and (bit-shift-right ip shift) 0xff))]
    (format "%d.%d.%d.%d"
            (octet 24) (octet 16) (octet 8) (octet 0))))
(defn uuid
"Return a UUID string."
[]
(str (java.util.UUID/randomUUID)))
(defmacro do-all-return-first
"Evaluate expr1 and exprs and return the value of expr1."
[expr1 & exprs]
`(let [v# ~expr1]
~@exprs
v#))
(defn parse-url
  "Parse the url spec into a map with keys {:scheme, :username, :password,
  :host, :path, :query}. Entries whose value would be nil/empty are
  omitted. Returns nil for blank input or if any step throws."
  [^String spec]
  (when (seq spec)
    (try
      (let [;; scheme defaults to "file" when no "://" separator is present
            [scheme comps] (if (re-find #".*://" spec)
                             (str/split spec #"://")
                             ["file" spec])
            ;; first "/"-segment is the authority; the rest becomes the path
            [raw-host raw-path] (let [[h & r] (str/split comps #"/")]
                                  [h (str "/" (str/join "/" r))])
            ;; split optional user-info from the host on "@"
            comps (str/split raw-host #"@")
            host (last comps)
            ;; a "user:pass@" prefix, when present, yields the credentials
            [username password] (if (< 1 (count comps))
                                  (str/split (first comps) #":")
                                  [nil nil])
            ;; everything after the first "?" is treated as the query string
            [path & [query]] (str/split raw-path #"\?")]
        (into {}
              ;; drop nil-valued entries so callers receive a sparse map
              (filter val
                      {:scheme scheme
                       :username (not-empty username)
                       :password (not-empty password)
                       :host (not-empty host)
                       :path (not-empty path)
                       :query (not-empty query)})))
      ;; any malformed spec is treated as unparseable -> nil
      (catch Exception ex
        nil))))
(defn print-error
"Println to *err*"
[& args]
(binding [*out* *err*]
(apply println args)))
(defn safe-sleep
"Sleep for `millis` milliseconds."
[millis]
(try (Thread/sleep millis)
(catch InterruptedException e
(.interrupt ^Thread (Thread/currentThread)))))
(defn timestamp? [n]
(and (integer? n)
(>= n 0)
(<= n Long/MAX_VALUE)))
(defn stacktrace->str [e]
(map #(str % "\n") (.getStackTrace ^Exception e)))
(defn incremental-name-with-prefix [prefix]
(let [cnt (atom -1)]
(fn []
(swap! cnt inc)
(str prefix "-" @cnt))))
(defn retrying-fn
"Take a no-arg function f and max num retries, returns a new no-arg
function that will call f again if calling f throws a Throwable."
[f {:keys [max-times retry-handler fail-handler swallow-exceptions?] :as options}]
(fn this
([]
(this max-times))
([retry-count]
(try
(f)
(catch Throwable t
(if (zero? retry-count)
(do
(when fail-handler
(fail-handler options t))
(when-not swallow-exceptions?
(throw t)))
(do
(when retry-handler
(retry-handler options t retry-count))
(this (dec retry-count)))))))))
(def valid-with-retries-arg-set #{:max-times
:retry-handler
:fail-handler
:swallow-exceptions?})
(defmacro with-retries
"options can either be a map, or a number (which represents max-times)"
[max-times & body]
(let [opts (if (map? max-times)
max-times
{:max-times max-times})
arg-diff (set/difference (set (keys opts)) valid-with-retries-arg-set)]
(assert (= #{} arg-diff) (str "Valid args: " (vec valid-with-retries-arg-set)))
`((retrying-fn
(fn [] ~@body) ~opts))))
(defn make-comparator
"Similar to clojure.core/comparator but optionally accepts a
`key-fn` arg which is applied to each arg of the `pred-fn`, e.g.,
((make-comparator < :key-fn :id) {:name \"foo\" :id 2} {:name \"bar\" :id 1})
=> 1"
[pred-fn & {:keys [key-fn]}]
(let [key-fn (or key-fn identity)]
(comparator
(fn [a b]
(pred-fn (key-fn a) (key-fn b))))))
(defn average
  "If nums is empty returns nil.
  This is optimized for speed to loop over the nums only once."
  [& nums]
  (loop [remaining nums
         sum 0
         cnt 0]
    (if (seq remaining)
      (recur (rest remaining) (+ sum (first remaining)) (inc cnt))
      (when (pos? cnt)
        (/ sum cnt)))))
(defn long? [x]
(instance? Long x))
(defn blank->nil [x]
(if (= x "")
nil
x))
(defn div
  "Double quotient of x over by-y, or nil when the divisor is zero."
  [x by-y]
  (if (zero? by-y)
    nil
    (double (/ x by-y))))
(defn ensure-long [x]
(if (integer? x)
(long x)
(Long/parseLong x)))
(defmacro when-before-clojure-1-3 [& body]
(when (and (= 1 (:major *clojure-version*))
(< (:minor *clojure-version*) 3))
`(do ~@body)))
(defmacro when-after-clojure-1-2 [& body]
(when (and (pos? (:major *clojure-version*))
(> (:minor *clojure-version*) 2))
`(do ~@body)))
(defmacro when-before-clojure-1-5 [& body]
(when (and (= 1 (:major *clojure-version*))
(< (:minor *clojure-version*) 5))
`(do ~@body)))
Copied out of Clojure 1.3 +
(when-before-clojure-1-3
(defn some-fn
"Takes a set of predicates and returns a function f that returns the first logical true value
returned by one of its composing predicates against any of its arguments, else it returns
logical false. Note that f is short-circuiting in that it will stop execution on the first
argument that triggers a logical true result against the original predicates."
{:added "1.3"}
([p]
(fn sp1
([] nil)
([x] (p x))
([x y] (or (p x) (p y)))
([x y z] (or (p x) (p y) (p z)))
([x y z & args] (or (sp1 x y z)
(some p args)))))
([p1 p2]
(fn sp2
([] nil)
([x] (or (p1 x) (p2 x)))
([x y] (or (p1 x) (p1 y) (p2 x) (p2 y)))
([x y z] (or (p1 x) (p1 y) (p1 z) (p2 x) (p2 y) (p2 z)))
([x y z & args] (or (sp2 x y z)
(some #(or (p1 %) (p2 %)) args)))))
([p1 p2 p3]
(fn sp3
([] nil)
([x] (or (p1 x) (p2 x) (p3 x)))
([x y] (or (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y)))
([x y z] (or (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y) (p1 z) (p2 z) (p3 z)))
([x y z & args] (or (sp3 x y z)
(some #(or (p1 %) (p2 %) (p3 %)) args)))))
([p1 p2 p3 & ps]
(let [ps (list* p1 p2 p3 ps)]
(fn spn
([] nil)
([x] (some #(% x) ps))
([x y] (some #(or (% x) (% y)) ps))
([x y z] (some #(or (% x) (% y) (% z)) ps))
([x y z & args] (or (spn x y z)
(some #(some % args) ps))))))))
(when-before-clojure-1-3
(defn every-pred
"Takes a set of predicates and returns a function f that returns true if all of its
composing predicates return a logical true value against all of its arguments, else it returns
false. Note that f is short-circuiting in that it will stop execution on the first
argument that triggers a logical false result against the original predicates."
([p]
(fn ep1
([] true)
([x] (boolean (p x)))
([x y] (boolean (and (p x) (p y))))
([x y z] (boolean (and (p x) (p y) (p z))))
([x y z & args] (boolean (and (ep1 x y z)
(every? p args))))))
([p1 p2]
(fn ep2
([] true)
([x] (boolean (and (p1 x) (p2 x))))
([x y] (boolean (and (p1 x) (p1 y) (p2 x) (p2 y))))
([x y z] (boolean (and (p1 x) (p1 y) (p1 z) (p2 x) (p2 y) (p2 z))))
([x y z & args] (boolean (and (ep2 x y z)
(every? #(and (p1 %) (p2 %)) args))))))
([p1 p2 p3]
(fn ep3
([] true)
([x] (boolean (and (p1 x) (p2 x) (p3 x))))
([x y] (boolean (and (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y))))
([x y z] (boolean (and (p1 x) (p2 x) (p3 x) (p1 y) (p2 y) (p3 y) (p1 z) (p2 z) (p3 z))))
([x y z & args] (boolean (and (ep3 x y z)
(every? #(and (p1 %) (p2 %) (p3 %)) args))))))
([p1 p2 p3 & ps]
(let [ps (list* p1 p2 p3 ps)]
(fn epn
([] true)
([x] (every? #(% x) ps))
([x y] (every? #(and (% x) (% y)) ps))
([x y z] (every? #(and (% x) (% y) (% z)) ps))
([x y z & args] (boolean (and (epn x y z)
(every? #(every? % args) ps)))))))))
Copied out of Clojure 1.5 +
(when-before-clojure-1-5
(defmacro cond->
"Takes an expression and a set of test/form pairs. Threads expr (via ->)
through each form for which the corresponding test
expression is true. Note that, unlike cond branching, cond-> threading does
not short circuit after the first true test expression."
{:added "1.5"}
[expr & clauses]
(assert (even? (count clauses)))
(let [g (gensym)
pstep (fn [[test step]] `(if ~test (-> ~g ~step) ~g))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep (partition 2 clauses)))]
~g)))
(defmacro cond->>
"Takes an expression and a set of test/form pairs. Threads expr (via ->>)
through each form for which the corresponding test expression
is true. Note that, unlike cond branching, cond->> threading does not short circuit
after the first true test expression."
{:added "1.5"}
[expr & clauses]
(assert (even? (count clauses)))
(let [g (gensym)
pstep (fn [[test step]] `(if ~test (->> ~g ~step) ~g))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep (partition 2 clauses)))]
~g)))
(defmacro as->
"Binds name to expr, evaluates the first form in the lexical context
of that binding, then binds name to that result, repeating for each
successive form, returning the result of the last form."
{:added "1.5"}
[expr name & forms]
`(let [~name ~expr
~@(interleave (repeat name) forms)]
~name))
(defmacro some->
"When expr is not nil, threads it into the first form (via ->),
and when that result is not nil, through the next etc"
{:added "1.5"}
[expr & forms]
(let [g (gensym)
pstep (fn [step] `(if (nil? ~g) nil (-> ~g ~step)))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep forms))]
~g)))
(defmacro some->>
"When expr is not nil, threads it into the first form (via ->>),
and when that result is not nil, through the next etc"
{:added "1.5"}
[expr & forms]
(let [g (gensym)
pstep (fn [step] `(if (nil? ~g) nil (->> ~g ~step)))]
`(let [~g ~expr
~@(interleave (repeat g) (map pstep forms))]
~g))))
(defn parse-cents
"Parses a string like '1.99', which represents a dollar value into a
Long representing the number of cents, in this case 199"
[s]
(some-> s
Double/parseDouble
(* 100)
long))
(defn cents->dollar-str [cents]
(format "%.2f" (/ cents 100.0)))
(defn single-destructuring-arg->form+name
"Turns any one binding arg (which may be a destructuring binding) into a vector
where the left elem is the arg with a possible :as added to it.
And the rght side is the symbol referring to the arg itself."
[arg-form]
(let [as-symbol (gensym 'symbol-for-destructured-arg)
snd-to-last-is-as? #(= :as (second (reverse %)))]
(cond (and (vector? arg-form) (snd-to-last-is-as? arg-form))
[arg-form (last arg-form)]
(vector? arg-form)
[(-> arg-form (conj :as) (conj as-symbol)) as-symbol]
(and (map? arg-form) (contains? arg-form :as))
[arg-form (:as arg-form)]
(map? arg-form)
[(assoc arg-form :as as-symbol) as-symbol]
:else
[arg-form arg-form])))
(defn rand-int* [min max]
(+ min (rand-int (- max min))))
(defn rand-long [n]
(long (rand-int n)))
(defn rand-long* [min max]
(+ min (rand-long (- max min))))
(defn random-sleep
"Sleep between 'min-millis' and 'max-millis' milliseconds"
[min-millis max-millis]
(let [range (- max-millis min-millis)
millis (+ min-millis (rand-int range))]
(safe-sleep millis)))
(defn read-string-securely [s]
(binding [*read-eval* false]
(read-string-safely s)))
(defmacro defn-kw
"A form of defn where the last arg is assumed to be keywords args, i.e.
(defn-kw f
\"optional doc-string here.\"
[a b & {:keys [c d]}]
(+ a b c d))
Has built-in assertion that you have not accidentally passed in keys that
were not listed in the key destructuring."
[& args]
{:arglists '([name arg-vec & body]
[name doc-string arg-vec & body])}
(let [[name doc-string arg-vec & body] (if (string? (second args))
args
(concat [(first args) nil] (rest args)))
_ (assert (map? (peek arg-vec))
(str "defn-kw expects the final element of the arg list, "
arg-vec
", to be a map destructuring."))
_ (assert (= '& (last (butlast arg-vec)))
(str "defn-kw expects the second to last element of the arg list, "
arg-vec
", to be an '&"))
keys-or-strs (cond (contains? (peek arg-vec) :keys) :keys
(contains? (peek arg-vec) :strs) :strs
:else (throw (AssertionError. "defn-kw expects the map destructuring to have a :keys or :strs key.")))
f (case keys-or-strs :keys keyword :strs str)
valid-key-set (set (map f (get (peek arg-vec) keys-or-strs)))
[kw-args-binding-with-as kw-args-map-sym] (single-destructuring-arg->form+name (peek arg-vec))
new-arg-vec (vec (concat (drop-last 2 arg-vec) ['& kw-args-binding-with-as]))]
`(defn ~(vary-meta name assoc :doc doc-string)
~new-arg-vec
(when-not (empty? ~kw-args-map-sym)
(let [actual-key-set# (set (keys ~kw-args-map-sym))
extra-keys# (set/difference actual-key-set# ~valid-key-set)]
(assert (empty? extra-keys#)
(str "Was passed these keyword args " extra-keys#
" which were not listed in the arg list " '~arg-vec))))
~@body)))
(defn apply-kw
"Like apply, but f take kw-args. The last arg to apply-kw is
a map of the kw-args to pass to f.
EXPECTS: {:pre [(map? (last args))]}"
[f & args]
(apply f (apply concat
(butlast args) (last args))))
(defn ->binary [result]
(if result 1 0))
(defn count-occurences [coll search-terms]
(->> coll
(map (fn [string-to-match]
(->> search-terms
(filter #(try
(re-find (re-pattern (str "(?i)" %)) string-to-match)
(catch Exception _
nil)))
count)))
(apply +)))
(defn- merge-meta!
"Destructively merge metadata from a source object into a target."
[source ^Var target]
(.setMeta target
(merge (meta source)
(select-keys (meta target) [:name :ns]))))
(defn- immigrate-one [sym ^Var v]
(merge-meta! v (if (.isBound v)
(intern *ns* sym (var-get v))
(intern *ns* sym))))
(defn immigrate
"Add all the public vars in a list of namespaces to the current
namespace.
Ex.
(immigrate ['criterium.core :except ['report 'warn]]
'print.foo
'gui.diff)"
[& namespaces]
(doseq [ns namespaces]
(require ns)
(if (sequential? ns)
(let [[ns _except_ var-exclusions] ns
var-exclusion-set (set var-exclusions)]
(doseq [[sym v] (ns-publics (find-ns ns))
:when (not (contains? var-exclusion-set sym))]
(immigrate-one sym v)))
(doseq [[sym v] (ns-publics (find-ns ns))]
(immigrate-one sym v)))))
(defmacro timebomb-comment
"Used to comment things out that we want to force ourselves to come
back to by a certain point in time in order to resolve later."
[timestamp-long & body]
`(if (<= ~timestamp-long (System/currentTimeMillis))
(throw (Exception. "Timebomb comment has passed its due date."))
(comment ~@body)))
(defn exception->map
  "Recursively convert a Throwable into a plain map of :class, :message,
  :stacktrace, the :cause chain and -- for SQLExceptions -- the
  :next-exception chain."
  [^Throwable e]
  (let [base {:class (str (class e))
              :message (.getMessage e)
              :stacktrace (mapv str (.getStackTrace e))}
        cause (when (.getCause e)
                {:cause (exception->map (.getCause e))})
        next-ex (when (instance? SQLException e)
                  (when-let [ne (.getNextException ^SQLException e)]
                    {:next-exception (exception->map ne)}))]
    (merge base cause next-ex)))
(defn name-generator [prefix]
(let [cnt (atom -1)]
(fn [& args]
(swap! cnt inc)
(str prefix "-" @cnt))))
(defn trap-nil
  "Return x unless it is nil, in which case return default.
  false is preserved (only nil triggers the default)."
  [x default]
  (if (nil? x)
    default
    x))
(defn within?
  "True when x and y differ by at most max-difference."
  [max-difference x y]
  (let [diff (Math/abs ^double (double (- x y)))]
    (<= diff max-difference)))
(defmacro when-resolvable [sym & body]
(try
(when (resolve sym)
(list* 'do body))
(catch ClassNotFoundException _#)))
(defmacro when-not-resolvable [sym & body]
(try
(when-not (resolve sym)
(list* 'do body))
(catch ClassNotFoundException _#)))
(defn approximately-equal?
  "true if x is within epsilon of y. Default epsilon of 0.0001"
  ([x y]
   (approximately-equal? x y 0.0001))
  ([x y epsilon]
   (let [delta (Math/abs (- x y))]
     (<= delta epsilon))))
|
9a53e3314b25764b58ad804fa7b2e51e2ea111e79047d70e1d714627c25e56c4 | xvw/planet | console.mli | (** Generic Console Binding. *)
open Bedrock
* { 2 API }
* { 3 Log / Print }
(** Log value on [console]. *)
val log : 'a -> unit
(** Print [string] on [console]. *)
val print : string -> unit
(** Clear [console]. *)
val clear : unit -> unit
(** Log info on [console]. *)
val info : 'a -> unit
(** Log error on [console]. *)
val error : 'a -> unit
(** Log warning on [console]. *)
val warning : 'a -> unit
* Display a JavaScript object whose properties should be output .
val dir : 'a -> unit
(** Outputs a stack trace. *)
val trace : unit -> unit
(** Display a table on the [console]. *)
val table : ?columns:string list -> 'a -> unit
(** {3 Counters} *)
(** If supplied, [Console.count ~label ()] outputs the number of times it has
    been called with that label. If omitted, [count] behaves as though it was
    called with the ["default"] label. *)
val count : ?label:string -> unit -> unit
(** If supplied, [count_reset] resets the count for that [label] to 0. If
omitted, [count_reset] resets the ["default"] counter to 0. *)
val count_reset : ?label:string -> unit -> unit
(** {3 Timers} Timers are used to calculate the procedure execution time.
    - We instantiate a timer with [Console.time name], where [name] is a
      unique timer ID;
    - [Console.time_log name] displays the time elapsed since calling
      [Console.time name];
    - [Console.time_end name] stops the timer, referenced by its name, in
      progress. *)
val time : string -> unit
val time_log : string -> 'a -> unit
val time_end : string -> unit
(** [Console.timetrack name actions] is a shortcut, for example:
    {[ let () =
         Console.timetrack "answer time"
           [ (fun logger -> logger (); Console.print "Hello")
           ; (fun logger -> logger (); Console.print "World") ] ]}
    Where [logger] is a [Console.time_log]. This shortcut avoids having to
    instantiate and close a timer explicitly. *)
val timetrack : string -> (('a -> unit) -> unit) list -> unit
* { 3 Groups } You can use nested groups to help organize your output by
visually combining related material . To create a new nested block , call
[ Console.group ( ) ] .
To exit the current group , simply call [ Console.group_end ( ) ] .
visually combining related material. To create a new nested block, call
[Console.group ()].
To exit the current group, simply call [Console.group_end ()]. *)
(** Creates a new inline group in the [console]. *)
val group : ?label:'a -> unit -> unit
(** Exits the current inline group in the [console]. *)
val group_end : unit -> unit
(** render errors *)
val render_error : Error.t list -> unit
(** Generic printer *)
val dump_errors : 'a -> Error.t list -> unit
| null | https://raw.githubusercontent.com/xvw/planet/c2a77ea66f61cc76df78b9c2ad06d114795f3053/src/facade/console.mli | ocaml | * Generic Console Binding.
* Log value on [console].
* Print [string] on [console].
* Clear [console].
* Log info on [console].
* Log error on [console].
* Log warning on [console].
* Outputs a stack trace.
* Display a table on the [console].
* {3 Counters}
* If supplied, [count_reset] resets the count for that [label] to 0. If
omitted, [count_reset] resets the ["default"] counter to 0.
* Creates a new inline group in the [console].
* Exits the current inline group in the [console].
* render errors
* Generic printer |
open Bedrock
* { 2 API }
* { 3 Log / Print }
val log : 'a -> unit
val print : string -> unit
val clear : unit -> unit
val info : 'a -> unit
val error : 'a -> unit
val warning : 'a -> unit
* Display a JavaScript object whose properties should be output .
val dir : 'a -> unit
val trace : unit -> unit
val table : ?columns:string list -> 'a -> unit
* If supplied , [ Console.count ~label ( ) ] outputs the number of times it has
been called with that label . If omitted , [ count ] behaves as though it was
called with the [ " default " ] label .
been called with that label. If omitted, [count] behaves as though it was
called with the ["default"] label. *)
val count : ?label:string -> unit -> unit
val count_reset : ?label:string -> unit -> unit
* { 3 Timers } Timers are used to calculate the procedure execution time . - We
instantiate a timer with : [ Console.time name ] , where [ name ] is a unique
timer ID ; - [ Console.timer_log name ] displays the time elapsed since calling
[ Console.timer name ] ; - [ Console.time_stop name ] stops the timer , referenced
by its name , in progress .
instantiate a timer with: [Console.time name], where [name] is a unique
timer ID; - [Console.timer_log name] displays the time elapsed since calling
[Console.timer name]; - [Console.time_stop name] stops the timer, referenced
by its name, in progress. *)
val time : string -> unit
val time_log : string -> 'a -> unit
val time_end : string -> unit
* [ Console.timetrack name actions ] is a shortcut , for example : { [ let ( ) =
Console.timetrack " answer time " [ ( fun logger - > logger ( ) ; Console.print
" Hello " ) ; ( fun logger - > logger ( ) ; Console.print " World " ) ] ; ; ] } Where
[ logger ] is a [ Console.time_log ] . This shortcut avoid the to instanciate and
closed a timer .
Console.timetrack "answer time" [ (fun logger -> logger (); Console.print
"Hello") ; (fun logger -> logger (); Console.print "World") ] ;; ]} Where
[logger] is a [Console.time_log]. This shortcut avoid the to instanciate and
closed a timer. *)
val timetrack : string -> (('a -> unit) -> unit) list -> unit
* { 3 Groups } You can use nested groups to help organize your output by
visually combining related material . To create a new nested block , call
[ Console.group ( ) ] .
To exit the current group , simply call [ Console.group_end ( ) ] .
visually combining related material. To create a new nested block, call
[Console.group ()].
To exit the current group, simply call [Console.group_end ()]. *)
val group : ?label:'a -> unit -> unit
val group_end : unit -> unit
val render_error : Error.t list -> unit
val dump_errors : 'a -> Error.t list -> unit
|
8d4356ddca451f162ee0652f5f52c9d8f80f37482c8002e45329b7f78918f1fd | racket/gui | main-extracts.rkt | #lang racket/base
(require scribble/extract)
(provide-extracted (lib "framework/main.rkt"))
| null | https://raw.githubusercontent.com/racket/gui/d1fef7a43a482c0fdd5672be9a6e713f16d8be5c/gui-doc/scribblings/framework/main-extracts.rkt | racket | #lang racket/base
(require scribble/extract)
(provide-extracted (lib "framework/main.rkt"))
| |
5bd570c8975f727c24296d06d20df965b60a44db240fb71b5fef0ca4c449c96d | bazqux/bazqux-urweb | Basic.hs | # LANGUAGE BangPatterns , OverloadedStrings , RecordWildCards #
-- |
-- Module: Network.Riak.Basic
Copyright : ( c ) 2011 MailRank , Inc.
License : Apache
Maintainer : < > , < >
-- Stability: experimental
-- Portability: portable
--
Basic support for the Riak decentralized data store .
--
-- When storing and retrieving data, the functions in this module do
-- not perform any encoding or decoding of data, nor do they resolve
-- conflicts.
module Network.Riak.Basic
(
-- * Client configuration and identification
ClientID
, Client(..)
, defaultClient
-- * Connection management
, Connection(..)
, connect
, disconnect
, ping
, getClientID
, setClientID
, getServerInfo
-- * Data management
, Quorum(..)
, get
, put
, put_
, delete
-- * Metadata
, listBuckets
, foldKeys
, getBucket
, setBucket
-- * Map/reduce
, mapReduce
) where
import Data.Maybe (fromMaybe)
import Network.Riak.Connection.Internal
import Network.Riak.Escape (unescape)
import Network.Riak.Protocol.BucketProps
import Network.Riak.Protocol.Content
import Network.Riak.Protocol.ListKeysResponse
import Network.Riak.Protocol.MapReduce as MapReduce
import Network.Riak.Protocol.ServerInfo
import Network.Riak.Types.Internal hiding (MessageTag(..))
import qualified Data.Foldable as F
import qualified Data.Sequence as Seq
import qualified Network.Riak.Request as Req
import qualified Network.Riak.Response as Resp
import qualified Network.Riak.Types.Internal as T
-- | Check whether the connection to the server is alive.
ping :: Connection -> IO ()
ping c = exchange_ c Req.ping
-- | Ask the server which client ID this connection is using.
getClientID :: Connection -> IO ClientID
getClientID conn = do
    resp <- exchange conn Req.getClientID
    return (Resp.getClientID resp)
-- | Retrieve information about the server.
getServerInfo :: Connection -> IO ServerInfo
getServerInfo c = exchange c Req.getServerInfo
-- | Retrieve a value.  The reply may contain multiple conflicting
-- siblings; choosing among them is the caller's responsibility.
get :: Connection -> T.Bucket -> T.Key -> R
    -> IO (Maybe (Seq.Seq Content, VClock))
get conn bucket key r = do
    mresp <- exchangeMaybe conn (Req.get bucket key r)
    return (Resp.get mresp)
-- | Store a single value.  The reply may contain multiple conflicting
-- siblings.  Choosing among them, and storing a new value, is your
-- responsibility.
--
-- You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
-- that the given bucket+key combination does not already exist.  If
-- you omit a 'T.VClock' but the bucket+key /does/ exist, your value
-- will not be stored.
put :: Connection -> T.Bucket -> T.Key -> Maybe T.VClock
    -> Content -> W -> DW
    -> IO (Seq.Seq Content, VClock)
put conn bucket key mvclock cont w dw = do
    -- Final argument True contrasts with 'put_' (which passes False);
    -- here a response body is decoded and returned to the caller.
    resp <- exchange conn (Req.put bucket key mvclock cont w dw True)
    return (Resp.put resp)
-- | Store a single value, without the possibility of conflict
-- resolution.
--
-- You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
-- that the given bucket+key combination does not already exist.  If
-- you omit a 'T.VClock' but the bucket+key /does/ exist, your value
-- will not be stored, and you will not be notified.
put_ :: Connection -> T.Bucket -> T.Key -> Maybe T.VClock
     -> Content -> W -> DW
     -> IO ()
put_ conn bucket key mvclock cont w dw =
    -- Final argument False contrasts with 'put', which passes True
    -- and returns the stored content.
    exchange_ conn (Req.put bucket key mvclock cont w dw False)
-- | Delete a value.
delete :: Connection -> T.Bucket -> T.Key -> RW -> IO ()
delete conn bucket key rw = exchange_ conn (Req.delete bucket key rw)
-- | List the buckets in the cluster.
--
-- /Note/: this operation is expensive.  Do not use it in production.
listBuckets :: Connection -> IO (Seq.Seq T.Bucket)
listBuckets conn = fmap Resp.listBuckets (exchange conn Req.listBuckets)
-- | Fold over the keys in a bucket.
--
-- /Note/: this operation is expensive. Do not use it in production.
foldKeys :: Connection -> T.Bucket -> (a -> Key -> IO a) -> a -> IO a
foldKeys conn bucket f z0 = do
  sendRequest conn $ Req.listKeys bucket
  -- 'g' unescapes each raw key before handing it to the caller's fold.
  let g z = f z . unescape
      -- The server streams keys in batches: keep receiving
      -- 'ListKeysResponse' messages and folding over each batch until
      -- a response arrives whose 'done' field is set to True.
      loop z = do
        ListKeysResponse{..} <- recvResponse conn
        z1 <- F.foldlM g z keys
        if fromMaybe False done
          then return z1
          else loop z1
  loop z0
-- | Retrieve the properties of a bucket.
getBucket :: Connection -> T.Bucket -> IO BucketProps
getBucket conn bucket = do
    resp <- exchange conn (Req.getBucket bucket)
    return (Resp.getBucket resp)
-- | Store new properties for a bucket.
setBucket :: Connection -> T.Bucket -> BucketProps -> IO ()
setBucket conn bucket props = exchange_ conn (Req.setBucket bucket props)
-- | Run a 'MapReduce' job.  The streamed responses are consumed via a
-- strict left fold.
mapReduce :: Connection -> Job -> (a -> MapReduce -> a) -> a -> IO a
mapReduce conn job f acc0 = do
    first <- exchange conn (Req.mapReduce job)
    go acc0 first
  where
    -- Fold strictly over each response; keep receiving until one
    -- arrives whose 'done' field is True.
    go acc mr = do
      let !acc' = f acc mr
      if fromMaybe False (MapReduce.done mr)
        then return acc'
        else recvResponse conn >>= go acc'
| null | https://raw.githubusercontent.com/bazqux/bazqux-urweb/bf2d5a65b5b286348c131e91b6e57df9e8045c3f/crawler/Lib/riak-0.7.2.0/src/Network/Riak/Basic.hs | haskell | |
Module: Network.Riak.Basic
Stability: experimental
Portability: portable
When storing and retrieving data, the functions in this module do
not perform any encoding or decoding of data, nor do they resolve
conflicts.
* Client configuration and identification
* Connection management
* Data management
* Metadata
* Map/reduce
| Check to see if the connection to the server is alive.
| Find out from the server what client ID this connection is using.
| Retrieve information about the server.
| Retrieve a value. This may return multiple conflicting siblings.
Choosing among them is your responsibility.
| Store a single value. This may return multiple conflicting
siblings. Choosing among them, and storing a new value, is your
responsibility.
You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
will not be stored.
| Store a single value, without the possibility of conflict
resolution.
You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
will not be stored, and you will not be notified.
| Delete a value.
List the buckets in the cluster.
/Note/: this operation is expensive. Do not use it in production.
/Note/: this operation is expensive. Do not use it in production.
| Retrieve the properties of a bucket.
| Store new properties for a bucket.
| Run a 'MapReduce' job. Its result is consumed via a strict left
fold. | # LANGUAGE BangPatterns , OverloadedStrings , RecordWildCards #
Copyright : ( c ) 2011 MailRank , Inc.
License : Apache
Maintainer : < > , < >
Basic support for the Riak decentralized data store .
module Network.Riak.Basic
(
ClientID
, Client(..)
, defaultClient
, Connection(..)
, connect
, disconnect
, ping
, getClientID
, setClientID
, getServerInfo
, Quorum(..)
, get
, put
, put_
, delete
, listBuckets
, foldKeys
, getBucket
, setBucket
, mapReduce
) where
import Data.Maybe (fromMaybe)
import Network.Riak.Connection.Internal
import Network.Riak.Escape (unescape)
import Network.Riak.Protocol.BucketProps
import Network.Riak.Protocol.Content
import Network.Riak.Protocol.ListKeysResponse
import Network.Riak.Protocol.MapReduce as MapReduce
import Network.Riak.Protocol.ServerInfo
import Network.Riak.Types.Internal hiding (MessageTag(..))
import qualified Data.Foldable as F
import qualified Data.Sequence as Seq
import qualified Network.Riak.Request as Req
import qualified Network.Riak.Response as Resp
import qualified Network.Riak.Types.Internal as T
ping :: Connection -> IO ()
ping conn = exchange_ conn Req.ping
getClientID :: Connection -> IO ClientID
getClientID conn = Resp.getClientID <$> exchange conn Req.getClientID
getServerInfo :: Connection -> IO ServerInfo
getServerInfo conn = exchange conn Req.getServerInfo
get :: Connection -> T.Bucket -> T.Key -> R
-> IO (Maybe (Seq.Seq Content, VClock))
get conn bucket key r = Resp.get <$> exchangeMaybe conn (Req.get bucket key r)
that the given bucket+key combination does not already exist . If
you omit a ' T.VClock ' but the bucket+key /does/ exist , your value
put :: Connection -> T.Bucket -> T.Key -> Maybe T.VClock
-> Content -> W -> DW
-> IO (Seq.Seq Content, VClock)
put conn bucket key mvclock cont w dw =
Resp.put <$> exchange conn (Req.put bucket key mvclock cont w dw True)
that the given bucket+key combination does not already exist . If
you omit a ' T.VClock ' but the bucket+key /does/ exist , your value
put_ :: Connection -> T.Bucket -> T.Key -> Maybe T.VClock
-> Content -> W -> DW
-> IO ()
put_ conn bucket key mvclock cont w dw =
exchange_ conn (Req.put bucket key mvclock cont w dw False)
delete :: Connection -> T.Bucket -> T.Key -> RW -> IO ()
delete conn bucket key rw = exchange_ conn $ Req.delete bucket key rw
listBuckets :: Connection -> IO (Seq.Seq T.Bucket)
listBuckets conn = Resp.listBuckets <$> exchange conn Req.listBuckets
Fold over the keys in a bucket .
foldKeys :: Connection -> T.Bucket -> (a -> Key -> IO a) -> a -> IO a
foldKeys conn bucket f z0 = do
sendRequest conn $ Req.listKeys bucket
let g z = f z . unescape
loop z = do
ListKeysResponse{..} <- recvResponse conn
z1 <- F.foldlM g z keys
if fromMaybe False done
then return z1
else loop z1
loop z0
getBucket :: Connection -> T.Bucket -> IO BucketProps
getBucket conn bucket = Resp.getBucket <$> exchange conn (Req.getBucket bucket)
setBucket :: Connection -> T.Bucket -> BucketProps -> IO ()
setBucket conn bucket props = exchange_ conn $ Req.setBucket bucket props
mapReduce :: Connection -> Job -> (a -> MapReduce -> a) -> a -> IO a
mapReduce conn job f z0 = loop z0 =<< (exchange conn . Req.mapReduce $ job)
where
loop z mr = do
let !z' = f z mr
if fromMaybe False . MapReduce.done $ mr
then return z'
else loop z' =<< recvResponse conn
|
06d3ed6329a3da9a0227950b6aeefbb1145b4d4f0f9efc1d937ff8694a1cd365 | haskell-CI/haskell-ci | Config.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeApplications #
module HaskellCI.Config where
import HaskellCI.Prelude
import qualified Data.ByteString as BS
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Distribution.CabalSpecVersion as C
import qualified Distribution.Compat.CharParsing as C
import qualified Distribution.Compat.Newtype as C
import qualified Distribution.FieldGrammar as C
import qualified Distribution.Fields as C
import qualified Distribution.Parsec as C
import qualified Distribution.Pretty as C
import qualified Distribution.Types.PackageName as C
import qualified Distribution.Types.Version as C
import qualified Distribution.Types.VersionRange as C
import qualified Text.PrettyPrint as PP
import HaskellCI.Config.Components
import HaskellCI.Config.ConstraintSet
import HaskellCI.Config.CopyFields
import HaskellCI.Config.Docspec
import HaskellCI.Config.Doctest
import HaskellCI.Config.Empty
import HaskellCI.Config.Folds
import HaskellCI.Config.HLint
import HaskellCI.Config.Installed
import HaskellCI.Config.Jobs
import HaskellCI.Config.PackageScope
import HaskellCI.Config.Ubuntu
import HaskellCI.HeadHackage
import HaskellCI.Newtypes
import HaskellCI.OptionsGrammar
import HaskellCI.ParsecUtils
import HaskellCI.TestedWith
-------------------------------------------------------------------------------
-- Config
-------------------------------------------------------------------------------
-- TODO: split other blocks like ...
-- | Complete haskell-ci configuration: one field per supported option.
-- Values are produced by 'configGrammar' (which also documents each
-- option's config-file/CLI name and help text) and post-processed by
-- 'parseConfigFile'.
data Config = Config
    { cfgCabalInstallVersion :: Maybe Version
    , cfgJobs :: Maybe Jobs
    , cfgUbuntu :: !Ubuntu
    , cfgTestedWith :: !TestedWithJobs
    , cfgEnabledJobs :: !VersionRange
    , cfgCopyFields :: !CopyFields
    , cfgLocalGhcOptions :: [String]
    , cfgSubmodules :: !Bool
    , cfgCache :: !Bool
    , cfgInstallDeps :: !Bool
    , cfgInstalled :: [Installed]
      -- Which GHC version ranges run which build steps.
    , cfgTests :: !VersionRange
    , cfgRunTests :: !VersionRange
    , cfgBenchmarks :: !VersionRange
    , cfgHaddock :: !VersionRange
    , cfgHaddockComponents :: !Components
    , cfgNoTestsNoBench :: !VersionRange
    , cfgUnconstrainted :: !VersionRange
    , cfgHeadHackage :: !VersionRange
    , cfgHeadHackageOverride :: !Bool
    , cfgGhcjsTests :: !Bool
    , cfgGhcjsTools :: ![C.PackageName]
    , cfgTestOutputDirect :: !Bool
    , cfgCheck :: !Bool
      -- CI/VCS integration and notifications.
    , cfgOnlyBranches :: [String]
    , cfgIrcChannels :: [String]
    , cfgIrcNickname :: Maybe String
    , cfgIrcPassword :: Maybe String
    , cfgIrcIfInOriginRepo :: Bool
    , cfgEmailNotifications :: Bool
    , cfgProjectName :: Maybe String
    , cfgFolds :: S.Set Fold
    , cfgGhcHead :: !Bool
    , cfgPostgres :: !Bool
    , cfgGoogleChrome :: !Bool
    , cfgEnv :: M.Map Version String
    , cfgAllowFailures :: !VersionRange
    , cfgLastInSeries :: !Bool
    , cfgLinuxJobs :: !VersionRange
    , cfgMacosJobs :: !VersionRange
    , cfgGhcupCabal :: !Bool
    , cfgGhcupJobs :: !VersionRange
    , cfgGhcupVersion :: !Version
    , cfgApt :: S.Set String
    , cfgTravisPatches :: [FilePath]
    , cfgGitHubPatches :: [FilePath]
    , cfgInsertVersion :: !Bool
    , cfgErrorMissingMethods :: !PackageScope
      -- Nested tool configurations and raw passthrough sections.
    , cfgDoctest :: !DoctestConfig
    , cfgDocspec :: !DocspecConfig
    , cfgHLint :: !HLintConfig
    , cfgConstraintSets :: [ConstraintSet]
    , cfgRawProject :: [C.PrettyField ()]
    , cfgRawTravis :: !String
    , cfgGitHubActionName :: !(Maybe String)
    , cfgTimeoutMinutes :: !Natural
    }
  deriving (Generic)
-- | Default for the @cabal-install-version@ option.
defaultCabalInstallVersion :: Maybe Version
defaultCabalInstallVersion = Just $ C.mkVersion [3, 9]
-- | Default for the @ghcup-version@ option.
defaultGhcupVersion :: Version
defaultGhcupVersion = C.mkVersion [0, 1, 18, 0]
-- | Configuration with every option at its grammar default.  Calls
-- 'error' if 'configGrammar' ever reports required fields.
emptyConfig :: Config
emptyConfig =
    either missing id (runEG configGrammar)
  where
    missing xs = error $ "Required fields: " ++ show xs
-------------------------------------------------------------------------------
-- Grammar
-------------------------------------------------------------------------------
-- | Grammar describing every configuration option.  A single definition
-- interpreted in two ways (via the 'OptionsGrammar' abstraction): as a
-- @cabal.haskell-ci@ file parser and as a command-line option parser.
-- The stanza order must match the field order of 'Config'.
configGrammar
    :: ( OptionsGrammar c g, Applicative (g Config)
       , c (Identity HLintJob)
       , c (Identity PackageScope)
       , c (Identity TestedWithJobs)
       , c (Identity Ubuntu)
       , c (Identity Jobs)
       , c (Identity CopyFields)
       , c (Identity Version)
       , c (Identity Natural)
       , c (Identity Components)
       , c Env, c Folds, c CopyFields, c HeadVersion
       , c (C.List C.FSep (Identity Installed) Installed)
       , Applicative (g DoctestConfig)
       , Applicative (g DocspecConfig)
       , Applicative (g HLintConfig))
    => g Config Config
configGrammar = Config
    <$> C.optionalFieldDefAla "cabal-install-version" HeadVersion (field @"cfgCabalInstallVersion") defaultCabalInstallVersion
        ^^^ metahelp "VERSION" "cabal-install version for all jobs"
    <*> C.optionalField "jobs" (field @"cfgJobs")
        ^^^ metahelp "JOBS" "jobs (N:M - cabal:ghc)"
    <*> C.optionalFieldDef "distribution" (field @"cfgUbuntu") Bionic
        ^^^ metahelp "DIST" (concat
            [ "distribution version ("
            , intercalate ", " $ map showUbuntu [minBound..maxBound]
            , ")"
            ])
    <*> C.optionalFieldDef "jobs-selection" (field @"cfgTestedWith") TestedWithUniform
        ^^^ metahelp "uniform|any" "Jobs selection across packages"
    <*> rangeField "enabled" (field @"cfgEnabledJobs") anyVersion
        ^^^ metahelp "RANGE" "Restrict jobs selection further from per package tested-with"
    <*> C.optionalFieldDef "copy-fields" (field @"cfgCopyFields") CopyFieldsSome
        ^^^ metahelp "none|some|all" "Copy ? fields from cabal.project fields"
    <*> C.monoidalFieldAla "local-ghc-options" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgLocalGhcOptions")
        ^^^ metahelp "OPTS" "--ghc-options for local packages"
    <*> C.booleanFieldDef "submodules" (field @"cfgSubmodules") False
        ^^^ help "Clone submodules, i.e. recursively"
    <*> C.booleanFieldDef "cache" (field @"cfgCache") True
        ^^^ help "Disable caching"
    <*> C.booleanFieldDef "install-dependencies" (field @"cfgInstallDeps") True
        ^^^ help "Skip separate dependency installation step"
    <*> C.monoidalFieldAla "installed" (C.alaList C.FSep) (field @"cfgInstalled")
        ^^^ metahelp "+/-PKG" "Specify 'constraint: ... installed' packages"
    <*> rangeField "tests" (field @"cfgTests") anyVersion
        ^^^ metahelp "RANGE" "Build tests with"
    <*> rangeField "run-tests" (field @"cfgRunTests") anyVersion
        ^^^ metahelp "RANGE" "Run tests with (note: only built tests are run)"
    <*> rangeField "benchmarks" (field @"cfgBenchmarks") anyVersion
        ^^^ metahelp "RANGE" "Build benchmarks"
    <*> rangeField "haddock" (field @"cfgHaddock") anyVersion
        ^^^ metahelp "RANGE" "Haddock step"
    <*> C.optionalFieldDef "haddock-components" (field @"cfgHaddockComponents") ComponentsAll
        ^^^ metahelp "all|libs" "Haddock components"
    <*> rangeField "no-tests-no-benchmarks" (field @"cfgNoTestsNoBench") anyVersion
        ^^^ metahelp "RANGE" "Build without tests and benchmarks"
    <*> rangeField "unconstrained" (field @"cfgUnconstrainted") anyVersion
        ^^^ metahelp "RANGE" "Make unconstrained build"
    <*> rangeField "head-hackage" (field @"cfgHeadHackage") defaultHeadHackage
        ^^^ metahelp "RANGE" "Use head.hackage repository. Also marks as allow-failures"
    <*> C.booleanFieldDef "head-hackage-override" (field @"cfgHeadHackageOverride") True
        ^^^ help "Use :override for head.hackage repository"
    <*> C.booleanFieldDef "ghcjs-tests" (field @"cfgGhcjsTests") False
        ^^^ help "Run tests with GHCJS (experimental, relies on cabal-plan finding test-suites)"
    <*> C.monoidalFieldAla "ghcjs-tools" (C.alaList C.FSep) (field @"cfgGhcjsTools")
        ^^^ metahelp "TOOL" "Additional host tools to install with GHCJS"
    <*> C.booleanFieldDef "test-output-direct" (field @"cfgTestOutputDirect") True
        ^^^ help "Use --test-show-details=direct, may cause problems with build-type: Custom"
    <*> C.booleanFieldDef "cabal-check" (field @"cfgCheck") True
        ^^^ help "Disable cabal check run"
    <*> C.monoidalFieldAla "branches" (C.alaList' C.FSep C.Token') (field @"cfgOnlyBranches")
        ^^^ metahelp "BRANCH" "Enable builds only for specific branches"
    <*> C.monoidalFieldAla "irc-channels" (C.alaList' C.FSep C.Token') (field @"cfgIrcChannels")
        ^^^ metahelp "IRC" "Enable IRC notifications to given channel (e.g. 'irc.libera.chat#haskell-lens')"
    <*> C.freeTextField "irc-nickname" (field @"cfgIrcNickname")
        ^^^ metahelp "NICKNAME" "Nickname with which to authenticate to an IRC server. Only used if `irc-channels` are set."
    <*> C.freeTextField "irc-password" (field @"cfgIrcPassword")
        ^^^ metahelp "PASSWORD" "Password with which to authenticate to an IRC server. Only used if `irc-channels` are set."
    <*> C.booleanFieldDef "irc-if-in-origin-repo" (field @"cfgIrcIfInOriginRepo") False
        ^^^ help "Only send IRC notifications if run from the original remote (GitHub Actions only)"
    <*> C.booleanFieldDef "email-notifications" (field @"cfgEmailNotifications") True
        ^^^ help "Disable email notifications"
    <*> C.optionalFieldAla "project-name" C.Token' (field @"cfgProjectName")
        ^^^ metahelp "NAME" "Project name (used for IRC notifications), defaults to package name or name of first package listed in cabal.project file"
    <*> C.monoidalFieldAla "folds" Folds (field @"cfgFolds")
        ^^^ metahelp "FOLD" "Build steps to fold"
    <*> C.booleanFieldDef "ghc-head" (field @"cfgGhcHead") False
        ^^^ help "Add ghc-head job"
    <*> C.booleanFieldDef "postgresql" (field @"cfgPostgres") False
        ^^^ help "Add postgresql service"
    <*> C.booleanFieldDef "google-chrome" (field @"cfgGoogleChrome") False
        ^^^ help "Add google-chrome service"
    <*> C.monoidalFieldAla "env" Env (field @"cfgEnv")
        ^^^ metahelp "ENV" "Environment variables per job (e.g. `8.0.2:HADDOCK=false`)"
    <*> C.optionalFieldDefAla "allow-failures" Range (field @"cfgAllowFailures") noVersion
        ^^^ metahelp "JOB" "Allow failures of particular GHC version"
    <*> C.booleanFieldDef "last-in-series" (field @"cfgLastInSeries") False
        ^^^ help "[Discouraged] Assume there are only GHCs last in major series: 8.2.* will match only 8.2.2"
    <*> rangeField "linux-jobs" (field @"cfgLinuxJobs") anyVersion
        ^^^ metahelp "RANGE" "Jobs to build on Linux"
    <*> rangeField "macos-jobs" (field @"cfgMacosJobs") noVersion
        ^^^ metahelp "RANGE" "Jobs to additionally build with OSX"
    <*> C.booleanFieldDef "ghcup-cabal" (field @"cfgGhcupCabal") True
        ^^^ help "Use (or don't) ghcup to install cabal"
    <*> rangeField "ghcup-jobs" (field @"cfgGhcupJobs") (C.unionVersionRanges (C.intersectVersionRanges (C.laterVersion (mkVersion [8,10,4])) (C.earlierVersion (mkVersion [9]))) (C.laterVersion (mkVersion [9,0,1])))
        ^^^ metahelp "RANGE" "(Linux) jobs to use ghcup to install tools"
    <*> C.optionalFieldDef "ghcup-version" (field @"cfgGhcupVersion") defaultGhcupVersion
        ^^^ metahelp "VERSION" "ghcup version"
    <*> C.monoidalFieldAla "apt" (alaSet' C.NoCommaFSep C.Token') (field @"cfgApt")
        ^^^ metahelp "PKG" "Additional apt packages to install"
    <*> C.monoidalFieldAla "travis-patches" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgTravisPatches")
        ^^^ metaActionHelp "PATCH" "file" ".patch files to apply to the generated Travis YAML file"
    <*> C.monoidalFieldAla "github-patches" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgGitHubPatches")
        ^^^ metaActionHelp "PATCH" "file" ".patch files to apply to the generated GitHub Actions YAML file"
    <*> C.booleanFieldDef "insert-version" (field @"cfgInsertVersion") True
        ^^^ help "Don't insert the haskell-ci version into the generated Travis YAML file"
    <*> C.optionalFieldDef "error-missing-methods" (field @"cfgErrorMissingMethods") PackageScopeLocal
        ^^^ metahelp "PKGSCOPE" "Insert -Werror=missing-methods for package scope (none, local, all)"
    <*> C.blurFieldGrammar (field @"cfgDoctest") doctestConfigGrammar
    <*> C.blurFieldGrammar (field @"cfgDocspec") docspecConfigGrammar
    <*> C.blurFieldGrammar (field @"cfgHLint") hlintConfigGrammar
    <*> pure [] -- constraint sets; filled in by 'parseConfigFile' sections
    <*> pure [] -- raw project fields; filled in by 'parseConfigFile' sections
    <*> C.freeTextFieldDef "raw-travis" (field @"cfgRawTravis")
        ^^^ help "Raw travis commands which will be run at the very end of the script"
    <*> C.freeTextField "github-action-name" (field @"cfgGitHubActionName")
        ^^^ help "The name of GitHub Action"
    <*> C.optionalFieldDef "timeout-minutes" (field @"cfgTimeoutMinutes") 60
        ^^^ metahelp "MINUTES" "The maximum number of minutes to let a job run"
-------------------------------------------------------------------------------
-- Reading
-------------------------------------------------------------------------------
-- | Read a haskell-ci configuration file from disk and parse it.
readConfigFile :: MonadIO m => FilePath -> m Config
readConfigFile path = liftIO (readAndParseFile parseConfigFile path)
-- | Parse the fields of a configuration file into a 'Config'.
--
-- Top-level fields are interpreted through 'configGrammar'.  Each
-- section becomes a @Config -> Config@ transformer; the transformers
-- are applied left-to-right, and 'postprocess' runs last.
parseConfigFile :: [C.Field C.Position] -> C.ParseResult Config
parseConfigFile fields0 = do
    config <- C.parseFieldGrammar C.cabalSpecLatest fields configGrammar
    config' <- traverse parseSection $ concat sections
    return $ postprocess $ foldl' (&) config config'
  where
    (fields, sections) = C.partitionFields fields0
    -- Recognised sections: @constraint-set NAME@ (prepends a constraint
    -- set) and @raw-project@ (appends verbatim project fields).  Any
    -- other section name only emits a warning and is ignored.
    parseSection :: C.Section C.Position -> C.ParseResult (Config -> Config)
    parseSection (C.MkSection (C.Name pos name) args cfields)
        | name == "constraint-set" = do
            name' <- parseName pos args
            let (fs, _sections) = C.partitionFields cfields
            cs <- C.parseFieldGrammar C.cabalSpecLatest fs (constraintSetGrammar name')
            return $ over (field @"cfgConstraintSets") (cs :)
        | name == "raw-project" = do
            let fs = C.fromParsecFields cfields
            return $ over (field @"cfgRawProject") (++ map void fs)
        | otherwise = do
            C.parseWarning pos C.PWTUnknownSection $ "Unknown section " ++ fromUTF8BS name
            return id
    postprocess :: Config -> Config
    postprocess cfg
        -- on jammy (and newer) the only install option is ghcup
        | cfgUbuntu cfg >= Jammy = cfg { cfgGhcupJobs = anyVersion }
        | otherwise = cfg
-------------------------------------------------------------------------------
-- Env
-------------------------------------------------------------------------------
-- | Per-GHC-version environment variables, keyed by compiler version.
-- The 'C.Parsec'/'C.Pretty' instances below use a comma-separated
-- @VERSION:VALUE@ syntax.
newtype Env = Env (M.Map Version String)
  deriving anyclass (C.Newtype (M.Map Version String))
-- | Parse a leading-comma-tolerant list of @VERSION:VALUE@ entries.
instance C.Parsec Env where
    parsec = fmap (Env . M.fromList) (C.parsecLeadingCommaList entry)
      where
        entry = do
            version <- C.parsec
            _ <- C.char ':'
            value <- C.munch1 (/= ',')
            return (version, value)
-- | Render the map back as comma-separated @VERSION:VALUE@ entries.
instance C.Pretty Env where
    pretty (Env m) =
        PP.fsep (PP.punctuate PP.comma (map entry (M.toList m)))
      where
        entry (v, s) = C.pretty v PP.<> PP.colon PP.<> PP.text s
-------------------------------------------------------------------------------
-- From Cabal
-------------------------------------------------------------------------------
-- | Parse a section name as a 'String' (see 'parseNameBS').
parseName :: C.Position -> [C.SectionArg C.Position] -> C.ParseResult String
parseName pos args = fmap fromUTF8BS (parseNameBS pos args)
-- | Parse a section name as a raw 'BS.ByteString'.  Malformed argument
-- lists report a parse failure and yield the empty string.
parseNameBS :: C.Position -> [C.SectionArg C.Position] -> C.ParseResult BS.ByteString
parseNameBS pos args = case args of
    [C.SecArgName _pos name] -> pure name
    [C.SecArgStr _pos name] -> pure name
    [] -> reject "name required"
    -- TODO: pretty print args
    _ -> reject ("Invalid name " ++ show args)
  where
    reject msg = do
        C.parseFailure pos msg
        pure ""
| null | https://raw.githubusercontent.com/haskell-CI/haskell-ci/c3a891f871775a1bd61a95ffac9b414b72987af3/src/HaskellCI/Config.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DataKinds #
-----------------------------------------------------------------------------
Config
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Grammar
-----------------------------------------------------------------------------
constraint sets
raw project fields
-----------------------------------------------------------------------------
Reading
-----------------------------------------------------------------------------
on yammy the only install option is ghcup
-----------------------------------------------------------------------------
Env
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
TODO: pretty print args | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeApplications #
module HaskellCI.Config where
import HaskellCI.Prelude
import qualified Data.ByteString as BS
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Distribution.CabalSpecVersion as C
import qualified Distribution.Compat.CharParsing as C
import qualified Distribution.Compat.Newtype as C
import qualified Distribution.FieldGrammar as C
import qualified Distribution.Fields as C
import qualified Distribution.Parsec as C
import qualified Distribution.Pretty as C
import qualified Distribution.Types.PackageName as C
import qualified Distribution.Types.Version as C
import qualified Distribution.Types.VersionRange as C
import qualified Text.PrettyPrint as PP
import HaskellCI.Config.Components
import HaskellCI.Config.ConstraintSet
import HaskellCI.Config.CopyFields
import HaskellCI.Config.Docspec
import HaskellCI.Config.Doctest
import HaskellCI.Config.Empty
import HaskellCI.Config.Folds
import HaskellCI.Config.HLint
import HaskellCI.Config.Installed
import HaskellCI.Config.Jobs
import HaskellCI.Config.PackageScope
import HaskellCI.Config.Ubuntu
import HaskellCI.HeadHackage
import HaskellCI.Newtypes
import HaskellCI.OptionsGrammar
import HaskellCI.ParsecUtils
import HaskellCI.TestedWith
TODO : split other blocks like
data Config = Config
{ cfgCabalInstallVersion :: Maybe Version
, cfgJobs :: Maybe Jobs
, cfgUbuntu :: !Ubuntu
, cfgTestedWith :: !TestedWithJobs
, cfgEnabledJobs :: !VersionRange
, cfgCopyFields :: !CopyFields
, cfgLocalGhcOptions :: [String]
, cfgSubmodules :: !Bool
, cfgCache :: !Bool
, cfgInstallDeps :: !Bool
, cfgInstalled :: [Installed]
, cfgTests :: !VersionRange
, cfgRunTests :: !VersionRange
, cfgBenchmarks :: !VersionRange
, cfgHaddock :: !VersionRange
, cfgHaddockComponents :: !Components
, cfgNoTestsNoBench :: !VersionRange
, cfgUnconstrainted :: !VersionRange
, cfgHeadHackage :: !VersionRange
, cfgHeadHackageOverride :: !Bool
, cfgGhcjsTests :: !Bool
, cfgGhcjsTools :: ![C.PackageName]
, cfgTestOutputDirect :: !Bool
, cfgCheck :: !Bool
, cfgOnlyBranches :: [String]
, cfgIrcChannels :: [String]
, cfgIrcNickname :: Maybe String
, cfgIrcPassword :: Maybe String
, cfgIrcIfInOriginRepo :: Bool
, cfgEmailNotifications :: Bool
, cfgProjectName :: Maybe String
, cfgFolds :: S.Set Fold
, cfgGhcHead :: !Bool
, cfgPostgres :: !Bool
, cfgGoogleChrome :: !Bool
, cfgEnv :: M.Map Version String
, cfgAllowFailures :: !VersionRange
, cfgLastInSeries :: !Bool
, cfgLinuxJobs :: !VersionRange
, cfgMacosJobs :: !VersionRange
, cfgGhcupCabal :: !Bool
, cfgGhcupJobs :: !VersionRange
, cfgGhcupVersion :: !Version
, cfgApt :: S.Set String
, cfgTravisPatches :: [FilePath]
, cfgGitHubPatches :: [FilePath]
, cfgInsertVersion :: !Bool
, cfgErrorMissingMethods :: !PackageScope
, cfgDoctest :: !DoctestConfig
, cfgDocspec :: !DocspecConfig
, cfgHLint :: !HLintConfig
, cfgConstraintSets :: [ConstraintSet]
, cfgRawProject :: [C.PrettyField ()]
, cfgRawTravis :: !String
, cfgGitHubActionName :: !(Maybe String)
, cfgTimeoutMinutes :: !Natural
}
deriving (Generic)
defaultCabalInstallVersion :: Maybe Version
defaultCabalInstallVersion = Just (C.mkVersion [3,9])
defaultGhcupVersion :: Version
defaultGhcupVersion = C.mkVersion [0,1,18,0]
emptyConfig :: Config
emptyConfig = case runEG configGrammar of
Left xs -> error $ "Required fields: " ++ show xs
Right x -> x
configGrammar
:: ( OptionsGrammar c g, Applicative (g Config)
, c (Identity HLintJob)
, c (Identity PackageScope)
, c (Identity TestedWithJobs)
, c (Identity Ubuntu)
, c (Identity Jobs)
, c (Identity CopyFields)
, c (Identity Version)
, c (Identity Natural)
, c (Identity Components)
, c Env, c Folds, c CopyFields, c HeadVersion
, c (C.List C.FSep (Identity Installed) Installed)
, Applicative (g DoctestConfig)
, Applicative (g DocspecConfig)
, Applicative (g HLintConfig))
=> g Config Config
configGrammar = Config
<$> C.optionalFieldDefAla "cabal-install-version" HeadVersion (field @"cfgCabalInstallVersion") defaultCabalInstallVersion
^^^ metahelp "VERSION" "cabal-install version for all jobs"
<*> C.optionalField "jobs" (field @"cfgJobs")
^^^ metahelp "JOBS" "jobs (N:M - cabal:ghc)"
<*> C.optionalFieldDef "distribution" (field @"cfgUbuntu") Bionic
^^^ metahelp "DIST" (concat
[ "distribution version ("
, intercalate ", " $ map showUbuntu [minBound..maxBound]
, ")"
])
<*> C.optionalFieldDef "jobs-selection" (field @"cfgTestedWith") TestedWithUniform
^^^ metahelp "uniform|any" "Jobs selection across packages"
<*> rangeField "enabled" (field @"cfgEnabledJobs") anyVersion
^^^ metahelp "RANGE" "Restrict jobs selection futher from per package tested-with"
<*> C.optionalFieldDef "copy-fields" (field @"cfgCopyFields") CopyFieldsSome
^^^ metahelp "none|some|all" "Copy ? fields from cabal.project fields"
<*> C.monoidalFieldAla "local-ghc-options" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgLocalGhcOptions")
^^^ metahelp "OPTS" "--ghc-options for local packages"
<*> C.booleanFieldDef "submodules" (field @"cfgSubmodules") False
^^^ help "Clone submodules, i.e. recursively"
<*> C.booleanFieldDef "cache" (field @"cfgCache") True
^^^ help "Disable caching"
<*> C.booleanFieldDef "install-dependencies" (field @"cfgInstallDeps") True
^^^ help "Skip separate dependency installation step"
<*> C.monoidalFieldAla "installed" (C.alaList C.FSep) (field @"cfgInstalled")
^^^ metahelp "+/-PKG" "Specify 'constraint: ... installed' packages"
<*> rangeField "tests" (field @"cfgTests") anyVersion
^^^ metahelp "RANGE" "Build tests with"
<*> rangeField "run-tests" (field @"cfgRunTests") anyVersion
^^^ metahelp "RANGE" "Run tests with (note: only built tests are run)"
<*> rangeField "benchmarks" (field @"cfgBenchmarks") anyVersion
^^^ metahelp "RANGE" "Build benchmarks"
<*> rangeField "haddock" (field @"cfgHaddock") anyVersion
^^^ metahelp "RANGE" "Haddock step"
<*> C.optionalFieldDef "haddock-components" (field @"cfgHaddockComponents") ComponentsAll
^^^ metahelp "all|libs" "Haddock components"
<*> rangeField "no-tests-no-benchmarks" (field @"cfgNoTestsNoBench") anyVersion
^^^ metahelp "RANGE" "Build without tests and benchmarks"
<*> rangeField "unconstrained" (field @"cfgUnconstrainted") anyVersion
^^^ metahelp "RANGE" "Make unconstrained build"
<*> rangeField "head-hackage" (field @"cfgHeadHackage") defaultHeadHackage
^^^ metahelp "RANGE" "Use head.hackage repository. Also marks as allow-failures"
<*> C.booleanFieldDef "head-hackage-override" (field @"cfgHeadHackageOverride") True
^^^ help "Use :override for head.hackage repository"
<*> C.booleanFieldDef "ghcjs-tests" (field @"cfgGhcjsTests") False
^^^ help "Run tests with GHCJS (experimental, relies on cabal-plan finding test-suites)"
<*> C.monoidalFieldAla "ghcjs-tools" (C.alaList C.FSep) (field @"cfgGhcjsTools")
^^^ metahelp " TOOL " " Additional host tools to install with GHCJS "
<*> C.booleanFieldDef "test-output-direct" (field @"cfgTestOutputDirect") True
^^^ help "Use --test-show-details=direct, may cause problems with build-type: Custom"
<*> C.booleanFieldDef "cabal-check" (field @"cfgCheck") True
^^^ help "Disable cabal check run"
<*> C.monoidalFieldAla "branches" (C.alaList' C.FSep C.Token') (field @"cfgOnlyBranches")
^^^ metahelp "BRANCH" "Enable builds only for specific branches"
<*> C.monoidalFieldAla "irc-channels" (C.alaList' C.FSep C.Token') (field @"cfgIrcChannels")
^^^ metahelp "IRC" "Enable IRC notifications to given channel (e.g. 'irc.libera.chat#haskell-lens')"
<*> C.freeTextField "irc-nickname" (field @"cfgIrcNickname")
^^^ metahelp "NICKNAME" "Nickname with which to authenticate to an IRC server. Only used if `irc-channels` are set."
<*> C.freeTextField "irc-password" (field @"cfgIrcPassword")
^^^ metahelp "PASSWORD" "Password with which to authenticate to an IRC server. Only used if `irc-channels` are set."
<*> C.booleanFieldDef "irc-if-in-origin-repo" (field @"cfgIrcIfInOriginRepo") False
^^^ help "Only send IRC notifications if run from the original remote (GitHub Actions only)"
<*> C.booleanFieldDef "email-notifications" (field @"cfgEmailNotifications") True
^^^ help "Disable email notifications"
<*> C.optionalFieldAla "project-name" C.Token' (field @"cfgProjectName")
^^^ metahelp "NAME" "Project name (used for IRC notifications), defaults to package name or name of first package listed in cabal.project file"
<*> C.monoidalFieldAla "folds" Folds (field @"cfgFolds")
^^^ metahelp "FOLD" "Build steps to fold"
<*> C.booleanFieldDef "ghc-head" (field @"cfgGhcHead") False
^^^ help "Add ghc-head job"
<*> C.booleanFieldDef "postgresql" (field @"cfgPostgres") False
^^^ help "Add postgresql service"
<*> C.booleanFieldDef "google-chrome" (field @"cfgGoogleChrome") False
^^^ help "Add google-chrome service"
<*> C.monoidalFieldAla "env" Env (field @"cfgEnv")
^^^ metahelp "ENV" "Environment variables per job (e.g. `8.0.2:HADDOCK=false`)"
<*> C.optionalFieldDefAla "allow-failures" Range (field @"cfgAllowFailures") noVersion
^^^ metahelp "JOB" "Allow failures of particular GHC version"
<*> C.booleanFieldDef "last-in-series" (field @"cfgLastInSeries") False
^^^ help "[Discouraged] Assume there are only GHCs last in major series: 8.2.* will match only 8.2.2"
<*> rangeField "linux-jobs" (field @"cfgLinuxJobs") anyVersion
^^^ metahelp "RANGE" "Jobs to build on Linux"
<*> rangeField "macos-jobs" (field @"cfgMacosJobs") noVersion
^^^ metahelp "RANGE" "Jobs to additionally build with OSX"
<*> C.booleanFieldDef "ghcup-cabal" (field @"cfgGhcupCabal") True
^^^ help "Use (or don't) ghcup to install cabal"
<*> rangeField "ghcup-jobs" (field @"cfgGhcupJobs") (C.unionVersionRanges (C.intersectVersionRanges (C.laterVersion (mkVersion [8,10,4])) (C.earlierVersion (mkVersion [9]))) (C.laterVersion (mkVersion [9,0,1])))
^^^ metahelp "RANGE" "(Linux) jobs to use ghcup to install tools"
<*> C.optionalFieldDef "ghcup-version" (field @"cfgGhcupVersion") defaultGhcupVersion
^^^ metahelp "VERSION" "ghcup version"
<*> C.monoidalFieldAla "apt" (alaSet' C.NoCommaFSep C.Token') (field @"cfgApt")
^^^ metahelp "PKG" "Additional apt packages to install"
<*> C.monoidalFieldAla "travis-patches" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgTravisPatches")
^^^ metaActionHelp "PATCH" "file" ".patch files to apply to the generated Travis YAML file"
<*> C.monoidalFieldAla "github-patches" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgGitHubPatches")
^^^ metaActionHelp "PATCH" "file" ".patch files to apply to the generated GitHub Actions YAML file"
<*> C.booleanFieldDef "insert-version" (field @"cfgInsertVersion") True
^^^ help "Don't insert the haskell-ci version into the generated Travis YAML file"
<*> C.optionalFieldDef "error-missing-methods" (field @"cfgErrorMissingMethods") PackageScopeLocal
^^^ metahelp "PKGSCOPE" "Insert -Werror=missing-methods for package scope (none, local, all)"
<*> C.blurFieldGrammar (field @"cfgDoctest") doctestConfigGrammar
<*> C.blurFieldGrammar (field @"cfgDocspec") docspecConfigGrammar
<*> C.blurFieldGrammar (field @"cfgHLint") hlintConfigGrammar
<*> C.freeTextFieldDef "raw-travis" (field @"cfgRawTravis")
^^^ help "Raw travis commands which will be run at the very end of the script"
<*> C.freeTextField "github-action-name" (field @"cfgGitHubActionName")
^^^ help "The name of GitHub Action"
<*> C.optionalFieldDef "timeout-minutes" (field @"cfgTimeoutMinutes") 60
^^^ metahelp "MINUTES" "The maximum number of minutes to let a job run"
-- | Read a configuration file from disk and parse it into a 'Config'.
readConfigFile :: MonadIO m => FilePath -> m Config
readConfigFile = liftIO . readAndParseFile parseConfigFile
-- | Parse a configuration file's fields into a 'Config'.
--
-- Top-level fields go through 'configGrammar'; every section is turned
-- into a @Config -> Config@ updater which is folded over the result,
-- and a final 'postprocess' pass applies cross-field fix-ups.
parseConfigFile :: [C.Field C.Position] -> C.ParseResult Config
parseConfigFile fields0 = do
    config  <- C.parseFieldGrammar C.cabalSpecLatest fields configGrammar
    config' <- traverse parseSection $ concat sections
    return $ postprocess $ foldl' (&) config config'
  where
    (fields, sections) = C.partitionFields fields0

    -- Turn one named section into a 'Config' updater.
    parseSection :: C.Section C.Position -> C.ParseResult (Config -> Config)
    parseSection (C.MkSection (C.Name pos name) args cfields)
        | name == "constraint-set" = do
            name' <- parseName pos args
            let (fs, _sections) = C.partitionFields cfields
            cs <- C.parseFieldGrammar C.cabalSpecLatest fs (constraintSetGrammar name')
            return $ over (field @"cfgConstraintSets") (cs :)
        | name == "raw-project" = do
            let fs = C.fromParsecFields cfields
            return $ over (field @"cfgRawProject") (++ map void fs)
        | otherwise = do
            -- unknown sections produce a warning, not a fatal error
            C.parseWarning pos C.PWTUnknownSection $ "Unknown section " ++ fromUTF8BS name
            return id

    -- Cross-field fix-ups applied after all fields/sections are parsed.
    postprocess :: Config -> Config
    postprocess cfg
        -- NOTE(review): on Ubuntu >= Jammy all jobs are forced through
        -- ghcup; rationale inferred from the version guard — confirm.
        | cfgUbuntu cfg >= Jammy = cfg { cfgGhcupJobs = anyVersion }
        | otherwise              = cfg
-- | Per-GHC-version environment variables,
-- e.g. @8.0.2:HADDOCK=false@ in the surface syntax.
newtype Env = Env (M.Map Version String)
  deriving anyclass (C.Newtype (M.Map Version String))

-- | Parse a leading-comma-separated list of @VERSION:ENVSTRING@ entries.
instance C.Parsec Env where
    parsec = Env . M.fromList <$> C.parsecLeadingCommaList p where
        p = do
            v <- C.parsec
            _ <- C.char ':'
            -- everything up to the next comma is the raw environment string
            s <- C.munch1 $ \c -> c /= ','
            return (v, s)

-- | Render back to the @VERSION:ENV, VERSION:ENV@ surface syntax.
instance C.Pretty Env where
    pretty (Env m) = PP.fsep . PP.punctuate PP.comma . map p . M.toList $ m where
        p (v, s) = C.pretty v PP.<> PP.colon PP.<> PP.text s
-- From Cabal
-- | Parse a section name from its arguments, decoded from UTF-8.
parseName :: C.Position -> [C.SectionArg C.Position] -> C.ParseResult String
parseName pos args = fromUTF8BS <$> parseNameBS pos args

-- | Extract the single section-argument name as a 'BS.ByteString'.
--
-- On an empty or malformed argument list a parse failure is recorded,
-- but @""@ is still returned so parsing can continue and collect
-- further errors.
parseNameBS :: C.Position -> [C.SectionArg C.Position] -> C.ParseResult BS.ByteString
parseNameBS pos args = case args of
    [C.SecArgName _pos secName] ->
        pure secName
    [C.SecArgStr _pos secName] ->
        pure secName
    [] -> do
        C.parseFailure pos "name required"
        pure ""
    _ -> do
        C.parseFailure pos $ "Invalid name " ++ show args
        pure ""
|
0043b517972ebfd301b2bb2044d7a9267354094c5f028067e746b08298e47ce9 | mirage/irmin | commit_intf.ml |
(*
 * Copyright (c) 2013-2022 Thomas Gazagnaire <>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 *)
open! Import
module type S_generic_key = sig
(** {1 Commit values} *)
type t [@@deriving irmin]
(** The type for commit values. *)
type node_key [@@deriving irmin]
(** Type for node keys. *)
type commit_key [@@deriving irmin]
(** Type for commit keys. *)
module Info : Info.S
(** The type for commit info. *)
val v : info:Info.t -> node:node_key -> parents:commit_key list -> t
(** Create a commit. *)
val node : t -> node_key
(** The underlying node key. *)
val parents : t -> commit_key list
(** The commit parents. *)
val info : t -> Info.t
(** The commit info. *)
end
module type S = sig
type hash [@@deriving irmin]
(** @inline *)
include S_generic_key with type node_key = hash and type commit_key = hash
end
module type Portable = sig
include S
type commit
val of_commit : commit -> t
end
open struct
module S_is_a_generic_key (X : S) : S_generic_key = X
end
module type Maker_generic_key = sig
module Info : Info.S
module Make
(H : Type.S)
(N : Key.S with type hash = H.t)
(C : Key.S with type hash = H.t) : sig
include
S_generic_key
with type node_key = N.t
and type commit_key = C.t
and module Info = Info
module Portable :
Portable with type commit := t and type hash := H.t and module Info = Info
end
module Make_v2
(H : Type.S)
(N : Key.S with type hash = H.t)
(C : Key.S with type hash = H.t) : sig
include
S_generic_key
with type node_key = N.t
and type commit_key = C.t
and module Info = Info
module Portable :
Portable with type commit := t and type hash := H.t and module Info = Info
end
end
module type Maker = sig
module Info : Info.S
module Make (H : Type.S) : S with type hash = H.t and module Info = Info
end
module type Store = sig
(** {1 Commit Store} *)
include Indexable.S
module Info : Info.S
(** Commit info. *)
(** [Val] provides functions for commit values. *)
module Val :
S_generic_key
with type t = value
and type commit_key = key
and module Info := Info
module Hash : Hash.Typed with type t = hash and type value = value
module Node : Node.Store with type key = Val.node_key
(** [Node] is the underlying node store. *)
val merge : [> read_write ] t -> info:Info.f -> key option Merge.t
(** [merge] is the 3-way merge function for commit keys. *)
end
module type History = sig
(** {1 Commit History} *)
type 'a t
(** The type for store handles. *)
type node_key [@@deriving irmin]
(** The type for node keys. *)
type commit_key [@@deriving irmin]
(** The type for commit keys. *)
type v [@@deriving irmin]
(** The type for commit objects. *)
type info [@@deriving irmin]
(** The type for commit info. *)
val v :
[> write ] t ->
node:node_key ->
parents:commit_key list ->
info:info ->
(commit_key * v) Lwt.t
(** Create a new commit. *)
val parents : [> read ] t -> commit_key -> commit_key list Lwt.t
(** Get the commit parents.
Commits form a append-only, fully functional, partial-order
data-structure: every commit carries the list of its immediate
predecessors. *)
val merge : [> read_write ] t -> info:(unit -> info) -> commit_key Merge.t
(** [merge t] is the 3-way merge function for commit. *)
val lcas :
[> read ] t ->
?max_depth:int ->
?n:int ->
commit_key ->
commit_key ->
(commit_key list, [ `Max_depth_reached | `Too_many_lcas ]) result Lwt.t
(** Find the lowest common ancestors
    {{:https://en.wikipedia.org/wiki/Lowest_common_ancestor} lca} between two
    commits. *)
val lca :
[> read_write ] t ->
info:(unit -> info) ->
?max_depth:int ->
?n:int ->
commit_key list ->
(commit_key option, Merge.conflict) result Lwt.t
(** Compute the lowest common ancestors ancestor of a list of commits by
    recursively calling {!lcas} and merging the results.

    If one of the merges results in a conflict, or if a call to {!lcas}
    returns either [Error `Max_depth_reached] or [Error `Too_many_lcas] then
    the function returns the same error. *)
val three_way_merge :
[> read_write ] t ->
info:(unit -> info) ->
?max_depth:int ->
?n:int ->
commit_key ->
commit_key ->
(commit_key, Merge.conflict) result Lwt.t
(** Compute the {!lcas} of the two commit and 3-way merge the result. *)
val closure :
[> read ] t ->
min:commit_key list ->
max:commit_key list ->
commit_key list Lwt.t
(** Same as {{!Node.Graph.closure} Node.Graph.closure} but for the history
graph. *)
val iter :
[> read ] t ->
min:commit_key list ->
max:commit_key list ->
?commit:(commit_key -> unit Lwt.t) ->
?edge:(commit_key -> commit_key -> unit Lwt.t) ->
?skip:(commit_key -> bool Lwt.t) ->
?rev:bool ->
unit ->
unit Lwt.t
(** Same as {{!Node.Graph.iter} Node.Graph.iter} but for traversing the
history graph. *)
end
module type Sigs = sig
module type S = S
module type Maker = Maker
(** [Maker] provides a simple implementation of commit values, parameterized
by commit info. *)
module Maker (I : Info.S) : Maker with module Info = I
(** [Generic_key] generalises the concept of "commit" to one that supports
object keys that are not strictly equal to hashes. *)
module Generic_key : sig
module type S = S_generic_key
module type Maker = Maker_generic_key
module Maker (I : Info.S) : Maker with module Info = I
module Store
(I : Info.S)
(N : Node.Store)
(S : Indexable.S)
(H : Hash.S with type t = S.hash)
(V : S
with type node_key = N.key
and type commit_key = S.key
and type t = S.value
and module Info := I) :
Store
with type 'a t = 'a N.t * 'a S.t
and type key = S.key
and type value = S.value
and module Info = I
and type hash = S.hash
and module Val = V
include Maker with module Info = Info.Default
end
(** V1 serialisation. *)
module V1 : sig
module Info : Info.S with type t = Info.Default.t
(** Serialisation format for V1 info. *)
module Make (Hash : Hash.S) (C : Generic_key.S with module Info := Info) : sig
include
Generic_key.S
with module Info = Info
and type node_key = C.node_key
and type commit_key = C.commit_key
val import : C.t -> t
val export : t -> C.t
end
end
module Portable : sig
(** Portable form of a commit implementation that can be constructed from a
concrete representation and used in computing hashes. Conceptually, a
[Commit.Portable.t] is a [Commit.t] in which all internal keys have been
replaced with the hashes of the values they point to.
As with {!Node.Portable}, computations over portable values must commute
with those over [t]s. *)
(** A node implementation with hashes for keys is trivially portable: *)
module Of_commit (S : S) :
Portable
with type commit := S.t
and type t = S.t
and type hash = S.hash
and module Info = S.Info
module type S = Portable
end
module type Store = Store
(** [Store] specifies the signature for commit stores. *)
(** [Store] creates a new commit store. *)
module Store
(I : Info.S)
(N : Node.Store)
(S : Content_addressable.S with type key = N.key)
(H : Hash.S with type t = S.key)
(V : S with type hash = S.key and type t = S.value and module Info := I) :
Store
with type 'a t = 'a N.t * 'a S.t
and type key = S.key
and type hash = S.key
and type value = S.value
and module Info = I
and module Val = V
module type History = History
(** [History] specifies the signature for commit history. The history is
represented as a partial-order of commits and basic functions to search
through that history are provided.
Every commit can point to an entry point in a node graph, where
user-defined contents are stored. *)
(** Build a commit history. *)
module History (C : Store) :
History
with type 'a t = 'a C.t
and type v = C.Val.t
and type node_key = C.Node.key
and type commit_key = C.key
and type info = C.Info.t
include Maker with module Info = Info.Default
end
| null | https://raw.githubusercontent.com/mirage/irmin/abeee121a6db7b085b3c68af50ef24a8d8f9ed05/src/irmin/commit_intf.ml | ocaml | * {1 Commit values}
* The type for commit values.
* Type for node keys.
* Type for commit keys.
* The type for commit info.
* Create a commit.
* The underlying node key.
* The commit parents.
* The commit info.
* {1 Commit Store}
* Commit info.
* [Val] provides functions for commit values.
* [Node] is the underlying node store.
* {1 Commit History}
* The type for store handles.
* The type for node keys.
* The type for commit keys.
* The type for commit objects.
* The type for commit info.
* Create a new commit.
* Get the commit parents.
Commits form a append-only, fully functional, partial-order
data-structure: every commit carries the list of its immediate
predecessors.
* Same as {{!Node.Graph.closure} Node.Graph.closure} but for the history
graph.
* Same as {{!Node.Graph.iter} Node.Graph.iter} but for traversing the
history graph.
* [Maker] provides a simple implementation of commit values, parameterized
by commit info.
* [Generic_key] generalises the concept of "commit" to one that supports
object keys that are not strictly equal to hashes.
* V1 serialisation.
* Serialisation format for V1 info.
* Portable form of a commit implementation that can be constructed from a
concrete representation and used in computing hashes. Conceptually, a
[Commit.Portable.t] is a [Commit.t] in which all internal keys have been
replaced with the hashes of the values they point to.
As with {!Node.Portable}, computations over portable values must commute
with those over [t]s.
* A node implementation with hashes for keys is trivially portable:
* [Store] specifies the signature for commit stores.
* [Store] creates a new commit store.
* [History] specifies the signature for commit history. The history is
represented as a partial-order of commits and basic functions to search
through that history are provided.
Every commit can point to an entry point in a node graph, where
user-defined contents are stored.
* Build a commit history. |
* Copyright ( c ) 2013 - 2022 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2022 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open! Import
module type S_generic_key = sig
type t [@@deriving irmin]
type node_key [@@deriving irmin]
type commit_key [@@deriving irmin]
module Info : Info.S
val v : info:Info.t -> node:node_key -> parents:commit_key list -> t
val node : t -> node_key
val parents : t -> commit_key list
val info : t -> Info.t
end
module type S = sig
type hash [@@deriving irmin]
* @inline
include S_generic_key with type node_key = hash and type commit_key = hash
end
module type Portable = sig
include S
type commit
val of_commit : commit -> t
end
open struct
module S_is_a_generic_key (X : S) : S_generic_key = X
end
module type Maker_generic_key = sig
module Info : Info.S
module Make
(H : Type.S)
(N : Key.S with type hash = H.t)
(C : Key.S with type hash = H.t) : sig
include
S_generic_key
with type node_key = N.t
and type commit_key = C.t
and module Info = Info
module Portable :
Portable with type commit := t and type hash := H.t and module Info = Info
end
module Make_v2
(H : Type.S)
(N : Key.S with type hash = H.t)
(C : Key.S with type hash = H.t) : sig
include
S_generic_key
with type node_key = N.t
and type commit_key = C.t
and module Info = Info
module Portable :
Portable with type commit := t and type hash := H.t and module Info = Info
end
end
module type Maker = sig
module Info : Info.S
module Make (H : Type.S) : S with type hash = H.t and module Info = Info
end
module type Store = sig
include Indexable.S
module Info : Info.S
module Val :
S_generic_key
with type t = value
and type commit_key = key
and module Info := Info
module Hash : Hash.Typed with type t = hash and type value = value
module Node : Node.Store with type key = Val.node_key
val merge : [> read_write ] t -> info:Info.f -> key option Merge.t
* [ merge ] is the 3 - way merge function for commit keys .
end
module type History = sig
type 'a t
type node_key [@@deriving irmin]
type commit_key [@@deriving irmin]
type v [@@deriving irmin]
type info [@@deriving irmin]
val v :
[> write ] t ->
node:node_key ->
parents:commit_key list ->
info:info ->
(commit_key * v) Lwt.t
val parents : [> read ] t -> commit_key -> commit_key list Lwt.t
val merge : [> read_write ] t -> info:(unit -> info) -> commit_key Merge.t
* [ merge t ] is the 3 - way merge function for commit .
val lcas :
[> read ] t ->
?max_depth:int ->
?n:int ->
commit_key ->
commit_key ->
(commit_key list, [ `Max_depth_reached | `Too_many_lcas ]) result Lwt.t
* Find the lowest common ancestors
{ { : } lca } between two
commits .
{{:} lca} between two
commits. *)
val lca :
[> read_write ] t ->
info:(unit -> info) ->
?max_depth:int ->
?n:int ->
commit_key list ->
(commit_key option, Merge.conflict) result Lwt.t
* Compute the lowest common ancestors ancestor of a list of commits by
recursively calling { ! } and merging the results .
If one of the merges results in a conflict , or if a call to { ! }
returns either [ Error ` Max_depth_reached ] or [ Error ` Too_many_lcas ] then
the function returns the same error .
recursively calling {!lcas} and merging the results.
If one of the merges results in a conflict, or if a call to {!lcas}
returns either [Error `Max_depth_reached] or [Error `Too_many_lcas] then
the function returns the same error. *)
val three_way_merge :
[> read_write ] t ->
info:(unit -> info) ->
?max_depth:int ->
?n:int ->
commit_key ->
commit_key ->
(commit_key, Merge.conflict) result Lwt.t
* Compute the { ! } of the two commit and 3 - way merge the result .
val closure :
[> read ] t ->
min:commit_key list ->
max:commit_key list ->
commit_key list Lwt.t
val iter :
[> read ] t ->
min:commit_key list ->
max:commit_key list ->
?commit:(commit_key -> unit Lwt.t) ->
?edge:(commit_key -> commit_key -> unit Lwt.t) ->
?skip:(commit_key -> bool Lwt.t) ->
?rev:bool ->
unit ->
unit Lwt.t
end
module type Sigs = sig
module type S = S
module type Maker = Maker
module Maker (I : Info.S) : Maker with module Info = I
module Generic_key : sig
module type S = S_generic_key
module type Maker = Maker_generic_key
module Maker (I : Info.S) : Maker with module Info = I
module Store
(I : Info.S)
(N : Node.Store)
(S : Indexable.S)
(H : Hash.S with type t = S.hash)
(V : S
with type node_key = N.key
and type commit_key = S.key
and type t = S.value
and module Info := I) :
Store
with type 'a t = 'a N.t * 'a S.t
and type key = S.key
and type value = S.value
and module Info = I
and type hash = S.hash
and module Val = V
include Maker with module Info = Info.Default
end
module V1 : sig
module Info : Info.S with type t = Info.Default.t
module Make (Hash : Hash.S) (C : Generic_key.S with module Info := Info) : sig
include
Generic_key.S
with module Info = Info
and type node_key = C.node_key
and type commit_key = C.commit_key
val import : C.t -> t
val export : t -> C.t
end
end
module Portable : sig
module Of_commit (S : S) :
Portable
with type commit := S.t
and type t = S.t
and type hash = S.hash
and module Info = S.Info
module type S = Portable
end
module type Store = Store
module Store
(I : Info.S)
(N : Node.Store)
(S : Content_addressable.S with type key = N.key)
(H : Hash.S with type t = S.key)
(V : S with type hash = S.key and type t = S.value and module Info := I) :
Store
with type 'a t = 'a N.t * 'a S.t
and type key = S.key
and type hash = S.key
and type value = S.value
and module Info = I
and module Val = V
module type History = History
module History (C : Store) :
History
with type 'a t = 'a C.t
and type v = C.Val.t
and type node_key = C.Node.key
and type commit_key = C.key
and type info = C.Info.t
include Maker with module Info = Info.Default
end
|
3b5fbe46db53a20c496ff73e00c5cbc6253361d09ffbf611657531c932e0c653 | berke/aurochs | cgidemo.ml | (* Cgi-bin *)
open Aurochs_pack
open Peg
open Cgi
open Xml
open Pffpsf
(* [error_html msg] renders a minimal error page body: an "Error"
   heading followed by a paragraph carrying [msg]. *)
let error_html msg =
  let heading = U (N ("h1", [], [D"Error"])) in
  let body = U (N ("p", [], [D msg])) in
  [heading; body]
(* Frequently used XHTML building blocks. *)

(* Self-closing <br> element. *)
let br = O("br",[])

(* [paragraph x] wraps the children [x] in a <p> element. *)
let paragraph x = N("p",[],x)

(* [div taxon child] builds <div class="taxon"> with children [child]. *)
let div taxon child = N("div", [{n="class"; v=S taxon}], child)

(* [span taxon child] builds <span class="taxon"> with children [child]. *)
let span taxon child = N("span", [{n="class"; v=S taxon}], child)
(* [textarea ~name ~rows ~cols ?content ()] builds a <textarea> form
   element pre-filled with [content] (empty by default). *)
let textarea ~name ~rows ~cols ?(content="") () =
  let attributes =
    [ {n = "rows"; v = I rows};
      {n = "cols"; v = I cols};
      {n = "name"; v = S name} ]
  in
  U (N ("textarea", attributes, [D content]))
(* [submit ~name ~value ()] builds an <input type="submit"> button. *)
let submit ~name ~value () =
  let attributes =
    [ {n = "type"; v = S"submit"};
      {n = "value"; v = S value};
      {n = "name"; v = S name} ]
  in
  U (N ("input", attributes, []))
(* [make_html x] wraps the page body [x] in the full XHTML document
   skeleton: <head> with title, content-type/keyword metadata,
   stylesheet and icon links, and a <body> that starts with a link
   back to the Aurochs homepage. *)
let make_html x =
  html_xml
    (
      N("html",
        (* NOTE(review): the xmlns value looks stripped — it should
           presumably be the XHTML namespace URI; confirm. *)
        [{n="xmlns"; v=S""};
         {n="lang"; v=S"en"};
         {n="xml:lang"; v=S"en"}],
        [
          N("head", [],
            [
              U(N("title", [], [D"The Aurochs parser generator"]));
              O("meta",
                [{n="http-equiv"; v=S"Content-Type"};
                 {n="content"; v=S"text/html; charset=utf-8"}]);
              O("meta",
                [{n="name"; v=S"keyword"};
                 {n="content"; v=S"Aurochs, parse expression grammar, PEG, parsing, Ocaml, Java, C, memoization"}]);
              O("link",
                [{n="rel"; v=S"stylesheet"};
                 {n="type"; v=S"text/css"};
                 {n="href"; v=S"/default.css"};
                 {n="media"; v=S"screen"}]);
              O("link",
                [{n="rel"; v=S"icon"};
                 {n="type"; v=S"image/png"};
                 {n="href"; v=S"/icon.png"}])
            ]
          );
          (* body: homepage link first, then the caller-supplied content *)
          N("body", [],
            (paragraph [D"Return to the"; N("a",[{n="href";v=S"/"}],[D"Aurochs homepage"])]) ::
            x);
        ]
      )
    )
(* The page model: everything needed to render one request/response
   cycle of the demo. *)
type 'a model =
  {
    m_grammar : string; (* PEG grammar text shown in the form *)
    m_input : string;   (* input text to be parsed *)
    m_output : 'a;      (* rendered result fragment *)
    m_info : Buffer.t   (* accumulated diagnostic/statistics text *)
  }
(* Default model: pre-loads the first predefined example and shows a
   welcome message.  Assumes [Examples.examples] is non-empty —
   [List.hd] would raise otherwise; TODO confirm. *)
let model0 =
  let (_, (grammar, input)) = List.hd Examples.examples in
  {
    m_grammar = grammar;
    m_input = input;
    m_output = paragraph [D"Welcome to the Aurochs parser generator on-line demonstration! Please feel comfortable and try a few grammars."];
    m_info = Buffer.create 8
  }
(* [reply h] sends an HTML response whose body is [h] wrapped in the
   standard page skeleton built by [make_html]. *)
let reply h = reply_html (fun oc -> output_xml oc (make_html h))
(* [view model] renders the whole demo page: the grammar/input form,
   one submit button per predefined example, the current output
   fragment, and the diagnostics block. *)
let view model =
  reply
    [
      U(N("h1", [], [D"Aurochs parser"]));
      N("form", [{n="action"; v=S"demo.cgi"}; {n="method";v=S"post"}],
        [
          paragraph [D"PEG grammar:"];
          textarea ~name:"grammar" ~rows:25 ~cols:80 ~content:model.m_grammar ();
          br;
          paragraph [D"Input:"];
          textarea ~name:"input" ~rows:5 ~cols:80 ~content:model.m_input ();
          br;
          submit ~name:"submit" ~value:"Parse" ();
          (* Examples *)
          div "examples"
            [
              paragraph [D"Some pre-defined examples:"];
              N("ul", [],
                List.map
                  (fun (name, _) -> N("li", [], [submit ~name:"example" ~value:name ()]))
                  Examples.examples)
            ]
        ]
      );
      model.m_output;
      (* raw diagnostics collected during the last compile/parse run *)
      div "info" [U(N("pre", [], [D(Buffer.contents model.m_info)]))]
    ]
(* Resource limits that keep the on-line demo cheap to serve. *)
let grammar_limit = 1000     (* maximum grammar size, in bytes *)
let input_limit = 1000       (* maximum input size, in bytes *)
let workload_limit = 1000000 (* maximum estimated workload, in units *)
(* [workload program input] is a rough parsing-cost estimate: input
   length multiplied by the grammar's choice and production counts. *)
let workload program input =
  let m = String.length input in
  let n =
    Aurochs.get_choice_count program *
    Aurochs.get_production_count program
  in
  m * n
(* Reverse function composition: [(f |< g) x] is [f (g x)]. *)
let ( |< ) f g = fun x -> f (g x)
(* [split u i] cuts [u] at offset [i] (clamped into [0, length u])
   and returns the (prefix, suffix) pair. *)
let split u i =
  let len = String.length u in
  let cut = max 0 (min len i) in
  if cut = 0 then ("", u)
  else if cut = len then (u, "")
  else (String.sub u 0 cut, String.sub u cut (len - cut))
(* [error text position] renders [text] as a <pre> block with the
   part from [position] onwards highlighted, pointing at the place
   where a parse error occurred. *)
let error text position =
  let before, after = split text position in
  let children =
    [ D before;
      span "marker" [D " "];
      span "highlight" [D after];
      D " " ]
  in
  U (N ("pre", [], children))
(* [convert_tree t] renders a generic Aurochs parse tree as nested
   <div>/<span> elements (CSS classes: "token", "node", "node-name",
   "attribute") suitable for display in the result page. *)
let convert_tree t =
  let rec loop = function
    | Token t -> div "token" [D t]
    | Node(name, attrs, child) ->
      let attrs' =
        List.map
          (fun (aname, aval) -> span "attribute" [D aname; D"="; D aval])
          attrs
      in
      match child with
      | [] ->
        (* leaf node: render as a self-closing tag *)
        div "node"
          [span "node-name"
            (List.concat[
              [D("<" ^ name)];
              attrs';
              [D("/>")]
            ])
          ]
      | _ ->
        (* inner node: opening tag, rendered children, closing tag *)
        div "node"
          (List.concat
            [
              [span "node-name"
                (List.concat[
                  [D("<" ^ name)];
                  attrs';
                  [D">"]
                ])];
              List.map loop child;
              [span "node-name" [D("</" ^ name ^">")]];
            ]
          )
  in
  loop t
(* [compute ~grammar ~input ?example ()] compiles [grammar], parses
   [input] with it, and returns a model carrying the rendered parse
   tree (or an error fragment).  When [example] is given, grammar and
   input are taken from the predefined example of that name instead.
   Size and workload limits are enforced first; every failure is
   mapped to an error fragment rather than raised to the caller. *)
let compute ~grammar ~input ?example () =
  let (grammar, input) =
    match example with
    | None -> (grammar, input)
    | Some name -> List.assoc name Examples.examples
  in
  let info = Buffer.create 256 in
  let model =
    {
      model0 with
      m_grammar = grammar;
      m_input = input;
      m_info = info;
    }
  in
  (* [err x] is [model] with the error fragment [x] as its output *)
  let err x = { model with m_output = div "error" x } in
  if String.length grammar > grammar_limit then
    err [paragraph [D"Grammar too big for on-line version"]]
  else if String.length input > input_limit then
    err [paragraph [D"Input too big for on-line version"]]
  else
    try
      let bin =
        try
          !Aurochs.compiler grammar
        with
        (* tag any compiler failure so the handlers below can tell
           grammar-compilation errors from input-parsing errors *)
        | x -> raise (Aurochs.Compile_error x)
      in
      let prog = Aurochs.program_of_binary bin in
      let w = workload prog input in
      bf info "Total %d productions\n" (Aurochs.get_production_count prog);
      bf info "Total %d choices\n" (Aurochs.get_choice_count prog);
      bf info "Total %d constructors\n" (Aurochs.get_constructor_count prog);
      bf info "Total %d attributes\n" (Aurochs.get_attribute_count prog);
      bf info "Workload %d units\n" w;
      if w > workload_limit then
        err [paragraph [D(sf "Workload of %d is too high for on-line version, limit is %d" w workload_limit)]]
      else
        let t = Aurochs.parse_generic prog input in
        let output = convert_tree t in
        { model with m_output = div "tree" [output] }
    with
    | Check.Error u -> err [paragraph [D(sf "Grammar error: %s" u)]]
    | Aurochs.Compile_error(Aurochs.Error u|Check.Error u) -> err [paragraph [D(sf "Error in grammar: %s" u)]]
    | Aurochs.Compile_error(Aurochs.Parse_error n) ->
      (* parse error inside the grammar text: highlight the position *)
      err
        [
          paragraph [D(sf "Parse error in grammar at %d" n)];
          error grammar n
        ]
    | Aurochs.Parse_error n ->
      (* parse error inside the input text: highlight the position *)
      err
        [
          paragraph [D(sf "Parse error in input at %d" n)];
          error input n
        ]
    | Aurochs.Compile_error x -> err [paragraph [D(sf "Error in grammar: %s" (Printexc.to_string x))]]
    | Aurochs.Error u -> err [paragraph [D(sf "Parse error in input: %s" u)]]
    | Canonify.Error u -> err [paragraph [D(sf "Can't canonify grammar: %s" u)]]
    | x -> err [paragraph [D(sf "Exception: %s" (Printexc.to_string x))]]
(* CGI entry point: a GET shows the default form, a POST re-renders
   the page with the result computed from the submitted form. *)
let _ =
  (*let host = remote_host in*)
  match invocation_method () with
  | GET -> view model0
  | POST ->
    let form = Form.parse_form_from_stream (Stream.of_channel stdin) in
    (* [gs key] fetches the string value of form field [key] *)
    let gs key = Form.get_value form Form.to_string key in
    let model =
      compute
        ~grammar:(gs "grammar")
        ~input:(gs "input")
        ?example:(Form.get_value form ~default:None (Form.some |< Form.to_string) "example")
        ()
    in
    view model
| null | https://raw.githubusercontent.com/berke/aurochs/637bdc0d4682772837f9e44112212e7f20ab96ff/examples/cgidemo/cgidemo.ml | ocaml | Cgi-bin
Examples
let host = remote_host in |
open Aurochs_pack
open Peg
open Cgi
open Xml
open Pffpsf
let error_html msg =
[
U(N("h1", [], [D"Error"]));
U(N("p", [], [D msg]))
]
let br = O("br",[])
let paragraph x = N("p",[],x)
let div taxon child = N("div", [{n="class"; v=S taxon}], child)
let span taxon child = N("span", [{n="class"; v=S taxon}], child)
let textarea ~name ~rows ~cols ?(content="") () =
U(N("textarea",
[{n="rows"; v=I rows};
{n="cols"; v=I cols};
{n="name"; v=S name}],
[D content]))
let submit ~name ~value () =
U(N("input",
[{n="type"; v=S"submit"};
{n="value"; v=S value};
{n="name"; v=S name}],
[]))
let make_html x =
html_xml
(
N("html",
[{n="xmlns"; v=S""};
{n="lang"; v=S"en"};
{n="xml:lang"; v=S"en"}],
[
N("head", [],
[
U(N("title", [], [D"The Aurochs parser generator"]));
O("meta",
[{n="http-equiv"; v=S"Content-Type"};
{n="content"; v=S"text/html; charset=utf-8"}]);
O("meta",
[{n="name"; v=S"keyword"};
{n="content"; v=S"Aurochs, parse expression grammar, PEG, parsing, Ocaml, Java, C, memoization"}]);
O("link",
[{n="rel"; v=S"stylesheet"};
{n="type"; v=S"text/css"};
{n="href"; v=S"/default.css"};
{n="media"; v=S"screen"}]);
O("link",
[{n="rel"; v=S"icon"};
{n="type"; v=S"image/png"};
{n="href"; v=S"/icon.png"}])
]
);
N("body", [],
(paragraph [D"Return to the"; N("a",[{n="href";v=S"/"}],[D"Aurochs homepage"])]) ::
x);
]
)
)
type 'a model =
{
m_grammar : string;
m_input : string;
m_output : 'a;
m_info : Buffer.t
}
let model0 =
let (_, (grammar, input)) = List.hd Examples.examples in
{
m_grammar = grammar;
m_input = input;
m_output = paragraph [D"Welcome to the Aurochs parser generator on-line demonstration! Please feel comfortable and try a few grammars."];
m_info = Buffer.create 8
}
let reply h = reply_html (fun oc -> output_xml oc (make_html h))
let view model =
reply
[
U(N("h1", [], [D"Aurochs parser"]));
N("form", [{n="action"; v=S"demo.cgi"}; {n="method";v=S"post"}],
[
paragraph [D"PEG grammar:"];
textarea ~name:"grammar" ~rows:25 ~cols:80 ~content:model.m_grammar ();
br;
paragraph [D"Input:"];
textarea ~name:"input" ~rows:5 ~cols:80 ~content:model.m_input ();
br;
submit ~name:"submit" ~value:"Parse" ();
div "examples"
[
paragraph [D"Some pre-defined examples:"];
N("ul", [],
List.map
(fun (name, _) -> N("li", [], [submit ~name:"example" ~value:name ()]))
Examples.examples)
]
]
);
model.m_output;
div "info" [U(N("pre", [], [D(Buffer.contents model.m_info)]))]
]
let grammar_limit = 1000
let input_limit = 1000
let workload_limit = 1000000
let workload program input =
let m = String.length input in
let n =
Aurochs.get_choice_count program *
Aurochs.get_production_count program
in
m * n
let ( |< ) f g x = f (g x)
let split u i =
let m = String.length u in
let i = max 0 (min m i) in
if i = m then
(u, "")
else
if i = 0 then
("", u)
else
(String.sub u 0 i, String.sub u i (m - i))
let error text position =
let (u, v) = split text position in
U(
N("pre",[],
[
D u;
span "marker" [D " "];
span "highlight" [D v];
D " "
]
)
)
let convert_tree t =
let rec loop = function
| Token t -> div "token" [D t]
| Node(name, attrs, child) ->
let attrs' =
List.map
(fun (aname, aval) -> span "attribute" [D aname; D"="; D aval])
attrs
in
match child with
| [] ->
div "node"
[span "node-name"
(List.concat[
[D("<" ^ name)];
attrs';
[D("/>")]
])
]
| _ ->
div "node"
(List.concat
[
[span "node-name"
(List.concat[
[D("<" ^ name)];
attrs';
[D">"]
])];
List.map loop child;
[span "node-name" [D("</" ^ name ^">")]];
]
)
in
loop t
let compute ~grammar ~input ?example () =
let (grammar, input) =
match example with
| None -> (grammar, input)
| Some name -> List.assoc name Examples.examples
in
let info = Buffer.create 256 in
let model =
{
model0 with
m_grammar = grammar;
m_input = input;
m_info = info;
}
in
let err x = { model with m_output = div "error" x } in
if String.length grammar > grammar_limit then
err [paragraph [D"Grammar too big for on-line version"]]
else if String.length input > input_limit then
err [paragraph [D"Input too big for on-line version"]]
else
try
let bin =
try
!Aurochs.compiler grammar
with
| x -> raise (Aurochs.Compile_error x)
in
let prog = Aurochs.program_of_binary bin in
let w = workload prog input in
bf info "Total %d productions\n" (Aurochs.get_production_count prog);
bf info "Total %d choices\n" (Aurochs.get_choice_count prog);
bf info "Total %d constructors\n" (Aurochs.get_constructor_count prog);
bf info "Total %d attributes\n" (Aurochs.get_attribute_count prog);
bf info "Workload %d units\n" w;
if w > workload_limit then
err [paragraph [D(sf "Workload of %d is too high for on-line version, limit is %d" w workload_limit)]]
else
let t = Aurochs.parse_generic prog input in
let output = convert_tree t in
{ model with m_output = div "tree" [output] }
with
| Check.Error u -> err [paragraph [D(sf "Grammar error: %s" u)]]
| Aurochs.Compile_error(Aurochs.Error u|Check.Error u) -> err [paragraph [D(sf "Error in grammar: %s" u)]]
| Aurochs.Compile_error(Aurochs.Parse_error n) ->
err
[
paragraph [D(sf "Parse error in grammar at %d" n)];
error grammar n
]
| Aurochs.Parse_error n ->
err
[
paragraph [D(sf "Parse error in input at %d" n)];
error input n
]
| Aurochs.Compile_error x -> err [paragraph [D(sf "Error in grammar: %s" (Printexc.to_string x))]]
| Aurochs.Error u -> err [paragraph [D(sf "Parse error in input: %s" u)]]
| Canonify.Error u -> err [paragraph [D(sf "Can't canonify grammar: %s" u)]]
| x -> err [paragraph [D(sf "Exception: %s" (Printexc.to_string x))]]
let _ =
match invocation_method () with
| GET -> view model0
| POST ->
let form = Form.parse_form_from_stream (Stream.of_channel stdin) in
let gs key = Form.get_value form Form.to_string key in
let model =
compute
~grammar:(gs "grammar")
~input:(gs "input")
?example:(Form.get_value form ~default:None (Form.some |< Form.to_string) "example")
()
in
view model
|
98050d2e93e8970b81a8e4861d63f4c48ab85fea12da358342d1208cf6ee0746 | siraben/mini-haskell | classy.hs | ------------------------------------------------------------------------
A mini Haskell compiler with typeclasses .
Originally written by , modified by
------------------------------------------------------------------------
Delete code below and uncomment the block to compile in GHC
{-
{-# LANGUAGE FlexibleInstances #-}
# LANGUAGE OverlappingInstances #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE CPP #
# LANGUAGE TupleSections #
# LANGUAGE NoMonomorphismRestriction #
module Compiler where
import Prelude (Char, Int, String, succ)
import Data.Char (chr, ord)
import qualified Prelude
a <= b = if a Prelude.<= b then True else False
(*) = (Prelude.*)
(+) = (Prelude.+)
(-) = (Prelude.-)
(/) = Prelude.div
(%) = Prelude.mod
class Eq a where { (==) :: a -> a -> Bool };
class Show a where { show :: a -> String };
class Functor f where { fmap :: (a -> b) -> f a -> f b };
class Applicative f where { pure :: a -> f a; (<*>) :: f (a -> b) -> f a -> f b };
class Monad m where { return :: a -> m a ; (>>=) :: m a -> (a -> m b) -> m b};
instance Eq Char where { (==) x y = if x Prelude.== y then True else False };
instance Eq Int where { (==) x y = if x Prelude.== y then True else False };
instance Show Char where { show = Prelude.show };
infixr 5 ++;
infixr 9 .;
infixl 4 <*> , <$> , <* , *>;
infixl 3 <|>, <||>;
infixr 0 $;
infixl 7 *;
infixl 6 + , -;
-}
infixr 5 :, ++;
infixr 9 .;
infixl 4 <*> , <$> , <* , *>;
infixl 3 <|>, <||>;
infixr 0 $;
infixl 7 *;
infixl 6 + , -;
(*) = (.*.);
(+) = (.+.);
(-) = (.-.);
(%) = (.%.);
(/) = (./.);
Delete code above and uncomment the block to compile in GHC
undefined = undefined;
($) f = f;
id x = x;
const x y = x;
flip f x y = f y x;
(&) x f = f x;
(<$>) = fmap;
liftA2 f x = (<*>) (fmap f x);
(*>) = liftA2 $ \x y -> y;
(<*) = liftA2 const;
data Bool = True | False;
data Maybe a = Nothing | Just a;
data Either a b = Left a | Right b;
data Error a = Error String | Okay a;
-- fpair = flip curry
fpair p f = case p of { (,) x y -> f x y };
fst p = case p of { (,) x y -> x };
snd p = case p of { (,) x y -> y };
first f p = fpair p $ \x y -> (f x, y);
second f p = fpair p $ \x y -> (x, f y);
ife a b c = case a of { True -> b ; False -> c };
not a = case a of { True -> False; False -> True };
(.) f g x = f (g x);
(||) f g = ife f True (ife g True False);
(&&) f g = ife f (ife g True False) False;
(<) a b = not (a == b) && (a <= b);
-- fold a list
-- flist :: [a] -> b -> (a -> [a] -> b) -> b
flst xs n c = case xs of { [] -> n; (:) h t -> c h t };
-- (==) on lists
lstEq xs ys = case xs of
{ [] -> flst ys True (\h t -> False)
; (:) x xt -> flst ys False (\y yt -> ife (x == y) (lstEq xt yt) False)
};
instance Eq a => Eq [a] where { (==) = lstEq };
(/=) x y = not (x == y);
Append two lists
(++) xs ys = flst xs ys (\x xt -> x:xt ++ ys);
-- maybe :: b -> (a -> b) -> Maybe a -> b
maybe n j m = case m of { Nothing -> n; Just x -> j x };
-- fold a maybe
-- fmaybe :: Maybe a -> b -> (a -> b) -> b
fmaybe m n j = case m of { Nothing -> n; Just x -> j x };
instance Show a => Show (Maybe a) where
{ show = maybe "Nothing" (\x -> "Just " ++ show x) };
instance Functor Maybe where
{ fmap f = maybe Nothing (Just . f) };
instance Applicative Maybe where
{ pure = Just ; (<*>) f y = maybe Nothing (`fmap` y) f};
instance Monad Maybe where
{ return = Just ; (>>=) ma f = maybe Nothing f ma };
fromMaybe a m = fmaybe m a id;
foldr c n l = flst l n (\h t -> c h (foldr c n t));
TODO : should have type
: : Monoid a = > ( a - > a - > a ) - > [ a ] - > a
-- Later, when we add foldables and traversables, it should be
: : ( Monoid m , Foldable t ) = > ( m - > m - > m ) - > t m - > m
' : : ( a - > a - > a ) - > [ a ] - > Maybe a
foldr1' c l =
flst
l
Nothing
(\h t ->
foldr
(\x m -> Just (fmaybe m x (c x)))
Nothing
l);
foldl f a bs = foldr (\b g x -> g (f x b)) id bs a;
foldl1 ' : : ( p - > p - > p ) - > [ p ] - > Maybe p
See above comments on the status of '
foldl1' f l = flst l Nothing (\x xs -> Just (foldl f x xs));
elem k = foldr (\x t -> ife (x == k) True t) False;
find f = foldr (\x t -> ife (f x) (Just x) t) Nothing;
concat = foldr (++) [];
itemize c = [c];
map f = foldr (\x xs -> f x : xs) [];
concatMap f l = concat (map f l);
instance Functor [] where { fmap = map };
instance Monad [] where { return = itemize ; (>>=) = flip concatMap };
instance Applicative [] where
{ pure = itemize
; (<*>) fs xs = fs >>= \f -> xs >>= \x -> return $ f x};
prependToAll s l = flst l [] (\x xs -> s : x : prependToAll s xs);
intersperse s l = flst l [] (\x xs -> x : prependToAll s xs);
-- Show a non-empty list
intercalate d = concat . intersperse d;
unwords = intercalate " ";
showList' l = "[" ++ intercalate "," (map show l) ++ "]";
showList l = case l of {
[] -> "[]";
(:) x xs -> showList' l
};
mapconcat f l = concat (map f l);
escapeC c = ife (c == '\n') "\\n"
(ife (c == '\\') "\\\\"
[c]);
showString s = "\"" ++ mapconcat escapeC s ++ "\"";
ifz n = ife (0 == n);
showInt' n = ifz n id (showInt' (n/10) . (:) (chr (48+(n%10))));
showInt n = ifz n ('0':) (showInt' n);
N.B. using show on Ints will make GHC fail to compile to due GHC
-- having multiple numeric types.
instance Show Int where { show n = showInt n "" };
instance Show String where { show = showString };
instance Show a => Show [a] where { show = showList };
any f = foldr (\x t -> ife (f x) True t) False;
-- lookupWith :: (a -> b -> Bool) -> a -> [(b, a)] -> Maybe a
lookupWith eq s =
foldr (\h t -> fpair h (\k v -> ife (eq s k) (Just v) t)) Nothing;
lstLookup = lookupWith (==);
reverse = foldl (flip (:)) [];
zipWith f xs ys =
case xs of
{ [] -> []
; (:) x xt ->
case ys of
{ [] -> []
; (:) y yt -> f x y : zipWith f xt yt
}
};
zip = zipWith (,);
-- Representation of types
-- type ctor. type var. type app.
data Type = TC String | TV String | TAp Type Type;
-- Representation of AST
data Ast
= R String -- raw combinator assembly
| V String -- variable
| A Ast Ast -- application
| L String Ast -- lambda abstraction
| Proof Pred; -- proof for typeclass instantiation?
-- * instance environment
-- * definitions, including those of instances
-- * Typed ASTs, ready for compilation, including ADTs and methods,
-- e.g. (==), (Eq a => a -> a -> Bool, select-==)
data Neat =
Neat
[(String, [Qual])]
[Either (String, Ast) (String, (Qual, [(String, Ast)]))]
[(String, (Qual, Ast))];
Parser combinators ( applicative style )
From the paper " Parsec : A practical parsing library "
-- Written in a contrived way for use with mini-Haskell (e.g. no
-- nested pattern matching)
-- Position is a line, column
data Pos = Pos Int Int;
data State = State String Pos;
data Parsec a = Parsec (State -> Consumed a);
data Msg = Msg Pos String [String];
data Reply a = Err Msg
| Ok a State Msg;
data Consumed a = Empty (Reply a)
| Consumed (Reply a);
parens s = '(':(s ++ ")");
showPos p = case p of { Pos r c -> unwords ["row:" , show r
, "col: " , show c]};
instance Show Pos where { show = showPos };
showState s = case s of { State s p -> unwords [show s, parens (show p)]};
instance Show State where { show = showState };
showMsg m = case m of { >
-- unwords ["Msg", show pos, show s1, show s2]};
-- instance Show Msg where
-- { show = showMsg };
showReply r = case r of { unwords [ " Err " , show m ]
-- ; Ok a s m -> unwords ["Ok", show a, show s, show m]};
-- instance Show a => Show (Reply a) where { show = showReply };
-- showConsumed c = case c of { Empty m -> unwords ["Empty", show m]
-- ; Consumed m -> unwords ["Consumed", show m] };
-- instance Show a => Show (Consumed a) where
-- { show = showConsumed };
-- fromString :: String -> State
fromString s = State s (Pos 1 1);
parsec : : Parsec a - > State - > Consumed a
parsec p = case p of { Parsec f -> f };
parse : : Parsec a - > String - > Consumed a
parse p s = parsec p (fromString s);
bind : : Parsec a - > ( a - > Parsec b ) - > Parsec b
bind p f = Parsec $
\state -> case parsec p state of
{ Empty m ->
case m of
{ Err msg -> Empty (Err msg)
; Ok x state' msg -> parsec (f x) state' }
; Consumed m ->
Consumed
(case m of
{ Err msg -> Err msg
; Ok x state' msg ->
case parsec (f x) state' of
{ Empty m -> m
; Consumed m -> m}})};
parsecpure : : a - > Parsec a
parsecpure x = Parsec $ \state ->
case state of
{ State s pos -> Empty (Ok x state (Msg pos [] [])) };
instance Monad Parsec where
{ return = parsecpure
; (>>=) = bind };
instance Functor Parsec where
{ fmap f x = x >>= \x -> parsecpure (f x) };
instance Applicative Parsec where
{ pure = parsecpure
; (<*>) x y = x >>= \f -> y >>= \x -> parsecpure (f x) };
-- nextPos :: Pos -> Char -> Pos
nextPos p c = case p of
{ Pos line col ->
ife (c == '\n') (Pos (line + 1) 0) (Pos line (col + 1))};
sat : : ( Bool ) - > Parsec
sat test = Parsec $ \state ->
case state of
{ State input pos ->
case input of
{ [] -> Empty (Err (Msg pos "end of input" []))
; (:) c cs ->
ife (test c)
(let { newPos = nextPos pos c
; newState = State cs newPos }
in Consumed (Ok c newState
(Msg pos [] [])))
(Empty (Err (Msg pos [c] [])))}};
mergeMsg m1 m2 = case m1 of
{ Msg pos inp exp1 ->
case m2 of
{ Msg _ _ exp2 -> Msg pos inp (exp1 ++ exp2)}};
mergeOk x inp msg1 msg2 = Empty (Ok x inp (mergeMsg msg1 msg2));
mergeError msg1 msg2 = Empty (Err (mergeMsg msg1 msg2));
( < | > ) : : Parsec a - > Parsec a - > Parsec a
Given two parsers p , q , run p on the input . If it fails , then
continue by is not backtracked before running
-- q.
-- p <|> q ::= <p> | <q>
(<|>) p q = Parsec $
\state ->
case parsec p state of
{ Empty m ->
case m of
{ Err msg1 ->
case parsec q state of
{ Empty m ->
case m of
{ Err msg2 ->
mergeError msg1 msg2
; Ok x inp msg2 ->
mergeOk x inp msg1 msg2 }
; Consumed m -> Consumed m }
; Ok x inp msg1 ->
case parsec q state of
{ Empty m ->
case m of
{ Err msg2 ->
mergeOk x inp msg1 msg2
; Ok _ _ msg2 ->
mergeOk x inp msg1 msg2 }
; Consumed m -> Consumed m }}
; Consumed m -> Consumed m };
-- Run parser p, if it consumed input and failed, pretend like it
-- didn't consume anything.
try p = Parsec $ \state -> case parsec p state of
{ Empty m -> Empty m
; Consumed m ->
case m of
{ Err msg ->
Empty (Err msg)
; Ok x st msg ->
Consumed (Ok x st msg)}};
(<||>) p q = try p <|> q;
-- many p ::= <p>*
many p = liftA2 (:) p (many p) <||> pure [];
-- many1 p ::= <p>+
many1 p = liftA2 (:) p (many p);
expect m exp = case m of { Msg pos inp _ -> Msg pos inp [exp] };
( < ? > ) : : Parsec a - > String - > Parsec a
(<?>) p exp = Parsec $ \state ->
case parsec p state of
{ Empty m ->
Empty
(case m of
{ Err msg ->
Err (expect msg exp)
; Ok x st msg ->
Ok x st (expect msg exp)})
; Consumed m -> Consumed m };
item = sat (const True);
-- sepBy1 p sep ::= <p> (<sep> <p>)*
sepBy1 p sep = liftA2 (:) p (many (sep *> p));
sepBy p sep = sepBy1 p sep <||> pure [];
char c = sat (== c) <?> show c;
string s =
case s of
{ [] -> pure []
; (:) c cs -> char c *> string cs *> pure s};
-- between x y p ::= <x> <p> <y>
between x y p = x *> (p <* y);
Parse line comments
-- com ::= '-' '-' <char c: c != '\n'>* '\n'
com = char '-' *> between (char '-') (char '\n') (many (sat (/= '\n')));
-- Block comments
-- notComEnd ::= <char c: c != '-'> | '-' <char c: c != '}'>
notComEnd = (sat (/= '-') <|> (char '-' *> sat (/= '}'))) *> pure [];
-- blockcom ::= "{-" (<blockcom> | <notComEnd>) "-}"
blockcom = let { content = many (blockcom <||> notComEnd) }
in between (string "{-") (string "-}") content *> pure [];
Parse whitespace
sp =
many
((pure <$> sat (\c -> (c == ' ') || (c == '\n'))) <|> com <|> blockcom);
-- Tokenize a parser, producing a parser that consumes trailing
-- whitespace.
-- tok p ::= <p> <sp>
tok p = p <* sp;
-- Parse a character (tokenized)
tokc = tok . char;
-- wantWith :: (a -> Bool) -> String -> Parser a -> Parser a
-- TODO: Consider backtracking the input on failure (similar to sat)?
wantWith pred str p = Parsec $ \s ->
case parsec p s of
{ Empty m ->
Empty (case m of
{ Err m -> Err m
; Ok a state' m -> ife (pred a)
(Ok a state' m)
(Err (expect m str)) })
; Consumed m ->
Consumed (case m of
{ Err m -> Err m
; Ok a state' m ->
ife (pred a)
(Ok a state' m)
(Err (expect m str))})
};
want : : Eq a = > a - > a - > Parser a
want f s = wantWith (== s) s f;
-- paren a ::= '(' <a> ')'
paren = between (tokc '(') (tokc ')');
-- lower ::= 'a' | 'b' | 'c' ... 'z' | '_'
lower = sat (\x -> ((x <= 'z') && ('a' <= x)) || (x == '_')) <?> "lower";
-- upper ::= 'A' | 'B' | 'C' ... 'Z'
upper = sat (\x -> (x <= 'Z') && ('A' <= x)) <?> "upper";
-- digit ::= '0' | '1' | '2' ... '9'
digit = sat (\x -> (x <= '9') && ('0' <= x)) <?> "digit";
-- alpha ::= <lower> | <upper>
alpha = (lower <|> upper) <?> "alpha";
-- varLex ::= <lower> (<alpha> | <digit> | '\'')*
varLex = liftA2 (:) lower (many (alpha <|> digit <|> char '\''));
Constructor identifier
conId = tok (liftA2 (:) upper (many (alpha <|> digit <|> char '\'')));
keyword s = tok (want varLex s);
varId = tok (wantWith (\s -> not ((s == "of") || (s == "where"))) "variable" varLex);
-- Operator characters
opLex = many1 (sat (`elem` ":!#$%&*+./<=>?@\\^|-~"));
Operators
op = tok opLex <|> between (tokc '`') (tokc '`') varId;
var = varId <|> paren (tok opLex);
anyOne = pure <$> tok (sat (const True));
-- Lambda
-- lam r ::= '\\' <varId>+ "->" <r>
lam r =
tokc '\\' *>
liftA2 (flip (foldr L)) (many1 varId) (char '-' *> (tokc '>' *> r));
listify = fmap (foldr (\h t -> A (A (V ":") h) t) (V "[]"));
-- Escape characters
escChar = char '\\' *> (sat (`elem` "'\"\\") <|> (const '\n' <$> char 'n'));
litOne delim = (\c -> R ('#' : pure c)) <$> (escChar <||> sat (/= delim));
Integer literals
litInt = R . ('(' :) . (++ ")") <$> tok (many1 digit);
-- String literals
Notice that we do not consume whitespace after parsing the first " ,
-- hence the use of char.
litStr = listify (between (char '"') (tokc '"') (many (litOne '"')));
-- Character literals
litChar = between (char '\'') (tokc '\'') (litOne '\'');
lit = litStr <|> litChar <|> litInt;
r : : = ' [ ' < sepBy r ' , ' > ' ] '
sqLst r = listify (between (tokc '[') (tokc ']') (sepBy r (tokc ',')));
-- alt r ::= ((<conId> | '(' (':' | ',') ')') | "[]") <varId>* "->" r
alt r =
(,) <$>
(conId <||> (pure <$> paren (tokc ':' <|> tokc ',')) <||>
liftA2 (:) (tokc '[') (pure <$> tokc ']')) <*>
liftA2 (flip (foldr L)) (many varId) (char '-' *> (tokc '>' *> r));
braceSep f : : = ' { ' < sepBy f ' ; ' > ' } '
braceSep f = between (tokc '{') (tokc '}') (sepBy f (tokc ';'));
-- alts r ::= <braceSep <alt r>>
alts r = braceSep (alt r);
cas' x as = foldl A (V (concatMap (('|' :) . fst) as)) (x : map snd as);
-- Case expressions
-- cas r ::= "case" r "of" <alts r>
cas r = liftA2 cas' (between (keyword "case") (keyword "of") r) (alts r);
-- thenComma r ::= ',' <r>
thenComma r =
tokc ',' *> (((\x y -> A (A (V ",") y) x) <$> r) <||> pure (A (V ",")));
parenExpr r : : = < r > ( < op > | < r > )
parenExpr r =
liftA2
(&)
r
(((\v a -> A (V v) a) <$> op) <||> thenComma r <||> pure id);
-- rightSect r ::= (<op> | ',') <r>
rightSect r =
((\v a -> A (A (V "\\C") (V v)) a) <$> (op <|> (pure <$> tokc ','))) <*> r;
-- Sections
-- section ::= '(' (<parenExpr r> | <rightSect r>) ')'
section r = paren (parenExpr r <|> rightSect r);
-- isFree :: String -> Ast -> Bool
-- Checks if a string v occurs free in expr.
isFree v expr = case expr of
{ R s -> False
; V s -> s == v
; A x y -> isFree v x || isFree v y
; L w t -> (v /= w) && isFree v t
; Proof _ -> False
};
maybeFix s x = ife (isFree s x) (A (V "\\Y") (L s x)) x;
-- Definitions
-- def r ::= <var> <varId>* '=' <r>
def r =
liftA2 (,) var (flip (foldr L) <$> many varId <*> (tokc '=' *> r));
Convert a list of let bindings and the let body into a single AST .
addLets ls x =
foldr (\p t -> fpair p (\name def -> A (L name t) $ maybeFix name def)) x ls;
let r : : = " let " ' { ' < sepBy def r > ' } ' " in " < r >
letin r =
liftA2
addLets
(between (keyword "let") (keyword "in") (braceSep (def r)))
r;
atom r : : = < letin r > | < sqLst r > | < cas r > | < lam r > | < section r >
-- | '(' ',' ')' | (<conId> | <var>) | <lit>
atom r =
letin r <|>
sqLst r <||>
cas r <|>
lam r <||>
section r <||>
(paren (tokc ',') *> pure (V ",")) <||>
(V <$> (conId <|> var)) <||>
lit;
aexp r = fromMaybe undefined . foldl1' A <$> many1 (atom r);
fix f = f (fix f);
Parse infix operators
infix infixl infixr
data Assoc = NAssoc | LAssoc | RAssoc;
instance Show Assoc where
{ show a =
case a of
{ NAssoc -> "NAssoc"
; LAssoc -> "LAssoc"
; RAssoc -> "RAssoc" } };
eqAssoc x y = case x of
{ NAssoc -> case y of { NAssoc -> True ; LAssoc -> False ; RAssoc -> False }
; LAssoc -> case y of { NAssoc -> False ; LAssoc -> True ; RAssoc -> False }
; RAssoc -> case y of { NAssoc -> False ; LAssoc -> False ; RAssoc -> True }
};
instance Eq Assoc where { (==) = eqAssoc };
precOf s precTab = fmaybe (lstLookup s precTab) 5 fst;
assocOf s precTab = fmaybe (lstLookup s precTab) LAssoc snd;
opWithPrec precTab n = wantWith (\s -> n == precOf s precTab) "precTab" op;
-- opFold'
: : [ ( String , ( a , Assoc ) ) ] - > Ast - > [ ( String , Ast ) ] - > Maybe Ast
opFold' precTab e xs =
case xs of
{ [] -> Just e
; (:) x xt ->
case find
(\y ->
not (assocOf (fst x) precTab == assocOf (fst y) precTab))
xt of
{ Nothing ->
case assocOf (fst x) precTab of
{ NAssoc ->
case xt of
{ [] -> Just $ fpair x (\op y -> A (A (V op) e) y)
; (:) y yt -> Nothing }
; LAssoc -> Just $ foldl (\a b -> fpair b (\op y -> A (A (V op) a) y)) e xs
; RAssoc ->
Just $ foldr (\a b -> fpair a (\op y e -> A (A (V op) e) (b y))) id xs e }
; Just y -> Nothing }};
expr precTab =
fix $ \r n ->
ife
(n <= 9)
((fromMaybe undefined .) . opFold' precTab <$> r (succ n) <*>
many (liftA2 (,) (opWithPrec precTab n) (r (succ n))))
(aexp (r 0));
data Constr = Constr String [Type];
data Pred = Pred String Type;
data Qual = Qual [Pred] Type;
data Top = Adt Type [Constr]
| Def (String, Ast)
| Class String Type [(String, Type)]
| Inst String Qual [(String, Ast)];
-- arrow type constructor
arr a = TAp (TAp (TC "->") a);
Parse type applications
bType r = fromMaybe undefined . foldl1' TAp <$> many1 r;
Parse types
_type r = fromMaybe undefined . foldr1' arr <$> sepBy (bType r) (tok (want opLex "->"));
typeConstant =
(\s -> ife (s == "String") (TAp (TC "[]") (TC "Int")) (TC s)) <$> conId;
aType =
paren
(liftA2
(&)
(_type aType)
((tokc ',' *> ((\a b -> TAp (TAp (TC ",") b) a) <$> _type aType)) <||>
pure id)) <||>
typeConstant <||>
(TV <$> varId) <||>
(tokc '[' *>
(tokc ']' *> pure (TC "[]") <||>
TAp (TC "[]") <$> (_type aType <* tokc ']')));
simpleType c vs = foldl TAp (TC c) (map TV vs);
-- Data declarations
-- TODO: Add type, newtype declarations, deriving?
adt : : = " data " ' = ' < conId > < varId > * < sepBy ( < conId > | < aType > * ) ' | ' >
adt =
liftA2
Adt
(between (keyword "data") (tokc '=') (liftA2 simpleType conId (many varId)))
(sepBy (liftA2 Constr conId (many aType)) (tokc '|'));
-- Precedence
-- prec ::= <digit> <sp>
prec = (\c -> ord c - ord '0') <$> tok digit;
fixityList a n = fmap (, (n, a));
-- Fixity declaration
fixityDecl " kw " a : : = " kw " < prec > < sepBy < op > ' , ' > ' ; '
fixityDecl kw a =
between
(keyword kw)
(tokc ';')
(liftA2 (fixityList a) prec (sepBy op (tokc ',')));
fixity =
fixityDecl "infixl" LAssoc <||>
fixityDecl "infixr" RAssoc <||>
fixityDecl "infix" NAssoc;
noQual = Qual [];
genDecl : : = " : : " < _ type aType >
genDecl = liftA2 (,) var (char ':' *> tokc ':' *> _type aType);
-- Class declarations
-- classDecl ::= "class" <conId> <varId> "where" <braceSep genDecl>
classDecl =
keyword "class" *>
(Class <$> conId <*> (TV <$> varId) <*> (keyword "where" *> braceSep genDecl));
inst : : = < _ type aType >
inst = _type aType;
-- Instance declarations
-- instDecl r ::= "instance" (<conId> <inst> "=>")? <conId> <inst>
-- "where" <braceSep <def r>>
instDecl r =
keyword "instance" *>
((\ps cl ty defs -> Inst cl (Qual ps ty) defs) <$>
(liftA2 ((pure .) . Pred) conId (inst <* (char '=' *> tokc '>')) <||>
pure []) <*>
conId <*>
inst <*>
(keyword "where" *> braceSep (def r)));
-- Top level declarations
tops : : = < sepBy ( < adt > | < def > | < classDecl > | < instDecl > ) ' ; ' >
tops precTab =
sepBy
(adt <||> Def <$> def (expr precTab 0) <||> classDecl <||>
instDecl (expr precTab 0))
(tokc ';');
-- A program consists of whitespace, followed by fixity declarations,
-- then top level declarations
-- program' ::= <sp> <fixity>* <tops>
program' = sp *> (concat <$> many fixity) >>= tops;
eqPre = case parse program' $
"class Eq a where { (==) :: a -> a -> Bool };\n" ++
"class Show a where { show :: a -> String };\n" ++
"class Functor f where { fmap :: (a -> b) -> f a -> f b };\n" ++
"class Applicative f where { pure :: a -> f a; (<*>) :: f (a -> b) -> f a -> f b };\n" ++
"class Monad m where { return :: a -> m a ; (>>=) :: m a -> (a -> m b) -> m b};\n" ++
"instance Eq Int where { (==) = intEq };\n" of
{ Empty m ->
case m of
-- TODO: replace with show msg
{ Err msg -> undefined
; Ok l _ _ -> l}
; Consumed m ->
case m of
-- TODO: replace with show msg
{ Err msg -> undefined
; Ok l _ _ -> l}
};
program =
((eqPre ++
-- data [] a = [] | (:) a ([] a)
[ Adt
(TAp (TC "[]") (TV "a"))
[Constr "[]" [], Constr ":" [TV "a", TAp (TC "[]") (TV "a")]]
-- data (,) a b = (,) a b
, Adt (TAp (TAp (TC ",") (TV "a")) (TV "b")) [Constr "," [TV "a", TV "b"]]
]) ++) <$>
program';
-- Primitives
-- prims :: [(String, (Qual, Ast))]
prims =
let { ii = arr (TC "Int") (TC "Int")
; iii = arr (TC "Int") ii
; bin s = R $ "``BT`T" ++ s }
in map (second (first noQual)) $
[ ("\\Y", (arr (arr (TV "a") (TV "a")) (TV "a"), R "Y"))
, ( "\\C"
, ( arr
(arr (TV "a") (arr (TV "b") (TV "c")))
(arr (TV "b") (arr (TV "a") (TV "c")))
, R "C"))
, ("intEq", (arr (TC "Int") (arr (TC "Int") (TC "Bool")), bin "="))
, ("<=", (arr (TC "Int") (arr (TC "Int") (TC "Bool")), bin "L"))
, ("chr", (ii, R "I"))
, ("ord", (ii, R "I"))
, ("succ", (ii, R "`T`(1)+"))
] ++
map (\s -> ('.':s ++ ".", (iii, bin s))) ["+", "-", "*", "/", "%"];
-- Total variant
rank ds v =
let { loop l v c =
case l of
{ [] -> Nothing
; (:) x xs ->
ife (v == fst x) (Just ('[' : showInt c "]")) (loop xs v (succ c)) } }
in loop ds v 0;
-- showC :: [(String, b)] -> Ast -> String
-- Total version of showC
showC ds t = case t of
{ R s -> Just s
; V v -> rank ds v
; A x y -> liftA2 (\a b -> '`':a ++ b) (showC ds x) (showC ds y)
; L w t -> Nothing
; Proof _ -> Nothing
};
encoding of lambda calculus terms
-- z s lift ast abs. app.
data LC = Ze | Su LC | Pass Ast | La LC | App LC LC;
Convert the AST into a nameless representation
-- debruijn :: [String] -> Ast -> LC
debruijn n e = case e of
{ R s -> pure $ Pass (R s)
; V v -> pure $ foldr (\h m -> ife (h == v) Ze (Su m)) (Pass (V v)) n
; A x y -> App <$> debruijn n x <*> debruijn n y
; L s t -> La <$> debruijn (s:n) t
; Proof _ -> Nothing
};
See Kiselyov 's paper - " Lambda to SKI , semantically " , pages 10 - 11
-- V C N W
data Sem = Defer | Closed Ast | Need Sem | Weak Sem;
-- ($$) algorithm
-- ($$), case Defer
-- Parameters: r == self
ldef r y = case y of
{ -- (V, V) -> N (C S.(S $! I $! I))
Defer -> Need (Closed (A (A (R "S") (R "I")) (R "I")))
-- (V, C d) -> N (C S.(kC $! kI $! d))
; Closed d -> Need (Closed (A (R "T") d))
( V , N e ) - > N ( C S.(kS $ ! ) $ $ e )
; Need e -> Need (r (Closed (A (R "S") (R "I"))) e)
( V , W e ) - > N ( C ( S.(kS $ ! ) ) $ $ e )
; Weak e -> Need (r (Closed (R "T")) e)
};
-- ($$), case Closed
d is the argument to Closed ( i.e. r ( Closed d ) y = ... )
lclo r d y = case y of
{ -- (C d, V) -> N (C d)
Defer -> Need (Closed d)
-- (C d1, C d2) -> C (S.(d1 $! d2))
; Closed dd -> Closed (A d dd)
-- (C d, N e) -> N (C S.(kB $! d) $$ e)
; Need e -> Need (r (Closed (A (R "B") d)) e)
-- (C d, W e) -> W (C d $$ e)
; Weak e -> Weak (r (Closed d) e)
};
-- ($$), case Need
-- e is the argument to Need (i.e. lnee r (Need e) y = ...)
lnee r e y = case y of
{ -- (N e, V) -> N (C S.kS $$ e $$ C S.kI)
Defer -> Need (r (r (Closed (R "S")) e) (Closed (R "I")))
( N e , C d ) - > N ( C S.(kC $ ! kC $ ! d ) $ $ e )
; Closed d -> Need (r (Closed (A (R "R") d)) e)
( N e1 , N e2 ) - > N ( ( C S.kS ) $ $ e1 $ $ e2 )
; Need ee -> Need (r (r (Closed (R "S")) e) ee)
( N e1 , W e2 ) - > N ( ( C S.kC ) $ $ e1 $ $ e2 )
; Weak ee -> Need (r (r (Closed (R "C")) e) ee)
};
-- ($$), case Weak
-- e is the argument to Weak (i.e. lweak r (Weak e) y = ...)
lwea r e y = case y of
{ -- (W e, V) -> N e
Defer -> Need e
-- (W e, C d) -> W (e $$ C d)
; Closed d -> Weak (r e (Closed d))
( W e1 , N e2 ) - > N ( ( C S.kB ) $ $ e1 $ $ e2 )
; Need ee -> Need (r (r (Closed (R "B")) e) ee)
( W e1 , W e2 ) - > W ( e1 $ $ e2 )
; Weak ee -> Weak (r e ee)
};
-- ($$), the full thing.
babsa x y = case x of
{ Defer -> ldef babsa y
; Closed d -> lclo babsa d y
; Need e -> lnee babsa e y
; Weak e -> lwea babsa e y
};
Full bracket abstraction algorithm , from De Bruijn to combinators
-- babs :: LC -> Sem
babs t = case t of
{ -- let z : (a*y, a) repr = V
Ze -> Defer
-- let s: (b*y, a) repr -> (_*(b*y), a) repr = fun e -> W e
-- Looks like this version recurs on e.
; Su e -> Weak (babs e)
A lifted AST is closed .
; Pass s -> Closed s
See " lam " function on page 10 of Kiselyov
-- Lambda abstraction
; La t -> case babs t of
{ -- V -> C S.kI
Defer -> Closed (R "I")
-- C d -> C S.(kK $! d)
-- Remark: d is a closed body of a lambda abstraction, so the
-- variable being abstracted over is not used and thus we can
-- use the K combinator
; Closed d -> Closed (A (R "K") d)
-- N e -> e
; Need e -> e
W e - > ( C S.kK ) $ $ e
; Weak e -> babsa (Closed (R "K")) e
}
-- Application
; App x y -> babsa (babs x) (babs y)
};
-- Convert an AST into debruijn form, then perform bracket abstraction,
-- return if and only if we have a closed form.
-- nolam :: Ast -> Maybe Ast
nolam x = debruijn [] x >>= \x ->
case babs x of
{ Defer -> Nothing
; Closed d -> Just d
; Need e -> Nothing
; Weak e -> Nothing
};
dump tab ds =
case ds of
{ [] -> return []
; (:) h t ->
nolam (snd h) >>= \a ->
showC tab a >>= \b ->
dump tab t >>= \c ->
return (b ++ (';' : c)) };
asm ds = dump ds ds;
-- Apply substitutions to a tree
apply sub t = case t of
{ TC v -> t
-- Lookup v in the substitutions, if not found, replace it with t
; TV v -> fromMaybe t (lstLookup v sub)
; TAp a b -> TAp (apply sub a) (apply sub b)
};
Combine two substitution lists while applying the substitutions in
the first .
(@@) s1 s2 = map (second (apply s1)) s2 ++ s1;
-- Occurs check
-- occurs :: String -> Type -> Bool
occurs s t = case t of
{ TC v -> False
; TV v -> s == v
; TAp a b -> occurs s a || occurs s b
};
-- Bind the type variable s to the type t
varBind s t = case t of
{ -- Just (pure (s, t)) is clearer
TC v -> pure (pure (s, t))
-- Binding a variable with another variable
; TV v -> ife (v == s) (pure []) (pure (pure (s, t)))
-- Infinite types not allowed
; TAp a b -> ife (occurs s t) Nothing (pure (pure (s, t)))
};
Most general unifier . Given two type trees , possibly return the
-- assignments that make them equal.
-- We pass unify as an argument to achieve mutual recursion.
mgu unify t u = case t of
{ TC a -> case u of
{ TC b -> ife (a == b) (pure []) Nothing
; TV b -> varBind b t
; TAp a b -> Nothing
}
; TV a -> varBind a u
; TAp a b -> case u of
{ TC b -> Nothing
; TV b -> varBind b t
; TAp c d -> unify b d (mgu unify a c)
}
};
unify a b =
maybe Nothing (\s -> fmap (@@ s) (mgu unify (apply s a) (apply s b)));
-- instantiate' ::
-- Type -> Int -> [(String, Type)] -> ((Type, Int), [(String, Type)])
instantiate' t n tab = case t of
{ TC s -> ((t, n), tab)
; TV s -> case lstLookup s tab of
{ Nothing -> let { va = TV (s ++ '_':showInt n "") }
in ((va, n + 1), (s, va):tab)
; Just v -> ((v, n), tab)
}
; TAp x y ->
fpair (instantiate' x n tab) $ \tn1 tab1 ->
fpair tn1 $ \t1 n1 ->
fpair (instantiate' y n1 tab1) $ \tn2 tab2 ->
fpair tn2 $ \t2 n2 -> ((TAp t1 t2, n2), tab2)
};
instantiatePred pred xyz =
case pred of
{ Pred s t ->
fpair xyz $ \xy tab ->
fpair xy $ \out n ->
first (first ((: out) . Pred s)) (instantiate' t n tab) };
-- instantiate :: Qual -> Int -> (Qual, Int)
instantiate qt n =
case qt of
{ Qual ps t ->
fpair (foldr instantiatePred (([], n), []) ps) $ \xy tab ->
fpair xy $ \ps1 n1 -> first (Qual ps1) (fst (instantiate' t n1 tab)) };
-- type SymTab = [(String, (Qual, Ast))];
-- type Subst = [(String, Type)];
-- infer' ::
-- [(String, (Qual, b))]
-- -> [(String, Type)]
-- -> Ast
-- -> (Maybe [(String, Type)], Int)
-- -> ((Type, Ast), (Maybe [(String, Type)], Int))
-- Infer the type of `ast` given top-level bindings `typed` and local
-- (lambda-bound) bindings `loc`, threading state csn = (Maybe
-- substitution, fresh-variable counter). Returns the inferred type
-- and an AST annotated with Proof placeholders for class constraints.
infer' typed loc ast csn =
  fpair csn $ \cs n ->
  let { va = TV ('_' : showInt n "") }
  in case ast of
  { -- Raw code is treated as Int
    R s -> ((TC "Int", ast), csn)
  ; V s ->
      -- Locals win over top-level names; top-level qualified types are
      -- instantiated fresh and each predicate becomes a Proof argument.
      fmaybe
        (lstLookup s loc)
        (fmaybe (lstLookup s typed) undefined $ \ta ->
         fpair (instantiate (fst ta) n) $ \q n1 ->
         case q of {
           Qual preds ty ->
             ((ty, foldl A ast (map Proof preds)), (cs, n1))
         })
        (flip (,) csn . flip (,) ast)
  ; A x y ->
      -- Application: infer both sides, then unify tx with ty -> va.
      fpair (infer' typed loc x (cs, n + 1)) $ \tax csn1 ->
      fpair tax $ \tx ax ->
      fpair (infer' typed loc y csn1) $ \tay csn2 ->
      fpair tay $ \ty ay ->
      ((va, A ax ay), first (unify tx (arr ty va)) csn2)
  -- Lambda abstraction. Infer the body of the lambda with
  -- the substitution list extended with s := <newvar>
  ; L s x ->
      first
        (\ta -> fpair ta $ \t a -> (arr va t, L s a))
        (infer' typed ((s, va) : loc) x (cs, n + 1))
  ; Proof _ -> undefined };
-- Apply f to the type component of a predicate.
onType f pred = case pred of { Pred s t -> Pred s (f t) };
-- typeEq :: Type -> Type -> Bool
-- Structural equality on types (no nested patterns in this dialect,
-- hence the case ladder).
typeEq t u = case t of
{ TC s -> case u of
  { TC t -> t == s
  ; TV _ -> False
  ; TAp _ _ -> False
  }
; TV s -> case u of
  { TC _ -> False
  ; TV t -> t == s
  ; TAp _ _ -> False
  }
; TAp a b -> case u of
  { TC _ -> False
  ; TV _ -> False
  ; TAp c d -> typeEq a c && typeEq b d
  }
};
instance Eq Type where { (==) = typeEq };
-- Predicates are equal when class name and type both match.
predEq p q = case p of { Pred s a -> case q of { Pred t b ->
  (s == t) && (a == b) }};
instance Eq Pred where { (==) = predEq };
-- Apply a substitution to the type inside a predicate.
predApply sub = onType (apply sub);
-- True iff f holds for every list element.
all f = foldr ((&&) . f) True;
-- Keep only the elements satisfying f.
filter f = foldr (\x xs -> ife (f x) (x:xs) xs) [];
-- Elements of xs that also occur in ys (membership via (==)).
intersect xs ys = filter (\x -> fmaybe (find (== x) ys) False (const True)) xs;
-- Merge two substitutions; succeeds only when they agree on every
-- variable bound by both (otherwise Nothing).
merge s1 s2 =
  ife
  (all (\v -> apply s1 (TV v) == apply s2 (TV v)) $
   map fst s1 `intersect` map fst s2)
  (Just $ s1 ++ s2)
  Nothing;
-- One-way matching: find a substitution over the variables of h only
-- such that applying it to h yields t. Unlike mgu, variables in t are
-- never bound.
match h t = case h of
{ TC a -> case t of
  { TC b -> ife (a == b) (return []) Nothing
  ; TV b -> Nothing
  ; TAp a b -> Nothing
  }
; TV a -> return [(a, t)]
; TAp a b -> case t of
  { TC b -> Nothing
  ; TV b -> Nothing
  ; TAp c d -> match a c >>= \ac ->
    match b d >>= \bd ->
    -- Componentwise matches must agree on shared variables.
    merge ac bd}};
-- Match a head type h against the type inside predicate p.
matchPred h p = case p of { Pred _ t -> match h t };
-- TODO: Add support for printing of infix type operators.
-- Render a type; applications are fully parenthesized and prefix.
showType t = case t of
{ TC s -> s
; TV s -> s
; TAp a b -> concat ["(", showType a, " ", showType b, ")"]
};
instance Show Type where { show = showType };
-- Render a predicate as e.g. "Eq a => "; also used to name instance
-- dictionaries, so the exact format matters.
showPred p = case p of { Pred s t -> s ++ (' ':show t) ++ " => "};
-- Search the instance list for one whose head matches predicate p.
-- r is the recursive prover (findProof partially applied), qn is the
-- (pending-constraints, counter) state.
findInst r qn p insts =
  case insts of
  { [] ->
      -- No instance matched: record p as a pending constraint bound to
      -- a fresh dictionary variable "*n" and return that variable.
      fpair qn $ \q n ->
      let { v = '*' : showInt n "" }
      in (((p, v) : q, n + 1), V v)
  ; (:) i is ->
      case i of {
        Qual ps h ->
          case matchPred h p of
          { Nothing -> findInst r qn p is
          ; Just u ->
              -- Matched: start from the dictionary named after the
              -- instance head and apply proofs of the instance's own
              -- context predicates (under the matching substitution u).
              foldl
                (\qnt p ->
                   fpair qnt $ \qn1 t -> second (A t) (r (predApply u p) qn1))
                ( qn
                , V (case p of
                      { Pred s _ -> showPred $ Pred s h }))
                ps }}};
-- Produce a dictionary term proving `pred`: either an assumption
-- already present in ps (a dictionary variable), or the result of
-- searching the instance environment `is` for the predicate's class.
findProof is pred psn = fpair psn $ \ps n -> case lookupWith (==) pred ps of
{ Nothing -> case pred of { Pred s t -> case lstLookup s is of
  { Nothing -> undefined -- No instances!
  ; Just insts -> findInst (findProof is) psn pred insts
  }}
; Just s -> (psn, V s)
};
-- Walk an AST replacing every Proof placeholder with a concrete
-- dictionary term, threading the (pending-constraints, counter) state.
prove' ienv sub psn a = case a of
{ R _ -> (psn, a)
; V _ -> (psn, a)
; A x y -> let { p1 = prove' ienv sub psn x } in fpair p1 $ \psn1 x1 ->
    second (A x1) (prove' ienv sub psn1 y)
; L s t -> second (L s) (prove' ienv sub psn t)
; Proof raw -> findProof ienv (predApply sub raw) psn
};
-- prove :: [(String, [Qual])] -> (Type, Ast) -> Subst -> (Qual, Ast)
-- Resolve all Proof placeholders in a; any constraints left pending
-- become extra lambda-bound dictionary parameters, and the result is
-- the qualified type plus the dictionary-abstracted AST.
prove ienv ta sub =
  fpair ta $ \t a ->
  fpair (prove' ienv sub ([], 0) a) $ \psn x ->
  fpair psn $ \ps _ ->
  (Qual (map fst ps) (apply sub t), foldr (L . snd) x ps);
-- Pair each predicate with a fresh dictionary-variable name "*n".
dictVars ps n =
  flst ps ([], n) $ \p pt ->
  first ((p, '*' : showInt n "") :) (dictVars pt $ n + 1);
-- qi = Qual of instance, e.g. Eq t => [t] -> [t] -> Bool
-- Type-check one method definition (s, expr) of an instance whose
-- qualified head is qi, and resolve its dictionary uses.
inferMethod ienv typed qi def = fpair def $ \s expr ->
  fpair (infer' typed [] expr (Just [], 0)) $ \ta msn ->
  case lstLookup s typed of
  { Nothing -> undefined -- No such method.
  -- e.g. qac = Eq a => a -> a -> Bool, some AST (product of single method)
  ; Just qac -> fpair msn $ \ms n -> case ms of
    { Nothing -> undefined -- Type check fails.
    ; Just sub -> fpair (instantiate (fst qac) n) $ \q1 n1 -> case q1 of
      { Qual psc tc -> case psc of
        { [] -> undefined -- Unreachable.
        -- A class method always carries its class constraint, so psc
        -- has exactly one predicate.
        ; (:) headPred shouldBeNull -> case qi of { Qual psi ti ->
          case headPred of { Pred _ headT -> case match headT ti of
          { Nothing -> undefined
          -- e.g. Eq t => [t] -> [t] -> Bool
          -- instantiate and match it against type of ta
          ; Just subc ->
              fpair (instantiate (Qual psi $ apply subc tc) n1) $ \q2 n2 ->
              case q2 of { Qual ps2 t2 -> fpair ta $ \tx ax ->
              case match (apply sub tx) t2 of
              { Nothing -> undefined -- Class/instance type conflict.
              ; Just subx -> snd $ prove' ienv (subx @@ sub) (dictVars ps2 0) ax
              }}}}}}}}};
-- Build the dictionary product \d1..dn * -> * d1 .. dn over the
-- method names ds (the "*" parameter selects a component).
genProduct ds = foldr L (L "*" $ foldl A (V "*") $ map V ds) ds;
-- Elaborate an instance declaration into a named dictionary value:
-- the name comes from showPred on the instance head, the body is the
-- product of the type-checked methods, abstracted over the context's
-- dictionary variables. maybeFix allows self-reference.
inferInst ienv typed inst = fpair inst $ \cl qds -> fpair qds $ \q ds ->
  case q of { Qual ps t -> let { s = showPred $ Pred cl t } in
  (s, (,) (noQual $ TC "DICT") $ maybeFix s $
    foldr (L . snd)
      (foldl A (genProduct $ map fst ds)
        (map (inferMethod ienv typed q) ds))
      (fst $ dictVars ps 0)
  )
  };
-- Type-check every definition in order, accumulating results into
-- `typed` (reversed back at the end). Left defs are ordinary bindings
-- (wrapped in maybeFix for recursion), Right defs are instances.
-- Returns Left on type error.
inferDefs ienv defs typed =
  flst defs (Right $ reverse typed) $ \edef rest ->
  case edef of
  { Left def ->
      fpair def $ \s expr ->
      fpair (infer' typed [] (maybeFix s expr) (Just [], 0)) $ \ta msn ->
      fpair msn $ \ms _ ->
      case fmap (prove ienv ta) ms of
      { Nothing -> Left ("bad type: " ++ s)
      ; Just qa -> inferDefs ienv rest ((s, qa) : typed)}
  ; Right inst -> inferDefs ienv rest (inferInst ienv typed inst : typed)};
-- Name of a data constructor.
conOf con = case con of { Constr s _ -> s };
-- Build the case-eliminator binding for an ADT: its name is the
-- constructor names joined with '|' (matching cas'), its type takes
-- the scrutinee then one continuation per constructor, and its value
-- is the identity (Scott encoding makes the scrutinee self-eliminate).
mkCase t cs =
  ( concatMap (('|' :) . conOf) cs
  , ( noQual $
      arr t $
      foldr
        (arr .
         (\c ->
            case c of
            { Constr _ ts -> foldr arr (TV "case") ts }))
        (TV "case")
        cs
    , L "x" $ V "x"));
-- Generate n distinct argument names "*", "**", "***", ... for the
-- n-element input list.
mkStrs = snd . foldl (\p u -> fpair p (\s l -> ('*':s, s : l))) ("*", []);
-- For example, creates `Just = \x a b -> b x`.
-- Scott encoding
-- Scott-encode constructor s: abstract over its fields ts and one
-- continuation per constructor vs, then apply the continuation named s
-- to the fields.
scottEncode vs s ts = foldr L (foldl (\a b -> A a (V b)) (V s) ts) (ts ++ vs);
-- Binding for one constructor of ADT t: its (unqualified) arrow type
-- plus its Scott-encoded value.
scottConstr t cs c = case c of { Constr s ts -> (s,
  ( noQual $ foldr arr t ts
  , scottEncode (map conOf cs) s $ mkStrs ts)) };
-- All bindings an ADT introduces: the case eliminator plus one binding
-- per constructor.
mkAdtDefs t cs = mkCase t cs : map (scottConstr t cs) cs;
-- Eliminator for the Neat record.
fneat neat f = case neat of { Neat a b c -> f a b c };
-- Find the first element satisfying f; return it (if any) together
-- with the remaining elements (order not preserved).
select f xs acc =
  flst xs (Nothing, acc) $ \x xt ->
  ife (f x) (Just x, xt ++ acc) (select f xt (x : acc));
-- Add instance q under class name s, merging with any existing entry
-- for that class.
addInstance s q is = fpair (select ((== s) . fst) is []) $ \m xs -> case m of
{ Nothing -> (s, [q]):xs
; Just sqs -> second (q:) sqs:xs
};
-- Selector for method s of a class with methods ms: takes a dictionary
-- and applies it to a function picking out the s component.
mkSel ms s = L "*" $ A (V "*") $ foldr (L . ('*' :) . fst) (V $ '*' : s) ms;
-- Partition the top-level declarations into a Neat: the instance
-- environment, the definitions to infer (instances tagged Right), and
-- already-typed bindings (ADT constructors and class-method
-- selectors), seeded with the primitives.
untangle = foldr (\top acc -> fneat acc $ \ienv fs typed -> case top of
  { Adt t cs -> Neat ienv fs (mkAdtDefs t cs ++ typed)
  ; Def f -> Neat ienv (Left f : fs) typed
  ; Class classId v ms -> Neat ienv fs (
    map (\st -> fpair st $ \s t -> (s, (Qual [Pred classId v] t, mkSel ms s))) ms
    ++ typed)
  ; Inst cl q ds -> Neat (addInstance cl q ienv) (Right (cl, (q, ds)):fs) typed
  }) (Neat [] [] prims);
-- Type-check a whole parsed program.
infer prog = fneat (untangle prog) inferDefs;
-- Render a qualified type: predicates (each already ending in " => ")
-- followed by the type.
showQual q = case q of { Qual ps t -> concatMap showPred ps ++ show t };
instance Show Qual where { show = showQual };
-- Render "name :: type" lines for every inferred binding, or an error
-- message on parse/type failure.
dumpTypes' m =
  case m of
  { Err msg -> "parse error"
  ; Ok prog _ _ ->
      case infer prog of
      { Left err -> err
      ; Right typed ->
          concatMap
            (\p -> fpair p $ \s qa -> s ++ " :: " ++ show (fst qa) ++ "\n")
            typed}};
-- Parse source s and dump the inferred types (Empty/Consumed both
-- carry a reply; treat them alike).
dumpTypes s = case parse program s of
{ Empty m -> dumpTypes' m
; Consumed m -> dumpTypes' m };
-- TODO: replace with show msg
-- Type-check a parse result and assemble it (asm is defined elsewhere
-- in this file) into the final output, dropping the types.
compile' m = case m of
{ Err msg -> "parse error"
; Ok prog _ _ ->
    case infer prog of
    { Left err -> err
    ; Right qas -> fromMaybe undefined (asm $ map (second snd) qas)}};
-- Entry point: parse then compile.
compile s = case parse program s of
{ Empty m -> compile' m
; Consumed m -> compile' m };
| null | https://raw.githubusercontent.com/siraben/mini-haskell/d2f30da94cda0fa511a0cea75febbb03f54ce3de/classy.hs | haskell | ----------------------------------------------------------------------
----------------------------------------------------------------------
{-# LANGUAGE FlexibleInstances #
# LANGUAGE TypeSynonymInstances #
fpair = flip curry
fold a list
flist :: [a] -> b -> (a -> [a] -> b) -> b
(==) on lists
maybe :: b -> (a -> b) -> Maybe a -> b
fold a maybe
fmaybe :: Maybe a -> b -> (a -> b) -> b
Later, when we add foldables and traversables, it should be
Show a non-empty list
having multiple numeric types.
lookupWith :: (a -> b -> Bool) -> a -> [(b, a)] -> Maybe a
Representation of types
type ctor. type var. type app.
Representation of AST
raw combinator assembly
variable
application
lambda abstraction
proof for typeclass instantiation?
* instance environment
* definitions, including those of instances
* Typed ASTs, ready for compilation, including ADTs and methods,
e.g. (==), (Eq a => a -> a -> Bool, select-==)
Written in a contrived way for use with mini-Haskell (e.g. no
nested pattern matching)
Position is a line, column
unwords ["Msg", show pos, show s1, show s2]};
instance Show Msg where
{ show = showMsg };
; Ok a s m -> unwords ["Ok", show a, show s, show m]};
instance Show a => Show (Reply a) where { show = showReply };
showConsumed c = case c of { Empty m -> unwords ["Empty", show m]
; Consumed m -> unwords ["Consumed", show m] };
instance Show a => Show (Consumed a) where
{ show = showConsumed };
fromString :: String -> State
nextPos :: Pos -> Char -> Pos
q.
p <|> q ::= <p> | <q>
Run parser p, if it consumed input and failed, pretend like it
didn't consume anything.
many p ::= <p>*
many1 p ::= <p>+
sepBy1 p sep ::= <p> (<sep> <p>)*
between x y p ::= <x> <p> <y>
com ::= '-' '-' <char c: c != '\n'>* '\n'
Block comments
notComEnd ::= <char c: c != '-'> | '-' <char c: c != '}'>
blockcom ::= "{-" (<blockcom> | <notComEnd>) "-}"
Tokenize a parser, producing a parser that consumes trailing
whitespace.
tok p ::= <p> <sp>
Parse a character (tokenized)
wantWith :: (a -> Bool) -> String -> Parser a -> Parser a
TODO: Consider backtracking the input on failure (similar to sat)?
paren a ::= '(' <a> ')'
lower ::= 'a' | 'b' | 'c' ... 'z' | '_'
upper ::= 'A' | 'B' | 'C' ... 'Z'
digit ::= '0' | '1' | '2' ... '9'
alpha ::= <lower> | <upper>
varLex ::= <lower> (<alpha> | <digit> | '\'')*
Operator characters
Lambda
lam r ::= '\\' <varId>+ "->" <r>
Escape characters
String literals
hence the use of char.
Character literals
alt r ::= ((<conId> | '(' (':' | ',') ')') | "[]") <varId>* "->" r
alts r ::= <braceSep <alt r>>
Case expressions
cas r ::= "case" r "of" <alts r>
thenComma r ::= ',' <r>
rightSect r ::= (<op> | ',') <r>
Sections
section ::= '(' (<parenExpr r> | <rightSect r>) ')'
isFree :: String -> Ast -> Bool
Checks if a string v occurs free in expr.
Definitions
def r ::= <var> <varId>* '=' <r>
| '(' ',' ')' | (<conId> | <var>) | <lit>
opFold'
arrow type constructor
Data declarations
TODO: Add type, newtype declarations, deriving?
Precedence
prec ::= <digit> <sp>
Fixity declaration
Class declarations
classDecl ::= "class" <conId> <varId> "where" <braceSep genDecl>
Instance declarations
instDecl r ::= "instance" (<conId> <inst> "=>")? <conId> <inst>
"where" <braceSep <def r>>
Top level declarations
A program consists of whitespace, followed by fixity declarations,
then top level declarations
program' ::= <sp> <fixity>* <tops>
TODO: replace with show msg
TODO: replace with show msg
data [] a = [] | (:) a ([] a)
data (,) a b = (,) a b
Primitives
prims :: [(String, (Qual, Ast))]
Total variant
showC :: [(String, b)] -> Ast -> String
Total version of showC
z s lift ast abs. app.
debruijn :: [String] -> Ast -> LC
V C N W
($$) algorithm
($$), case Defer
Parameters: r == self
(V, V) -> N (C S.(S $! I $! I))
(V, C d) -> N (C S.(kC $! kI $! d))
($$), case Closed
(C d, V) -> N (C d)
(C d1, C d2) -> C (S.(d1 $! d2))
(C d, N e) -> N (C S.(kB $! d) $$ e)
(C d, W e) -> W (C d $$ e)
($$), case Need
e is the argument to Need (i.e. lnee r (Need e) y = ...)
(N e, V) -> N (C S.kS $$ e $$ C S.kI)
($$), case Weak
e is the argument to Weak (i.e. lweak r (Weak e) y = ...)
(W e, V) -> N e
(W e, C d) -> W (e $$ C d)
($$), the full thing.
babs :: LC -> Sem
let z : (a*y, a) repr = V
let s: (b*y, a) repr -> (_*(b*y), a) repr = fun e -> W e
Looks like this version recurs on e.
Lambda abstraction
V -> C S.kI
C d -> C S.(kK $! d)
Remark: d is a closed body of a lambda abstraction, so the
variable being abstracted over is not used and thus we can
use the K combinator
N e -> e
Application
Convert an AST into debruijn form, then perform bracket abstraction,
return if and only if we have a closed form.
nolam :: Ast -> Maybe Ast
Apply substitutions to a tree
Lookup v in the substitutions, if not found, replace it with t
Occurs check
occurs :: String -> Type -> Bool
Bind the type variable s to the type t
Just (pure (s, t)) is clearer
Binding a variable with another variable
Infinite types not allowed
assignments that make them equal.
We pass unify as an argument to achieve mutual recursion.
instantiate' ::
Type -> Int -> [(String, Type)] -> ((Type, Int), [(String, Type)])
instantiate :: Qual -> Int -> (Qual, Int)
type SymTab = [(String, (Qual, Ast))];
infer' ::
[(String, (Qual, b))]
-> [(String, Type)]
-> Ast
-> (Maybe [(String, Type)], Int)
-> ((Type, Ast), (Maybe [(String, Type)], Int))
Lambda abstraction. Infer the body of the lambda with
typeEq :: Type -> Type -> Bool
TODO: Add support for printing of infix type operators.
No instances!
prove :: [(String, [Qual])] -> (Type, Ast) -> Subst -> (Qual, Ast)
No such method.
Type check fails.
e.g. Eq t => [t] -> [t] -> Bool
instantiate and match it against type of ta
Class/instance type conflict.
TODO: replace with show msg | A mini Haskell compiler with typeclasses .
Originally written by , modified by
Delete code below and uncomment the block to compile in GHC
# LANGUAGE OverlappingInstances #
# LANGUAGE CPP #
# LANGUAGE TupleSections #
# LANGUAGE NoMonomorphismRestriction #
module Compiler where
import Prelude (Char, Int, String, succ)
import Data.Char (chr, ord)
import qualified Prelude
a <= b = if a Prelude.<= b then True else False
(*) = (Prelude.*)
(+) = (Prelude.+)
(-) = (Prelude.-)
(/) = Prelude.div
(%) = Prelude.mod
class Eq a where { (==) :: a -> a -> Bool };
class Show a where { show :: a -> String };
class Functor f where { fmap :: (a -> b) -> f a -> f b };
class Applicative f where { pure :: a -> f a; (<*>) :: f (a -> b) -> f a -> f b };
class Monad m where { return :: a -> m a ; (>>=) :: m a -> (a -> m b) -> m b};
instance Eq Char where { (==) x y = if x Prelude.== y then True else False };
instance Eq Int where { (==) x y = if x Prelude.== y then True else False };
instance Show Char where { show = Prelude.show };
infixr 5 ++;
infixr 9 .;
infixl 4 <*> , <$> , <* , *>;
infixl 3 <|>, <||>;
infixr 0 $;
infixl 7 *;
infixl 6 + , -;
-}
infixr 5 :, ++;
infixr 9 .;
infixl 4 <*> , <$> , <* , *>;
infixl 3 <|>, <||>;
infixr 0 $;
infixl 7 *;
infixl 6 + , -;
(*) = (.*.);
(+) = (.+.);
(-) = (.-.);
(%) = (.%.);
(/) = (./.);
Delete code above and uncomment the block to compile in GHC
undefined = undefined;
($) f = f;
id x = x;
const x y = x;
flip f x y = f y x;
(&) x f = f x;
(<$>) = fmap;
liftA2 f x = (<*>) (fmap f x);
(*>) = liftA2 $ \x y -> y;
(<*) = liftA2 const;
data Bool = True | False;
data Maybe a = Nothing | Just a;
data Either a b = Left a | Right b;
data Error a = Error String | Okay a;
fpair p f = case p of { (,) x y -> f x y };
fst p = case p of { (,) x y -> x };
snd p = case p of { (,) x y -> y };
first f p = fpair p $ \x y -> (f x, y);
second f p = fpair p $ \x y -> (x, f y);
ife a b c = case a of { True -> b ; False -> c };
not a = case a of { True -> False; False -> True };
(.) f g x = f (g x);
(||) f g = ife f True (ife g True False);
(&&) f g = ife f (ife g True False) False;
(<) a b = not (a == b) && (a <= b);
flst xs n c = case xs of { [] -> n; (:) h t -> c h t };
lstEq xs ys = case xs of
{ [] -> flst ys True (\h t -> False)
; (:) x xt -> flst ys False (\y yt -> ife (x == y) (lstEq xt yt) False)
};
instance Eq a => Eq [a] where { (==) = lstEq };
(/=) x y = not (x == y);
Append two lists
(++) xs ys = flst xs ys (\x xt -> x:xt ++ ys);
maybe n j m = case m of { Nothing -> n; Just x -> j x };
fmaybe m n j = case m of { Nothing -> n; Just x -> j x };
instance Show a => Show (Maybe a) where
{ show = maybe "Nothing" (\x -> "Just " ++ show x) };
instance Functor Maybe where
{ fmap f = maybe Nothing (Just . f) };
instance Applicative Maybe where
{ pure = Just ; (<*>) f y = maybe Nothing (`fmap` y) f};
instance Monad Maybe where
{ return = Just ; (>>=) ma f = maybe Nothing f ma };
fromMaybe a m = fmaybe m a id;
foldr c n l = flst l n (\h t -> c h (foldr c n t));
TODO : should have type
: : Monoid a = > ( a - > a - > a ) - > [ a ] - > a
: : ( Monoid m , Foldable t ) = > ( m - > m - > m ) - > t m - > m
' : : ( a - > a - > a ) - > [ a ] - > Maybe a
foldr1' c l =
flst
l
Nothing
(\h t ->
foldr
(\x m -> Just (fmaybe m x (c x)))
Nothing
l);
foldl f a bs = foldr (\b g x -> g (f x b)) id bs a;
foldl1 ' : : ( p - > p - > p ) - > [ p ] - > Maybe p
See above comments on the status of '
foldl1' f l = flst l Nothing (\x xs -> Just (foldl f x xs));
elem k = foldr (\x t -> ife (x == k) True t) False;
find f = foldr (\x t -> ife (f x) (Just x) t) Nothing;
concat = foldr (++) [];
itemize c = [c];
map f = foldr (\x xs -> f x : xs) [];
concatMap f l = concat (map f l);
instance Functor [] where { fmap = map };
instance Monad [] where { return = itemize ; (>>=) = flip concatMap };
instance Applicative [] where
{ pure = itemize
; (<*>) fs xs = fs >>= \f -> xs >>= \x -> return $ f x};
prependToAll s l = flst l [] (\x xs -> s : x : prependToAll s xs);
intersperse s l = flst l [] (\x xs -> x : prependToAll s xs);
intercalate d = concat . intersperse d;
unwords = intercalate " ";
showList' l = "[" ++ intercalate "," (map show l) ++ "]";
showList l = case l of {
[] -> "[]";
(:) x xs -> showList' l
};
mapconcat f l = concat (map f l);
escapeC c = ife (c == '\n') "\\n"
(ife (c == '\\') "\\\\"
[c]);
showString s = "\"" ++ mapconcat escapeC s ++ "\"";
ifz n = ife (0 == n);
showInt' n = ifz n id (showInt' (n/10) . (:) (chr (48+(n%10))));
showInt n = ifz n ('0':) (showInt' n);
N.B. using show on Ints will make GHC fail to compile to due GHC
instance Show Int where { show n = showInt n "" };
instance Show String where { show = showString };
instance Show a => Show [a] where { show = showList };
any f = foldr (\x t -> ife (f x) True t) False;
lookupWith eq s =
foldr (\h t -> fpair h (\k v -> ife (eq s k) (Just v) t)) Nothing;
lstLookup = lookupWith (==);
reverse = foldl (flip (:)) [];
zipWith f xs ys =
case xs of
{ [] -> []
; (:) x xt ->
case ys of
{ [] -> []
; (:) y yt -> f x y : zipWith f xt yt
}
};
zip = zipWith (,);
data Type = TC String | TV String | TAp Type Type;
data Ast
data Neat =
Neat
[(String, [Qual])]
[Either (String, Ast) (String, (Qual, [(String, Ast)]))]
[(String, (Qual, Ast))];
Parser combinators ( applicative style )
From the paper " Parsec : A practical parsing library "
data Pos = Pos Int Int;
data State = State String Pos;
data Parsec a = Parsec (State -> Consumed a);
data Msg = Msg Pos String [String];
data Reply a = Err Msg
| Ok a State Msg;
data Consumed a = Empty (Reply a)
| Consumed (Reply a);
parens s = '(':(s ++ ")");
showPos p = case p of { Pos r c -> unwords ["row:" , show r
, "col: " , show c]};
instance Show Pos where { show = showPos };
showState s = case s of { State s p -> unwords [show s, parens (show p)]};
instance Show State where { show = showState };
showMsg m = case m of { >
showReply r = case r of { unwords [ " Err " , show m ]
fromString s = State s (Pos 1 1);
parsec : : Parsec a - > State - > Consumed a
parsec p = case p of { Parsec f -> f };
parse : : Parsec a - > String - > Consumed a
parse p s = parsec p (fromString s);
bind : : Parsec a - > ( a - > Parsec b ) - > Parsec b
bind p f = Parsec $
\state -> case parsec p state of
{ Empty m ->
case m of
{ Err msg -> Empty (Err msg)
; Ok x state' msg -> parsec (f x) state' }
; Consumed m ->
Consumed
(case m of
{ Err msg -> Err msg
; Ok x state' msg ->
case parsec (f x) state' of
{ Empty m -> m
; Consumed m -> m}})};
parsecpure : : a - > Parsec a
parsecpure x = Parsec $ \state ->
case state of
{ State s pos -> Empty (Ok x state (Msg pos [] [])) };
instance Monad Parsec where
{ return = parsecpure
; (>>=) = bind };
instance Functor Parsec where
{ fmap f x = x >>= \x -> parsecpure (f x) };
instance Applicative Parsec where
{ pure = parsecpure
; (<*>) x y = x >>= \f -> y >>= \x -> parsecpure (f x) };
nextPos p c = case p of
{ Pos line col ->
ife (c == '\n') (Pos (line + 1) 0) (Pos line (col + 1))};
sat : : ( Bool ) - > Parsec
sat test = Parsec $ \state ->
case state of
{ State input pos ->
case input of
{ [] -> Empty (Err (Msg pos "end of input" []))
; (:) c cs ->
ife (test c)
(let { newPos = nextPos pos c
; newState = State cs newPos }
in Consumed (Ok c newState
(Msg pos [] [])))
(Empty (Err (Msg pos [c] [])))}};
mergeMsg m1 m2 = case m1 of
{ Msg pos inp exp1 ->
case m2 of
{ Msg _ _ exp2 -> Msg pos inp (exp1 ++ exp2)}};
mergeOk x inp msg1 msg2 = Empty (Ok x inp (mergeMsg msg1 msg2));
mergeError msg1 msg2 = Empty (Err (mergeMsg msg1 msg2));
( < | > ) : : Parsec a - > Parsec a - > Parsec a
Given two parsers p , q , run p on the input . If it fails , then
continue by is not backtracked before running
(<|>) p q = Parsec $
\state ->
case parsec p state of
{ Empty m ->
case m of
{ Err msg1 ->
case parsec q state of
{ Empty m ->
case m of
{ Err msg2 ->
mergeError msg1 msg2
; Ok x inp msg2 ->
mergeOk x inp msg1 msg2 }
; Consumed m -> Consumed m }
; Ok x inp msg1 ->
case parsec q state of
{ Empty m ->
case m of
{ Err msg2 ->
mergeOk x inp msg1 msg2
; Ok _ _ msg2 ->
mergeOk x inp msg1 msg2 }
; Consumed m -> Consumed m }}
; Consumed m -> Consumed m };
try p = Parsec $ \state -> case parsec p state of
{ Empty m -> Empty m
; Consumed m ->
case m of
{ Err msg ->
Empty (Err msg)
; Ok x st msg ->
Consumed (Ok x st msg)}};
(<||>) p q = try p <|> q;
many p = liftA2 (:) p (many p) <||> pure [];
many1 p = liftA2 (:) p (many p);
expect m exp = case m of { Msg pos inp _ -> Msg pos inp [exp] };
( < ? > ) : : Parsec a - > String - > Parsec a
(<?>) p exp = Parsec $ \state ->
case parsec p state of
{ Empty m ->
Empty
(case m of
{ Err msg ->
Err (expect msg exp)
; Ok x st msg ->
Ok x st (expect msg exp)})
; Consumed m -> Consumed m };
item = sat (const True);
sepBy1 p sep = liftA2 (:) p (many (sep *> p));
sepBy p sep = sepBy1 p sep <||> pure [];
char c = sat (== c) <?> show c;
string s =
case s of
{ [] -> pure []
; (:) c cs -> char c *> string cs *> pure s};
between x y p = x *> (p <* y);
Parse line comments
com = char '-' *> between (char '-') (char '\n') (many (sat (/= '\n')));
notComEnd = (sat (/= '-') <|> (char '-' *> sat (/= '}'))) *> pure [];
blockcom = let { content = many (blockcom <||> notComEnd) }
in between (string "{-") (string "-}") content *> pure [];
Parse whitespace
sp =
many
((pure <$> sat (\c -> (c == ' ') || (c == '\n'))) <|> com <|> blockcom);
tok p = p <* sp;
tokc = tok . char;
wantWith pred str p = Parsec $ \s ->
case parsec p s of
{ Empty m ->
Empty (case m of
{ Err m -> Err m
; Ok a state' m -> ife (pred a)
(Ok a state' m)
(Err (expect m str)) })
; Consumed m ->
Consumed (case m of
{ Err m -> Err m
; Ok a state' m ->
ife (pred a)
(Ok a state' m)
(Err (expect m str))})
};
want : : Eq a = > a - > a - > Parser a
want f s = wantWith (== s) s f;
paren = between (tokc '(') (tokc ')');
lower = sat (\x -> ((x <= 'z') && ('a' <= x)) || (x == '_')) <?> "lower";
upper = sat (\x -> (x <= 'Z') && ('A' <= x)) <?> "upper";
digit = sat (\x -> (x <= '9') && ('0' <= x)) <?> "digit";
alpha = (lower <|> upper) <?> "alpha";
varLex = liftA2 (:) lower (many (alpha <|> digit <|> char '\''));
Constructor identifier
conId = tok (liftA2 (:) upper (many (alpha <|> digit <|> char '\'')));
keyword s = tok (want varLex s);
varId = tok (wantWith (\s -> not ((s == "of") || (s == "where"))) "variable" varLex);
opLex = many1 (sat (`elem` ":!#$%&*+./<=>?@\\^|-~"));
Operators
op = tok opLex <|> between (tokc '`') (tokc '`') varId;
var = varId <|> paren (tok opLex);
anyOne = pure <$> tok (sat (const True));
lam r =
tokc '\\' *>
liftA2 (flip (foldr L)) (many1 varId) (char '-' *> (tokc '>' *> r));
listify = fmap (foldr (\h t -> A (A (V ":") h) t) (V "[]"));
escChar = char '\\' *> (sat (`elem` "'\"\\") <|> (const '\n' <$> char 'n'));
litOne delim = (\c -> R ('#' : pure c)) <$> (escChar <||> sat (/= delim));
Integer literals
litInt = R . ('(' :) . (++ ")") <$> tok (many1 digit);
Notice that we do not consume whitespace after parsing the first " ,
litStr = listify (between (char '"') (tokc '"') (many (litOne '"')));
litChar = between (char '\'') (tokc '\'') (litOne '\'');
lit = litStr <|> litChar <|> litInt;
r : : = ' [ ' < sepBy r ' , ' > ' ] '
sqLst r = listify (between (tokc '[') (tokc ']') (sepBy r (tokc ',')));
alt r =
(,) <$>
(conId <||> (pure <$> paren (tokc ':' <|> tokc ',')) <||>
liftA2 (:) (tokc '[') (pure <$> tokc ']')) <*>
liftA2 (flip (foldr L)) (many varId) (char '-' *> (tokc '>' *> r));
braceSep f : : = ' { ' < sepBy f ' ; ' > ' } '
braceSep f = between (tokc '{') (tokc '}') (sepBy f (tokc ';'));
alts r = braceSep (alt r);
cas' x as = foldl A (V (concatMap (('|' :) . fst) as)) (x : map snd as);
cas r = liftA2 cas' (between (keyword "case") (keyword "of") r) (alts r);
thenComma r =
tokc ',' *> (((\x y -> A (A (V ",") y) x) <$> r) <||> pure (A (V ",")));
parenExpr r : : = < r > ( < op > | < r > )
parenExpr r =
liftA2
(&)
r
(((\v a -> A (V v) a) <$> op) <||> thenComma r <||> pure id);
rightSect r =
((\v a -> A (A (V "\\C") (V v)) a) <$> (op <|> (pure <$> tokc ','))) <*> r;
section r = paren (parenExpr r <|> rightSect r);
isFree v expr = case expr of
{ R s -> False
; V s -> s == v
; A x y -> isFree v x || isFree v y
; L w t -> (v /= w) && isFree v t
; Proof _ -> False
};
maybeFix s x = ife (isFree s x) (A (V "\\Y") (L s x)) x;
def r =
liftA2 (,) var (flip (foldr L) <$> many varId <*> (tokc '=' *> r));
Convert a list of let bindings and the let body into a single AST .
addLets ls x =
foldr (\p t -> fpair p (\name def -> A (L name t) $ maybeFix name def)) x ls;
let r : : = " let " ' { ' < sepBy def r > ' } ' " in " < r >
letin r =
liftA2
addLets
(between (keyword "let") (keyword "in") (braceSep (def r)))
r;
atom r : : = < letin r > | < sqLst r > | < cas r > | < lam r > | < section r >
atom r =
letin r <|>
sqLst r <||>
cas r <|>
lam r <||>
section r <||>
(paren (tokc ',') *> pure (V ",")) <||>
(V <$> (conId <|> var)) <||>
lit;
aexp r = fromMaybe undefined . foldl1' A <$> many1 (atom r);
fix f = f (fix f);
Parse infix operators
infix infixl infixr
data Assoc = NAssoc | LAssoc | RAssoc;
instance Show Assoc where
{ show a =
case a of
{ NAssoc -> "NAssoc"
; LAssoc -> "LAssoc"
; RAssoc -> "RAssoc" } };
eqAssoc x y = case x of
{ NAssoc -> case y of { NAssoc -> True ; LAssoc -> False ; RAssoc -> False }
; LAssoc -> case y of { NAssoc -> False ; LAssoc -> True ; RAssoc -> False }
; RAssoc -> case y of { NAssoc -> False ; LAssoc -> False ; RAssoc -> True }
};
instance Eq Assoc where { (==) = eqAssoc };
precOf s precTab = fmaybe (lstLookup s precTab) 5 fst;
assocOf s precTab = fmaybe (lstLookup s precTab) LAssoc snd;
opWithPrec precTab n = wantWith (\s -> n == precOf s precTab) "precTab" op;
: : [ ( String , ( a , Assoc ) ) ] - > Ast - > [ ( String , Ast ) ] - > Maybe Ast
opFold' precTab e xs =
case xs of
{ [] -> Just e
; (:) x xt ->
case find
(\y ->
not (assocOf (fst x) precTab == assocOf (fst y) precTab))
xt of
{ Nothing ->
case assocOf (fst x) precTab of
{ NAssoc ->
case xt of
{ [] -> Just $ fpair x (\op y -> A (A (V op) e) y)
; (:) y yt -> Nothing }
; LAssoc -> Just $ foldl (\a b -> fpair b (\op y -> A (A (V op) a) y)) e xs
; RAssoc ->
Just $ foldr (\a b -> fpair a (\op y e -> A (A (V op) e) (b y))) id xs e }
; Just y -> Nothing }};
expr precTab =
fix $ \r n ->
ife
(n <= 9)
((fromMaybe undefined .) . opFold' precTab <$> r (succ n) <*>
many (liftA2 (,) (opWithPrec precTab n) (r (succ n))))
(aexp (r 0));
data Constr = Constr String [Type];
data Pred = Pred String Type;
data Qual = Qual [Pred] Type;
data Top = Adt Type [Constr]
| Def (String, Ast)
| Class String Type [(String, Type)]
| Inst String Qual [(String, Ast)];
arr a = TAp (TAp (TC "->") a);
Parse type applications
bType r = fromMaybe undefined . foldl1' TAp <$> many1 r;
Parse types
_type r = fromMaybe undefined . foldr1' arr <$> sepBy (bType r) (tok (want opLex "->"));
typeConstant =
(\s -> ife (s == "String") (TAp (TC "[]") (TC "Int")) (TC s)) <$> conId;
aType =
paren
(liftA2
(&)
(_type aType)
((tokc ',' *> ((\a b -> TAp (TAp (TC ",") b) a) <$> _type aType)) <||>
pure id)) <||>
typeConstant <||>
(TV <$> varId) <||>
(tokc '[' *>
(tokc ']' *> pure (TC "[]") <||>
TAp (TC "[]") <$> (_type aType <* tokc ']')));
simpleType c vs = foldl TAp (TC c) (map TV vs);
adt : : = " data " ' = ' < conId > < varId > * < sepBy ( < conId > | < aType > * ) ' | ' >
adt =
liftA2
Adt
(between (keyword "data") (tokc '=') (liftA2 simpleType conId (many varId)))
(sepBy (liftA2 Constr conId (many aType)) (tokc '|'));
prec = (\c -> ord c - ord '0') <$> tok digit;
fixityList a n = fmap (, (n, a));
fixityDecl " kw " a : : = " kw " < prec > < sepBy < op > ' , ' > ' ; '
fixityDecl kw a =
between
(keyword kw)
(tokc ';')
(liftA2 (fixityList a) prec (sepBy op (tokc ',')));
fixity =
fixityDecl "infixl" LAssoc <||>
fixityDecl "infixr" RAssoc <||>
fixityDecl "infix" NAssoc;
noQual = Qual [];
genDecl : : = " : : " < _ type aType >
genDecl = liftA2 (,) var (char ':' *> tokc ':' *> _type aType);
classDecl =
keyword "class" *>
(Class <$> conId <*> (TV <$> varId) <*> (keyword "where" *> braceSep genDecl));
inst : : = < _ type aType >
inst = _type aType;
instDecl r =
keyword "instance" *>
((\ps cl ty defs -> Inst cl (Qual ps ty) defs) <$>
(liftA2 ((pure .) . Pred) conId (inst <* (char '=' *> tokc '>')) <||>
pure []) <*>
conId <*>
inst <*>
(keyword "where" *> braceSep (def r)));
tops : : = < sepBy ( < adt > | < def > | < classDecl > | < instDecl > ) ' ; ' >
tops precTab =
sepBy
(adt <||> Def <$> def (expr precTab 0) <||> classDecl <||>
instDecl (expr precTab 0))
(tokc ';');
program' = sp *> (concat <$> many fixity) >>= tops;
eqPre = case parse program' $
"class Eq a where { (==) :: a -> a -> Bool };\n" ++
"class Show a where { show :: a -> String };\n" ++
"class Functor f where { fmap :: (a -> b) -> f a -> f b };\n" ++
"class Applicative f where { pure :: a -> f a; (<*>) :: f (a -> b) -> f a -> f b };\n" ++
"class Monad m where { return :: a -> m a ; (>>=) :: m a -> (a -> m b) -> m b};\n" ++
"instance Eq Int where { (==) = intEq };\n" of
{ Empty m ->
case m of
{ Err msg -> undefined
; Ok l _ _ -> l}
; Consumed m ->
case m of
{ Err msg -> undefined
; Ok l _ _ -> l}
};
program =
((eqPre ++
[ Adt
(TAp (TC "[]") (TV "a"))
[Constr "[]" [], Constr ":" [TV "a", TAp (TC "[]") (TV "a")]]
, Adt (TAp (TAp (TC ",") (TV "a")) (TV "b")) [Constr "," [TV "a", TV "b"]]
]) ++) <$>
program';
prims =
let { ii = arr (TC "Int") (TC "Int")
; iii = arr (TC "Int") ii
; bin s = R $ "``BT`T" ++ s }
in map (second (first noQual)) $
[ ("\\Y", (arr (arr (TV "a") (TV "a")) (TV "a"), R "Y"))
, ( "\\C"
, ( arr
(arr (TV "a") (arr (TV "b") (TV "c")))
(arr (TV "b") (arr (TV "a") (TV "c")))
, R "C"))
, ("intEq", (arr (TC "Int") (arr (TC "Int") (TC "Bool")), bin "="))
, ("<=", (arr (TC "Int") (arr (TC "Int") (TC "Bool")), bin "L"))
, ("chr", (ii, R "I"))
, ("ord", (ii, R "I"))
, ("succ", (ii, R "`T`(1)+"))
] ++
map (\s -> ('.':s ++ ".", (iii, bin s))) ["+", "-", "*", "/", "%"];
rank ds v =
let { loop l v c =
case l of
{ [] -> Nothing
; (:) x xs ->
ife (v == fst x) (Just ('[' : showInt c "]")) (loop xs v (succ c)) } }
in loop ds v 0;
showC ds t = case t of
{ R s -> Just s
; V v -> rank ds v
; A x y -> liftA2 (\a b -> '`':a ++ b) (showC ds x) (showC ds y)
; L w t -> Nothing
; Proof _ -> Nothing
};
-- De Bruijn encoding of lambda calculus terms
data LC = Ze | Su LC | Pass Ast | La LC | App LC LC;
-- Convert the AST into a nameless (De Bruijn) representation
debruijn n e = case e of
{ R s -> pure $ Pass (R s)
; V v -> pure $ foldr (\h m -> ife (h == v) Ze (Su m)) (Pass (V v)) n
; A x y -> App <$> debruijn n x <*> debruijn n y
; L s t -> La <$> debruijn (s:n) t
; Proof _ -> Nothing
};
See Kiselyov 's paper - " Lambda to SKI , semantically " , pages 10 - 11
data Sem = Defer | Closed Ast | Need Sem | Weak Sem;
ldef r y = case y of
Defer -> Need (Closed (A (A (R "S") (R "I")) (R "I")))
; Closed d -> Need (Closed (A (R "T") d))
( V , N e ) - > N ( C S.(kS $ ! ) $ $ e )
; Need e -> Need (r (Closed (A (R "S") (R "I"))) e)
( V , W e ) - > N ( C ( S.(kS $ ! ) ) $ $ e )
; Weak e -> Need (r (Closed (R "T")) e)
};
d is the argument to Closed ( i.e. r ( Closed d ) y = ... )
lclo r d y = case y of
Defer -> Need (Closed d)
; Closed dd -> Closed (A d dd)
; Need e -> Need (r (Closed (A (R "B") d)) e)
; Weak e -> Weak (r (Closed d) e)
};
lnee r e y = case y of
Defer -> Need (r (r (Closed (R "S")) e) (Closed (R "I")))
( N e , C d ) - > N ( C S.(kC $ ! kC $ ! d ) $ $ e )
; Closed d -> Need (r (Closed (A (R "R") d)) e)
( N e1 , N e2 ) - > N ( ( C S.kS ) $ $ e1 $ $ e2 )
; Need ee -> Need (r (r (Closed (R "S")) e) ee)
( N e1 , W e2 ) - > N ( ( C S.kC ) $ $ e1 $ $ e2 )
; Weak ee -> Need (r (r (Closed (R "C")) e) ee)
};
lwea r e y = case y of
Defer -> Need e
; Closed d -> Weak (r e (Closed d))
( W e1 , N e2 ) - > N ( ( C S.kB ) $ $ e1 $ $ e2 )
; Need ee -> Need (r (r (Closed (R "B")) e) ee)
( W e1 , W e2 ) - > W ( e1 $ $ e2 )
; Weak ee -> Weak (r e ee)
};
-- Semantic bracket-abstraction "application": combine two Sem values
-- (Kiselyov, "Lambda to SKI, semantically") by dispatching on the left
-- operand and delegating to the matching helper, passing itself back so
-- the helpers can recurse.
babsa x y = case x of
  { Defer -> ldef babsa y
  ; Closed d -> lclo babsa d y
  ; Need e -> lnee babsa e y
  ; Weak e -> lwea babsa e y
  };
Full bracket abstraction algorithm , from De Bruijn to combinators
babs t = case t of
Ze -> Defer
; Su e -> Weak (babs e)
A lifted AST is closed .
; Pass s -> Closed s
See " lam " function on page 10 of Kiselyov
; La t -> case babs t of
Defer -> Closed (R "I")
; Closed d -> Closed (A (R "K") d)
; Need e -> e
W e - > ( C S.kK ) $ $ e
; Weak e -> babsa (Closed (R "K")) e
}
; App x y -> babsa (babs x) (babs y)
};
nolam x = debruijn [] x >>= \x ->
case babs x of
{ Defer -> Nothing
; Closed d -> Just d
; Need e -> Nothing
; Weak e -> Nothing
};
dump tab ds =
case ds of
{ [] -> return []
; (:) h t ->
nolam (snd h) >>= \a ->
showC tab a >>= \b ->
dump tab t >>= \c ->
return (b ++ (';' : c)) };
asm ds = dump ds ds;
-- Apply a substitution (assoc list: type-variable name -> Type) to a
-- type tree. Constants are untouched; a variable not bound in the
-- substitution is returned unchanged.
apply sub t = case t of
  { TC v -> t
  ; TV v -> fromMaybe t (lstLookup v sub)
  ; TAp a b -> TAp (apply sub a) (apply sub b)
  };
-- Combine two substitution lists while applying the substitutions in
-- the first.
(@@) s1 s2 = map (second (apply s1)) s2 ++ s1;
-- Occurs check: does type variable s appear anywhere inside type t?
-- Used by varBind to refuse bindings that would build an infinite type.
occurs s t = case t of
  { TC v -> False
  ; TV v -> s == v
  ; TAp a b -> occurs s a || occurs s b
  };
varBind s t = case t of
TC v -> pure (pure (s, t))
; TV v -> ife (v == s) (pure []) (pure (pure (s, t)))
; TAp a b -> ife (occurs s t) Nothing (pure (pure (s, t)))
};
Most general unifier . Given two type trees , possibly return the
mgu unify t u = case t of
{ TC a -> case u of
{ TC b -> ife (a == b) (pure []) Nothing
; TV b -> varBind b t
; TAp a b -> Nothing
}
; TV a -> varBind a u
; TAp a b -> case u of
{ TC b -> Nothing
; TV b -> varBind b t
; TAp c d -> unify b d (mgu unify a c)
}
};
unify a b =
maybe Nothing (\s -> fmap (@@ s) (mgu unify (apply s a) (apply s b)));
instantiate' t n tab = case t of
{ TC s -> ((t, n), tab)
; TV s -> case lstLookup s tab of
{ Nothing -> let { va = TV (s ++ '_':showInt n "") }
in ((va, n + 1), (s, va):tab)
; Just v -> ((v, n), tab)
}
; TAp x y ->
fpair (instantiate' x n tab) $ \tn1 tab1 ->
fpair tn1 $ \t1 n1 ->
fpair (instantiate' y n1 tab1) $ \tn2 tab2 ->
fpair tn2 $ \t2 n2 -> ((TAp t1 t2, n2), tab2)
};
instantiatePred pred xyz =
case pred of
{ Pred s t ->
fpair xyz $ \xy tab ->
fpair xy $ \out n ->
first (first ((: out) . Pred s)) (instantiate' t n tab) };
instantiate qt n =
case qt of
{ Qual ps t ->
fpair (foldr instantiatePred (([], n), []) ps) $ \xy tab ->
fpair xy $ \ps1 n1 -> first (Qual ps1) (fst (instantiate' t n1 tab)) };
type Subst = [ ( String , Type ) ] ;
infer' typed loc ast csn =
fpair csn $ \cs n ->
let { va = TV ('_' : showInt n "") }
in case ast of
Raw code is treated as Int
R s -> ((TC "Int", ast), csn)
; V s ->
fmaybe
(lstLookup s loc)
(fmaybe (lstLookup s typed) undefined $ \ta ->
fpair (instantiate (fst ta) n) $ \q n1 ->
case q of {
Qual preds ty ->
((ty, foldl A ast (map Proof preds)), (cs, n1))
})
(flip (,) csn . flip (,) ast)
; A x y ->
fpair (infer' typed loc x (cs, n + 1)) $ \tax csn1 ->
fpair tax $ \tx ax ->
fpair (infer' typed loc y csn1) $ \tay csn2 ->
fpair tay $ \ty ay ->
((va, A ax ay), first (unify tx (arr ty va)) csn2)
the substitution list extended with s : = < newvar >
; L s x ->
first
(\ta -> fpair ta $ \t a -> (arr va t, L s a))
(infer' typed ((s, va) : loc) x (cs, n + 1))
; Proof _ -> undefined };
onType f pred = case pred of { Pred s t -> Pred s (f t) };
-- Structural equality on Type trees: constructors must match and, for
-- TC/TV, carry the same name; TAp compares both children. Backs the
-- local `instance Eq Type`.
typeEq t u = case t of
  { TC s -> case u of
    { TC t -> t == s
    ; TV _ -> False
    ; TAp _ _ -> False
    }
  ; TV s -> case u of
    { TC _ -> False
    ; TV t -> t == s
    ; TAp _ _ -> False
    }
  ; TAp a b -> case u of
    { TC _ -> False
    ; TV _ -> False
    ; TAp c d -> typeEq a c && typeEq b d
    }
  };
instance Eq Type where { (==) = typeEq };
predEq p q = case p of { Pred s a -> case q of { Pred t b ->
(s == t) && (a == b) }};
instance Eq Pred where { (==) = predEq };
predApply sub = onType (apply sub);
-- Locally defined list helpers (this file carries its own versions of
-- the usual Prelude names).
-- all f xs: True iff f holds for every element.
all f = foldr ((&&) . f) True;
-- filter f xs: keep elements satisfying f (ife is the local if-then-else).
filter f = foldr (\x xs -> ife (f x) (x:xs) xs) [];
-- intersect xs ys: elements of xs that also occur in ys, by (==).
intersect xs ys = filter (\x -> fmaybe (find (== x) ys) False (const True)) xs;
-- Merge two substitutions. Succeeds (Just s1 ++ s2) only if they agree
-- on every type variable they both bind; Nothing on any conflict.
merge s1 s2 =
  ife
    (all (\v -> apply s1 (TV v) == apply s2 (TV v)) $
     map fst s1 `intersect` map fst s2)
    (Just $ s1 ++ s2)
    Nothing;
-- One-way matching: find a substitution over h's variables (only) that
-- makes h equal to t. Unlike mgu, t is never instantiated -- its
-- variables are treated as constants, so TV on the right never matches
-- a TC/TAp head. Sub-results are combined with merge, which rejects
-- conflicting bindings.
match h t = case h of
  { TC a -> case t of
    { TC b -> ife (a == b) (return []) Nothing
    ; TV b -> Nothing
    ; TAp a b -> Nothing
    }
  ; TV a -> return [(a, t)]
  ; TAp a b -> case t of
    { TC b -> Nothing
    ; TV b -> Nothing
    ; TAp c d -> match a c >>= \ac ->
      match b d >>= \bd ->
      merge ac bd}};
matchPred h p = case p of { Pred _ t -> match h t };
-- Render a Type: bare names for constants/variables, and a fully
-- parenthesised "(f x)" form for every application. Backs the local
-- `instance Show Type`.
showType t = case t of
  { TC s -> s
  ; TV s -> s
  ; TAp a b -> concat ["(", showType a, " ", showType b, ")"]
  };
instance Show Type where { show = showType };
showPred p = case p of { Pred s t -> s ++ (' ':show t) ++ " => "};
findInst r qn p insts =
case insts of
{ [] ->
fpair qn $ \q n ->
let { v = '*' : showInt n "" }
in (((p, v) : q, n + 1), V v)
; (:) i is ->
case i of {
Qual ps h ->
case matchPred h p of
{ Nothing -> findInst r qn p is
; Just u ->
foldl
(\qnt p ->
fpair qnt $ \qn1 t -> second (A t) (r (predApply u p) qn1))
( qn
, V (case p of
{ Pred s _ -> showPred $ Pred s h }))
ps }}};
findProof is pred psn = fpair psn $ \ps n -> case lookupWith (==) pred ps of
{ Nothing -> case pred of { Pred s t -> case lstLookup s is of
; Just insts -> findInst (findProof is) psn pred insts
}}
; Just s -> (psn, V s)
};
prove' ienv sub psn a = case a of
{ R _ -> (psn, a)
; V _ -> (psn, a)
; A x y -> let { p1 = prove' ienv sub psn x } in fpair p1 $ \psn1 x1 ->
second (A x1) (prove' ienv sub psn1 y)
; L s t -> second (L s) (prove' ienv sub psn t)
; Proof raw -> findProof ienv (predApply sub raw) psn
};
prove ienv ta sub =
fpair ta $ \t a ->
fpair (prove' ienv sub ([], 0) a) $ \psn x ->
fpair psn $ \ps _ ->
(Qual (map fst ps) (apply sub t), foldr (L . snd) x ps);
dictVars ps n =
flst ps ([], n) $ \p pt ->
first ((p, '*' : showInt n "") :) (dictVars pt $ n + 1);
qi = Qual of instance , e.g. t = > [ t ] - > [ t ] - > Bool
inferMethod ienv typed qi def = fpair def $ \s expr ->
fpair (infer' typed [] expr (Just [], 0)) $ \ta msn ->
case lstLookup s typed of
e.g. qac = Eq a = > a - > a - > Bool , some AST ( product of single method )
; Just qac -> fpair msn $ \ms n -> case ms of
; Just sub -> fpair (instantiate (fst qac) n) $ \q1 n1 -> case q1 of
{ Qual psc tc -> case psc of
Unreachable .
; (:) headPred shouldBeNull -> case qi of { Qual psi ti ->
case headPred of { Pred _ headT -> case match headT ti of
{ Nothing -> undefined
; Just subc ->
fpair (instantiate (Qual psi $ apply subc tc) n1) $ \q2 n2 ->
case q2 of { Qual ps2 t2 -> fpair ta $ \tx ax ->
case match (apply sub tx) t2 of
; Just subx -> snd $ prove' ienv (subx @@ sub) (dictVars ps2 0) ax
}}}}}}}}};
genProduct ds = foldr L (L "*" $ foldl A (V "*") $ map V ds) ds;
inferInst ienv typed inst = fpair inst $ \cl qds -> fpair qds $ \q ds ->
case q of { Qual ps t -> let { s = showPred $ Pred cl t } in
(s, (,) (noQual $ TC "DICT") $ maybeFix s $
foldr (L . snd)
(foldl A (genProduct $ map fst ds)
(map (inferMethod ienv typed q) ds))
(fst $ dictVars ps 0)
)
};
inferDefs ienv defs typed =
flst defs (Right $ reverse typed) $ \edef rest ->
case edef of
{ Left def ->
fpair def $ \s expr ->
fpair (infer' typed [] (maybeFix s expr) (Just [], 0)) $ \ta msn ->
fpair msn $ \ms _ ->
case fmap (prove ienv ta) ms of
{ Nothing -> Left ("bad type: " ++ s)
; Just qa -> inferDefs ienv rest ((s, qa) : typed)}
; Right inst -> inferDefs ienv rest (inferInst ienv typed inst : typed)};
conOf con = case con of { Constr s _ -> s };
mkCase t cs =
( concatMap (('|' :) . conOf) cs
, ( noQual $
arr t $
foldr
(arr .
(\c ->
case c of
{ Constr _ ts -> foldr arr (TV "case") ts }))
(TV "case")
cs
, L "x" $ V "x"));
mkStrs = snd . foldl (\p u -> fpair p (\s l -> ('*':s, s : l))) ("*", []);
For example , creates ` Just = \x a b - > b x ` .
Scott encoding
scottEncode vs s ts = foldr L (foldl (\a b -> A a (V b)) (V s) ts) (ts ++ vs);
scottConstr t cs c = case c of { Constr s ts -> (s,
( noQual $ foldr arr t ts
, scottEncode (map conOf cs) s $ mkStrs ts)) };
mkAdtDefs t cs = mkCase t cs : map (scottConstr t cs) cs;
fneat neat f = case neat of { Neat a b c -> f a b c };
select f xs acc =
flst xs (Nothing, acc) $ \x xt ->
ife (f x) (Just x, xt ++ acc) (select f xt (x : acc));
addInstance s q is = fpair (select ((== s) . fst) is []) $ \m xs -> case m of
{ Nothing -> (s, [q]):xs
; Just sqs -> second (q:) sqs:xs
};
mkSel ms s = L "*" $ A (V "*") $ foldr (L . ('*' :) . fst) (V $ '*' : s) ms;
untangle = foldr (\top acc -> fneat acc $ \ienv fs typed -> case top of
{ Adt t cs -> Neat ienv fs (mkAdtDefs t cs ++ typed)
; Def f -> Neat ienv (Left f : fs) typed
; Class classId v ms -> Neat ienv fs (
map (\st -> fpair st $ \s t -> (s, (Qual [Pred classId v] t, mkSel ms s))) ms
++ typed)
; Inst cl q ds -> Neat (addInstance cl q ienv) (Right (cl, (q, ds)):fs) typed
}) (Neat [] [] prims);
infer prog = fneat (untangle prog) inferDefs;
showQual q = case q of { Qual ps t -> concatMap showPred ps ++ show t };
instance Show Qual where { show = showQual };
dumpTypes' m =
case m of
{ Err msg -> "parse error"
; Ok prog _ _ ->
case infer prog of
{ Left err -> err
; Right typed ->
concatMap
(\p -> fpair p $ \s qa -> s ++ " :: " ++ show (fst qa) ++ "\n")
typed}};
dumpTypes s = case parse program s of
{ Empty m -> dumpTypes' m
; Consumed m -> dumpTypes' m };
compile' m = case m of
{ Err msg -> "parse error"
; Ok prog _ _ ->
case infer prog of
{ Left err -> err
; Right qas -> fromMaybe undefined (asm $ map (second snd) qas)}};
compile s = case parse program s of
{ Empty m -> compile' m
; Consumed m -> compile' m };
|
8c7e36d8793a7bef65d2e758b57ca7216d49657ab5db45f059009e61aa8fb89e | auser/beehive | system_controller.erl | %%%-------------------------------------------------------------------
File :
Author :
%%% Description :
%%%
Created : Thu Dec 31 12:52:24 PST 2009
%%%-------------------------------------------------------------------
%% @doc REST controller for system-level admin endpoints. Each exported
%% callback receives the request path as a list of segments plus the
%% request data.
%% NOTE(review): node_manager, auth_utils and misc_utils are project
%% modules whose exact behaviour is not visible in this file.
-module (system_controller).
-include ("http.hrl").
-export ([get/2, post/2, put/2, delete/2]).

%% GET: always answers with the app name and its route list, regardless
%% of path.
get(_, _Data) ->
  {"beehive", ["routes"]}.

% /system/reload/config
%% POST /system/reload/config: re-read the bee configuration files.
post(["reload", "config"], _Data) ->
  node_manager:read_bee_configs(),
  {ok, "reloaded"};
%% POST /system/reload/system: full system reload.
post(["reload", "system"], _Data) ->
  node_manager:reload_system(),
  {ok, "reloaded"};
%% POST /system/reload: hot-reload all code; gated by run_if_admin, so
%% presumably Data carries the caller's credentials -- confirm against
%% auth_utils:run_if_admin/2.
post(["reload"], Data) ->
  auth_utils:run_if_admin(fun(_) ->
      misc_utils:reload_all()
    end, Data);
%% Any other POST/PUT/DELETE path is rejected with a JSON error body.
post(_Path, _Data) -> app_error("unhandled").

put(_Path, _Data) -> app_error("unhandled").

delete(_Path, _Data) -> app_error("unhandled").

%% Build the {struct, [...]} error term consumed by the JSON encoder.
app_error(Msg) ->
  {struct, [{error, misc_utils:to_bin(Msg)}]}.
| null | https://raw.githubusercontent.com/auser/beehive/dfe257701b21c56a50af73c8203ecac60ed21991/lib/erlang/apps/beehive/src/bh_rest/app_controllers/system_controller.erl | erlang | -------------------------------------------------------------------
Description :
-------------------------------------------------------------------
/system/reload/config | File :
Author :
Created : Thu Dec 31 12:52:24 PST 2009
-module (system_controller).
-include ("http.hrl").
-export ([get/2, post/2, put/2, delete/2]).
get(_, _Data) ->
{"beehive", ["routes"]}.
post(["reload", "config"], _Data) ->
node_manager:read_bee_configs(),
{ok, "reloaded"};
post(["reload", "system"], _Data) ->
node_manager:reload_system(),
{ok, "reloaded"};
post(["reload"], Data) ->
auth_utils:run_if_admin(fun(_) ->
misc_utils:reload_all()
end, Data);
post(_Path, _Data) -> app_error("unhandled").
put(_Path, _Data) -> app_error("unhandled").
delete(_Path, _Data) -> app_error("unhandled").
app_error(Msg) ->
{struct, [{error, misc_utils:to_bin(Msg)}]}.
|
20e06d2c29f5e7817538589d016994aff2d633d3ca204ecf12939858b9551e86 | seckcoder/course-compiler | r2_12.rkt | (if (if (not #t) #f #t) 42 777)
| null | https://raw.githubusercontent.com/seckcoder/course-compiler/4363e5b3e15eaa7553902c3850b6452de80b2ef6/tests/student-tests/r2_12.rkt | racket | (if (if (not #t) #f #t) 42 777)
| |
77ab730bfb8e00d440b68527e1a98f1d5f02818fa5ee1a95aa2736b0f563251d | y-taka-23/miso-vue-example | Main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Aeson (FromJSON (..), Value (Object),
decodeStrict, (.:))
import qualified Data.JSString as J
import JavaScript.Web.XMLHttpRequest
import Miso
import Miso.String (fromMisoString, ms)
-- | Entry point: start the Miso application mounted on the page's
-- @main@ element. 'FetchCommits' fires as the initial action, so the
-- commit list for the default branch loads immediately on start-up.
main :: IO ()
main = do
  startApp App {
      initialAction = FetchCommits
    , model         = initialModel
    , update        = updateModel
    , view          = viewModel
    , subs          = []
    , events        = defaultEvents
    , mountPoint    = Just "main"
    }
-- | A git branch name, e.g. @"master"@.
type Branch = String

-- | Application state: the fixed set of selectable branches, the one
-- currently selected, and the commits fetched for it ('Nothing' until
-- a fetch has succeeded and decoded).
data Model = Model {
    branches      :: [Branch]
  , currentBranch :: Branch
  , commits       :: Maybe [Commit]
  } deriving (Eq, Show)
-- | One commit as displayed in the list; all fields are plain strings
-- taken straight from the decoded JSON (see the FromJSON instance).
data Commit = Commit {
    cmtSHA        :: String   -- ^ commit hash
  , cmtURL        :: String   -- ^ link to the commit page
  , cmtMessage    :: String   -- ^ full commit message
  , cmtAuthorName :: String   -- ^ author display name
  , cmtAuthorURL  :: String   -- ^ link to the author's profile
  , cmtDate       :: String   -- ^ author date string (ISO-8601 per formatDate usage)
  } deriving (Eq, Show)
-- | Decode one element of GitHub's list-commits response: top-level
-- @sha@/@html_url@/@author@ plus the nested @commit.author@ object.
-- NOTE(review): only the 'Object' case is handled -- any other JSON
-- value hits a non-exhaustive pattern instead of a clean parser
-- failure; consider adding a @parseJSON _ = fail ...@ fallback.
instance FromJSON Commit where
  parseJSON (Object v) = do
    Object cmt       <- v .: "commit"
    Object cmtAuthor <- cmt .: "author"
    Object author    <- v .: "author"
    sha  <- v .: "sha"
    url  <- v .: "html_url"
    msg  <- cmt .: "message"
    name <- cmtAuthor .: "name"
    date <- cmtAuthor .: "date"
    aUrl <- author .: "html_url"
    pure $ Commit sha url msg name aUrl date
-- | Events handled by 'updateModel'.
data Action =
    FetchCommits                  -- ^ start an XHR for the current branch
  | SetCommits (Maybe [Commit])   -- ^ store the fetch result (Nothing on failure)
  | SetBranch Branch              -- ^ the user selected a different branch
-- | Start-up state: two hard-coded branches, "master" selected, and no
-- commits loaded yet (main's initial FetchCommits fills them in).
initialModel :: Model
initialModel = Model {
    branches      = [ "master", "0.21.1.0" ]
  , currentBranch = "master"
  , commits       = Nothing
  }
-- | State transition function (Elm-architecture style).
-- FetchCommits runs the XHR as an effect and feeds the result back as
-- SetCommits; SetCommits is a pure state update; SetBranch stores the
-- selection and immediately schedules a re-fetch.
updateModel :: Action -> Model -> Effect Action Model
updateModel FetchCommits model = model <# do
  mCommits <- fetchCommits (currentBranch model)
  pure $ SetCommits mCommits
updateModel (SetCommits mCommits) model =
  noEff model { commits = mCommits }
updateModel (SetBranch branch) model =
  model { currentBranch = branch } <# do
    pure FetchCommits
-- | Fetch the commit list for @branch@ via XHR and decode it.
-- Returns 'Nothing' when the response carries no body or the JSON fails
-- to decode. (Previously a missing body crashed on the partial pattern
-- @Just json <- ...@; it is now reported as 'Nothing', the same as a
-- decode failure, which SetCommits already handles.)
fetchCommits :: Branch -> IO (Maybe [Commit])
fetchCommits branch = do
  mBody <- contents <$> xhrByteString req
  pure (mBody >>= decodeStrict)
  where
    req = Request
      { reqMethod          = GET
      , reqURI             = J.pack url
      , reqLogin           = Nothing
      , reqHeaders         = []
      , reqWithCredentials = False
      , reqData            = NoData
      }
    -- NOTE(review): this URL literal looks truncated in this copy
    -- ("=" ++ branch); it should be the full GitHub list-commits
    -- endpoint ending in a ?sha= query parameter.
    url =
      "="
      ++ branch
-- | Render the whole page into the @#demo@ container: a heading, one
-- radio button per branch, the "repo@branch" caption, and the commit
-- list (empty while commits is Nothing).
viewModel :: Model -> View Action
viewModel model = div_ [ id_ "demo" ] $
  [ h1_ [] [ text "Latest Miso Commits" ] ]
  ++ concatMap (toRadioButton $ currentBranch model) (branches model)
  ++ [ p_ [] [ text . ms $ "dmjio/miso@" ++ currentBranch model ] ]
  ++ [ ul_ [] $ maybe [] (map toListItem) (commits model) ]
-- | One labelled radio button for a branch. All buttons share the
-- @name="branch"@ group; the one matching @current@ renders checked,
-- and changing the selection fires 'SetBranch' with the input's value.
toRadioButton :: Branch -> Branch -> [View Action]
toRadioButton current branch =
  let msb = ms branch
  in [ input_
         [ type_ "radio"
         , id_ msb
         , value_ msb
         , name_ "branch"
         , checked_ $ current == branch
         , onChange (SetBranch . fromMisoString)
         ]
     , label_ [ for_ msb ] [ text msb ]
     ]
-- | Render one commit as a list item:
-- "<sha link> - <first message line> / by <author link> at <date>".
-- The SHA is shortened to 7 characters, the message is cut at its first
-- newline (trancate), and the date has its ISO separators blanked
-- (formatDate). Both links open in a new tab.
toListItem :: Commit -> View action
toListItem commit = li_ [] [
    a_ [
        href_ . ms $ cmtURL commit
      , target_ "_blank"
      , class_ "commit"
      ] [
        text . ms . take 7 $ cmtSHA commit
      ]
  , text " - "
  , span_ [ class_ "message" ] [ text . ms . trancate $ cmtMessage commit ]
  , br_ []
  , text "by "
  , span_
      [ class_ "author" ]
      [ a_ [
            href_ . ms $ cmtAuthorURL commit
          , target_ "_blank"
          ] [
            text . ms $ cmtAuthorName commit
          ]
      ]
  , text " at "
  , span_ [ class_ "date" ] [ text . ms . formatDate $ cmtDate commit ]
  ]
-- | Keep only the first line of a commit message: everything before the
-- first newline is returned, the rest is dropped. (Name kept as-is,
-- misspelling and all, since callers reference it.)
trancate :: String -> String
trancate msg = firstLine msg
  where
    firstLine [] = []
    firstLine (c : rest)
      | c == '\n' = []
      | otherwise = c : firstLine rest
-- | Make an ISO-8601 timestamp easier to read by blanking the 'T'
-- separator and the trailing 'Z': each becomes a single space, every
-- other character passes through unchanged.
formatDate :: String -> String
formatDate stamp = [ blank c | c <- stamp ]
  where
    blank 'T' = ' '
    blank 'Z' = ' '
    blank c   = c
| null | https://raw.githubusercontent.com/y-taka-23/miso-vue-example/4d3fe5a129fa8e126ed4aa786db58c454e79add4/github-commits/Main.hs | haskell | # LANGUAGE OverloadedStrings # |
module Main where
import Data.Aeson (FromJSON (..), Value (Object),
decodeStrict, (.:))
import qualified Data.JSString as J
import JavaScript.Web.XMLHttpRequest
import Miso
import Miso.String (fromMisoString, ms)
main :: IO ()
main = do
startApp App {
initialAction = FetchCommits
, model = initialModel
, update = updateModel
, view = viewModel
, subs = []
, events = defaultEvents
, mountPoint = Just "main"
}
type Branch = String
data Model = Model {
branches :: [Branch]
, currentBranch :: Branch
, commits :: Maybe [Commit]
} deriving (Eq, Show)
data Commit = Commit {
cmtSHA :: String
, cmtURL :: String
, cmtMessage :: String
, cmtAuthorName :: String
, cmtAuthorURL :: String
, cmtDate :: String
} deriving (Eq, Show)
instance FromJSON Commit where
parseJSON (Object v) = do
Object cmt <- v .: "commit"
Object cmtAuthor <- cmt .: "author"
Object author <- v .: "author"
sha <- v .: "sha"
url <- v .: "html_url"
msg <- cmt .: "message"
name <- cmtAuthor .: "name"
date <- cmtAuthor .: "date"
aUrl <- author .: "html_url"
pure $ Commit sha url msg name aUrl date
data Action =
FetchCommits
| SetCommits (Maybe [Commit])
| SetBranch Branch
initialModel :: Model
initialModel = Model {
branches = [ "master", "0.21.1.0" ]
, currentBranch = "master"
, commits = Nothing
}
updateModel :: Action -> Model -> Effect Action Model
updateModel FetchCommits model = model <# do
mCommits <- fetchCommits (currentBranch model)
pure $ SetCommits mCommits
updateModel (SetCommits mCommits) model =
noEff model { commits = mCommits }
updateModel (SetBranch branch) model =
model { currentBranch = branch } <# do
pure FetchCommits
fetchCommits :: Branch -> IO (Maybe [Commit])
fetchCommits branch = do
Just json <- contents <$> xhrByteString req
pure $ decodeStrict json
where
req = Request
{ reqMethod = GET
, reqURI = J.pack url
, reqLogin = Nothing
, reqHeaders = []
, reqWithCredentials = False
, reqData = NoData
}
url =
"="
++ branch
viewModel :: Model -> View Action
viewModel model = div_ [ id_ "demo" ] $
[ h1_ [] [ text "Latest Miso Commits" ] ]
++ concatMap (toRadioButton $ currentBranch model) (branches model)
++ [ p_ [] [ text . ms $ "dmjio/miso@" ++ currentBranch model ] ]
++ [ ul_ [] $ maybe [] (map toListItem) (commits model) ]
toRadioButton :: Branch -> Branch -> [View Action]
toRadioButton current branch =
let msb = ms branch
in [ input_
[ type_ "radio"
, id_ msb
, value_ msb
, name_ "branch"
, checked_ $ current == branch
, onChange (SetBranch . fromMisoString)
]
, label_ [ for_ msb ] [ text msb ]
]
toListItem :: Commit -> View action
toListItem commit = li_ [] [
a_ [
href_ . ms $ cmtURL commit
, target_ "_blank"
, class_ "commit"
] [
text . ms . take 7 $ cmtSHA commit
]
, text " - "
, span_ [ class_ "message" ] [ text . ms . trancate $ cmtMessage commit ]
, br_ []
, text "by "
, span_
[ class_ "author" ]
[ a_ [
href_ . ms $ cmtAuthorURL commit
, target_ "_blank"
] [
text . ms $ cmtAuthorName commit
]
]
, text " at "
, span_ [ class_ "date" ] [ text . ms . formatDate $ cmtDate commit ]
]
trancate :: String -> String
trancate = takeWhile (/= '\n')
formatDate :: String -> String
formatDate = map (\c -> if c `elem` ['T', 'Z'] then ' ' else c)
|
3240614fb33195f41d39d1401ca2fec5a902a230e4bd8f6b5d83d9771d776569 | digitallyinduced/ihp | QQ.hs | # LANGUAGE TemplateHaskell , UndecidableInstances , BangPatterns , PackageImports , FlexibleInstances , OverloadedStrings #
|
Module : IHP.HSX.QQ
Description : Defines the @[hsx||]@ syntax
Copyright : ( c ) digitally induced GmbH , 2022
Module: IHP.HSX.QQ
Description: Defines the @[hsx||]@ syntax
Copyright: (c) digitally induced GmbH, 2022
-}
module IHP.HSX.QQ (hsx) where
import Prelude
import Data.Text (Text)
import IHP.HSX.Parser
import qualified "template-haskell" Language.Haskell.TH as TH
import qualified "template-haskell" Language.Haskell.TH.Syntax as TH
import Language.Haskell.TH.Quote
import Text.Blaze.Html5 ((!))
import qualified Text.Blaze.Html5 as Html5
import Text.Blaze.Html (Html)
import Text.Blaze.Internal (attribute, MarkupM (Parent, Leaf), StaticString (..))
import Data.String.Conversions
import IHP.HSX.ToHtml
import qualified Text.Megaparsec as Megaparsec
import qualified Text.Blaze.Html.Renderer.String as BlazeString
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Data.List (foldl')
-- | The @[hsx| ... |]@ quasiquoter. Only the expression position is
-- implemented; using it in pattern, declaration, or type position
-- aborts compilation with an error.
hsx :: QuasiQuoter
hsx = QuasiQuoter {
        quoteExp = quoteHsxExpression,
        quotePat = error "quotePat: not defined",
        quoteDec = error "quoteDec: not defined",
        quoteType = error "quoteType: not defined"
    }
-- | Parse quoted HSX source and compile it into a Template Haskell
-- expression. The quasiquote's real source position (file, line,
-- column) is recovered from TH.location so parse errors point at the
-- user's code, and the currently enabled GHC extensions are forwarded
-- to the HSX parser. Parse failures are reported via 'fail' with
-- Megaparsec's pretty error bundle.
quoteHsxExpression :: String -> TH.ExpQ
quoteHsxExpression code = do
    hsxPosition <- findHSXPosition
    extensions <- TH.extsEnabled
    expression <- case parseHsx hsxPosition extensions (cs code) of
        Left error   -> fail (Megaparsec.errorBundlePretty error)
        Right result -> pure result
    compileToHaskell expression
  where
    findHSXPosition = do
        loc <- TH.location
        let (line, col) = TH.loc_start loc
        pure $ Megaparsec.SourcePos (TH.loc_filename loc) (Megaparsec.mkPos line) (Megaparsec.mkPos col)
compileToHaskell :: Node -> TH.ExpQ
compileToHaskell (Node name attributes children isLeaf) =
let
renderedChildren = TH.listE $ map compileToHaskell children
stringAttributes = TH.listE $ map toStringAttribute attributes
openTag :: Text
openTag = "<" <> tag
tag :: Text
tag = cs name
in
if isLeaf
then
let
closeTag :: Text
closeTag = ">"
in [| (applyAttributes (Leaf (textToStaticString $(TH.lift tag)) (textToStaticString $(TH.lift openTag)) (textToStaticString $(TH.lift closeTag)) ()) $(stringAttributes)) |]
else
let
closeTag :: Text
closeTag = "</" <> tag <> ">"
in [| (applyAttributes (makeParent (textToStaticString $(TH.lift name)) (textToStaticString $(TH.lift openTag)) (textToStaticString $(TH.lift closeTag)) $renderedChildren) $(stringAttributes)) |]
compileToHaskell (Children children) =
let
renderedChildren = TH.listE $ map compileToHaskell children
in [| mconcat $(renderedChildren) |]
compileToHaskell (TextNode value) = [| Html5.preEscapedText value |]
compileToHaskell (PreEscapedTextNode value) = [| Html5.preEscapedText value |]
compileToHaskell (SplicedNode expression) = [| toHtml $(pure expression) |]
compileToHaskell (CommentNode value) = [| Html5.textComment value |]
toStringAttribute :: Attribute -> TH.ExpQ
toStringAttribute (StaticAttribute name (TextValue value)) = do
let nameWithSuffix = " " <> name <> "=\""
if Text.null value
then [| \h -> h ! ((attribute (Html5.textTag name) (Html5.textTag nameWithSuffix)) mempty) |]
else [| \h -> h ! ((attribute (Html5.textTag name) (Html5.textTag nameWithSuffix)) (Html5.preEscapedTextValue value)) |]
toStringAttribute (StaticAttribute name (ExpressionValue expression)) = let nameWithSuffix = " " <> name <> "=\"" in [| applyAttribute name nameWithSuffix $(pure expression) |]
toStringAttribute (SpreadAttributes expression) = [| spreadAttributes $(pure expression) |]
spreadAttributes :: ApplyAttribute value => [(Text, value)] -> Html5.Html -> Html5.Html
spreadAttributes attributes html = applyAttributes html $ map (\(name, value) -> applyAttribute name (" " <> name <> "=\"") value) attributes
{-# INLINE spreadAttributes #-}
applyAttributes :: Html5.Html -> [Html5.Html -> Html5.Html] -> Html5.Html
applyAttributes element attributes = foldl' (\element attribute -> attribute element) element attributes
# INLINE applyAttributes #
makeParent :: StaticString -> StaticString -> StaticString -> [Html] -> Html
makeParent tag openTag closeTag children = Parent tag openTag closeTag (mconcat children)
# INLINE makeParent #
textToStaticString :: Text -> StaticString
textToStaticString text = StaticString (Text.unpack text ++) (Text.encodeUtf8 text) text
# INLINE textToStaticString #
class ApplyAttribute value where
applyAttribute :: Text -> Text -> value -> (Html5.Html -> Html5.Html)
instance ApplyAttribute Bool where
applyAttribute attr attr' True h = h ! (attribute (Html5.textTag attr) (Html5.textTag attr') (Html5.textValue value))
where
value = if "data-" `Text.isPrefixOf` attr
then "true" -- "true" for data attributes
normal html boolean attriubtes , like < input disabled="disabled"/ > , see -microsyntaxes.html#boolean-attributes
applyAttribute attr attr' false h | "data-" `Text.isPrefixOf` attr = h ! (attribute (Html5.textTag attr) (Html5.textTag attr') "false") -- data attribute set to "false"
applyAttribute attr attr' false h = h -- html boolean attribute, like <input disabled/> will be dropped as there is no other way to specify that it's set to false
# INLINE applyAttribute #
instance ApplyAttribute attribute => ApplyAttribute (Maybe attribute) where
applyAttribute attr attr' (Just value) h = applyAttribute attr attr' value h
applyAttribute attr attr' Nothing h = h
# INLINE applyAttribute #
instance ApplyAttribute Html5.AttributeValue where
applyAttribute attr attr' value h = h ! (attribute (Html5.textTag attr) (Html5.textTag attr') value)
# INLINE applyAttribute #
instance {-# OVERLAPPABLE #-} ConvertibleStrings value Html5.AttributeValue => ApplyAttribute value where
applyAttribute attr attr' value h = applyAttribute attr attr' ((cs value) :: Html5.AttributeValue) h
# INLINE applyAttribute #
instance Show (MarkupM ()) where
show html = BlazeString.renderHtml html
| null | https://raw.githubusercontent.com/digitallyinduced/ihp/f8afa474de77a8ae5ba6626f9d9878889653a6cb/ihp-hsx/IHP/HSX/QQ.hs | haskell | # INLINE spreadAttributes #
"true" for data attributes
data attribute set to "false"
html boolean attribute, like <input disabled/> will be dropped as there is no other way to specify that it's set to false
# OVERLAPPABLE # | # LANGUAGE TemplateHaskell , UndecidableInstances , BangPatterns , PackageImports , FlexibleInstances , OverloadedStrings #
|
Module : IHP.HSX.QQ
Description : Defines the @[hsx||]@ syntax
Copyright : ( c ) digitally induced GmbH , 2022
Module: IHP.HSX.QQ
Description: Defines the @[hsx||]@ syntax
Copyright: (c) digitally induced GmbH, 2022
-}
module IHP.HSX.QQ (hsx) where
import Prelude
import Data.Text (Text)
import IHP.HSX.Parser
import qualified "template-haskell" Language.Haskell.TH as TH
import qualified "template-haskell" Language.Haskell.TH.Syntax as TH
import Language.Haskell.TH.Quote
import Text.Blaze.Html5 ((!))
import qualified Text.Blaze.Html5 as Html5
import Text.Blaze.Html (Html)
import Text.Blaze.Internal (attribute, MarkupM (Parent, Leaf), StaticString (..))
import Data.String.Conversions
import IHP.HSX.ToHtml
import qualified Text.Megaparsec as Megaparsec
import qualified Text.Blaze.Html.Renderer.String as BlazeString
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Data.List (foldl')
hsx :: QuasiQuoter
hsx = QuasiQuoter {
quoteExp = quoteHsxExpression,
quotePat = error "quotePat: not defined",
quoteDec = error "quoteDec: not defined",
quoteType = error "quoteType: not defined"
}
quoteHsxExpression :: String -> TH.ExpQ
quoteHsxExpression code = do
hsxPosition <- findHSXPosition
extensions <- TH.extsEnabled
expression <- case parseHsx hsxPosition extensions (cs code) of
Left error -> fail (Megaparsec.errorBundlePretty error)
Right result -> pure result
compileToHaskell expression
where
findHSXPosition = do
loc <- TH.location
let (line, col) = TH.loc_start loc
pure $ Megaparsec.SourcePos (TH.loc_filename loc) (Megaparsec.mkPos line) (Megaparsec.mkPos col)
compileToHaskell :: Node -> TH.ExpQ
compileToHaskell (Node name attributes children isLeaf) =
let
renderedChildren = TH.listE $ map compileToHaskell children
stringAttributes = TH.listE $ map toStringAttribute attributes
openTag :: Text
openTag = "<" <> tag
tag :: Text
tag = cs name
in
if isLeaf
then
let
closeTag :: Text
closeTag = ">"
in [| (applyAttributes (Leaf (textToStaticString $(TH.lift tag)) (textToStaticString $(TH.lift openTag)) (textToStaticString $(TH.lift closeTag)) ()) $(stringAttributes)) |]
else
let
closeTag :: Text
closeTag = "</" <> tag <> ">"
in [| (applyAttributes (makeParent (textToStaticString $(TH.lift name)) (textToStaticString $(TH.lift openTag)) (textToStaticString $(TH.lift closeTag)) $renderedChildren) $(stringAttributes)) |]
compileToHaskell (Children children) =
let
renderedChildren = TH.listE $ map compileToHaskell children
in [| mconcat $(renderedChildren) |]
compileToHaskell (TextNode value) = [| Html5.preEscapedText value |]
compileToHaskell (PreEscapedTextNode value) = [| Html5.preEscapedText value |]
compileToHaskell (SplicedNode expression) = [| toHtml $(pure expression) |]
compileToHaskell (CommentNode value) = [| Html5.textComment value |]
toStringAttribute :: Attribute -> TH.ExpQ
toStringAttribute (StaticAttribute name (TextValue value)) = do
let nameWithSuffix = " " <> name <> "=\""
if Text.null value
then [| \h -> h ! ((attribute (Html5.textTag name) (Html5.textTag nameWithSuffix)) mempty) |]
else [| \h -> h ! ((attribute (Html5.textTag name) (Html5.textTag nameWithSuffix)) (Html5.preEscapedTextValue value)) |]
toStringAttribute (StaticAttribute name (ExpressionValue expression)) = let nameWithSuffix = " " <> name <> "=\"" in [| applyAttribute name nameWithSuffix $(pure expression) |]
toStringAttribute (SpreadAttributes expression) = [| spreadAttributes $(pure expression) |]
spreadAttributes :: ApplyAttribute value => [(Text, value)] -> Html5.Html -> Html5.Html
spreadAttributes attributes html = applyAttributes html $ map (\(name, value) -> applyAttribute name (" " <> name <> "=\"") value) attributes
applyAttributes :: Html5.Html -> [Html5.Html -> Html5.Html] -> Html5.Html
applyAttributes element attributes = foldl' (\element attribute -> attribute element) element attributes
# INLINE applyAttributes #
makeParent :: StaticString -> StaticString -> StaticString -> [Html] -> Html
makeParent tag openTag closeTag children = Parent tag openTag closeTag (mconcat children)
# INLINE makeParent #
textToStaticString :: Text -> StaticString
textToStaticString text = StaticString (Text.unpack text ++) (Text.encodeUtf8 text) text
# INLINE textToStaticString #
class ApplyAttribute value where
applyAttribute :: Text -> Text -> value -> (Html5.Html -> Html5.Html)
instance ApplyAttribute Bool where
applyAttribute attr attr' True h = h ! (attribute (Html5.textTag attr) (Html5.textTag attr') (Html5.textValue value))
where
value = if "data-" `Text.isPrefixOf` attr
normal html boolean attriubtes , like < input disabled="disabled"/ > , see -microsyntaxes.html#boolean-attributes
# INLINE applyAttribute #
instance ApplyAttribute attribute => ApplyAttribute (Maybe attribute) where
applyAttribute attr attr' (Just value) h = applyAttribute attr attr' value h
applyAttribute attr attr' Nothing h = h
# INLINE applyAttribute #
instance ApplyAttribute Html5.AttributeValue where
applyAttribute attr attr' value h = h ! (attribute (Html5.textTag attr) (Html5.textTag attr') value)
# INLINE applyAttribute #
applyAttribute attr attr' value h = applyAttribute attr attr' ((cs value) :: Html5.AttributeValue) h
# INLINE applyAttribute #
instance Show (MarkupM ()) where
show html = BlazeString.renderHtml html
|
a99b45bd4273c3e96fb6e3789d5b2ce7f16394f8cd7f88ae461244b4b91e291c | alan-j-hu/ocaml-textmate-language | tmLanguage.ml | include Common
include Reader
include Tokenizer
| null | https://raw.githubusercontent.com/alan-j-hu/ocaml-textmate-language/06f80e6d1cdf99949514281f1705a1a29604ff75/src/tmLanguage.ml | ocaml | include Common
include Reader
include Tokenizer
| |
052756fc25325f0d66d6b3dec4f7853ed84e5046ba4dfe190556356aeba34dd2 | smucclaw/dsl | NLG.hs | {-# OPTIONS_GHC -Wno-name-shadowing #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs, NamedFieldPuns, FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
module LS.NLP.NLG where
import LS.NLP.NL4
import LS.NLP.NL4Transformations
import LS.Types
import LS.Rule (Rule(..))
import PGF
import Data.Maybe (catMaybes)
import qualified Data.Text as Text
import qualified AnyAll as AA
import System.Environment (lookupEnv)
import Paths_natural4
import Data.Foldable as F
data NLGEnv = NLGEnv
{ gfGrammar :: PGF
, gfLang :: Language
, gfParse :: Type -> Text.Text -> [Expr]
, gfLin :: Expr -> Text.Text
, verbose :: Bool
}
allLangs :: IO [Language]
allLangs = do
grammarFile <- getDataFileName $ gfPath "NL4.pgf"
gr <- readPGF grammarFile
pure $ languages gr
getLang :: String -> Language
getLang str = case readLanguage str of
Nothing -> error $ "language " <> str <> " not found"
Just l -> l
myNLGEnv :: Language -> IO NLGEnv
myNLGEnv lang = do
mpn <- lookupEnv "MP_NLG"
let verbose = maybe False (read :: String -> Bool) mpn
grammarFile <- getDataFileName $ gfPath "NL4.pgf"
gr <- readPGF grammarFile
let eng = getLang "NL4Eng"
myParse typ txt = parse gr eng typ (Text.unpack txt)
let myLin = rmBIND . Text.pack . linearize gr lang
pure $ NLGEnv gr lang myParse myLin verbose
rmBIND :: Text.Text -> Text.Text
rmBIND = Text.replace " &+ " ""
gfPath :: String -> String
gfPath x = "grammars/" ++ x
-----------------------------------------------------------------------------
-- Main
WIP : crude way of keeping track of whether we 're in hence , lest or whatever
data RecursionLevel = TopLevel | MyHence Int | MyLest Int
deriving (Eq,Ord,Show)
getLevel :: RecursionLevel -> Int
getLevel l = case l of
TopLevel -> 2
MyHence i -> i
MyLest i -> i
debugNesting :: RecursionLevel -> (Text.Text, Text.Text)
debugNesting TopLevel = (Text.pack "", Text.pack "")
debugNesting (MyHence _) = (Text.pack "Follow by:", Text.pack "")
debugNesting (MyLest _) = (Text.pack "In case of failure:", Text.pack "")
nlg :: NLGEnv -> Rule -> IO Text.Text
nlg = nlg' TopLevel
nlg' :: RecursionLevel -> NLGEnv -> Rule -> IO Text.Text
nlg' thl env rule = case rule of
Regulative {subj,upon,temporal,cond,who,deontic,action,lest,hence} -> do
let subjExpr = introduceSubj $ parseSubj env subj
deonticExpr = parseDeontic deontic
actionExpr = parseAction env action
whoSubjExpr = case who of
Just w -> GSubjWho subjExpr (bsWho2gfWho (parseWhoBS env w))
Nothing -> subjExpr
ruleText = gfLin env $ gf $ GRegulative whoSubjExpr deonticExpr actionExpr
TODO : does n't work once we add another language
Just u ->
let uponExpr = gf $ GadvUPON $ parseUpon env u
in gfLin env uponExpr <> ", "
Nothing -> mempty
tcText = case temporal of
Just t -> " " <> (gfLin env $ gf $ parseTemporal env t)
Nothing -> mempty
condText = case cond of
Just c ->
let condExpr = gf $ pastTense $ bsCond2gfCond (parseCondBS env c)
in ". If " <> gfLin env condExpr <> ", "
Nothing -> mempty
ruleTextDebug = Text.unwords [prefix, uponText <> ruleText <> tcText <> condText, suffix]
lestText <- case lest of
Just r -> do
rt <- nlg' (MyLest i) env r
pure $ pad rt
Nothing -> pure mempty
henceText <- case hence of
Just r -> do
rt <- nlg' (MyHence i) env r
pure $ pad rt
Nothing -> pure mempty
pure $ Text.strip $ Text.unlines [ruleTextDebug, henceText, lestText]
Hornlike {clauses} -> do
let headLins = gfLin env . gf . parseConstraint env . hHead <$> clauses -- :: [GConstraint] -- this will not become a question
parseBodyHC cl = case hBody cl of
Just bs -> gfLin env $ gf $ bsConstraint2gfConstraint $ parseConstraintBS env bs
Nothing -> mempty
bodyLins = parseBodyHC <$> clauses
pure $ Text.unlines $ headLins <> ["when"] <> bodyLins
RuleAlias mt -> do
let ruleText = gfLin env $ gf $ parseSubj env $ mkLeafPT $ mt2text mt
ruleTextDebug = Text.unwords [prefix, ruleText, suffix]
pure $ Text.strip ruleTextDebug
DefNameAlias {} -> pure mempty
_ -> pure $ "NLG.hs is under construction, we don't support yet " <> Text.pack (show rule)
where
(prefix,suffix) = debugNesting thl
i = getLevel thl + 2
pad x = Text.replicate i " " <> x
-- | rewrite statements into questions, for use by the Q&A web UI
--
-- +-----------------+-----------------------------------------------------+
| input | the data breach , occurs on or after 1 Feb 2022 |
| output | Did the data breach occur on or after 1 Feb 2022 ? |
-- +-----------------+-----------------------------------------------------+
| input | Organisation , NOT , is a Public Agency |
-- | intermediate | (AA.Not (...) :: BoolStructT |
| output | Is the Organisation a Public Agency ? |
-- +-----------------+-----------------------------------------------------+
| input | Claim Count < = 2 |
| intermediate | RPConstraint ( RPMT [ " Claim Count " ] ) |
-- | | (RPMT ["2"]) :: RelationalPredicate |
| output | Have there been more than two claims ? |
-- +-----------------+-----------------------------------------------------+
ruleQuestions :: NLGEnv -> Maybe (MultiTerm,MultiTerm) -> Rule -> IO [AA.OptionallyLabeledBoolStruct Text.Text]
ruleQuestions env alias rule = do
case rule of
Regulative {subj,who,cond,upon} -> text
Hornlike {clauses} -> do
print "---"
print $ ruleQnTrees env alias rule
print "---"
text
Constitutive {cond} -> text
DefNameAlias {} -> pure [] -- no questions needed to produce from DefNameAlias
_ -> pure [AA.Leaf $ Text.pack ("ruleQuestions: doesn't work yet for " <> show rule)]
where
text = pure $ fmap (linBStext env) (concat $ ruleQnTrees env alias rule)
ruleQnTrees :: NLGEnv -> Maybe (MultiTerm,MultiTerm) -> Rule -> [[BoolStructGText]]
ruleQnTrees env alias rule = do
let (youExpr, orgExpr) =
case alias of
Just (you,org) ->
case parseSubj env . mkLeafPT . mt2text <$> [you, org] of
[y,o] -> (y,o) -- both are parsed
_ -> (GYou, GYou) -- dummy values
Nothing -> (GYou, GYou) -- dummy values
case rule of
Regulative {subj,who,cond,upon} -> do
let subjExpr = parseSubj env subj
aliasExpr = if subjExpr==orgExpr then youExpr else referSubj subjExpr
qWhoTrees = mkWhoText env GqPREPOST (GqWHO aliasExpr) <$> who
qCondTrees = mkCondText env GqPREPOST GqCOND <$> cond
qUponTrees = mkUponText env (GqUPON aliasExpr) <$> upon
return $ catMaybes [qWhoTrees, qCondTrees, qUponTrees]
Hornlike {clauses} -> do
let bodyTrees = fmap (mkConstraintText env GqPREPOST GqCONSTR) . hBody <$> clauses
return $ catMaybes bodyTrees
Constitutive {cond} -> do
let qCondTrees = mkCondText env GqPREPOST GqCOND <$> cond
return $ catMaybes [qCondTrees]
DefNameAlias {} -> pure []
_ -> pure []
linBStext :: NLGEnv -> BoolStructGText -> AA.OptionallyLabeledBoolStruct Text.Text
linBStext env = mapBSLabel (gfLin env . gf) (gfLin env . gf)
mkWhoText :: NLGEnv -> (GPrePost -> GText) -> (GWho -> GText) -> BoolStructR -> BoolStructGText
mkWhoText env f g bsr = mapBSLabel f g $ aggregateBoolStruct (gfLang env) $ parseWhoBS env bsr
mkCondText :: NLGEnv -> (GPrePost -> GText) -> (GCond -> GText) -> BoolStructR -> BoolStructGText
mkCondText env f g bsr = mapBSLabel f g $ aggregateBoolStruct (gfLang env) $ parseCondBS env bsr
mkConstraintText :: NLGEnv -> (GPrePost -> GText) -> (GConstraint -> GText) -> BoolStructR -> BoolStructGText
mkConstraintText env f g bsr = mapBSLabel f g $ aggregateBoolStruct (gfLang env) $ parseConstraintBS env bsr
mkUponText :: NLGEnv -> (GUpon -> GText) -> ParamText -> BoolStructGText
mkUponText env f pt = AA.Leaf (f $ parseUpon env pt)
-- mkUponText :: NLGEnv -> (GUpon -> GText) -> ParamText -> AA.OptionallyLabeledBoolStruct Text.Text
-- mkUponText env f = AA.Leaf . gfLin env . gf . f . parseUpon env
nlgQuestion :: NLGEnv -> Rule -> IO [Text.Text]
nlgQuestion env rl = do
questionsInABoolStruct <- ruleQuestions env Nothing rl -- TODO: the Nothing means there is no AKA
pure $ concatMap F.toList questionsInABoolStruct
-----------------------------------------------------------------------------
Parsing fields into GF categories – all typed , no allowed
-- Special constructions for the fields that are BoolStructR
parseConstraintBS :: NLGEnv -> BoolStructR -> BoolStructConstraint
parseConstraintBS env = mapBSLabel (parsePrePost env) (parseConstraint env)
parseWhoBS :: NLGEnv -> BoolStructR -> BoolStructWho
parseWhoBS env = mapBSLabel (parsePrePost env) (parseWho env)
parseCondBS :: NLGEnv -> BoolStructR -> BoolStructCond
parseCondBS env = mapBSLabel (parsePrePost env) (parseCond env)
-- not really parsing, just converting nL4 constructors to GF constructors
parseDeontic :: Deontic -> GDeontic
parseDeontic DMust = GMUST
parseDeontic DMay = GMAY
parseDeontic DShant = GSHANT
parseTComparison :: TComparison -> GTComparison
parseTComparison TBefore = GBEFORE
parseTComparison TAfter = GAFTER
parseTComparison TBy = GBY
parseTComparison TOn = GON
parseTComparison TVague = GVAGUE
parseDate :: MultiTerm -> GDate
parseDate mt = case Text.words $ mt2text mt of
[d, m, y] -> GMkDate (tDay d) (tMonth m) (mkYear y)
_ -> GMkDate (LexDay "Day1") (LexMonth "Jan") dummyYear
where
dummyYear = mkYear "1970"
mkYear :: Text.Text -> GYear
mkYear y = GMkYear (LexYearComponent y1) (LexYearComponent y2) (LexYearComponent y3) (LexYearComponent y4)
where [y1, y2, y3, y4] = splitYear y
splitYear :: Text.Text -> [String]
splitYear y = case ["Y" <> [d] | d <- Text.unpack y] of
xs@[_, _, _, _] -> xs
_ -> ["Y2", "Y0", "Y0", "Y0"]
tDay :: Text.Text -> GDay
tDay t = LexDay ("Day"<> Text.unpack t)
tMonth :: Text.Text -> GMonth
tMonth = LexMonth . Text.unpack
TODO : stop using * 2text , instead use the internal structure
-- "respond" :| [] -> respond : VP
" demand " :| [ " an explanation for your inaction " ] - > demand : V2 , NP complement , call
" assess " :| [ " if it is a Notifiable Data Breach " ] - > assess : VS , S complement , call ComplS2
parseAction :: NLGEnv -> BoolStructP -> GAction
parseAction env bsp = let txt = bsp2text bsp in
case parseAny "Action" env txt of
[] -> error $ msg "Action" txt
x:_ -> fg x
parseSubj :: NLGEnv -> BoolStructP -> GSubj
parseSubj env bsp = let txt = bsp2text bsp in
case parseAny "Subj" env txt of
[] -> error $ msg "Subj" txt
x:_ -> fg x
parseWho :: NLGEnv -> RelationalPredicate -> GWho
parseWho env rp = let txt = rp2text rp in
case parseAny "Who" env txt of
[] -> error $ msg "Who" txt
x:_ -> fg x
parseCond :: NLGEnv -> RelationalPredicate -> GCond
parseCond env (RPConstraint c (RPTC t) d) = GRPConstraint cond tc date
where
cond = parseCond env (RPMT c)
tc = parseTComparison t
date = parseDate d
parseCond env rp = let txt = rp2text rp in
case parseAny "Cond" env txt of
[] -> error $ msg "Cond" txt
x:_ -> fg x
parseUpon :: NLGEnv -> ParamText -> GUpon
parseUpon env pt = let txt = pt2text pt in
case parseAny "Upon" env txt of
[] -> error $ msg "Upon" txt
x:_ -> fg x
parseTemporal :: NLGEnv -> TemporalConstraint Text.Text -> GTemporal
parseTemporal env (TemporalConstraint t (Just int) text) = GTemporalConstraint tc digits unit
where
tc = parseTComparison t
digits = mkDigits int
unit = parseTimeUnit text
mkDigits :: Integer -> GDigits
mkDigits i = case [LexDig $ "D_" <> [d] | d <- show i] of
[] -> GIDig (LexDig "D_0") -- shouldn't happen, TODO alert user?
[dig] -> GIDig dig
xs -> foldr GIIDig (GIDig (last xs)) (init xs)
parseTemporal _ (TemporalConstraint tc Nothing text) = undefined
parseTimeUnit :: Text.Text -> GTimeUnit
parseTimeUnit text = case take 3 $ Text.unpack $ Text.toLower text of
"day" -> GDay_Unit
"mon" -> GMonth_Unit
"yea" -> GYear_Unit
xs -> error $ "unrecognised unit of time: " <> Text.unpack text
parseConstraint :: NLGEnv -> RelationalPredicate -> GConstraint
parseConstraint env (RPBoolStructR a RPis (AA.Not b)) = case (nps,vps) of
(np:_, vp:_) -> GRPleafS (fg np) (flipPolarity $ fg vp)
_ -> GrecoverRPis (tString aTxt) (tString $ Text.unwords ["not", bTxt])
where
aTxt = mt2text a
bTxt = bsr2text b
nps = parseAnyNoRecover "NP" env aTxt
vps = parseAnyNoRecover "VPS" env $ Text.unwords ["is", bTxt]
tString :: Text.Text -> GString
tString = GString . read . Text.unpack
parseConstraint env (RPConstraint a RPis b) = case (nps,vps) of
(np:_, vp:_) -> GRPleafS (fg np) (fg vp)
_ -> GrecoverRPis (tString aTxt) (tString bTxt)
where
aTxt = mt2text a
bTxt = mt2text b
nps = parseAnyNoRecover "NP" env aTxt
vps = parseAnyNoRecover "VPS" env $ Text.unwords ["is", bTxt]
tString :: Text.Text -> GString
tString = GString . read . Text.unpack
parseConstraint env rp = let txt = rp2text rp in
case parseAny "Constraint" env txt of
[] -> error $ msg "Constraint" txt
x:_ -> fg x
parsePrePost :: NLGEnv -> Text.Text -> GPrePost
parsePrePost env txt =
case parseAny "PrePost" env txt of
[] -> GrecoverUnparsedPrePost $ GString $ Text.unpack txt
x:_ -> fg x
-- TODO: later if grammar is ambiguous, should we rank trees here?
parseAny :: String -> NLGEnv -> Text.Text -> [Expr]
parseAny cat env txt = res
where
typ = case readType cat of
Nothing -> error $ unwords ["category", cat, "not found among", show $ categories (gfGrammar env)]
Just t -> t
res = case gfParse env typ txt of
[] -> [mkApp (mkCId $ "recoverUnparsed"<>cat) [mkStr $ Text.unpack txt]]
xs -> xs
parseAnyNoRecover :: String -> NLGEnv -> Text.Text -> [Expr]
parseAnyNoRecover cat env = gfParse env typ
where
typ = case readType cat of
Nothing -> error $ unwords ["category", cat, "not found among", show $ categories (gfGrammar env)]
Just t -> t
msg :: String -> Text.Text -> String
msg typ txt = "parse" <> typ <> ": failed to parse " <> Text.unpack txt
----------------------------------------------------------------------------- | null | https://raw.githubusercontent.com/smucclaw/dsl/c4519c02c303b836c738f20b6f3fdda69516803a/lib/haskell/natural4/src/LS/NLP/NLG.hs | haskell | # OPTIONS_GHC -Wno-name-shadowing #
# LANGUAGE OverloadedStrings #
# LANGUAGE GADTs, NamedFieldPuns, FlexibleContexts #
# LANGUAGE RankNTypes #
---------------------------------------------------------------------------
Main
:: [GConstraint] -- this will not become a question
| rewrite statements into questions, for use by the Q&A web UI
+-----------------+-----------------------------------------------------+
+-----------------+-----------------------------------------------------+
| intermediate | (AA.Not (...) :: BoolStructT |
+-----------------+-----------------------------------------------------+
| | (RPMT ["2"]) :: RelationalPredicate |
+-----------------+-----------------------------------------------------+
no questions needed to produce from DefNameAlias
both are parsed
dummy values
dummy values
mkUponText :: NLGEnv -> (GUpon -> GText) -> ParamText -> AA.OptionallyLabeledBoolStruct Text.Text
mkUponText env f = AA.Leaf . gfLin env . gf . f . parseUpon env
TODO: the Nothing means there is no AKA
---------------------------------------------------------------------------
Special constructions for the fields that are BoolStructR
not really parsing, just converting nL4 constructors to GF constructors
"respond" :| [] -> respond : VP
shouldn't happen, TODO alert user?
TODO: later if grammar is ambiguous, should we rank trees here?
--------------------------------------------------------------------------- |
module LS.NLP.NLG where
import LS.NLP.NL4
import LS.NLP.NL4Transformations
import LS.Types
import LS.Rule (Rule(..))
import PGF
import Data.Maybe (catMaybes)
import qualified Data.Text as Text
import qualified AnyAll as AA
import System.Environment (lookupEnv)
import Paths_natural4
import Data.Foldable as F
data NLGEnv = NLGEnv
{ gfGrammar :: PGF
, gfLang :: Language
, gfParse :: Type -> Text.Text -> [Expr]
, gfLin :: Expr -> Text.Text
, verbose :: Bool
}
allLangs :: IO [Language]
allLangs = do
grammarFile <- getDataFileName $ gfPath "NL4.pgf"
gr <- readPGF grammarFile
pure $ languages gr
getLang :: String -> Language
getLang str = case readLanguage str of
Nothing -> error $ "language " <> str <> " not found"
Just l -> l
myNLGEnv :: Language -> IO NLGEnv
myNLGEnv lang = do
mpn <- lookupEnv "MP_NLG"
let verbose = maybe False (read :: String -> Bool) mpn
grammarFile <- getDataFileName $ gfPath "NL4.pgf"
gr <- readPGF grammarFile
let eng = getLang "NL4Eng"
myParse typ txt = parse gr eng typ (Text.unpack txt)
let myLin = rmBIND . Text.pack . linearize gr lang
pure $ NLGEnv gr lang myParse myLin verbose
rmBIND :: Text.Text -> Text.Text
rmBIND = Text.replace " &+ " ""
gfPath :: String -> String
gfPath x = "grammars/" ++ x
WIP : crude way of keeping track of whether we 're in hence , lest or whatever
data RecursionLevel = TopLevel | MyHence Int | MyLest Int
deriving (Eq,Ord,Show)
getLevel :: RecursionLevel -> Int
getLevel l = case l of
TopLevel -> 2
MyHence i -> i
MyLest i -> i
debugNesting :: RecursionLevel -> (Text.Text, Text.Text)
debugNesting TopLevel = (Text.pack "", Text.pack "")
debugNesting (MyHence _) = (Text.pack "Follow by:", Text.pack "")
debugNesting (MyLest _) = (Text.pack "In case of failure:", Text.pack "")
nlg :: NLGEnv -> Rule -> IO Text.Text
nlg = nlg' TopLevel
nlg' :: RecursionLevel -> NLGEnv -> Rule -> IO Text.Text
nlg' thl env rule = case rule of
Regulative {subj,upon,temporal,cond,who,deontic,action,lest,hence} -> do
let subjExpr = introduceSubj $ parseSubj env subj
deonticExpr = parseDeontic deontic
actionExpr = parseAction env action
whoSubjExpr = case who of
Just w -> GSubjWho subjExpr (bsWho2gfWho (parseWhoBS env w))
Nothing -> subjExpr
ruleText = gfLin env $ gf $ GRegulative whoSubjExpr deonticExpr actionExpr
TODO : does n't work once we add another language
Just u ->
let uponExpr = gf $ GadvUPON $ parseUpon env u
in gfLin env uponExpr <> ", "
Nothing -> mempty
tcText = case temporal of
Just t -> " " <> (gfLin env $ gf $ parseTemporal env t)
Nothing -> mempty
condText = case cond of
Just c ->
let condExpr = gf $ pastTense $ bsCond2gfCond (parseCondBS env c)
in ". If " <> gfLin env condExpr <> ", "
Nothing -> mempty
ruleTextDebug = Text.unwords [prefix, uponText <> ruleText <> tcText <> condText, suffix]
lestText <- case lest of
Just r -> do
rt <- nlg' (MyLest i) env r
pure $ pad rt
Nothing -> pure mempty
henceText <- case hence of
Just r -> do
rt <- nlg' (MyHence i) env r
pure $ pad rt
Nothing -> pure mempty
pure $ Text.strip $ Text.unlines [ruleTextDebug, henceText, lestText]
Hornlike {clauses} -> do
parseBodyHC cl = case hBody cl of
Just bs -> gfLin env $ gf $ bsConstraint2gfConstraint $ parseConstraintBS env bs
Nothing -> mempty
bodyLins = parseBodyHC <$> clauses
pure $ Text.unlines $ headLins <> ["when"] <> bodyLins
RuleAlias mt -> do
let ruleText = gfLin env $ gf $ parseSubj env $ mkLeafPT $ mt2text mt
ruleTextDebug = Text.unwords [prefix, ruleText, suffix]
pure $ Text.strip ruleTextDebug
DefNameAlias {} -> pure mempty
_ -> pure $ "NLG.hs is under construction, we don't support yet " <> Text.pack (show rule)
where
(prefix,suffix) = debugNesting thl
i = getLevel thl + 2
pad x = Text.replicate i " " <> x
| input | the data breach , occurs on or after 1 Feb 2022 |
| output | Did the data breach occur on or after 1 Feb 2022 ? |
| input | Organisation , NOT , is a Public Agency |
| output | Is the Organisation a Public Agency ? |
| input | Claim Count < = 2 |
| intermediate | RPConstraint ( RPMT [ " Claim Count " ] ) |
| output | Have there been more than two claims ? |
ruleQuestions :: NLGEnv -> Maybe (MultiTerm,MultiTerm) -> Rule -> IO [AA.OptionallyLabeledBoolStruct Text.Text]
ruleQuestions env alias rule = do
case rule of
Regulative {subj,who,cond,upon} -> text
Hornlike {clauses} -> do
print "---"
print $ ruleQnTrees env alias rule
print "---"
text
Constitutive {cond} -> text
_ -> pure [AA.Leaf $ Text.pack ("ruleQuestions: doesn't work yet for " <> show rule)]
where
text = pure $ fmap (linBStext env) (concat $ ruleQnTrees env alias rule)
ruleQnTrees :: NLGEnv -> Maybe (MultiTerm,MultiTerm) -> Rule -> [[BoolStructGText]]
ruleQnTrees env alias rule = do
let (youExpr, orgExpr) =
case alias of
Just (you,org) ->
case parseSubj env . mkLeafPT . mt2text <$> [you, org] of
case rule of
Regulative {subj,who,cond,upon} -> do
let subjExpr = parseSubj env subj
aliasExpr = if subjExpr==orgExpr then youExpr else referSubj subjExpr
qWhoTrees = mkWhoText env GqPREPOST (GqWHO aliasExpr) <$> who
qCondTrees = mkCondText env GqPREPOST GqCOND <$> cond
qUponTrees = mkUponText env (GqUPON aliasExpr) <$> upon
return $ catMaybes [qWhoTrees, qCondTrees, qUponTrees]
Hornlike {clauses} -> do
let bodyTrees = fmap (mkConstraintText env GqPREPOST GqCONSTR) . hBody <$> clauses
return $ catMaybes bodyTrees
Constitutive {cond} -> do
let qCondTrees = mkCondText env GqPREPOST GqCOND <$> cond
return $ catMaybes [qCondTrees]
DefNameAlias {} -> pure []
_ -> pure []
linBStext :: NLGEnv -> BoolStructGText -> AA.OptionallyLabeledBoolStruct Text.Text
linBStext env = mapBSLabel (gfLin env . gf) (gfLin env . gf)
mkWhoText :: NLGEnv -> (GPrePost -> GText) -> (GWho -> GText) -> BoolStructR -> BoolStructGText
mkWhoText env f g bsr = mapBSLabel f g $ aggregateBoolStruct (gfLang env) $ parseWhoBS env bsr
mkCondText :: NLGEnv -> (GPrePost -> GText) -> (GCond -> GText) -> BoolStructR -> BoolStructGText
mkCondText env f g bsr = mapBSLabel f g $ aggregateBoolStruct (gfLang env) $ parseCondBS env bsr
mkConstraintText :: NLGEnv -> (GPrePost -> GText) -> (GConstraint -> GText) -> BoolStructR -> BoolStructGText
mkConstraintText env f g bsr = mapBSLabel f g $ aggregateBoolStruct (gfLang env) $ parseConstraintBS env bsr
mkUponText :: NLGEnv -> (GUpon -> GText) -> ParamText -> BoolStructGText
mkUponText env f pt = AA.Leaf (f $ parseUpon env pt)
nlgQuestion :: NLGEnv -> Rule -> IO [Text.Text]
nlgQuestion env rl = do
pure $ concatMap F.toList questionsInABoolStruct
Parsing fields into GF categories – all typed , no allowed
parseConstraintBS :: NLGEnv -> BoolStructR -> BoolStructConstraint
parseConstraintBS env = mapBSLabel (parsePrePost env) (parseConstraint env)
parseWhoBS :: NLGEnv -> BoolStructR -> BoolStructWho
parseWhoBS env = mapBSLabel (parsePrePost env) (parseWho env)
parseCondBS :: NLGEnv -> BoolStructR -> BoolStructCond
parseCondBS env = mapBSLabel (parsePrePost env) (parseCond env)
parseDeontic :: Deontic -> GDeontic
parseDeontic DMust = GMUST
parseDeontic DMay = GMAY
parseDeontic DShant = GSHANT
parseTComparison :: TComparison -> GTComparison
parseTComparison TBefore = GBEFORE
parseTComparison TAfter = GAFTER
parseTComparison TBy = GBY
parseTComparison TOn = GON
parseTComparison TVague = GVAGUE
parseDate :: MultiTerm -> GDate
parseDate mt = case Text.words $ mt2text mt of
[d, m, y] -> GMkDate (tDay d) (tMonth m) (mkYear y)
_ -> GMkDate (LexDay "Day1") (LexMonth "Jan") dummyYear
where
dummyYear = mkYear "1970"
mkYear :: Text.Text -> GYear
mkYear y = GMkYear (LexYearComponent y1) (LexYearComponent y2) (LexYearComponent y3) (LexYearComponent y4)
where [y1, y2, y3, y4] = splitYear y
splitYear :: Text.Text -> [String]
splitYear y = case ["Y" <> [d] | d <- Text.unpack y] of
xs@[_, _, _, _] -> xs
_ -> ["Y2", "Y0", "Y0", "Y0"]
tDay :: Text.Text -> GDay
tDay t = LexDay ("Day"<> Text.unpack t)
tMonth :: Text.Text -> GMonth
tMonth = LexMonth . Text.unpack
TODO : stop using * 2text , instead use the internal structure
" demand " :| [ " an explanation for your inaction " ] - > demand : V2 , NP complement , call
" assess " :| [ " if it is a Notifiable Data Breach " ] - > assess : VS , S complement , call ComplS2
parseAction :: NLGEnv -> BoolStructP -> GAction
parseAction env bsp = let txt = bsp2text bsp in
case parseAny "Action" env txt of
[] -> error $ msg "Action" txt
x:_ -> fg x
parseSubj :: NLGEnv -> BoolStructP -> GSubj
parseSubj env bsp = let txt = bsp2text bsp in
case parseAny "Subj" env txt of
[] -> error $ msg "Subj" txt
x:_ -> fg x
parseWho :: NLGEnv -> RelationalPredicate -> GWho
parseWho env rp = let txt = rp2text rp in
case parseAny "Who" env txt of
[] -> error $ msg "Who" txt
x:_ -> fg x
parseCond :: NLGEnv -> RelationalPredicate -> GCond
parseCond env (RPConstraint c (RPTC t) d) = GRPConstraint cond tc date
where
cond = parseCond env (RPMT c)
tc = parseTComparison t
date = parseDate d
parseCond env rp = let txt = rp2text rp in
case parseAny "Cond" env txt of
[] -> error $ msg "Cond" txt
x:_ -> fg x
parseUpon :: NLGEnv -> ParamText -> GUpon
parseUpon env pt = let txt = pt2text pt in
case parseAny "Upon" env txt of
[] -> error $ msg "Upon" txt
x:_ -> fg x
parseTemporal :: NLGEnv -> TemporalConstraint Text.Text -> GTemporal
parseTemporal env (TemporalConstraint t (Just int) text) = GTemporalConstraint tc digits unit
where
tc = parseTComparison t
digits = mkDigits int
unit = parseTimeUnit text
mkDigits :: Integer -> GDigits
mkDigits i = case [LexDig $ "D_" <> [d] | d <- show i] of
[dig] -> GIDig dig
xs -> foldr GIIDig (GIDig (last xs)) (init xs)
parseTemporal _ (TemporalConstraint tc Nothing text) = undefined
parseTimeUnit :: Text.Text -> GTimeUnit
parseTimeUnit text = case take 3 $ Text.unpack $ Text.toLower text of
"day" -> GDay_Unit
"mon" -> GMonth_Unit
"yea" -> GYear_Unit
xs -> error $ "unrecognised unit of time: " <> Text.unpack text
parseConstraint :: NLGEnv -> RelationalPredicate -> GConstraint
parseConstraint env (RPBoolStructR a RPis (AA.Not b)) = case (nps,vps) of
(np:_, vp:_) -> GRPleafS (fg np) (flipPolarity $ fg vp)
_ -> GrecoverRPis (tString aTxt) (tString $ Text.unwords ["not", bTxt])
where
aTxt = mt2text a
bTxt = bsr2text b
nps = parseAnyNoRecover "NP" env aTxt
vps = parseAnyNoRecover "VPS" env $ Text.unwords ["is", bTxt]
tString :: Text.Text -> GString
tString = GString . read . Text.unpack
parseConstraint env (RPConstraint a RPis b) = case (nps,vps) of
(np:_, vp:_) -> GRPleafS (fg np) (fg vp)
_ -> GrecoverRPis (tString aTxt) (tString bTxt)
where
aTxt = mt2text a
bTxt = mt2text b
nps = parseAnyNoRecover "NP" env aTxt
vps = parseAnyNoRecover "VPS" env $ Text.unwords ["is", bTxt]
tString :: Text.Text -> GString
tString = GString . read . Text.unpack
parseConstraint env rp = let txt = rp2text rp in
case parseAny "Constraint" env txt of
[] -> error $ msg "Constraint" txt
x:_ -> fg x
parsePrePost :: NLGEnv -> Text.Text -> GPrePost
parsePrePost env txt =
case parseAny "PrePost" env txt of
[] -> GrecoverUnparsedPrePost $ GString $ Text.unpack txt
x:_ -> fg x
parseAny :: String -> NLGEnv -> Text.Text -> [Expr]
parseAny cat env txt = res
where
typ = case readType cat of
Nothing -> error $ unwords ["category", cat, "not found among", show $ categories (gfGrammar env)]
Just t -> t
res = case gfParse env typ txt of
[] -> [mkApp (mkCId $ "recoverUnparsed"<>cat) [mkStr $ Text.unpack txt]]
xs -> xs
parseAnyNoRecover :: String -> NLGEnv -> Text.Text -> [Expr]
parseAnyNoRecover cat env = gfParse env typ
where
typ = case readType cat of
Nothing -> error $ unwords ["category", cat, "not found among", show $ categories (gfGrammar env)]
Just t -> t
msg :: String -> Text.Text -> String
msg typ txt = "parse" <> typ <> ": failed to parse " <> Text.unpack txt
|
5743aff8d6a7dd413062c6d772b1a8eea824176ec76b10dfd32b98f91ddf382e | dhess/sicp-solutions | ex2.68.scm | (define (make-leaf symbol weight)
(list 'leaf symbol weight))
(define (leaf? object)
(eq? (car object) 'leaf))
(define (symbol-leaf x) (cadr x))
(define (weight-leaf x) (caddr x))
(define (make-code-tree left right)
(list left
right
(append (symbols left) (symbols right))
(+ (weight left) (weight right))))
(define (left-branch tree) (car tree))
(define (right-branch tree) (cadr tree))
(define (symbols tree)
(if (leaf? tree)
(list (symbol-leaf tree))
(caddr tree)))
(define (weight tree)
(if (leaf? tree)
(weight-leaf tree)
(cadddr tree)))
(define (decode bits tree)
(define (decode-1 bits current-branch)
(if (null? bits)
'()
(let ((next-branch
(choose-branch (car bits) current-branch)))
(if (leaf? next-branch)
(cons (symbol-leaf next-branch)
(decode-1 (cdr bits) tree))
(decode-1 (cdr bits) next-branch)))))
(decode-1 bits tree))
(define (choose-branch bit branch)
(cond ((= bit 0) (left-branch branch))
((= bit 1) (right-branch branch))
(else (error "bad bit -- CHOOSE-BRANCH" bit))))
(define (adjoin-set x set)
(cond ((null? set) (list x))
((< (weight x) (weight (car set))) (cons x set))
(else (cons (car set)
(adjoin-set x (cdr set))))))
(define (make-leaf-set pairs)
(if (null? pairs)
'()
(let ((pair (car pairs)))
(adjoin-set (make-leaf (car pair)
(cadr pair))
(make-leaf-set (cdr pairs))))))
(define sample-tree
(make-code-tree (make-leaf 'A 4)
(make-code-tree
(make-leaf 'B 2)
(make-code-tree (make-leaf 'D 1)
(make-leaf 'C 1)))))
(define sample-message '(0 1 1 0 0 1 0 1 0 1 1 1 0))
(decode sample-message sample-tree)
(define (encode message tree)
(if (null? message)
'()
(append (encode-symbol (car message) tree)
(encode (cdr message) tree))))
(define (encode-symbol symbol tree)
(if (leaf? tree)
(if (eq? symbol (symbol-leaf tree))
'()
#f)
(let ((left (encode-symbol symbol
(left-branch tree))))
(if (list? left)
(cons 0 left)
(let ((right (encode-symbol symbol
(right-branch tree))))
(if (list? right)
(cons 1 right)
(error "bad symbol -- ENCODE-SYMBOL" symbol)))))))
| null | https://raw.githubusercontent.com/dhess/sicp-solutions/2cf78db98917e9cb1252efda76fddc8e45fe4140/chap2/ex2.68.scm | scheme | (define (make-leaf symbol weight)
(list 'leaf symbol weight))
(define (leaf? object)
(eq? (car object) 'leaf))
(define (symbol-leaf x) (cadr x))
(define (weight-leaf x) (caddr x))
(define (make-code-tree left right)
(list left
right
(append (symbols left) (symbols right))
(+ (weight left) (weight right))))
(define (left-branch tree) (car tree))
(define (right-branch tree) (cadr tree))
(define (symbols tree)
(if (leaf? tree)
(list (symbol-leaf tree))
(caddr tree)))
(define (weight tree)
(if (leaf? tree)
(weight-leaf tree)
(cadddr tree)))
(define (decode bits tree)
(define (decode-1 bits current-branch)
(if (null? bits)
'()
(let ((next-branch
(choose-branch (car bits) current-branch)))
(if (leaf? next-branch)
(cons (symbol-leaf next-branch)
(decode-1 (cdr bits) tree))
(decode-1 (cdr bits) next-branch)))))
(decode-1 bits tree))
(define (choose-branch bit branch)
(cond ((= bit 0) (left-branch branch))
((= bit 1) (right-branch branch))
(else (error "bad bit -- CHOOSE-BRANCH" bit))))
(define (adjoin-set x set)
(cond ((null? set) (list x))
((< (weight x) (weight (car set))) (cons x set))
(else (cons (car set)
(adjoin-set x (cdr set))))))
(define (make-leaf-set pairs)
(if (null? pairs)
'()
(let ((pair (car pairs)))
(adjoin-set (make-leaf (car pair)
(cadr pair))
(make-leaf-set (cdr pairs))))))
(define sample-tree
(make-code-tree (make-leaf 'A 4)
(make-code-tree
(make-leaf 'B 2)
(make-code-tree (make-leaf 'D 1)
(make-leaf 'C 1)))))
(define sample-message '(0 1 1 0 0 1 0 1 0 1 1 1 0))
(decode sample-message sample-tree)
(define (encode message tree)
(if (null? message)
'()
(append (encode-symbol (car message) tree)
(encode (cdr message) tree))))
(define (encode-symbol symbol tree)
(if (leaf? tree)
(if (eq? symbol (symbol-leaf tree))
'()
#f)
(let ((left (encode-symbol symbol
(left-branch tree))))
(if (list? left)
(cons 0 left)
(let ((right (encode-symbol symbol
(right-branch tree))))
(if (list? right)
(cons 1 right)
(error "bad symbol -- ENCODE-SYMBOL" symbol)))))))
| |
4f65b6aa8b7a0f79ce6a3581c8e7734711c378d47bea2d4b88f85358297db84a | yoshihiro503/ocamltter | api_intf.ml | open Spotlib.Spot
open Meta_conv.Open
open OCamltter_oauth
open Camlon
open Ocaml_conv.Default
open Json_conv.Default
module Json = struct
include Tiny_json.Json
let json_of_t x = x
let t_of_json ?trace:_ x = Ok x
let ocaml_of_t t = Ocaml.String (show t)
let t_of_ocaml = Ocaml_conv.Helper.of_deconstr (function
| Ocaml.String s -> parse s
| _ -> failwith "Ocaml.String expected")
end
type 'json mc_leftovers = (string * 'json) list [@@deriving conv{ocaml}]
type status_id = int64 [@@deriving conv{ocaml; json}]
module Time : sig
type t [@@deriving conv{ocaml; json}]
include Mtypes.Comparable with type t := t
val from_unix : float -> t
val to_unix : t -> float
val from_string : string -> t
val to_string : t -> string
end = struct
open Util.Date
: it is named ` date ` , but for time .
let parse_date st =
We d Apr 21 02:29:17 +0000 2010
let mon = pmonth & String.sub st 4 3 in
let day = int_of_string & String.sub st 8 2 in
let h = int_of_string & String.sub st 11 2 in
let m = int_of_string & String.sub st 14 2 in
let s = int_of_string & String.sub st 17 2 in
let year = int_of_string & String.sub st 26 4 in
make_from_gmt year mon day h m s
in
Sat , 17 Apr 2010 08:23:55 +0000
let mon = pmonth & String.sub st 8 3 in
let day = int_of_string & String.sub st 5 2 in
let h = int_of_string & String.sub st 17 2 in
let m = int_of_string & String.sub st 20 2 in
let s = int_of_string & String.sub st 23 2 in
let year = int_of_string & String.sub st 12 4 in
make_from_gmt year mon day h m s
in
try parse_date01 st with
| _ -> parse_date02 st
type t = {
printable : string lazy_t;
tick : float lazy_t
} [@@deriving conv{ocaml}]
type _t = t
include Mtypes.Make_comparable(struct
type t = _t
let compare t1 t2 = compare (Lazy.force t1.tick) (Lazy.force t2.tick)
end)
let from_unix f =
{ printable = Lazy.from_val (Printf.sprintf "@%.0f" f);
tick = Lazy.from_val f
}
let to_unix t = Lazy.force t.tick
let from_string s =
{ printable = Lazy.from_val s;
tick = lazy (parse_date s)
}
let to_string t = Lazy.force t.printable
open Json
let json_of_t t = String (to_string t)
let t_of_json = Json_conv.Helper.of_deconstr (function
| String s -> from_string s
| _ -> failwith "Time.t_of_json: String expected")
let t_of_json_exn = Json_conv.exn t_of_json
end
module Text : sig
(* HTML encoded text *)
type t = string [@@deriving conv{ocaml; json}]
end = struct
type t = string [@@deriving conv{ocaml}]
open Json
let t_of_json = Json_conv.Helper.of_deconstr (function
| String s -> Http.html_decode s
| _ -> failwith "Text.t_of_json: String expected")
let t_of_json_exn = Json_conv.exn t_of_json
let json_of_t _t = Json.String(Http.html_encode _t)
end
module Client : sig
type t = string * (Xml.xml, exn) Result.t [@@deriving conv{ocaml; json}]
val name : t -> string
end = struct
type t = string * (Xml.xml, exn) Result.t
open Meta_conv
open Json
let t_of_json = Json_conv.Helper.of_deconstr & function
| String s -> s, Result.catch_exn & fun () -> Xml.parse_string s
| _ -> failwith "Client.t_of_json: String expected"
let t_of_json_exn = Json_conv.exn t_of_json
let json_of_t (s, _) = String s
let t_of_ocaml ?trace:_ _ = assert false
let t_of_ocaml_exn ?trace:_ _ = assert false
let ocaml_of_t (s, _) = Ocaml.String s
let name = function
| (_, Ok (Xml.PCData client_name))
| (_, Ok (Xml.Tag ("a", _, [Xml.PCData client_name]))) ->
client_name
| (s, Ok _) -> s
| (s, Error _) -> s
end
module User = struct
(* Lots of optional fields! *)
type details = <
is_translator : bool;
(* profile_background_color : string; *)
(* notifications : bool option; *)
profile_image_url_https : string;
url : string option;
(* profile_background_image_url_https : string *)
created_at : Time.t;
(* profile_background_image_url : string; *)
(* utc_offset : float *)
(* profile_link_color : string *)
name : string;
default_profile : bool;
screen_name : string;
lang : string;
protected : bool;
statuses_count : int64;
location : string option;
(* profile_use_background_image : bool; *)
(* profile_text_color : string; *)
(* contributors_enabled : bool; *)
listed_count : int64;
time_zone : string option;
description : string;
profile_image_url : string;
(* profile_sidebar_border_color : string *)
following : bool option;
geo_enabled : bool;
(* profile_background_tile : bool *)
followers_count : int64;
(* profile_sidebar_fill_color : string; *)
verified : bool;
(* status : Twitter.Json.Object ... *)
(* default_profile_image : bool *)
follow_request_sent : bool option;
friends_count : int64;
favourites_count : int64
> [@@deriving conv{ocaml; json}]
type t = <
unknowns : Json.t mc_leftovers;
id : int64;
details : details mc_option_embeded;
> [@@deriving conv{ocaml; json}]
type ts = t list [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
end
module Hashtag = struct
type t = <
text : string;
indices : (int * int) (*+ location in the tweet string *)
> [@@deriving conv{ocaml; json}]
end
module URL = struct
type t = <
unknown : Json.t mc_leftovers;
url : string;
expanded_url : string;
display_url : string;
> [@@deriving conv{ocaml; json}]
end
module UserMention = struct
type t = <
unknown : Json.t mc_leftovers;
screen_name : string;
name : string;
id : int64;
indices : int * int;
> [@@deriving conv{ocaml; json}]
end
module Entities = struct
type t = <
unknown : Json.t mc_leftovers;
hashtags : Hashtag.t list;
urls : URL.t list;
user_mentions : UserMention.t list;
> [@@deriving conv{ocaml; json}]
end
module Tweet = struct
: exclude_replies ... unknown
include_user_entities ... unknown
include_user_entities ... unknown
*)
type t = <
unknowns : Json.t mc_leftovers;
id : int64;
user : User.t;
text : Text.t;
truncated : bool;
RE or RT
in_reply_to_user_id : int64 option;
in_reply_to_screen_name : string option;
RT
created_at : Time.t;
source : Client.t; (* html piece *)
geo : Json.t option;
coordinates : Json.t option;
place : Json.t option;
contributors : Json.t option; (* something can be trimed... *)
retweet_count : int;
favorited : bool;
retweeted : bool;
possibly_sensitive : bool mc_option;
entities : Entities.t mc_option;
> [@@deriving conv{ocaml; json}]
type ts = t list [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
let format_ts x = Ocaml.format_no_poly_with ocaml_of_ts x
end
module Search_tweets = struct
module Search_metadata = struct
type t = <
unknowns : Json.t mc_leftovers;
query : string;
next_results : string mc_option; (* url GET piece for next search *)
refresh_url : string; (* url GET piece for refresh *)
count : int;
max_id : int64;
since_id : int64;
completed_in : float;
> [@@deriving conv{ocaml; json}]
end
type t = <
unknowns : Json.t mc_leftovers;
statuses : Tweet.t list;
search_metadata : Search_metadata.t;
> [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
end
module Rate_limit_status = struct
type limit = <
limit : float;
remaining : float;
reset : float; (*+ unix epoch *)
> [@@deriving conv{ocaml; json}]
type t = <
rate_limit_context : < access_token : string >;
resources : <
lists : <
subscribers : limit [@conv.as {json="/lists/subscribers"}];
list : limit [@conv.as {json="/lists/list"}];
memberships : limit [@conv.as {json="/lists/memberships"}];
ownerships : limit [@conv.as {json="/lists/ownerships"}];
subscriptions : limit [@conv.as {json="/lists/subscriptions"}];
members : limit [@conv.as {json="/lists/members"}];
subscribers_show : limit [@conv.as {json="/lists/subscribers/show"}];
statuses : limit [@conv.as {json="/lists/statuses"}];
members_show : limit [@conv.as {json="/lists/members/show"}];
show : limit [@conv.as {json="/lists/show"}];
>;
application : <
rate_limit_status : limit [@conv.as {json="/application/rate_limit_status"}];
>;
friendships : <
incoming : limit [@conv.as {json="/friendships/incoming"}];
lookup : limit [@conv.as {json="/friendships/lookup"}];
outgoing : limit [@conv.as {json="/friendships/outgoing"}];
no_retweets_ids : limit [@conv.as {json="/friendships/no_retweets/ids"}];
show : limit [@conv.as {json="/friendships/show"}];
>;
blocks : <
ids : limit [@conv.as {json="/blocks/ids"}];
list : limit [@conv.as {json="/blocks/list"}];
>;
geo : <
similar_places : limit [@conv.as {json="/geo/similar_places"}];
search : limit [@conv.as {json="/geo/search"}];
reverse_geocode : limit [@conv.as {json="/geo/reverse_geocode"}];
place_id : limit [@conv.as {json="/geo/id/:place_id"}];
>;
users : <
profile_banner : limit [@conv.as {json="/users/profile_banner"}];
suggestions_members : limit [@conv.as {json="/users/suggestions/:slug/members"}];
show : limit [@conv.as {json="/users/show/:id"}];
suggestions : limit [@conv.as {json="/users/suggestions"}];
lookup : limit [@conv.as {json="/users/lookup"}];
search : limit [@conv.as {json="/users/search"}];
suggestions_slug : limit [@conv.as {json="/users/suggestions/:slug"}];
report_spam : limit [@conv.as {json="/users/report_spam"}];
derived_info : limit [@conv.as {json="/users/derived_info"}];
>;
followers : <
list : limit [@conv.as {json="/followers/list"}];
ids : limit [@conv.as {json="/followers/ids"}];
>;
statuses : <
mentions_timeline : limit [@conv.as {json="/statuses/mentions_timeline"}];
show : limit [@conv.as {json="/statuses/show/:id"}];
oembed : limit [@conv.as {json="/statuses/oembed"}];
retweeters_ids : limit [@conv.as {json="/statuses/retweeters/ids"}];
home_timeline : limit [@conv.as {json="/statuses/home_timeline"}];
user_timeline : limit [@conv.as {json="/statuses/user_timeline"}];
retweets : limit [@conv.as {json="/statuses/retweets/:id"}];
retweets_of_me : limit [@conv.as {json="/statuses/retweets_of_me"}];
friends : limit [@conv.as {json="/statuses/friends"}];
lookup : limit [@conv.as {json="/statuses/lookup"}];
>;
help : <
privacy : limit [@conv.as {json="/help/privacy"}];
tos : limit [@conv.as {json="/help/tos"}];
configuration : limit [@conv.as {json="/help/configuration"}];
languages : limit [@conv.as {json="/help/languages"}];
settings : limit [@conv.as {json="/help/settings"}];
>;
friends : <
ids : limit [@conv.as {json="/friends/ids"}];
list : limit [@conv.as {json="/friends/list"}];
following_ids : limit [@conv.as {json="/friends/following/ids"}];
following_list : limit [@conv.as {json="/friends/following/list"}];
>;
direct_messages : <
show : limit [@conv.as {json="/direct_messages/show"}];
sent_and_received : limit [@conv.as {json="/direct_messages/sent_and_received"}];
sent : limit [@conv.as {json="/direct_messages/sent"}];
direct_messages : limit [@conv.as {json="/direct_messages"}];
>;
account : <
verify_credentials : limit [@conv.as {json="/account/verify_credentials"}];
settings : limit [@conv.as {json="/account/settings"}];
login_verification_enrollment : limit [@conv.as {json="/account/login_verification_enrollment"}];
update_profile : limit [@conv.as {json="/account/update_profile"}];
>;
favorites : <
list : limit [@conv.as {json="/favorites/list"}];
>;
saved_searches : <
destroy : limit [@conv.as {json="/saved_searches/destroy/:id"}];
list : limit [@conv.as {json="/saved_searches/list"}];
show : limit [@conv.as {json="/saved_searches/show/:id"}];
>;
search : <
tweets : limit [@conv.as {json="/search/tweets"}];
>;
trends : <
available : limit [@conv.as {json="/trends/available"}];
place : limit [@conv.as {json="/trends/place"}];
closest : limit [@conv.as {json="/trends/closest"}];
>;
mutes : <
users_list : limit [@conv.as {json="/mutes/users/list"}];
users_ids : limit [@conv.as {json="/mutes/users/ids"}];
>;
device : <
token : limit [@conv.as {json="/device/token"}];
>;
>
> [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
end
| null | https://raw.githubusercontent.com/yoshihiro503/ocamltter/be7ac68c8076bc2ca8ccec216d6647c94ec9f814/twitter/api_intf.ml | ocaml | HTML encoded text
Lots of optional fields!
profile_background_color : string;
notifications : bool option;
profile_background_image_url_https : string
profile_background_image_url : string;
utc_offset : float
profile_link_color : string
profile_use_background_image : bool;
profile_text_color : string;
contributors_enabled : bool;
profile_sidebar_border_color : string
profile_background_tile : bool
profile_sidebar_fill_color : string;
status : Twitter.Json.Object ...
default_profile_image : bool
+ location in the tweet string
html piece
something can be trimed...
url GET piece for next search
url GET piece for refresh
+ unix epoch | open Spotlib.Spot
open Meta_conv.Open
open OCamltter_oauth
open Camlon
open Ocaml_conv.Default
open Json_conv.Default
module Json = struct
include Tiny_json.Json
let json_of_t x = x
let t_of_json ?trace:_ x = Ok x
let ocaml_of_t t = Ocaml.String (show t)
let t_of_ocaml = Ocaml_conv.Helper.of_deconstr (function
| Ocaml.String s -> parse s
| _ -> failwith "Ocaml.String expected")
end
type 'json mc_leftovers = (string * 'json) list [@@deriving conv{ocaml}]
type status_id = int64 [@@deriving conv{ocaml; json}]
module Time : sig
type t [@@deriving conv{ocaml; json}]
include Mtypes.Comparable with type t := t
val from_unix : float -> t
val to_unix : t -> float
val from_string : string -> t
val to_string : t -> string
end = struct
open Util.Date
: it is named ` date ` , but for time .
let parse_date st =
We d Apr 21 02:29:17 +0000 2010
let mon = pmonth & String.sub st 4 3 in
let day = int_of_string & String.sub st 8 2 in
let h = int_of_string & String.sub st 11 2 in
let m = int_of_string & String.sub st 14 2 in
let s = int_of_string & String.sub st 17 2 in
let year = int_of_string & String.sub st 26 4 in
make_from_gmt year mon day h m s
in
Sat , 17 Apr 2010 08:23:55 +0000
let mon = pmonth & String.sub st 8 3 in
let day = int_of_string & String.sub st 5 2 in
let h = int_of_string & String.sub st 17 2 in
let m = int_of_string & String.sub st 20 2 in
let s = int_of_string & String.sub st 23 2 in
let year = int_of_string & String.sub st 12 4 in
make_from_gmt year mon day h m s
in
try parse_date01 st with
| _ -> parse_date02 st
type t = {
printable : string lazy_t;
tick : float lazy_t
} [@@deriving conv{ocaml}]
type _t = t
include Mtypes.Make_comparable(struct
type t = _t
let compare t1 t2 = compare (Lazy.force t1.tick) (Lazy.force t2.tick)
end)
let from_unix f =
{ printable = Lazy.from_val (Printf.sprintf "@%.0f" f);
tick = Lazy.from_val f
}
let to_unix t = Lazy.force t.tick
let from_string s =
{ printable = Lazy.from_val s;
tick = lazy (parse_date s)
}
let to_string t = Lazy.force t.printable
open Json
let json_of_t t = String (to_string t)
let t_of_json = Json_conv.Helper.of_deconstr (function
| String s -> from_string s
| _ -> failwith "Time.t_of_json: String expected")
let t_of_json_exn = Json_conv.exn t_of_json
end
module Text : sig
type t = string [@@deriving conv{ocaml; json}]
end = struct
type t = string [@@deriving conv{ocaml}]
open Json
let t_of_json = Json_conv.Helper.of_deconstr (function
| String s -> Http.html_decode s
| _ -> failwith "Text.t_of_json: String expected")
let t_of_json_exn = Json_conv.exn t_of_json
let json_of_t _t = Json.String(Http.html_encode _t)
end
module Client : sig
type t = string * (Xml.xml, exn) Result.t [@@deriving conv{ocaml; json}]
val name : t -> string
end = struct
type t = string * (Xml.xml, exn) Result.t
open Meta_conv
open Json
let t_of_json = Json_conv.Helper.of_deconstr & function
| String s -> s, Result.catch_exn & fun () -> Xml.parse_string s
| _ -> failwith "Client.t_of_json: String expected"
let t_of_json_exn = Json_conv.exn t_of_json
let json_of_t (s, _) = String s
let t_of_ocaml ?trace:_ _ = assert false
let t_of_ocaml_exn ?trace:_ _ = assert false
let ocaml_of_t (s, _) = Ocaml.String s
let name = function
| (_, Ok (Xml.PCData client_name))
| (_, Ok (Xml.Tag ("a", _, [Xml.PCData client_name]))) ->
client_name
| (s, Ok _) -> s
| (s, Error _) -> s
end
module User = struct
type details = <
is_translator : bool;
profile_image_url_https : string;
url : string option;
created_at : Time.t;
name : string;
default_profile : bool;
screen_name : string;
lang : string;
protected : bool;
statuses_count : int64;
location : string option;
listed_count : int64;
time_zone : string option;
description : string;
profile_image_url : string;
following : bool option;
geo_enabled : bool;
followers_count : int64;
verified : bool;
follow_request_sent : bool option;
friends_count : int64;
favourites_count : int64
> [@@deriving conv{ocaml; json}]
type t = <
unknowns : Json.t mc_leftovers;
id : int64;
details : details mc_option_embeded;
> [@@deriving conv{ocaml; json}]
type ts = t list [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
end
module Hashtag = struct
type t = <
text : string;
> [@@deriving conv{ocaml; json}]
end
module URL = struct
type t = <
unknown : Json.t mc_leftovers;
url : string;
expanded_url : string;
display_url : string;
> [@@deriving conv{ocaml; json}]
end
module UserMention = struct
type t = <
unknown : Json.t mc_leftovers;
screen_name : string;
name : string;
id : int64;
indices : int * int;
> [@@deriving conv{ocaml; json}]
end
module Entities = struct
type t = <
unknown : Json.t mc_leftovers;
hashtags : Hashtag.t list;
urls : URL.t list;
user_mentions : UserMention.t list;
> [@@deriving conv{ocaml; json}]
end
module Tweet = struct
: exclude_replies ... unknown
include_user_entities ... unknown
include_user_entities ... unknown
*)
type t = <
unknowns : Json.t mc_leftovers;
id : int64;
user : User.t;
text : Text.t;
truncated : bool;
RE or RT
in_reply_to_user_id : int64 option;
in_reply_to_screen_name : string option;
RT
created_at : Time.t;
geo : Json.t option;
coordinates : Json.t option;
place : Json.t option;
retweet_count : int;
favorited : bool;
retweeted : bool;
possibly_sensitive : bool mc_option;
entities : Entities.t mc_option;
> [@@deriving conv{ocaml; json}]
type ts = t list [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
let format_ts x = Ocaml.format_no_poly_with ocaml_of_ts x
end
module Search_tweets = struct
module Search_metadata = struct
type t = <
unknowns : Json.t mc_leftovers;
query : string;
count : int;
max_id : int64;
since_id : int64;
completed_in : float;
> [@@deriving conv{ocaml; json}]
end
type t = <
unknowns : Json.t mc_leftovers;
statuses : Tweet.t list;
search_metadata : Search_metadata.t;
> [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
end
module Rate_limit_status = struct
type limit = <
limit : float;
remaining : float;
> [@@deriving conv{ocaml; json}]
type t = <
rate_limit_context : < access_token : string >;
resources : <
lists : <
subscribers : limit [@conv.as {json="/lists/subscribers"}];
list : limit [@conv.as {json="/lists/list"}];
memberships : limit [@conv.as {json="/lists/memberships"}];
ownerships : limit [@conv.as {json="/lists/ownerships"}];
subscriptions : limit [@conv.as {json="/lists/subscriptions"}];
members : limit [@conv.as {json="/lists/members"}];
subscribers_show : limit [@conv.as {json="/lists/subscribers/show"}];
statuses : limit [@conv.as {json="/lists/statuses"}];
members_show : limit [@conv.as {json="/lists/members/show"}];
show : limit [@conv.as {json="/lists/show"}];
>;
application : <
rate_limit_status : limit [@conv.as {json="/application/rate_limit_status"}];
>;
friendships : <
incoming : limit [@conv.as {json="/friendships/incoming"}];
lookup : limit [@conv.as {json="/friendships/lookup"}];
outgoing : limit [@conv.as {json="/friendships/outgoing"}];
no_retweets_ids : limit [@conv.as {json="/friendships/no_retweets/ids"}];
show : limit [@conv.as {json="/friendships/show"}];
>;
blocks : <
ids : limit [@conv.as {json="/blocks/ids"}];
list : limit [@conv.as {json="/blocks/list"}];
>;
geo : <
similar_places : limit [@conv.as {json="/geo/similar_places"}];
search : limit [@conv.as {json="/geo/search"}];
reverse_geocode : limit [@conv.as {json="/geo/reverse_geocode"}];
place_id : limit [@conv.as {json="/geo/id/:place_id"}];
>;
users : <
profile_banner : limit [@conv.as {json="/users/profile_banner"}];
suggestions_members : limit [@conv.as {json="/users/suggestions/:slug/members"}];
show : limit [@conv.as {json="/users/show/:id"}];
suggestions : limit [@conv.as {json="/users/suggestions"}];
lookup : limit [@conv.as {json="/users/lookup"}];
search : limit [@conv.as {json="/users/search"}];
suggestions_slug : limit [@conv.as {json="/users/suggestions/:slug"}];
report_spam : limit [@conv.as {json="/users/report_spam"}];
derived_info : limit [@conv.as {json="/users/derived_info"}];
>;
followers : <
list : limit [@conv.as {json="/followers/list"}];
ids : limit [@conv.as {json="/followers/ids"}];
>;
statuses : <
mentions_timeline : limit [@conv.as {json="/statuses/mentions_timeline"}];
show : limit [@conv.as {json="/statuses/show/:id"}];
oembed : limit [@conv.as {json="/statuses/oembed"}];
retweeters_ids : limit [@conv.as {json="/statuses/retweeters/ids"}];
home_timeline : limit [@conv.as {json="/statuses/home_timeline"}];
user_timeline : limit [@conv.as {json="/statuses/user_timeline"}];
retweets : limit [@conv.as {json="/statuses/retweets/:id"}];
retweets_of_me : limit [@conv.as {json="/statuses/retweets_of_me"}];
friends : limit [@conv.as {json="/statuses/friends"}];
lookup : limit [@conv.as {json="/statuses/lookup"}];
>;
help : <
privacy : limit [@conv.as {json="/help/privacy"}];
tos : limit [@conv.as {json="/help/tos"}];
configuration : limit [@conv.as {json="/help/configuration"}];
languages : limit [@conv.as {json="/help/languages"}];
settings : limit [@conv.as {json="/help/settings"}];
>;
friends : <
ids : limit [@conv.as {json="/friends/ids"}];
list : limit [@conv.as {json="/friends/list"}];
following_ids : limit [@conv.as {json="/friends/following/ids"}];
following_list : limit [@conv.as {json="/friends/following/list"}];
>;
direct_messages : <
show : limit [@conv.as {json="/direct_messages/show"}];
sent_and_received : limit [@conv.as {json="/direct_messages/sent_and_received"}];
sent : limit [@conv.as {json="/direct_messages/sent"}];
direct_messages : limit [@conv.as {json="/direct_messages"}];
>;
account : <
verify_credentials : limit [@conv.as {json="/account/verify_credentials"}];
settings : limit [@conv.as {json="/account/settings"}];
login_verification_enrollment : limit [@conv.as {json="/account/login_verification_enrollment"}];
update_profile : limit [@conv.as {json="/account/update_profile"}];
>;
favorites : <
list : limit [@conv.as {json="/favorites/list"}];
>;
saved_searches : <
destroy : limit [@conv.as {json="/saved_searches/destroy/:id"}];
list : limit [@conv.as {json="/saved_searches/list"}];
show : limit [@conv.as {json="/saved_searches/show/:id"}];
>;
search : <
tweets : limit [@conv.as {json="/search/tweets"}];
>;
trends : <
available : limit [@conv.as {json="/trends/available"}];
place : limit [@conv.as {json="/trends/place"}];
closest : limit [@conv.as {json="/trends/closest"}];
>;
mutes : <
users_list : limit [@conv.as {json="/mutes/users/list"}];
users_ids : limit [@conv.as {json="/mutes/users/ids"}];
>;
device : <
token : limit [@conv.as {json="/device/token"}];
>;
>
> [@@deriving conv{ocaml; json}]
let format x = Ocaml.format_no_poly_with ocaml_of_t x
end
|
c9efa192f42fc38e8cb1467bc44db4ce1d3e9a937d29d27222648c8f8b399dc9 | harpocrates/inline-rust | Main.hs | # LANGUAGE TemplateHaskell , QuasiQuotes , CPP #
#ifdef darwin_HOST_OS
# OPTIONS_GHC -optl - Wl,-all_load #
#else
# OPTIONS_GHC -optl - Wl,--whole - archive #
#endif
module Main where
import Language.Rust.Inline
import SimpleTypes
import GhcUnboxedTypes
import PointerTypes
import FunctionPointerTypes
import PreludeTypes
import AlgebraicDataTypes
import Data.Word
import Test.Hspec
import Foreign.Storable
import Foreign.Ptr
import Foreign.Marshal.Array
extendContext basic
setCrateRoot []
[rust|
mod GhcUnboxedTypes;
mod SimpleTypes;
mod PointerTypes;
mod FunctionPointerTypes;
mod PreludeTypes;
mod AlgebraicDataTypes;
pub use GhcUnboxedTypes::*;
pub use SimpleTypes::*;
pub use PointerTypes::*;
pub use FunctionPointerTypes::*;
pub use PreludeTypes::*;
pub use AlgebraicDataTypes::*;
|]
main :: IO ()
main = hspec $
describe "Rust quasiquoter" $ do
simpleTypes
ghcUnboxedTypes
pointerTypes
funcPointerTypes
preludeTypes
algebraicDataTypes
| null | https://raw.githubusercontent.com/harpocrates/inline-rust/5ecff8c92526000e5fc358a2dfede9b60ef59a1a/tests/Main.hs | haskell | whole - archive # | # LANGUAGE TemplateHaskell , QuasiQuotes , CPP #
#ifdef darwin_HOST_OS
# OPTIONS_GHC -optl - Wl,-all_load #
#else
#endif
module Main where
import Language.Rust.Inline
import SimpleTypes
import GhcUnboxedTypes
import PointerTypes
import FunctionPointerTypes
import PreludeTypes
import AlgebraicDataTypes
import Data.Word
import Test.Hspec
import Foreign.Storable
import Foreign.Ptr
import Foreign.Marshal.Array
extendContext basic
setCrateRoot []
[rust|
mod GhcUnboxedTypes;
mod SimpleTypes;
mod PointerTypes;
mod FunctionPointerTypes;
mod PreludeTypes;
mod AlgebraicDataTypes;
pub use GhcUnboxedTypes::*;
pub use SimpleTypes::*;
pub use PointerTypes::*;
pub use FunctionPointerTypes::*;
pub use PreludeTypes::*;
pub use AlgebraicDataTypes::*;
|]
main :: IO ()
main = hspec $
describe "Rust quasiquoter" $ do
simpleTypes
ghcUnboxedTypes
pointerTypes
funcPointerTypes
preludeTypes
algebraicDataTypes
|
55919e3d32267c68c651a4ea72178462665a3b741e110ad88be3ce8e8b500347 | yutopp/rill | functions.ml |
* Copyright 2020 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2020 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
open! Base
module Ast = Syntax.Ast
let linkage_of ast =
match ast with
| Ast.{ kind = DeclExternFunc { name; params; ret_ty; symbol_name; _ }; span }
->
let linkage =
match symbol_name with
| Ast.{ kind = LitString s; _ } -> Typing.Type.LinkageC s
| _ -> failwith "[ICE] unexpected token"
in
linkage
| Ast.{ kind = DefFunc { name; params; ret_ty; _ }; span } ->
let linkage = Typing.Type.LinkageRillc in
linkage
| Ast.{ kind = DeclFunc { name; params; ret_ty; _ }; span } ->
let linkage = Typing.Type.LinkageRillc in
linkage
| _ -> failwith "[ICE] unexpected node"
| null | https://raw.githubusercontent.com/yutopp/rill/375b67c03ab2087d0a2a833bd9e80f3e51e2694f/rillc/lib/sema/functions.ml | ocaml |
* Copyright 2020 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2020 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
open! Base
module Ast = Syntax.Ast
let linkage_of ast =
match ast with
| Ast.{ kind = DeclExternFunc { name; params; ret_ty; symbol_name; _ }; span }
->
let linkage =
match symbol_name with
| Ast.{ kind = LitString s; _ } -> Typing.Type.LinkageC s
| _ -> failwith "[ICE] unexpected token"
in
linkage
| Ast.{ kind = DefFunc { name; params; ret_ty; _ }; span } ->
let linkage = Typing.Type.LinkageRillc in
linkage
| Ast.{ kind = DeclFunc { name; params; ret_ty; _ }; span } ->
let linkage = Typing.Type.LinkageRillc in
linkage
| _ -> failwith "[ICE] unexpected node"
| |
95c6318f3e8f4fb03c219a97e4eeffbc9e0d0856f3c355cbe729df220510cce4 | BillHallahan/G2 | M18.hs | {-@ LIQUID "--no-termination" @-}
module M19 (main) where
@ main : : Int - > { b : | b } @
main :: Int -> Bool
main flag =
case while flag (0, 0) of
(_, j) -> if flag >= 0 then j == 100 else True
@ while : : flag : Int
- > xs : { xs:(Int , Int ) | ( flag > = 0 = > x_Tuple21 xs = = x_Tuple22 xs ) & & x_Tuple21 xs < = 100 }
- > { ys:(Int , Int ) | ( flag > = 0 = > x_Tuple21 ys = = x_Tuple22 ys ) & & x_Tuple21 ys = = 100}@
-> xs:{ xs:(Int, Int) | (flag >= 0 => x_Tuple21 xs == x_Tuple22 xs) && x_Tuple21 xs <= 100 }
-> { ys:(Int, Int) | (flag >= 0 => x_Tuple21 ys == x_Tuple22 ys) && x_Tuple21 ys == 100}@-}
while :: Int -> (Int, Int) -> (Int, Int)
while flag (b, j) = if b < 100
then while flag (if flag >= 0
then (b + 1, j + 1)
else (b + 1, j)
)
else (b, j) | null | https://raw.githubusercontent.com/BillHallahan/G2/f2584eb2ec211aed73b3ccd88c6e232c3cf4386d/tests/LiquidInf/Paper/Eval/CompareVerified/M18.hs | haskell | @ LIQUID "--no-termination" @ |
module M19 (main) where
@ main : : Int - > { b : | b } @
main :: Int -> Bool
main flag =
case while flag (0, 0) of
(_, j) -> if flag >= 0 then j == 100 else True
@ while : : flag : Int
- > xs : { xs:(Int , Int ) | ( flag > = 0 = > x_Tuple21 xs = = x_Tuple22 xs ) & & x_Tuple21 xs < = 100 }
- > { ys:(Int , Int ) | ( flag > = 0 = > x_Tuple21 ys = = x_Tuple22 ys ) & & x_Tuple21 ys = = 100}@
-> xs:{ xs:(Int, Int) | (flag >= 0 => x_Tuple21 xs == x_Tuple22 xs) && x_Tuple21 xs <= 100 }
-> { ys:(Int, Int) | (flag >= 0 => x_Tuple21 ys == x_Tuple22 ys) && x_Tuple21 ys == 100}@-}
while :: Int -> (Int, Int) -> (Int, Int)
while flag (b, j) = if b < 100
then while flag (if flag >= 0
then (b + 1, j + 1)
else (b + 1, j)
)
else (b, j) |
00e6227f91832ed1674eb6850e2be9e782d624da0669a2c8d4eda49a8b982609 | clojure/core.rrb-vector | test_common.cljs | (ns clojure.core.rrb-vector.test-common
(:require [clojure.test :as test :refer [deftest testing is are]]
[clojure.core.reducers :as r]
[clojure.core.rrb-vector.test-utils :as u]
[clojure.core.rrb-vector :as fv]
[clojure.core.rrb-vector.debug :as dv]
[clojure.core.rrb-vector.debug-platform-dependent :as pd]))
;; The intent is to keep this file as close to
;; src/test/clojure/clojure/core/rrb_vector/test_common.clj as
;; possible, so that when we start requiring Clojure 1.7.0 and later
;; for this library, this file and that one can be replaced with a
;; common test file with the suffix .cljc
(dv/set-debug-opts! dv/full-debug-opts)
(deftest test-slicing
  ;; Exercise subvec over a 32000-element vector at a spread of indices.
  (testing "slicing"
    (let [indices [10 29999 1234 18048 10123 10191]]
      (is (apply dv/check-subvec u/extra-checks? 32000 indices)))))
(deftest test-splicing
  (testing "splicing"
    (is (dv/check-catvec u/extra-checks?
                         1025 1025 3245 1025 32768 1025 1025 10123 1025 1025))
    (is (dv/check-catvec u/extra-checks?
                         10 40 40 40 40 40 40 40 40))
    (is (apply dv/check-catvec u/extra-checks? (repeat 30 33)))
    (is (dv/check-catvec u/extra-checks?
                         26091 31388 1098 43443 46195 4484 48099 7905
                         13615 601 13878 250 10611 9271 53170))
    ;; Order that catvec will perform splicev calls:
    ;; NOTE(review): the original pairwise splicev bindings that built
    ;; v01-15, v01-04 and v05-15 (annotated "top level call" /
    ;; "recurse level N catvec call") were lost in extraction; the same
    ;; final vectors are rebuilt here with left-to-right folds.
    (let [my-splice (if u/extra-checks? dv/checking-splicev fv/catvec)
          counts [26091 31388 1098 43443 46195 4484 48099 7905
                  13615 601 13878 250 10611 9271 53170]
          prefix-sums (reductions + counts)
          ranges (map range (cons 0 prefix-sums) prefix-sums)
          [v01 v02 v03 v04 v05 v06 v07 v08
           v09 v10 v11 v12 v13 v14 v15] (map fv/vec ranges)
          v01-15 (reduce my-splice [v01 v02 v03 v04 v05 v06 v07 v08
                                    v09 v10 v11 v12 v13 v14 v15])
          v01-04 (reduce my-splice [v01 v02 v03 v04])
          v05-15 (reduce my-splice [v05 v06 v07 v08 v09 v10 v11
                                    v12 v13 v14 v15])
          exp-val (range (last prefix-sums))]
      (is (= -1 (dv/first-diff v01-15 exp-val)))
      (is (= -1 (dv/first-diff (into v01-04 v05-15) exp-val))))))
(deftest test-reduce
  ;; reduce and reduce-kv must agree between core vectors and RRB vectors.
  (let [core-v (vec (range 128))
        rrb-v  (fv/vec (range 128))]
    (testing "reduce"
      (is (= (reduce + core-v) (reduce + rrb-v))))
    (testing "reduce-kv"
      (is (= (reduce-kv + 0 core-v) (reduce-kv + 0 rrb-v))))))
(deftest test-reduce-2
  ;; Summing a subvec directly, and via a reducer over its seq, must agree
  ;; with a plain RRB vector holding the same range.
  (let [my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)
        sliced    (my-subvec (dv/cvec (range 1003)) 500)
        expected  (dv/cvec (range 500 1003))
        sum       #(reduce + 0 %)]
    (is (= (sum sliced)
           (sum expected)
           (sum (r/map identity (seq sliced)))
           (sum (r/map identity (seq expected)))))))
(deftest test-reduce-3
  ;; Degenerate case: empty vectors reduce identically for core and RRB.
  (let [empty-core (vec [])
        empty-rrb  (fv/vec [])]
    (testing "reduce"
      (is (= (reduce + empty-core) (reduce + empty-rrb))))
    (testing "reduce-kv"
      (is (= (reduce-kv + 0 empty-core) (reduce-kv + 0 empty-rrb))))))
(deftest test-seq
  ;; The seq of an RRB vector mirrors its contents, is chunked, and
  ;; supports ClojureScript's internal-reduce protocol.
  (let [rrb-v (fv/vec (range 128))
        rrb-s (seq rrb-v)]
    (testing "seq contents"
      (is (= rrb-v rrb-s)))
    (testing "chunked-seq?"
      (is (chunked-seq? rrb-s)))
    (testing "internal-reduce"
      (is (satisfies? IReduce rrb-s)))))
;; assoc on large RRB vectors: (1) rewrite every index of a 40k vector with
;; its reversed contents, (2) assoc into the tail of subvecs taken at
;; several offsets (1, 32, 1024, 32768 cross node/level boundaries).
(deftest test-assoc
  (let [my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
    (let [v1 (fv/vec (range 40000))
          ;; append :foo at index 40000, then overwrite indices 0..39999
          ;; with the reversed original contents
          v2 (reduce (fn [out [k v]]
                       (assoc out k v))
                     (assoc v1 40000 :foo)
                     (map-indexed vector (rseq v1)))]
      (is (= (concat (rseq v1) [:foo]) v2)))
    (are [i] (= :foo
                (-> (range 40000)
                    (fv/vec)
                    (my-subvec i)
                    (assoc 10 :foo)
                    (nth 10)))
         1 32 1024 32768)))
;; Transient counterpart of test-assoc: same scenarios via assoc! on
;; transients, then persistent! before comparing.
(deftest test-assoc!
  (let [my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
    (let [v1 (fv/vec (range 40000))
          ;; append :foo at index 40000, then overwrite indices 0..39999
          ;; with the reversed original contents, all on a transient
          v2 (persistent!
              (reduce (fn [out [k v]]
                        (assoc! out k v))
                      (assoc! (transient v1) 40000 :foo)
                      (map-indexed vector (rseq v1))))]
      (is (= (concat (rseq v1) [:foo]) v2)))
    (are [i] (= :foo
                (-> (range 40000)
                    (fv/vec)
                    (my-subvec i)
                    (transient)
                    (assoc! 10 :foo)
                    (persistent!)
                    (nth 10)))
         1 32 1024 32768)))
(deftest test-relaxed
  ;; Conjing onto a spliced (relaxed-radix) vector must behave like concat,
  ;; both via the optimized `into` path and the naive conj loop.
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        spliced   (my-catvec (dv/cvec (range 123)) (dv/cvec (range 68)))
        expected  (concat (range 123) (range 68) (range 64))]
    (is (= (into spliced (range 64)) expected))
    (is (= (dv/slow-into spliced (range 64)) expected))))
(deftest test-hasheq
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)]
    ;; CRRBV-25: equal vectors (core or spliced RRB) and their seqs must
    ;; all hash alike. (The issue reference below had lost its `;;`
    ;; comment marker during extraction, leaving a bare symbol; restored.)
    (let [v1 (dv/cvec (range 1024))
          v2 (dv/cvec (range 1024))
          v3 (my-catvec (dv/cvec (range 512)) (dv/cvec (range 512 1024)))
          s1 (seq v1)
          s2 (seq v2)
          s3 (seq v3)]
      (is (= (hash v1) (hash v2) (hash v3) (hash s1) (hash s2) (hash s3)))
      (is (= (hash (nthnext s1 120))
             (hash (nthnext s2 120))
             (hash (nthnext s3 120)))))))
(deftest test-reduce-subvec-catvec
  ;; Repeatedly split a vector and splice an 'x into the gap, walking the
  ;; insertion point from the end back to the front; the result must be
  ;; the original elements interleaved with 'x.
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)
        insert-x  (fn [v n]
                    (my-catvec (my-subvec v 0 n)
                               (dv/cvec ['x])
                               (my-subvec v n)))
        build     (fn [i]
                    (reduce insert-x (dv/cvec (range i)) (range i 0 -1)))]
    (is (= (build 2371)
           (interleave (range 2371) (repeat 'x))))))
(def pos-infinity ##Inf)
;; Variant of the previous test: insertion points start at the middle and
;; each position is hit twice, so splices land on already-relaxed trees.
(deftest test-reduce-subvec-catvec2
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
    (letfn [(insert-by-sub-catvec [v n]
              (my-catvec (my-subvec v 0 n) (dv/cvec ['x])
                         (my-subvec v n)))
            (repeated-subvec-catvec [i]
              (reduce insert-by-sub-catvec
                      (dv/cvec (range i))
                      (take i (interleave (range (quot i 2) pos-infinity)
                                          (range (quot i 2) pos-infinity)))))]
      (let [n 2371
            v (repeated-subvec-catvec n)]
        ;; every element is either an original int or an inserted 'x,
        ;; and exactly n insertions occurred
        (is (every? #(or (integer? %) (= 'x %)) v))
        (is (= (count v) (* 2 n)))))))
;; Chain of subvec/catvec operations chosen so intermediate trees carry a
;; high branch count in their subtrees; the final contents must match the
;; straightforward concat.
(deftest test-splice-high-subtree-branch-count
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)
        x (fv/vec (repeat 1145 \a))
        y (my-catvec (my-subvec x 0 778)
                     (my-subvec x 778 779)
                     (dv/cvec [1])
                     (my-subvec x 779))
        z (my-catvec (my-subvec y 0 780)
                     (dv/cvec [2])
                     (my-subvec y 780 781)
                     (my-subvec y 781))
        ;; note: splicing in an empty vector is part of the scenario
        res (my-catvec (my-subvec z 0 780)
                       (dv/cvec [])
                       (dv/cvec [3])
                       (my-subvec z 781))
        expected (concat (repeat 779 \a) [1] [3] (repeat 366 \a))]
    (is (= res expected))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; This problem reproduction code is from CRRBV-12 ticket:
;; https://clojure.atlassian.net/browse/CRRBV-12
;; I would prefer to have all of the data that is the value of
;; crrbv-12-data read from a separate file, but it is not terribly
;; long, and having it in the code avoids having to figure out how to
find and read the file on N different JavaScript runtime
;; environments, for the ClojureScript version of the test.
(def crrbv-12-data
[7912 7831 5393 5795 6588 2394 6403 6237 6152 5890 6507 6388 6100
7400 6340 7624 6379 5430 6335 5883 5570 6220 6319 6442 5666 3901
6974 5440 6626 7782 6760 6066 7763 9547 5585 6724 5407 5675 7727
7666 6845 6658 5409 7304 7291 5826 6523 5529 7387 6275 7193 5563
6572 7150 2949 1133 7312 7267 7135 7787 5812 7372 4295 5937 2931
4846 6149 1901 6680 7319 7845 7517 6722 6535 6362 5457 6649 7757
7463 6755 7436 6364 7361 7174 6048 6657 6533 5763 6074 6744 6734
5668 61 3842 5395 6489 1723 6248 7664 6645 5943 5428 6995 6688 7088
6305 6198 6197 5765 3691 7157 7305 7631 6058 6655 7846 7746 686 6024
6473 6150 5951 1761 7900 7084 5637 6607 5561 5772 7232 8512 6249
7377 5437 4830 6939 6355 7100 7884 951 6765 7054 1367 4580 7284 5414
7344 7525 5801 6374 6685 6737 4413 7353 1851 5973 7538 7116 6359
6605 6743 6153 7398 4757 6623 7546 7013 7091 7501 5749 6368 7911
6675 3246 6304 6469 6868 7701 5768 6369 6996 6346 6171 5884 6757
7615 5986 9904 5982 7049 6011 7716 6646 6178 6636 6637 7700 3390
6107 6938 2513 5663 5309 5673 7069 6615 5825 7183 5600 2188 5807
7635 7257 4803 6740 5865 6869 6968 7404 5124 7565 6169 7681 6181
5427 9861 7669 5936 5588 5463 6059 5695 5784 6768 6922 5720 6229
9173 6486 6399 6013 5517 7198 7320 6970 5969 7593 7351 7622 6561
5739 6433 6452 6320 6979 6260 6763 5539 6292 7133 6571 6108 7455
8470 7148 7597 6935 6865 7852 6549 6506 5425 6552 5551 5612 7230 809
2694 6408 6783 7626 6703 2754 1015 6809 7584 5473 6165 7105 6447
5856 6739 5564 7886 7856 7355 5814 919 6900 6257 118 7259 7419 6278
7619 6401 5970 7537 2899 6012 7190 5500 6122 5817 7620 6402 5811
5412 6822 5643 6138 5948 5523 4884 6460 5828 7159 5405 6224 7192
8669 5827 538 7416 6598 5577 6769 7547 7323 6748 6398 1505 6211 6466
6699 6207 6444 6863 7646 5917 6796 5619 6282 354 6418 5687 2536 6238
1166 6376 3852 5955 188 7218 7477 6926 7694 7253 5880 5424 7392 6337
7438 7814 3205 6336 6465 6812 1102 6468 6034 6133 5849 7578 7863
5761 6372 7568 5813 6380 6481 6942 7676 5552 7015 7120 7838 5684
6101 6834 6092 7917 6124 867 7187 5527 7488 5900 6267 6443 724 6073
6608 6407 6040 5540 6061 5554 5469 6255 6542 7336 2272 6921 1078
5593 7045 5013 6870 6712 6537 6785 6333 5892 6633 7522 6697 5915
5567 6606 5820 7653 7554 6932 5824 9330 8780 7203 7204 7519 7633
6529 7564 5718 7605 6579 7621 4462 6009 6950 6430 5911 5946 6877
7830 6570 7421 6449 6684 8425 5983 5846 5505 6097 5773 5781 6463
6867 5774 6601 1577 5642 6959 6251 7741 7391 6036 6892 5097 6874
6580 6348 5904 6709 5976 7411 7223 6252 7414 6813 4378 5888 5546
6385 401 5912 7828 7775 5925 6151 7648 5810 7673 6250 5808 7251 1407
5644 7439 7901 1964 6631 6858 7630 7771 2892 946 6397 5443 5715 5665
7306 6233 5566 5447 7011 6314 2054 5786 2170 6901 6077 6239 7791
6960 7891 7878 7758 5829 7611 7059 5455 6654 6459 6949 7406 7854
5805 6564 7033 6445 5939 6706 6103 7614 7902 6527 7479 6196 6484
3521 7269 6055 7331 6184 6746 6936 5891 6687 5771 7136 6625 7865
5864 6704 7726 5842 6295 6910 5277 7528 5689 5674 7457 7086 5220 317
7720 6720 5913 7098 5450 7275 7521 7826 7007 6378 7277 6844 7177
5482 97 6730 7861 5601 6000 6039 6953 5624 6450 6736 7492 5499 5822
7276 2889 7102 6648 6291 865 7348 7330 1449 6719 5550 7326 6338 6714
7805 7082 6377 2791 7876 5870 7107 7505 5416 7057 6021 7037 6331
5698 6721 5180 7390 5938 9067 7215 4566 8051 6557 6161 5894 1379
7335 2602 6520 7199 6878 6366 6948 7202 4791 7338 7442 5987 7099
7632 5453 4755 4947 7786 6254 7103 7595 6670 6485 6117 6756 6339
7240 7609 6853 6299 7205 4857 7511 576 5835 5396 5997 5508 6413 6219
5403 7686 9189 6634 5503 6801 7508 5611 7667 7572 7587 6015 7153
7340 6279 5646 2004 2708 7119 5737 3258 7427 6204 6476 6511 2300
7055 5389 6984 5438 6002 6272 5756 5734 6913 6425 6847 5657 6357
6862 6030 5522 6943 3518 6139 6671 7764 6493 5691 6082 4635 6640
6898 7262 9391 6828 2277 6690 6464 5759 7441 6622 1262 7114 6294
7070 6539 6788 6167 7824 6382 2512 7322 5992 7696 5445 5538 6140
7151 6409 7085 6166 6263 1194 5544 7141 5906 2939 7389 7290 6491
6322 8324 7341 7246 5610 7536 6946 7540 7760 6293 5589 7009 7822
5456 6805 5841 7722 5559 7265 6903 3517 1243 6078 7180 6147 8063
7395 7551 5460 6421 7567 6546 6941 6301 5486 7347 6479 5990 5932
6881 7737 6051 7375 5762 6897 2967 7297 7263 6965 6752 6158 7556
6794 7641 7628 2374 6289 7286 7581 6008 491 6919 9157 7002 6585 7960
6967 7692 7128 5680 5037 5752 6223 5989 7545 6584 7282 6221 871 6116
5484 6350 6266 6889 6216 1892 924 5875 7658 5461 5410 8352 7072 5724
6931 6050 6125 5519 6711 7518 6613 7576 7989 5603 7214 6664 2933
5839 7454 9353 6512 7242 7768 6037 6567 6673 8438 7364 5406 6080 577
6895 5742 5722 6944 6273 5965 5464 6876 7719 7311 7258 6829 7280
6028 5740 9162 9858 6695 7239 6972 7025 7147 7039 6226 6135 7219
6477 6708 767 5432 7405 7580 3790 372 7523 6597 5922 6105 5434 9587
6173 7739 5984 5854 2153 6912 7476 7598 5985 5874 8723 5628 5496
7352 4829 6483 7211 6933 5545 7544 5444 5790 8223 1089 6676 5667
6749 6777 5429 6347 5399 5662 6446 5524 6909 5415 7742 6343 5921
7160 7175 7026 1838 6894 4355 52 6192 5341 6945 7366 7816 2006 7380
6531 6904 5958 6270 6069 5574 7349 7212 5256 6010 6961 2825 6691
7792 6017 6888 7707 6693 6456 5871 7238 7780 7256 5630 7744 6855
5077 6958 6046 6707 6530 6501 7298 5636 6121 1105 6243 5541 6814
6732 7500 6866 7093 7745 7030 4338 6517 5991 6458 6213 4695 5542
7853 5926 6550 5230 7432 7006 5858 7677 6495 7310 6432 7487 7670
7674 6245 7315 7893 4360 940 6303 5757 7697 7506 5491 1309 7695 2214
5553 6964 7403 7302 6589 7851 7186 6193 2964 6242 6545 7012 7010
5448 5767 6647 7610 7485 6509 6083 6525 5607 9982 6244 7832 7213
6308 1320 7092 5656 6342 7864 7140 2577 104 1343 6786 7654 6156 5584
6818 5604 6681 6038 6056 6594 6603 7040 5468 5957 7229 6735 5510
6700 7725 7431 7154 7682 6558 7158 7470 7749 5400 5397 7247 6582
5832 7041 7325 5777 6759 6577 6195 7895 9626 7042 6026 6741 7811
7942 8926 1499 6772 7561 5565 3587 7273 6172 7428 6787 7181 5754
7579 5535 5543 5818 7264 1854 6998 7425 5394 6661 6562 375 2990])
;; Naive quicksort built from filter + catvec so that vector splicing gets
;; exercised heavily; `my-catvec` lets callers supply a checking variant.
;; Not tail-recursive, so only suitable for modest input sizes.
(defn quicksort [my-catvec v]
  (if (<= (count v) 1)
    v
    (let [[x & xs] v]
      ;; first element is the pivot; <= keeps duplicates on the left
      (my-catvec (quicksort my-catvec (dv/cvec (filter #(<= % x) xs)))
                 (dv/cvec [x])
                 (quicksort my-catvec (dv/cvec (filter #(> % x) xs)))))))
(defn ascending?
  "True when coll is sorted in non-decreasing order; trivially true for
  collections with fewer than two elements."
  [coll]
  (->> coll
       (partition 2 1)
       (every? (fn [[a b]] (<= a b)))))
;; CRRBV-12 regressions: quicksort over the captured data set must yield a
;; sorted result, and 963 nested single-element catvecs must pop back down
;; to the empty vector.
(deftest test-crrbv-12
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        v (dv/cvec crrbv-12-data)]
    (testing "Ascending order after quicksort"
      (is (ascending? (quicksort my-catvec v))))
    (testing "Repeated catvec followed by pop"
      ;; build up with catvec 963 times, then pop 963 times
      (is (= [] (nth (iterate pop
                              (nth (iterate #(my-catvec (dv/cvec [0]) %) [])
                                   963))
                     963))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Regression helper for popping (transient) vectors holding 32*32+1
;; elements, the size that crosses a tail/trie boundary. Two explanatory
;; comments below had lost their `;;` markers during extraction, leaving
;; bare symbols in the body; the markers are restored here.
(defn npe-for-1025-then-pop! [kind]
  (let [my-pop! (if u/extra-checks? dv/checking-pop! pop!)
        bfactor-squared (* 32 32)
        mk-vector (case kind
                    :object-array fv/vector)
        boundary 54
        v1 (-> (mk-vector)
               (into (range boundary))
               (into (range boundary (inc bfactor-squared))))
        v2 (-> (mk-vector)
               (into (range bfactor-squared))
               (transient)
               (my-pop!)
               (persistent!))
        v3 (-> (mk-vector)
               (into (range boundary))
               (into (range boundary (inc bfactor-squared)))
               (transient)
               (my-pop!)
               (persistent!))
        v4 (-> (mk-vector)
               (into (range (inc bfactor-squared)))
               (transient)
               (my-pop!)
               (persistent!))]
    (is (= (seq v1) (range (inc bfactor-squared))))
    (is (= (seq v2) (range (dec bfactor-squared))))
    ;; This used to fail with core.rrb-vector version 0.0.14 with
    ;; NullPointerException while traversing the seq on clj. It gets
    ;; a different kind of error with cljs.
    (is (= (seq v3) (range bfactor-squared)))
    ;; This one caused a NullPointerException with version 0.0.14
    ;; while traversing the seq
    (is (= (seq v4) (range bfactor-squared)))))
(deftest test-npe-for-1025-then-pop!
  ;; Run the pop! regression for every supported backing-array kind.
  (run! npe-for-1025-then-pop! [:object-array]))
;; This problem reproduction code is slightly modified from a version
;; provided in a comment (2018-Dec-09) on this issue:
;; https://clojure.atlassian.net/browse/CRRBV-20
;; Marble-circle game from the CRRBV-20 reproduction (rotate the circle,
;; place or remove a marble each round, track per-player scores). The
;; vector/catvec/subvec implementations are injected so the same game can
;; run on core vectors and RRB vectors for comparison. Returns a vector of
;; {:round n :marbles v} snapshots, one per round played.
(defn play [my-vector my-catvec my-subvec players rounds]
  (letfn [(swap [marbles split-ndx]
            ;; rotate by splitting at split-ndx and swapping the halves
            (my-catvec
             (my-subvec marbles split-ndx)
             (my-subvec marbles 0 split-ndx)))
          (rotl [marbles n]
            (swap marbles (mod n (count marbles))))
          (rotr [marbles n]
            (swap marbles (mod (- (count marbles) n) (count marbles))))
          (place-marble
           [marbles marble]
           (let [marbles (rotl marbles 2)]
             ;; returns [new-marbles score-delta]
             [(my-catvec (my-vector marble) marbles) 0]))
          (remove-marble [marbles marble]
            (let [marbles (rotr marbles 7)
                  first-marble (nth marbles 0)]
              [(my-subvec marbles 1) (+ marble first-marble)]))
          (play-round [marbles round]
            ;; every 23rd round removes a marble and scores; others place
            (if (zero? (mod round 23))
              (remove-marble marbles round)
              (place-marble marbles round)))
          (add-score [scores player round-score]
            (if (zero? round-score)
              scores
              (assoc scores player (+ (get scores player 0) round-score))))]
    (loop [marbles (my-vector 0)
           round 1
           player 1
           scores {}
           ret []]
      (let [[marbles round-score] (play-round marbles round)
            scores (add-score scores player round-score)]
        (if (> round rounds)
          (conj ret {:round round :marbles marbles})
          (recur marbles
                 (inc round)
                 (if (= player players) 1 (inc player))
                 scores
                 (conj ret {:round round :marbles marbles})))))))
;; Reference run of `play` on core data structures; `into` stands in for a
;; two-argument catvec and clojure.core/subvec for slicing.
(defn play-core [& args]
  (apply play clojure.core/vector clojure.core/into clojure.core/subvec args))
(defn play-rrbv
  "Run `play` on RRB vectors, using the debug-checking catvec/subvec
  variants when extra checks are enabled."
  [& args]
  (let [catv (if u/extra-checks? dv/checking-catvec fv/catvec)
        subv (if u/extra-checks? dv/checking-subvec fv/subvec)]
    (apply play fv/vector catv subv args)))
;; CRRBV-20 regressions. Several explanatory comments in this test had
;; lost their `;;` markers during extraction, leaving bare symbols that
;; would not compile; the markers are restored below.
(deftest test-crrbv-20
  ;; This one passes
  (is (= (play-core 10 1128)
         (play-rrbv 10 1128)))
  ;; This ends up with (play-rrbv 10 1129) throwing an exception, with
  ;; core.rrb-vector version 0.0.14
  (is (= (play-core 10 1129)
         (play-rrbv 10 1129)))
  ;; The previous test demonstrates a bug in the transient RRB vector
  ;; implementation. The one below demonstrated a similar bug in the
  ;; persistent RRB vector implementation in version 0.0.14.
  (let [v1128 (:marbles (last (play-rrbv 10 1128)))
        v1129-pre (-> v1128
                      (fv/subvec 2)
                      (conj 2001))]
    (is (every? integer? (conj v1129-pre 2002)))))
;; CRRBV-21 regressions. Comment markers stripped during extraction are
;; restored below (several lines had become bare symbols).
(deftest test-crrbv-21
  ;; The following sequence of operations gave a different exception
  ;; than the above with core.rrb-vector version 0.0.14, and was a
  ;; different root cause with a distinct fix required. I do not
  ;; recall whether it was the same root cause as
  ;; npe-for-1025-then-pop!, but both test cases are included for extra
  ;; testing goodness.
  (let [v1128 (:marbles (last (play-rrbv 10 1128)))
        vpop1 (reduce (fn [v i] (pop v))
                      v1128 (range 1026))]
    (is (every? integer? (pop vpop1)))
    ;; The transient version below gave a similar exception with
    ;; version 0.0.14, but the call stack went through the transient
    ;; version of popTail, rather than the persistent version of
    ;; popTail that the one above does.
    (is (every? integer? (persistent! (pop! (transient vpop1)))))))
(deftest test-crrbv-22
  ;; 32*32+1 elements crosses a tail/trie boundary; popping the final
  ;; element must work on both the transient and persistent paths.
  (let [v1025 (into (fv/vector) (range 1025))]
    (testing "pop! from a regular transient vector with 32*32+1 elements"
      (is (= (persistent! (pop! (transient v1025)))
             (range 1024))))
    (testing "pop from a persistent regular vector with 32*32+1 elements"
      (is (= (pop v1025)
             (range 1024))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; This code was copied from
;;
;; mentioned in issue
;; https://clojure.atlassian.net/browse/CRRBV-14
;; Elimination puzzle from the gist referenced above: elves in a circle,
;; each turn the elf "opposite" the current one is removed and the circle
;; rotates. Stresses repeated subvec/catvec on a shrinking vector. The
;; vec/catvec/subvec implementations are injected by the caller.
(defn puzzle-b [n my-vec my-catvec my-subvec]
  (letfn [(remove-at [arr idx]
            (my-catvec (my-subvec arr 0 idx) (my-subvec arr (inc idx))))
          (create-arr [size]
            (my-vec (range 1 (inc size))))
          (fv-rest [arr]
            (my-subvec arr 1))
          (calculate-opposite [n]
            (int (/ n 2)))
          (move [elfs]
            ;; returns {:ok winner} when one elf remains, otherwise the
            ;; next state of the circle
            (let [lc (count elfs)]
              (if (= 1 lc)
                {:ok (first elfs)}
                (let [current (first elfs)
                      opposite-pos (calculate-opposite lc)
                      _ (assert (> opposite-pos 0))
                      _ (assert (< opposite-pos lc))
                      ;; NOTE(review): opposite-elf is bound but unused;
                      ;; kept to preserve the original reproduction
                      opposite-elf (nth elfs opposite-pos)
                      other2 (fv-rest (remove-at elfs opposite-pos))]
                  (my-catvec other2 (dv/cvec [current]))))))
          (puzzle-b-sample [elfs round]
            (let [elfs2 (move elfs)]
              ;;(println "round=" round "# elfs=" (count elfs))
              ;; keyword lookup on the vector state returns nil, so this
              ;; only fires once `move` yields the {:ok winner} map
              (if (:ok elfs2)
                (:ok elfs2)
                (recur elfs2 (inc round)))))]
    (puzzle-b-sample (create-arr n) 1)))
;; Reference run of puzzle-b on core data structures; `into` stands in for
;; a two-argument catvec.
(defn puzzle-b-core [n]
  (puzzle-b n clojure.core/vec clojure.core/into clojure.core/subvec))
;; Read the RRB tree's shift straight off the vector object via the
;; ClojureScript field name `shift` (implementation detail of fv vectors).
(defn get-shift [v]
  (.-shift v))
;; One-line summary of a vector: element count, tree shift, and how full
;; its nodes are as a percentage (via the debug namespace).
(defn vstats [v]
  (str "cnt=" (count v)
       " shift=" (get-shift v)
       " %=" (pd/format "%5.1f" (* 100.0 (dv/fraction-full v)))))
;;(def custom-catvec-data (atom []))
;; catvec wrapper that prints tree statistics whenever the result's shift
;; reaches 30 or exceeds every argument's shift, to spot pathological tree
;; growth. Three disabled debug lines (recording into custom-catvec-data,
;; whose def above is also commented out) had lost their `;;` markers
;; during extraction — the bare `n (count @custom-catvec-data)` binding
;; would not compile; the comment markers are restored here.
(defn custom-catvec [& args]
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        ;;n (count @custom-catvec-data)
        max-arg-shift (apply max (map get-shift args))
        ret (apply my-catvec args)
        ret-shift (get-shift ret)]
    (when (or (>= ret-shift 30)
              (> ret-shift max-arg-shift))
      (doall (map-indexed
              (fn [idx v]
                (println (str "custom-catvec ENTER v" idx " " (vstats v))))
              args))
      (println (str "custom-catvec LEAVE ret " (vstats ret))))
    ;;(swap! custom-catvec-data conj {:args args :ret ret})
    ;;(println "custom-catvec RECRD in index" n "of @custom-catvec-data")
    ret))
(defn puzzle-b-rrbv
  "Run puzzle-b on RRB vectors; catvec is wrapped by custom-catvec so
  suspicious tree growth gets logged."
  [n]
  (puzzle-b n
            fv/vec
            custom-catvec
            (if u/extra-checks? dv/checking-subvec fv/subvec)))
;; CRRBV-14 regressions. Comment markers stripped during extraction are
;; restored below (two lines had become bare forms).
(deftest test-crrbv-14
  ;; This one passes
  (u/reset-optimizer-counts!)
  (is (= (puzzle-b-core 977)
         (puzzle-b-rrbv 977)))
  (u/print-optimizer-counts)
  ;; (puzzle-b-rrbv 978) throws
  ;; ArrayIndexOutOfBoundsException
  (u/reset-optimizer-counts!)
  (is (integer? (puzzle-b-rrbv 978)))
  (u/print-optimizer-counts))
;; CRRBV-30: nth lookup and direct IFn invocation must agree for core
;; vectors, RRB vectors, and the transients of both.
(deftest test-crrbv-30
  (let [v1 [1 2 3]
        tv1 (transient [1 2 3])
        fv1 (fv/vector 1 2 3)
        tfv1 (transient (fv/vector 1 2 3))]
    ;; each pair is [value-to-check description-for-failure-message]
    (doseq [[v msg] [[v1 ""]
                     [fv1 ""]
                     [(map #(nth v1 %) [0 1 2]) "#(nth v1 %)"]
                     [(map #(v1 %) [0 1 2]) "#(v1 %)"]
                     [(map #(nth tv1 %) [0 1 2]) "#(nth tv1 %)"]
                     [(map #(tv1 %) [0 1 2]) "#(tv1 %)"]
                     [(map #(nth fv1 %) [0 1 2]) "#(nth fv1 %)"]
                     [(map #(fv1 %) [0 1 2]) "#(fv1 %)"]
                     [(map #(nth tfv1 %) [0 1 2]) "#(nth tfv1 %)"]
                     [(map #(tfv1 %) [0 1 2]) "#(tfv1 %)"]]]
      (is (= '(1 2 3) v) (str "Failing case: " msg)))))
| null | https://raw.githubusercontent.com/clojure/core.rrb-vector/88c2f814b47c0bbc4092dad82be2ec783ed2961f/src/test/cljs/clojure/core/rrb_vector/test_common.cljs | clojure | The intent is to keep this file as close to
for this library, this file and that one can be replaced with a
This problem reproduction code is from CRRBV-12 ticket:
-12
I would prefer to have all of the data that is the value of
crrbv-12-data read from a separate file, but it is not terribly
long, and having it in the code avoids having to figure out how to
environments, for the ClojureScript version of the test.
while traversing the seq
This problem reproduction code is slightly modified from a version
-20
The previous test demonstrates a bug in the transient RRB vector
The following sequence of operations gave a different exception
different root cause with a distinct fix required. I do not
recall whether it was the same root cause as
testing goodness.
The transient version below gave a similar exception with
version of popTail, rather than the persistent version of
popTail that the one above does.
This code was copied from
mentioned in issue
-14
(println "round=" round "# elfs=" (count elfs))
(def custom-catvec-data (atom []))
ArrayIndexOutOfBoundsException | (ns clojure.core.rrb-vector.test-common
(:require [clojure.test :as test :refer [deftest testing is are]]
[clojure.core.reducers :as r]
[clojure.core.rrb-vector.test-utils :as u]
[clojure.core.rrb-vector :as fv]
[clojure.core.rrb-vector.debug :as dv]
[clojure.core.rrb-vector.debug-platform-dependent :as pd]))
src / test / clojure / clojure / core / rrb_vector / test_common.clj as
possible , so that when we start requiring Clojure 1.7.0 and later
common test file with the suffix .cljc
(dv/set-debug-opts! dv/full-debug-opts)
(deftest test-slicing
(testing "slicing"
(is (dv/check-subvec u/extra-checks?
32000 10 29999 1234 18048 10123 10191))))
(deftest test-splicing
(testing "splicing"
(is (dv/check-catvec u/extra-checks?
1025 1025 3245 1025 32768 1025 1025 10123 1025 1025))
(is (dv/check-catvec u/extra-checks?
10 40 40 40 40 40 40 40 40))
(is (apply dv/check-catvec u/extra-checks? (repeat 30 33)))
(is (dv/check-catvec u/extra-checks?
26091 31388 1098 43443 46195 4484 48099 7905
13615 601 13878 250 10611 9271 53170))
Order that catvec will perform splicev calls :
(let [my-splice (if u/extra-checks? dv/checking-splicev fv/catvec)
counts [26091 31388 1098 43443 46195 4484 48099 7905
13615 601 13878 250 10611 9271 53170]
prefix-sums (reductions + counts)
ranges (map range (cons 0 prefix-sums) prefix-sums)
[v01 v02 v03 v04 v05 v06 v07 v08
v09 v10 v11 v12 v13 v14 v15] (map fv/vec ranges)
top level call
top level call
top level call
recurse level 1 catvec call
recurse level 1 catvec call
recurse level 1 catvec call
recurse level 2 catvec call
recurse level 2 catvec call
recurse level 2 catvec call
recurse level 3 catvec call
recurse level 3 catvec call
recurse level 2 catvec call
recurse level 1 catvec call
top level call
exp-val (range (last prefix-sums))]
(is (= -1 (dv/first-diff v01-15 exp-val)))
(is (= -1 (dv/first-diff (into v01-04 v05-15) exp-val))))))
(deftest test-reduce
(let [v1 (vec (range 128))
v2 (fv/vec (range 128))]
(testing "reduce"
(is (= (reduce + v1) (reduce + v2))))
(testing "reduce-kv"
(is (= (reduce-kv + 0 v1) (reduce-kv + 0 v2))))))
(deftest test-reduce-2
(let [my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)
v1 (my-subvec (dv/cvec (range 1003)) 500)
v2 (dv/cvec (range 500 1003))]
(is (= (reduce + 0 v1)
(reduce + 0 v2)
(reduce + 0 (r/map identity (seq v1)))
(reduce + 0 (r/map identity (seq v2)))))))
(deftest test-reduce-3
(let [v0 (vec [])
rv0 (fv/vec [])]
(testing "reduce"
(is (= (reduce + v0) (reduce + rv0))))
(testing "reduce-kv"
(is (= (reduce-kv + 0 v0) (reduce-kv + 0 rv0))))))
(deftest test-seq
(let [v (fv/vec (range 128))
s (seq v)]
(testing "seq contents"
(is (= v s)))
(testing "chunked-seq?"
(is (chunked-seq? s)))
(testing "internal-reduce"
(is (satisfies? IReduce
s)))))
(deftest test-assoc
(let [my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
(let [v1 (fv/vec (range 40000))
v2 (reduce (fn [out [k v]]
(assoc out k v))
(assoc v1 40000 :foo)
(map-indexed vector (rseq v1)))]
(is (= (concat (rseq v1) [:foo]) v2)))
(are [i] (= :foo
(-> (range 40000)
(fv/vec)
(my-subvec i)
(assoc 10 :foo)
(nth 10)))
1 32 1024 32768)))
(deftest test-assoc!
(let [my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
(let [v1 (fv/vec (range 40000))
v2 (persistent!
(reduce (fn [out [k v]]
(assoc! out k v))
(assoc! (transient v1) 40000 :foo)
(map-indexed vector (rseq v1))))]
(is (= (concat (rseq v1) [:foo]) v2)))
(are [i] (= :foo
(-> (range 40000)
(fv/vec)
(my-subvec i)
(transient)
(assoc! 10 :foo)
(persistent!)
(nth 10)))
1 32 1024 32768)))
(deftest test-relaxed
(let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)]
(is (= (into (my-catvec (dv/cvec (range 123)) (dv/cvec (range 68)))
(range 64))
(concat (range 123) (range 68) (range 64))))
(is (= (dv/slow-into (my-catvec (dv/cvec (range 123)) (dv/cvec (range 68)))
(range 64))
(concat (range 123) (range 68) (range 64))))))
(deftest test-hasheq
(let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)]
CRRBV-25
(let [v1 (dv/cvec (range 1024))
v2 (dv/cvec (range 1024))
v3 (my-catvec (dv/cvec (range 512)) (dv/cvec (range 512 1024)))
s1 (seq v1)
s2 (seq v2)
s3 (seq v3)]
(is (= (hash v1) (hash v2) (hash v3) (hash s1) (hash s2) (hash s3)))
(is (= (hash (nthnext s1 120))
(hash (nthnext s2 120))
(hash (nthnext s3 120)))))))
(deftest test-reduce-subvec-catvec
(let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
(letfn [(insert-by-sub-catvec [v n]
(my-catvec (my-subvec v 0 n) (dv/cvec ['x])
(my-subvec v n)))
(repeated-subvec-catvec [i]
(reduce insert-by-sub-catvec (dv/cvec (range i)) (range i 0 -1)))]
(is (= (repeated-subvec-catvec 2371)
(interleave (range 2371) (repeat 'x)))))))
(def pos-infinity ##Inf)
(deftest test-reduce-subvec-catvec2
(let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
(letfn [(insert-by-sub-catvec [v n]
(my-catvec (my-subvec v 0 n) (dv/cvec ['x])
(my-subvec v n)))
(repeated-subvec-catvec [i]
(reduce insert-by-sub-catvec
(dv/cvec (range i))
(take i (interleave (range (quot i 2) pos-infinity)
(range (quot i 2) pos-infinity)))))]
(let [n 2371
v (repeated-subvec-catvec n)]
(is (every? #(or (integer? %) (= 'x %)) v))
(is (= (count v) (* 2 n)))))))
(deftest test-splice-high-subtree-branch-count
(let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)
x (fv/vec (repeat 1145 \a))
y (my-catvec (my-subvec x 0 778)
(my-subvec x 778 779)
(dv/cvec [1])
(my-subvec x 779))
z (my-catvec (my-subvec y 0 780)
(dv/cvec [2])
(my-subvec y 780 781)
(my-subvec y 781))
res (my-catvec (my-subvec z 0 780)
(dv/cvec [])
(dv/cvec [3])
(my-subvec z 781))
expected (concat (repeat 779 \a) [1] [3] (repeat 366 \a))]
(is (= res expected))))
find and read the file on N different JavaScript runtime
(def crrbv-12-data
[7912 7831 5393 5795 6588 2394 6403 6237 6152 5890 6507 6388 6100
7400 6340 7624 6379 5430 6335 5883 5570 6220 6319 6442 5666 3901
6974 5440 6626 7782 6760 6066 7763 9547 5585 6724 5407 5675 7727
7666 6845 6658 5409 7304 7291 5826 6523 5529 7387 6275 7193 5563
6572 7150 2949 1133 7312 7267 7135 7787 5812 7372 4295 5937 2931
4846 6149 1901 6680 7319 7845 7517 6722 6535 6362 5457 6649 7757
7463 6755 7436 6364 7361 7174 6048 6657 6533 5763 6074 6744 6734
5668 61 3842 5395 6489 1723 6248 7664 6645 5943 5428 6995 6688 7088
6305 6198 6197 5765 3691 7157 7305 7631 6058 6655 7846 7746 686 6024
6473 6150 5951 1761 7900 7084 5637 6607 5561 5772 7232 8512 6249
7377 5437 4830 6939 6355 7100 7884 951 6765 7054 1367 4580 7284 5414
7344 7525 5801 6374 6685 6737 4413 7353 1851 5973 7538 7116 6359
6605 6743 6153 7398 4757 6623 7546 7013 7091 7501 5749 6368 7911
6675 3246 6304 6469 6868 7701 5768 6369 6996 6346 6171 5884 6757
7615 5986 9904 5982 7049 6011 7716 6646 6178 6636 6637 7700 3390
6107 6938 2513 5663 5309 5673 7069 6615 5825 7183 5600 2188 5807
7635 7257 4803 6740 5865 6869 6968 7404 5124 7565 6169 7681 6181
5427 9861 7669 5936 5588 5463 6059 5695 5784 6768 6922 5720 6229
9173 6486 6399 6013 5517 7198 7320 6970 5969 7593 7351 7622 6561
5739 6433 6452 6320 6979 6260 6763 5539 6292 7133 6571 6108 7455
8470 7148 7597 6935 6865 7852 6549 6506 5425 6552 5551 5612 7230 809
2694 6408 6783 7626 6703 2754 1015 6809 7584 5473 6165 7105 6447
5856 6739 5564 7886 7856 7355 5814 919 6900 6257 118 7259 7419 6278
7619 6401 5970 7537 2899 6012 7190 5500 6122 5817 7620 6402 5811
5412 6822 5643 6138 5948 5523 4884 6460 5828 7159 5405 6224 7192
8669 5827 538 7416 6598 5577 6769 7547 7323 6748 6398 1505 6211 6466
6699 6207 6444 6863 7646 5917 6796 5619 6282 354 6418 5687 2536 6238
1166 6376 3852 5955 188 7218 7477 6926 7694 7253 5880 5424 7392 6337
7438 7814 3205 6336 6465 6812 1102 6468 6034 6133 5849 7578 7863
5761 6372 7568 5813 6380 6481 6942 7676 5552 7015 7120 7838 5684
6101 6834 6092 7917 6124 867 7187 5527 7488 5900 6267 6443 724 6073
6608 6407 6040 5540 6061 5554 5469 6255 6542 7336 2272 6921 1078
5593 7045 5013 6870 6712 6537 6785 6333 5892 6633 7522 6697 5915
5567 6606 5820 7653 7554 6932 5824 9330 8780 7203 7204 7519 7633
6529 7564 5718 7605 6579 7621 4462 6009 6950 6430 5911 5946 6877
7830 6570 7421 6449 6684 8425 5983 5846 5505 6097 5773 5781 6463
6867 5774 6601 1577 5642 6959 6251 7741 7391 6036 6892 5097 6874
6580 6348 5904 6709 5976 7411 7223 6252 7414 6813 4378 5888 5546
6385 401 5912 7828 7775 5925 6151 7648 5810 7673 6250 5808 7251 1407
5644 7439 7901 1964 6631 6858 7630 7771 2892 946 6397 5443 5715 5665
7306 6233 5566 5447 7011 6314 2054 5786 2170 6901 6077 6239 7791
6960 7891 7878 7758 5829 7611 7059 5455 6654 6459 6949 7406 7854
5805 6564 7033 6445 5939 6706 6103 7614 7902 6527 7479 6196 6484
3521 7269 6055 7331 6184 6746 6936 5891 6687 5771 7136 6625 7865
5864 6704 7726 5842 6295 6910 5277 7528 5689 5674 7457 7086 5220 317
7720 6720 5913 7098 5450 7275 7521 7826 7007 6378 7277 6844 7177
5482 97 6730 7861 5601 6000 6039 6953 5624 6450 6736 7492 5499 5822
7276 2889 7102 6648 6291 865 7348 7330 1449 6719 5550 7326 6338 6714
7805 7082 6377 2791 7876 5870 7107 7505 5416 7057 6021 7037 6331
5698 6721 5180 7390 5938 9067 7215 4566 8051 6557 6161 5894 1379
7335 2602 6520 7199 6878 6366 6948 7202 4791 7338 7442 5987 7099
7632 5453 4755 4947 7786 6254 7103 7595 6670 6485 6117 6756 6339
7240 7609 6853 6299 7205 4857 7511 576 5835 5396 5997 5508 6413 6219
5403 7686 9189 6634 5503 6801 7508 5611 7667 7572 7587 6015 7153
7340 6279 5646 2004 2708 7119 5737 3258 7427 6204 6476 6511 2300
7055 5389 6984 5438 6002 6272 5756 5734 6913 6425 6847 5657 6357
6862 6030 5522 6943 3518 6139 6671 7764 6493 5691 6082 4635 6640
6898 7262 9391 6828 2277 6690 6464 5759 7441 6622 1262 7114 6294
7070 6539 6788 6167 7824 6382 2512 7322 5992 7696 5445 5538 6140
7151 6409 7085 6166 6263 1194 5544 7141 5906 2939 7389 7290 6491
6322 8324 7341 7246 5610 7536 6946 7540 7760 6293 5589 7009 7822
5456 6805 5841 7722 5559 7265 6903 3517 1243 6078 7180 6147 8063
7395 7551 5460 6421 7567 6546 6941 6301 5486 7347 6479 5990 5932
6881 7737 6051 7375 5762 6897 2967 7297 7263 6965 6752 6158 7556
6794 7641 7628 2374 6289 7286 7581 6008 491 6919 9157 7002 6585 7960
6967 7692 7128 5680 5037 5752 6223 5989 7545 6584 7282 6221 871 6116
5484 6350 6266 6889 6216 1892 924 5875 7658 5461 5410 8352 7072 5724
6931 6050 6125 5519 6711 7518 6613 7576 7989 5603 7214 6664 2933
5839 7454 9353 6512 7242 7768 6037 6567 6673 8438 7364 5406 6080 577
6895 5742 5722 6944 6273 5965 5464 6876 7719 7311 7258 6829 7280
6028 5740 9162 9858 6695 7239 6972 7025 7147 7039 6226 6135 7219
6477 6708 767 5432 7405 7580 3790 372 7523 6597 5922 6105 5434 9587
6173 7739 5984 5854 2153 6912 7476 7598 5985 5874 8723 5628 5496
7352 4829 6483 7211 6933 5545 7544 5444 5790 8223 1089 6676 5667
6749 6777 5429 6347 5399 5662 6446 5524 6909 5415 7742 6343 5921
7160 7175 7026 1838 6894 4355 52 6192 5341 6945 7366 7816 2006 7380
6531 6904 5958 6270 6069 5574 7349 7212 5256 6010 6961 2825 6691
7792 6017 6888 7707 6693 6456 5871 7238 7780 7256 5630 7744 6855
5077 6958 6046 6707 6530 6501 7298 5636 6121 1105 6243 5541 6814
6732 7500 6866 7093 7745 7030 4338 6517 5991 6458 6213 4695 5542
7853 5926 6550 5230 7432 7006 5858 7677 6495 7310 6432 7487 7670
7674 6245 7315 7893 4360 940 6303 5757 7697 7506 5491 1309 7695 2214
5553 6964 7403 7302 6589 7851 7186 6193 2964 6242 6545 7012 7010
5448 5767 6647 7610 7485 6509 6083 6525 5607 9982 6244 7832 7213
6308 1320 7092 5656 6342 7864 7140 2577 104 1343 6786 7654 6156 5584
6818 5604 6681 6038 6056 6594 6603 7040 5468 5957 7229 6735 5510
6700 7725 7431 7154 7682 6558 7158 7470 7749 5400 5397 7247 6582
5832 7041 7325 5777 6759 6577 6195 7895 9626 7042 6026 6741 7811
7942 8926 1499 6772 7561 5565 3587 7273 6172 7428 6787 7181 5754
7579 5535 5543 5818 7264 1854 6998 7425 5394 6661 6562 375 2990])
;; Functional quicksort over a vector, parameterized on the concatenation
;; function `my-catvec` so the test can swap in checked/unchecked catvec.
;; Partitions around the first element and recursively sorts both sides.
(defn quicksort [my-catvec v]
  (if (> (count v) 1)
    (let [pivot (first v)
          others (rest v)
          smaller (filter (fn [e] (<= e pivot)) others)
          larger (filter (fn [e] (> e pivot)) others)]
      (my-catvec (quicksort my-catvec (dv/cvec smaller))
                 (dv/cvec [pivot])
                 (quicksort my-catvec (dv/cvec larger))))
    v))
;; True when `coll` is in non-decreasing order; vacuously true for
;; collections with fewer than two elements.
(defn ascending? [coll]
  (->> coll
       (partition 2 1)
       (every? (fn [[lo hi]] (<= lo hi)))))
;; Regression test: quicksort built on catvec must return an ascending
;; vector, and 963 rounds of catvec-prepend followed by 963 pops must
;; reduce back to the empty vector without corrupting the tree.
(deftest test-crrbv-12
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        v (dv/cvec crrbv-12-data)]
    (testing "Ascending order after quicksort"
      (is (ascending? (quicksort my-catvec v))))
    (testing "Repeated catvec followed by pop"
      ;; Build [] -> 963 prepends of [0] via catvec -> 963 pops -> expect [].
      (is (= [] (nth (iterate pop
                              (nth (iterate #(my-catvec (dv/cvec [0]) %) [])
                                   963))
                     963))))))
;; Regression exercise for a NullPointerException seen in core.rrb-vector
;; 0.0.14: a vector with 32*32+1 elements, made transient, popped once, and
;; made persistent again, blew up when its seq was traversed.  The fix in
;; this edit is textual only: the `;;` markers on the inline comments below
;; were stripped in this copy of the file, leaving bare prose inside the
;; defn; they are restored here so the form reads (and parses) correctly.
(defn npe-for-1025-then-pop! [kind]
  (let [my-pop! (if u/extra-checks? dv/checking-pop! pop!)
        bfactor-squared (* 32 32)
        mk-vector (case kind
                    :object-array fv/vector)
        boundary 54
        v1 (-> (mk-vector)
               (into (range boundary))
               (into (range boundary (inc bfactor-squared))))
        v2 (-> (mk-vector)
               (into (range bfactor-squared))
               (transient)
               (my-pop!)
               (persistent!))
        v3 (-> (mk-vector)
               (into (range boundary))
               (into (range boundary (inc bfactor-squared)))
               (transient)
               (my-pop!)
               (persistent!))
        v4 (-> (mk-vector)
               (into (range (inc bfactor-squared)))
               (transient)
               (my-pop!)
               (persistent!))]
    (is (= (seq v1) (range (inc bfactor-squared))))
    (is (= (seq v2) (range (dec bfactor-squared))))
    ;; This used to fail with core.rrb-vector version 0.0.14 with
    ;; NullPointerException while traversing the seq on clj.  It gets
    ;; a different kind of error with cljs.
    (is (= (seq v3) (range bfactor-squared)))
    ;; This one caused a NullPointerException with version 0.0.14
    (is (= (seq v4) (range bfactor-squared)))))
;; Run the NPE regression for every supported vector kind.
(deftest test-npe-for-1025-then-pop!
  (run! npe-for-1025-then-pop! [:object-array]))
;; The `play` game below was provided in a comment on 2018-Dec-09 for this
;; issue (comment marker restored; the attribution was lost in this copy):
;; Marble-placement game used to stress subvec/catvec: the marble circle is
;; kept in a vector that is "rotated" by splitting it with my-subvec and
;; re-joining with my-catvec, so every round exercises both operations.
;; (Looks like the Advent of Code 2018 day 9 puzzle -- TODO confirm.)
;; Returns a vector of {:round n :marbles v} snapshots, one per round plus
;; a final snapshot after the round counter passes `rounds`.
(defn play [my-vector my-catvec my-subvec players rounds]
  (letfn [;; Rotate the circle so that index split-ndx becomes index 0.
          (swap [marbles split-ndx]
            (my-catvec
             (my-subvec marbles split-ndx)
             (my-subvec marbles 0 split-ndx)))
          ;; Rotate left by n positions (mod size).
          (rotl [marbles n]
            (swap marbles (mod n (count marbles))))
          ;; Rotate right by n positions (mod size).
          (rotr [marbles n]
            (swap marbles (mod (- (count marbles) n) (count marbles))))
          ;; Normal move: rotate left 2 and prepend the new marble.
          ;; Returns [new-circle score] with score 0.
          (place-marble
           [marbles marble]
           (let [marbles (rotl marbles 2)]
             [(my-catvec (my-vector marble) marbles) 0]))
          ;; Scoring move: rotate right 7, remove the marble now at the
          ;; front, and score it together with the marble being placed.
          (remove-marble [marbles marble]
            (let [marbles (rotr marbles 7)
                  first-marble (nth marbles 0)]
              [(my-subvec marbles 1) (+ marble first-marble)]))
          ;; Every 23rd round is a scoring round.
          (play-round [marbles round]
            (if (zero? (mod round 23))
              (remove-marble marbles round)
              (place-marble marbles round)))
          ;; Accumulate a player's score; zero scores leave the map untouched.
          (add-score [scores player round-score]
            (if (zero? round-score)
              scores
              (assoc scores player (+ (get scores player 0) round-score))))]
    (loop [marbles (my-vector 0)
           round 1
           player 1
           scores {}
           ret []]
      (let [[marbles round-score] (play-round marbles round)
            scores (add-score scores player round-score)]
        (if (> round rounds)
          (conj ret {:round round :marbles marbles})
          (recur marbles
                 (inc round)
                 (if (= player players) 1 (inc player))
                 scores
                 (conj ret {:round round :marbles marbles})))))))
;; Reference run of `play` using clojure.core vectors.  `into` serves as the
;; two-argument concatenation function here (play only ever calls my-catvec
;; with two vector arguments).
(defn play-core [& args]
  (apply play clojure.core/vector clojure.core/into clojure.core/subvec args))
;; Run of `play` on RRB vectors, optionally with the checking (validating)
;; variants of catvec/subvec when extra checks are enabled.
(defn play-rrbv [& args]
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
    (apply play fv/vector my-catvec my-subvec args)))
;; Regression test comparing the marble game on core vectors vs RRB vectors.
;; Textual fix only: the `;;` markers on the inline comments were stripped in
;; this copy, leaving bare prose inside the deftest; restored below.  One
;; comment was also truncated mid-sentence and is marked as recovered.
(deftest test-crrbv-20
  ;; This one passes
  (is (= (play-core 10 1128)
         (play-rrbv 10 1128)))
  ;; This ends up with (play-rrbv 10 1129) throwing an exception, with
  ;; core.rrb-vector version 0.0.14
  (is (= (play-core 10 1129)
         (play-rrbv 10 1129)))
  ;; [truncated comment recovered] ... implementation.  The one below
  ;; demonstrated a similar bug in the persistent RRB vector
  ;; implementation in version 0.0.14.
  (let [v1128 (:marbles (last (play-rrbv 10 1128)))
        v1129-pre (-> v1128
                      (fv/subvec 2)
                      (conj 2001))]
    (is (every? integer? (conj v1129-pre 2002)))))
;; Regression test: build the 1128-round marble circle on RRB vectors, pop
;; it down past 1026 elements, and check the survivors are still integers,
;; on both the persistent and transient pop paths.  Textual fix only: the
;; stripped `;;` markers are restored; the comments were also truncated in
;; this copy and are marked as recovered.
(deftest test-crrbv-21
  ;; [truncated comment recovered] ... than the above with core.rrb-vector
  ;; version 0.0.14, and was a ... npe-for-1025-then-pop! but both test
  ;; cases are included for extra ...
  (let [v1128 (:marbles (last (play-rrbv 10 1128)))
        vpop1 (reduce (fn [v i] (pop v))
                      v1128 (range 1026))]
    (is (every? integer? (pop vpop1)))
    ;; [truncated comment recovered] ... version 0.0.14, but the call stack
    ;; went through the transient ...
    (is (every? integer? (persistent! (pop! (transient vpop1)))))))
;; pop/pop! on a 1025-element (32*32+1) regular vector must drop exactly the
;; last element, for both the transient and persistent code paths.
(deftest test-crrbv-22
  (testing "pop! from a regular transient vector with 32*32+1 elements"
    (let [v1025 (into (fv/vector) (range 1025))]
      ;; Comparing a vector against a range is fine: Clojure sequential
      ;; collections compare element-wise.
      (is (= (persistent! (pop! (transient v1025)))
             (range 1024)))))
  (testing "pop from a persistent regular vector with 32*32+1 elements"
    (let [v1025 (into (fv/vector) (range 1025))]
      (is (= (pop v1025)
             (range 1024))))))
;; Josephus-style elimination puzzle over n elves numbered 1..n: each step
;; removes the elf "opposite" the current one (index count/2), moves the
;; current elf to the back, and repeats until one elf remains.  Returns the
;; winning elf's number.  Parameterized on vec/catvec/subvec so the same
;; logic can run on core vectors and RRB vectors.
(defn puzzle-b [n my-vec my-catvec my-subvec]
  (letfn [;; Remove the element at idx by joining the two halves around it.
          (remove-at [arr idx]
            (my-catvec (my-subvec arr 0 idx) (my-subvec arr (inc idx))))
          ;; Initial circle: 1..size.
          (create-arr [size]
            (my-vec (range 1 (inc size))))
          ;; Drop the first element (vector analogue of `rest`).
          (fv-rest [arr]
            (my-subvec arr 1))
          (calculate-opposite [n]
            (int (/ n 2)))
          ;; One elimination step: returns {:ok winner} when one elf is
          ;; left, otherwise the circle for the next round (opposite elf
          ;; removed, current elf moved to the back).
          ;; NOTE(review): builds the tail with dv/cvec rather than the
          ;; injected my-vec -- presumably intentional for this test
          ;; harness, but verify.  Also, opposite-elf is bound but unused.
          (move [elfs]
            (let [lc (count elfs)]
              (if (= 1 lc)
                {:ok (first elfs)}
                (let [current (first elfs)
                      opposite-pos (calculate-opposite lc)
                      _ (assert (> opposite-pos 0))
                      _ (assert (< opposite-pos lc))
                      opposite-elf (nth elfs opposite-pos)
                      other2 (fv-rest (remove-at elfs opposite-pos))]
                  (my-catvec other2 (dv/cvec [current]))))))
          ;; Loop until `move` reports a winner via {:ok ...}.
          (puzzle-b-sample [elfs round]
            (let [elfs2 (move elfs)]
              (if (:ok elfs2)
                (:ok elfs2)
                (recur elfs2 (inc round)))))]
    (puzzle-b-sample (create-arr n) 1)))
;; Reference run of puzzle-b on clojure.core collections; `into` doubles as
;; the two-argument concatenation function.
(defn puzzle-b-core [n]
  (puzzle-b n clojure.core/vec clojure.core/into clojure.core/subvec))
;; Read the vector trie's internal `shift` field (tree height indicator).
;; NOTE(review): `.-shift` is ClojureScript-style field access -- this
;; presumably lives in a cljs (or cljc) source file; confirm.
(defn get-shift [v]
  (.-shift v))
;; One-line human-readable stats for a vector: element count, trie shift,
;; and how full the tree is as a percentage.
(defn vstats [v]
  (let [pct (pd/format "%5.1f" (* 100.0 (dv/fraction-full v)))]
    (str "cnt=" (count v)
         " shift=" (get-shift v)
         " %=" pct)))
;; catvec wrapper that logs per-argument and result stats whenever the
;; concatenation produces a taller tree than any of its arguments (or a
;; shift >= 30), to spot pathological tree growth.  Textual fix only: the
;; three commented-out debug lines below lost their `;;` markers in this
;; copy (one of them sat bare inside the let binding vector, which cannot
;; parse); the markers are restored and the lines kept as the disabled
;; debug code they originally were.
(defn custom-catvec [& args]
  (let [my-catvec (if u/extra-checks? dv/checking-catvec fv/catvec)
        ;;n (count @custom-catvec-data)
        max-arg-shift (apply max (map get-shift args))
        ret (apply my-catvec args)
        ret-shift (get-shift ret)]
    (when (or (>= ret-shift 30)
              (> ret-shift max-arg-shift))
      (doall (map-indexed
              (fn [idx v]
                (println (str "custom-catvec ENTER v" idx " " (vstats v))))
              args))
      (println (str "custom-catvec LEAVE ret " (vstats ret))))
    ;;(swap! custom-catvec-data conj {:args args :ret ret})
    ;;(println "custom-catvec RECRD in index" n "of @custom-catvec-data")
    ret))
;; Run puzzle-b on RRB vectors, routing concatenation through the logging
;; custom-catvec wrapper (and the checking subvec when extra checks are on).
(defn puzzle-b-rrbv [n]
  (let [my-subvec (if u/extra-checks? dv/checking-subvec fv/subvec)]
    (puzzle-b n fv/vec custom-catvec my-subvec)))
;; Regression test: puzzle-b must agree between core and RRB vectors at
;; n=977, and must at least complete (returning an integer) at n=978.
;; Textual fix only: the stripped `;;` comment markers are restored below
;; so the deftest no longer contains bare prose.
(deftest test-crrbv-14
  ;; This one passes
  (u/reset-optimizer-counts!)
  (is (= (puzzle-b-core 977)
         (puzzle-b-rrbv 977)))
  (u/print-optimizer-counts)
  ;; (puzzle-b-rrbv 978) throws
  (u/reset-optimizer-counts!)
  (is (integer? (puzzle-b-rrbv 978)))
  (u/print-optimizer-counts))
;; Element access must yield (1 2 3) through every access style: direct
;; value, `nth`, and invoking the vector as a function -- for persistent and
;; transient core vectors and persistent and transient RRB vectors alike.
(deftest test-crrbv-30
  (let [v1 [1 2 3]
        tv1 (transient [1 2 3])
        fv1 (fv/vector 1 2 3)
        tfv1 (transient (fv/vector 1 2 3))]
    ;; Each pair is [collection-under-test label-for-failure-message].
    (doseq [[v msg] [[v1 ""]
                     [fv1 ""]
                     [(map #(nth v1 %) [0 1 2]) "#(nth v1 %)"]
                     [(map #(v1 %) [0 1 2]) "#(v1 %)"]
                     [(map #(nth tv1 %) [0 1 2]) "#(nth tv1 %)"]
                     [(map #(tv1 %) [0 1 2]) "#(tv1 %)"]
                     [(map #(nth fv1 %) [0 1 2]) "#(nth fv1 %)"]
                     [(map #(fv1 %) [0 1 2]) "#(fv1 %)"]
                     [(map #(nth tfv1 %) [0 1 2]) "#(nth tfv1 %)"]
                     [(map #(tfv1 %) [0 1 2]) "#(tfv1 %)"]]]
      (is (= '(1 2 3) v) (str "Failing case: " msg)))))
|
0088d9f9e224aab959738e0893367f201c29b080e13ee2e7f02f76a84acecca2 | reflectionalist/S9fES | search-path.scm | Scheme 9 from Empty Space , Function Library
; By Nils M Holm, 2010
; Placed in the Public Domain
;
; (search-path string1 string2)  ==>  string | #f
;
; Search the Unix search path STRING2 for the executable STRING1. Return
; the full path of the first executable found or #F if no executable
; named STRING1 can be found in the given path.
; STRING2 is a colon-separated list of paths, e.g.:
;
; "/bin:/usr/bin:/usr/local/bin"
;
; SEARCH-PATH uses ACCESS with mode ACCESS-X-OK to check whether a
; file is executable.
;
; (Example): (search-path "vi" "/bin:/usr/bin") ==> "/usr/bin/vi"
(require-extension sys-unix)
(load-from-library "string-split.scm")
; Walk the colon-separated PATH left to right; the first directory whose
; FILE entry passes the executable check (sys:access with ACCESS-X-OK)
; yields the full pathname, otherwise the result is #f.
(define (search-path file path)
  (let loop ((dirs (string-split #\: path)))
    (if (null? dirs)
        #f
        (let ((candidate (string-append (car dirs) "/" file)))
          (if (sys:access candidate sys:access-x-ok)
              candidate
              (loop (cdr dirs)))))))
| null | https://raw.githubusercontent.com/reflectionalist/S9fES/0ade11593cf35f112e197026886fc819042058dd/ext/search-path.scm | scheme | Placed in the Public Domain
STRING2 is a colon-separated list of paths, e.g.:
"/bin:/usr/bin:/usr/local/bin"
file is executable.
(Example): (search-path "vi" "/bin:/usr/bin") ==> "/usr/bin/vi" | Scheme 9 from Empty Space , Function Library
By , 2010
( search - path ) = = > string | # f
Search the Unix search path STRING2 for the executable STRING1 . Return
the full path of the first executable found or # F if not executable
named STRING1 can be found in the given path .
SEARCH - PATH uses ACCESS with mode ACCESS - X - OK to check whether a
(require-extension sys-unix)
(load-from-library "string-split.scm")
; NOTE(review): duplicate copy of SEARCH-PATH -- this appears to be
; extraction residue repeating the definition earlier in the file.
; Semantics: loop over the colon-separated PATH; when (car path)/file is
; executable (sys:access with ACCESS-X-OK), the cond's => clause returns
; that full path via the identity lambda; #f when the path list runs out.
(define (search-path file path)
  (let loop ((path (string-split #\: path)))
    (cond ((null? path)
           #f)
          ((let ((loc (string-append (car path) "/" file)))
             (and (sys:access loc sys:access-x-ok)
                  loc))
           => (lambda (x) x))
          (else
           (loop (cdr path))))))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.