_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
309dac7745e9fb48b781aae2bba8dc0e0654fe85608e5b078633fa6362850b37 | nandor/llir-ocaml | t06ok.ml | (* TEST
flags = " -w a "
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
*)
(* OK (t = <m:t>) *)
module rec A : sig type t = <m:B.t> end = struct type t = <m:B.t> end
and B : sig type t = A.t end = struct type t = A.t end;;
| null | https://raw.githubusercontent.com/nandor/llir-ocaml/9c019f15c444e30c825b1673cbe827e0497868fe/testsuite/tests/typing-recmod/t06ok.ml | ocaml | TEST
flags = " -w a "
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
OK (t = <m:t>) |
module rec A : sig type t = <m:B.t> end = struct type t = <m:B.t> end
and B : sig type t = A.t end = struct type t = A.t end;;
|
6bf2a45a8a87920a6b921fb3b943b9f620efdf7b09d0e85e6161132fa7c702d1 | RobYang1024/OCaMOSS | main1.ml | open Ast
(* [is_value e] is whether [e] is a syntactic value *)
let is_value : expr -> bool = function
| I _ | B _ -> true
| Bop _ | Var _ | Let _ | If _ -> false
(* [subst e v x] is e{v/x}, that is, [e] with [v]
* substituted for [x]. *)
let rec subst e v x = match e with
| Var y -> if x=y then v else e
| B b -> B b (* NEW *)
| I n -> I n
| Bop(op,el,er) -> Bop(op, subst el v x, subst er v x) (* NEW *)
| Let(y,ebind,ebody) ->
let ebind' = subst ebind v x in
if x=y
then Let(y, ebind', ebody)
else Let(y, ebind', subst ebody v x)
| If(eguard,ethen,eelse) -> (* NEW *)
If(subst eguard v x, subst ethen v x, subst eelse v x)
(* A single step of evaluation. *)
let rec step = function
| I _ | B _ -> failwith "Does not step"
| Var _ -> failwith "Unbound variable"
| Bop(Plus, I n1, I n2) -> I (n1+n2) (* NEW *)
| Bop(Mult, I n1, I n2) -> I (n1*n2) (* NEW *)
| Bop(Leq, I n1, I n2) -> B (n1<=n2) (* NEW *)
| Bop(op, I n1, e2) -> Bop(op, I n1, step e2) (* NEW *)
| Bop(op, e1, e2) -> Bop(op, step e1, e2)
| Let(x,I n,e2) -> subst e2 (I n) x
| Let(x,B b,e2) -> subst e2 (B b) x
| Let(x,e1,e2) -> Let(x,step e1, e2)
| If(B true, e2, _) -> e2
| If(B false, _, e3) -> e3
| If(e1,e2,e3) -> If(step e1, e2, e3)
(* [eval e] is the [e -->* v] judgement. That is,
* keep applying [step] until a value is produced. *)
let rec eval : expr -> expr = fun e ->
if is_value e then e
else eval (step e)
Parse a string Io an ast
let parse s =
let lexbuf = Lexing.from_string s in
let ast = Parser.prog Lexer.read lexbuf in
ast
(* Extract a value from an ast node.
Raises Failure if the argument is a node containing a value. *)
let extract_value = function
| I i -> VI i
| B b -> VB b (* NEW *)
| _ -> failwith "Not a value"
(* Interpret an expression *)
let interp e =
e |> parse |> eval |> extract_value
| null | https://raw.githubusercontent.com/RobYang1024/OCaMOSS/090d31e8c4498ac3b9b74768fe011b924b2105ab/tests/test2/main1.ml | ocaml | [is_value e] is whether [e] is a syntactic value
[subst e v x] is e{v/x}, that is, [e] with [v]
* substituted for [x].
NEW
NEW
NEW
A single step of evaluation.
NEW
NEW
NEW
NEW
[eval e] is the [e -->* v] judgement. That is,
* keep applying [step] until a value is produced.
Extract a value from an ast node.
Raises Failure if the argument is a node containing a value.
NEW
Interpret an expression | open Ast
let is_value : expr -> bool = function
| I _ | B _ -> true
| Bop _ | Var _ | Let _ | If _ -> false
let rec subst e v x = match e with
| Var y -> if x=y then v else e
| I n -> I n
| Let(y,ebind,ebody) ->
let ebind' = subst ebind v x in
if x=y
then Let(y, ebind', ebody)
else Let(y, ebind', subst ebody v x)
If(subst eguard v x, subst ethen v x, subst eelse v x)
let rec step = function
| I _ | B _ -> failwith "Does not step"
| Var _ -> failwith "Unbound variable"
| Bop(op, e1, e2) -> Bop(op, step e1, e2)
| Let(x,I n,e2) -> subst e2 (I n) x
| Let(x,B b,e2) -> subst e2 (B b) x
| Let(x,e1,e2) -> Let(x,step e1, e2)
| If(B true, e2, _) -> e2
| If(B false, _, e3) -> e3
| If(e1,e2,e3) -> If(step e1, e2, e3)
let rec eval : expr -> expr = fun e ->
if is_value e then e
else eval (step e)
Parse a string Io an ast
let parse s =
let lexbuf = Lexing.from_string s in
let ast = Parser.prog Lexer.read lexbuf in
ast
let extract_value = function
| I i -> VI i
| _ -> failwith "Not a value"
let interp e =
e |> parse |> eval |> extract_value
|
b677177a27eac071649245b15add5d2cbb6420451bdf59d358f547db26f78c2b | lingnand/VIMonad | Square.hs | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Square
Copyright : ( c ) < >
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : none
-- Stability : unstable
-- Portability : unportable
--
-- A layout that splits the screen into a square area and the rest of the
-- screen.
-- This is probably only ever useful in combination with
" XMonad . Layout . Combo " .
It sticks one window in a square region , and makes the rest
-- of the windows live with what's left (in a full-screen sense).
--
-----------------------------------------------------------------------------
module XMonad.Layout.Square (
-- * Usage
-- $usage
Square(..) ) where
import XMonad
import XMonad.StackSet ( integrate )
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@ file:
--
> import XMonad . Layout . Square
--
An example layout using square together with " XMonad . Layout . Combo "
-- to make the very last area square:
--
> , combo ( combo ( mirror $ twoPane 0.03 0.85),1 ) ] ( twoPane 0.03 0.5 ) )
> [ ( twoPane 0.03 0.2,1),(combo [ ( twoPane 0.03 0.8,1),(square,1 ) ]
-- > [(tabbed,3),(tabbed,30),(tabbed,1),(tabbed,1)]
-- For detailed instructions on editing your key bindings, see
" XMonad . Doc . Extending#Editing_key_bindings " .
data Square a = Square deriving ( Read, Show )
instance LayoutClass Square a where
pureLayout Square r s = arrange (integrate s)
where arrange ws@(_:_) = map (\w->(w,rest)) (init ws) ++ [(last ws,sq)]
arrange [] = [] -- actually, this is an impossible case
(rest, sq) = splitSquare r
splitSquare :: Rectangle -> (Rectangle, Rectangle)
splitSquare (Rectangle x y w h)
| w > h = (Rectangle x y (w - h) h, Rectangle (x+fromIntegral (w-h)) y h h)
| otherwise = (Rectangle x y w (h-w), Rectangle x (y+fromIntegral (h-w)) w w)
| null | https://raw.githubusercontent.com/lingnand/VIMonad/048e419fc4ef57a5235dbaeef8890faf6956b574/XMonadContrib/XMonad/Layout/Square.hs | haskell | ---------------------------------------------------------------------------
|
Module : XMonad.Layout.Square
License : BSD3-style (see LICENSE)
Maintainer : none
Stability : unstable
Portability : unportable
A layout that splits the screen into a square area and the rest of the
screen.
This is probably only ever useful in combination with
of the windows live with what's left (in a full-screen sense).
---------------------------------------------------------------------------
* Usage
$usage
$usage
You can use this module with the following in your @~\/.xmonad\/xmonad.hs@ file:
to make the very last area square:
> [(tabbed,3),(tabbed,30),(tabbed,1),(tabbed,1)]
For detailed instructions on editing your key bindings, see
actually, this is an impossible case | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
Copyright : ( c ) < >
" XMonad . Layout . Combo " .
It sticks one window in a square region , and makes the rest
module XMonad.Layout.Square (
Square(..) ) where
import XMonad
import XMonad.StackSet ( integrate )
> import XMonad . Layout . Square
An example layout using square together with " XMonad . Layout . Combo "
> , combo ( combo ( mirror $ twoPane 0.03 0.85),1 ) ] ( twoPane 0.03 0.5 ) )
> [ ( twoPane 0.03 0.2,1),(combo [ ( twoPane 0.03 0.8,1),(square,1 ) ]
" XMonad . Doc . Extending#Editing_key_bindings " .
data Square a = Square deriving ( Read, Show )
instance LayoutClass Square a where
pureLayout Square r s = arrange (integrate s)
where arrange ws@(_:_) = map (\w->(w,rest)) (init ws) ++ [(last ws,sq)]
(rest, sq) = splitSquare r
splitSquare :: Rectangle -> (Rectangle, Rectangle)
splitSquare (Rectangle x y w h)
| w > h = (Rectangle x y (w - h) h, Rectangle (x+fromIntegral (w-h)) y h h)
| otherwise = (Rectangle x y w (h-w), Rectangle x (y+fromIntegral (h-w)) w w)
|
eed4a808a6ae6de5b590c4823f0dca3fcb37d38940e2383694d390e3f54e831e | c4-project/c4f | abstract_prim.ml | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Core (* for Fqueue *)
open Import
let constant : Ast_basic.Constant.t -> Fir.Constant.t Or_error.t = function
| Integer k -> Ok (Fir.Constant.int k)
| Char _ | Float _ -> Or_error.error_string "Unsupported constant type"
let defined_types : (Common.C_id.t, Fir.Type.Basic.t) List.Assoc.t Lazy.t =
lazy
Fir.Type.Basic.
[ (Common.C_id.of_string "atomic_bool", bool ~is_atomic:true ())
; (Common.C_id.of_string "atomic_int", int ~is_atomic:true ())
; (Common.C_id.of_string "bool", bool ()) ]
let defined_type_to_basic (t : Common.C_id.t) : Fir.Type.Basic.t Or_error.t =
t
|> List.Assoc.find ~equal:Common.C_id.equal (Lazy.force defined_types)
|> Result.of_option
~error:
(Error.create_s
[%message "Unknown defined type" ~got:(t : Common.C_id.t)] )
let partition_qualifiers :
[> Ast.Decl_spec.t] list
-> [> Ast.Type_spec.t] list
* [> Ast_basic.Storage_class_spec.t | Ast_basic.Type_qual.t] list =
List.partition_map ~f:(function
| #Ast.Type_spec.t as ts -> First ts
| #Ast_basic.Storage_class_spec.t as ss -> Second ss
| #Ast_basic.Type_qual.t as qs -> Second qs )
let type_specs_to_basic (specs : [> Ast.Type_spec.t] list) :
Fir.Type.Basic.t Or_error.t =
Or_error.Let_syntax.(
match%bind Tx.List.one specs with
| `Int -> return (Fir.Type.Basic.int ())
| `Defined_type t -> defined_type_to_basic t
| #Ast.Type_spec.t as spec ->
Or_error.error_s
[%message
"This type isn't supported (yet)" ~got:(spec : Ast.Type_spec.t)])
let qualifier_to_flags :
[> Ast_basic.Storage_class_spec.t | Ast_basic.Type_qual.t]
-> bool Or_error.t = function
| `Volatile -> Ok true
| #Ast_basic.Type_qual.t as qual ->
Or_error.error_s
[%message
"This type qualifier isn't supported (yet)"
~got:(qual : Ast_basic.Type_qual.t)]
| #Ast_basic.Storage_class_spec.t as spec ->
Or_error.error_s
[%message
"This storage-class specifier isn't supported (yet)"
~got:(spec : Ast_basic.Storage_class_spec.t)]
let qualifiers_to_flags
(quals : [> Ast_basic.Storage_class_spec.t | Ast_basic.Type_qual.t] list)
: bool Or_error.t =
Or_error.Let_syntax.(
TODO(@MattWindsor91 ): other qualifiers ?
let%map vs = Tx.Or_error.combine_map quals ~f:qualifier_to_flags in
List.exists vs ~f:Fn.id)
let qualifiers_to_type (quals : [> Ast.Decl_spec.t] list) ~(is_pointer : bool)
: Fir.Type.t Or_error.t =
let tspecs, rquals = partition_qualifiers quals in
Or_error.Let_syntax.(
let%map basic = type_specs_to_basic tspecs
and is_volatile = qualifiers_to_flags rquals in
Fir.Type.make basic ~is_pointer ~is_volatile)
let declarator_to_id :
Ast.Declarator.t -> (C4f_common.C_id.t * bool) Or_error.t = function
| {pointer= Some [[]]; direct= Id id} -> Ok (id, true)
| {pointer= Some _; _} as decl ->
Or_error.error_s
[%message
"Complex pointers not supported yet"
~declarator:(decl : Ast.Declarator.t)]
| {pointer= None; direct= Id id} -> Ok (id, false)
| x ->
Or_error.error_s
[%message
"Unsupported direct declarator"
~got:(x.direct : Ast.Direct_declarator.t)]
let identifier_to_constant (id : Common.C_id.t) : Fir.Constant.t option =
match Common.C_id.to_string id with
| "true" -> Some Fir.Constant.truth
| "false" -> Some Fir.Constant.falsehood
| _ -> None
let not_constant (x : Ast.Expr.t) : Fir.Constant.t Or_error.t =
Or_error.error_s
[%message "Expression not supported (must be constant)" (x : Ast.Expr.t)]
let value_of_initialiser : Ast.Initialiser.t -> Fir.Constant.t Or_error.t =
function
| Assign (Constant v) ->
TODO(@MattWindsor91 ): Boolean initialisers are n't covered by this
case , as C99 Boolean ' constant 's are identifiers .
case, as C99 Boolean 'constant's are identifiers. *)
constant v
| Assign (Identifier k) -> (
match identifier_to_constant k with
| Some k -> Ok k
| None -> not_constant (Identifier k) )
| Assign x -> not_constant x
| List _ -> Or_error.error_string "List initialisers not supported"
let decl (d : Ast.Decl.t) : Fir.Initialiser.t Common.C_named.t Or_error.t =
Or_error.Let_syntax.(
let%bind {declarator; initialiser} = Tx.List.one d.declarator in
let%bind name, is_pointer = declarator_to_id declarator in
let%bind ty = qualifiers_to_type d.qualifiers ~is_pointer in
Ideally , we 'd forbid empty initialisers entirely here , but
occasionally outputs them ( eg for compare - exchanges ) . This is a
compromise that slightly changes semantics .
occasionally outputs them (eg for compare-exchanges). This is a
compromise that slightly changes semantics. *)
let%map value =
Option.value_map initialiser ~f:value_of_initialiser
~default:(Ok (Fir.Constant.zero_of_type ty))
in
Common.C_named.make ~name {Fir.Initialiser.ty; value})
let param_decl : Ast.Param_decl.t -> Fir.Type.t Common.C_named.t Or_error.t =
function
| {declarator= `Abstract _; _} ->
Or_error.error_string "Abstract parameter declarators not supported"
| {qualifiers; declarator= `Concrete declarator} ->
Or_error.Let_syntax.(
let%bind name, is_pointer = declarator_to_id declarator in
let%map ty = qualifiers_to_type qualifiers ~is_pointer in
Common.C_named.make ty ~name)
let rec expr_to_lvalue : Ast.Expr.t -> Fir.Lvalue.t Or_error.t = function
| Identifier id -> Ok (Accessor.construct Fir.Lvalue.variable id)
| Brackets expr -> expr_to_lvalue expr
| Prefix (`Deref, expr) ->
Or_error.(
expr |> expr_to_lvalue >>| Accessor.construct Fir.Lvalue.deref)
| ( Prefix _ | Postfix _ | Binary _ | Ternary _ | Cast _ | Call _
| Subscript _ | Field _ | Sizeof_type _ | String _ | Constant _ ) as e ->
Or_error.error_s
[%message "Expected an lvalue here" ~got:(e : Ast.Expr.t)]
let rec expr_to_address : Ast.Expr.t -> Fir.Address.t Or_error.t = function
| Prefix (`Ref, expr) ->
Or_error.(
expr |> expr_to_address >>| Accessor.construct Fir.Address.ref)
| expr ->
Or_error.(
expr |> expr_to_lvalue >>| Accessor.construct Fir.Address.lvalue)
let lvalue_to_identifier (lv : Fir.Lvalue.t) : C4f_common.C_id.t Or_error.t =
if Fir.Lvalue.is_deref lv then
Or_error.error_s
[%message "Expected identifier" ~got:(lv : Fir.Lvalue.t)]
else Ok lv.@(Fir.Lvalue.variable_of)
let expr_to_identifier (expr : Ast.Expr.t) : C4f_common.C_id.t Or_error.t =
Or_error.(expr |> expr_to_lvalue >>= lvalue_to_identifier)
let expr_to_memory_order (expr : Ast.Expr.t) : Fir.Mem_order.t Or_error.t =
Or_error.Let_syntax.(
let%bind id = expr_to_identifier expr in
id |> Common.C_id.to_string |> Fir.Mem_order.of_string_option
|> Result.of_option
~error:
(Error.create_s
[%message
"Unsupported memory order" ~got:(id : C4f_common.C_id.t)] ))
let sift_decls (maybe_decl_list : ([> `Decl of 'd] as 'a) list) :
('d list * 'a list) Or_error.t =
Or_error.(
Tx.List.With_errors.fold_m maybe_decl_list
~init:(Fqueue.empty, Fqueue.empty) ~f:(fun (decls, rest) -> function
| `Decl d ->
if Fqueue.is_empty rest then return (Fqueue.enqueue decls d, rest)
else error_string "Declarations must go before code."
| item -> return (decls, Fqueue.enqueue rest item) )
>>| fun (decls, rest) -> (Fqueue.to_list decls, Fqueue.to_list rest))
| null | https://raw.githubusercontent.com/c4-project/c4f/8939477732861789abc807c8c1532a302b2848a5/lib/litmus_c/src/abstract_prim.ml | ocaml | for Fqueue | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Import
let constant : Ast_basic.Constant.t -> Fir.Constant.t Or_error.t = function
| Integer k -> Ok (Fir.Constant.int k)
| Char _ | Float _ -> Or_error.error_string "Unsupported constant type"
let defined_types : (Common.C_id.t, Fir.Type.Basic.t) List.Assoc.t Lazy.t =
lazy
Fir.Type.Basic.
[ (Common.C_id.of_string "atomic_bool", bool ~is_atomic:true ())
; (Common.C_id.of_string "atomic_int", int ~is_atomic:true ())
; (Common.C_id.of_string "bool", bool ()) ]
let defined_type_to_basic (t : Common.C_id.t) : Fir.Type.Basic.t Or_error.t =
t
|> List.Assoc.find ~equal:Common.C_id.equal (Lazy.force defined_types)
|> Result.of_option
~error:
(Error.create_s
[%message "Unknown defined type" ~got:(t : Common.C_id.t)] )
let partition_qualifiers :
[> Ast.Decl_spec.t] list
-> [> Ast.Type_spec.t] list
* [> Ast_basic.Storage_class_spec.t | Ast_basic.Type_qual.t] list =
List.partition_map ~f:(function
| #Ast.Type_spec.t as ts -> First ts
| #Ast_basic.Storage_class_spec.t as ss -> Second ss
| #Ast_basic.Type_qual.t as qs -> Second qs )
let type_specs_to_basic (specs : [> Ast.Type_spec.t] list) :
Fir.Type.Basic.t Or_error.t =
Or_error.Let_syntax.(
match%bind Tx.List.one specs with
| `Int -> return (Fir.Type.Basic.int ())
| `Defined_type t -> defined_type_to_basic t
| #Ast.Type_spec.t as spec ->
Or_error.error_s
[%message
"This type isn't supported (yet)" ~got:(spec : Ast.Type_spec.t)])
let qualifier_to_flags :
[> Ast_basic.Storage_class_spec.t | Ast_basic.Type_qual.t]
-> bool Or_error.t = function
| `Volatile -> Ok true
| #Ast_basic.Type_qual.t as qual ->
Or_error.error_s
[%message
"This type qualifier isn't supported (yet)"
~got:(qual : Ast_basic.Type_qual.t)]
| #Ast_basic.Storage_class_spec.t as spec ->
Or_error.error_s
[%message
"This storage-class specifier isn't supported (yet)"
~got:(spec : Ast_basic.Storage_class_spec.t)]
let qualifiers_to_flags
(quals : [> Ast_basic.Storage_class_spec.t | Ast_basic.Type_qual.t] list)
: bool Or_error.t =
Or_error.Let_syntax.(
TODO(@MattWindsor91 ): other qualifiers ?
let%map vs = Tx.Or_error.combine_map quals ~f:qualifier_to_flags in
List.exists vs ~f:Fn.id)
let qualifiers_to_type (quals : [> Ast.Decl_spec.t] list) ~(is_pointer : bool)
: Fir.Type.t Or_error.t =
let tspecs, rquals = partition_qualifiers quals in
Or_error.Let_syntax.(
let%map basic = type_specs_to_basic tspecs
and is_volatile = qualifiers_to_flags rquals in
Fir.Type.make basic ~is_pointer ~is_volatile)
let declarator_to_id :
Ast.Declarator.t -> (C4f_common.C_id.t * bool) Or_error.t = function
| {pointer= Some [[]]; direct= Id id} -> Ok (id, true)
| {pointer= Some _; _} as decl ->
Or_error.error_s
[%message
"Complex pointers not supported yet"
~declarator:(decl : Ast.Declarator.t)]
| {pointer= None; direct= Id id} -> Ok (id, false)
| x ->
Or_error.error_s
[%message
"Unsupported direct declarator"
~got:(x.direct : Ast.Direct_declarator.t)]
let identifier_to_constant (id : Common.C_id.t) : Fir.Constant.t option =
match Common.C_id.to_string id with
| "true" -> Some Fir.Constant.truth
| "false" -> Some Fir.Constant.falsehood
| _ -> None
let not_constant (x : Ast.Expr.t) : Fir.Constant.t Or_error.t =
Or_error.error_s
[%message "Expression not supported (must be constant)" (x : Ast.Expr.t)]
let value_of_initialiser : Ast.Initialiser.t -> Fir.Constant.t Or_error.t =
function
| Assign (Constant v) ->
TODO(@MattWindsor91 ): Boolean initialisers are n't covered by this
case , as C99 Boolean ' constant 's are identifiers .
case, as C99 Boolean 'constant's are identifiers. *)
constant v
| Assign (Identifier k) -> (
match identifier_to_constant k with
| Some k -> Ok k
| None -> not_constant (Identifier k) )
| Assign x -> not_constant x
| List _ -> Or_error.error_string "List initialisers not supported"
let decl (d : Ast.Decl.t) : Fir.Initialiser.t Common.C_named.t Or_error.t =
Or_error.Let_syntax.(
let%bind {declarator; initialiser} = Tx.List.one d.declarator in
let%bind name, is_pointer = declarator_to_id declarator in
let%bind ty = qualifiers_to_type d.qualifiers ~is_pointer in
Ideally , we 'd forbid empty initialisers entirely here , but
occasionally outputs them ( eg for compare - exchanges ) . This is a
compromise that slightly changes semantics .
occasionally outputs them (eg for compare-exchanges). This is a
compromise that slightly changes semantics. *)
let%map value =
Option.value_map initialiser ~f:value_of_initialiser
~default:(Ok (Fir.Constant.zero_of_type ty))
in
Common.C_named.make ~name {Fir.Initialiser.ty; value})
let param_decl : Ast.Param_decl.t -> Fir.Type.t Common.C_named.t Or_error.t =
function
| {declarator= `Abstract _; _} ->
Or_error.error_string "Abstract parameter declarators not supported"
| {qualifiers; declarator= `Concrete declarator} ->
Or_error.Let_syntax.(
let%bind name, is_pointer = declarator_to_id declarator in
let%map ty = qualifiers_to_type qualifiers ~is_pointer in
Common.C_named.make ty ~name)
let rec expr_to_lvalue : Ast.Expr.t -> Fir.Lvalue.t Or_error.t = function
| Identifier id -> Ok (Accessor.construct Fir.Lvalue.variable id)
| Brackets expr -> expr_to_lvalue expr
| Prefix (`Deref, expr) ->
Or_error.(
expr |> expr_to_lvalue >>| Accessor.construct Fir.Lvalue.deref)
| ( Prefix _ | Postfix _ | Binary _ | Ternary _ | Cast _ | Call _
| Subscript _ | Field _ | Sizeof_type _ | String _ | Constant _ ) as e ->
Or_error.error_s
[%message "Expected an lvalue here" ~got:(e : Ast.Expr.t)]
let rec expr_to_address : Ast.Expr.t -> Fir.Address.t Or_error.t = function
| Prefix (`Ref, expr) ->
Or_error.(
expr |> expr_to_address >>| Accessor.construct Fir.Address.ref)
| expr ->
Or_error.(
expr |> expr_to_lvalue >>| Accessor.construct Fir.Address.lvalue)
let lvalue_to_identifier (lv : Fir.Lvalue.t) : C4f_common.C_id.t Or_error.t =
if Fir.Lvalue.is_deref lv then
Or_error.error_s
[%message "Expected identifier" ~got:(lv : Fir.Lvalue.t)]
else Ok lv.@(Fir.Lvalue.variable_of)
let expr_to_identifier (expr : Ast.Expr.t) : C4f_common.C_id.t Or_error.t =
Or_error.(expr |> expr_to_lvalue >>= lvalue_to_identifier)
let expr_to_memory_order (expr : Ast.Expr.t) : Fir.Mem_order.t Or_error.t =
Or_error.Let_syntax.(
let%bind id = expr_to_identifier expr in
id |> Common.C_id.to_string |> Fir.Mem_order.of_string_option
|> Result.of_option
~error:
(Error.create_s
[%message
"Unsupported memory order" ~got:(id : C4f_common.C_id.t)] ))
let sift_decls (maybe_decl_list : ([> `Decl of 'd] as 'a) list) :
('d list * 'a list) Or_error.t =
Or_error.(
Tx.List.With_errors.fold_m maybe_decl_list
~init:(Fqueue.empty, Fqueue.empty) ~f:(fun (decls, rest) -> function
| `Decl d ->
if Fqueue.is_empty rest then return (Fqueue.enqueue decls d, rest)
else error_string "Declarations must go before code."
| item -> return (decls, Fqueue.enqueue rest item) )
>>| fun (decls, rest) -> (Fqueue.to_list decls, Fqueue.to_list rest))
|
1299f64b27cfc8b3b1c9b3f4fd27bc67e6c792db8016d3aa27f339dbae3f363d | mage2tv/magento-cache-clean | static_content.cljs | (ns magento.static-content
(:require [file.system :as fs]
[log.log :as log]))
(defn static-content-base-dir [base-dir]
(str base-dir "pub/static/"))
(defn static-content-area-dir [base-dir area]
(str (static-content-base-dir base-dir) area))
(defn view-preprocessed-base-dir [base-dir]
(str base-dir "var/view_preprocessed/pub/static/"))
(defn view-preprocessed-area-dir [base-dir area]
(str (view-preprocessed-base-dir base-dir) area))
(defn static-content-theme-locale-dirs [base-dir area]
(filter fs/dir? (fs/ls-dive (static-content-area-dir base-dir area) 2)))
(defn view-preprocessed-theme-locale-dirs [base-dir area]
(filter fs/dir? (fs/ls-dive (view-preprocessed-area-dir base-dir area) 2)))
(defn- static-files-in-locale-dir [base-dir area file]
(let [dirs (into (static-content-theme-locale-dirs base-dir area)
(view-preprocessed-theme-locale-dirs base-dir area))]
(filter #(= file (fs/basename %)) (mapcat fs/ls dirs))))
(defn js-translation-files [base-dir area]
(static-files-in-locale-dir base-dir area "js-translation.json"))
(defn requirejs-config-files [base-dir area]
(static-files-in-locale-dir base-dir area "requirejs-config.js"))
(defn- rm-files [dir]
(when (fs/dir? dir)
(fs/rm-files-recursive dir)
(log/debug "Removed files in" dir)))
(defn clean [base-dir area]
(log/notice "Removing static content area" area)
(rm-files (static-content-area-dir base-dir area))
(rm-files (view-preprocessed-area-dir base-dir area))
(rm-files (view-preprocessed-area-dir base-dir "app"))
(rm-files (view-preprocessed-area-dir base-dir "vendor")))
| null | https://raw.githubusercontent.com/mage2tv/magento-cache-clean/67d4ce3f06cb42eccceff436580cdfe0ddfc5deb/src/magento/static_content.cljs | clojure | (ns magento.static-content
(:require [file.system :as fs]
[log.log :as log]))
(defn static-content-base-dir [base-dir]
(str base-dir "pub/static/"))
(defn static-content-area-dir [base-dir area]
(str (static-content-base-dir base-dir) area))
(defn view-preprocessed-base-dir [base-dir]
(str base-dir "var/view_preprocessed/pub/static/"))
(defn view-preprocessed-area-dir [base-dir area]
(str (view-preprocessed-base-dir base-dir) area))
(defn static-content-theme-locale-dirs [base-dir area]
(filter fs/dir? (fs/ls-dive (static-content-area-dir base-dir area) 2)))
(defn view-preprocessed-theme-locale-dirs [base-dir area]
(filter fs/dir? (fs/ls-dive (view-preprocessed-area-dir base-dir area) 2)))
(defn- static-files-in-locale-dir [base-dir area file]
(let [dirs (into (static-content-theme-locale-dirs base-dir area)
(view-preprocessed-theme-locale-dirs base-dir area))]
(filter #(= file (fs/basename %)) (mapcat fs/ls dirs))))
(defn js-translation-files [base-dir area]
(static-files-in-locale-dir base-dir area "js-translation.json"))
(defn requirejs-config-files [base-dir area]
(static-files-in-locale-dir base-dir area "requirejs-config.js"))
(defn- rm-files [dir]
(when (fs/dir? dir)
(fs/rm-files-recursive dir)
(log/debug "Removed files in" dir)))
(defn clean [base-dir area]
(log/notice "Removing static content area" area)
(rm-files (static-content-area-dir base-dir area))
(rm-files (view-preprocessed-area-dir base-dir area))
(rm-files (view-preprocessed-area-dir base-dir "app"))
(rm-files (view-preprocessed-area-dir base-dir "vendor")))
| |
c5064cc745b83361f0f2da11b1aa4137bec95fb926eba1ac5ebec8563171ef80 | threatgrid/ctia | static_auth_anonymous_test.clj | (ns ctia.http.handler.static-auth-anonymous-test
(:require [ctia.test-helpers.core :as helpers :refer [GET with-properties]]
[ctia.test-helpers.es :as es-helpers]
[clojure.test :refer [deftest is use-fixtures]]
[schema.test :refer [validate-schemas]]))
(defn fixture-anonymous-readonly-access
[t]
(with-properties
["ctia.auth.static.readonly-for-anonymous" true]
(t)))
(use-fixtures :each
validate-schemas
es-helpers/fixture-properties:es-store
(helpers/fixture-properties:static-auth "kitara" "tearbending")
fixture-anonymous-readonly-access
helpers/fixture-ctia)
(deftest anonymous-readonly-access-test
(let [app (helpers/get-current-app)
{status :status}
(GET app
(str "ctia/judgement/search")
:query-params {:query "*"}
:headers {"Authorization" "bloodbending"})
_ (is (= 200 status))
{status :status}
(GET app
(str "ctia/judgement/search")
:query-params {:query "*"})]
(is (= 200 status))))
| null | https://raw.githubusercontent.com/threatgrid/ctia/ae7d940e323a99dde747e37e2be2898d7705dcaa/test/ctia/http/handler/static_auth_anonymous_test.clj | clojure | (ns ctia.http.handler.static-auth-anonymous-test
(:require [ctia.test-helpers.core :as helpers :refer [GET with-properties]]
[ctia.test-helpers.es :as es-helpers]
[clojure.test :refer [deftest is use-fixtures]]
[schema.test :refer [validate-schemas]]))
(defn fixture-anonymous-readonly-access
[t]
(with-properties
["ctia.auth.static.readonly-for-anonymous" true]
(t)))
(use-fixtures :each
validate-schemas
es-helpers/fixture-properties:es-store
(helpers/fixture-properties:static-auth "kitara" "tearbending")
fixture-anonymous-readonly-access
helpers/fixture-ctia)
(deftest anonymous-readonly-access-test
(let [app (helpers/get-current-app)
{status :status}
(GET app
(str "ctia/judgement/search")
:query-params {:query "*"}
:headers {"Authorization" "bloodbending"})
_ (is (= 200 status))
{status :status}
(GET app
(str "ctia/judgement/search")
:query-params {:query "*"})]
(is (= 200 status))))
| |
f46e1b1d41b88fc7983805fc36c729413cd4fbecf285e2b7faba6fa263b1df53 | expipiplus1/vulkan | Promoted_From_VK_KHR_shader_float16_int8.hs | {-# language CPP #-}
No documentation found for Chapter " Promoted_From_VK_KHR_shader_float16_int8 "
module Vulkan.Core12.Promoted_From_VK_KHR_shader_float16_int8 ( PhysicalDeviceShaderFloat16Int8Features(..)
, StructureType(..)
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES))
import Vulkan.Core10.Enums.StructureType (StructureType(..))
-- | VkPhysicalDeviceShaderFloat16Int8Features - Structure describing
features supported by VK_KHR_shader_float16_int8
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceShaderFloat16Int8Features' structure is included
-- in the @pNext@ chain of the
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 '
-- structure passed to
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2 ' ,
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceShaderFloat16Int8Features' /can/ also be used
in the @pNext@ chain of ' Vulkan . Core10.Device . DeviceCreateInfo ' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_KHR_shader_float16_int8 VK_KHR_shader_float16_int8>,
< -extensions/html/vkspec.html#VK_VERSION_1_2 VK_VERSION_1_2 > ,
' Vulkan . Core10.FundamentalTypes . Bool32 ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
data PhysicalDeviceShaderFloat16Int8Features = PhysicalDeviceShaderFloat16Int8Features
{ -- | #extension-features-shaderFloat16# @shaderFloat16@ indicates whether
16 - bit floats ( halfs ) are supported in shader code . This also indicates
whether shader modules declare the @Float16@ capability . However ,
-- this only enables a subset of the storage classes that SPIR-V allows for
the @Float16@ SPIR - V capability : Declaring and using 16 - bit floats in
the @Private@ , @Workgroup@ ( for non - Block variables ) , and @Function@
-- storage classes is enabled, while declaring them in the interface
storage classes ( e.g. , @UniformConstant@ , , @StorageBuffer@ ,
@Input@ , @Output@ , and @PushConstant@ ) is not enabled .
shaderFloat16 :: Bool
| # extension - features - shaderInt8 # @shaderInt8@ indicates whether 8 - bit
-- integers (signed and unsigned) are supported in shader code. This also
indicates whether shader modules declare the capability .
-- However, this only enables a subset of the storage classes that SPIR-V
allows for the SPIR - V capability : Declaring and using 8 - bit
-- integers in the @Private@, @Workgroup@ (for non-Block variables), and
-- @Function@ storage classes is enabled, while declaring them in the
interface storage classes ( e.g. , @UniformConstant@ , ,
@StorageBuffer@ , @Input@ , @Output@ , and @PushConstant@ ) is not enabled .
shaderInt8 :: Bool
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceShaderFloat16Int8Features)
#endif
deriving instance Show PhysicalDeviceShaderFloat16Int8Features
instance ToCStruct PhysicalDeviceShaderFloat16Int8Features where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceShaderFloat16Int8Features{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (shaderFloat16))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (shaderInt8))
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceShaderFloat16Int8Features where
peekCStruct p = do
shaderFloat16 <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
shaderInt8 <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
pure $ PhysicalDeviceShaderFloat16Int8Features
(bool32ToBool shaderFloat16) (bool32ToBool shaderInt8)
instance Storable PhysicalDeviceShaderFloat16Int8Features where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceShaderFloat16Int8Features where
zero = PhysicalDeviceShaderFloat16Int8Features
zero
zero
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/b1e33d1031779b4740c279c68879d05aee371659/src/Vulkan/Core12/Promoted_From_VK_KHR_shader_float16_int8.hs | haskell | # language CPP #
| VkPhysicalDeviceShaderFloat16Int8Features - Structure describing
= Members
This structure describes the following features:
= Description
If the 'PhysicalDeviceShaderFloat16Int8Features' structure is included
in the @pNext@ chain of the
structure passed to
it is filled in to indicate whether each corresponding feature is
supported. 'PhysicalDeviceShaderFloat16Int8Features' /can/ also be used
selectively enable these features.
== Valid Usage (Implicit)
= See Also
<-extensions/html/vkspec.html#VK_KHR_shader_float16_int8 VK_KHR_shader_float16_int8>,
| #extension-features-shaderFloat16# @shaderFloat16@ indicates whether
this only enables a subset of the storage classes that SPIR-V allows for
storage classes is enabled, while declaring them in the interface
integers (signed and unsigned) are supported in shader code. This also
However, this only enables a subset of the storage classes that SPIR-V
integers in the @Private@, @Workgroup@ (for non-Block variables), and
@Function@ storage classes is enabled, while declaring them in the | No documentation found for Chapter " Promoted_From_VK_KHR_shader_float16_int8 "
module Vulkan.Core12.Promoted_From_VK_KHR_shader_float16_int8 ( PhysicalDeviceShaderFloat16Int8Features(..)
, StructureType(..)
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES))
import Vulkan.Core10.Enums.StructureType (StructureType(..))
features supported by VK_KHR_shader_float16_int8
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 '
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2 ' ,
in the @pNext@ chain of ' Vulkan . Core10.Device . DeviceCreateInfo ' to
< -extensions/html/vkspec.html#VK_VERSION_1_2 VK_VERSION_1_2 > ,
' Vulkan . Core10.FundamentalTypes . Bool32 ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
data PhysicalDeviceShaderFloat16Int8Features = PhysicalDeviceShaderFloat16Int8Features
16 - bit floats ( halfs ) are supported in shader code . This also indicates
whether shader modules declare the @Float16@ capability . However ,
the @Float16@ SPIR - V capability : Declaring and using 16 - bit floats in
the @Private@ , @Workgroup@ ( for non - Block variables ) , and @Function@
storage classes ( e.g. , @UniformConstant@ , , @StorageBuffer@ ,
@Input@ , @Output@ , and @PushConstant@ ) is not enabled .
shaderFloat16 :: Bool
| # extension - features - shaderInt8 # @shaderInt8@ indicates whether 8 - bit
indicates whether shader modules declare the capability .
allows for the SPIR - V capability : Declaring and using 8 - bit
interface storage classes ( e.g. , @UniformConstant@ , ,
@StorageBuffer@ , @Input@ , @Output@ , and @PushConstant@ ) is not enabled .
shaderInt8 :: Bool
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceShaderFloat16Int8Features)
#endif
deriving instance Show PhysicalDeviceShaderFloat16Int8Features
instance ToCStruct PhysicalDeviceShaderFloat16Int8Features where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceShaderFloat16Int8Features{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (shaderFloat16))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (shaderInt8))
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceShaderFloat16Int8Features where
peekCStruct p = do
shaderFloat16 <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
shaderInt8 <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
pure $ PhysicalDeviceShaderFloat16Int8Features
(bool32ToBool shaderFloat16) (bool32ToBool shaderInt8)
instance Storable PhysicalDeviceShaderFloat16Int8Features where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceShaderFloat16Int8Features where
zero = PhysicalDeviceShaderFloat16Int8Features
zero
zero
|
f777c9bcbfe9485ad6cfcdc162f11c182523dcfc3d5c3282e6c39e973e31ffd1 | mindreframer/clojure-stuff | project.clj | (defproject io.hoplon/twitter.bootstrap "0.2.0"
:description "Hoplon wrapper for Twitter's Bootstrap framework."
:url "-util/hoplon/twitter.bootstrap"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.5.1"]
[tailrecursion/hoplon "5.10.3"]
[io.hoplon.vendor/twitter.bootstrap "3.2.0-0"]])
| null | https://raw.githubusercontent.com/mindreframer/clojure-stuff/1e761b2dacbbfbeec6f20530f136767e788e0fe3/github.com/tailrecursion/hoplon/contrib/twitter.bootstrap/project.clj | clojure | (defproject io.hoplon/twitter.bootstrap "0.2.0"
:description "Hoplon wrapper for Twitter's Bootstrap framework."
:url "-util/hoplon/twitter.bootstrap"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.5.1"]
[tailrecursion/hoplon "5.10.3"]
[io.hoplon.vendor/twitter.bootstrap "3.2.0-0"]])
| |
fe2f8249621eb90ea4b2992b25e27db59da27b7d167bb719e6f1cc4b0319c15f | scalaris-team/scalaris | rm_leases_SUITE.erl | 2012 - 2016 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author < >
%% @doc Unit tests for slide_leases
%% @end
%% @version $Id$
-module(rm_leases_SUITE).
-author('').
-vsn('$Id').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
groups() ->
[{tester_tests, [sequence], [
tester_type_check_rm_leases
]},
{kill_tests, [sequence], [
test_single_kill
]},
{add_tests, [sequence], [
test_single_add,
test_double_add,
test_triple_add
]},
{partition_tests,[sequence], [
test_network_partition
]},
{rm_loop_tests, [sequence], [
propose_new_neighbor
]},
{repeater, [{repeat, 30}], [{group, kill_tests} , {group, add_tests},
{group, rm_loop_tests}, {group, partition_tests}]}
].
all() ->
[
{group, tester_tests},
{group, kill_tests},
{group, add_tests},
{group, rm_loop_tests},
{group, partition_tests}
].
suite() -> [ {timetrap, {seconds, 40}} ].
group(tester_tests) ->
[{timetrap, {seconds, 400}}];
group(partition_tests) ->
[{timetrap, {seconds, 400}}];
group(_) ->
suite().
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(Group, Config) -> unittest_helper:init_per_group(Group, Config).
end_per_group(Group, Config) -> unittest_helper:end_per_group(Group, Config).
init_per_testcase(TestCase, Config) ->
case TestCase of
test_network_partition ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
%% this test case requires an even number of nodes
unittest_helper:make_symmetric_ring([{scale_ring_size_by, 2}, {config,
[{log_path, PrivDir},
{leases, true}]}]),
ok;
_ ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_symmetric_ring([{config, [{log_path, PrivDir},
{leases, true}]}]),
ok
end,
[{stop_ring, true} | Config].
end_per_testcase(_TestCase, _Config) ->
ok.
tester_type_check_rm_leases(_Config) ->
Count = 500,
config:write(no_print_ring_data, true),
%% [{modulename, [excludelist = {fun, arity}]}]
Modules =
[ {rm_leases,
[
{start_link, 1},
{start_gen_component,5}, %% unsupported types
{on, 2},
{get_takeovers, 1} %% sends messages
],
[
{compare_and_fix_rm_with_leases, 5}, %% cannot create dht_node_state (reference for bulkowner)
{propose_new_neighbors, 1}, %% sends messages
{prepare_takeover, 3} %% cannot create dht_node_state (reference for bulkowner)
]}
],
%% join a dht_node group to be able to call lease trigger functions
pid_groups:join(pid_groups:group_with(dht_node)),
_ = [ tester:type_check_module(Mod, Excl, ExclPriv, Count)
|| {Mod, Excl, ExclPriv} <- Modules ],
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% kill unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_single_kill(_Config) ->
case config:read(replication_factor) < 4 of
true ->
log:log("skipped: this test case is likely to fail for small replication factors"),
ok;
false ->
NrOfNodes = api_vm:number_of_nodes(),
log:log("kill nodes", []),
synchronous_kill(NrOfNodes),
%% timer:sleep(5000), % enable to see rest of protocol
ok
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% add unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_single_add(_Config) ->
%% log:log("join nodes", []),
log:log("add nodes", []),
synchronous_add(config:read(replication_factor), config:read(replication_factor)+1),
%timer:sleep(5000), % enable to see rest of protocol
ok.
test_double_add(_Config) ->
%% log:log("join nodes", []),
log:log("add nodes", []),
synchronous_add(config:read(replication_factor), config:read(replication_factor)+2),
%timer:sleep(5000), % enable to see rest of protocol
ok.
test_triple_add(_Config) ->
%% log:log("join nodes", []),
log:log("add nodes", []),
synchronous_add(config:read(replication_factor), config:read(replication_factor)+3),
%timer:sleep(5000), % enable to see rest of protocol
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
rm_loop unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
propose_new_neighbor(_Config) ->
lease_helper:wait_for_ring_size(config:read(replication_factor)),
lease_helper:wait_for_correct_ring(),
MainNode = pid_groups:group_with(dht_node),
% main node
RMLeasesPid = pid_groups:pid_of(MainNode, rm_leases),
DHTNodePid = pid_groups:pid_of(MainNode, dht_node),
% fake death
{_Pred, PredRange, Lease} = get_pred_info(DHTNodePid),
Result = {qread_done, fake_reqid, fake_round, fake_old_write_round, Lease},
Msg = {read_after_rm_change, PredRange, Result},
TakeoversBefore = rm_leases:get_takeovers(RMLeasesPid),
ct:pal("+wait_for_messages_after ~w", [gb_trees:to_list(TakeoversBefore)]),
wait_for_messages_after(RMLeasesPid, [merge_after_rm_change], %get_node_for_new_neighbor],
fun () ->
comm:send_local(RMLeasesPid, Msg)
end),
ct:pal("-wait_for_messages_after ~w", [gb_trees:to_list(TakeoversBefore)]),
AllRMMsgs = [read_after_rm_change, takeover_after_rm_change,
merge_after_rm_change, merge_after_leave,
get_node_for_new_neighbor, get_takeovers],
ct:pal("+test_quiescence"),
test_quiescence(RMLeasesPid, AllRMMsgs, 100),
ct:pal("-test_quiescence"),
TakeoversBefore = rm_leases:get_takeovers(RMLeasesPid),
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% network partition unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_network_partition(_Config) ->
%% We create a ring with an even number of nodes. For the odd
%% nodes, we stop lease renewal and wait for the leases to time
out . After that , we propose to the first ( even ) node to
%% takeover the last (odd) node.
DHTNodes = pid_groups:find_all(dht_node),
IdsAndNodes = lists:sort(
[
begin
comm:send_local(Node, {get_state, comm:this(), [node_id]}),
receive
{get_state_response, [{node_id, Id}]} ->
{Id, Node}
end
end
|| Node <- DHTNodes]),
OddNodes = iterate_even_odd(IdsAndNodes,
fun(Val, Even) ->
case Even of
true ->
ok;
false ->
{val, Val}
end
end),
DHTNodePid = hd(DHTNodes),
%% stop odd nodes
_ = [lease_helper:intercept_lease_renew(Node) || {_Id, Node} <- OddNodes],
lease_helper:wait_for_number_of_valid_active_leases(length(DHTNodes) div 2),
RMLeasesPid = pid_groups:pid_of(pid_groups:group_of(DHTNodePid), rm_leases),
%% propose takeover
{_Pred, PredRange, Lease} = get_pred_info(DHTNodePid),
Result = {qread_done, fake_reqid, fake_round, fake_old_write_round, Lease},
Msg = {read_after_rm_change, PredRange, Result},
comm : send_local(RMLeasesPid , Msg ) ,
%% what do we expect to happen? takeover and merge should succeed
wait_for_messages_after(RMLeasesPid, [merge_after_rm_change],
fun () ->
comm:send_local(RMLeasesPid, Msg)
end),
ok.
iterate_even_odd(L, F) ->
iterate_even_odd1(L, F, true, []).
iterate_even_odd1([], _F, _Flag, Acc) ->
Acc;
iterate_even_odd1([Val|Rest], F, Even, Acc) ->
case F(Val, Even) of
{val, Value} ->
iterate_even_odd1(Rest, F, not Even, [Value|Acc]);
_ ->
iterate_even_odd1(Rest, F, not Even, Acc)
end.
get_pred_info(Pid) ->
comm:send_local(Pid, {get_state, comm:this(), neighbors}),
Neighbors = receive
{get_state_response, Neighbors2} -> Neighbors2
end,
@todo could add fake node ? ? !
PredNode = nodelist:pred(Neighbors),
PredPid = node:pidX(PredNode),
comm:send(PredPid, {get_state, comm:this(), my_range}),
PredRange = receive
{get_state_response, PredRange2} -> PredRange2
end,
LeaseId = l_on_cseq:id(PredRange),
{ok, Lease} = l_on_cseq:read(LeaseId),
{PredPid, PredRange, Lease}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% join helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
join_test(Current, TargetSize) ->
lease_helper:wait_for_ring_size(Current),
lease_helper:wait_for_correct_ring(),
join_until(Current, TargetSize),
true.
join_until(Current, TargetSize) ->
joiner_helper(Current, TargetSize).
joiner_helper(Target, Target) ->
ok;
joiner_helper(Current, Target) ->
synchronous_join(Current+1),
joiner_helper(Current+1, Target).
synchronous_join(TargetSize) ->
_ = api_vm:add_nodes(1),
check_ring_state(TargetSize).
check_ring_state(TargetSize) ->
lease_helper:wait_for_ring_size(TargetSize),
lease_helper:wait_for_correct_ring(),
lease_helper:wait_for_correct_leases(TargetSize).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% kill helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
synchronous_kill(Current) ->
_ = api_vm:kill_nodes(1),
ct:pal("wait for ring size"),
lease_helper:wait_for_ring_size(Current - 1),
ct:pal("wait for correct ring"),
lease_helper:wait_for_correct_ring(),
ct:pal("wait for correct leases"),
lease_helper:wait_for_correct_leases(Current - 1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% add helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
synchronous_add(Current, Current) ->
ok;
synchronous_add(Current, _TargetSize) ->
ct:pal("================================== adding node ========================"),
_ = api_vm:add_nodes(1),
ct:pal("wait for ring size"),
lease_helper:wait_for_ring_size(Current + 1),
ct:pal("wait for correct ring"),
lease_helper:wait_for_correct_ring(),
ct:pal("wait for correct leases"),
util:wait_for(fun () -> admin:check_leases(Current + 1) end, 10000),
%lease_helper:wait_for_correct_leases(Current + 1),
ct:pal("================================== adding node done ========================").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% intercepting and blocking
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
wait_for_messages_after(Pid, Msgs, F) ->
gen_component:bp_set_cond(Pid, watch_for_msgs_filter(self(), Msgs), wait_for_message),
F(),
receive
{saw_message, _Msg} ->
gen_component:bp_del(Pid, wait_for_message)
end,
ok.
test_quiescence(Pid, Msgs, Timeout) ->
gen_component:bp_set_cond(Pid, watch_for_msgs_filter(self(), Msgs), test_quiescence),
receive
{saw_message, Msg} ->
gen_component:bp_del(Pid, test_quiescence),
?ct_fail("expected quiescence, but got ~w", [Msg])
after Timeout ->
gen_component:bp_del(Pid, test_quiescence),
ok
end.
watch_for_msgs_filter(Pid, Msgs) ->
fun (Message, _State) ->
ct : pal("saw ~w ~ n ~ w ~ n ~ w ~ n " , [ Message , lists : member(Message , Msgs ) , Msgs ] ) ,
case lists:member(element(1, Message), Msgs) of
true ->
comm:send_local(Pid, {saw_message, Message}),
false;
_ ->
false
end
end.
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/test/rm_leases_SUITE.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Unit tests for slide_leases
@end
@version $Id$
this test case requires an even number of nodes
[{modulename, [excludelist = {fun, arity}]}]
unsupported types
sends messages
cannot create dht_node_state (reference for bulkowner)
sends messages
cannot create dht_node_state (reference for bulkowner)
join a dht_node group to be able to call lease trigger functions
kill unit tests
timer:sleep(5000), % enable to see rest of protocol
add unit tests
log:log("join nodes", []),
timer:sleep(5000), % enable to see rest of protocol
log:log("join nodes", []),
timer:sleep(5000), % enable to see rest of protocol
log:log("join nodes", []),
timer:sleep(5000), % enable to see rest of protocol
main node
fake death
get_node_for_new_neighbor],
network partition unit tests
We create a ring with an even number of nodes. For the odd
nodes, we stop lease renewal and wait for the leases to time
takeover the last (odd) node.
stop odd nodes
propose takeover
what do we expect to happen? takeover and merge should succeed
join helper
kill helper
add helper
lease_helper:wait_for_correct_leases(Current + 1),
intercepting and blocking
| 2012 - 2016 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
-module(rm_leases_SUITE).
-author('').
-vsn('$Id').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
groups() ->
[{tester_tests, [sequence], [
tester_type_check_rm_leases
]},
{kill_tests, [sequence], [
test_single_kill
]},
{add_tests, [sequence], [
test_single_add,
test_double_add,
test_triple_add
]},
{partition_tests,[sequence], [
test_network_partition
]},
{rm_loop_tests, [sequence], [
propose_new_neighbor
]},
{repeater, [{repeat, 30}], [{group, kill_tests} , {group, add_tests},
{group, rm_loop_tests}, {group, partition_tests}]}
].
all() ->
[
{group, tester_tests},
{group, kill_tests},
{group, add_tests},
{group, rm_loop_tests},
{group, partition_tests}
].
suite() -> [ {timetrap, {seconds, 40}} ].
group(tester_tests) ->
[{timetrap, {seconds, 400}}];
group(partition_tests) ->
[{timetrap, {seconds, 400}}];
group(_) ->
suite().
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(Group, Config) -> unittest_helper:init_per_group(Group, Config).
end_per_group(Group, Config) -> unittest_helper:end_per_group(Group, Config).
init_per_testcase(TestCase, Config) ->
case TestCase of
test_network_partition ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_symmetric_ring([{scale_ring_size_by, 2}, {config,
[{log_path, PrivDir},
{leases, true}]}]),
ok;
_ ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_symmetric_ring([{config, [{log_path, PrivDir},
{leases, true}]}]),
ok
end,
[{stop_ring, true} | Config].
end_per_testcase(_TestCase, _Config) ->
ok.
tester_type_check_rm_leases(_Config) ->
Count = 500,
config:write(no_print_ring_data, true),
Modules =
[ {rm_leases,
[
{start_link, 1},
{on, 2},
],
[
]}
],
pid_groups:join(pid_groups:group_with(dht_node)),
_ = [ tester:type_check_module(Mod, Excl, ExclPriv, Count)
|| {Mod, Excl, ExclPriv} <- Modules ],
true.
test_single_kill(_Config) ->
case config:read(replication_factor) < 4 of
true ->
log:log("skipped: this test case is likely to fail for small replication factors"),
ok;
false ->
NrOfNodes = api_vm:number_of_nodes(),
log:log("kill nodes", []),
synchronous_kill(NrOfNodes),
ok
end.
test_single_add(_Config) ->
log:log("add nodes", []),
synchronous_add(config:read(replication_factor), config:read(replication_factor)+1),
ok.
test_double_add(_Config) ->
log:log("add nodes", []),
synchronous_add(config:read(replication_factor), config:read(replication_factor)+2),
ok.
test_triple_add(_Config) ->
log:log("add nodes", []),
synchronous_add(config:read(replication_factor), config:read(replication_factor)+3),
ok.
rm_loop unit tests
propose_new_neighbor(_Config) ->
lease_helper:wait_for_ring_size(config:read(replication_factor)),
lease_helper:wait_for_correct_ring(),
MainNode = pid_groups:group_with(dht_node),
RMLeasesPid = pid_groups:pid_of(MainNode, rm_leases),
DHTNodePid = pid_groups:pid_of(MainNode, dht_node),
{_Pred, PredRange, Lease} = get_pred_info(DHTNodePid),
Result = {qread_done, fake_reqid, fake_round, fake_old_write_round, Lease},
Msg = {read_after_rm_change, PredRange, Result},
TakeoversBefore = rm_leases:get_takeovers(RMLeasesPid),
ct:pal("+wait_for_messages_after ~w", [gb_trees:to_list(TakeoversBefore)]),
fun () ->
comm:send_local(RMLeasesPid, Msg)
end),
ct:pal("-wait_for_messages_after ~w", [gb_trees:to_list(TakeoversBefore)]),
AllRMMsgs = [read_after_rm_change, takeover_after_rm_change,
merge_after_rm_change, merge_after_leave,
get_node_for_new_neighbor, get_takeovers],
ct:pal("+test_quiescence"),
test_quiescence(RMLeasesPid, AllRMMsgs, 100),
ct:pal("-test_quiescence"),
TakeoversBefore = rm_leases:get_takeovers(RMLeasesPid),
ok.
test_network_partition(_Config) ->
out . After that , we propose to the first ( even ) node to
DHTNodes = pid_groups:find_all(dht_node),
IdsAndNodes = lists:sort(
[
begin
comm:send_local(Node, {get_state, comm:this(), [node_id]}),
receive
{get_state_response, [{node_id, Id}]} ->
{Id, Node}
end
end
|| Node <- DHTNodes]),
OddNodes = iterate_even_odd(IdsAndNodes,
fun(Val, Even) ->
case Even of
true ->
ok;
false ->
{val, Val}
end
end),
DHTNodePid = hd(DHTNodes),
_ = [lease_helper:intercept_lease_renew(Node) || {_Id, Node} <- OddNodes],
lease_helper:wait_for_number_of_valid_active_leases(length(DHTNodes) div 2),
RMLeasesPid = pid_groups:pid_of(pid_groups:group_of(DHTNodePid), rm_leases),
{_Pred, PredRange, Lease} = get_pred_info(DHTNodePid),
Result = {qread_done, fake_reqid, fake_round, fake_old_write_round, Lease},
Msg = {read_after_rm_change, PredRange, Result},
comm : send_local(RMLeasesPid , Msg ) ,
wait_for_messages_after(RMLeasesPid, [merge_after_rm_change],
fun () ->
comm:send_local(RMLeasesPid, Msg)
end),
ok.
iterate_even_odd(L, F) ->
iterate_even_odd1(L, F, true, []).
iterate_even_odd1([], _F, _Flag, Acc) ->
Acc;
iterate_even_odd1([Val|Rest], F, Even, Acc) ->
case F(Val, Even) of
{val, Value} ->
iterate_even_odd1(Rest, F, not Even, [Value|Acc]);
_ ->
iterate_even_odd1(Rest, F, not Even, Acc)
end.
get_pred_info(Pid) ->
comm:send_local(Pid, {get_state, comm:this(), neighbors}),
Neighbors = receive
{get_state_response, Neighbors2} -> Neighbors2
end,
@todo could add fake node ? ? !
PredNode = nodelist:pred(Neighbors),
PredPid = node:pidX(PredNode),
comm:send(PredPid, {get_state, comm:this(), my_range}),
PredRange = receive
{get_state_response, PredRange2} -> PredRange2
end,
LeaseId = l_on_cseq:id(PredRange),
{ok, Lease} = l_on_cseq:read(LeaseId),
{PredPid, PredRange, Lease}.
join_test(Current, TargetSize) ->
lease_helper:wait_for_ring_size(Current),
lease_helper:wait_for_correct_ring(),
join_until(Current, TargetSize),
true.
join_until(Current, TargetSize) ->
joiner_helper(Current, TargetSize).
joiner_helper(Target, Target) ->
ok;
joiner_helper(Current, Target) ->
synchronous_join(Current+1),
joiner_helper(Current+1, Target).
synchronous_join(TargetSize) ->
_ = api_vm:add_nodes(1),
check_ring_state(TargetSize).
check_ring_state(TargetSize) ->
lease_helper:wait_for_ring_size(TargetSize),
lease_helper:wait_for_correct_ring(),
lease_helper:wait_for_correct_leases(TargetSize).
synchronous_kill(Current) ->
_ = api_vm:kill_nodes(1),
ct:pal("wait for ring size"),
lease_helper:wait_for_ring_size(Current - 1),
ct:pal("wait for correct ring"),
lease_helper:wait_for_correct_ring(),
ct:pal("wait for correct leases"),
lease_helper:wait_for_correct_leases(Current - 1).
synchronous_add(Current, Current) ->
ok;
synchronous_add(Current, _TargetSize) ->
ct:pal("================================== adding node ========================"),
_ = api_vm:add_nodes(1),
ct:pal("wait for ring size"),
lease_helper:wait_for_ring_size(Current + 1),
ct:pal("wait for correct ring"),
lease_helper:wait_for_correct_ring(),
ct:pal("wait for correct leases"),
util:wait_for(fun () -> admin:check_leases(Current + 1) end, 10000),
ct:pal("================================== adding node done ========================").
wait_for_messages_after(Pid, Msgs, F) ->
gen_component:bp_set_cond(Pid, watch_for_msgs_filter(self(), Msgs), wait_for_message),
F(),
receive
{saw_message, _Msg} ->
gen_component:bp_del(Pid, wait_for_message)
end,
ok.
test_quiescence(Pid, Msgs, Timeout) ->
gen_component:bp_set_cond(Pid, watch_for_msgs_filter(self(), Msgs), test_quiescence),
receive
{saw_message, Msg} ->
gen_component:bp_del(Pid, test_quiescence),
?ct_fail("expected quiescence, but got ~w", [Msg])
after Timeout ->
gen_component:bp_del(Pid, test_quiescence),
ok
end.
watch_for_msgs_filter(Pid, Msgs) ->
fun (Message, _State) ->
ct : pal("saw ~w ~ n ~ w ~ n ~ w ~ n " , [ Message , lists : member(Message , Msgs ) , Msgs ] ) ,
case lists:member(element(1, Message), Msgs) of
true ->
comm:send_local(Pid, {saw_message, Message}),
false;
_ ->
false
end
end.
|
748dfe253976ffd57cc54370b440e766cddc56c3567fc7aaa14d2e1fa1245fc8 | bos/rwh | RandomSupply.hs | module RandomSupply (randomsIO) where
{-- snippet randomsIO --}
import Supply
import System.Random hiding (next)
randomsIO :: Random a => IO [a]
randomsIO =
getStdRandom $ \g ->
let (a, b) = split g
in (randoms a, b)
{-- /snippet randomsIO --}
| null | https://raw.githubusercontent.com/bos/rwh/7fd1e467d54aef832f5476ebf5f4f6a898a895d1/examples/ch15/RandomSupply.hs | haskell | - snippet randomsIO -
- /snippet randomsIO - | module RandomSupply (randomsIO) where
import Supply
import System.Random hiding (next)
randomsIO :: Random a => IO [a]
randomsIO =
getStdRandom $ \g ->
let (a, b) = split g
in (randoms a, b)
|
a33d65566780b58f7ce1c5c793eb860c34a557f9a99fd3463a6ee26effd4c3bf | input-output-hk/offchain-metadata-tools | Server.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE QuasiQuotes #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
module Test.Cardano.Metadata.Server
( tests
) where
import Data.Aeson ( ToJSON )
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy.Char8 as BLC
import Data.Foldable
import qualified Data.HashMap.Strict as HM
import Data.String ( fromString )
import qualified Data.Text as T
import Network.HTTP.Types ( hContentType, methodPost )
import Network.Wai.Test ( SResponse )
import Prelude hiding ( read )
import Test.Hspec
import Test.Hspec.Wai
import Test.Tasty ( TestTree )
import Test.Tasty.Hspec
import Cardano.Metadata.Server
import Cardano.Metadata.Server.Types
( BatchRequest (BatchRequest), BatchResponse (BatchResponse) )
import Cardano.Metadata.Store.Simple ( simpleStore )
import Cardano.Metadata.Store.Types
import Cardano.Metadata.Types.Common
( AttestedProperty (AttestedProperty)
, HashFn (SHA256)
, PreImage (PreImage)
, Subject (Subject)
, seqZero
, unSubject
)
import qualified Cardano.Metadata.Types.Weakly as Weakly
tests :: IO TestTree
tests = do
intf <- simpleStore mempty
testSpec "Server tests" (spec_server intf)
spec_server
:: StoreInterface Subject Weakly.Metadata
-> Spec
spec_server intf@(StoreInterface { storeWrite = write }) = do
let
subject1 = Subject "3"
subject1Str = BC.pack . T.unpack . unSubject $ subject1
subject2 = Subject "4"
preImg = (Aeson.toJSON $ AttestedProperty (PreImage "6d792d676f6775656e2d736372697074" SHA256) [] seqZero)
owner = (Aeson.toJSON $ AttestedProperty (Aeson.String "me") [] seqZero)
odd1 = Aeson.String "odd"
random = Aeson.String "random"
entry1 = Weakly.Metadata subject1 (HM.fromList [("owner", owner), ("odd", odd1), ("preImage", preImg)])
entry2 = Weakly.Metadata subject2 (HM.fromList [("random", random)])
testData =
[ (subject1, entry1)
, (subject2, entry2)
]
runIO $ traverse_ (uncurry write) testData
with (pure $ webApp intf) $ do
describe "GET /metadata/{subject}" $ do
it "should return 404 if subject doesn't exist" $
get "/metadata/bad"
`shouldRespondWith`
"Requested subject 'bad' not found" { matchStatus = 404 }
it "should return the subject if it does exist" $
get ("/metadata/" <> subject1Str)
`shouldRespondWith`
(matchingJSON entry1) { matchStatus = 200 }
describe "GET /metadata/{subject}/properties" $ do
it "should return 404 if subject doesn't exist" $
get "/metadata/bad/properties"
`shouldRespondWith`
"Requested subject 'bad' not found" { matchStatus = 404 }
it "should return the properties of the subject if it does exist" $
get ("/metadata/" <> subject1Str <> "/properties")
`shouldRespondWith`
(matchingJSON entry1) { matchStatus = 200 }
describe "GET /metadata/{subject}/properties/{property}" $ do
it "should return 404 if subject doesn't exist" $
get "/metadata/bad/properties/owner"
`shouldRespondWith`
"Requested subject 'bad' not found" { matchStatus = 404 }
it "should return 404 if property doesn't exist" $
get ("/metadata/" <> subject1Str <> "/properties/bad")
`shouldRespondWith`
"Requested subject '3' does not have the property 'bad'" { matchStatus = 404 }
it "should return the property if it does exist" $ do
get ("/metadata/" <> subject1Str <> "/properties/owner")
`shouldRespondWith`
(matchingJSON $ (Weakly.Metadata subject1 (HM.singleton "owner" owner))) { matchStatus = 200 }
get ("/metadata/" <> subject1Str <> "/properties/subject")
`shouldRespondWith`
(matchingJSON $ Weakly.Metadata subject1 mempty) { matchStatus = 200 }
get ("/metadata/" <> subject1Str <> "/properties/preImage")
`shouldRespondWith`
(matchingJSON $ Weakly.Metadata subject1 (HM.singleton "preImage" preImg)) { matchStatus = 200 }
get "/metadata/3/properties/odd"
`shouldRespondWith`
(matchingJSON (Weakly.Metadata subject1 (HM.singleton "odd" odd1))) { matchStatus = 200 }
describe "GET /metadata/query" $ do
it "should return empty response if subject not found" $ do
batchRequest (BatchRequest ["bad"] (Just []))
`shouldRespondWith`
(matchingJSON $ BatchResponse [])
batchRequest (BatchRequest ["bad"] (Just ["owner"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [])
it "should ignore subjects not found, returning subjects that were found" $
batchRequest (BatchRequest [subject1, "bad"] (Just []))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" mempty])
it "should return partial response if subject found but property not" $
batchRequest (BatchRequest [subject1] (Just ["bad"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" mempty])
it "should ignore properties not found, returning properties that were found" $
batchRequest (BatchRequest [subject1] (Just ["owner", "bad"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" (HM.singleton "owner" owner)])
it "should return a batch response" $ do
batchRequest (BatchRequest [subject1] (Just ["owner", "subject", "preImage"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" (HM.fromList [("owner", owner), ("preImage", preImg)])])
batchRequest (BatchRequest [subject1, subject2] (Just ["owner", "subject"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [ Weakly.Metadata "3" (HM.singleton "owner" owner)
, Weakly.Metadata "4" mempty
])
it "should return all properties if key 'properties' not present in JSON request" $
batchRequest (BatchRequest [subject1, subject2] Nothing)
`shouldRespondWith`
(matchingJSON $ BatchResponse
[ entry1
, entry2
])
batchRequest :: BatchRequest -> WaiSession st SResponse
batchRequest batchReq =
request
methodPost
"/metadata/query"
[ (hContentType, "application/json") ]
(Aeson.encode $ batchReq)
matchingJSON :: ToJSON a => a -> ResponseMatcher
matchingJSON = fromString . BLC.unpack . Aeson.encode
| null | https://raw.githubusercontent.com/input-output-hk/offchain-metadata-tools/794f08cedbf555e9d207bccc45c08abbcf98add9/metadata-lib/test/Test/Cardano/Metadata/Server.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes # | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
module Test.Cardano.Metadata.Server
( tests
) where
import Data.Aeson ( ToJSON )
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy.Char8 as BLC
import Data.Foldable
import qualified Data.HashMap.Strict as HM
import Data.String ( fromString )
import qualified Data.Text as T
import Network.HTTP.Types ( hContentType, methodPost )
import Network.Wai.Test ( SResponse )
import Prelude hiding ( read )
import Test.Hspec
import Test.Hspec.Wai
import Test.Tasty ( TestTree )
import Test.Tasty.Hspec
import Cardano.Metadata.Server
import Cardano.Metadata.Server.Types
( BatchRequest (BatchRequest), BatchResponse (BatchResponse) )
import Cardano.Metadata.Store.Simple ( simpleStore )
import Cardano.Metadata.Store.Types
import Cardano.Metadata.Types.Common
( AttestedProperty (AttestedProperty)
, HashFn (SHA256)
, PreImage (PreImage)
, Subject (Subject)
, seqZero
, unSubject
)
import qualified Cardano.Metadata.Types.Weakly as Weakly
tests :: IO TestTree
tests = do
intf <- simpleStore mempty
testSpec "Server tests" (spec_server intf)
spec_server
:: StoreInterface Subject Weakly.Metadata
-> Spec
spec_server intf@(StoreInterface { storeWrite = write }) = do
let
subject1 = Subject "3"
subject1Str = BC.pack . T.unpack . unSubject $ subject1
subject2 = Subject "4"
preImg = (Aeson.toJSON $ AttestedProperty (PreImage "6d792d676f6775656e2d736372697074" SHA256) [] seqZero)
owner = (Aeson.toJSON $ AttestedProperty (Aeson.String "me") [] seqZero)
odd1 = Aeson.String "odd"
random = Aeson.String "random"
entry1 = Weakly.Metadata subject1 (HM.fromList [("owner", owner), ("odd", odd1), ("preImage", preImg)])
entry2 = Weakly.Metadata subject2 (HM.fromList [("random", random)])
testData =
[ (subject1, entry1)
, (subject2, entry2)
]
runIO $ traverse_ (uncurry write) testData
with (pure $ webApp intf) $ do
describe "GET /metadata/{subject}" $ do
it "should return 404 if subject doesn't exist" $
get "/metadata/bad"
`shouldRespondWith`
"Requested subject 'bad' not found" { matchStatus = 404 }
it "should return the subject if it does exist" $
get ("/metadata/" <> subject1Str)
`shouldRespondWith`
(matchingJSON entry1) { matchStatus = 200 }
describe "GET /metadata/{subject}/properties" $ do
it "should return 404 if subject doesn't exist" $
get "/metadata/bad/properties"
`shouldRespondWith`
"Requested subject 'bad' not found" { matchStatus = 404 }
it "should return the properties of the subject if it does exist" $
get ("/metadata/" <> subject1Str <> "/properties")
`shouldRespondWith`
(matchingJSON entry1) { matchStatus = 200 }
describe "GET /metadata/{subject}/properties/{property}" $ do
it "should return 404 if subject doesn't exist" $
get "/metadata/bad/properties/owner"
`shouldRespondWith`
"Requested subject 'bad' not found" { matchStatus = 404 }
it "should return 404 if property doesn't exist" $
get ("/metadata/" <> subject1Str <> "/properties/bad")
`shouldRespondWith`
"Requested subject '3' does not have the property 'bad'" { matchStatus = 404 }
it "should return the property if it does exist" $ do
get ("/metadata/" <> subject1Str <> "/properties/owner")
`shouldRespondWith`
(matchingJSON $ (Weakly.Metadata subject1 (HM.singleton "owner" owner))) { matchStatus = 200 }
get ("/metadata/" <> subject1Str <> "/properties/subject")
`shouldRespondWith`
(matchingJSON $ Weakly.Metadata subject1 mempty) { matchStatus = 200 }
get ("/metadata/" <> subject1Str <> "/properties/preImage")
`shouldRespondWith`
(matchingJSON $ Weakly.Metadata subject1 (HM.singleton "preImage" preImg)) { matchStatus = 200 }
get "/metadata/3/properties/odd"
`shouldRespondWith`
(matchingJSON (Weakly.Metadata subject1 (HM.singleton "odd" odd1))) { matchStatus = 200 }
describe "GET /metadata/query" $ do
it "should return empty response if subject not found" $ do
batchRequest (BatchRequest ["bad"] (Just []))
`shouldRespondWith`
(matchingJSON $ BatchResponse [])
batchRequest (BatchRequest ["bad"] (Just ["owner"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [])
it "should ignore subjects not found, returning subjects that were found" $
batchRequest (BatchRequest [subject1, "bad"] (Just []))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" mempty])
it "should return partial response if subject found but property not" $
batchRequest (BatchRequest [subject1] (Just ["bad"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" mempty])
it "should ignore properties not found, returning properties that were found" $
batchRequest (BatchRequest [subject1] (Just ["owner", "bad"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" (HM.singleton "owner" owner)])
it "should return a batch response" $ do
batchRequest (BatchRequest [subject1] (Just ["owner", "subject", "preImage"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [Weakly.Metadata "3" (HM.fromList [("owner", owner), ("preImage", preImg)])])
batchRequest (BatchRequest [subject1, subject2] (Just ["owner", "subject"]))
`shouldRespondWith`
(matchingJSON $ BatchResponse [ Weakly.Metadata "3" (HM.singleton "owner" owner)
, Weakly.Metadata "4" mempty
])
it "should return all properties if key 'properties' not present in JSON request" $
batchRequest (BatchRequest [subject1, subject2] Nothing)
`shouldRespondWith`
(matchingJSON $ BatchResponse
[ entry1
, entry2
])
batchRequest :: BatchRequest -> WaiSession st SResponse
batchRequest batchReq =
request
methodPost
"/metadata/query"
[ (hContentType, "application/json") ]
(Aeson.encode $ batchReq)
matchingJSON :: ToJSON a => a -> ResponseMatcher
matchingJSON = fromString . BLC.unpack . Aeson.encode
|
01792367c3dd1b726b9fa31962762c8800147e68cdc006c8467fc7964f180a1c | grafi-tt/tatsuki | Binary.hs | -- | This module exports a quasiquoter for binary integer literals.
--
-- Example usage:
--
-- @
import Language . Literals . Binary
import Data . Word
--
not : : Word32
not [ b| 0 | ] = [ b| 1 | ]
not [ b| 1 | ] = [ b| 0 | ]
-- @
module Language.Literals.Binary where
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Data.Bits
b = QuasiQuoter
{ quoteExp = return . LitE . IntegerL . readBinary
, quotePat = return . LitP . IntegerL . readBinary
, quoteType = error "No quasiquoter for types."
, quoteDec = error "No quasiquoter for declarations." }
readBinary :: String -> Integer
readBinary = foldl f 0 where
f x '0' = shift x 1
f x '1' = shift x 1 + 1
f x ' ' = x
f x _ = error "Not a valid binary literal."
| null | https://raw.githubusercontent.com/grafi-tt/tatsuki/2f85835c5aab83c3c60999a37a51fa4794c84285/Language/Literals/Binary.hs | haskell | | This module exports a quasiquoter for binary integer literals.
Example usage:
@
@ | import Language . Literals . Binary
import Data . Word
not : : Word32
not [ b| 0 | ] = [ b| 1 | ]
not [ b| 1 | ] = [ b| 0 | ]
module Language.Literals.Binary where
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Data.Bits
b = QuasiQuoter
{ quoteExp = return . LitE . IntegerL . readBinary
, quotePat = return . LitP . IntegerL . readBinary
, quoteType = error "No quasiquoter for types."
, quoteDec = error "No quasiquoter for declarations." }
readBinary :: String -> Integer
readBinary = foldl f 0 where
f x '0' = shift x 1
f x '1' = shift x 1 + 1
f x ' ' = x
f x _ = error "Not a valid binary literal."
|
157417b35ad0c78fccf4abd16064a8f7fa08d9f46b46714c6208e0c752a0f001 | EFanZh/EOPL-Exercises | exercise-2.5-test.rkt | #lang racket/base
(require rackunit)
(require "../solutions/exercise-2.5.rkt")
(check-eqv? (apply-env (extend-env 'a 7 (empty-env)) 'a) 7)
(check-eqv? (apply-env (extend-env 'a
5
(extend-env 'a
7
(empty-env)))
'a)
5)
(check-eqv? (apply-env (extend-env 'b
5
(extend-env 'a
7
(empty-env)))
'a)
7)
(check-eqv? (apply-env (extend-env 'b
5
(extend-env 'a
7
(empty-env)))
'b)
5)
| null | https://raw.githubusercontent.com/EFanZh/EOPL-Exercises/11667f1e84a1a3e300c2182630b56db3e3d9246a/tests/exercise-2.5-test.rkt | racket | #lang racket/base
(require rackunit)
(require "../solutions/exercise-2.5.rkt")
(check-eqv? (apply-env (extend-env 'a 7 (empty-env)) 'a) 7)
(check-eqv? (apply-env (extend-env 'a
5
(extend-env 'a
7
(empty-env)))
'a)
5)
(check-eqv? (apply-env (extend-env 'b
5
(extend-env 'a
7
(empty-env)))
'a)
7)
(check-eqv? (apply-env (extend-env 'b
5
(extend-env 'a
7
(empty-env)))
'b)
5)
| |
3023575c605c59675af9d71bd3cd3367b552ded1cecdab5bbe0b1c0b6414ac3c | brainly/flood | flood_fsm.erl | -module(flood_fsm).
-author('').
-behaviour(gen_fsm).
-export([start_link/3, init/1, terminate/3]).
-export([connected/2, connected/3, disconnected/2, disconnected/3]).
-export([handle_info/3, handle_sync_event/4, code_change/4]).
-export([status/1, connect/1, disconnect/1, kill/1]).
-record(fsm_data, {url, transport, data, request_id}).
-include("socketio.hrl").
%% Gen Server callbacks
start_link({Host, Port, Endpoint}, Session, Metadata) ->
gen_fsm:start_link(?MODULE, {binary_to_list(Host) ++ ":" ++ integer_to_list(Port) ++ binary_to_list(Endpoint),
Session,
Metadata},
[]).
init({Url, Session, Metadata}) ->
case flood_session:init([{<<"server.url">>, Url} | Metadata], Session) of
{noreply, UserData} ->
Data = #fsm_data{url = Url,
data = UserData,
transport = undefined},
flood:inc(all_users),
flood:inc(alive_users),
flood:inc(disconnected_users),
process_flag(trap_exit, true), % So we can clean up later.
do_connect(Data),
{ok, disconnected, Data};
{stop, Reason, _UserData} ->
{stop, Reason};
{reply, _Replies, _UserData} ->
{stop, unable_to_initialize}
end.
terminate(Reason, State, Data) ->
lager:info("FSM terminated:~n- State: ~p~n- Data: ~p~n- Reason: ~p", [State, Data, Reason]),
flood:inc(terminated_users),
flood:dec(alive_users),
case State of
disconnected -> flood:dec(disconnected_users);
connected -> flood:dec(connected_users)
end,
ok.
FSM event handlers
connected(Event, _From, Data) ->
%% TODO Use this instead of handle_sync_event
connected(Event, Data).
connected(Event, Data) ->
case Event of
{disconnect, NewData} ->
lager:info("Disconnecting..."), % Transition to disconnected state and make sure
do_disconnect(NewData), % it handles attempts to reconnect.
lager:info("Disconnected!"),
flood:dec(connected_users),
flood:inc(disconnected_users),
{next_state, disconnected, NewData};
{connect, NewData = #fsm_data{url = NewUrl, transport = Transport}} ->
case new_request(Transport, NewUrl) of
undefined -> do_connect(Data),
flood:dec(connected_users),
flood:inc(disconnected_users),
{next_state, disconnected, NewData};
NewRequestId -> {next_state, connected, NewData#fsm_data{request_id = NewRequestId}}
end;
{reconnect, NewData = #fsm_data{url = NewUrl, transport = Transport}} ->
case Transport of
<<"xhr-polling">> ->
case new_request(Transport, NewUrl) of
undefined -> do_connect(Data),
flood:dec(connected_users),
flood:inc(disconected_users),
{next_state, disconnected, NewData};
NewRequestId -> {next_state, connected, NewData#fsm_data{request_id = NewRequestId}}
end;
_ ->
NOTE WebSocked does n't need no reconnections .
{next_state, connected, NewData}
end;
{terminate, LastData} ->
lager:info("Terminating..."),
{stop, normal, LastData};
{timeout, _Ref, Name} ->
handle_timeout(connected, Name, Data);
_ ->
{next_state, connected, Data}
end.
disconnected(Event, _From, Data) ->
%% TODO Use this instead of handle_sync_event
disconnected(Event, Data).
disconnected(Event, Data) ->
case Event of
{connect, NewData = #fsm_data{url = NewUrl, transport = Transport}} ->
lager:info("Connecting..."),
%% Cancel an ongoing request (if any) before starting a new one.
case new_request(Transport, NewUrl) of
undefined -> lager:info("Unable to connect!"),
lager:info("Attempting to reconnect..."),
do_connect(Data),
{next_state, disconnected, NewData};
NewRequestId -> lager:info("Connected!"),
flood:dec(disconnected_users),
flood:inc(connected_users),
{next_state, connected, NewData#fsm_data{request_id = NewRequestId}}
end;
{terminate, LastData} ->
lager:info("Terminating..."),
{stop, normal, LastData};
_ ->
{next_state, disconnected, Data}
end.
handle_info(Info, State, Data) ->
case Info of
{ibrowse_async_headers, _RequestId, _Code, _Headers} ->
{next_state, State, Data};
{ibrowse_async_response, _RequestId, {error, Why}} ->
lager:info("Connection closed: ~p", [Why]),
do_disconnect(Data),
{next_state, State, Data};
{ibrowse_async_response_timeout, _RequestId} ->
lager:info("Connection closed: ~p", [async_response_timeout]),
do_disconnect(Data),
{next_state, State, Data};
{ibrowse_async_response, _RequestId, Msg} ->
flood:inc(http_incomming),
%% FIXME This is fuuugly. Defuglyfy
case Data#fsm_data.transport of
undefined ->
lager:info("Received a Socket.IO handshake."),
[Sid, Heartbeat, Timeout, TransportsBin] = binary:split(Msg, <<":">>, [global]),
Transports = binary:split(TransportsBin, <<",">>, [global]),
Metadata = [{<<"server.sid">>, Sid},
{<<"server.heartbeat_timeout">>, binary_to_integer(Heartbeat) * 1000},
{<<"server.reconnect_timeout">>, binary_to_integer(Timeout) * 1000},
{<<"server.available_trasports">>, Transports}],
%% NOTE Assumes they are actually available.
UserData = Data#fsm_data.data,
Transport = flood_session:get_metadata(<<"session.transport">>, UserData),
true = lists:member(Transport, Transports),
Url = Data#fsm_data.url ++ binary_to_list(Transport) ++ "/" ++ binary_to_list(Sid),
NewUserData = flood_session:add_metadata([{<<"server.url">>, Url} | Metadata], Data#fsm_data.data),
NewData = Data#fsm_data{transport = Transport, url = Url, data = NewUserData},
do_connect(NewData),
{next_state, connected, NewData};
<<"websocket">> ->
lager:error("Received a HTTP reply while in WebSocket mode!"),
{next_state, State, Data};
<<"xhr-polling">> ->
%% NOTE Assumes that POST requests receive empty replies.
case Msg of
<<>> -> {next_state, State, Data};
_ -> handle_socketio(connected, Msg, Data)
end
end;
{ibrowse_async_response_end, _RequestId} ->
{next_state, State, Data};
{ws, _Pid, {started, _State}} ->
{next_state, State, Data};
{ws, _Pid, {text, Msg}} ->
flood:inc(ws_incomming),
handle_socketio(connected, Msg, Data);
{ws, _Pid, {closed, Why}} ->
lager:info("Connection closed: ~p", [Why]),
do_disconnect(Data),
{next_state, State, Data};
{'EXIT', _Pid, Reason} ->
lager:info("FSM terminating: ~p", [Reason]),
do_terminate(Data),
{stop, Reason, Data}
end.
handle_sync_event(Event, _From, State, Data) ->
%% TODO Move these to Module:StateName/3
case Event of
status -> {reply, State, State, Data};
disconnect -> do_disconnect(Data),
{reply, State, State, Data};
terminate -> do_terminate(Data),
{reply, terminated, State, Data}
end.
code_change(_OldVsn, State, _Data, _Extra) ->
lager:warning("Unhandled code change."),
{ok, State}.
%% External functions
status(Pid) ->
send_event(Pid, status).
connect(Pid) ->
send_event(Pid, connect).
disconnect(Pid) ->
send_event(Pid, disconnect).
kill(Pid) ->
send_event(Pid, terminate).
Internal functions
send_event(Pid, Event) ->
gen_fsm:sync_send_all_state_event(Pid, Event).
do_connect(Data) ->
gen_fsm:send_event(self(), {connect, Data}).
do_disconnect(Data) ->
gen_fsm:send_event(self(), {disconnect, Data}).
do_reconnect(Data) ->
gen_fsm:send_event(self(), {reconnect, Data}).
do_terminate(Data) ->
gen_fsm:send_event(self(), {terminate, Data}).
new_request(undefined, Url) ->
new_request(<<"xhr-polling">>, Url);
new_request(<<"xhr-polling">>, Url) ->
flood:inc(http_outgoing),
{_, RequestId} = ibrowse:send_req("http://" ++ Url,
[{"connection","keep-alive"},
{"content-type", "text/plain;charset=UTF-8"},
{"content-length", "0"},
{"origin", "null"},
{"accept","*/*"},
{"accept-encoding","gzip,deflate,sdch"},
{"accept-language","pl-PL,pl;q=0.8,en-US;q=0.6,en;q=0.4"}],
get,
[],
[{stream_to, self()},
{response_format, binary}]),
RequestId;
new_request(<<"websocket">>, Url) ->
case flood_ws_client:start_link(self(), "ws://" ++ Url) of
{ok, Pid} -> Pid;
{error, _Error} -> undefined
end.
send_data(Msgs, Data) ->
send_data(Data#fsm_data.transport, Msgs, Data).
send_data(<<"websocket">>, Msgs, Data) ->
HandlerPid = Data#fsm_data.request_id,
lists:map(fun(Msg) ->
flood:inc(ws_outgoing),
HandlerPid ! {text, socketio_parser:encode(Msg)}
end,
Msgs);
send_data(<<"xhr-polling">>, Msgs, Data) ->
flood:inc(http_outgoing),
N = erlang:now(),
T = element(3, N) + element(2, N) * element(1, N) * 1000,
Url = "http://" ++ Data#fsm_data.url ++ "?t=" ++ integer_to_list(T),
Encoded = socketio_parser:encode_batch(Msgs),
ibrowse:send_req(Url,
[{"connection","keep-alive"},
{"content-type", "text/plain;charset=UTF-8"},
{"content-length", integer_to_list(byte_size(Encoded))},
{"origin","null"},
{"content-type","text/plain;charset=UTF-8"},
{"accept","*/*"},
{"accept-encoding","gzip,deflate,sdch"},
{"accept-language","pl-PL,pl;q=0.8,en-US;q=0.6,en;q=0.4"}],
post,
Encoded,
[{stream_to, self()},
{response_format, binary}]).
handle_socketio(State, Msg, Data) ->
Msgs = socketio_parser:decode_maybe_batch(Msg),
UserData = Data#fsm_data.data,
case flood_session:handle_socketio(Msgs, UserData) of
{reply, Replies, NewUserData} -> NewData = Data#fsm_data{data = NewUserData},
send_data(Replies, NewData),
do_reconnect(NewData),
{next_state, State, NewData};
{noreply, NewUserData} -> NewData = Data#fsm_data{data = NewUserData},
do_reconnect(NewData),
{next_state, State, NewData};
{stop, Reason, NewUserData} -> {stop, Reason, Data#fsm_data{data = NewUserData}}
end.
handle_timeout(State, Name, Data) ->
UserState = Data#fsm_data.data,
case flood_session:handle_timeout(Name, UserState) of
{noreply, NewUserData} -> {next_state, State, Data#fsm_data{data = NewUserData}};
{reply, Replies, NewUserData} -> NewData = Data#fsm_data{data = NewUserData},
send_data(Replies, NewData),
{next_state, State, NewData};
{stop, Reason, NewUserData} -> {stop, Reason, Data#fsm_data{data = NewUserData}}
end.
| null | https://raw.githubusercontent.com/brainly/flood/0ee7f436d3ca5e9b89e608e60ca3486ecd3689c4/src/flood_fsm.erl | erlang | Gen Server callbacks
So we can clean up later.
TODO Use this instead of handle_sync_event
Transition to disconnected state and make sure
it handles attempts to reconnect.
TODO Use this instead of handle_sync_event
Cancel an ongoing request (if any) before starting a new one.
FIXME This is fuuugly. Defuglyfy
NOTE Assumes they are actually available.
NOTE Assumes that POST requests receive empty replies.
TODO Move these to Module:StateName/3
External functions | -module(flood_fsm).
-author('').
-behaviour(gen_fsm).
-export([start_link/3, init/1, terminate/3]).
-export([connected/2, connected/3, disconnected/2, disconnected/3]).
-export([handle_info/3, handle_sync_event/4, code_change/4]).
-export([status/1, connect/1, disconnect/1, kill/1]).
-record(fsm_data, {url, transport, data, request_id}).
-include("socketio.hrl").
start_link({Host, Port, Endpoint}, Session, Metadata) ->
gen_fsm:start_link(?MODULE, {binary_to_list(Host) ++ ":" ++ integer_to_list(Port) ++ binary_to_list(Endpoint),
Session,
Metadata},
[]).
init({Url, Session, Metadata}) ->
case flood_session:init([{<<"server.url">>, Url} | Metadata], Session) of
{noreply, UserData} ->
Data = #fsm_data{url = Url,
data = UserData,
transport = undefined},
flood:inc(all_users),
flood:inc(alive_users),
flood:inc(disconnected_users),
do_connect(Data),
{ok, disconnected, Data};
{stop, Reason, _UserData} ->
{stop, Reason};
{reply, _Replies, _UserData} ->
{stop, unable_to_initialize}
end.
terminate(Reason, State, Data) ->
lager:info("FSM terminated:~n- State: ~p~n- Data: ~p~n- Reason: ~p", [State, Data, Reason]),
flood:inc(terminated_users),
flood:dec(alive_users),
case State of
disconnected -> flood:dec(disconnected_users);
connected -> flood:dec(connected_users)
end,
ok.
FSM event handlers
connected(Event, _From, Data) ->
connected(Event, Data).
connected(Event, Data) ->
case Event of
{disconnect, NewData} ->
lager:info("Disconnected!"),
flood:dec(connected_users),
flood:inc(disconnected_users),
{next_state, disconnected, NewData};
{connect, NewData = #fsm_data{url = NewUrl, transport = Transport}} ->
case new_request(Transport, NewUrl) of
undefined -> do_connect(Data),
flood:dec(connected_users),
flood:inc(disconnected_users),
{next_state, disconnected, NewData};
NewRequestId -> {next_state, connected, NewData#fsm_data{request_id = NewRequestId}}
end;
{reconnect, NewData = #fsm_data{url = NewUrl, transport = Transport}} ->
case Transport of
<<"xhr-polling">> ->
case new_request(Transport, NewUrl) of
undefined -> do_connect(Data),
flood:dec(connected_users),
flood:inc(disconected_users),
{next_state, disconnected, NewData};
NewRequestId -> {next_state, connected, NewData#fsm_data{request_id = NewRequestId}}
end;
_ ->
NOTE WebSocked does n't need no reconnections .
{next_state, connected, NewData}
end;
{terminate, LastData} ->
lager:info("Terminating..."),
{stop, normal, LastData};
{timeout, _Ref, Name} ->
handle_timeout(connected, Name, Data);
_ ->
{next_state, connected, Data}
end.
disconnected(Event, _From, Data) ->
disconnected(Event, Data).
disconnected(Event, Data) ->
case Event of
{connect, NewData = #fsm_data{url = NewUrl, transport = Transport}} ->
lager:info("Connecting..."),
case new_request(Transport, NewUrl) of
undefined -> lager:info("Unable to connect!"),
lager:info("Attempting to reconnect..."),
do_connect(Data),
{next_state, disconnected, NewData};
NewRequestId -> lager:info("Connected!"),
flood:dec(disconnected_users),
flood:inc(connected_users),
{next_state, connected, NewData#fsm_data{request_id = NewRequestId}}
end;
{terminate, LastData} ->
lager:info("Terminating..."),
{stop, normal, LastData};
_ ->
{next_state, disconnected, Data}
end.
handle_info(Info, State, Data) ->
case Info of
{ibrowse_async_headers, _RequestId, _Code, _Headers} ->
{next_state, State, Data};
{ibrowse_async_response, _RequestId, {error, Why}} ->
lager:info("Connection closed: ~p", [Why]),
do_disconnect(Data),
{next_state, State, Data};
{ibrowse_async_response_timeout, _RequestId} ->
lager:info("Connection closed: ~p", [async_response_timeout]),
do_disconnect(Data),
{next_state, State, Data};
{ibrowse_async_response, _RequestId, Msg} ->
flood:inc(http_incomming),
case Data#fsm_data.transport of
undefined ->
lager:info("Received a Socket.IO handshake."),
[Sid, Heartbeat, Timeout, TransportsBin] = binary:split(Msg, <<":">>, [global]),
Transports = binary:split(TransportsBin, <<",">>, [global]),
Metadata = [{<<"server.sid">>, Sid},
{<<"server.heartbeat_timeout">>, binary_to_integer(Heartbeat) * 1000},
{<<"server.reconnect_timeout">>, binary_to_integer(Timeout) * 1000},
{<<"server.available_trasports">>, Transports}],
UserData = Data#fsm_data.data,
Transport = flood_session:get_metadata(<<"session.transport">>, UserData),
true = lists:member(Transport, Transports),
Url = Data#fsm_data.url ++ binary_to_list(Transport) ++ "/" ++ binary_to_list(Sid),
NewUserData = flood_session:add_metadata([{<<"server.url">>, Url} | Metadata], Data#fsm_data.data),
NewData = Data#fsm_data{transport = Transport, url = Url, data = NewUserData},
do_connect(NewData),
{next_state, connected, NewData};
<<"websocket">> ->
lager:error("Received a HTTP reply while in WebSocket mode!"),
{next_state, State, Data};
<<"xhr-polling">> ->
case Msg of
<<>> -> {next_state, State, Data};
_ -> handle_socketio(connected, Msg, Data)
end
end;
{ibrowse_async_response_end, _RequestId} ->
{next_state, State, Data};
{ws, _Pid, {started, _State}} ->
{next_state, State, Data};
{ws, _Pid, {text, Msg}} ->
flood:inc(ws_incomming),
handle_socketio(connected, Msg, Data);
{ws, _Pid, {closed, Why}} ->
lager:info("Connection closed: ~p", [Why]),
do_disconnect(Data),
{next_state, State, Data};
{'EXIT', _Pid, Reason} ->
lager:info("FSM terminating: ~p", [Reason]),
do_terminate(Data),
{stop, Reason, Data}
end.
handle_sync_event(Event, _From, State, Data) ->
case Event of
status -> {reply, State, State, Data};
disconnect -> do_disconnect(Data),
{reply, State, State, Data};
terminate -> do_terminate(Data),
{reply, terminated, State, Data}
end.
code_change(_OldVsn, State, _Data, _Extra) ->
lager:warning("Unhandled code change."),
{ok, State}.
status(Pid) ->
send_event(Pid, status).
connect(Pid) ->
send_event(Pid, connect).
disconnect(Pid) ->
send_event(Pid, disconnect).
kill(Pid) ->
send_event(Pid, terminate).
Internal functions
send_event(Pid, Event) ->
gen_fsm:sync_send_all_state_event(Pid, Event).
do_connect(Data) ->
gen_fsm:send_event(self(), {connect, Data}).
do_disconnect(Data) ->
gen_fsm:send_event(self(), {disconnect, Data}).
do_reconnect(Data) ->
gen_fsm:send_event(self(), {reconnect, Data}).
do_terminate(Data) ->
gen_fsm:send_event(self(), {terminate, Data}).
new_request(undefined, Url) ->
new_request(<<"xhr-polling">>, Url);
new_request(<<"xhr-polling">>, Url) ->
flood:inc(http_outgoing),
{_, RequestId} = ibrowse:send_req("http://" ++ Url,
[{"connection","keep-alive"},
{"content-type", "text/plain;charset=UTF-8"},
{"content-length", "0"},
{"origin", "null"},
{"accept","*/*"},
{"accept-encoding","gzip,deflate,sdch"},
{"accept-language","pl-PL,pl;q=0.8,en-US;q=0.6,en;q=0.4"}],
get,
[],
[{stream_to, self()},
{response_format, binary}]),
RequestId;
new_request(<<"websocket">>, Url) ->
case flood_ws_client:start_link(self(), "ws://" ++ Url) of
{ok, Pid} -> Pid;
{error, _Error} -> undefined
end.
send_data(Msgs, Data) ->
send_data(Data#fsm_data.transport, Msgs, Data).
send_data(<<"websocket">>, Msgs, Data) ->
HandlerPid = Data#fsm_data.request_id,
lists:map(fun(Msg) ->
flood:inc(ws_outgoing),
HandlerPid ! {text, socketio_parser:encode(Msg)}
end,
Msgs);
send_data(<<"xhr-polling">>, Msgs, Data) ->
flood:inc(http_outgoing),
N = erlang:now(),
T = element(3, N) + element(2, N) * element(1, N) * 1000,
Url = "http://" ++ Data#fsm_data.url ++ "?t=" ++ integer_to_list(T),
Encoded = socketio_parser:encode_batch(Msgs),
ibrowse:send_req(Url,
[{"connection","keep-alive"},
{"content-type", "text/plain;charset=UTF-8"},
{"content-length", integer_to_list(byte_size(Encoded))},
{"origin","null"},
{"content-type","text/plain;charset=UTF-8"},
{"accept","*/*"},
{"accept-encoding","gzip,deflate,sdch"},
{"accept-language","pl-PL,pl;q=0.8,en-US;q=0.6,en;q=0.4"}],
post,
Encoded,
[{stream_to, self()},
{response_format, binary}]).
handle_socketio(State, Msg, Data) ->
Msgs = socketio_parser:decode_maybe_batch(Msg),
UserData = Data#fsm_data.data,
case flood_session:handle_socketio(Msgs, UserData) of
{reply, Replies, NewUserData} -> NewData = Data#fsm_data{data = NewUserData},
send_data(Replies, NewData),
do_reconnect(NewData),
{next_state, State, NewData};
{noreply, NewUserData} -> NewData = Data#fsm_data{data = NewUserData},
do_reconnect(NewData),
{next_state, State, NewData};
{stop, Reason, NewUserData} -> {stop, Reason, Data#fsm_data{data = NewUserData}}
end.
handle_timeout(State, Name, Data) ->
UserState = Data#fsm_data.data,
case flood_session:handle_timeout(Name, UserState) of
{noreply, NewUserData} -> {next_state, State, Data#fsm_data{data = NewUserData}};
{reply, Replies, NewUserData} -> NewData = Data#fsm_data{data = NewUserData},
send_data(Replies, NewData),
{next_state, State, NewData};
{stop, Reason, NewUserData} -> {stop, Reason, Data#fsm_data{data = NewUserData}}
end.
|
4387b741e2fa59cb9b5ff1f92a1fa812a7b89b72737ac2d3d7cd06f082c3fe28 | leviroth/ocaml-reddit-api | uri_with_string_sexp.mli | * [ Uri_with_string_sexp ] is { ! module : Uri_sexp } with a different sexp
serialization :
[ sexp_of_t t = Atom ( Uri.to_string t ) ]
serialization:
[sexp_of_t t = Atom (Uri.to_string t)]
*)
open! Core
include module type of struct
include Uri_sexp
end
| null | https://raw.githubusercontent.com/leviroth/ocaml-reddit-api/972e6a03e2fe59d8e3f469a643eef41e44a4fc97/reddit_api_kernel/uri_with_string_sexp.mli | ocaml | * [ Uri_with_string_sexp ] is { ! module : Uri_sexp } with a different sexp
serialization :
[ sexp_of_t t = Atom ( Uri.to_string t ) ]
serialization:
[sexp_of_t t = Atom (Uri.to_string t)]
*)
open! Core
include module type of struct
include Uri_sexp
end
| |
019f9b178c325d908b00cdafcbd462dd4ae4bbe275ec2b8afc1e56e21e08aca3 | exoscale/clojure-kubernetes-client | v1_config_map_projection.clj | (ns clojure-kubernetes-client.specs.v1-config-map-projection
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-key-to-path :refer :all]
)
(:import (java.io File)))
(declare v1-config-map-projection-data v1-config-map-projection)
(def v1-config-map-projection-data
{
(ds/opt :items) (s/coll-of v1-key-to-path)
(ds/opt :name) string?
(ds/opt :optional) boolean?
})
(def v1-config-map-projection
(ds/spec
{:name ::v1-config-map-projection
:spec v1-config-map-projection-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_config_map_projection.clj | clojure | (ns clojure-kubernetes-client.specs.v1-config-map-projection
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-key-to-path :refer :all]
)
(:import (java.io File)))
(declare v1-config-map-projection-data v1-config-map-projection)
(def v1-config-map-projection-data
{
(ds/opt :items) (s/coll-of v1-key-to-path)
(ds/opt :name) string?
(ds/opt :optional) boolean?
})
(def v1-config-map-projection
(ds/spec
{:name ::v1-config-map-projection
:spec v1-config-map-projection-data}))
| |
a03ffd6c985b8f0039dcdc542535f649ea06ceb0803ac74c1c015ef953866c97 | TomerAberbach/programming-in-haskell-exercises | 6.hs | and :: [Bool] -> Bool
and [] = True
and (b:bs) = b && Main.and bs
concat :: [[a]] -> [a]
concat [] = []
concat (xs:xss) = xs ++ Main.concat xss
replicate :: Int -> a -> [a]
replicate 0 _ = []
replicate n v = v : Main.replicate (n - 1) v
(!!) :: [a] -> Int -> a
(x:xs) !! 0 = x
xs !! n = (tail xs) Main.!! (n - 1)
elem :: Eq a => a -> [a] -> Bool
elem _ [] = False
elem v (x:xs) = if v == x then True else Main.elem v xs
| null | https://raw.githubusercontent.com/TomerAberbach/programming-in-haskell-exercises/a66830529ebc9c4d84d0e4c6e0ad58041b46bc32/parts/1/chapters/6/6.hs | haskell | and :: [Bool] -> Bool
and [] = True
and (b:bs) = b && Main.and bs
concat :: [[a]] -> [a]
concat [] = []
concat (xs:xss) = xs ++ Main.concat xss
replicate :: Int -> a -> [a]
replicate 0 _ = []
replicate n v = v : Main.replicate (n - 1) v
(!!) :: [a] -> Int -> a
(x:xs) !! 0 = x
xs !! n = (tail xs) Main.!! (n - 1)
elem :: Eq a => a -> [a] -> Bool
elem _ [] = False
elem v (x:xs) = if v == x then True else Main.elem v xs
| |
4a50c272bd5930b794ee65982197ab2271ee47c123012ab2f3596f04c3110f36 | agocorona/TCache | Triggers.hs | # LANGUAGE ExistentialQuantification , DeriveDataTypeable #
module Data.TCache.Triggers(DBRef(..),Elem(..),Status(..),addTrigger,applyTriggers) where
import Data.TCache.IResource
import Data.TCache.Defs
import Data.Typeable
import Data.IORef
import System.IO.Unsafe
import Unsafe.Coerce
import GHC.Conc (STM, unsafeIOToSTM)
import Data.Maybe(fromMaybe, fromJust)
import Data.List(nubBy)
--import Debug.Trace
newtype TriggerType a= TriggerType (DBRef a -> Maybe a -> STM()) deriving Typeable
data CMTrigger= forall a.(IResource a, Typeable a) => CMTrigger !(DBRef a -> Maybe a -> STM())
cmtriggers :: IORef [(TypeRep ,[CMTrigger])]
# NOINLINE cmtriggers #
cmtriggers = unsafePerformIO $ newIORef []
| Add an user defined trigger to the list of triggers
Trriggers are called just before an object of the given type is created , modified or deleted .
The DBRef to the object and the new value is passed to the trigger .
The called trigger function has two parameters : the DBRef being accesed
( which still contains the old value ) , and the new value .
If the DBRef is being deleted , the second parameter is ' Nothing ' .
if the DBRef contains Nothing , then the object is being created
Trriggers are called just before an object of the given type is created, modified or deleted.
The DBRef to the object and the new value is passed to the trigger.
The called trigger function has two parameters: the DBRef being accesed
(which still contains the old value), and the new value.
If the DBRef is being deleted, the second parameter is 'Nothing'.
if the DBRef contains Nothing, then the object is being created
-}
addTrigger :: (IResource a, Typeable a) => (DBRef a -> Maybe a -> STM()) -> IO()
addTrigger tr = do
map' <- readIORef cmtriggers
writeIORef cmtriggers $
let ts = mbToList $ lookup atype map'
in nubByType $ (atype ,CMTrigger tr : ts) : map'
where
nubByType= nubBy (\(t,_)(t',_) -> t==t')
(_,atype:_)= splitTyConApp . typeOf $ TriggerType tr
mbToList :: Maybe [a] -> [a]
mbToList = fromMaybe []
| internally called when a DBRef is modified / deleted / created
applyTriggers:: (IResource a, Typeable a) => [DBRef a] -> [Maybe a] -> STM()
applyTriggers [] _ = return()
applyTriggers dbrfs mas = do
map' <- unsafeIOToSTM $ readIORef cmtriggers
let ts = mbToList $ lookup (typeOf $ fromJust (head mas)) map'
mapM_ f ts
where
f t= mapM2_ (f1 t) dbrfs mas
f1 ::(IResource a, Typeable a) => CMTrigger -> DBRef a -> Maybe a -> STM()
f1 (CMTrigger t)= unsafeCoerce t
mapM2_ :: Monad m => (t1 -> t2 -> m a) -> [t1] -> [t2] -> m ()
mapM2_ _ [] _ = return()
mapM2_ _ _ [] = return()
mapM2_ f (x:xs) (y:ys)= f x y >> mapM2_ f xs ys
| null | https://raw.githubusercontent.com/agocorona/TCache/72158de657f72c3b480cea1878b5cebfbfd65d13/Data/TCache/Triggers.hs | haskell | import Debug.Trace | # LANGUAGE ExistentialQuantification , DeriveDataTypeable #
module Data.TCache.Triggers(DBRef(..),Elem(..),Status(..),addTrigger,applyTriggers) where
import Data.TCache.IResource
import Data.TCache.Defs
import Data.Typeable
import Data.IORef
import System.IO.Unsafe
import Unsafe.Coerce
import GHC.Conc (STM, unsafeIOToSTM)
import Data.Maybe(fromMaybe, fromJust)
import Data.List(nubBy)
newtype TriggerType a= TriggerType (DBRef a -> Maybe a -> STM()) deriving Typeable
data CMTrigger= forall a.(IResource a, Typeable a) => CMTrigger !(DBRef a -> Maybe a -> STM())
cmtriggers :: IORef [(TypeRep ,[CMTrigger])]
# NOINLINE cmtriggers #
cmtriggers = unsafePerformIO $ newIORef []
| Add an user defined trigger to the list of triggers
Trriggers are called just before an object of the given type is created , modified or deleted .
The DBRef to the object and the new value is passed to the trigger .
The called trigger function has two parameters : the DBRef being accesed
( which still contains the old value ) , and the new value .
If the DBRef is being deleted , the second parameter is ' Nothing ' .
if the DBRef contains Nothing , then the object is being created
Trriggers are called just before an object of the given type is created, modified or deleted.
The DBRef to the object and the new value is passed to the trigger.
The called trigger function has two parameters: the DBRef being accesed
(which still contains the old value), and the new value.
If the DBRef is being deleted, the second parameter is 'Nothing'.
if the DBRef contains Nothing, then the object is being created
-}
addTrigger :: (IResource a, Typeable a) => (DBRef a -> Maybe a -> STM()) -> IO()
addTrigger tr = do
map' <- readIORef cmtriggers
writeIORef cmtriggers $
let ts = mbToList $ lookup atype map'
in nubByType $ (atype ,CMTrigger tr : ts) : map'
where
nubByType= nubBy (\(t,_)(t',_) -> t==t')
(_,atype:_)= splitTyConApp . typeOf $ TriggerType tr
mbToList :: Maybe [a] -> [a]
mbToList = fromMaybe []
| internally called when a DBRef is modified / deleted / created
applyTriggers:: (IResource a, Typeable a) => [DBRef a] -> [Maybe a] -> STM()
applyTriggers [] _ = return()
applyTriggers dbrfs mas = do
map' <- unsafeIOToSTM $ readIORef cmtriggers
let ts = mbToList $ lookup (typeOf $ fromJust (head mas)) map'
mapM_ f ts
where
f t= mapM2_ (f1 t) dbrfs mas
f1 ::(IResource a, Typeable a) => CMTrigger -> DBRef a -> Maybe a -> STM()
f1 (CMTrigger t)= unsafeCoerce t
mapM2_ :: Monad m => (t1 -> t2 -> m a) -> [t1] -> [t2] -> m ()
mapM2_ _ [] _ = return()
mapM2_ _ _ [] = return()
mapM2_ f (x:xs) (y:ys)= f x y >> mapM2_ f xs ys
|
0180190f545bb7f3a01da37a46791085ec446c94e1d719f314d6c0a83a768dc7 | startalkIM/ejabberd | node_flat_sql.erl | %%%----------------------------------------------------------------------
File : node_flat_sql.erl
Author :
Purpose : Standard PubSub node plugin with ODBC backend
Created : 1 Dec 2007 by
%%%
%%%
ejabberd , Copyright ( C ) 2002 - 2016 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
%%%
%%%----------------------------------------------------------------------
@doc The module < strong>{@module}</strong > is the default PubSub plugin .
< p > It is used as a default for all unknown PubSub node type . It can serve
%%% as a developer basis and reference to build its own custom pubsub node
%%% types.</p>
< p > PubSub plugin nodes are using the { @link gen_node } behaviour.</p >
-module(node_flat_sql).
-behaviour(gen_pubsub_node).
-author('').
-compile([{parse_transform, ejabberd_sql_pt}]).
-include("pubsub.hrl").
-include("jlib.hrl").
-include("ejabberd_sql_pt.hrl").
-export([init/3, terminate/2, options/0, features/0,
create_node_permission/6, create_node/2, delete_node/1,
purge_node/2, subscribe_node/8, unsubscribe_node/4,
publish_item/7, delete_item/4, remove_extra_items/3,
get_entity_affiliations/2, get_node_affiliations/1,
get_affiliation/2, set_affiliation/3,
get_entity_subscriptions/2, get_node_subscriptions/1,
get_subscriptions/2, set_subscriptions/4,
get_pending_nodes/2, get_states/1, get_state/2,
set_state/1, get_items/7, get_items/3, get_item/7,
get_item/2, set_item/1, get_item_name/3, node_to_path/1,
path_to_node/1,
get_entity_subscriptions_for_send_last/2, get_last_items/3]).
-export([decode_jid/1, encode_jid/1,
encode_jid_like/1,
decode_affiliation/1, decode_subscriptions/1,
encode_affiliation/1, encode_subscriptions/1,
encode_host/1,
encode_host_like/1]).
%% @doc Plugin start-up hook; nothing to initialize for the SQL backend
%% (per-subscription option storage is currently disabled).
init(_Host, _ServerHost, _Opts) ->
    %%pubsub_subscription_sql:init(),
    ok.
%% @doc Plugin shutdown hook; no backend state to release.
terminate(_Host, _ServerHost) ->
    ok.
%% @doc Default node options: node_flat's defaults plus SQL storage and RSM paging.
options() ->
    [{sql, true}, {rsm, true} | node_flat:options()].
%% @doc Advertised pubsub features: node_flat's plus result-set management (RSM).
features() ->
    [<<"rsm">> | node_flat:features()].
%% @doc Node-creation permission check is storage-independent; delegate to node_flat.
create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access) ->
    node_flat:create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access).
%% @doc Insert the initial state row for a new node: the (bare-JID) creator
%% gets the 'owner' affiliation and an empty subscription list.
%% Errors from the insert are swallowed by the catch.
create_node(Nidx, Owner) ->
    {_U, _S, _R} = OwnerKey = jid:tolower(jid:remove_resource(Owner)),
    J = encode_jid(OwnerKey),
    A = encode_affiliation(owner),
    S = encode_subscriptions([]),
    catch ejabberd_sql:sql_query_t(
	    ?SQL("insert into pubsub_state("
		 "nodeid, jid, affiliation, subscriptions) "
		 "values (%(Nidx)d, %(J)s, %(A)s, %(S)s)")),
    {result, {default, broadcast}}.
%% @doc Collect, for each node being deleted, its current subscriber list
%% (JID plus decoded subscriptions) so mod_pubsub can notify them.
%% On any SQL failure the node is reported with an empty subscriber list.
delete_node(Nodes) ->
    Reply = lists:map(fun (#pubsub_node{id = Nidx} = PubsubNode) ->
		Subscriptions = case catch
		    ejabberd_sql:sql_query_t(
		       ?SQL("select @(jid)s, @(subscriptions)s "
			    "from pubsub_state where nodeid=%(Nidx)d"))
		of
		    {selected, RItems} ->
			[{decode_jid(SJID), decode_subscriptions(Subs)} ||
			    {SJID, Subs} <- RItems];
		    _ ->
			[]
		end,
		{PubsubNode, Subscriptions}
	end, Nodes),
    {result, {default, broadcast, Reply}}.
%% @doc Subscribe Subscriber to the node, enforcing the node access model
%% (presence / roster / whitelist / authorize) against the subscriber's
%% affiliation. Only the bare JID that owns Subscriber may issue the request.
%% Returns {result, {default, subscribed|pending, SubId[, send_last]}}.
%% NOTE(review): two commented-out lines in this clause had been mangled
%% into bare (invalid) tokens by text extraction; they are restored as the
%% comments they originally were.
subscribe_node(Nidx, Sender, Subscriber, AccessModel,
	    SendLast, PresenceSubscription, RosterGroup, _Options) ->
    SubKey = jid:tolower(Subscriber),
    GenKey = jid:remove_resource(SubKey),
    Authorized = jid:tolower(jid:remove_resource(Sender)) == GenKey,
    {Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
    Whitelisted = lists:member(Affiliation, [member, publisher, owner]),
    PendingSubscription = lists:any(fun
		({pending, _}) -> true;
		(_) -> false
	    end,
	    Subscriptions),
    Owner = Affiliation == owner,
    if not Authorized ->
	    {error,
		?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"invalid-jid">>)};
	(Affiliation == outcast) or (Affiliation == publish_only) ->
	    {error, ?ERR_FORBIDDEN};
	PendingSubscription ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"pending-subscription">>)};
	(AccessModel == presence) and (not PresenceSubscription) and (not Owner) ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"presence-subscription-required">>)};
	(AccessModel == roster) and (not RosterGroup) and (not Owner) ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"not-in-roster-group">>)};
	(AccessModel == whitelist) and (not Whitelisted) and (not Owner) ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_ALLOWED), <<"closed-node">>)};
	%%MustPay ->
	%%	% Payment is required for a subscription
	%%	{error, ?ERR_PAYMENT_REQUIRED};
	%%ForbiddenAnonymous ->
	%%	% Requesting entity is anonymous
	%%	{error, ?ERR_FORBIDDEN};
	true ->
	    %%{result, SubId} = pubsub_subscription_sql:subscribe_node(Subscriber, Nidx, Options),
	    %% Reuse an existing 'subscribed' entry if there is one,
	    %% otherwise mint a new sub id (pending under 'authorize').
	    {NewSub, SubId} = case Subscriptions of
		[{subscribed, Id}|_] ->
		    {subscribed, Id};
		[] ->
		    Id = pubsub_subscription_sql:make_subid(),
		    Sub = case AccessModel of
			authorize -> pending;
			_ -> subscribed
		    end,
		    update_subscription(Nidx, SubKey, [{Sub, Id} | Subscriptions]),
		    {Sub, Id}
	    end,
	    case {NewSub, SendLast} of
		{subscribed, never} ->
		    {result, {default, subscribed, SubId}};
		{subscribed, _} ->
		    {result, {default, subscribed, SubId, send_last}};
		{_, _} ->
		    {result, {default, pending, SubId}}
	    end
    end.
%% @doc Remove one (or all) of Subscriber's subscriptions on the node.
%% A bare SubId selects a specific subscription; the atom 'all' removes
%% every one; with no SubId a single existing subscription is removed,
%% and multiple candidates yield a subid-required error (XEP-0060).
%% NOTE(review): three comment lines in this clause had lost their %%
%% prefix during text extraction (leaving invalid tokens inside the if);
%% the prefixes are restored here.
unsubscribe_node(Nidx, Sender, Subscriber, SubId) ->
    SubKey = jid:tolower(Subscriber),
    GenKey = jid:remove_resource(SubKey),
    Authorized = jid:tolower(jid:remove_resource(Sender)) == GenKey,
    {Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, SubKey),
    SubIdExists = case SubId of
	<<>> -> false;
	Binary when is_binary(Binary) -> true;
	_ -> false
    end,
    if
	%% Requesting entity is prohibited from unsubscribing entity
	not Authorized ->
	    {error, ?ERR_FORBIDDEN};
	%% Entity did not specify SubId
	%%SubId == "", ?? ->
	%%	{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
	%% Invalid subscription identifier
	%%InvalidSubId ->
	%%	{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
	%% Requesting entity is not a subscriber
	Subscriptions == [] ->
	    {error,
		?ERR_EXTENDED((?ERR_UNEXPECTED_REQUEST_CANCEL), <<"not-subscribed">>)};
	%% Subid supplied, so use that.
	SubIdExists ->
	    Sub = first_in_list(fun
			({_, S}) when S == SubId -> true;
			(_) -> false
		    end,
		    Subscriptions),
	    case Sub of
		{value, S} ->
		    delete_subscription(SubKey, Nidx, S, Affiliation, Subscriptions),
		    {result, default};
		false ->
		    {error,
			?ERR_EXTENDED((?ERR_UNEXPECTED_REQUEST_CANCEL), <<"not-subscribed">>)}
	    end;
	%% Asking to remove all subscriptions to the given node
	SubId == all ->
	    [delete_subscription(SubKey, Nidx, S, Affiliation, Subscriptions)
		|| S <- Subscriptions],
	    {result, default};
	%% No subid supplied, but there's only one matching subscription
	length(Subscriptions) == 1 ->
	    delete_subscription(SubKey, Nidx, hd(Subscriptions), Affiliation, Subscriptions),
	    {result, default};
	%% No subid and more than one possible subscription match.
	true ->
	    {error,
		?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"subid-required">>)}
    end.
%% @doc Drop one {Subscription, SubId} pair from the subscriber's state row;
%% delete the whole row when neither an affiliation nor a subscription remains.
%% NOTE(review): the disabled pubsub_subscription_sql call below had been
%% mangled into bare tokens by text extraction; restored as a comment.
delete_subscription(SubKey, Nidx, {Subscription, SubId}, Affiliation, Subscriptions) ->
    NewSubs = Subscriptions -- [{Subscription, SubId}],
    %%pubsub_subscription_sql:unsubscribe_node(SubKey, Nidx, SubId),
    case {Affiliation, NewSubs} of
	{none, []} -> del_state(Nidx, SubKey);
	_ -> update_subscription(Nidx, SubKey, NewSubs)
    end.
%% @doc Store a new item (or overwrite one with the same ItemId) if the
%% publish model and the publisher's affiliation/subscription allow it,
%% then trim the node to MaxItems. Returns the list of dropped item ids
%% for broadcast; with MaxItems == 0 nothing is persisted.
publish_item(Nidx, Publisher, PublishModel, MaxItems, ItemId, Payload,
	    _PubOpts) ->
    SubKey = jid:tolower(Publisher),
    GenKey = jid:remove_resource(SubKey),
    {Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
    Subscribed = case PublishModel of
	subscribers -> node_flat:is_subscribed(Subscriptions);
	_ -> undefined
    end,
    if not ((PublishModel == open) or
		(PublishModel == publishers) and
		((Affiliation == owner)
		    or (Affiliation == publisher)
		    or (Affiliation == publish_only))
		or (Subscribed == true)) ->
	    {error, ?ERR_FORBIDDEN};
	true ->
	    if MaxItems > 0 ->
		    PubId = {p1_time_compat:timestamp(), SubKey},
		    set_item(#pubsub_item{itemid = {ItemId, Nidx},
			    creation = {p1_time_compat:timestamp(), GenKey},
			    modification = PubId,
			    payload = Payload}),
		    %% Put ItemId first so trimming keeps the newest item.
		    Items = [ItemId | itemids(Nidx, GenKey) -- [ItemId]],
		    {result, {_, OI}} = remove_extra_items(Nidx, MaxItems, Items),
		    {result, {default, broadcast, OI}};
		true ->
		    {result, {default, broadcast, []}}
	    end
    end.
%% @doc Keep at most MaxItems item ids (list is assumed newest-first) and
%% delete the remainder from storage. Returns {result, {Kept, Dropped}};
%% the atom 'unlimited' disables trimming entirely.
remove_extra_items(_Nidx, unlimited, ItemIds) ->
    {result, {ItemIds, []}};
remove_extra_items(Nidx, MaxItems, ItemIds) ->
    SplitAt = erlang:min(MaxItems, length(ItemIds)),
    {Kept, Dropped} = lists:split(SplitAt, ItemIds),
    del_items(Nidx, Dropped),
    {result, {Kept, Dropped}}.
%% @doc Delete one item. Allowed for publishers and owners, on open nodes,
%% or when the requester's bare JID created the item. The {updated, 1}
%% row count distinguishes a real delete from a missing item.
delete_item(Nidx, Publisher, PublishModel, ItemId) ->
    SubKey = jid:tolower(Publisher),
    GenKey = jid:remove_resource(SubKey),
    {result, Affiliation} = get_affiliation(Nidx, GenKey),
    Allowed = Affiliation == publisher orelse
	Affiliation == owner orelse
	PublishModel == open orelse
	case get_item(Nidx, ItemId) of
	    {result, #pubsub_item{creation = {_, GenKey}}} -> true;
	    _ -> false
	end,
    if not Allowed ->
	    {error, ?ERR_FORBIDDEN};
	true ->
	    case del_item(Nidx, ItemId) of
		{updated, 1} -> {result, {default, broadcast}};
		_ -> {error, ?ERR_ITEM_NOT_FOUND}
	    end
    end.
%% @doc Delete every item on the node; only an owner may purge.
purge_node(Nidx, Owner) ->
    SubKey = jid:tolower(Owner),
    GenKey = jid:remove_resource(SubKey),
    GenState = get_state(Nidx, GenKey),
    case GenState of
	#pubsub_state{affiliation = owner} ->
	    {result, States} = get_states(Nidx),
	    lists:foreach(fun
		    (#pubsub_state{items = []}) -> ok;
		    (#pubsub_state{items = Items}) -> del_items(Nidx, Items)
		end,
		States),
	    {result, {default, broadcast}};
	_ ->
	    {error, ?ERR_FORBIDDEN}
    end.
%% @doc List {Node, Affiliation} for every node on Host where Owner's
%% bare JID has a state row. SQL failures yield an empty list.
get_entity_affiliations(Host, Owner) ->
    SubKey = jid:tolower(Owner),
    GenKey = jid:remove_resource(SubKey),
    H = encode_host(Host),
    J = encode_jid(GenKey),
    Reply = case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(node)s, @(type)s, @(i.nodeid)d, @(affiliation)s "
		"from pubsub_state i, pubsub_node n where "
		"i.nodeid = n.nodeid and jid=%(J)s and host=%(H)s"))
    of
	{selected, RItems} ->
	    [{nodetree_tree_sql:raw_to_node(Host, {N, <<"">>, T, I}), decode_affiliation(A)}
		|| {N, T, I, A} <- RItems];
	_ ->
	    []
    end,
    {result, Reply}.
%% @doc List {JID, Affiliation} for every entity with a state row on the node.
get_node_affiliations(Nidx) ->
    Reply = case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(jid)s, @(affiliation)s from pubsub_state "
		"where nodeid=%(Nidx)d"))
    of
	{selected, RItems} ->
	    [{decode_jid(J), decode_affiliation(A)} || {J, A} <- RItems];
	_ ->
	    []
    end,
    {result, Reply}.
%% @doc Affiliation of Owner's bare JID on the node; 'none' when there is
%% no state row or the query fails.
get_affiliation(Nidx, Owner) ->
    SubKey = jid:tolower(Owner),
    GenKey = jid:remove_resource(SubKey),
    J = encode_jid(GenKey),
    Reply = case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(affiliation)s from pubsub_state "
		"where nodeid=%(Nidx)d and jid=%(J)s"))
    of
	{selected, [{A}]} ->
	    decode_affiliation(A);
	_ ->
	    none
    end,
    {result, Reply}.
%% @doc Set the bare-JID affiliation; dropping to 'none' with no remaining
%% subscriptions removes the state row entirely.
set_affiliation(Nidx, Owner, Affiliation) ->
    SubKey = jid:tolower(Owner),
    GenKey = jid:remove_resource(SubKey),
    {_, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey),
    case {Affiliation, Subscriptions} of
	{none, []} -> del_state(Nidx, GenKey);
	_ -> update_affiliation(Nidx, GenKey, Affiliation)
    end.
%% @doc All subscriptions of Owner on Host. When queried with a bare JID,
%% full-JID rows are included via a LIKE on "barejid/%"; with a full JID,
%% both the exact full and bare JID rows are matched. An empty stored
%% subscription list is reported as a single 'none' entry.
get_entity_subscriptions(Host, Owner) ->
    SubKey = jid:tolower(Owner),
    GenKey = jid:remove_resource(SubKey),
    H = encode_host(Host),
    SJ = encode_jid(SubKey),
    GJ = encode_jid(GenKey),
    GJLike = <<(encode_jid_like(GenKey))/binary, "/%">>,
    Query =
	case SubKey of
	    GenKey ->
		?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
		     " @(jid)s, @(subscriptions)s "
		     "from pubsub_state i, pubsub_node n "
		     "where i.nodeid = n.nodeid and "
		     "(jid=%(GJ)s or jid like %(GJLike)s escape '^')"
		     " and host=%(H)s");
	    _ ->
		?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
		     " @(jid)s, @(subscriptions)s "
		     "from pubsub_state i, pubsub_node n "
		     "where i.nodeid = n.nodeid and"
		     " jid in (%(SJ)s, %(GJ)s) and host=%(H)s")
	end,
    Reply = case catch ejabberd_sql:sql_query_t(Query) of
	{selected, RItems} ->
	    lists:foldl(fun ({N, T, I, J, S}, Acc) ->
			Node = nodetree_tree_sql:raw_to_node(Host, {N, <<"">>, T, I}),
			Jid = decode_jid(J),
			case decode_subscriptions(S) of
			    [] ->
				[{Node, none, Jid} | Acc];
			    Subs ->
				lists:foldl(fun ({Sub, SubId}, Acc2) ->
					    [{Node, Sub, SubId, Jid} | Acc2]
				    end,
				    Acc, Subs)
			end
		end,
		[], RItems);
	_ ->
	    []
    end,
    {result, Reply}.
-spec get_entity_subscriptions_for_send_last(Host :: mod_pubsub:hostPubsub(),
	Owner :: jid()) ->
    {result, [{mod_pubsub:pubsubNode(),
	    mod_pubsub:subscription(),
	    mod_pubsub:subId(),
	    ljid()}]}.
%% @doc Like get_entity_subscriptions/2, but restricted to nodes whose
%% send_last_published_item option is 'on_sub_and_presence', so the caller
%% can push the last item on presence.
get_entity_subscriptions_for_send_last(Host, Owner) ->
    SubKey = jid:tolower(Owner),
    GenKey = jid:remove_resource(SubKey),
    H = encode_host(Host),
    SJ = encode_jid(SubKey),
    GJ = encode_jid(GenKey),
    GJLike = <<(encode_jid_like(GenKey))/binary, "/%">>,
    Query =
	case SubKey of
	    GenKey ->
		?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
		     " @(jid)s, @(subscriptions)s "
		     "from pubsub_state i, pubsub_node n, pubsub_node_option o "
		     "where i.nodeid = n.nodeid and n.nodeid = o.nodeid and name='send_last_published_item' "
		     "and val='on_sub_and_presence' and "
		     "(jid=%(GJ)s or jid like %(GJLike)s escape '^')"
		     " and host=%(H)s");
	    _ ->
		?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
		     " @(jid)s, @(subscriptions)s "
		     "from pubsub_state i, pubsub_node n, pubsub_node_option o "
		     "where i.nodeid = n.nodeid and n.nodeid = o.nodeid and name='send_last_published_item' "
		     "and val='on_sub_and_presence' and"
		     " jid in (%(SJ)s, %(GJ)s) and host=%(H)s")
	end,
    Reply = case catch ejabberd_sql:sql_query_t(Query) of
	{selected, RItems} ->
	    lists:foldl(fun ({N, T, I, J, S}, Acc) ->
			Node = nodetree_tree_sql:raw_to_node(Host, {N, <<"">>, T, I}),
			Jid = decode_jid(J),
			case decode_subscriptions(S) of
			    [] ->
				[{Node, none, Jid} | Acc];
			    Subs ->
				lists:foldl(fun ({Sub, SubId}, Acc2) ->
					    [{Node, Sub, SubId, Jid}| Acc2]
				    end,
				    Acc, Subs)
			end
		end,
		[], RItems);
	_ ->
	    []
    end,
    {result, Reply}.
%% @doc List all subscriptions on the node as {Jid, Sub, SubId} tuples;
%% entities with an empty subscription list appear as {Jid, none}.
get_node_subscriptions(Nidx) ->
    Reply = case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(jid)s, @(subscriptions)s from pubsub_state "
		"where nodeid=%(Nidx)d"))
    of
	{selected, RItems} ->
	    lists:foldl(fun ({J, S}, Acc) ->
			Jid = decode_jid(J),
			case decode_subscriptions(S) of
			    [] ->
				[{Jid, none} | Acc];
			    Subs ->
				lists:foldl(fun ({Sub, SubId}, Acc2) ->
					    [{Jid, Sub, SubId} | Acc2]
				    end,
				    Acc, Subs)
			end
		end,
		[], RItems);
	_ ->
	    []
    end,
    {result, Reply}.
%% @doc Decoded subscription list of the exact (possibly full) JID on the
%% node; empty on missing row or SQL failure.
get_subscriptions(Nidx, Owner) ->
    SubKey = jid:tolower(Owner),
    J = encode_jid(SubKey),
    Reply = case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(subscriptions)s from pubsub_state"
		" where nodeid=%(Nidx)d and jid=%(J)s"))
    of
	{selected, [{S}]} ->
	    decode_subscriptions(S);
	_ ->
	    []
    end,
    {result, Reply}.
%% @doc Owner-side subscription management: create, replace or remove a
%% subscription entry. An empty SubId is only accepted when the entity has
%% exactly one subscription; 'none' as Subscription means removal.
set_subscriptions(Nidx, Owner, Subscription, SubId) ->
    SubKey = jid:tolower(Owner),
    SubState = get_state_without_itemids(Nidx, SubKey),
    case {SubId, SubState#pubsub_state.subscriptions} of
	{_, []} ->
	    case Subscription of
		none ->
		    {error,
			?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"not-subscribed">>)};
		_ ->
		    new_subscription(Nidx, Owner, Subscription, SubState)
	    end;
	{<<>>, [{_, SID}]} ->
	    case Subscription of
		none -> unsub_with_subid(Nidx, SID, SubState);
		_ -> replace_subscription({Subscription, SID}, SubState)
	    end;
	{<<>>, [_ | _]} ->
	    {error,
		?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"subid-required">>)};
	_ ->
	    case Subscription of
		none -> unsub_with_subid(Nidx, SubId, SubState);
		_ -> replace_subscription({Subscription, SubId}, SubState)
	    end
    end.
%% @doc Replace the subscription state carrying NewSub's SubId inside
%% SubState's subscription list and persist the updated state.
replace_subscription(NewSub, SubState) ->
    NewSubs = replace_subscription(NewSub, SubState#pubsub_state.subscriptions, []),
    set_state(SubState#pubsub_state{subscriptions = NewSubs}).
%% @doc Walk the subscription list, swapping in the new subscription state
%% for the entry whose SubId matches, and keeping every other entry as-is
%% (result order is reversed, which callers do not rely on).
%% Fix: the original had no clause for a non-matching head, so any state
%% holding a second subscription id crashed with function_clause.
replace_subscription(_, [], Acc) ->
    Acc;
replace_subscription({Sub, SubId}, [{_, SubId} | T], Acc) ->
    replace_subscription({Sub, SubId}, T, [{Sub, SubId} | Acc]);
replace_subscription(NewSub, [Other | T], Acc) ->
    replace_subscription(NewSub, T, [Other | Acc]).
%% @doc Add a freshly-minted subscription id to the entity's state and
%% persist it; returns {Subscription, SubId}.
%% NOTE(review): the disabled pubsub_subscription_sql call below had been
%% mangled into live-looking tokens by text extraction (which would have
%% double-bound SubId); restored as the comment it originally was.
new_subscription(_Nidx, _Owner, Subscription, SubState) ->
    %%{result, SubId} = pubsub_subscription_sql:subscribe_node(Owner, Nidx, []),
    SubId = pubsub_subscription_sql:make_subid(),
    Subscriptions = [{Subscription, SubId} | SubState#pubsub_state.subscriptions],
    set_state(SubState#pubsub_state{subscriptions = Subscriptions}),
    {Subscription, SubId}.
%% @doc Remove the subscription identified by SubId from the state; if that
%% leaves no subscriptions and no affiliation, drop the state row.
%% NOTE(review): the disabled line below was mangled beyond recovery by
%% text extraction ("pubsub_subscription_sql : , Nidx , SubId ) ,"); it is
%% restored as a comment — original call text unverified, TODO confirm
%% against upstream.
unsub_with_subid(Nidx, SubId, SubState) ->
    %%pubsub_subscription_sql:unsubscribe_node(..., Nidx, SubId),
    NewSubs = [{S, Sid}
	    || {S, Sid} <- SubState#pubsub_state.subscriptions,
		SubId =/= Sid],
    case {NewSubs, SubState#pubsub_state.affiliation} of
	{[], none} -> del_state(Nidx, element(1, SubState#pubsub_state.stateid));
	_ -> set_state(SubState#pubsub_state{subscriptions = NewSubs})
    end.
%% @doc Nodes owned by Owner that have at least one pending subscription.
%% NOTE(review): this SQL backend reads the mnesia pubsub_state table here,
%% unlike the rest of the module — presumably intentional upstream behavior,
%% but verify that pending states are actually visible via mnesia when SQL
%% storage is in use.
get_pending_nodes(Host, Owner) ->
    GenKey = jid:remove_resource(jid:tolower(Owner)),
    States = mnesia:match_object(#pubsub_state{stateid = {GenKey, '_'},
		affiliation = owner, _ = '_'}),
    Nidxxs = [Nidx || #pubsub_state{stateid = {_, Nidx}} <- States],
    NodeTree = mod_pubsub:tree(Host),
    Reply = mnesia:foldl(fun (#pubsub_state{stateid = {_, Nidx}} = S, Acc) ->
		case lists:member(Nidx, Nidxxs) of
		    true ->
			case get_nodes_helper(NodeTree, S) of
			    {value, Node} -> [Node | Acc];
			    false -> Acc
			end;
		    false ->
			Acc
		end
	end,
	[], pubsub_state),
    {result, Reply}.
%% @doc If the state carries any pending subscription, resolve its node id
%% to the node name through NodeTree; otherwise (or if the node is gone)
%% return false.
get_nodes_helper(NodeTree, #pubsub_state{stateid = {_, N}, subscriptions = Subs}) ->
    HasPending = fun
	({pending, _}) -> true;
	(pending) -> true;
	(_) -> false
    end,
    case lists:any(HasPending, Subs) of
	true ->
	    case NodeTree:get_node(N) of
		#pubsub_node{nodeid = {_, Node}} -> {value, Node};
		_ -> false
	    end;
	false ->
	    false
    end.
%% @doc Build a #pubsub_state{} for every entity on the node, including the
%% item ids each entity published (one extra query per state via itemids/2).
get_states(Nidx) ->
    case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(jid)s, @(affiliation)s, @(subscriptions)s "
		"from pubsub_state where nodeid=%(Nidx)d"))
    of
	{selected, RItems} ->
	    {result,
		lists:map(fun ({SJID, Aff, Subs}) ->
			    JID = decode_jid(SJID),
			    #pubsub_state{stateid = {JID, Nidx},
				items = itemids(Nidx, JID),
				affiliation = decode_affiliation(Aff),
				subscriptions = decode_subscriptions(Subs)}
		    end,
		    RItems)};
	_ ->
	    {result, []}
    end.
%% @doc State of one entity on the node, with its published item ids filled in.
get_state(Nidx, JID) ->
    State = get_state_without_itemids(Nidx, JID),
    {SJID, _} = State#pubsub_state.stateid,
    State#pubsub_state{items = itemids(Nidx, SJID)}.
-spec get_state_without_itemids(Nidx :: mod_pubsub:nodeIdx(), Key :: ljid()) ->
    mod_pubsub:pubsubState().
%% @doc State of one entity without the (expensive) item-id lookup; a
%% missing row yields a default state record for that JID.
get_state_without_itemids(Nidx, JID) ->
    J = encode_jid(JID),
    case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(jid)s, @(affiliation)s, @(subscriptions)s "
		"from pubsub_state "
		"where nodeid=%(Nidx)d and jid=%(J)s"))
    of
	{selected, [{SJID, Aff, Subs}]} ->
	    #pubsub_state{stateid = {decode_jid(SJID), Nidx},
		affiliation = decode_affiliation(Aff),
		subscriptions = decode_subscriptions(Subs)};
	_ ->
	    #pubsub_state{stateid = {JID, Nidx}}
    end.
%% @doc Persist a state record, taking the node id from the record itself.
set_state(State) ->
    {_, Nidx} = State#pubsub_state.stateid,
    set_state(Nidx, State).
%% @doc Upsert the state row keyed on (nodeid, jid), writing the encoded
%% affiliation and subscription list.
set_state(Nidx, State) ->
    {JID, _} = State#pubsub_state.stateid,
    J = encode_jid(JID),
    S = encode_subscriptions(State#pubsub_state.subscriptions),
    A = encode_affiliation(State#pubsub_state.affiliation),
    ?SQL_UPSERT_T(
       "pubsub_state",
       ["!nodeid=%(Nidx)d",
	"!jid=%(J)s",
	"affiliation=%(A)s",
	"subscriptions=%(S)s"
       ]),
    ok.
%% @doc Delete the state row of one JID on the node; errors are swallowed.
del_state(Nidx, JID) ->
    J = encode_jid(JID),
    catch ejabberd_sql:sql_query_t(
	    ?SQL("delete from pubsub_state"
		 " where jid=%(J)s and nodeid=%(Nidx)d")),
    ok.
%get_items(Nidx, _From) ->
%    case catch
%	ejabberd_sql:sql_query_t([<<"select itemid, publisher, creation, modification, payload "
%		    " from pubsub_item where nodeid='">>, Nidx,
%		<<"' order by modification desc;">>])
%    of
%	{selected,
%		[<<"itemid">>, <<"publisher">>, <<"creation">>, <<"modification">>, <<"payload">>], RItems} ->
%	    {result, [raw_to_item(Nidx, RItem) || RItem <- RItems]};
%	_ ->
%	    {result, []}
%    end.
%% @doc Paged item retrieval with RSM. The 'none' clause resolves the node's
%% max_items option and recurses with an #rsm_in{} of that size. The paged
%% clause translates the RSM direction/id/index into an order-by + bound on
%% the modification column, queries with a backend-specific LIMIT/TOP, and
%% returns {result, {Items, #rsm_out{}}}.
%% NOTE(review): one disabled case branch below had been mangled into bare
%% invalid tokens by text extraction; restored as a comment (original text
%% reconstructed, TODO confirm against upstream).
get_items(Nidx, From, none) ->
    MaxItems = case catch
	ejabberd_sql:sql_query_t(
	   ?SQL("select @(val)s from pubsub_node_option "
		"where nodeid=%(Nidx)d and name='max_items'"))
    of
	{selected, [{Value}]} ->
	    jlib:expr_to_term(Value);
	_ ->
	    ?MAXITEMS
    end,
    get_items(Nidx, From, #rsm_in{max = MaxItems});
get_items(Nidx, _From,
	    #rsm_in{max = M, direction = Direction, id = I, index = IncIndex}) ->
    Max = ejabberd_sql:escape(jlib:i2l(M)),
    %% Pick the comparison operator and sort order from the page direction;
    %% an empty anchor id means "from the edge" (no real bound).
    {Way, Order} = case Direction of
	aft when I == <<>> -> {<<"is not">>, <<"desc">>};
	aft -> {<<"<">>, <<"desc">>};
	before when I == <<>> -> {<<"is not">>, <<"asc">>};
	before -> {<<">">>, <<"asc">>};
	_ -> {<<"is not">>, <<"desc">>}
    end,
    SNidx = integer_to_binary(Nidx),
    %% Resolve the paging anchor: an explicit index is translated to the
    %% modification timestamp at that offset; an "attr@value" id is split.
    [AttrName, Id] = case I of
	undefined when IncIndex =/= undefined ->
	    case catch
		ejabberd_sql:sql_query_t([<<"select modification from pubsub_item pi "
			    "where exists ( select count(*) as count1 "
			    "from pubsub_item where nodeid='">>, SNidx,
			<<"' and modification > pi.modification having count1 = ">>,
			ejabberd_sql:escape(jlib:i2l(IncIndex)), <<" );">>])
	    of
		{selected, [_], [[O]]} ->
		    [<<"modification">>, <<"'", O/binary, "'">>];
		_ ->
		    [<<"modification">>, <<"null">>]
	    end;
	undefined ->
	    [<<"modification">>, <<"null">>];
	<<>> ->
	    [<<"modification">>, <<"null">>];
	I ->
	    [A, B] = str:tokens(ejabberd_sql:escape(jlib:i2l(I)), <<"@">>),
	    [A, <<"'", B/binary, "'">>]
    end,
    Count = case catch
	ejabberd_sql:sql_query_t([<<"select count(*) from pubsub_item where nodeid='">>, SNidx, <<"';">>])
    of
	{selected, [_], [[C]]} -> C;
	_ -> <<"0">>
    end,
    %% mssql has no LIMIT clause; use TOP there.
    Query = fun(mssql, _) ->
	    ejabberd_sql:sql_query_t(
		[<<"select top ">>, jlib:i2l(Max),
		    <<" itemid, publisher, creation, modification, payload "
		    "from pubsub_item where nodeid='">>, SNidx,
		    <<"' and ">>, AttrName, <<" ">>, Way, <<" ">>, Id, <<" order by ">>,
		    AttrName, <<" ">>, Order, <<";">>]);
	(_, _) ->
	    ejabberd_sql:sql_query_t(
		[<<"select itemid, publisher, creation, modification, payload "
		    "from pubsub_item where nodeid='">>, SNidx,
		    <<"' and ">>, AttrName, <<" ">>, Way, <<" ">>, Id, <<" order by ">>,
		    AttrName, <<" ">>, Order, <<" limit ">>, jlib:i2l(Max), <<" ;">>])
    end,
    case catch ejabberd_sql:sql_query_t(Query) of
	{selected,
		[<<"itemid">>, <<"publisher">>, <<"creation">>, <<"modification">>, <<"payload">>], RItems} ->
	    case RItems of
		[[_, _, _, F, _]|_] ->
		    %% Index of the first returned row = number of newer items.
		    Index = case catch
			ejabberd_sql:sql_query_t([<<"select count(*) from pubsub_item "
				    "where nodeid='">>, SNidx, <<"' and ">>,
				AttrName, <<" > '">>, F, <<"';">>])
		    of
			%%{selected, [_], [{C}, {In}]} -> [string:strip(C, both, $"), string:strip(In, both, $")];
			{selected, [_], [[In]]} -> In;
			_ -> <<"0">>
		    end,
		    [_, _, _, L, _] = lists:last(RItems),
		    RsmOut = #rsm_out{count = Count, index = Index,
			    first = <<"modification@", F/binary>>,
			    last = <<"modification@", (jlib:i2l(L))/binary>>},
		    {result, {[raw_to_item(Nidx, RItem) || RItem <- RItems], RsmOut}};
		[] ->
		    {result, {[], #rsm_out{count = Count}}}
	    end;
	_ ->
	    {result, {[], none}}
    end.
%% @doc Access-checked item retrieval: verify the requester may read from
%% the node under its access model, then delegate to get_items/3.
%% NOTE(review): a commented-out guard ("%%MustPay ->") had been mangled
%% into a bare "- >" token by text extraction; restored as a comment.
get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId, RSM) ->
    SubKey = jid:tolower(JID),
    GenKey = jid:remove_resource(SubKey),
    {Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
    Whitelisted = node_flat:can_fetch_item(Affiliation, Subscriptions),
    if %%SubId == "", ?? ->
	%% Entity has multiple subscriptions to the node but does not specify a subscription ID
	%{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
	%%InvalidSubId ->
	%% Entity is subscribed but specifies an invalid subscription ID
	%{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
	(Affiliation == outcast) or (Affiliation == publish_only) ->
	    {error, ?ERR_FORBIDDEN};
	(AccessModel == presence) and not PresenceSubscription ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"presence-subscription-required">>)};
	(AccessModel == roster) and not RosterGroup ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"not-in-roster-group">>)};
	(AccessModel == whitelist) and not Whitelisted ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_ALLOWED), <<"closed-node">>)};
	(AccessModel == authorize) and not Whitelisted ->
	    {error, ?ERR_FORBIDDEN};
	%%MustPay ->
	%%	% Payment is required for a subscription
	%%	{error, ?ERR_PAYMENT_REQUIRED};
	true ->
	    get_items(Nidx, JID, RSM)
    end.
%% @doc Fetch the Count most recently modified items on the node, using
%% TOP on mssql and LIMIT elsewhere. SQL failures yield an empty result.
get_last_items(Nidx, _From, Count) ->
    Limit = jlib:i2l(Count),
    SNidx = integer_to_binary(Nidx),
    Query = fun(mssql, _) ->
	    ejabberd_sql:sql_query_t(
		[<<"select top ">>, Limit,
		    <<" itemid, publisher, creation, modification, payload "
		    "from pubsub_item where nodeid='">>, SNidx,
		    <<"' order by modification desc ;">>]);
	(_, _) ->
	    ejabberd_sql:sql_query_t(
		[<<"select itemid, publisher, creation, modification, payload "
		    "from pubsub_item where nodeid='">>, SNidx,
		    <<"' order by modification desc limit ">>, Limit, <<";">>])
    end,
    case catch ejabberd_sql:sql_query_t(Query) of
	{selected,
		[<<"itemid">>, <<"publisher">>, <<"creation">>, <<"modification">>, <<"payload">>], RItems} ->
	    {result, [raw_to_item(Nidx, RItem) || RItem <- RItems]};
	_ ->
	    {result, []}
    end.
%% @doc Fetch a single item by id, distinguishing "not found" (empty result
%% set) from a database failure.
get_item(Nidx, ItemId) ->
    case catch ejabberd_sql:sql_query_t(
		 ?SQL("select @(itemid)s, @(publisher)s, @(creation)s,"
		      " @(modification)s, @(payload)s from pubsub_item"
		      " where nodeid=%(Nidx)d and itemid=%(ItemId)s"))
    of
	{selected, [RItem]} ->
	    {result, raw_to_item(Nidx, RItem)};
	{selected, []} ->
	    {error, ?ERR_ITEM_NOT_FOUND};
	{'EXIT', _} ->
	    {error, ?ERRT_INTERNAL_SERVER_ERROR(?MYLANG, <<"Database failure">>)}
    end.
%% @doc Access-checked single-item fetch: verify the requester may read
%% from the node under its access model, then delegate to get_item/2.
%% NOTE(review): a commented-out guard ("%%MustPay ->") had been mangled
%% into a bare "- >" token by text extraction; restored as a comment.
get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId) ->
    SubKey = jid:tolower(JID),
    GenKey = jid:remove_resource(SubKey),
    {Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
    Whitelisted = node_flat:can_fetch_item(Affiliation, Subscriptions),
    if %%SubId == "", ?? ->
	%% Entity has multiple subscriptions to the node but does not specify a subscription ID
	%{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
	%%InvalidSubId ->
	%% Entity is subscribed but specifies an invalid subscription ID
	%{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
	(Affiliation == outcast) or (Affiliation == publish_only) ->
	    {error, ?ERR_FORBIDDEN};
	(AccessModel == presence) and not PresenceSubscription ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"presence-subscription-required">>)};
	(AccessModel == roster) and not RosterGroup ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"not-in-roster-group">>)};
	(AccessModel == whitelist) and not Whitelisted ->
	    {error,
		?ERR_EXTENDED((?ERR_NOT_ALLOWED), <<"closed-node">>)};
	(AccessModel == authorize) and not Whitelisted ->
	    {error, ?ERR_FORBIDDEN};
	%%MustPay ->
	%%	% Payment is required for a subscription
	%%	{error, ?ERR_PAYMENT_REQUIRED};
	true ->
	    get_item(Nidx, ItemId)
    end.
%% @doc Upsert one item row: payload is serialized as concatenated XML,
%% timestamps as "megasec:sec:microsec" strings. The "-creation" upsert
%% field means the creation time is only written on insert, never updated.
set_item(Item) ->
    {ItemId, Nidx} = Item#pubsub_item.itemid,
    {C, _} = Item#pubsub_item.creation,
    {M, JID} = Item#pubsub_item.modification,
    P = encode_jid(JID),
    Payload = Item#pubsub_item.payload,
    XML = str:join([fxml:element_to_binary(X) || X<-Payload], <<>>),
    S = fun ({T1, T2, T3}) ->
	    str:join([jlib:i2l(T1, 6), jlib:i2l(T2, 6), jlib:i2l(T3, 6)], <<":">>)
    end,
    SM = S(M),
    SC = S(C),
    ?SQL_UPSERT_T(
       "pubsub_item",
       ["!nodeid=%(Nidx)d",
	"!itemid=%(ItemId)s",
	"publisher=%(P)s",
	"modification=%(SM)s",
	"payload=%(XML)s",
	"-creation=%(SC)s"
       ]),
    ok.
%% @doc Delete one item row identified by node index and item id.
%% Runs in the caller's SQL transaction; the catch swallows crashes
%% and the raw query result (e.g. {updated, N}) is returned to the
%% caller, which may inspect it (see delete_item/4).
del_item(Nidx, ItemId) ->
    catch ejabberd_sql:sql_query_t(
        ?SQL("delete from pubsub_item where itemid=%(ItemId)s"
             " and nodeid=%(Nidx)d")).
%% @doc Delete a set of items from a node.
%% The empty list is a no-op, a singleton delegates to del_item/2,
%% and longer lists are removed with a single "in (...)" delete
%% built from individually escaped, quoted item ids.
del_items(_, []) ->
    ok;
del_items(Nidx, [SingleItemId]) ->
    del_item(Nidx, SingleItemId);
del_items(Nidx, ItemIds) ->
    QuotedIds =
        str:join([[<<"'">>, ejabberd_sql:escape(Id), <<"'">>] || Id <- ItemIds],
                 <<",">>),
    NodeIdBin = integer_to_binary(Nidx),
    catch ejabberd_sql:sql_query_t(
            [<<"delete from pubsub_item where itemid in (">>,
             QuotedIds, <<") and nodeid='">>, NodeIdBin, <<"';">>]).
%% @doc For this node type an item's name is simply its id;
%% host and node are ignored.
get_item_name(_Host, _Node, ItemId) ->
    ItemId.
%% @doc Delegate node-name-to-path conversion to the plain flat plugin.
node_to_path(Node) ->
    node_flat:node_to_path(Node).
%% @doc Delegate path-to-node-name conversion to the plain flat plugin.
path_to_node(Path) ->
    node_flat:path_to_node(Path).
%% @doc Return {value, Elem} for the first element of the list for
%% which Pred(Elem) returns exactly 'true', or 'false' if none does.
first_in_list(Pred, List) ->
    case List of
        [] ->
            false;
        [Head | Rest] ->
            case Pred(Head) of
                true -> {value, Head};
                _ -> first_in_list(Pred, Rest)
            end
    end.
%% @doc List the ids of items published on node Nidx by JID, newest
%% first (ordered by modification, descending). Matches the exact
%% publisher JID as well as any full JID sharing that string prefix
%% (publisher like "<jid>/%", with '^' as the LIKE escape character).
%% Returns [] on any query failure.
itemids(Nidx, {_U, _S, _R} = JID) ->
    SJID = encode_jid(JID),
    SJIDLike = <<(ejabberd_sql:escape(encode_jid_like(JID)))/binary, "/%">>,
    case catch
        ejabberd_sql:sql_query_t(
            ?SQL("select @(itemid)s from pubsub_item where "
                 "nodeid=%(Nidx)d and (publisher=%(SJID)s"
                 " or publisher like %(SJIDLike)s escape '^') "
                 "order by modification desc"))
    of
        {selected, RItems} ->
            [ItemId || {ItemId} <- RItems];
        _ ->
            []
    end.
%% @doc Read the affiliation and decoded subscription list for one JID
%% on node Nidx in a single query. Falls back to {none, []} when no
%% row exists or the query fails.
select_affiliation_subscriptions(Nidx, JID) ->
    J = encode_jid(JID),
    case catch
        ejabberd_sql:sql_query_t(
            ?SQL("select @(affiliation)s, @(subscriptions)s from "
                 " pubsub_state where nodeid=%(Nidx)d and jid=%(J)s"))
    of
        {selected, [{A, S}]} ->
            {decode_affiliation(A), decode_subscriptions(S)};
        _ ->
            {none, []}
    end.
%% @doc As select_affiliation_subscriptions/2, but considering both a
%% bare (GenKey) and a full (SubKey) JID. When both keys are the same
%% term, a single combined query suffices; otherwise the affiliation
%% is taken from the bare JID and the subscription lists of both JIDs
%% are concatenated.
select_affiliation_subscriptions(Nidx, JID, JID) ->
    select_affiliation_subscriptions(Nidx, JID);
select_affiliation_subscriptions(Nidx, GenKey, SubKey) ->
    {result, Affiliation} = get_affiliation(Nidx, GenKey),
    {result, BareJidSubs} = get_subscriptions(Nidx, GenKey),
    {result, FullJidSubs} = get_subscriptions(Nidx, SubKey),
    {Affiliation, BareJidSubs++FullJidSubs}.
%% @doc Upsert the affiliation of JID on node Nidx.
%% NOTE(review): the "-subscriptions=''" entry presumably initializes
%% the subscriptions column only when a new row is inserted (leading
%% '-' marks insert-only columns in ?SQL_UPSERT_T) — confirm against
%% the macro definition.
update_affiliation(Nidx, JID, Affiliation) ->
    J = encode_jid(JID),
    A = encode_affiliation(Affiliation),
    ?SQL_UPSERT_T(
       "pubsub_state",
       ["!nodeid=%(Nidx)d",
        "!jid=%(J)s",
        "affiliation=%(A)s",
        "-subscriptions=''"
       ]).
%% @doc Upsert the encoded subscription list of JID on node Nidx.
%% NOTE(review): "-affiliation='n'" presumably sets the affiliation to
%% the encoded 'none' value only on insert (leading '-' marks
%% insert-only columns in ?SQL_UPSERT_T) — confirm against the macro.
update_subscription(Nidx, JID, Subscription) ->
    J = encode_jid(JID),
    S = encode_subscriptions(Subscription),
    ?SQL_UPSERT_T(
       "pubsub_state",
       ["!nodeid=%(Nidx)d",
        "!jid=%(J)s",
        "subscriptions=%(S)s",
        "-affiliation='n'"
       ]).
%% @doc Parse a JID string stored in SQL back into a lowercased
%% ljid() tuple.
-spec decode_jid(SJID :: binary()) -> ljid().
decode_jid(SJID) ->
    jid:tolower(jid:from_string(SJID)).
%% @doc Map the single-character affiliation code stored in SQL back
%% to its affiliation atom; any unrecognized code decodes to 'none'.
-spec decode_affiliation(Arg :: binary()) -> atom().
decode_affiliation(Code) ->
    case Code of
        <<"o">> -> owner;
        <<"p">> -> publisher;
        <<"u">> -> publish_only;
        <<"m">> -> member;
        <<"c">> -> outcast;
        _ -> none
    end.
%% @doc Map the single-character subscription code stored in SQL back
%% to its subscription atom; any unrecognized code decodes to 'none'.
-spec decode_subscription(Arg :: binary()) -> atom().
decode_subscription(Code) ->
    case Code of
        <<"s">> -> subscribed;
        <<"p">> -> pending;
        <<"u">> -> unconfigured;
        _ -> none
    end.
%% @doc Decode the comma-separated "code:SubId" pairs stored in the
%% subscriptions column into [{SubAtom, SubId}]. Malformed entries
%% (anything not splitting into exactly two ':' tokens) are dropped.
%% Note: foldl with cons reverses the order relative to the stored
%% string; callers visible in this module do not depend on order.
-spec decode_subscriptions(Subscriptions :: binary()) -> [] | [{atom(), binary()},...].
decode_subscriptions(Subscriptions) ->
    lists:foldl(fun (Subscription, Acc) ->
        case str:tokens(Subscription, <<":">>) of
            [S, SubId] -> [{decode_subscription(S), SubId} | Acc];
            _ -> Acc
        end
    end,
    [], str:tokens(Subscriptions, <<",">>)).
%% @doc Render an ljid() tuple as the JID string stored in SQL.
-spec encode_jid(JID :: ljid()) -> binary().
encode_jid(JID) ->
    jid:to_string(JID).
%% @doc Render a JID for use inside a SQL LIKE pattern: the JID string
%% with LIKE metacharacters escaped using '^' as the escape character
%% (queries using this value must specify "escape '^'").
-spec encode_jid_like(JID :: ljid()) -> binary().
encode_jid_like(JID) ->
    ejabberd_sql:escape_like_arg_circumflex(jid:to_string(JID)).
-spec encode_host(Host :: host()) -> binary().
encode_host({_U, _S, _R} = LJID) -> encode_jid(LJID);
encode_host(Host) -> Host.
%% @doc Render a pubsub host for use inside a SQL LIKE pattern:
%% LIKE metacharacters are escaped with '^' and the result is
%% additionally SQL-escaped.
-spec encode_host_like(Host :: host()) -> binary().
encode_host_like({_U, _S, _R} = LJID) -> ejabberd_sql:escape(encode_jid_like(LJID));
encode_host_like(Host) ->
    ejabberd_sql:escape(ejabberd_sql:escape_like_arg_circumflex(Host)).
%% @doc Map an affiliation atom to the single-character code stored in
%% SQL; anything unrecognized encodes as <<"n">> (none).
-spec encode_affiliation(Arg :: atom()) -> binary().
encode_affiliation(Affiliation) ->
    case Affiliation of
        owner -> <<"o">>;
        publisher -> <<"p">>;
        publish_only -> <<"u">>;
        member -> <<"m">>;
        outcast -> <<"c">>;
        _ -> <<"n">>
    end.
%% @doc Map a subscription atom to the single-character code stored in
%% SQL; anything unrecognized encodes as <<"n">> (none).
-spec encode_subscription(Arg :: atom()) -> binary().
encode_subscription(Subscription) ->
    case Subscription of
        subscribed -> <<"s">>;
        pending -> <<"p">>;
        unconfigured -> <<"u">>;
        _ -> <<"n">>
    end.
%% @doc Encode a [{SubAtom, SubId}] list as the comma-separated
%% "code:SubId" string stored in the subscriptions column
%% (inverse of decode_subscriptions/1).
-spec encode_subscriptions(Subscriptions :: [] | [{atom(), binary()},...]) -> binary().
encode_subscriptions(Subscriptions) ->
    str:join([<<(encode_subscription(S))/binary, ":", SubId/binary>>
              || {S, SubId} <- Subscriptions], <<",">>).
%%% Conversion between raw SQL rows and #pubsub_item{} records
%% @doc Convert one raw SQL row (either list- or tuple-shaped) into a
%% #pubsub_item{} record. The stored publisher JID is reused for both
%% the creation and modification fields of the record.
raw_to_item(Nidx, [ItemId, SJID, Creation, Modification, XML]) ->
    raw_to_item(Nidx, {ItemId, SJID, Creation, Modification, XML});
raw_to_item(Nidx, {ItemId, SJID, Creation, Modification, XML}) ->
    JID = decode_jid(SJID),
    %% Parse a "N:N:N" column back into an erlang timestamp tuple
    %% (inverse of the encoding done in set_item/1).
    ToTime = fun (Str) ->
        [T1, T2, T3] = str:tokens(Str, <<":">>),
        {jlib:l2i(T1), jlib:l2i(T2), jlib:l2i(T3)}
    end,
    %% An unparsable payload column yields an empty payload list
    %% rather than an error.
    Payload = case fxml_stream:parse_element(XML) of
        {error, _Reason} -> [];
        El -> [El]
    end,
    #pubsub_item{itemid = {ItemId, Nidx},
                 creation = {ToTime(Creation), JID},
                 modification = {ToTime(Modification), JID},
                 payload = Payload}.
| null | https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/src/node_flat_sql.erl | erlang | ----------------------------------------------------------------------
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
----------------------------------------------------------------------
as a developer basis and reference to build its own custom pubsub node
types.</p>
pubsub_subscription_sql:init(),
% Payment is required for a subscription
{error, ?ERR_PAYMENT_REQUIRED};
ForbiddenAnonymous ->
% Requesting entity is anonymous
{error, ?ERR_FORBIDDEN};
Requesting entity is prohibited from unsubscribing entity
Entity did not specify SubId
SubId == "", ?? ->
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
InvalidSubId ->
{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
Requesting entity is not a subscriber
Subid supplied, so use that.
Asking to remove all subscriptions to the given node
get_items(Nidx, _From) ->
case catch
<<"' order by modification desc;">>])
of
{selected,
_ ->
{result, []}
end.
SubId == "", ?? ->
Entity has multiple subscriptions to the node but does not specify a subscription ID
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
InvalidSubId ->
Entity is subscribed but specifies an invalid subscription ID
{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
% Payment is required for a subscription
{error, ?ERR_PAYMENT_REQUIRED};
SubId == "", ?? ->
Entity has multiple subscriptions to the node but does not specify a subscription ID
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
InvalidSubId ->
Entity is subscribed but specifies an invalid subscription ID
{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
% Payment is required for a subscription
{error, ?ERR_PAYMENT_REQUIRED};
record getter/setter | File : node_flat_sql.erl
Author :
Purpose : Standard PubSub node plugin with ODBC backend
Created : 1 Dec 2007 by
ejabberd , Copyright ( C ) 2002 - 2016 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
@doc The module < strong>{@module}</strong > is the default PubSub plugin .
< p > It is used as a default for all unknown PubSub node type . It can serve
< p > PubSub plugin nodes are using the { @link gen_node } behaviour.</p >
-module(node_flat_sql).
-behaviour(gen_pubsub_node).
-author('').
-compile([{parse_transform, ejabberd_sql_pt}]).
-include("pubsub.hrl").
-include("jlib.hrl").
-include("ejabberd_sql_pt.hrl").
-export([init/3, terminate/2, options/0, features/0,
create_node_permission/6, create_node/2, delete_node/1,
purge_node/2, subscribe_node/8, unsubscribe_node/4,
publish_item/7, delete_item/4, remove_extra_items/3,
get_entity_affiliations/2, get_node_affiliations/1,
get_affiliation/2, set_affiliation/3,
get_entity_subscriptions/2, get_node_subscriptions/1,
get_subscriptions/2, set_subscriptions/4,
get_pending_nodes/2, get_states/1, get_state/2,
set_state/1, get_items/7, get_items/3, get_item/7,
get_item/2, set_item/1, get_item_name/3, node_to_path/1,
path_to_node/1,
get_entity_subscriptions_for_send_last/2, get_last_items/3]).
-export([decode_jid/1, encode_jid/1,
encode_jid_like/1,
decode_affiliation/1, decode_subscriptions/1,
encode_affiliation/1, encode_subscriptions/1,
encode_host/1,
encode_host_like/1]).
init(_Host, _ServerHost, _Opts) ->
ok.
terminate(_Host, _ServerHost) ->
ok.
options() ->
[{sql, true}, {rsm, true} | node_flat:options()].
features() ->
[<<"rsm">> | node_flat:features()].
create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access) ->
node_flat:create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access).
create_node(Nidx, Owner) ->
{_U, _S, _R} = OwnerKey = jid:tolower(jid:remove_resource(Owner)),
J = encode_jid(OwnerKey),
A = encode_affiliation(owner),
S = encode_subscriptions([]),
catch ejabberd_sql:sql_query_t(
?SQL("insert into pubsub_state("
"nodeid, jid, affiliation, subscriptions) "
"values (%(Nidx)d, %(J)s, %(A)s, %(S)s)")),
{result, {default, broadcast}}.
delete_node(Nodes) ->
Reply = lists:map(fun (#pubsub_node{id = Nidx} = PubsubNode) ->
Subscriptions = case catch
ejabberd_sql:sql_query_t(
?SQL("select @(jid)s, @(subscriptions)s "
"from pubsub_state where nodeid=%(Nidx)d"))
of
{selected, RItems} ->
[{decode_jid(SJID), decode_subscriptions(Subs)} ||
{SJID, Subs} <- RItems];
_ ->
[]
end,
{PubsubNode, Subscriptions}
end, Nodes),
{result, {default, broadcast, Reply}}.
subscribe_node(Nidx, Sender, Subscriber, AccessModel,
SendLast, PresenceSubscription, RosterGroup, _Options) ->
SubKey = jid:tolower(Subscriber),
GenKey = jid:remove_resource(SubKey),
Authorized = jid:tolower(jid:remove_resource(Sender)) == GenKey,
{Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
Whitelisted = lists:member(Affiliation, [member, publisher, owner]),
PendingSubscription = lists:any(fun
({pending, _}) -> true;
(_) -> false
end,
Subscriptions),
Owner = Affiliation == owner,
if not Authorized ->
{error,
?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"invalid-jid">>)};
(Affiliation == outcast) or (Affiliation == publish_only) ->
{error, ?ERR_FORBIDDEN};
PendingSubscription ->
{error,
?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"pending-subscription">>)};
(AccessModel == presence) and (not PresenceSubscription) and (not Owner) ->
{error,
?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"presence-subscription-required">>)};
(AccessModel == roster) and (not RosterGroup) and (not Owner) ->
{error,
?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"not-in-roster-group">>)};
(AccessModel == whitelist) and (not Whitelisted) and (not Owner) ->
{error,
?ERR_EXTENDED((?ERR_NOT_ALLOWED), <<"closed-node">>)};
- >
true ->
{ result , SubId } = pubsub_subscription_sql : subscribe_node(Subscriber , Nidx , Options ) ,
{NewSub, SubId} = case Subscriptions of
[{subscribed, Id}|_] ->
{subscribed, Id};
[] ->
Id = pubsub_subscription_sql:make_subid(),
Sub = case AccessModel of
authorize -> pending;
_ -> subscribed
end,
update_subscription(Nidx, SubKey, [{Sub, Id} | Subscriptions]),
{Sub, Id}
end,
case {NewSub, SendLast} of
{subscribed, never} ->
{result, {default, subscribed, SubId}};
{subscribed, _} ->
{result, {default, subscribed, SubId, send_last}};
{_, _} ->
{result, {default, pending, SubId}}
end
end.
unsubscribe_node(Nidx, Sender, Subscriber, SubId) ->
SubKey = jid:tolower(Subscriber),
GenKey = jid:remove_resource(SubKey),
Authorized = jid:tolower(jid:remove_resource(Sender)) == GenKey,
{Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, SubKey),
SubIdExists = case SubId of
<<>> -> false;
Binary when is_binary(Binary) -> true;
_ -> false
end,
if
not Authorized ->
{error, ?ERR_FORBIDDEN};
Invalid subscription identifier
Subscriptions == [] ->
{error,
?ERR_EXTENDED((?ERR_UNEXPECTED_REQUEST_CANCEL), <<"not-subscribed">>)};
SubIdExists ->
Sub = first_in_list(fun
({_, S}) when S == SubId -> true;
(_) -> false
end,
Subscriptions),
case Sub of
{value, S} ->
delete_subscription(SubKey, Nidx, S, Affiliation, Subscriptions),
{result, default};
false ->
{error,
?ERR_EXTENDED((?ERR_UNEXPECTED_REQUEST_CANCEL), <<"not-subscribed">>)}
end;
SubId == all ->
[delete_subscription(SubKey, Nidx, S, Affiliation, Subscriptions)
|| S <- Subscriptions],
{result, default};
No subid supplied , but there 's only one matching subscription
length(Subscriptions) == 1 ->
delete_subscription(SubKey, Nidx, hd(Subscriptions), Affiliation, Subscriptions),
{result, default};
No subid and more than one possible subscription match .
true ->
{error,
?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"subid-required">>)}
end.
delete_subscription(SubKey, Nidx, {Subscription, SubId}, Affiliation, Subscriptions) ->
NewSubs = Subscriptions -- [{Subscription, SubId}],
pubsub_subscription_sql : unsubscribe_node(SubKey , Nidx , SubId ) ,
case {Affiliation, NewSubs} of
{none, []} -> del_state(Nidx, SubKey);
_ -> update_subscription(Nidx, SubKey, NewSubs)
end.
publish_item(Nidx, Publisher, PublishModel, MaxItems, ItemId, Payload,
_PubOpts) ->
SubKey = jid:tolower(Publisher),
GenKey = jid:remove_resource(SubKey),
{Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
Subscribed = case PublishModel of
subscribers -> node_flat:is_subscribed(Subscriptions);
_ -> undefined
end,
if not ((PublishModel == open) or
(PublishModel == publishers) and
((Affiliation == owner)
or (Affiliation == publisher)
or (Affiliation == publish_only))
or (Subscribed == true)) ->
{error, ?ERR_FORBIDDEN};
true ->
if MaxItems > 0 ->
PubId = {p1_time_compat:timestamp(), SubKey},
set_item(#pubsub_item{itemid = {ItemId, Nidx},
creation = {p1_time_compat:timestamp(), GenKey},
modification = PubId,
payload = Payload}),
Items = [ItemId | itemids(Nidx, GenKey) -- [ItemId]],
{result, {_, OI}} = remove_extra_items(Nidx, MaxItems, Items),
{result, {default, broadcast, OI}};
true ->
{result, {default, broadcast, []}}
end
end.
remove_extra_items(_Nidx, unlimited, ItemIds) ->
{result, {ItemIds, []}};
remove_extra_items(Nidx, MaxItems, ItemIds) ->
NewItems = lists:sublist(ItemIds, MaxItems),
OldItems = lists:nthtail(length(NewItems), ItemIds),
del_items(Nidx, OldItems),
{result, {NewItems, OldItems}}.
delete_item(Nidx, Publisher, PublishModel, ItemId) ->
SubKey = jid:tolower(Publisher),
GenKey = jid:remove_resource(SubKey),
{result, Affiliation} = get_affiliation(Nidx, GenKey),
Allowed = Affiliation == publisher orelse
Affiliation == owner orelse
PublishModel == open orelse
case get_item(Nidx, ItemId) of
{result, #pubsub_item{creation = {_, GenKey}}} -> true;
_ -> false
end,
if not Allowed ->
{error, ?ERR_FORBIDDEN};
true ->
case del_item(Nidx, ItemId) of
{updated, 1} -> {result, {default, broadcast}};
_ -> {error, ?ERR_ITEM_NOT_FOUND}
end
end.
purge_node(Nidx, Owner) ->
SubKey = jid:tolower(Owner),
GenKey = jid:remove_resource(SubKey),
GenState = get_state(Nidx, GenKey),
case GenState of
#pubsub_state{affiliation = owner} ->
{result, States} = get_states(Nidx),
lists:foreach(fun
(#pubsub_state{items = []}) -> ok;
(#pubsub_state{items = Items}) -> del_items(Nidx, Items)
end,
States),
{result, {default, broadcast}};
_ ->
{error, ?ERR_FORBIDDEN}
end.
get_entity_affiliations(Host, Owner) ->
SubKey = jid:tolower(Owner),
GenKey = jid:remove_resource(SubKey),
H = encode_host(Host),
J = encode_jid(GenKey),
Reply = case catch
ejabberd_sql:sql_query_t(
?SQL("select @(node)s, @(type)s, @(i.nodeid)d, @(affiliation)s "
"from pubsub_state i, pubsub_node n where "
"i.nodeid = n.nodeid and jid=%(J)s and host=%(H)s"))
of
{selected, RItems} ->
[{nodetree_tree_sql:raw_to_node(Host, {N, <<"">>, T, I}), decode_affiliation(A)}
|| {N, T, I, A} <- RItems];
_ ->
[]
end,
{result, Reply}.
get_node_affiliations(Nidx) ->
Reply = case catch
ejabberd_sql:sql_query_t(
?SQL("select @(jid)s, @(affiliation)s from pubsub_state "
"where nodeid=%(Nidx)d"))
of
{selected, RItems} ->
[{decode_jid(J), decode_affiliation(A)} || {J, A} <- RItems];
_ ->
[]
end,
{result, Reply}.
get_affiliation(Nidx, Owner) ->
SubKey = jid:tolower(Owner),
GenKey = jid:remove_resource(SubKey),
J = encode_jid(GenKey),
Reply = case catch
ejabberd_sql:sql_query_t(
?SQL("select @(affiliation)s from pubsub_state "
"where nodeid=%(Nidx)d and jid=%(J)s"))
of
{selected, [{A}]} ->
decode_affiliation(A);
_ ->
none
end,
{result, Reply}.
set_affiliation(Nidx, Owner, Affiliation) ->
SubKey = jid:tolower(Owner),
GenKey = jid:remove_resource(SubKey),
{_, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey),
case {Affiliation, Subscriptions} of
{none, []} -> del_state(Nidx, GenKey);
_ -> update_affiliation(Nidx, GenKey, Affiliation)
end.
get_entity_subscriptions(Host, Owner) ->
SubKey = jid:tolower(Owner),
GenKey = jid:remove_resource(SubKey),
H = encode_host(Host),
SJ = encode_jid(SubKey),
GJ = encode_jid(GenKey),
GJLike = <<(encode_jid_like(GenKey))/binary, "/%">>,
Query =
case SubKey of
GenKey ->
?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
" @(jid)s, @(subscriptions)s "
"from pubsub_state i, pubsub_node n "
"where i.nodeid = n.nodeid and "
"(jid=%(GJ)s or jid like %(GJLike)s escape '^')"
" and host=%(H)s");
_ ->
?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
" @(jid)s, @(subscriptions)s "
"from pubsub_state i, pubsub_node n "
"where i.nodeid = n.nodeid and"
" jid in (%(SJ)s, %(GJ)s) and host=%(H)s")
end,
Reply = case catch ejabberd_sql:sql_query_t(Query) of
{selected, RItems} ->
lists:foldl(fun ({N, T, I, J, S}, Acc) ->
Node = nodetree_tree_sql:raw_to_node(Host, {N, <<"">>, T, I}),
Jid = decode_jid(J),
case decode_subscriptions(S) of
[] ->
[{Node, none, Jid} | Acc];
Subs ->
lists:foldl(fun ({Sub, SubId}, Acc2) ->
[{Node, Sub, SubId, Jid} | Acc2]
end,
Acc, Subs)
end
end,
[], RItems);
_ ->
[]
end,
{result, Reply}.
-spec get_entity_subscriptions_for_send_last(Host :: mod_pubsub:hostPubsub(),
Owner :: jid()) ->
{result, [{mod_pubsub:pubsubNode(),
mod_pubsub:subscription(),
mod_pubsub:subId(),
ljid()}]}.
get_entity_subscriptions_for_send_last(Host, Owner) ->
SubKey = jid:tolower(Owner),
GenKey = jid:remove_resource(SubKey),
H = encode_host(Host),
SJ = encode_jid(SubKey),
GJ = encode_jid(GenKey),
GJLike = <<(encode_jid_like(GenKey))/binary, "/%">>,
Query =
case SubKey of
GenKey ->
?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
" @(jid)s, @(subscriptions)s "
"from pubsub_state i, pubsub_node n, pubsub_node_option o "
"where i.nodeid = n.nodeid and n.nodeid = o.nodeid and name='send_last_published_item' "
"and val='on_sub_and_presence' and "
"(jid=%(GJ)s or jid like %(GJLike)s escape '^')"
" and host=%(H)s");
_ ->
?SQL("select @(node)s, @(type)s, @(i.nodeid)d,"
" @(jid)s, @(subscriptions)s "
"from pubsub_state i, pubsub_node n, pubsub_node_option o "
"where i.nodeid = n.nodeid and n.nodeid = o.nodeid and name='send_last_published_item' "
"and val='on_sub_and_presence' and"
" jid in (%(SJ)s, %(GJ)s) and host=%(H)s")
end,
Reply = case catch ejabberd_sql:sql_query_t(Query) of
{selected, RItems} ->
lists:foldl(fun ({N, T, I, J, S}, Acc) ->
Node = nodetree_tree_sql:raw_to_node(Host, {N, <<"">>, T, I}),
Jid = decode_jid(J),
case decode_subscriptions(S) of
[] ->
[{Node, none, Jid} | Acc];
Subs ->
lists:foldl(fun ({Sub, SubId}, Acc2) ->
[{Node, Sub, SubId, Jid}| Acc2]
end,
Acc, Subs)
end
end,
[], RItems);
_ ->
[]
end,
{result, Reply}.
get_node_subscriptions(Nidx) ->
Reply = case catch
ejabberd_sql:sql_query_t(
?SQL("select @(jid)s, @(subscriptions)s from pubsub_state "
"where nodeid=%(Nidx)d"))
of
{selected, RItems} ->
lists:foldl(fun ({J, S}, Acc) ->
Jid = decode_jid(J),
case decode_subscriptions(S) of
[] ->
[{Jid, none} | Acc];
Subs ->
lists:foldl(fun ({Sub, SubId}, Acc2) ->
[{Jid, Sub, SubId} | Acc2]
end,
Acc, Subs)
end
end,
[], RItems);
_ ->
[]
end,
{result, Reply}.
get_subscriptions(Nidx, Owner) ->
SubKey = jid:tolower(Owner),
J = encode_jid(SubKey),
Reply = case catch
ejabberd_sql:sql_query_t(
?SQL("select @(subscriptions)s from pubsub_state"
" where nodeid=%(Nidx)d and jid=%(J)s"))
of
{selected, [{S}]} ->
decode_subscriptions(S);
_ ->
[]
end,
{result, Reply}.
set_subscriptions(Nidx, Owner, Subscription, SubId) ->
SubKey = jid:tolower(Owner),
SubState = get_state_without_itemids(Nidx, SubKey),
case {SubId, SubState#pubsub_state.subscriptions} of
{_, []} ->
case Subscription of
none ->
{error,
?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"not-subscribed">>)};
_ ->
new_subscription(Nidx, Owner, Subscription, SubState)
end;
{<<>>, [{_, SID}]} ->
case Subscription of
none -> unsub_with_subid(Nidx, SID, SubState);
_ -> replace_subscription({Subscription, SID}, SubState)
end;
{<<>>, [_ | _]} ->
{error,
?ERR_EXTENDED((?ERR_BAD_REQUEST), <<"subid-required">>)};
_ ->
case Subscription of
none -> unsub_with_subid(Nidx, SubId, SubState);
_ -> replace_subscription({Subscription, SubId}, SubState)
end
end.
replace_subscription(NewSub, SubState) ->
NewSubs = replace_subscription(NewSub, SubState#pubsub_state.subscriptions, []),
set_state(SubState#pubsub_state{subscriptions = NewSubs}).
replace_subscription(_, [], Acc) -> Acc;
replace_subscription({Sub, SubId}, [{_, SubId} | T], Acc) ->
replace_subscription({Sub, SubId}, T, [{Sub, SubId} | Acc]).
new_subscription(_Nidx, _Owner, Subscription, SubState) ->
{ result , SubId } = pubsub_subscription_sql : subscribe_node(Owner , Nidx , [ ] ) ,
SubId = pubsub_subscription_sql:make_subid(),
Subscriptions = [{Subscription, SubId} | SubState#pubsub_state.subscriptions],
set_state(SubState#pubsub_state{subscriptions = Subscriptions}),
{Subscription, SubId}.
unsub_with_subid(Nidx, SubId, SubState) ->
pubsub_subscription_sql : , Nidx , SubId ) ,
NewSubs = [{S, Sid}
|| {S, Sid} <- SubState#pubsub_state.subscriptions,
SubId =/= Sid],
case {NewSubs, SubState#pubsub_state.affiliation} of
{[], none} -> del_state(Nidx, element(1, SubState#pubsub_state.stateid));
_ -> set_state(SubState#pubsub_state{subscriptions = NewSubs})
end.
get_pending_nodes(Host, Owner) ->
GenKey = jid:remove_resource(jid:tolower(Owner)),
States = mnesia:match_object(#pubsub_state{stateid = {GenKey, '_'},
affiliation = owner, _ = '_'}),
Nidxxs = [Nidx || #pubsub_state{stateid = {_, Nidx}} <- States],
NodeTree = mod_pubsub:tree(Host),
Reply = mnesia:foldl(fun (#pubsub_state{stateid = {_, Nidx}} = S, Acc) ->
case lists:member(Nidx, Nidxxs) of
true ->
case get_nodes_helper(NodeTree, S) of
{value, Node} -> [Node | Acc];
false -> Acc
end;
false ->
Acc
end
end,
[], pubsub_state),
{result, Reply}.
get_nodes_helper(NodeTree, #pubsub_state{stateid = {_, N}, subscriptions = Subs}) ->
HasPending = fun
({pending, _}) -> true;
(pending) -> true;
(_) -> false
end,
case lists:any(HasPending, Subs) of
true ->
case NodeTree:get_node(N) of
#pubsub_node{nodeid = {_, Node}} -> {value, Node};
_ -> false
end;
false ->
false
end.
get_states(Nidx) ->
case catch
ejabberd_sql:sql_query_t(
?SQL("select @(jid)s, @(affiliation)s, @(subscriptions)s "
"from pubsub_state where nodeid=%(Nidx)d"))
of
{selected, RItems} ->
{result,
lists:map(fun ({SJID, Aff, Subs}) ->
JID = decode_jid(SJID),
#pubsub_state{stateid = {JID, Nidx},
items = itemids(Nidx, JID),
affiliation = decode_affiliation(Aff),
subscriptions = decode_subscriptions(Subs)}
end,
RItems)};
_ ->
{result, []}
end.
get_state(Nidx, JID) ->
State = get_state_without_itemids(Nidx, JID),
{SJID, _} = State#pubsub_state.stateid,
State#pubsub_state{items = itemids(Nidx, SJID)}.
-spec get_state_without_itemids(Nidx :: mod_pubsub:nodeIdx(), Key :: ljid()) ->
mod_pubsub:pubsubState().
get_state_without_itemids(Nidx, JID) ->
J = encode_jid(JID),
case catch
ejabberd_sql:sql_query_t(
?SQL("select @(jid)s, @(affiliation)s, @(subscriptions)s "
"from pubsub_state "
"where nodeid=%(Nidx)d and jid=%(J)s"))
of
{selected, [{SJID, Aff, Subs}]} ->
#pubsub_state{stateid = {decode_jid(SJID), Nidx},
affiliation = decode_affiliation(Aff),
subscriptions = decode_subscriptions(Subs)};
_ ->
#pubsub_state{stateid = {JID, Nidx}}
end.
set_state(State) ->
{_, Nidx} = State#pubsub_state.stateid,
set_state(Nidx, State).
set_state(Nidx, State) ->
{JID, _} = State#pubsub_state.stateid,
J = encode_jid(JID),
S = encode_subscriptions(State#pubsub_state.subscriptions),
A = encode_affiliation(State#pubsub_state.affiliation),
?SQL_UPSERT_T(
"pubsub_state",
["!nodeid=%(Nidx)d",
"!jid=%(J)s",
"affiliation=%(A)s",
"subscriptions=%(S)s"
]),
ok.
del_state(Nidx, JID) ->
J = encode_jid(JID),
catch ejabberd_sql:sql_query_t(
?SQL("delete from pubsub_state"
" where jid=%(J)s and nodeid=%(Nidx)d")),
ok.
ejabberd_sql : sql_query_t([<<"select itemid , publisher , creation , modification , payload "
" from pubsub_item where ' " > > , Nidx ,
[ < < " " > > , < < " publisher " > > , < < " creation " > > , < < " modification " > > , < < " payload " > > ] , RItems } - >
{ result , [ raw_to_item(Nidx , RItem ) || RItem < - RItems ] } ;
get_items(Nidx, From, none) ->
MaxItems = case catch
ejabberd_sql:sql_query_t(
?SQL("select @(val)s from pubsub_node_option "
"where nodeid=%(Nidx)d and name='max_items'"))
of
{selected, [{Value}]} ->
jlib:expr_to_term(Value);
_ ->
?MAXITEMS
end,
get_items(Nidx, From, #rsm_in{max = MaxItems});
get_items(Nidx, _From,
#rsm_in{max = M, direction = Direction, id = I, index = IncIndex}) ->
Max = ejabberd_sql:escape(jlib:i2l(M)),
{Way, Order} = case Direction of
aft when I == <<>> -> {<<"is not">>, <<"desc">>};
aft -> {<<"<">>, <<"desc">>};
before when I == <<>> -> {<<"is not">>, <<"asc">>};
before -> {<<">">>, <<"asc">>};
_ -> {<<"is not">>, <<"desc">>}
end,
SNidx = integer_to_binary(Nidx),
[AttrName, Id] = case I of
undefined when IncIndex =/= undefined ->
case catch
ejabberd_sql:sql_query_t([<<"select modification from pubsub_item pi "
"where exists ( select count(*) as count1 "
"from pubsub_item where nodeid='">>, SNidx,
<<"' and modification > pi.modification having count1 = ">>,
ejabberd_sql:escape(jlib:i2l(IncIndex)), <<" );">>])
of
{selected, [_], [[O]]} ->
[<<"modification">>, <<"'", O/binary, "'">>];
_ ->
[<<"modification">>, <<"null">>]
end;
undefined ->
[<<"modification">>, <<"null">>];
<<>> ->
[<<"modification">>, <<"null">>];
I ->
[A, B] = str:tokens(ejabberd_sql:escape(jlib:i2l(I)), <<"@">>),
[A, <<"'", B/binary, "'">>]
end,
Count = case catch
ejabberd_sql:sql_query_t([<<"select count(*) from pubsub_item where nodeid='">>, SNidx, <<"';">>])
of
{selected, [_], [[C]]} -> C;
_ -> <<"0">>
end,
Query = fun(mssql, _) ->
ejabberd_sql:sql_query_t(
[<<"select top ">>, jlib:i2l(Max),
<<" itemid, publisher, creation, modification, payload "
"from pubsub_item where nodeid='">>, SNidx,
<<"' and ">>, AttrName, <<" ">>, Way, <<" ">>, Id, <<" order by ">>,
AttrName, <<" ">>, Order, <<";">>]);
(_, _) ->
ejabberd_sql:sql_query_t(
[<<"select itemid, publisher, creation, modification, payload "
"from pubsub_item where nodeid='">>, SNidx,
<<"' and ">>, AttrName, <<" ">>, Way, <<" ">>, Id, <<" order by ">>,
AttrName, <<" ">>, Order, <<" limit ">>, jlib:i2l(Max), <<" ;">>])
end,
case catch ejabberd_sql:sql_query_t(Query) of
{selected,
[<<"itemid">>, <<"publisher">>, <<"creation">>, <<"modification">>, <<"payload">>], RItems} ->
case RItems of
[[_, _, _, F, _]|_] ->
Index = case catch
ejabberd_sql:sql_query_t([<<"select count(*) from pubsub_item "
"where nodeid='">>, SNidx, <<"' and ">>,
AttrName, <<" > '">>, F, <<"';">>])
of
{ selected , [ _ ] , [ { C } , { In } ] } - > [ string : , both , $ " ) , string : strip(In , both , $ " ) ] ;
{selected, [_], [[In]]} -> In;
_ -> <<"0">>
end,
[_, _, _, L, _] = lists:last(RItems),
RsmOut = #rsm_out{count = Count, index = Index,
first = <<"modification@", F/binary>>,
last = <<"modification@", (jlib:i2l(L))/binary>>},
{result, {[raw_to_item(Nidx, RItem) || RItem <- RItems], RsmOut}};
[] ->
{result, {[], #rsm_out{count = Count}}}
end;
_ ->
{result, {[], none}}
end.
get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId, RSM) ->
SubKey = jid:tolower(JID),
GenKey = jid:remove_resource(SubKey),
{Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
Whitelisted = node_flat:can_fetch_item(Affiliation, Subscriptions),
(Affiliation == outcast) or (Affiliation == publish_only) ->
{error, ?ERR_FORBIDDEN};
(AccessModel == presence) and not PresenceSubscription ->
{error,
?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"presence-subscription-required">>)};
(AccessModel == roster) and not RosterGroup ->
{error,
?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"not-in-roster-group">>)};
(AccessModel == whitelist) and not Whitelisted ->
{error,
?ERR_EXTENDED((?ERR_NOT_ALLOWED), <<"closed-node">>)};
(AccessModel == authorize) and not Whitelisted ->
{error, ?ERR_FORBIDDEN};
- >
true ->
get_items(Nidx, JID, RSM)
end.
get_last_items(Nidx, _From, Count) ->
Limit = jlib:i2l(Count),
SNidx = integer_to_binary(Nidx),
Query = fun(mssql, _) ->
ejabberd_sql:sql_query_t(
[<<"select top ">>, Limit,
<<" itemid, publisher, creation, modification, payload "
"from pubsub_item where nodeid='">>, SNidx,
<<"' order by modification desc ;">>]);
(_, _) ->
ejabberd_sql:sql_query_t(
[<<"select itemid, publisher, creation, modification, payload "
"from pubsub_item where nodeid='">>, SNidx,
<<"' order by modification desc limit ">>, Limit, <<";">>])
end,
case catch ejabberd_sql:sql_query_t(Query) of
{selected,
[<<"itemid">>, <<"publisher">>, <<"creation">>, <<"modification">>, <<"payload">>], RItems} ->
{result, [raw_to_item(Nidx, RItem) || RItem <- RItems]};
_ ->
{result, []}
end.
get_item(Nidx, ItemId) ->
case catch ejabberd_sql:sql_query_t(
?SQL("select @(itemid)s, @(publisher)s, @(creation)s,"
" @(modification)s, @(payload)s from pubsub_item"
" where nodeid=%(Nidx)d and itemid=%(ItemId)s"))
of
{selected, [RItem]} ->
{result, raw_to_item(Nidx, RItem)};
{selected, []} ->
{error, ?ERR_ITEM_NOT_FOUND};
{'EXIT', _} ->
{error, ?ERRT_INTERNAL_SERVER_ERROR(?MYLANG, <<"Database failure">>)}
end.
get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId) ->
SubKey = jid:tolower(JID),
GenKey = jid:remove_resource(SubKey),
{Affiliation, Subscriptions} = select_affiliation_subscriptions(Nidx, GenKey, SubKey),
Whitelisted = node_flat:can_fetch_item(Affiliation, Subscriptions),
(Affiliation == outcast) or (Affiliation == publish_only) ->
{error, ?ERR_FORBIDDEN};
(AccessModel == presence) and not PresenceSubscription ->
{error,
?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"presence-subscription-required">>)};
(AccessModel == roster) and not RosterGroup ->
{error,
?ERR_EXTENDED((?ERR_NOT_AUTHORIZED), <<"not-in-roster-group">>)};
(AccessModel == whitelist) and not Whitelisted ->
{error,
?ERR_EXTENDED((?ERR_NOT_ALLOWED), <<"closed-node">>)};
(AccessModel == authorize) and not Whitelisted ->
{error, ?ERR_FORBIDDEN};
- >
true ->
get_item(Nidx, ItemId)
end.
set_item(Item) ->
{ItemId, Nidx} = Item#pubsub_item.itemid,
{C, _} = Item#pubsub_item.creation,
{M, JID} = Item#pubsub_item.modification,
P = encode_jid(JID),
Payload = Item#pubsub_item.payload,
XML = str:join([fxml:element_to_binary(X) || X<-Payload], <<>>),
S = fun ({T1, T2, T3}) ->
str:join([jlib:i2l(T1, 6), jlib:i2l(T2, 6), jlib:i2l(T3, 6)], <<":">>)
end,
SM = S(M),
SC = S(C),
?SQL_UPSERT_T(
"pubsub_item",
["!nodeid=%(Nidx)d",
"!itemid=%(ItemId)s",
"publisher=%(P)s",
"modification=%(SM)s",
"payload=%(XML)s",
"-creation=%(SC)s"
]),
ok.
del_item(Nidx, ItemId) ->
catch ejabberd_sql:sql_query_t(
?SQL("delete from pubsub_item where itemid=%(ItemId)s"
" and nodeid=%(Nidx)d")).
del_items(_, []) ->
ok;
del_items(Nidx, [ItemId]) ->
del_item(Nidx, ItemId);
del_items(Nidx, ItemIds) ->
I = str:join([[<<"'">>, ejabberd_sql:escape(X), <<"'">>] || X <- ItemIds], <<",">>),
SNidx = integer_to_binary(Nidx),
catch
ejabberd_sql:sql_query_t([<<"delete from pubsub_item where itemid in (">>,
I, <<") and nodeid='">>, SNidx, <<"';">>]).
get_item_name(_Host, _Node, Id) ->
Id.
node_to_path(Node) ->
node_flat:node_to_path(Node).
path_to_node(Path) ->
node_flat:path_to_node(Path).
first_in_list(_Pred, []) ->
false;
first_in_list(Pred, [H | T]) ->
case Pred(H) of
true -> {value, H};
_ -> first_in_list(Pred, T)
end.
itemids(Nidx, {_U, _S, _R} = JID) ->
SJID = encode_jid(JID),
SJIDLike = <<(ejabberd_sql:escape(encode_jid_like(JID)))/binary, "/%">>,
case catch
ejabberd_sql:sql_query_t(
?SQL("select @(itemid)s from pubsub_item where "
"nodeid=%(Nidx)d and (publisher=%(SJID)s"
" or publisher like %(SJIDLike)s escape '^') "
"order by modification desc"))
of
{selected, RItems} ->
[ItemId || {ItemId} <- RItems];
_ ->
[]
end.
select_affiliation_subscriptions(Nidx, JID) ->
J = encode_jid(JID),
case catch
ejabberd_sql:sql_query_t(
?SQL("select @(affiliation)s, @(subscriptions)s from "
" pubsub_state where nodeid=%(Nidx)d and jid=%(J)s"))
of
{selected, [{A, S}]} ->
{decode_affiliation(A), decode_subscriptions(S)};
_ ->
{none, []}
end.
select_affiliation_subscriptions(Nidx, JID, JID) ->
select_affiliation_subscriptions(Nidx, JID);
select_affiliation_subscriptions(Nidx, GenKey, SubKey) ->
{result, Affiliation} = get_affiliation(Nidx, GenKey),
{result, BareJidSubs} = get_subscriptions(Nidx, GenKey),
{result, FullJidSubs} = get_subscriptions(Nidx, SubKey),
{Affiliation, BareJidSubs++FullJidSubs}.
update_affiliation(Nidx, JID, Affiliation) ->
J = encode_jid(JID),
A = encode_affiliation(Affiliation),
?SQL_UPSERT_T(
"pubsub_state",
["!nodeid=%(Nidx)d",
"!jid=%(J)s",
"affiliation=%(A)s",
"-subscriptions=''"
]).
update_subscription(Nidx, JID, Subscription) ->
J = encode_jid(JID),
S = encode_subscriptions(Subscription),
?SQL_UPSERT_T(
"pubsub_state",
["!nodeid=%(Nidx)d",
"!jid=%(J)s",
"subscriptions=%(S)s",
"-affiliation='n'"
]).
-spec decode_jid(SJID :: binary()) -> ljid().
decode_jid(SJID) ->
jid:tolower(jid:from_string(SJID)).
-spec decode_affiliation(Arg :: binary()) -> atom().
decode_affiliation(<<"o">>) -> owner;
decode_affiliation(<<"p">>) -> publisher;
decode_affiliation(<<"u">>) -> publish_only;
decode_affiliation(<<"m">>) -> member;
decode_affiliation(<<"c">>) -> outcast;
decode_affiliation(_) -> none.
-spec decode_subscription(Arg :: binary()) -> atom().
decode_subscription(<<"s">>) -> subscribed;
decode_subscription(<<"p">>) -> pending;
decode_subscription(<<"u">>) -> unconfigured;
decode_subscription(_) -> none.
-spec decode_subscriptions(Subscriptions :: binary()) -> [] | [{atom(), binary()},...].
decode_subscriptions(Subscriptions) ->
lists:foldl(fun (Subscription, Acc) ->
case str:tokens(Subscription, <<":">>) of
[S, SubId] -> [{decode_subscription(S), SubId} | Acc];
_ -> Acc
end
end,
[], str:tokens(Subscriptions, <<",">>)).
-spec encode_jid(JID :: ljid()) -> binary().
encode_jid(JID) ->
jid:to_string(JID).
-spec encode_jid_like(JID :: ljid()) -> binary().
encode_jid_like(JID) ->
ejabberd_sql:escape_like_arg_circumflex(jid:to_string(JID)).
-spec encode_host(Host :: host()) -> binary().
encode_host({_U, _S, _R} = LJID) -> encode_jid(LJID);
encode_host(Host) -> Host.
-spec encode_host_like(Host :: host()) -> binary().
encode_host_like({_U, _S, _R} = LJID) -> ejabberd_sql:escape(encode_jid_like(LJID));
encode_host_like(Host) ->
ejabberd_sql:escape(ejabberd_sql:escape_like_arg_circumflex(Host)).
-spec encode_affiliation(Arg :: atom()) -> binary().
encode_affiliation(owner) -> <<"o">>;
encode_affiliation(publisher) -> <<"p">>;
encode_affiliation(publish_only) -> <<"u">>;
encode_affiliation(member) -> <<"m">>;
encode_affiliation(outcast) -> <<"c">>;
encode_affiliation(_) -> <<"n">>.
-spec encode_subscription(Arg :: atom()) -> binary().
encode_subscription(subscribed) -> <<"s">>;
encode_subscription(pending) -> <<"p">>;
encode_subscription(unconfigured) -> <<"u">>;
encode_subscription(_) -> <<"n">>.
-spec encode_subscriptions(Subscriptions :: [] | [{atom(), binary()},...]) -> binary().
encode_subscriptions(Subscriptions) ->
str:join([<<(encode_subscription(S))/binary, ":", SubId/binary>>
|| {S, SubId} <- Subscriptions], <<",">>).
raw_to_item(Nidx, [ItemId, SJID, Creation, Modification, XML]) ->
raw_to_item(Nidx, {ItemId, SJID, Creation, Modification, XML});
raw_to_item(Nidx, {ItemId, SJID, Creation, Modification, XML}) ->
JID = decode_jid(SJID),
ToTime = fun (Str) ->
[T1, T2, T3] = str:tokens(Str, <<":">>),
{jlib:l2i(T1), jlib:l2i(T2), jlib:l2i(T3)}
end,
Payload = case fxml_stream:parse_element(XML) of
{error, _Reason} -> [];
El -> [El]
end,
#pubsub_item{itemid = {ItemId, Nidx},
creation = {ToTime(Creation), JID},
modification = {ToTime(Modification), JID},
payload = Payload}.
|
423cefa131a1222ecd75614c0fcf2c07b583047a96e695e3a727a7306b52cd80 | coq/coq | ppextend.mli | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Constrexpr
* { 6 Pretty - print . }
type ppbox =
| PpHB
| PpHOVB of int
| PpHVB of int
| PpVB of int
type ppcut =
| PpBrk of int * int
| PpFnl
val ppcmd_of_box : ppbox -> Pp.t -> Pp.t
val ppcmd_of_cut : ppcut -> Pp.t
* { 6 Printing rules for notations }
type pattern_quote_style = QuotedPattern | NotQuotedPattern
(** Declare and look for the printing rule for symbolic notations *)
type unparsing =
| UnpMetaVar of entry_relative_level * Extend.side option
| UnpBinderMetaVar of entry_relative_level * pattern_quote_style
| UnpListMetaVar of entry_relative_level * unparsing list * Extend.side option
| UnpBinderListMetaVar of
bool (* true if open binder *) *
bool (* true if printed with a quote *) *
unparsing list
| UnpTerminal of string
| UnpBox of ppbox * unparsing Loc.located list
| UnpCut of ppcut
type unparsing_rule = unparsing list
val unparsing_eq : unparsing -> unparsing -> bool
type notation_printing_rules = {
notation_printing_unparsing : unparsing_rule;
notation_printing_level : entry_level;
}
type generic_notation_printing_rules = {
notation_printing_reserved : bool;
notation_printing_rules : notation_printing_rules;
}
val declare_generic_notation_printing_rules : notation -> generic_notation_printing_rules -> unit
val declare_specific_notation_printing_rules : specific_notation -> notation_printing_rules -> unit
val has_generic_notation_printing_rule : notation -> bool
val find_generic_notation_printing_rule : notation -> generic_notation_printing_rules
val find_specific_notation_printing_rule : specific_notation -> notation_printing_rules
val find_notation_printing_rule : notation_with_optional_scope option -> notation -> notation_printing_rules
| null | https://raw.githubusercontent.com/coq/coq/f66b58cc7e6a8e245b35c3858989181825c591ce/printing/ppextend.mli | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
* Declare and look for the printing rule for symbolic notations
true if open binder
true if printed with a quote | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
open Constrexpr
* { 6 Pretty - print . }
type ppbox =
| PpHB
| PpHOVB of int
| PpHVB of int
| PpVB of int
type ppcut =
| PpBrk of int * int
| PpFnl
val ppcmd_of_box : ppbox -> Pp.t -> Pp.t
val ppcmd_of_cut : ppcut -> Pp.t
* { 6 Printing rules for notations }
type pattern_quote_style = QuotedPattern | NotQuotedPattern
type unparsing =
| UnpMetaVar of entry_relative_level * Extend.side option
| UnpBinderMetaVar of entry_relative_level * pattern_quote_style
| UnpListMetaVar of entry_relative_level * unparsing list * Extend.side option
| UnpBinderListMetaVar of
unparsing list
| UnpTerminal of string
| UnpBox of ppbox * unparsing Loc.located list
| UnpCut of ppcut
type unparsing_rule = unparsing list
val unparsing_eq : unparsing -> unparsing -> bool
type notation_printing_rules = {
notation_printing_unparsing : unparsing_rule;
notation_printing_level : entry_level;
}
type generic_notation_printing_rules = {
notation_printing_reserved : bool;
notation_printing_rules : notation_printing_rules;
}
val declare_generic_notation_printing_rules : notation -> generic_notation_printing_rules -> unit
val declare_specific_notation_printing_rules : specific_notation -> notation_printing_rules -> unit
val has_generic_notation_printing_rule : notation -> bool
val find_generic_notation_printing_rule : notation -> generic_notation_printing_rules
val find_specific_notation_printing_rule : specific_notation -> notation_printing_rules
val find_notation_printing_rule : notation_with_optional_scope option -> notation -> notation_printing_rules
|
a6d521013ec47b9e740a46bab9eb58df32c621e7be700c3b7e80ffa251092ebf | processone/ejabberd-contrib | mod_filter.erl | %%%----------------------------------------------------------------------
%%% File : mod_filter.erl
Author : < >
%%% Purpose : flexible filtering by server policy
Created : 21 Sep 2005 by < >
%%%----------------------------------------------------------------------
-module(mod_filter).
-author('').
-behaviour(gen_mod).
-export([start/2, stop/1, depends/2, mod_options/1, filter_packet/1, mod_doc/0]).
-include("logger.hrl").
-include_lib("xmpp/include/xmpp.hrl").
-dialyzer({no_match, [check_stanza_type/2, check_access/1]}).
start(_Host, _Opts) ->
ejabberd_hooks:add(filter_packet, global, ?MODULE, filter_packet, 100).
stop(_Host) ->
ejabberd_hooks:delete(filter_packet, global, ?MODULE, filter_packet, 100).
%% Return drop to drop the packet, or the original input to let it through.
%% From and To are jid records.
filter_packet(drop) ->
drop;
filter_packet(Packet) ->
From = xmpp:get_from(Packet),
To = xmpp:get_to(Packet),
%% It probably doesn't make any sense to block packets to oneself.
R = if From#jid.luser == To#jid.luser,
From#jid.lserver == To#jid.lserver ->
Packet;
true ->
check_stanza(Packet)
end,
?DEBUG("filtering packet...~nFrom: ~p~nTo: ~p~nPacket: ~p~nResult: ~p",
[From, To, Packet, R]),
case R of
{drop, _} -> drop;
{drop, _, _} -> drop;
_ -> R
end.
check_stanza(Packet) ->
AccessRule = case element(1, Packet) of
presence ->
mod_filter_presence;
message ->
mod_filter_message;
iq ->
mod_filter_iq
end,
check_stanza_type(AccessRule, Packet).
check_stanza_type(AccessRule, Packet) ->
FromAccess = acl:match_rule(global, AccessRule, xmpp:get_from(Packet)),
case FromAccess of
allow ->
check_access(Packet);
deny ->
{drop, AccessRule, sender};
ToAccessRule ->
ToAccess = acl:match_rule(global, ToAccessRule, xmpp:get_to(Packet)),
case ToAccess of
allow ->
check_access(Packet);
deny ->
{drop, AccessRule, receiver}
end
end.
check_access(Packet) ->
%% Beginning of a complicated ACL matching procedure.
%% The access option given to the module applies to senders.
%% XXX: there are no "global" module options, and we don't know
%% anymore what "host" we are on. Thus hardcoding access rule.
AccessRule = gen_mod : get_module_opt(global , ? MODULE , access , all ) ,
AccessRule = mod_filter,
FromAccess = acl:match_rule(global, AccessRule, xmpp:get_from(Packet)),
%% If the rule results in 'allow' or 'deny', treat that as the
%% result. Else it is a rule to be applied to the receiver.
case FromAccess of
allow ->
Packet;
deny ->
{drop, sender};
ToAccessRule ->
ToAccess = acl:match_rule(global, ToAccessRule, xmpp:get_to(Packet)),
case ToAccess of
allow ->
Packet;
deny ->
{drop, receiver}
end
end.
depends(_Host, _Opts) ->
[].
mod_options(_) -> [].
mod_doc() -> #{}.
| null | https://raw.githubusercontent.com/processone/ejabberd-contrib/037c3749f331c8783666d45157e857ef5e7df42c/mod_filter/src/mod_filter.erl | erlang | ----------------------------------------------------------------------
File : mod_filter.erl
Purpose : flexible filtering by server policy
----------------------------------------------------------------------
Return drop to drop the packet, or the original input to let it through.
From and To are jid records.
It probably doesn't make any sense to block packets to oneself.
Beginning of a complicated ACL matching procedure.
The access option given to the module applies to senders.
XXX: there are no "global" module options, and we don't know
anymore what "host" we are on. Thus hardcoding access rule.
If the rule results in 'allow' or 'deny', treat that as the
result. Else it is a rule to be applied to the receiver. | Author : < >
Created : 21 Sep 2005 by < >
-module(mod_filter).
-author('').
-behaviour(gen_mod).
-export([start/2, stop/1, depends/2, mod_options/1, filter_packet/1, mod_doc/0]).
-include("logger.hrl").
-include_lib("xmpp/include/xmpp.hrl").
-dialyzer({no_match, [check_stanza_type/2, check_access/1]}).
start(_Host, _Opts) ->
ejabberd_hooks:add(filter_packet, global, ?MODULE, filter_packet, 100).
stop(_Host) ->
ejabberd_hooks:delete(filter_packet, global, ?MODULE, filter_packet, 100).
filter_packet(drop) ->
drop;
filter_packet(Packet) ->
From = xmpp:get_from(Packet),
To = xmpp:get_to(Packet),
R = if From#jid.luser == To#jid.luser,
From#jid.lserver == To#jid.lserver ->
Packet;
true ->
check_stanza(Packet)
end,
?DEBUG("filtering packet...~nFrom: ~p~nTo: ~p~nPacket: ~p~nResult: ~p",
[From, To, Packet, R]),
case R of
{drop, _} -> drop;
{drop, _, _} -> drop;
_ -> R
end.
check_stanza(Packet) ->
AccessRule = case element(1, Packet) of
presence ->
mod_filter_presence;
message ->
mod_filter_message;
iq ->
mod_filter_iq
end,
check_stanza_type(AccessRule, Packet).
check_stanza_type(AccessRule, Packet) ->
FromAccess = acl:match_rule(global, AccessRule, xmpp:get_from(Packet)),
case FromAccess of
allow ->
check_access(Packet);
deny ->
{drop, AccessRule, sender};
ToAccessRule ->
ToAccess = acl:match_rule(global, ToAccessRule, xmpp:get_to(Packet)),
case ToAccess of
allow ->
check_access(Packet);
deny ->
{drop, AccessRule, receiver}
end
end.
check_access(Packet) ->
AccessRule = gen_mod : get_module_opt(global , ? MODULE , access , all ) ,
AccessRule = mod_filter,
FromAccess = acl:match_rule(global, AccessRule, xmpp:get_from(Packet)),
case FromAccess of
allow ->
Packet;
deny ->
{drop, sender};
ToAccessRule ->
ToAccess = acl:match_rule(global, ToAccessRule, xmpp:get_to(Packet)),
case ToAccess of
allow ->
Packet;
deny ->
{drop, receiver}
end
end.
depends(_Host, _Opts) ->
[].
mod_options(_) -> [].
mod_doc() -> #{}.
|
ef23c84a21287dbecab2cf0fd38b943df2a1270d104d37da184154c61bbb2bb0 | lasp-lang/partisan | partisan_app.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2015 Helium Systems , Inc. All Rights Reserved .
Copyright ( c ) 2016 . All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(partisan_app).
-behaviour(application).
-include("partisan.hrl").
-export([start/2, stop/1]).
%% =============================================================================
%% API
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc Starts the application.
%% @end
%% -----------------------------------------------------------------------------
start(_StartType, _StartArgs) ->
case partisan_sup:start_link() of
{ok, Pid} ->
{ok, Pid};
Other ->
{error, Other}
end.
%% -----------------------------------------------------------------------------
%% @doc Stop the application.
%% @end
%% -----------------------------------------------------------------------------
stop(_State) ->
ok.
| null | https://raw.githubusercontent.com/lasp-lang/partisan/3bb12965f5e84e7856a1718b48dc9d644f6ab1aa/src/partisan_app.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
=============================================================================
API
=============================================================================
-----------------------------------------------------------------------------
@doc Starts the application.
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc Stop the application.
@end
----------------------------------------------------------------------------- | Copyright ( c ) 2015 Helium Systems , Inc. All Rights Reserved .
Copyright ( c ) 2016 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(partisan_app).
-behaviour(application).
-include("partisan.hrl").
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
case partisan_sup:start_link() of
{ok, Pid} ->
{ok, Pid};
Other ->
{error, Other}
end.
stop(_State) ->
ok.
|
33c1bdc0b0c940377e7bc64082bc69859eaf34435881fabc5c31e571f4734f28 | gsakkas/rite | 1990.ml |
let rec myAppend l n = match l with | [] -> [n] | h::t -> h :: (myAppend t n);;
let explode s =
let rec go i =
if i >= (String.length s) then [] else (s.[i]) :: (go (i + 1)) in
go 0;;
let rec listCompare l k =
if ((List.hd l) = []) && ((List.hd k) = [])
then true
else
if (List.hd l) = (List.hd k)
then listCompare (List.tl l) (List.tl k)
else false;;
let rec listReverse l =
match l with | [] -> [] | h::t -> myAppend (listReverse t) h;;
let palindrome w = listCompare (explode w) (listReverse (explode w));;
fix
let rec myAppend l n = match l with | [ ] - > [ n ] | h::t - > h : : ( myAppend t n ) ; ;
let explode s =
let rec go i =
if i > = ( String.length s ) then [ ] else ( s.[i ] ) : : ( go ( i + 1 ) ) in
go 0 ; ;
let rec l =
match l with | [ ] - > [ ] | h::t - > myAppend ( listReverse t ) h ; ;
let palindrome w = ( explode w ) = ( ( explode w ) ) ; ;
let rec myAppend l n = match l with | [] -> [n] | h::t -> h :: (myAppend t n);;
let explode s =
let rec go i =
if i >= (String.length s) then [] else (s.[i]) :: (go (i + 1)) in
go 0;;
let rec listReverse l =
match l with | [] -> [] | h::t -> myAppend (listReverse t) h;;
let palindrome w = (explode w) = (listReverse (explode w));;
*)
changed spans
( 20,20)-(20,69 )
explode w = ( explode w )
BopG ( AppG [ EmptyG ] ) ( AppG [ EmptyG ] )
(20,20)-(20,69)
explode w = listReverse (explode w)
BopG (AppG [EmptyG]) (AppG [EmptyG])
*)
type error slice
( 4,4)-(7,9 )
( 4,13)-(7,7 )
( 5,3)-(7,7 )
( 6,44)-(6,51 )
( 6,44)-(6,67 )
( 6,45)-(6,50 )
( 6,55)-(6,67 )
( 6,56)-(6,58 )
( 7,3)-(7,5 )
( 7,3)-(7,7 )
( 10,28)-(10,46 )
( 10,29)-(10,40 )
( 10,30)-(10,37 )
( 10,38)-(10,39 )
( 10,43)-(10,45 )
( 13,8)-(13,19 )
( 13,8)-(13,33 )
( 13,9)-(13,16 )
( 13,17)-(13,18 )
( 13,22)-(13,33 )
( 13,23)-(13,30 )
( 13,31)-(13,32 )
( 14,10)-(14,21 )
( 14,10)-(14,45 )
( 14,22)-(14,33 )
( 14,23)-(14,30 )
( 14,31)-(14,32 )
( 20,20)-(20,31 )
( 20,20)-(20,69 )
( 20,32)-(20,43 )
( 20,33)-(20,40 )
(4,4)-(7,9)
(4,13)-(7,7)
(5,3)-(7,7)
(6,44)-(6,51)
(6,44)-(6,67)
(6,45)-(6,50)
(6,55)-(6,67)
(6,56)-(6,58)
(7,3)-(7,5)
(7,3)-(7,7)
(10,28)-(10,46)
(10,29)-(10,40)
(10,30)-(10,37)
(10,38)-(10,39)
(10,43)-(10,45)
(13,8)-(13,19)
(13,8)-(13,33)
(13,9)-(13,16)
(13,17)-(13,18)
(13,22)-(13,33)
(13,23)-(13,30)
(13,31)-(13,32)
(14,10)-(14,21)
(14,10)-(14,45)
(14,22)-(14,33)
(14,23)-(14,30)
(14,31)-(14,32)
(20,20)-(20,31)
(20,20)-(20,69)
(20,32)-(20,43)
(20,33)-(20,40)
*)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14_min/1990.ml | ocaml |
let rec myAppend l n = match l with | [] -> [n] | h::t -> h :: (myAppend t n);;
let explode s =
let rec go i =
if i >= (String.length s) then [] else (s.[i]) :: (go (i + 1)) in
go 0;;
let rec listCompare l k =
if ((List.hd l) = []) && ((List.hd k) = [])
then true
else
if (List.hd l) = (List.hd k)
then listCompare (List.tl l) (List.tl k)
else false;;
let rec listReverse l =
match l with | [] -> [] | h::t -> myAppend (listReverse t) h;;
let palindrome w = listCompare (explode w) (listReverse (explode w));;
fix
let rec myAppend l n = match l with | [ ] - > [ n ] | h::t - > h : : ( myAppend t n ) ; ;
let explode s =
let rec go i =
if i > = ( String.length s ) then [ ] else ( s.[i ] ) : : ( go ( i + 1 ) ) in
go 0 ; ;
let rec l =
match l with | [ ] - > [ ] | h::t - > myAppend ( listReverse t ) h ; ;
let palindrome w = ( explode w ) = ( ( explode w ) ) ; ;
let rec myAppend l n = match l with | [] -> [n] | h::t -> h :: (myAppend t n);;
let explode s =
let rec go i =
if i >= (String.length s) then [] else (s.[i]) :: (go (i + 1)) in
go 0;;
let rec listReverse l =
match l with | [] -> [] | h::t -> myAppend (listReverse t) h;;
let palindrome w = (explode w) = (listReverse (explode w));;
*)
changed spans
( 20,20)-(20,69 )
explode w = ( explode w )
BopG ( AppG [ EmptyG ] ) ( AppG [ EmptyG ] )
(20,20)-(20,69)
explode w = listReverse (explode w)
BopG (AppG [EmptyG]) (AppG [EmptyG])
*)
type error slice
( 4,4)-(7,9 )
( 4,13)-(7,7 )
( 5,3)-(7,7 )
( 6,44)-(6,51 )
( 6,44)-(6,67 )
( 6,45)-(6,50 )
( 6,55)-(6,67 )
( 6,56)-(6,58 )
( 7,3)-(7,5 )
( 7,3)-(7,7 )
( 10,28)-(10,46 )
( 10,29)-(10,40 )
( 10,30)-(10,37 )
( 10,38)-(10,39 )
( 10,43)-(10,45 )
( 13,8)-(13,19 )
( 13,8)-(13,33 )
( 13,9)-(13,16 )
( 13,17)-(13,18 )
( 13,22)-(13,33 )
( 13,23)-(13,30 )
( 13,31)-(13,32 )
( 14,10)-(14,21 )
( 14,10)-(14,45 )
( 14,22)-(14,33 )
( 14,23)-(14,30 )
( 14,31)-(14,32 )
( 20,20)-(20,31 )
( 20,20)-(20,69 )
( 20,32)-(20,43 )
( 20,33)-(20,40 )
(4,4)-(7,9)
(4,13)-(7,7)
(5,3)-(7,7)
(6,44)-(6,51)
(6,44)-(6,67)
(6,45)-(6,50)
(6,55)-(6,67)
(6,56)-(6,58)
(7,3)-(7,5)
(7,3)-(7,7)
(10,28)-(10,46)
(10,29)-(10,40)
(10,30)-(10,37)
(10,38)-(10,39)
(10,43)-(10,45)
(13,8)-(13,19)
(13,8)-(13,33)
(13,9)-(13,16)
(13,17)-(13,18)
(13,22)-(13,33)
(13,23)-(13,30)
(13,31)-(13,32)
(14,10)-(14,21)
(14,10)-(14,45)
(14,22)-(14,33)
(14,23)-(14,30)
(14,31)-(14,32)
(20,20)-(20,31)
(20,20)-(20,69)
(20,32)-(20,43)
(20,33)-(20,40)
*)
| |
f071d336515a9cdb39a8511e94b16331e1a1c53c3c601405a31f0c56e77654c7 | cloudant-labs/couchdb-erlfdb | erlfdb_subspace.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(erlfdb_subspace).
-record(erlfdb_subspace, {
prefix
}).
-export([
create/1,
create/2,
add/2,
key/1,
pack/1,
pack/2,
pack_vs/1,
pack_vs/2,
unpack/2,
range/1,
range/2,
contains/2,
subspace/2
]).
-define(PREFIX(S), S#erlfdb_subspace.prefix).
create(Tuple) ->
create(Tuple, <<>>).
create(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
create(Tuple, ?PREFIX(Subspace));
create(Tuple, Prefix) when is_tuple(Tuple), is_binary(Prefix) ->
#erlfdb_subspace{
prefix = erlfdb_tuple:pack(Tuple, Prefix)
}.
add(#erlfdb_subspace{} = Subspace, Item) ->
create({Item}, ?PREFIX(Subspace)).
key(#erlfdb_subspace{} = Subspace) ->
Subspace#erlfdb_subspace.prefix.
pack(Subspace) ->
pack(Subspace, {}).
pack(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
erlfdb_tuple:pack(Tuple, ?PREFIX(Subspace)).
pack_vs(Subspace) ->
pack_vs(Subspace, {}).
pack_vs(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
erlfdb_tuple:pack_vs(Tuple, ?PREFIX(Subspace)).
unpack(#erlfdb_subspace{} = Subspace, Key) ->
case contains(Subspace, Key) of
true ->
Prefix = ?PREFIX(Subspace),
SubKey = binary:part(Key, size(Prefix), size(Key) - size(Prefix)),
erlfdb_tuple:unpack(SubKey);
false ->
erlang:error({key_not_in_subspace, Subspace, Key})
end.
range(Subspace) ->
range(Subspace, {}).
range(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
Prefix = ?PREFIX(Subspace),
PrefixLen = size(Prefix),
{Start, End} = erlfdb_tuple:range(Tuple),
{
<<Prefix:PrefixLen/binary, Start/binary>>,
<<Prefix:PrefixLen/binary, End/binary>>
}.
contains(#erlfdb_subspace{} = Subspace, Key) ->
Prefix = ?PREFIX(Subspace),
PrefLen = size(Prefix),
case Key of
<<Prefix:PrefLen/binary, _/binary>> ->
true;
_ ->
false
end.
subspace(#erlfdb_subspace{} = Subspace, Tuple) ->
create(Subspace, Tuple).
| null | https://raw.githubusercontent.com/cloudant-labs/couchdb-erlfdb/510664facbc28c946960db2d12b3baf33923f4ea/src/erlfdb_subspace.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(erlfdb_subspace).
-record(erlfdb_subspace, {
prefix
}).
-export([
create/1,
create/2,
add/2,
key/1,
pack/1,
pack/2,
pack_vs/1,
pack_vs/2,
unpack/2,
range/1,
range/2,
contains/2,
subspace/2
]).
-define(PREFIX(S), S#erlfdb_subspace.prefix).
create(Tuple) ->
create(Tuple, <<>>).
create(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
create(Tuple, ?PREFIX(Subspace));
create(Tuple, Prefix) when is_tuple(Tuple), is_binary(Prefix) ->
#erlfdb_subspace{
prefix = erlfdb_tuple:pack(Tuple, Prefix)
}.
add(#erlfdb_subspace{} = Subspace, Item) ->
create({Item}, ?PREFIX(Subspace)).
key(#erlfdb_subspace{} = Subspace) ->
Subspace#erlfdb_subspace.prefix.
pack(Subspace) ->
pack(Subspace, {}).
pack(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
erlfdb_tuple:pack(Tuple, ?PREFIX(Subspace)).
pack_vs(Subspace) ->
pack_vs(Subspace, {}).
pack_vs(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
erlfdb_tuple:pack_vs(Tuple, ?PREFIX(Subspace)).
unpack(#erlfdb_subspace{} = Subspace, Key) ->
case contains(Subspace, Key) of
true ->
Prefix = ?PREFIX(Subspace),
SubKey = binary:part(Key, size(Prefix), size(Key) - size(Prefix)),
erlfdb_tuple:unpack(SubKey);
false ->
erlang:error({key_not_in_subspace, Subspace, Key})
end.
range(Subspace) ->
range(Subspace, {}).
range(#erlfdb_subspace{} = Subspace, Tuple) when is_tuple(Tuple) ->
Prefix = ?PREFIX(Subspace),
PrefixLen = size(Prefix),
{Start, End} = erlfdb_tuple:range(Tuple),
{
<<Prefix:PrefixLen/binary, Start/binary>>,
<<Prefix:PrefixLen/binary, End/binary>>
}.
contains(#erlfdb_subspace{} = Subspace, Key) ->
Prefix = ?PREFIX(Subspace),
PrefLen = size(Prefix),
case Key of
<<Prefix:PrefLen/binary, _/binary>> ->
true;
_ ->
false
end.
subspace(#erlfdb_subspace{} = Subspace, Tuple) ->
create(Subspace, Tuple).
|
c568d715c142cc6c984913515d7346dbb4fd7c944e44d1f20916909b0a5e2682 | savonet/ocaml-posix | posix_uname_types.mli | open Ctypes
* types for < sys / utsname.h >
module Def (S : Cstubs.Types.TYPE) : sig
* type for [ struct utsname ]
module Utsname : sig
type t
val t : t structure S.typ
val sysname : (char carray, t structure) S.field
val nodename : (char carray, t structure) S.field
val release : (char carray, t structure) S.field
val version : (char carray, t structure) S.field
val machine : (char carray, t structure) S.field
end
end
| null | https://raw.githubusercontent.com/savonet/ocaml-posix/d46d011de16154b34324eae6ee6e7010138cf4af/posix-uname/src/types/posix_uname_types.mli | ocaml | open Ctypes
* types for < sys / utsname.h >
module Def (S : Cstubs.Types.TYPE) : sig
* type for [ struct utsname ]
module Utsname : sig
type t
val t : t structure S.typ
val sysname : (char carray, t structure) S.field
val nodename : (char carray, t structure) S.field
val release : (char carray, t structure) S.field
val version : (char carray, t structure) S.field
val machine : (char carray, t structure) S.field
end
end
| |
8d974d7afd8da921c2be0652d6ab87a10ffdaa0bcfd500349af6c11cc14ab952 | codedownio/sandwich | Main.hs | # LANGUAGE TypeOperators #
# LANGUAGE DataKinds #
# LANGUAGE CPP #
module Main where
import Control.Concurrent
import Control.Exception.Safe
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Logger (LogLevel(..))
import Data.String.Interpolate
import Data.Time.Clock
import Test.Sandwich
import Test.Sandwich.Formatters.FailureReport
import Test.Sandwich.Formatters.LogSaver
import Test.Sandwich.Formatters.Print
#ifndef mingw32_HOST_OS
import Test.Sandwich.Formatters.TerminalUI
#endif
data Database = Database String
deriving Show
data Foo = Foo { fooInt :: Int, fooString :: String, fooBar :: Bar } deriving (Show, Eq)
data Bar = Bar { barInt :: Int, barString :: String } deriving (Show, Eq)
data Baz = Baz Int String Bar deriving (Show, Eq)
data Simple = Simple { simpleInt :: Int } deriving (Show, Eq)
database = Label :: Label "database" Database
otherDatabase = Label :: Label "otherDatabase" Database
documentation :: TopSpec
documentation = describe "arithmetic" $ do
it "tests addition" $ do
(2 + 2) `shouldBe` 4
it "tests subtraction" $ do
warn "Having some trouble getting this test to pass..."
(2 - 2) `shouldBe` 1
verySimple :: TopSpec
verySimple = do
it "succeeds" (return ())
it "tries shouldBe" (2 `shouldBe` 3)
it "tries shouldBe with Foo" (Foo 2 "asdf" (Bar 2 "asdf") `shouldBe` Foo 3 "fdsa" (Bar 3 "fdsa"))
it "tries shouldBe with Baz" (Baz 2 "asdf" (Bar 2 "asdf") `shouldBe` Baz 3 "fdsa" (Bar 3 "fdsa"))
it "tries shouldBe with list" ([1, 2, 3] `shouldBe` [4, 5, 6])
it "tries shouldBe with tuple" ((1, 2, 3) `shouldBe` (4, 5, 6))
it "tries shouldBe with list of constructors" ([Simple 1, Simple 2] `shouldBe` [Simple 3, Simple 4])
it "tries shouldNotBe" (2 `shouldNotBe` 2)
it "is pending" $ pending
it "is pending with message" $ pendingWith "Not implemented yet..."
it "throws an exception" $ do
2 `shouldBe` 2
throwIO $ userError "Want a stacktrace here"
3 ` shouldBe ` 4
3 `shouldBe` 3
it "does some logging" $ do
debug "debug message"
info "info message"
warn "warn message"
logError "error message"
cancelling :: TopSpec
cancelling = do
before "succeeds" (debug "before called") $ do
it "sleeps forever" $ do
forever $ liftIO $ threadDelay 1
it "succeeds after 1 second" $ do
liftIO $ threadDelay 1000000
return ()
cancellingIntroduce :: TopSpec
cancellingIntroduce = do
introduce "alloc sleeps forever" database ((forever $ liftIO $ threadDelay 1000000) >> return (Database "foo")) (const $ return ()) $ do
it "sleeps forever" $ do
forever $ liftIO $ threadDelay 1
it "succeeds after 1 second" $ do
liftIO $ threadDelay 1000000
return ()
manyRows :: TopSpec
manyRows = do
forM_ [(0 :: Int)..100] $ \n ->
it [i|does the thing #{n}|] (2 `shouldBe` 2)
simple :: TopSpec
simple = do
it "does the thing 1" sleepThenSucceed
it "does the thing 2" sleepThenSucceed
it "does the thing 3" sleepThenFail
describe "should happen sequentially" $ do
it "sequential 1" sleepThenSucceed
it "sequential 2" sleepThenFail
it "sequential 3" sleepThenSucceed
it "does the thing 4" sleepThenFail
it "does the thing 5" sleepThenSucceed
it "does the thing 6" sleepThenSucceed
medium :: TopSpec
medium = do
it "does the first thing" sleepThenSucceed
it "does the 1.5 thing" sleepThenFail
it "does the 1.8 thing" sleepThenFail
describe "should happen sequentially" $ do
it "sequential 1" sleepThenSucceed
it "sequential 2" sleepThenFail
it "sequential 3" sleepThenSucceed
describe "should happen in parallel" $ parallel $ do
it "sequential 1" sleepThenSucceed
it "sequential 2" sleepThenSucceed
it "sequential 3" sleepThenSucceed
around "some around" (\action -> debug "around1" >> action >> debug "around2") $ do
it "does 1" sleepThenSucceed -- pending
it "does 2" sleepThenSucceed -- pending
introduceWith "Database around" database (\action -> void $ action (Database "foo")) $ do
it "uses the DB" $ do
db <- getContext database
debug [i|Got db: #{db}|]
liftIO $ threadDelay (3 * 10^6)
introduce "Database" database (debug "making DB" >> (return $ Database "outer")) (const $ return ()) $ do
it "uses the DB 1" $ do
db <- getContext database
debug [i|Got db: #{db}|]
introduce "Database again" database (return $ Database "shadowing") (const $ return ()) $ do
introduce "Database again" otherDatabase (return $ Database "other") (const $ return ()) $ do
it "uses the DB 2" $ do
db <- getContext database
debug [i|Got db: #{db}|]
otherDb <- getContext otherDatabase
debug [i|Got otherDb: #{otherDb}|]
it "does a thing sequentially" $ sleepThenFail
it "does a thing sequentially 2" $ sleepThenSucceed
it "does a thing sequentially 3" $ sleepThenSucceed
it "does a thing sequentially 4" $ sleepThenSucceed
afterEach "after each" (return ()) $ do
beforeEach "before each" (return ()) $ do
it "does the first thing" sleepThenSucceed
it "does the second thing" sleepThenSucceed
it "does the third thing" sleepThenSucceed
describe "nested stuff" $ do
it "does a nested thing" sleepThenSucceed
it "does foo" sleepThenFail
it "does bar" sleepThenSucceed
after "after" (debug "doing after") $ do
it "has a thing after it" $ sleepThenSucceed
introduceFailure :: TopSpec
introduceFailure = do
introduceWith "Database around" database (\action -> liftIO $ throwIO $ userError "Failed to get DB") $ do
introduce "Database" database (debug "making DB" >> (return $ Database "outer")) (const $ return ()) $ do
it "uses the DB 1" $ do
db <- getContext database
debug [i|Got db: #{db}|]
introduceWithInterrupt :: TopSpec
introduceWithInterrupt = do
introduceWith "Database around" database (\action -> liftIO $ threadDelay 999999999999999) $ do
it "uses the DB 1" $ do
db <- getContext database
debug [i|Got db: #{db}|]
beforeExceptionSafetyNested :: TopSpec
beforeExceptionSafetyNested = before "before label" (liftIO $ throwIO $ userError "OH NO") $ do
it "does thing 1" $ return ()
it "does thing 2" $ return ()
describe "nested things" $ do
it "does nested thing 1" $ return ()
it "does nested thing 2" $ return ()
longLogs :: TopSpec
longLogs = do
it "does thing 1" $
shouldFail (2 `shouldBe` 3)
it "does thing 2" $
shouldFailPredicate (\case
Reason {} -> True
_ -> False) (2 `shouldBe` 3)
it "does thing 3" $ do
forM_ [(0 :: Int)..200] $ \n -> debug [i|Log entry #{n}|]
it "does thing 4" $ return ()
it "does thing 5" $ return ()
main :: IO ()
main = runSandwichWithCommandLineArgs options documentation
where
options = defaultOptions {
optionsTestArtifactsDirectory = TestArtifactsGeneratedDirectory "test_runs" (show <$> getCurrentTime)
, optionsFormatters = [SomeFormatter defaultLogSaverFormatter]
, optionsProjectRoot = Just "sandwich"
}
-- * Util
sleepThenSucceed :: ExampleM context ()
sleepThenSucceed = do
liftIO $ threadDelay (2 * 10^1)
liftIO $ threadDelay ( 2 * 10 ^ 5 )
liftIO $ threadDelay ( 1 * 10 ^ 6 )
sleepThenFail :: ExampleM context ()
sleepThenFail = do
liftIO $ threadDelay (2 * 10^1)
liftIO $ threadDelay ( 2 * 10 ^ 5 )
liftIO $ threadDelay ( 1 * 10 ^ 6 )
2 `shouldBe` 3
| null | https://raw.githubusercontent.com/codedownio/sandwich/80c5244a47a29c3ec20d68afec89297e014847b9/sandwich/app/Main.hs | haskell | pending
pending
* Util | # LANGUAGE TypeOperators #
# LANGUAGE DataKinds #
# LANGUAGE CPP #
module Main where
import Control.Concurrent
import Control.Exception.Safe
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Logger (LogLevel(..))
import Data.String.Interpolate
import Data.Time.Clock
import Test.Sandwich
import Test.Sandwich.Formatters.FailureReport
import Test.Sandwich.Formatters.LogSaver
import Test.Sandwich.Formatters.Print
#ifndef mingw32_HOST_OS
import Test.Sandwich.Formatters.TerminalUI
#endif
data Database = Database String
deriving Show
data Foo = Foo { fooInt :: Int, fooString :: String, fooBar :: Bar } deriving (Show, Eq)
data Bar = Bar { barInt :: Int, barString :: String } deriving (Show, Eq)
data Baz = Baz Int String Bar deriving (Show, Eq)
data Simple = Simple { simpleInt :: Int } deriving (Show, Eq)
database = Label :: Label "database" Database
otherDatabase = Label :: Label "otherDatabase" Database
documentation :: TopSpec
documentation = describe "arithmetic" $ do
it "tests addition" $ do
(2 + 2) `shouldBe` 4
it "tests subtraction" $ do
warn "Having some trouble getting this test to pass..."
(2 - 2) `shouldBe` 1
verySimple :: TopSpec
verySimple = do
it "succeeds" (return ())
it "tries shouldBe" (2 `shouldBe` 3)
it "tries shouldBe with Foo" (Foo 2 "asdf" (Bar 2 "asdf") `shouldBe` Foo 3 "fdsa" (Bar 3 "fdsa"))
it "tries shouldBe with Baz" (Baz 2 "asdf" (Bar 2 "asdf") `shouldBe` Baz 3 "fdsa" (Bar 3 "fdsa"))
it "tries shouldBe with list" ([1, 2, 3] `shouldBe` [4, 5, 6])
it "tries shouldBe with tuple" ((1, 2, 3) `shouldBe` (4, 5, 6))
it "tries shouldBe with list of constructors" ([Simple 1, Simple 2] `shouldBe` [Simple 3, Simple 4])
it "tries shouldNotBe" (2 `shouldNotBe` 2)
it "is pending" $ pending
it "is pending with message" $ pendingWith "Not implemented yet..."
it "throws an exception" $ do
2 `shouldBe` 2
throwIO $ userError "Want a stacktrace here"
3 ` shouldBe ` 4
3 `shouldBe` 3
it "does some logging" $ do
debug "debug message"
info "info message"
warn "warn message"
logError "error message"
cancelling :: TopSpec
cancelling = do
before "succeeds" (debug "before called") $ do
it "sleeps forever" $ do
forever $ liftIO $ threadDelay 1
it "succeeds after 1 second" $ do
liftIO $ threadDelay 1000000
return ()
cancellingIntroduce :: TopSpec
cancellingIntroduce = do
introduce "alloc sleeps forever" database ((forever $ liftIO $ threadDelay 1000000) >> return (Database "foo")) (const $ return ()) $ do
it "sleeps forever" $ do
forever $ liftIO $ threadDelay 1
it "succeeds after 1 second" $ do
liftIO $ threadDelay 1000000
return ()
manyRows :: TopSpec
manyRows = do
forM_ [(0 :: Int)..100] $ \n ->
it [i|does the thing #{n}|] (2 `shouldBe` 2)
simple :: TopSpec
simple = do
it "does the thing 1" sleepThenSucceed
it "does the thing 2" sleepThenSucceed
it "does the thing 3" sleepThenFail
describe "should happen sequentially" $ do
it "sequential 1" sleepThenSucceed
it "sequential 2" sleepThenFail
it "sequential 3" sleepThenSucceed
it "does the thing 4" sleepThenFail
it "does the thing 5" sleepThenSucceed
it "does the thing 6" sleepThenSucceed
medium :: TopSpec
medium = do
it "does the first thing" sleepThenSucceed
it "does the 1.5 thing" sleepThenFail
it "does the 1.8 thing" sleepThenFail
describe "should happen sequentially" $ do
it "sequential 1" sleepThenSucceed
it "sequential 2" sleepThenFail
it "sequential 3" sleepThenSucceed
describe "should happen in parallel" $ parallel $ do
it "sequential 1" sleepThenSucceed
it "sequential 2" sleepThenSucceed
it "sequential 3" sleepThenSucceed
around "some around" (\action -> debug "around1" >> action >> debug "around2") $ do
introduceWith "Database around" database (\action -> void $ action (Database "foo")) $ do
it "uses the DB" $ do
db <- getContext database
debug [i|Got db: #{db}|]
liftIO $ threadDelay (3 * 10^6)
introduce "Database" database (debug "making DB" >> (return $ Database "outer")) (const $ return ()) $ do
it "uses the DB 1" $ do
db <- getContext database
debug [i|Got db: #{db}|]
introduce "Database again" database (return $ Database "shadowing") (const $ return ()) $ do
introduce "Database again" otherDatabase (return $ Database "other") (const $ return ()) $ do
it "uses the DB 2" $ do
db <- getContext database
debug [i|Got db: #{db}|]
otherDb <- getContext otherDatabase
debug [i|Got otherDb: #{otherDb}|]
it "does a thing sequentially" $ sleepThenFail
it "does a thing sequentially 2" $ sleepThenSucceed
it "does a thing sequentially 3" $ sleepThenSucceed
it "does a thing sequentially 4" $ sleepThenSucceed
afterEach "after each" (return ()) $ do
beforeEach "before each" (return ()) $ do
it "does the first thing" sleepThenSucceed
it "does the second thing" sleepThenSucceed
it "does the third thing" sleepThenSucceed
describe "nested stuff" $ do
it "does a nested thing" sleepThenSucceed
it "does foo" sleepThenFail
it "does bar" sleepThenSucceed
after "after" (debug "doing after") $ do
it "has a thing after it" $ sleepThenSucceed
introduceFailure :: TopSpec
introduceFailure = do
introduceWith "Database around" database (\action -> liftIO $ throwIO $ userError "Failed to get DB") $ do
introduce "Database" database (debug "making DB" >> (return $ Database "outer")) (const $ return ()) $ do
it "uses the DB 1" $ do
db <- getContext database
debug [i|Got db: #{db}|]
introduceWithInterrupt :: TopSpec
introduceWithInterrupt = do
introduceWith "Database around" database (\action -> liftIO $ threadDelay 999999999999999) $ do
it "uses the DB 1" $ do
db <- getContext database
debug [i|Got db: #{db}|]
beforeExceptionSafetyNested :: TopSpec
beforeExceptionSafetyNested = before "before label" (liftIO $ throwIO $ userError "OH NO") $ do
it "does thing 1" $ return ()
it "does thing 2" $ return ()
describe "nested things" $ do
it "does nested thing 1" $ return ()
it "does nested thing 2" $ return ()
longLogs :: TopSpec
longLogs = do
it "does thing 1" $
shouldFail (2 `shouldBe` 3)
it "does thing 2" $
shouldFailPredicate (\case
Reason {} -> True
_ -> False) (2 `shouldBe` 3)
it "does thing 3" $ do
forM_ [(0 :: Int)..200] $ \n -> debug [i|Log entry #{n}|]
it "does thing 4" $ return ()
it "does thing 5" $ return ()
main :: IO ()
main = runSandwichWithCommandLineArgs options documentation
where
options = defaultOptions {
optionsTestArtifactsDirectory = TestArtifactsGeneratedDirectory "test_runs" (show <$> getCurrentTime)
, optionsFormatters = [SomeFormatter defaultLogSaverFormatter]
, optionsProjectRoot = Just "sandwich"
}
sleepThenSucceed :: ExampleM context ()
sleepThenSucceed = do
liftIO $ threadDelay (2 * 10^1)
liftIO $ threadDelay ( 2 * 10 ^ 5 )
liftIO $ threadDelay ( 1 * 10 ^ 6 )
sleepThenFail :: ExampleM context ()
sleepThenFail = do
liftIO $ threadDelay (2 * 10^1)
liftIO $ threadDelay ( 2 * 10 ^ 5 )
liftIO $ threadDelay ( 1 * 10 ^ 6 )
2 `shouldBe` 3
|
ec2b6aefc3111621d82df386b1a3be69321c77313cf014b902b6345ccd2bcf77 | mirage/irmin | snapshot_intf.ml |
* Copyright ( c ) 2018 - 2022 Tarides < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2018-2022 Tarides <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open! Import
module type Args = sig
module Hash : Irmin.Hash.S
module Fm : File_manager.S
module Dispatcher : Dispatcher.S with module Fm = Fm
module Inode :
Inode.Persistent
with type hash := Hash.t
and type key = Hash.t Pack_key.t
and type file_manager = Fm.t
and type dispatcher = Dispatcher.t
module Contents_pack :
Pack_store.S
with type hash := Hash.t
and type key = Hash.t Pack_key.t
and type dispatcher = Dispatcher.t
end
module type Sigs = sig
module Make (Args : Args) : sig
open Args
module Export : sig
type t
val v : Irmin.config -> read Contents_pack.t -> read Inode.Pack.t -> t
val run :
?on_disk:[ `Path of string ] ->
t ->
(Contents_pack.value -> unit Lwt.t) ->
(Inode.Snapshot.inode -> unit Lwt.t) ->
Hash.t Pack_key.t * Pack_value.Kind.t ->
int Lwt.t
val close :
t ->
( unit,
[> `Double_close
| `Index_failure of string
| `Io_misc of Fm.Io.misc_error
| `Pending_flush
| `Ro_not_allowed ] )
result
end
module Import : sig
type t
val v :
?on_disk:[ `Path of string | `Reuse ] ->
int ->
read Contents_pack.t ->
read Inode.Pack.t ->
t
val save_contents : t -> Contents_pack.value -> Hash.t Pack_key.t Lwt.t
val save_inodes : t -> Inode.Snapshot.inode -> Hash.t Pack_key.t Lwt.t
val close : t -> unit
end
end
end
| null | https://raw.githubusercontent.com/mirage/irmin/39afb7bad3c5fbdfb22409d92a04e3f225a96cd2/src/irmin-pack/unix/snapshot_intf.ml | ocaml |
* Copyright ( c ) 2018 - 2022 Tarides < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2018-2022 Tarides <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open! Import
module type Args = sig
module Hash : Irmin.Hash.S
module Fm : File_manager.S
module Dispatcher : Dispatcher.S with module Fm = Fm
module Inode :
Inode.Persistent
with type hash := Hash.t
and type key = Hash.t Pack_key.t
and type file_manager = Fm.t
and type dispatcher = Dispatcher.t
module Contents_pack :
Pack_store.S
with type hash := Hash.t
and type key = Hash.t Pack_key.t
and type dispatcher = Dispatcher.t
end
module type Sigs = sig
module Make (Args : Args) : sig
open Args
module Export : sig
type t
val v : Irmin.config -> read Contents_pack.t -> read Inode.Pack.t -> t
val run :
?on_disk:[ `Path of string ] ->
t ->
(Contents_pack.value -> unit Lwt.t) ->
(Inode.Snapshot.inode -> unit Lwt.t) ->
Hash.t Pack_key.t * Pack_value.Kind.t ->
int Lwt.t
val close :
t ->
( unit,
[> `Double_close
| `Index_failure of string
| `Io_misc of Fm.Io.misc_error
| `Pending_flush
| `Ro_not_allowed ] )
result
end
module Import : sig
type t
val v :
?on_disk:[ `Path of string | `Reuse ] ->
int ->
read Contents_pack.t ->
read Inode.Pack.t ->
t
val save_contents : t -> Contents_pack.value -> Hash.t Pack_key.t Lwt.t
val save_inodes : t -> Inode.Snapshot.inode -> Hash.t Pack_key.t Lwt.t
val close : t -> unit
end
end
end
| |
11f5d4f5b363021957365949c0a092e8b92fb0a966724d8bdff25644dc9005c6 | gafiatulin/codewars | Palindrome.hs | Palindrome for your Dome
--
module Codewars.Kata.Palindrome where
import Prelude hiding (reverse)
import Data.Char (toLower, isAlphaNum)
import Control.Arrow ((&&&))
isPalindrome :: String -> Bool
isPalindrome = uncurry (==) . (id &&& reverse') . map toLower . filter isAlphaNum
where reverse' [] = []
reverse' [x] = [x]
reverse' (x:xs) = reverse' xs ++ [x]
| null | https://raw.githubusercontent.com/gafiatulin/codewars/535db608333e854be93ecfc165686a2162264fef/src/6%20kyu/Palindrome.hs | haskell | Palindrome for your Dome
module Codewars.Kata.Palindrome where
import Prelude hiding (reverse)
import Data.Char (toLower, isAlphaNum)
import Control.Arrow ((&&&))
isPalindrome :: String -> Bool
isPalindrome = uncurry (==) . (id &&& reverse') . map toLower . filter isAlphaNum
where reverse' [] = []
reverse' [x] = [x]
reverse' (x:xs) = reverse' xs ++ [x]
| |
7e351c52fb579ccc60a44f8b479c0937b6d35024312d1d0d26385c5b1be4369c | fission-codes/fission | Types.hs | module Fission.CLI.Parser.Command.Generate.Credentials.Types (Options (..)) where
import Fission.Prelude
data Options = CommandOnly
deriving (Show, Eq) | null | https://raw.githubusercontent.com/fission-codes/fission/849c935ff990bee16d68e6151ff120579afbced6/fission-cli/library/Fission/CLI/Parser/Command/Generate/Credentials/Types.hs | haskell | module Fission.CLI.Parser.Command.Generate.Credentials.Types (Options (..)) where
import Fission.Prelude
data Options = CommandOnly
deriving (Show, Eq) | |
60c80974028eda07e506089494393654630188d8ce6ae337b5106916e0824a56 | EMSL-NMR-EPR/Haskell-MFAPipe-Executable | Constants.hs | -----------------------------------------------------------------------------
-- |
-- Module : Language.INCA.Constants
Copyright : 2016 - 17 Pacific Northwest National Laboratory
-- License : ECL-2.0 (see the LICENSE file in the distribution)
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
This module exports constants for the representation of INCA syntax .
-----------------------------------------------------------------------------
module Language.INCA.Constants
( -- * Constants
-- ** Metabolic flux variables
cINCAFluxVarSeparator
, cINCAFluxVarExchange , cINCAFluxVarTransport
, cINCAFluxVarDirectionForwards , cINCAFluxVarDirectionBackwards
-- ** Metabolite variables
, cINCAMetaboliteVarSeparator
, cINCAMetaboliteVarCompartmentNameExtracellular
) where
| Separator for the name and direction of a metabolic flux variable in INCA syntax .
cINCAFluxVarSeparator :: Char
cINCAFluxVarSeparator = '.'
# INLINE cINCAFluxVarSeparator #
| Prefix for metabolic flux variables in INCA syntax that denote exchange chemical reactions .
cINCAFluxVarExchange :: Char
cINCAFluxVarExchange = 'v'
# INLINE cINCAFluxVarExchange #
| Prefix for metabolic flux variables in INCA syntax that denote transport chemical reactions .
cINCAFluxVarTransport :: Char
cINCAFluxVarTransport = 'b'
# INLINE cINCAFluxVarTransport #
| Suffix for metabolic flux variables in INCA syntax that denote forwards chemical reactions .
cINCAFluxVarDirectionForwards :: Char
cINCAFluxVarDirectionForwards = 'f'
# INLINE cINCAFluxVarDirectionForwards #
| Suffix for metabolic flux variables in INCA syntax that denote backwards chemical reactions .
cINCAFluxVarDirectionBackwards :: Char
cINCAFluxVarDirectionBackwards = 'b'
# INLINE cINCAFluxVarDirectionBackwards #
| Separator for the name and compartment name of a metabolite variable in INCA syntax .
cINCAMetaboliteVarSeparator :: Char
cINCAMetaboliteVarSeparator = '.'
# INLINE cINCAMetaboliteVarSeparator #
| Suffix for metabolite variables in INCA syntax that denote extracellular metabolites .
cINCAMetaboliteVarCompartmentNameExtracellular :: String
cINCAMetaboliteVarCompartmentNameExtracellular = "ext"
# INLINE cINCAMetaboliteVarCompartmentNameExtracellular #
| null | https://raw.githubusercontent.com/EMSL-NMR-EPR/Haskell-MFAPipe-Executable/8a7fd13202d3b6b7380af52d86e851e995a9b53e/MFAPipe/src/Language/INCA/Constants.hs | haskell | ---------------------------------------------------------------------------
|
Module : Language.INCA.Constants
License : ECL-2.0 (see the LICENSE file in the distribution)
Maintainer :
Stability : experimental
Portability : portable
---------------------------------------------------------------------------
* Constants
** Metabolic flux variables
** Metabolite variables | Copyright : 2016 - 17 Pacific Northwest National Laboratory
This module exports constants for the representation of INCA syntax .
module Language.INCA.Constants
cINCAFluxVarSeparator
, cINCAFluxVarExchange , cINCAFluxVarTransport
, cINCAFluxVarDirectionForwards , cINCAFluxVarDirectionBackwards
, cINCAMetaboliteVarSeparator
, cINCAMetaboliteVarCompartmentNameExtracellular
) where
| Separator for the name and direction of a metabolic flux variable in INCA syntax .
cINCAFluxVarSeparator :: Char
cINCAFluxVarSeparator = '.'
# INLINE cINCAFluxVarSeparator #
| Prefix for metabolic flux variables in INCA syntax that denote exchange chemical reactions .
cINCAFluxVarExchange :: Char
cINCAFluxVarExchange = 'v'
# INLINE cINCAFluxVarExchange #
| Prefix for metabolic flux variables in INCA syntax that denote transport chemical reactions .
cINCAFluxVarTransport :: Char
cINCAFluxVarTransport = 'b'
# INLINE cINCAFluxVarTransport #
| Suffix for metabolic flux variables in INCA syntax that denote forwards chemical reactions .
cINCAFluxVarDirectionForwards :: Char
cINCAFluxVarDirectionForwards = 'f'
# INLINE cINCAFluxVarDirectionForwards #
| Suffix for metabolic flux variables in INCA syntax that denote backwards chemical reactions .
cINCAFluxVarDirectionBackwards :: Char
cINCAFluxVarDirectionBackwards = 'b'
# INLINE cINCAFluxVarDirectionBackwards #
| Separator for the name and compartment name of a metabolite variable in INCA syntax .
cINCAMetaboliteVarSeparator :: Char
cINCAMetaboliteVarSeparator = '.'
# INLINE cINCAMetaboliteVarSeparator #
| Suffix for metabolite variables in INCA syntax that denote extracellular metabolites .
cINCAMetaboliteVarCompartmentNameExtracellular :: String
cINCAMetaboliteVarCompartmentNameExtracellular = "ext"
# INLINE cINCAMetaboliteVarCompartmentNameExtracellular #
|
fabfb4d25a37c2fc7db75c0c06758e7169fc73b8d68ff6a53314d91f192fcff4 | MMagueta/MMaguetaGuix | install.scm | Copyright © 2019 < >
Copyright © 2019 < >
;;;
;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;; (at your option) any later version.
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with this program. If not, see </>.
;; Generate a bootable image (e.g. for USB sticks, etc.) with:
$ guix system disk - image nongnu / system / install.scm
(define-module (nongnu system install)
#:use-module (gnu system)
#:use-module (gnu system install)
#:use-module (nongnu packages linux)
#:export (installation-os-nonfree))
(define installation-os-nonfree
(operating-system
(inherit installation-os)
(kernel linux)
(firmware (list linux-firmware))))
installation-os-nonfree
| null | https://raw.githubusercontent.com/MMagueta/MMaguetaGuix/0490ffc33bc7d269e2b586637a76e7fe7914fbfb/nongnu/system/install.scm | scheme |
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
Generate a bootable image (e.g. for USB sticks, etc.) with: | Copyright © 2019 < >
Copyright © 2019 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
$ guix system disk - image nongnu / system / install.scm
(define-module (nongnu system install)
#:use-module (gnu system)
#:use-module (gnu system install)
#:use-module (nongnu packages linux)
#:export (installation-os-nonfree))
(define installation-os-nonfree
(operating-system
(inherit installation-os)
(kernel linux)
(firmware (list linux-firmware))))
installation-os-nonfree
|
9c778f9e46a60d5323d9b258b49078ccdf490417d65ba7aaa2e175fe26c6bb1b | Ptival/yugioh | CurseOfDragon.hs | module YuGiOh.Card.CurseOfDragon
( curseOfDragon,
)
where
import YuGiOh.Attribute
import YuGiOh.Card
import YuGiOh.Type
curseOfDragon :: Card
curseOfDragon =
Card
{ _name = "Curse of Dragon",
_attribute = Dark,
_level = 5,
_monsterType = Dragon,
_description = "A wicked dragon that taps into dark forces to execute a powerful attack.",
_attack = 2000,
_defense = 1500
}
| null | https://raw.githubusercontent.com/Ptival/yugioh/855e8f22c3ff322c7ca1d0a1fa96c50a54724aeb/lib/YuGiOh/Card/CurseOfDragon.hs | haskell | module YuGiOh.Card.CurseOfDragon
( curseOfDragon,
)
where
import YuGiOh.Attribute
import YuGiOh.Card
import YuGiOh.Type
curseOfDragon :: Card
curseOfDragon =
Card
{ _name = "Curse of Dragon",
_attribute = Dark,
_level = 5,
_monsterType = Dragon,
_description = "A wicked dragon that taps into dark forces to execute a powerful attack.",
_attack = 2000,
_defense = 1500
}
| |
4ee3633963b73826a6259bb09536a07aaabd07638edff7327157ac7b0badd9e2 | ahrefs/devkit | daemon.ml | (** daemon utilities *)
module U = ExtUnix.Specific
let log = Log.from "daemon"
let logfile = ref None
let pidfile = ref None
let runas = ref None
let foreground = ref false
let managed = ref false
* global flag indicating that process should exit ,
[ manage ] will automatically set this flag on SIGTERM unless default signal handling is overriden
[manage] will automatically set this flag on SIGTERM unless default signal handling is overriden
*)
let should_exit_ = ref false
(** [should_exit_lwt] usage is discouraged.
Use [wait_exit] instead, which makes it harder to ignore "should exit" state and loop infinitely
*)
let (should_exit_lwt,signal_exit_lwt) = Lwt.wait ()
let should_exit () = !should_exit_
let should_run () = not !should_exit_
(** exception to be raised by functions that wish to signal premature termination due to [!should_exit = true] *)
exception ShouldExit
let signal_exit =
let do_lwt = lazy (Lwt.wakeup_later signal_exit_lwt ()) in
(* invariant: should_exit_ = (Lwt.state should_exit_lwt = Lwt.Return) *)
fun () -> should_exit_ := true; Lazy.force do_lwt
* @raise ShouldExit if [ should_exit ] condition is set , otherwise do nothing
let break () = if !should_exit_ then raise ShouldExit
* wait until [ should_exit ] is set and raise [ ShouldExit ]
let wait_exit =
(* NOTE
Bind to should_exit_lwt only once, because every bind will create an immutable waiter on
should_exit_lwt's sleeper, that is only removed after should_exit_lwt thread terminates.
*)
let thread = lazy (Lwt.bind should_exit_lwt (fun () -> Lwt.fail ShouldExit)) in
fun () -> Lazy.force thread
(** [break_lwt = Lwt.wrap break] *)
let break_lwt () = Lwt.wrap break
* [ unless_exit x ] resolves promise [ x ] or raises [ ShouldExit ]
let unless_exit x = Lwt.pick [wait_exit (); x]
let get_args () =
[
("-loglevel", Arg.String Log.set_loglevels, " ([<facil|prefix*>=]debug|info|warn|error[,])+");
ExtArg.may_str "logfile" logfile "<file> Log file";
ExtArg.may_str "pidfile" pidfile "<file> PID file";
"-runas",
Arg.String (fun name -> try runas := Some (Unix.getpwnam name) with exn -> Exn.fail ~exn "runas: unknown user %s" name),
"<user> run as specified user";
"-fg", Arg.Set foreground, " Stay in foreground";
]
let args = get_args ()
let install_signal_handlers () =
let unix_stderr s =
let s = Log.State.format_simple `Info log#facility s in
try
let (_:int) = Unix.write_substring Unix.stderr s 0 (String.length s) in ()
with _ ->
() (* do not fail, can be ENOSPC *)
in
Signal.set [Sys.sigpipe] ignore;
Signal.set [Sys.sigusr1] (fun _ -> Log.reopen !logfile);
Signal.set [Sys.sigusr2] begin fun _ ->
match Signal.is_safe_output () with
| true -> Memory.log_stats (); Memory.reclaim ()
| false ->
(* output directly to fd to prevent deadlock, but breaks buffering *)
Memory.get_stats () |> List.iter unix_stderr;
Memory.reclaim_s () |> unix_stderr
end;
Signal.set_exit signal_exit
let manage () =
match !managed with
| true -> () (* be smart *)
| false ->
(*
this will fail if files don't exists :(
(* fail before fork if something is wrong *)
Option.may (fun path -> Unix.(access path [R_OK;W_OK])) !logfile;
Option.may (fun path -> Unix.(access path [R_OK;W_OK])) !pidfile;
*)
Option.may Nix.check_pidfile !pidfile; (* check pidfile before fork to fail early *)
if not !foreground then Nix.daemonize ();
begin match !runas with
| None -> ()
| Some pw ->
let uid = pw.Unix.pw_uid and gid = pw.Unix.pw_gid in
U.setreuid uid uid;
U.setregid gid gid;
end;
Log.reopen !logfile; (* immediately after fork *)
Log.read_env_config ();
write pidfile after fork !
if Option.is_some !logfile then
begin
log #info "run: %s" Nix.cmdline;
log #info "GC settings: %s" (Action.gc_settings ());
end;
install_signal_handlers ();
Nix.raise_limits ();
managed := true;
()
| null | https://raw.githubusercontent.com/ahrefs/devkit/559c2df8f6eacb091e0eac38f508c45b6567bdd8/daemon.ml | ocaml | * daemon utilities
* [should_exit_lwt] usage is discouraged.
Use [wait_exit] instead, which makes it harder to ignore "should exit" state and loop infinitely
* exception to be raised by functions that wish to signal premature termination due to [!should_exit = true]
invariant: should_exit_ = (Lwt.state should_exit_lwt = Lwt.Return)
NOTE
Bind to should_exit_lwt only once, because every bind will create an immutable waiter on
should_exit_lwt's sleeper, that is only removed after should_exit_lwt thread terminates.
* [break_lwt = Lwt.wrap break]
do not fail, can be ENOSPC
output directly to fd to prevent deadlock, but breaks buffering
be smart
this will fail if files don't exists :(
(* fail before fork if something is wrong
check pidfile before fork to fail early
immediately after fork |
module U = ExtUnix.Specific
let log = Log.from "daemon"
let logfile = ref None
let pidfile = ref None
let runas = ref None
let foreground = ref false
let managed = ref false
* global flag indicating that process should exit ,
[ manage ] will automatically set this flag on SIGTERM unless default signal handling is overriden
[manage] will automatically set this flag on SIGTERM unless default signal handling is overriden
*)
let should_exit_ = ref false
let (should_exit_lwt,signal_exit_lwt) = Lwt.wait ()
let should_exit () = !should_exit_
let should_run () = not !should_exit_
exception ShouldExit
let signal_exit =
let do_lwt = lazy (Lwt.wakeup_later signal_exit_lwt ()) in
fun () -> should_exit_ := true; Lazy.force do_lwt
* @raise ShouldExit if [ should_exit ] condition is set , otherwise do nothing
let break () = if !should_exit_ then raise ShouldExit
* wait until [ should_exit ] is set and raise [ ShouldExit ]
let wait_exit =
let thread = lazy (Lwt.bind should_exit_lwt (fun () -> Lwt.fail ShouldExit)) in
fun () -> Lazy.force thread
let break_lwt () = Lwt.wrap break
* [ unless_exit x ] resolves promise [ x ] or raises [ ShouldExit ]
let unless_exit x = Lwt.pick [wait_exit (); x]
let get_args () =
[
("-loglevel", Arg.String Log.set_loglevels, " ([<facil|prefix*>=]debug|info|warn|error[,])+");
ExtArg.may_str "logfile" logfile "<file> Log file";
ExtArg.may_str "pidfile" pidfile "<file> PID file";
"-runas",
Arg.String (fun name -> try runas := Some (Unix.getpwnam name) with exn -> Exn.fail ~exn "runas: unknown user %s" name),
"<user> run as specified user";
"-fg", Arg.Set foreground, " Stay in foreground";
]
let args = get_args ()
let install_signal_handlers () =
let unix_stderr s =
let s = Log.State.format_simple `Info log#facility s in
try
let (_:int) = Unix.write_substring Unix.stderr s 0 (String.length s) in ()
with _ ->
in
Signal.set [Sys.sigpipe] ignore;
Signal.set [Sys.sigusr1] (fun _ -> Log.reopen !logfile);
Signal.set [Sys.sigusr2] begin fun _ ->
match Signal.is_safe_output () with
| true -> Memory.log_stats (); Memory.reclaim ()
| false ->
Memory.get_stats () |> List.iter unix_stderr;
Memory.reclaim_s () |> unix_stderr
end;
Signal.set_exit signal_exit
let manage () =
match !managed with
| false ->
Option.may (fun path -> Unix.(access path [R_OK;W_OK])) !logfile;
Option.may (fun path -> Unix.(access path [R_OK;W_OK])) !pidfile;
*)
if not !foreground then Nix.daemonize ();
begin match !runas with
| None -> ()
| Some pw ->
let uid = pw.Unix.pw_uid and gid = pw.Unix.pw_gid in
U.setreuid uid uid;
U.setregid gid gid;
end;
Log.read_env_config ();
write pidfile after fork !
if Option.is_some !logfile then
begin
log #info "run: %s" Nix.cmdline;
log #info "GC settings: %s" (Action.gc_settings ());
end;
install_signal_handlers ();
Nix.raise_limits ();
managed := true;
()
|
64a87a8efdd83616d23061838c154a1763857e120682e6bc62c783ae299823ac | Shinmera/vpetjam | strings.lisp | go-backwards-in-ui "Back"
new-game "New Game"
credits-menu "View Credits"
wishlist-cta "Check out Kandria on Steam!"
exit-game "Quit Game"
basic-seed "Basic Seed"
personal-seed "Hypo Sensoron 2"
hued-seed "Rainbow+ Infusion"
slow-seed "Kalmson Type-B"
fast-seed "Greeneron"
hat-seed "Accessory Genome"
settings-menu "Settings"
master-volume "Master Volume"
effect-volume "Effects Volume"
music-volume "Music Volume"
screen-resolution "Resolution"
should-application-fullscreen "Fullscreen"
activate-vsync "VSync"
user-interface-scale-factor "UI Scale"
tutorial "Tutorial"
tutorial-1 "You have just inherited a small vegetable growing farm and have your sights set on riches!"
tutorial-2 "With a couple genetically enhanced seeds in hand, you set out to grow rare veggies. Pick up a seed and plant it in any of the growing zones."
tutorial-4 "You can earn money by selling creatures through the drop box, and buy more exotic seeds with the computer up top."
tutorial-3 "Select veggies with rare traits and combine their DNA in the splicer. Who knows what you can come up with!"
tutorial-5 "(Psst: If you run out of money, just type motherlode)"
| null | https://raw.githubusercontent.com/Shinmera/vpetjam/6919704aecb42d3c27cb920b641da69bcc5dca66/lang/eng/strings.lisp | lisp | go-backwards-in-ui "Back"
new-game "New Game"
credits-menu "View Credits"
wishlist-cta "Check out Kandria on Steam!"
exit-game "Quit Game"
basic-seed "Basic Seed"
personal-seed "Hypo Sensoron 2"
hued-seed "Rainbow+ Infusion"
slow-seed "Kalmson Type-B"
fast-seed "Greeneron"
hat-seed "Accessory Genome"
settings-menu "Settings"
master-volume "Master Volume"
effect-volume "Effects Volume"
music-volume "Music Volume"
screen-resolution "Resolution"
should-application-fullscreen "Fullscreen"
activate-vsync "VSync"
user-interface-scale-factor "UI Scale"
tutorial "Tutorial"
tutorial-1 "You have just inherited a small vegetable growing farm and have your sights set on riches!"
tutorial-2 "With a couple genetically enhanced seeds in hand, you set out to grow rare veggies. Pick up a seed and plant it in any of the growing zones."
tutorial-4 "You can earn money by selling creatures through the drop box, and buy more exotic seeds with the computer up top."
tutorial-3 "Select veggies with rare traits and combine their DNA in the splicer. Who knows what you can come up with!"
tutorial-5 "(Psst: If you run out of money, just type motherlode)"
| |
6ae60aa2e65032f5b3fc0f2045373d81d35bf10d0437c722fcc37a8d81b914ee | bhauman/certifiable | sha.clj | (ns certifiable.sha
(:require [clojure.string :as string])
(:import [java.security MessageDigest]
[java.nio.charset StandardCharsets]))
;; taken from -digest/blob/master/src/digest.clj
(defn- signature
[^MessageDigest algorithm]
(let [size (* 2 (.getDigestLength algorithm))
sig (.toString (BigInteger. 1 (.digest algorithm)) 16)
padding (string/join (repeat (- size (count sig)) "0"))]
(str padding sig)))
(defn sha-signature [s]
(let [md (MessageDigest/getInstance "SHA-256")]
(.reset md)
(.update md (.getBytes ^String s StandardCharsets/UTF_8))
(signature md)))
(defn sha-signature-short [s]
(subs (sha-signature s) 0 7))
#_(sha-digest-short "abasdf")
| null | https://raw.githubusercontent.com/bhauman/certifiable/ccf40e29d56dff3ce3592e6cc8fc45b40c569ea0/src/certifiable/sha.clj | clojure | taken from -digest/blob/master/src/digest.clj | (ns certifiable.sha
(:require [clojure.string :as string])
(:import [java.security MessageDigest]
[java.nio.charset StandardCharsets]))
(defn- signature
[^MessageDigest algorithm]
(let [size (* 2 (.getDigestLength algorithm))
sig (.toString (BigInteger. 1 (.digest algorithm)) 16)
padding (string/join (repeat (- size (count sig)) "0"))]
(str padding sig)))
(defn sha-signature [s]
(let [md (MessageDigest/getInstance "SHA-256")]
(.reset md)
(.update md (.getBytes ^String s StandardCharsets/UTF_8))
(signature md)))
(defn sha-signature-short [s]
(subs (sha-signature s) 0 7))
#_(sha-digest-short "abasdf")
|
e74ff63566c6801e66b3f2a8316802c015228236594e0f9bca02a5483c3f0de8 | proglang/ldgv | PrettySyntax.hs | # OPTIONS_GHC -Wno - orphans #
# OPTIONS_GHC -Wno - incomplete - patterns #
# LANGUAGE LambdaCase #
module PrettySyntax (Pretty(), pretty, pshow) where
import Kinds
import Syntax
import ProcessEnvironment
import Data.Text.Prettyprint.Doc
import qualified Data.Set as Set
pshow :: Pretty a => a -> String
pshow x = show (pretty x)
instance Pretty Constraint where
pretty (t1 :<: t2) = pretty t1 <+> pretty "<:" <+> pretty t2
instance Pretty Multiplicity where
pretty MMany = mempty
pretty MOne = pretty "!"
instance Pretty Occurrence where
pretty Many = pretty "_"
pretty One = pretty '1'
pretty Zero = pretty '0'
instance Pretty Kind where
pretty k = pretty (show k)
instance Pretty TypeSegment where
pretty (Seg SegSend x t) = pretty "!" <> ptyped x t
pretty (Seg SegRecv x t) = pretty "?" <> ptyped x t
pretty (Seg (SegFun m) x t) = pretty "Pi" <> pretty m <> ptyped x t
pretty (Seg (SegPair) x t) = pretty "Sg" <> ptyped x t
plab :: String -> Doc ann
plab = pretty
-- pretty "'" <> -- seem built into the lab string
ptyped' :: Ident -> Type -> Doc ann
ptyped' ('#':_) t1 =
pretty t1
ptyped' id t1 =
parens (pretty id <+> colon <+> pretty t1)
ptyped :: Ident -> Type -> Doc ann
ptyped ('#':_) t1 =
pretty t1 <> dot
ptyped id t1 =
parens (pretty id <+> colon <+> pretty t1)
instance Pretty Type where
pretty TUnit = pretty "()"
pretty TInt = pretty "Int"
pretty TNat = pretty "Nat"
pretty TBot = pretty "_|_"
pretty TDyn = pretty "★"
pretty TDouble = pretty "Double"
pretty TString = pretty "String"
-- the bool indicates whether the type needs to be dualized
pretty (TName b s) = (if b then pretty "~" else mempty) <> pretty s
pretty (TVar b s) = (if b then pretty "~" else mempty) <> brackets (pretty s)
pretty (TLab (str:strs)) = braces (plab str <> foldr f mempty strs)
where
f str rest = comma <+> plab str <> rest
pretty (TFun m id t1 t2) =
pretty m <> ptyped' id t1 <+> pretty "->" <+> pretty t2
pretty (TPair id t1 t2) =
brackets (pretty id <+> colon <+> pretty t1 <> comma <+> pretty t2)
pretty (TSend id t1 t2) =
pretty "!" <> ptyped id t1 <+> pretty t2
pretty (TRecv id t1 t2) =
pretty "?" <> ptyped id t1 <+> pretty t2
pretty (TCase e (st : sts)) =
pcase e (st : sts)
pretty (TEqn e1 e2 t) =
pretty "{{" <> pretty e1 <+> equals <> equals <+> pretty e2 <+> colon <+> pretty t <> pretty "}}"
pretty (TSingle x) =
pretty "S" <> parens (pretty x)
pretty (TNatRec e tz y ts) =
pretty "natrec" <+> pretty e <+>
braces (pretty "Z:" <+> pretty tz <> comma <+>
pretty "S_:" <+> pretty y <+> dot <+> pretty ts)
-- print as a telescope
pretty (TAbs id t1 t2) =
ptyped id t1 <+> pretty t2
pcase :: Pretty a => Exp -> [(String, a)] -> Doc ann
pcase e (st : sts) =
pretty "case" <+> pretty e <+>
braces (g st <> foldr f mempty sts)
where g (s, t) = plab s <> colon <> pretty t
f st rest = comma <+> g st <> rest
instance Pretty Exp where
pretty (Let id e1 e2) =
pretty "let" <+> pretty id <+> equals <+> pretty e1 <+> pretty "in" <+>
pretty e2
pretty (Var id) =
pretty id
pretty (Lit l) =
pretty l
pretty (Math m) =
pretty m
pretty (Lam m id t e) =
pretty "𝜆" <> pretty m <+> ptyped id t <+>
pretty e
pretty (Rec f x e1 e0) =
pretty "rec" <+> pretty f <+> pretty x <>
colon <> pretty e1 <+> pretty e0
pretty (App e1 e2) =
pretty e1 <+> pretty e2
pretty (Pair m id e1 e2) =
angles (pretty m <> pretty id <+> equals <+> pretty e1 <> comma <+> pretty e2)
pretty (LetPair x y e1 e2) =
pretty "let" <+> angles (pretty x <> comma <> pretty y) <+> equals <+> pretty e1 <+> pretty "in" <+> pretty e2
pretty (Fst e) = pretty "fst" <+> pretty e
pretty (Snd e) = pretty "snd" <+> pretty e
pretty (Fork e) = pretty "fork" <+> pretty e
pretty (New t) = pretty "new" <+> pretty t
pretty (Send e) = pretty "send" <+> pretty e
pretty (Recv e) = pretty "recv" <+> pretty e
pretty (Case e ses) =
pcase e ses
pretty (Cast e t1 t2) =
pretty "(" <+> pretty e <+> pretty ":" <+> pretty t1 <+> pretty "⇒" <+> pretty t2 <+> pretty ")"
pretty (Succ e) =
pretty "succ" <+> pretty e
pretty (NatRec e ez x t y tyy es) =
pretty "natrec" <+> pretty e <+>
braces (pretty ez <> comma <+>
pretty x <> dot <+>
pretty t <> dot <+> ptyped y tyy <+> pretty es)
pretty (NewNatRec f n a ty ezero n1 esucc) =
pretty "new_natrec" <+>
parens (pretty f <> colon <> pretty n <> dot <> pretty a <> pretty ty) <+>
braces (pretty ezero <> comma <+>
pretty n1 <> dot <+> pretty esucc)
instance Pretty Literal where
pretty = \case
LInt i -> pretty i
LNat n -> pretty n
LDouble d -> pretty d
LString s -> pretty s
LLab s -> plab s
LUnit -> pretty "()"
instance Pretty e => Pretty (MathOp e) where
pretty = \case
Add a b -> pretty a <+> pretty "+" <+> pretty b
Sub a b -> pretty a <+> pretty "-" <+> pretty b
Mul a b -> pretty a <+> pretty "*" <+> pretty b
Div a b -> pretty a <+> pretty "/" <+> pretty b
Neg a -> pretty "-" <> pretty a
instance Pretty Value where
pretty = \case
VUnit -> pretty "()"
VLabel s -> pretty s
VInt i -> pretty $ show i
VDouble d -> pretty $ show d
VString s -> pretty $ show s
VChan _ _ -> pretty "VChan"
VSend v -> pretty "VSend"
VPair a b -> pretty "<" <+> pretty a <+> pretty ", " <+> pretty b <+> pretty ">"
VType t -> pretty t
VFunc _ s exp -> pretty "λ" <+> pretty s <+> pretty " (" <+> pretty exp <+> pretty ")"
VDynCast v t -> pretty "(" <+> pretty v <+> pretty " : " <+> pretty t <+> pretty " ⇒ ★)"
VFuncCast v ft1 ft2 -> pretty "(" <+> pretty v <+> pretty " : " <+> pretty ft1 <+> pretty " ⇒ " <+> pretty ft2 <+> pretty ")"
VRec {} -> pretty "VRec"
VNewNatRec {} -> pretty "VNewNatRec"
instance Pretty FuncType where
pretty (FuncType _ s t1 t2) = pretty "Π(" <+> pretty s <+> pretty ":" <+> pretty t1 <+> pretty ")" <+> pretty t2
instance Pretty GType where
pretty = \case
GUnit -> pretty "()"
GLabel ls -> braces (plab str <> foldr f mempty strs)
where
ll = Set.toList ls
str = head ll
strs = tail ll
f str rest = comma <+> plab str <> rest
GFunc s -> pretty "Π(" <+> pretty s <+> pretty ":★)★"
GPair -> pretty "Σ(" <+> pretty ":★)★"
GNat -> pretty "Nat"
GNatLeq n -> pretty "Nat(" <+> pretty n <+> pretty ")"
GInt -> pretty "Int"
GDouble -> pretty "Double"
GString -> pretty "String"
instance Pretty Decl where
pretty = \case
DType s _ k t -> pretty "type " <+> pretty s <+> pretty " : " <+> pretty k <+> pretty " = " <+> pretty t
DSig s _ t -> pretty "val " <+> pretty s <+> pretty " : " <+> pretty t
DFun s _ exp _ -> pretty "val " <+> pretty s <+> pretty " = " <+> pretty exp
instance Pretty NFType where
pretty = \case
NFBot -> pretty "⊥"
NFDyn -> pretty "★"
NFFunc (FuncType _ s t1 t2) -> pretty "Π(" <+> pretty s <+> pretty ":" <+> pretty t1 <+> pretty ")" <+> pretty t2
NFPair (FuncType _ s t1 t2) -> pretty "Σ(" <+> pretty s <+> pretty ":" <+> pretty t1 <+> pretty ")" <+> pretty t2
NFGType gt -> pretty gt
| null | https://raw.githubusercontent.com/proglang/ldgv/cabb502b81e831eece1c80ee70d520ac85e908c7/src/PrettySyntax.hs | haskell | pretty "'" <> -- seem built into the lab string
the bool indicates whether the type needs to be dualized
print as a telescope | # OPTIONS_GHC -Wno - orphans #
# OPTIONS_GHC -Wno - incomplete - patterns #
# LANGUAGE LambdaCase #
module PrettySyntax (Pretty(), pretty, pshow) where
import Kinds
import Syntax
import ProcessEnvironment
import Data.Text.Prettyprint.Doc
import qualified Data.Set as Set
pshow :: Pretty a => a -> String
pshow x = show (pretty x)
instance Pretty Constraint where
pretty (t1 :<: t2) = pretty t1 <+> pretty "<:" <+> pretty t2
instance Pretty Multiplicity where
pretty MMany = mempty
pretty MOne = pretty "!"
instance Pretty Occurrence where
pretty Many = pretty "_"
pretty One = pretty '1'
pretty Zero = pretty '0'
instance Pretty Kind where
pretty k = pretty (show k)
instance Pretty TypeSegment where
pretty (Seg SegSend x t) = pretty "!" <> ptyped x t
pretty (Seg SegRecv x t) = pretty "?" <> ptyped x t
pretty (Seg (SegFun m) x t) = pretty "Pi" <> pretty m <> ptyped x t
pretty (Seg (SegPair) x t) = pretty "Sg" <> ptyped x t
plab :: String -> Doc ann
plab = pretty
ptyped' :: Ident -> Type -> Doc ann
ptyped' ('#':_) t1 =
pretty t1
ptyped' id t1 =
parens (pretty id <+> colon <+> pretty t1)
ptyped :: Ident -> Type -> Doc ann
ptyped ('#':_) t1 =
pretty t1 <> dot
ptyped id t1 =
parens (pretty id <+> colon <+> pretty t1)
instance Pretty Type where
pretty TUnit = pretty "()"
pretty TInt = pretty "Int"
pretty TNat = pretty "Nat"
pretty TBot = pretty "_|_"
pretty TDyn = pretty "★"
pretty TDouble = pretty "Double"
pretty TString = pretty "String"
pretty (TName b s) = (if b then pretty "~" else mempty) <> pretty s
pretty (TVar b s) = (if b then pretty "~" else mempty) <> brackets (pretty s)
pretty (TLab (str:strs)) = braces (plab str <> foldr f mempty strs)
where
f str rest = comma <+> plab str <> rest
pretty (TFun m id t1 t2) =
pretty m <> ptyped' id t1 <+> pretty "->" <+> pretty t2
pretty (TPair id t1 t2) =
brackets (pretty id <+> colon <+> pretty t1 <> comma <+> pretty t2)
pretty (TSend id t1 t2) =
pretty "!" <> ptyped id t1 <+> pretty t2
pretty (TRecv id t1 t2) =
pretty "?" <> ptyped id t1 <+> pretty t2
pretty (TCase e (st : sts)) =
pcase e (st : sts)
pretty (TEqn e1 e2 t) =
pretty "{{" <> pretty e1 <+> equals <> equals <+> pretty e2 <+> colon <+> pretty t <> pretty "}}"
pretty (TSingle x) =
pretty "S" <> parens (pretty x)
pretty (TNatRec e tz y ts) =
pretty "natrec" <+> pretty e <+>
braces (pretty "Z:" <+> pretty tz <> comma <+>
pretty "S_:" <+> pretty y <+> dot <+> pretty ts)
pretty (TAbs id t1 t2) =
ptyped id t1 <+> pretty t2
pcase :: Pretty a => Exp -> [(String, a)] -> Doc ann
pcase e (st : sts) =
pretty "case" <+> pretty e <+>
braces (g st <> foldr f mempty sts)
where g (s, t) = plab s <> colon <> pretty t
f st rest = comma <+> g st <> rest
instance Pretty Exp where
pretty (Let id e1 e2) =
pretty "let" <+> pretty id <+> equals <+> pretty e1 <+> pretty "in" <+>
pretty e2
pretty (Var id) =
pretty id
pretty (Lit l) =
pretty l
pretty (Math m) =
pretty m
pretty (Lam m id t e) =
pretty "𝜆" <> pretty m <+> ptyped id t <+>
pretty e
pretty (Rec f x e1 e0) =
pretty "rec" <+> pretty f <+> pretty x <>
colon <> pretty e1 <+> pretty e0
pretty (App e1 e2) =
pretty e1 <+> pretty e2
pretty (Pair m id e1 e2) =
angles (pretty m <> pretty id <+> equals <+> pretty e1 <> comma <+> pretty e2)
pretty (LetPair x y e1 e2) =
pretty "let" <+> angles (pretty x <> comma <> pretty y) <+> equals <+> pretty e1 <+> pretty "in" <+> pretty e2
pretty (Fst e) = pretty "fst" <+> pretty e
pretty (Snd e) = pretty "snd" <+> pretty e
pretty (Fork e) = pretty "fork" <+> pretty e
pretty (New t) = pretty "new" <+> pretty t
pretty (Send e) = pretty "send" <+> pretty e
pretty (Recv e) = pretty "recv" <+> pretty e
pretty (Case e ses) =
pcase e ses
pretty (Cast e t1 t2) =
pretty "(" <+> pretty e <+> pretty ":" <+> pretty t1 <+> pretty "⇒" <+> pretty t2 <+> pretty ")"
pretty (Succ e) =
pretty "succ" <+> pretty e
pretty (NatRec e ez x t y tyy es) =
pretty "natrec" <+> pretty e <+>
braces (pretty ez <> comma <+>
pretty x <> dot <+>
pretty t <> dot <+> ptyped y tyy <+> pretty es)
pretty (NewNatRec f n a ty ezero n1 esucc) =
pretty "new_natrec" <+>
parens (pretty f <> colon <> pretty n <> dot <> pretty a <> pretty ty) <+>
braces (pretty ezero <> comma <+>
pretty n1 <> dot <+> pretty esucc)
instance Pretty Literal where
pretty = \case
LInt i -> pretty i
LNat n -> pretty n
LDouble d -> pretty d
LString s -> pretty s
LLab s -> plab s
LUnit -> pretty "()"
instance Pretty e => Pretty (MathOp e) where
pretty = \case
Add a b -> pretty a <+> pretty "+" <+> pretty b
Sub a b -> pretty a <+> pretty "-" <+> pretty b
Mul a b -> pretty a <+> pretty "*" <+> pretty b
Div a b -> pretty a <+> pretty "/" <+> pretty b
Neg a -> pretty "-" <> pretty a
instance Pretty Value where
pretty = \case
VUnit -> pretty "()"
VLabel s -> pretty s
VInt i -> pretty $ show i
VDouble d -> pretty $ show d
VString s -> pretty $ show s
VChan _ _ -> pretty "VChan"
VSend v -> pretty "VSend"
VPair a b -> pretty "<" <+> pretty a <+> pretty ", " <+> pretty b <+> pretty ">"
VType t -> pretty t
VFunc _ s exp -> pretty "λ" <+> pretty s <+> pretty " (" <+> pretty exp <+> pretty ")"
VDynCast v t -> pretty "(" <+> pretty v <+> pretty " : " <+> pretty t <+> pretty " ⇒ ★)"
VFuncCast v ft1 ft2 -> pretty "(" <+> pretty v <+> pretty " : " <+> pretty ft1 <+> pretty " ⇒ " <+> pretty ft2 <+> pretty ")"
VRec {} -> pretty "VRec"
VNewNatRec {} -> pretty "VNewNatRec"
instance Pretty FuncType where
pretty (FuncType _ s t1 t2) = pretty "Π(" <+> pretty s <+> pretty ":" <+> pretty t1 <+> pretty ")" <+> pretty t2
instance Pretty GType where
pretty = \case
GUnit -> pretty "()"
GLabel ls -> braces (plab str <> foldr f mempty strs)
where
ll = Set.toList ls
str = head ll
strs = tail ll
f str rest = comma <+> plab str <> rest
GFunc s -> pretty "Π(" <+> pretty s <+> pretty ":★)★"
GPair -> pretty "Σ(" <+> pretty ":★)★"
GNat -> pretty "Nat"
GNatLeq n -> pretty "Nat(" <+> pretty n <+> pretty ")"
GInt -> pretty "Int"
GDouble -> pretty "Double"
GString -> pretty "String"
instance Pretty Decl where
pretty = \case
DType s _ k t -> pretty "type " <+> pretty s <+> pretty " : " <+> pretty k <+> pretty " = " <+> pretty t
DSig s _ t -> pretty "val " <+> pretty s <+> pretty " : " <+> pretty t
DFun s _ exp _ -> pretty "val " <+> pretty s <+> pretty " = " <+> pretty exp
instance Pretty NFType where
pretty = \case
NFBot -> pretty "⊥"
NFDyn -> pretty "★"
NFFunc (FuncType _ s t1 t2) -> pretty "Π(" <+> pretty s <+> pretty ":" <+> pretty t1 <+> pretty ")" <+> pretty t2
NFPair (FuncType _ s t1 t2) -> pretty "Σ(" <+> pretty s <+> pretty ":" <+> pretty t1 <+> pretty ")" <+> pretty t2
NFGType gt -> pretty gt
|
82c4c0202522a18e287ce51ddd86e98261789a06de42b9609ba773ca7eebf2f9 | pa-ba/compdata-param | Ordering.hs | {-# LANGUAGE TemplateHaskell, FlexibleInstances, IncoherentInstances,
ScopedTypeVariables #-}
--------------------------------------------------------------------------------
-- |
Module : Data . Comp . . Multi . Derive . Ordering
Copyright : ( c ) 2011 ,
-- License : BSD3
Maintainer : < >
-- Stability : experimental
Portability : non - portable ( GHC Extensions )
--
-- Automatically derive instances of @OrdHD@.
--
--------------------------------------------------------------------------------
module Data.Comp.Param.Multi.Derive.Ordering
(
OrdHD(..),
makeOrdHD
) where
import Data.Comp.Param.Multi.FreshM hiding (Name)
import Data.Comp.Param.Multi.Ordering
import Data.Comp.Derive.Utils
import Data.Comp.Param.Derive.Utils
import Data.Maybe
import Data.List
import Language.Haskell.TH hiding (Cxt)
import Control.Monad (liftM)
compList :: [Ordering] -> Ordering
compList = fromMaybe EQ . find (/= EQ)
| Derive an instance of ' OrdHD ' for a type constructor of any parametric
kind taking at least three arguments .
kind taking at least three arguments. -}
makeOrdHD :: Name -> Q [Dec]
makeOrdHD fname = do
Just (DataInfo _ name args constrs _) <- abstractNewtypeQ $ reify fname
let args' = init args
-- covariant argument
let coArg :: Type = VarT $ tyVarBndrName $ last args'
-- contravariant argument
let conArg :: Type = VarT $ tyVarBndrName $ last $ init args'
let argNames = map (VarT . tyVarBndrName) (init $ init args')
let complType = foldl AppT (ConT name) argNames
let classType = AppT (ConT ''OrdHD) complType
constrs' :: [(Name,[Type], Maybe Type)] <- mapM normalConExp constrs
compareHDDecl <- funD 'compareHD (compareHDClauses conArg coArg constrs')
let context = map (\arg -> mkClassP ''Ord [arg]) argNames
return [mkInstanceD context classType [compareHDDecl]]
where compareHDClauses :: Type -> Type -> [(Name,[Type], Maybe Type)] -> [ClauseQ]
compareHDClauses _ _ [] = []
compareHDClauses conArg coArg constrs =
let constrs' = constrs `zip` [1..]
constPairs = [(x,y)| x<-constrs', y <- constrs']
in map (genClause conArg coArg) constPairs
genClause conArg coArg ((c,n),(d,m))
| n == m = genEqClause conArg coArg c
| n < m = genLtClause c d
| otherwise = genGtClause c d
genEqClause :: Type -> Type -> (Name,[Type], Maybe Type) -> ClauseQ
genEqClause conArg' coArg' (constr, args, gadtTy) = do
varXs <- newNames (length args) "x"
varYs <- newNames (length args) "y"
let patX = ConP constr $ map VarP varXs
let patY = ConP constr $ map VarP varYs
let (conArg, coArg) = getTernaryFArgs conArg' coArg' gadtTy
body <- eqDBody conArg coArg (zip3 varXs varYs args)
return $ Clause [patX, patY] (NormalB body) []
eqDBody :: Type -> Type -> [(Name, Name, Type)] -> ExpQ
eqDBody conArg coArg x =
[|liftM compList (sequence $(listE $ map (eqDB conArg coArg) x))|]
eqDB :: Type -> Type -> (Name, Name, Type) -> ExpQ
eqDB conArg coArg (x, y, tp)
| not (containsType tp conArg) &&
not (containsType tp coArg) =
[| return $ compare $(varE x) $(varE y) |]
| otherwise =
case tp of
AppT a _
| a == coArg -> [| pcompare $(varE x) $(varE y) |]
AppT (AppT ArrowT (AppT a _)) _
| a == conArg ->
[| withName (\v -> pcompare ($(varE x) $ nameCoerce v)
($(varE y) $ nameCoerce v)) |]
SigT tp' _ ->
eqDB conArg coArg (x, y, tp')
_ ->
if containsType tp conArg then
[| compareHD $(varE x) $(varE y) |]
else
[| pcompare $(varE x) $(varE y) |]
genLtClause (c, _, _) (d, _, _) =
clause [recP c [], recP d []] (normalB [| return LT |]) []
genGtClause (c, _, _) (d, _, _) =
clause [recP c [], recP d []] (normalB [| return GT |]) []
| null | https://raw.githubusercontent.com/pa-ba/compdata-param/5d6b0afa95a27fd3233f86e5efc6e6a6080f4236/src/Data/Comp/Param/Multi/Derive/Ordering.hs | haskell | # LANGUAGE TemplateHaskell, FlexibleInstances, IncoherentInstances,
ScopedTypeVariables #
------------------------------------------------------------------------------
|
License : BSD3
Stability : experimental
Automatically derive instances of @OrdHD@.
------------------------------------------------------------------------------
covariant argument
contravariant argument | Module : Data . Comp . . Multi . Derive . Ordering
Copyright : ( c ) 2011 ,
Maintainer : < >
Portability : non - portable ( GHC Extensions )
module Data.Comp.Param.Multi.Derive.Ordering
(
OrdHD(..),
makeOrdHD
) where
import Data.Comp.Param.Multi.FreshM hiding (Name)
import Data.Comp.Param.Multi.Ordering
import Data.Comp.Derive.Utils
import Data.Comp.Param.Derive.Utils
import Data.Maybe
import Data.List
import Language.Haskell.TH hiding (Cxt)
import Control.Monad (liftM)
compList :: [Ordering] -> Ordering
compList = fromMaybe EQ . find (/= EQ)
| Derive an instance of ' OrdHD ' for a type constructor of any parametric
kind taking at least three arguments .
kind taking at least three arguments. -}
makeOrdHD :: Name -> Q [Dec]
makeOrdHD fname = do
Just (DataInfo _ name args constrs _) <- abstractNewtypeQ $ reify fname
let args' = init args
let coArg :: Type = VarT $ tyVarBndrName $ last args'
let conArg :: Type = VarT $ tyVarBndrName $ last $ init args'
let argNames = map (VarT . tyVarBndrName) (init $ init args')
let complType = foldl AppT (ConT name) argNames
let classType = AppT (ConT ''OrdHD) complType
constrs' :: [(Name,[Type], Maybe Type)] <- mapM normalConExp constrs
compareHDDecl <- funD 'compareHD (compareHDClauses conArg coArg constrs')
let context = map (\arg -> mkClassP ''Ord [arg]) argNames
return [mkInstanceD context classType [compareHDDecl]]
where compareHDClauses :: Type -> Type -> [(Name,[Type], Maybe Type)] -> [ClauseQ]
compareHDClauses _ _ [] = []
compareHDClauses conArg coArg constrs =
let constrs' = constrs `zip` [1..]
constPairs = [(x,y)| x<-constrs', y <- constrs']
in map (genClause conArg coArg) constPairs
genClause conArg coArg ((c,n),(d,m))
| n == m = genEqClause conArg coArg c
| n < m = genLtClause c d
| otherwise = genGtClause c d
genEqClause :: Type -> Type -> (Name,[Type], Maybe Type) -> ClauseQ
genEqClause conArg' coArg' (constr, args, gadtTy) = do
varXs <- newNames (length args) "x"
varYs <- newNames (length args) "y"
let patX = ConP constr $ map VarP varXs
let patY = ConP constr $ map VarP varYs
let (conArg, coArg) = getTernaryFArgs conArg' coArg' gadtTy
body <- eqDBody conArg coArg (zip3 varXs varYs args)
return $ Clause [patX, patY] (NormalB body) []
eqDBody :: Type -> Type -> [(Name, Name, Type)] -> ExpQ
eqDBody conArg coArg x =
[|liftM compList (sequence $(listE $ map (eqDB conArg coArg) x))|]
eqDB :: Type -> Type -> (Name, Name, Type) -> ExpQ
eqDB conArg coArg (x, y, tp)
| not (containsType tp conArg) &&
not (containsType tp coArg) =
[| return $ compare $(varE x) $(varE y) |]
| otherwise =
case tp of
AppT a _
| a == coArg -> [| pcompare $(varE x) $(varE y) |]
AppT (AppT ArrowT (AppT a _)) _
| a == conArg ->
[| withName (\v -> pcompare ($(varE x) $ nameCoerce v)
($(varE y) $ nameCoerce v)) |]
SigT tp' _ ->
eqDB conArg coArg (x, y, tp')
_ ->
if containsType tp conArg then
[| compareHD $(varE x) $(varE y) |]
else
[| pcompare $(varE x) $(varE y) |]
genLtClause (c, _, _) (d, _, _) =
clause [recP c [], recP d []] (normalB [| return LT |]) []
genGtClause (c, _, _) (d, _, _) =
clause [recP c [], recP d []] (normalB [| return GT |]) []
|
8e99cf5f2489aca5cfaacdb70433b6d8144a2296fef78e57ad3a45364e882072 | PacktPublishing/Haskell-High-Performance-Programming | top-level.hs |
import Data.IORef
import System.IO.Unsafe (unsafePerformIO)
import Control.Concurrent.STM
globalVar :: IORef Int
globalVar = unsafePerformIO (newIORef 0)
| null | https://raw.githubusercontent.com/PacktPublishing/Haskell-High-Performance-Programming/2b1bfdb8102129be41e8d79c7e9caf12100c5556/Chapter07/top-level.hs | haskell |
import Data.IORef
import System.IO.Unsafe (unsafePerformIO)
import Control.Concurrent.STM
globalVar :: IORef Int
globalVar = unsafePerformIO (newIORef 0)
| |
60f9234e4becd2557805eeb158bd71e35e3f9f65cca1daa6e9f206d4ecac8c76 | rollacaster/sketches | particle_force.cljs | (ns sketches.nature-of-code.particle-systems.particle-force
(:require [quil.core :as q :include-macros true]
[quil.middleware :as md]
[sketches.vector :as v]))
(defn create-particle [location]
{:location location
:velocity [(- (rand 2) 1) (- (rand 2) 2)]
:acceleration [0 0.05]
:lifespan 255.0})
(defn update-particle [{:keys [acceleration velocity location lifespan] :as particle}]
(let [velocity (v/add velocity acceleration)
location (v/add velocity location)
lifespan (- lifespan 2.0)]
(-> particle
(assoc :velocity velocity)
(assoc :location location)
(assoc :lifespan lifespan))))
(defn apply-force [particle force]
(update particle :acceleration #(v/add % force)))
(defn display [{:keys [lifespan] [x y] :location}]
(q/stroke 0 lifespan)
(q/fill 0 lifespan)
(q/ellipse x y 8 8))
(defn is-dead [{:keys [lifespan]}]
(< lifespan 0.0))
(defn setup []
(create-particle [(/ (q/width) 2) (/ (q/height) 2)]))
(defn update-state [particle]
(if (is-dead particle)
(create-particle [(/ (q/width) 2) (/ (q/height) 2)])
(-> particle
(apply-force [-0.01 0.01])
update-particle)))
(defn draw [particle]
(q/background 255)
(display particle))
(defn run [host]
(q/defsketch particle-force
:host host
:setup setup
:draw draw
:update update-state
:middleware [md/fun-mode]
:size [300 300]))
| null | https://raw.githubusercontent.com/rollacaster/sketches/ba79fccf2a37139de9193ed2ea7a6cc04b63fad0/src/sketches/nature_of_code/particle_systems/particle_force.cljs | clojure | (ns sketches.nature-of-code.particle-systems.particle-force
(:require [quil.core :as q :include-macros true]
[quil.middleware :as md]
[sketches.vector :as v]))
(defn create-particle [location]
{:location location
:velocity [(- (rand 2) 1) (- (rand 2) 2)]
:acceleration [0 0.05]
:lifespan 255.0})
(defn update-particle [{:keys [acceleration velocity location lifespan] :as particle}]
(let [velocity (v/add velocity acceleration)
location (v/add velocity location)
lifespan (- lifespan 2.0)]
(-> particle
(assoc :velocity velocity)
(assoc :location location)
(assoc :lifespan lifespan))))
(defn apply-force [particle force]
(update particle :acceleration #(v/add % force)))
(defn display [{:keys [lifespan] [x y] :location}]
(q/stroke 0 lifespan)
(q/fill 0 lifespan)
(q/ellipse x y 8 8))
(defn is-dead [{:keys [lifespan]}]
(< lifespan 0.0))
(defn setup []
(create-particle [(/ (q/width) 2) (/ (q/height) 2)]))
(defn update-state [particle]
(if (is-dead particle)
(create-particle [(/ (q/width) 2) (/ (q/height) 2)])
(-> particle
(apply-force [-0.01 0.01])
update-particle)))
(defn draw [particle]
(q/background 255)
(display particle))
(defn run [host]
(q/defsketch particle-force
:host host
:setup setup
:draw draw
:update update-state
:middleware [md/fun-mode]
:size [300 300]))
| |
99d2976eea9952ffe4dc7576b67c9efba7270923c352802ad069ef87fb2202a0 | BranchTaken/Hemlock | test_hypot.ml | open! Basis.Rudiments
open! Basis
open Real
let test () =
let rec fn xys = begin
match xys with
| [] -> ()
| (x, y) :: xys' -> begin
File.Fmt.stdout
|> Fmt.fmt "x="
|> pp x
|> Fmt.fmt " y="
|> pp y
|> Fmt.fmt ": hypot="
|> pp (hypot x y)
|> Fmt.fmt "\n"
|> ignore;
fn xys'
end
end in
fn [(3., 4.); (4., 3.); (-3., -4.);
(-0., -3.); (0., -3.);
(3., inf); (nan, inf); (neg_inf, nan);
]
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/f3604ceda4f75cf18b6ee2b1c2f3c5759ad495a5/bootstrap/test/basis/real/test_hypot.ml | ocaml | open! Basis.Rudiments
open! Basis
open Real
let test () =
let rec fn xys = begin
match xys with
| [] -> ()
| (x, y) :: xys' -> begin
File.Fmt.stdout
|> Fmt.fmt "x="
|> pp x
|> Fmt.fmt " y="
|> pp y
|> Fmt.fmt ": hypot="
|> pp (hypot x y)
|> Fmt.fmt "\n"
|> ignore;
fn xys'
end
end in
fn [(3., 4.); (4., 3.); (-3., -4.);
(-0., -3.); (0., -3.);
(3., inf); (nan, inf); (neg_inf, nan);
]
let _ = test ()
| |
90decafb6d6d93b96311a7d8ae16a50ad633a8ae68080a269e8e3b75ff2161a7 | ucsd-progsys/dsolve | cbv.ml | datatype lamexp with nat =
{n:nat} One(n+1)
| {n:nat} Shift(n+1) of lamexp(n)
| {n:nat | n > 0} Lam(n-1) of lamexp(n)
| {n:nat} App(n) of lamexp(n) * lamexp(n)
datatype closure = {n:nat} Clo of lamexp(n) * closure list(n)
fun evaluate (e) =
let
fun eval (One, c :: _) = c
| eval (Shift e, _ :: env) = eval (e, env)
| eval (e as Lam _, env) = Clo (e, env)
| eval (App (e1, e2), env) =
let
val Clo (Lam body, env') = eval (e1, env)
val clo = eval (e2, env)
in
eval (body, clo :: env')
end
withtype {n:nat} lamexp(n) * closure list(n) -> closure
in
eval (e, [])
end
withtype lamexp(0) -> closure | null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/tests/POPL2008/xiog/DMLex/cbv.ml | ocaml | datatype lamexp with nat =
{n:nat} One(n+1)
| {n:nat} Shift(n+1) of lamexp(n)
| {n:nat | n > 0} Lam(n-1) of lamexp(n)
| {n:nat} App(n) of lamexp(n) * lamexp(n)
datatype closure = {n:nat} Clo of lamexp(n) * closure list(n)
fun evaluate (e) =
let
fun eval (One, c :: _) = c
| eval (Shift e, _ :: env) = eval (e, env)
| eval (e as Lam _, env) = Clo (e, env)
| eval (App (e1, e2), env) =
let
val Clo (Lam body, env') = eval (e1, env)
val clo = eval (e2, env)
in
eval (body, clo :: env')
end
withtype {n:nat} lamexp(n) * closure list(n) -> closure
in
eval (e, [])
end
withtype lamexp(0) -> closure | |
8f293b53b3286f1fa924b58d0c79d554836fcbe44b08c9e5c248f968044d8f51 | ocaml-community/obus | monitor.ml |
* monitor.ml
* ----------
* Copyright : ( c ) 2008 , < >
* Licence : BSD3
*
* This file is a part of obus , an ocaml implementation of D - Bus .
* monitor.ml
* ----------
* Copyright : (c) 2008, Jeremie Dimino <>
* Licence : BSD3
*
* This file is a part of obus, an ocaml implementation of D-Bus.
*)
(* This sample illustrate the use of threads in D-Bus + use of
filters. Filters are part of the lowlevel api. *)
open Lwt
open OBus_bus
open OBus_message
open OBus_value
let filter what_bus message =
Format.printf "@[<hv 2>message intercepted on %s bus:@\n%a@]@." what_bus OBus_message.print message;
(* Drop the message so we do not respond to method call *)
None
let add_filter what_bus get_bus =
let%lwt bus = get_bus () in
let _ = Lwt_sequence.add_r (filter what_bus) (OBus_connection.incoming_filters bus) in
Lwt_list.iter_p
(fun typ -> OBus_bus.add_match bus (OBus_match.rule ~typ ()))
[ `Method_call; `Method_return; `Error; `Signal ]
let () = Lwt_main.run begin
let%lwt () = add_filter "session" OBus_bus.session <&> add_filter "system" OBus_bus.system in
let%lwt () = Lwt_io.printlf "type Ctrl+C to stop" in
fst (wait ())
end
| null | https://raw.githubusercontent.com/ocaml-community/obus/8d38ee6750587ae6519644630b75d53a0a011acd/examples/monitor.ml | ocaml | This sample illustrate the use of threads in D-Bus + use of
filters. Filters are part of the lowlevel api.
Drop the message so we do not respond to method call |
* monitor.ml
* ----------
* Copyright : ( c ) 2008 , < >
* Licence : BSD3
*
* This file is a part of obus , an ocaml implementation of D - Bus .
* monitor.ml
* ----------
* Copyright : (c) 2008, Jeremie Dimino <>
* Licence : BSD3
*
* This file is a part of obus, an ocaml implementation of D-Bus.
*)
open Lwt
open OBus_bus
open OBus_message
open OBus_value
let filter what_bus message =
Format.printf "@[<hv 2>message intercepted on %s bus:@\n%a@]@." what_bus OBus_message.print message;
None
let add_filter what_bus get_bus =
let%lwt bus = get_bus () in
let _ = Lwt_sequence.add_r (filter what_bus) (OBus_connection.incoming_filters bus) in
Lwt_list.iter_p
(fun typ -> OBus_bus.add_match bus (OBus_match.rule ~typ ()))
[ `Method_call; `Method_return; `Error; `Signal ]
let () = Lwt_main.run begin
let%lwt () = add_filter "session" OBus_bus.session <&> add_filter "system" OBus_bus.system in
let%lwt () = Lwt_io.printlf "type Ctrl+C to stop" in
fst (wait ())
end
|
9a45fec25ff5f18abc9556a58b2ab6adb7b0756dc822cbb0052537ffa664b026 | nikita-volkov/rebase | Enum.hs | module Rebase.GHC.Enum
(
module GHC.Enum
)
where
import GHC.Enum
| null | https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/GHC/Enum.hs | haskell | module Rebase.GHC.Enum
(
module GHC.Enum
)
where
import GHC.Enum
| |
742fcb4121b0e22aded7a911b4c3d8e0bb7a731ec87a2983a2d7ce2269503ef9 | mbuczko/revolt | clean.clj | (ns revolt.tasks.clean
(:require [revolt.utils :as utils]
[clojure.tools.logging :as log]))
(defn delete-files-recursively
[fname & [silently]]
(letfn [(delete-f [file]
(when (.isDirectory file)
(doseq [child-file (.listFiles file)]
(delete-f child-file)))
(clojure.java.io/delete-file file silently))]
(delete-f (clojure.java.io/file fname))))
(defn invoke
[ctx {:keys [extra-paths]} target]
(utils/timed
(str "CLEAN " target)
(let [paths (into extra-paths [target "out"])]
(doseq [p paths]
(log/info "Cleaning path:" p)
(delete-files-recursively p true))
ctx)))
| null | https://raw.githubusercontent.com/mbuczko/revolt/65ef8de68d7aa77d1ced40e7d669ebcbba8a340e/src/revolt/tasks/clean.clj | clojure | (ns revolt.tasks.clean
(:require [revolt.utils :as utils]
[clojure.tools.logging :as log]))
(defn delete-files-recursively
[fname & [silently]]
(letfn [(delete-f [file]
(when (.isDirectory file)
(doseq [child-file (.listFiles file)]
(delete-f child-file)))
(clojure.java.io/delete-file file silently))]
(delete-f (clojure.java.io/file fname))))
(defn invoke
[ctx {:keys [extra-paths]} target]
(utils/timed
(str "CLEAN " target)
(let [paths (into extra-paths [target "out"])]
(doseq [p paths]
(log/info "Cleaning path:" p)
(delete-files-recursively p true))
ctx)))
| |
cd4bb5ab628257d75d61844ff7bde61e46c3ef80a9a8a1257525e3263fac2aed | kelamg/HtDP2e-workthrough | ex378.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex378) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(require 2htdp/image)
(require 2htdp/universe)
An FSM is a [ List - of 1Transition ]
A 1Transition is a list of two items :
( cons FSM - State ( cons FSM - State ' ( ) ) )
An FSM - State is a String that specifies a color
(define TEXT-SIZE 12)
(define TEXT-COLOR 'black)
; data examples
(define fsm-traffic
'(("red" "green") ("green" "yellow") ("yellow" "red")))
FSM FSM - State - > FSM - State
matches the keys pressed by a player with the given FSM
(define (simulate state0 transitions)
FSM - State
[to-draw
(lambda (current)
(overlay (text current TEXT-SIZE TEXT-COLOR)
(square 100 "solid" current)))]
[on-key
(lambda (current key-event)
(find transitions current))]))
; [X Y] [List-of [List X Y]] X -> Y
; finds the matching Y for the given X in alist
(define (find alist x)
(local ((define fm (assoc x alist)))
(if (cons? fm) (second fm) (error "not found"))))
| null | https://raw.githubusercontent.com/kelamg/HtDP2e-workthrough/ec05818d8b667a3c119bea8d1d22e31e72e0a958/HtDP/Intertwined-Data/ex378.rkt | racket | about the language level of this file in a form that our tools can easily process.
data examples
[X Y] [List-of [List X Y]] X -> Y
finds the matching Y for the given X in alist | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex378) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(require 2htdp/image)
(require 2htdp/universe)
An FSM is a [ List - of 1Transition ]
A 1Transition is a list of two items :
( cons FSM - State ( cons FSM - State ' ( ) ) )
An FSM - State is a String that specifies a color
(define TEXT-SIZE 12)
(define TEXT-COLOR 'black)
(define fsm-traffic
'(("red" "green") ("green" "yellow") ("yellow" "red")))
FSM FSM - State - > FSM - State
matches the keys pressed by a player with the given FSM
(define (simulate state0 transitions)
FSM - State
[to-draw
(lambda (current)
(overlay (text current TEXT-SIZE TEXT-COLOR)
(square 100 "solid" current)))]
[on-key
(lambda (current key-event)
(find transitions current))]))
(define (find alist x)
(local ((define fm (assoc x alist)))
(if (cons? fm) (second fm) (error "not found"))))
|
b4af06b173fc84daacd4c08a26d1510cda6427aac13d5e9f4d3828531a3ba2a0 | tonyg/pi-nothing | elf-hash.rkt | #lang racket/base
(provide elf-hash)
;; Per the SYSV ELF specification.
(define (elf-hash symbol-bytes)
(for/fold [(h 0)] [(b (in-bytes symbol-bytes))]
(let* ((h (bitwise-and #xffffffff (+ (arithmetic-shift h 4) b)))
(g (bitwise-and #xf0000000 h)))
(bitwise-and #xffffffff
(if (zero? g)
h
(bitwise-xor h (bitwise-and #xff (arithmetic-shift g -24))))
(bitwise-not g)))))
;;---------------------------------------------------------------------------
(module+ main
(require racket/match)
(let loop ((count 0))
(match (read)
[(? eof-object?)
(printf "elf-hash passed ~v test cases\n" count)]
[(list identifier-bytes expected-hash-value)
(define actual-hash-value (elf-hash identifier-bytes))
(when (not (= expected-hash-value actual-hash-value))
(error 'elf-hash "Incorrect result: expected ~v, got ~v for ~v"
expected-hash-value
actual-hash-value
identifier-bytes))
(loop (+ count 1))])))
| null | https://raw.githubusercontent.com/tonyg/pi-nothing/4b2b59d42babd72cfd007cb71ed295dcdbf53d4a/nothingc/elf-hash.rkt | racket | Per the SYSV ELF specification.
--------------------------------------------------------------------------- | #lang racket/base
(provide elf-hash)
(define (elf-hash symbol-bytes)
(for/fold [(h 0)] [(b (in-bytes symbol-bytes))]
(let* ((h (bitwise-and #xffffffff (+ (arithmetic-shift h 4) b)))
(g (bitwise-and #xf0000000 h)))
(bitwise-and #xffffffff
(if (zero? g)
h
(bitwise-xor h (bitwise-and #xff (arithmetic-shift g -24))))
(bitwise-not g)))))
(module+ main
(require racket/match)
(let loop ((count 0))
(match (read)
[(? eof-object?)
(printf "elf-hash passed ~v test cases\n" count)]
[(list identifier-bytes expected-hash-value)
(define actual-hash-value (elf-hash identifier-bytes))
(when (not (= expected-hash-value actual-hash-value))
(error 'elf-hash "Incorrect result: expected ~v, got ~v for ~v"
expected-hash-value
actual-hash-value
identifier-bytes))
(loop (+ count 1))])))
|
8abf0d86d2f68cee0582535b975cdbec043294f39762e5d86e5dfd97925ccb65 | ollef/Bidirectional | Type.hs | {-# LANGUAGE GADTs #-}
-- | Bidirectional typechecking for higher-rank polymorphism
-- Implementation of -sws.org/~neelk/bidir.pdf
module Type where
import Control.Applicative
import Control.Monad
import Data.Maybe
import Data.Monoid
import qualified Data.Set as S
import AST
import Context
import NameGen
import Pretty
-- | Algorithmic subtyping:
-- subtype Γ A B = Δ <=> Γ |- A <: B -| Δ
subtype :: Context -> Polytype -> Polytype -> NameGen Context
subtype gamma typ1 typ2 =
traceNS "subtype" (gamma, typ1, typ2) $
checkwftype gamma typ1 $ checkwftype gamma typ2 $
case (typ1, typ2) of
< :
(TVar alpha, TVar alpha') | alpha == alpha' -> return gamma
-- <:Unit
(TUnit, TUnit) -> return gamma
-- <:Exvar
(TExists alpha, TExists alpha')
| alpha == alpha' && alpha `elem` existentials gamma -> return gamma
-- <:->
(TFun a1 a2, TFun b1 b2) -> do
theta <- subtype gamma b1 a1
subtype theta (apply theta a2) (apply theta b2)
-- <:forallR
(a, TForall alpha b) -> do
-- Do alpha conversion to avoid clashes
alpha' <- freshTVar
dropMarker (CForall alpha') <$>
subtype (gamma >: CForall alpha') a (typeSubst (TVar alpha') alpha b)
-- <:forallL
(TForall alpha a, b) -> do
-- Do alpha conversion to avoid clashes
alpha' <- freshTVar
dropMarker (CMarker alpha') <$>
subtype (gamma >++ [CMarker alpha', CExists alpha'])
(typeSubst (TExists alpha') alpha a)
b
-- <:InstantiateL
(TExists alpha, a) | alpha `elem` existentials gamma
&& alpha `S.notMember` freeTVars a ->
instantiateL gamma alpha a
-- <:InstantiateR
(a, TExists alpha) | alpha `elem` existentials gamma
&& alpha `S.notMember` freeTVars a ->
instantiateR gamma a alpha
_ -> error $ "subtype, don't know what to do with: "
++ pretty (gamma, typ1, typ2)
-- | Algorithmic instantiation (left):
-- instantiateL Γ α A = Δ <=> Γ |- α^ :=< A -| Δ
instantiateL :: Context -> TVar -> Polytype -> NameGen Context
instantiateL gamma alpha a =
traceNS "instantiateL" (gamma, alpha, a) $
checkwftype gamma a $ checkwftype gamma (TExists alpha) $
case solve gamma alpha =<< monotype a of
-- InstLSolve
Just gamma' -> return gamma'
Nothing -> case a of
-- InstLReach
TExists beta
| ordered gamma alpha beta ->
return $ fromJust $ solve gamma beta (TExists alpha)
| otherwise ->
return $ fromJust $ solve gamma alpha (TExists beta)
-- InstLArr
TFun a1 a2 -> do
alpha1 <- freshTVar
alpha2 <- freshTVar
theta <- instantiateR (insertAt gamma (CExists alpha) $ context
[ CExists alpha2
, CExists alpha1
, CExistsSolved alpha $ TFun (TExists alpha1)
(TExists alpha2)
])
a1 alpha1
instantiateL theta alpha2 (apply theta a2)
-- InstLAIIR
TForall beta b -> do
-- Do alpha conversion to avoid clashes
beta' <- freshTVar
dropMarker (CForall beta') <$>
instantiateL (gamma >++ [CForall beta'])
alpha
(typeSubst (TVar beta') beta b)
_ -> error $ "The impossible happened! instantiateL: "
++ pretty (gamma, alpha, a)
-- | Algorithmic instantiation (right):
-- instantiateR Γ A α = Δ <=> Γ |- A =:< α -| Δ
instantiateR :: Context -> Polytype -> TVar -> NameGen Context
instantiateR gamma a alpha =
traceNS "instantiateR" (gamma, a, alpha) $
checkwftype gamma a $ checkwftype gamma (TExists alpha) $
case solve gamma alpha =<< monotype a of
Just gamma' -> return gamma'
Nothing -> case a of
InstRReach
TExists beta
| ordered gamma alpha beta ->
return $ fromJust $ solve gamma beta (TExists alpha)
| otherwise ->
return $ fromJust $ solve gamma alpha (TExists beta)
-- InstRArr
TFun a1 a2 -> do
alpha1 <- freshTVar
alpha2 <- freshTVar
theta <- instantiateL (insertAt gamma (CExists alpha) $ context
[ CExists alpha2
, CExists alpha1
, CExistsSolved alpha $ TFun (TExists alpha1)
(TExists alpha2)
])
alpha1
a1
instantiateR theta (apply theta a2) alpha2
-- InstRAIIL
TForall beta b -> do
-- Do alpha conversion to avoid clashes
beta' <- freshTVar
dropMarker (CMarker beta') <$>
instantiateR (gamma >++ [CMarker beta', CExists beta'])
(typeSubst (TExists beta') beta b)
alpha
_ -> error $ "The impossible happened! instantiateR: "
++ pretty (gamma, a, alpha)
-- | Type checking:
-- typecheck Γ e A = Δ <=> Γ |- e <= A -| Δ
typecheck :: Context -> Expr -> Polytype -> NameGen Context
typecheck gamma expr typ =
traceNS "typecheck" (gamma, expr, typ) $
checkwftype gamma typ $ case (expr, typ) of
1I
(EUnit, TUnit) -> return gamma
ForallI
(e, TForall alpha a) -> do
-- Do alpha conversion to avoid clashes
alpha' <- freshTVar
dropMarker (CForall alpha') <$>
typecheck (gamma >: CForall alpha') e (typeSubst (TVar alpha') alpha a)
-- ->I
(EAbs x e, TFun a b) -> do
x' <- freshVar
dropMarker (CVar x' a) <$>
typecheck (gamma >: CVar x' a) (subst (EVar x') x e) b
-- Sub
(e, b) -> do
(a, theta) <- typesynth gamma e
subtype theta (apply theta a) (apply theta b)
-- | Type synthesising:
-- typesynth Γ e = (A, Δ) <=> Γ |- e => A -| Δ
typesynth :: Context -> Expr -> NameGen (Polytype, Context)
typesynth gamma expr = traceNS "typesynth" (gamma, expr) $ checkwf gamma $
case expr of
Var
EVar x -> return
( fromMaybe (error $ "typesynth: not in scope " ++ pretty (expr, gamma))
(findVarType gamma x)
, gamma
)
-- Anno
EAnno e a -> do
delta <- typecheck gamma e a
return (a, delta)
1I= >
EUnit -> return (TUnit, gamma)
-- ->I= > Original rule
do
x '
alpha < - freshTVar
beta < - freshTVar
delta ( CVar x ' ( TExists alpha ) ) < $ >
( gamma > + + [ CExists alpha
, CExists beta
, CVar x ' ( TExists alpha )
] )
( subst ( EVar x ' ) x e )
( TExists beta )
return ( TFun ( TExists alpha ) ( TExists beta ) , delta )
--
-- ->I=> Original rule
EAbs x e -> do
x' <- freshVar
alpha <- freshTVar
beta <- freshTVar
delta <- dropMarker (CVar x' (TExists alpha)) <$>
typecheck (gamma >++ [ CExists alpha
, CExists beta
, CVar x' (TExists alpha)
])
(subst (EVar x') x e)
(TExists beta)
return (TFun (TExists alpha) (TExists beta), delta)
-- -}
-- {-
-- ->I=> Full Damas-Milner type inference
EAbs x e -> do
x' <- freshVar
alpha <- freshTVar
beta <- freshTVar
(delta, delta') <- breakMarker (CMarker alpha) <$>
typecheck (gamma >++ [ CMarker alpha
, CExists alpha
, CExists beta
, CVar x' (TExists alpha)
])
(subst (EVar x') x e)
(TExists beta)
let tau = apply delta' (TFun (TExists alpha) (TExists beta))
let evars = unsolved delta'
uvars <- replicateM (length evars) freshTVar
return ( tforalls uvars $ typeSubsts (zip (map TVar uvars) evars) tau
, delta)
-- -}
-- ->E
EApp e1 e2 -> do
(a, theta) <- typesynth gamma e1
typeapplysynth theta (apply theta a) e2
-- | Type application synthesising
-- typeapplysynth Γ A e = (C, Δ) <=> Γ |- A . e =>> C -| Δ
typeapplysynth :: Context -> Polytype -> Expr -> NameGen (Polytype, Context)
typeapplysynth gamma typ e = traceNS "typeapplysynth" (gamma, typ, e) $
checkwftype gamma typ $
case typ of
-- ForallApp
TForall alpha a -> do
-- Do alpha conversion to avoid clashes
alpha' <- freshTVar
typeapplysynth (gamma >: CExists alpha')
(typeSubst (TExists alpha') alpha a)
e
-- alpha^App
TExists alpha -> do
alpha1 <- freshTVar
alpha2 <- freshTVar
delta <- typecheck (insertAt gamma (CExists alpha) $ context
[ CExists alpha2
, CExists alpha1
, CExistsSolved alpha $ TFun (TExists alpha1)
(TExists alpha2)
])
e
(TExists alpha1)
return (TExists alpha2, delta)
-- ->App
TFun a c -> do
delta <- typecheck gamma e a
return (c, delta)
_ -> error $ "typeapplysynth: don't know what to do with: "
++ pretty (gamma, typ, e)
typesynthClosed :: Expr -> (Polytype, Context)
typesynthClosed e = let (a, gamma) = evalNameGen $ typesynth mempty e
in (apply gamma a, gamma)
-- Examples
( λx . x ) : t → t
eid = eabs "x" (var "x") -: tforall "t" (tvar "t" --> tvar "t")
Impredicative , so does n't
( λid . i d i d ( ) ) ( ( λx . x ) : t → t )
> tvar " t " ) ) $ $ var " i d " ) $ $ eunit ) $ $ eid
( λid . i d ( ) ) ( ( λx . x ) : t → t )
idunit = eabs "id" (var "id" $$ eunit) $$ eid
idid :: Expr -- id id
idid = (eid $$ eid) -: tforall "t" (tvar "t" --> tvar "t")
| null | https://raw.githubusercontent.com/ollef/Bidirectional/06b4dc4d5810446f3c9cbc9dc21a749a090c77cd/Type.hs | haskell | # LANGUAGE GADTs #
| Bidirectional typechecking for higher-rank polymorphism
Implementation of -sws.org/~neelk/bidir.pdf
| Algorithmic subtyping:
subtype Γ A B = Δ <=> Γ |- A <: B -| Δ
<:Unit
<:Exvar
<:->
<:forallR
Do alpha conversion to avoid clashes
<:forallL
Do alpha conversion to avoid clashes
<:InstantiateL
<:InstantiateR
| Algorithmic instantiation (left):
instantiateL Γ α A = Δ <=> Γ |- α^ :=< A -| Δ
InstLSolve
InstLReach
InstLArr
InstLAIIR
Do alpha conversion to avoid clashes
| Algorithmic instantiation (right):
instantiateR Γ A α = Δ <=> Γ |- A =:< α -| Δ
InstRArr
InstRAIIL
Do alpha conversion to avoid clashes
| Type checking:
typecheck Γ e A = Δ <=> Γ |- e <= A -| Δ
Do alpha conversion to avoid clashes
->I
Sub
| Type synthesising:
typesynth Γ e = (A, Δ) <=> Γ |- e => A -| Δ
Anno
->I= > Original rule
->I=> Original rule
-}
{-
->I=> Full Damas-Milner type inference
-}
->E
| Type application synthesising
typeapplysynth Γ A e = (C, Δ) <=> Γ |- A . e =>> C -| Δ
ForallApp
Do alpha conversion to avoid clashes
alpha^App
->App
Examples
> tvar "t")
id id
> tvar "t") | module Type where
import Control.Applicative
import Control.Monad
import Data.Maybe
import Data.Monoid
import qualified Data.Set as S
import AST
import Context
import NameGen
import Pretty
subtype :: Context -> Polytype -> Polytype -> NameGen Context
subtype gamma typ1 typ2 =
traceNS "subtype" (gamma, typ1, typ2) $
checkwftype gamma typ1 $ checkwftype gamma typ2 $
case (typ1, typ2) of
< :
(TVar alpha, TVar alpha') | alpha == alpha' -> return gamma
(TUnit, TUnit) -> return gamma
(TExists alpha, TExists alpha')
| alpha == alpha' && alpha `elem` existentials gamma -> return gamma
(TFun a1 a2, TFun b1 b2) -> do
theta <- subtype gamma b1 a1
subtype theta (apply theta a2) (apply theta b2)
(a, TForall alpha b) -> do
alpha' <- freshTVar
dropMarker (CForall alpha') <$>
subtype (gamma >: CForall alpha') a (typeSubst (TVar alpha') alpha b)
(TForall alpha a, b) -> do
alpha' <- freshTVar
dropMarker (CMarker alpha') <$>
subtype (gamma >++ [CMarker alpha', CExists alpha'])
(typeSubst (TExists alpha') alpha a)
b
(TExists alpha, a) | alpha `elem` existentials gamma
&& alpha `S.notMember` freeTVars a ->
instantiateL gamma alpha a
(a, TExists alpha) | alpha `elem` existentials gamma
&& alpha `S.notMember` freeTVars a ->
instantiateR gamma a alpha
_ -> error $ "subtype, don't know what to do with: "
++ pretty (gamma, typ1, typ2)
instantiateL :: Context -> TVar -> Polytype -> NameGen Context
instantiateL gamma alpha a =
traceNS "instantiateL" (gamma, alpha, a) $
checkwftype gamma a $ checkwftype gamma (TExists alpha) $
case solve gamma alpha =<< monotype a of
Just gamma' -> return gamma'
Nothing -> case a of
TExists beta
| ordered gamma alpha beta ->
return $ fromJust $ solve gamma beta (TExists alpha)
| otherwise ->
return $ fromJust $ solve gamma alpha (TExists beta)
TFun a1 a2 -> do
alpha1 <- freshTVar
alpha2 <- freshTVar
theta <- instantiateR (insertAt gamma (CExists alpha) $ context
[ CExists alpha2
, CExists alpha1
, CExistsSolved alpha $ TFun (TExists alpha1)
(TExists alpha2)
])
a1 alpha1
instantiateL theta alpha2 (apply theta a2)
TForall beta b -> do
beta' <- freshTVar
dropMarker (CForall beta') <$>
instantiateL (gamma >++ [CForall beta'])
alpha
(typeSubst (TVar beta') beta b)
_ -> error $ "The impossible happened! instantiateL: "
++ pretty (gamma, alpha, a)
instantiateR :: Context -> Polytype -> TVar -> NameGen Context
instantiateR gamma a alpha =
traceNS "instantiateR" (gamma, a, alpha) $
checkwftype gamma a $ checkwftype gamma (TExists alpha) $
case solve gamma alpha =<< monotype a of
Just gamma' -> return gamma'
Nothing -> case a of
InstRReach
TExists beta
| ordered gamma alpha beta ->
return $ fromJust $ solve gamma beta (TExists alpha)
| otherwise ->
return $ fromJust $ solve gamma alpha (TExists beta)
TFun a1 a2 -> do
alpha1 <- freshTVar
alpha2 <- freshTVar
theta <- instantiateL (insertAt gamma (CExists alpha) $ context
[ CExists alpha2
, CExists alpha1
, CExistsSolved alpha $ TFun (TExists alpha1)
(TExists alpha2)
])
alpha1
a1
instantiateR theta (apply theta a2) alpha2
TForall beta b -> do
beta' <- freshTVar
dropMarker (CMarker beta') <$>
instantiateR (gamma >++ [CMarker beta', CExists beta'])
(typeSubst (TExists beta') beta b)
alpha
_ -> error $ "The impossible happened! instantiateR: "
++ pretty (gamma, a, alpha)
typecheck :: Context -> Expr -> Polytype -> NameGen Context
typecheck gamma expr typ =
traceNS "typecheck" (gamma, expr, typ) $
checkwftype gamma typ $ case (expr, typ) of
1I
(EUnit, TUnit) -> return gamma
ForallI
(e, TForall alpha a) -> do
alpha' <- freshTVar
dropMarker (CForall alpha') <$>
typecheck (gamma >: CForall alpha') e (typeSubst (TVar alpha') alpha a)
(EAbs x e, TFun a b) -> do
x' <- freshVar
dropMarker (CVar x' a) <$>
typecheck (gamma >: CVar x' a) (subst (EVar x') x e) b
(e, b) -> do
(a, theta) <- typesynth gamma e
subtype theta (apply theta a) (apply theta b)
typesynth :: Context -> Expr -> NameGen (Polytype, Context)
typesynth gamma expr = traceNS "typesynth" (gamma, expr) $ checkwf gamma $
case expr of
Var
EVar x -> return
( fromMaybe (error $ "typesynth: not in scope " ++ pretty (expr, gamma))
(findVarType gamma x)
, gamma
)
EAnno e a -> do
delta <- typecheck gamma e a
return (a, delta)
1I= >
EUnit -> return (TUnit, gamma)
do
x '
alpha < - freshTVar
beta < - freshTVar
delta ( CVar x ' ( TExists alpha ) ) < $ >
( gamma > + + [ CExists alpha
, CExists beta
, CVar x ' ( TExists alpha )
] )
( subst ( EVar x ' ) x e )
( TExists beta )
return ( TFun ( TExists alpha ) ( TExists beta ) , delta )
EAbs x e -> do
x' <- freshVar
alpha <- freshTVar
beta <- freshTVar
delta <- dropMarker (CVar x' (TExists alpha)) <$>
typecheck (gamma >++ [ CExists alpha
, CExists beta
, CVar x' (TExists alpha)
])
(subst (EVar x') x e)
(TExists beta)
return (TFun (TExists alpha) (TExists beta), delta)
EAbs x e -> do
x' <- freshVar
alpha <- freshTVar
beta <- freshTVar
(delta, delta') <- breakMarker (CMarker alpha) <$>
typecheck (gamma >++ [ CMarker alpha
, CExists alpha
, CExists beta
, CVar x' (TExists alpha)
])
(subst (EVar x') x e)
(TExists beta)
let tau = apply delta' (TFun (TExists alpha) (TExists beta))
let evars = unsolved delta'
uvars <- replicateM (length evars) freshTVar
return ( tforalls uvars $ typeSubsts (zip (map TVar uvars) evars) tau
, delta)
EApp e1 e2 -> do
(a, theta) <- typesynth gamma e1
typeapplysynth theta (apply theta a) e2
typeapplysynth :: Context -> Polytype -> Expr -> NameGen (Polytype, Context)
typeapplysynth gamma typ e = traceNS "typeapplysynth" (gamma, typ, e) $
checkwftype gamma typ $
case typ of
TForall alpha a -> do
alpha' <- freshTVar
typeapplysynth (gamma >: CExists alpha')
(typeSubst (TExists alpha') alpha a)
e
TExists alpha -> do
alpha1 <- freshTVar
alpha2 <- freshTVar
delta <- typecheck (insertAt gamma (CExists alpha) $ context
[ CExists alpha2
, CExists alpha1
, CExistsSolved alpha $ TFun (TExists alpha1)
(TExists alpha2)
])
e
(TExists alpha1)
return (TExists alpha2, delta)
TFun a c -> do
delta <- typecheck gamma e a
return (c, delta)
_ -> error $ "typeapplysynth: don't know what to do with: "
++ pretty (gamma, typ, e)
typesynthClosed :: Expr -> (Polytype, Context)
typesynthClosed e = let (a, gamma) = evalNameGen $ typesynth mempty e
in (apply gamma a, gamma)
( λx . x ) : t → t
Impredicative , so does n't
( λid . i d i d ( ) ) ( ( λx . x ) : t → t )
> tvar " t " ) ) $ $ var " i d " ) $ $ eunit ) $ $ eid
( λid . i d ( ) ) ( ( λx . x ) : t → t )
idunit = eabs "id" (var "id" $$ eunit) $$ eid
|
372942d61c42dc2f9f0d3217ad5e978d57794dbf7b87b8823c16aeba4e88373e | ucsd-progsys/mist | omega.hs | ----------------------------------------------------------------------------
-- | The ST Monad ----------------------------------------------------------
----------------------------------------------------------------------------
measure mNil :: List [>Int] -> Bool
measure mCons :: List [>Int] -> Bool
measure mLength :: List [>Int] -> Int
empty as x:(List >Int) -> {v: Bool | v == mNil x}
empty = (0)
nil as {v: List >Int | (mNil v) /\ (mLength v = 0)}
nil = (0)
cons as x:Int -> xs:(List >Int) -> {v: List >Int | (mCons v) /\ (mLength v = mLength xs + 1)}
cons = (0)
first as {v: List >Int | mCons v} -> Int
first = (0)
rest as rs:{v: List >Int | mCons v} -> {v: List >Int | mLength v == mLength rs - 1 }
rest = (0)
undefined as rforall a. a
undefined = 0
-- bind as rforall a, b. forall s.
-- bw1:s ~> bw2:s ~> bw3:s ~>
ST < { v : s | v = bw1 } > { v : s | v = bw2 } > a - >
( x : a - > ST < { v : s | v = bw2 } > { v : s | v = bw3 } > b ) - >
-- ST <{v:s | v = bw1} >{v:s | v = bw3} >b
-- bind = undefined
bind as rforall a, b, p, q, r.
ST <p >q >a ->
(x:a -> ST <q >r >b) ->
ST <p >r >b
bind = undefined
pure as rforall a, p, q, s, t. x:a -> ST <p >q >a
pure = undefined
thenn as rforall a, b. forall p.
w1:p ~> w2:p ~> w3:p ~>
ST <{v:p | v = w1} >{v:p | v = w2} >a ->
ST <{v:p | v = w2} >{v:p | v = w3} >b ->
ST <{v:p | v = w1} >{v:p | v = w3} >b
thenn = \f g -> bind f (\underscore -> g)
fmap as rforall a, b, p, q, s, t.
(underscore:a -> b) ->
ST <p >q >a ->
ST <p >q >b
fmap = \f x -> bind x (\xx -> pure (f xx))
get as forall s. wg:s ~> Int -> ST <{v:s|v==wg} >{v:s|v==wg} >{v:s|v==wg}
get = undefined
put as forall s. wp:s -> ST <s >{v:s|v==wp} >Int
put = undefined
omega ::
(n:Int ~>
under:Int ->
(exists n2:{v: Int | v > n}.
(ST <{v: Int | v = n} >{v: Int | v = n2} >Int))) ->
(m:Int ~>
score:Int ->
(exists m2:{v: Int | v > m}.
ST <{v: Int | v = m} >{v: Int | v = m2} >Int))
omega = \f -> \score -> thenn (f 1) (omega f score)
foo :: x:Int ~>
under:Int ->
(exists x2:{v: Int | v > x}.
(ST <{v: Int | v = x} >{v: Int | v = x2} >Int))
foo = \under -> bind (get 6) (\y -> put (y + 4))
bar :: x:Int ~>
under:Int ->
ST <{v: Int | v = x} >{v: Int | v = x + 1} >Int
bar = \under -> bind (get 8) (\y -> put (y + 1))
main :: x:{v: Int | v > 3} ~> ST <{v: Int | v = x} >{v: Int | v > 3} >Int
main = thenn (omega foo 8) (omega bar 9)
| null | https://raw.githubusercontent.com/ucsd-progsys/mist/0a9345e73dc53ff8e8adb8bed78d0e3e0cdc6af0/tests/Tests/Integration/pos/omega.hs | haskell | --------------------------------------------------------------------------
| The ST Monad ----------------------------------------------------------
--------------------------------------------------------------------------
bind as rforall a, b. forall s.
bw1:s ~> bw2:s ~> bw3:s ~>
ST <{v:s | v = bw1} >{v:s | v = bw3} >b
bind = undefined |
measure mNil :: List [>Int] -> Bool
measure mCons :: List [>Int] -> Bool
measure mLength :: List [>Int] -> Int
empty as x:(List >Int) -> {v: Bool | v == mNil x}
empty = (0)
nil as {v: List >Int | (mNil v) /\ (mLength v = 0)}
nil = (0)
cons as x:Int -> xs:(List >Int) -> {v: List >Int | (mCons v) /\ (mLength v = mLength xs + 1)}
cons = (0)
first as {v: List >Int | mCons v} -> Int
first = (0)
rest as rs:{v: List >Int | mCons v} -> {v: List >Int | mLength v == mLength rs - 1 }
rest = (0)
undefined as rforall a. a
undefined = 0
ST < { v : s | v = bw1 } > { v : s | v = bw2 } > a - >
( x : a - > ST < { v : s | v = bw2 } > { v : s | v = bw3 } > b ) - >
bind as rforall a, b, p, q, r.
ST <p >q >a ->
(x:a -> ST <q >r >b) ->
ST <p >r >b
bind = undefined
pure as rforall a, p, q, s, t. x:a -> ST <p >q >a
pure = undefined
thenn as rforall a, b. forall p.
w1:p ~> w2:p ~> w3:p ~>
ST <{v:p | v = w1} >{v:p | v = w2} >a ->
ST <{v:p | v = w2} >{v:p | v = w3} >b ->
ST <{v:p | v = w1} >{v:p | v = w3} >b
thenn = \f g -> bind f (\underscore -> g)
fmap as rforall a, b, p, q, s, t.
(underscore:a -> b) ->
ST <p >q >a ->
ST <p >q >b
fmap = \f x -> bind x (\xx -> pure (f xx))
get as forall s. wg:s ~> Int -> ST <{v:s|v==wg} >{v:s|v==wg} >{v:s|v==wg}
get = undefined
put as forall s. wp:s -> ST <s >{v:s|v==wp} >Int
put = undefined
omega ::
(n:Int ~>
under:Int ->
(exists n2:{v: Int | v > n}.
(ST <{v: Int | v = n} >{v: Int | v = n2} >Int))) ->
(m:Int ~>
score:Int ->
(exists m2:{v: Int | v > m}.
ST <{v: Int | v = m} >{v: Int | v = m2} >Int))
omega = \f -> \score -> thenn (f 1) (omega f score)
foo :: x:Int ~>
under:Int ->
(exists x2:{v: Int | v > x}.
(ST <{v: Int | v = x} >{v: Int | v = x2} >Int))
foo = \under -> bind (get 6) (\y -> put (y + 4))
bar :: x:Int ~>
under:Int ->
ST <{v: Int | v = x} >{v: Int | v = x + 1} >Int
bar = \under -> bind (get 8) (\y -> put (y + 1))
main :: x:{v: Int | v > 3} ~> ST <{v: Int | v = x} >{v: Int | v > 3} >Int
main = thenn (omega foo 8) (omega bar 9)
|
edeff6305a7f55608e8835ddfe3e2d28bf12d98939a721cf2522f108211c99cb | brownplt/LambdaS5 | convert_assignment_test.ml | open Prelude
open Util
open OUnit2
open Ljs_convert_assignment
let suite =
let cmp before after = cmp before convert_assignment after in
let no_change code = no_change code convert_assignment in
"Test Less Mutation" >:::
[
"transform SetBang to Let" >::
(cmp
"x := 2; let (y = x) y"
"let (x = 2) let (y=x) y"
);
"lift assignment" >::
(cmp
"let (x = 1)
{x := 2; x}"
"let (x = 1)
let (x = 2)
x");
"pattern1: let(a=..) x:=a. ">::
(cmp
"let (x = undefined) {
{let (a = 1)
x:=a};
1}"
"let (x = undefined) {
{let (x = 1)
1}}");
"pattern1 again: let(a=..) x:=a. ">::
(no_change
"let (x = undefined) {
{let (a = 1)
let (b = 2)
x:=a};
1}");
"the setbang x and the usage of x are in different seq" >::
(no_change
"let (x = undefined) {
{let (y = 2) x := prim('+', y, 1)}; x
}"
);
(* todo setbang and recursive function *)
"the setbang x and the usage of x are in different seq" >::
(cmp
"let (x = undefined) {
{let (y = 2) x := y; x};
x
}"
"let (x = undefined) {
{let (x = 2) {
x;x
}}
}"
);
"setbang x in let x_v" >::
(cmp
"let (x = {let (y = 3)
let (w = 4)
let (z = {let (q = w) {T:=w; w}})
z})
{T := 12; T}"
"let (x = {let (y = 3)
let (w = 4)
let (z = {let (q = w) {T:=w; w}})
z})
{let (T = 12) T}"
);
"setbang in function" >::
(no_change
"let (T = undefined)
let (bar = func() {T})
let (foo = func() { T:=3; undefined })
{
foo();
bar()
}"
);
"let shadow" >::
NOTE : after arrange the sequence , the answer is no long
" let ( x = undefined ) {
{ let ( x = 2 ) x } ;
{ let ( x = 3 ) x }
} "
"let (x = undefined) {
{let (x = 2) x};
{let (x = 3) x}
}" *)
(cmp
"let (x = undefined) {
{x := 2; x};
{let (x = 3) x}
}"
"let (x = undefined) {
let (x = 2) {
x;
{let (x = 3) x}}
}"
);
"js func pattern" >::
(cmp
"let (foo = undefined)
{let (#strict=true)
{'use strict';
{let (%fobj = {let (x = 1) x})
foo := %fobj};
use(foo)}}"
"let (foo = undefined)
{let (#strict=true)
{'use strict';
{let (foo = {let (x = 1) x})
use(foo)}}}"
);
"js func pattern 2: function with strict mode" >::
(cmp
"let (foo = undefined)
{let (#strict=true)
{let (%fobj = {let (x = 1) x}) foo := %fobj};
use(foo)}"
"let (foo = undefined)
{let (#strict=true)
{let (foo = {let (x = 1) x})
use(foo)}}"
);
"js function pattern" >::
(cmp "{let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16};
let (fun2 = foo)
use(fun2)"
"let (foo = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
let (fun2 = foo)
use(fun2)");
"js recursive function pattern" >::
(no_change
"{let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(this, args){
foo(undefined, mkargs())
}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16};
let (fun2 = foo)
use(fun2)"
);
"js function patterns" >::
(cmp "{let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16};
{let (fobj17 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
bar := fobj17};
{let (fobj18 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
zoo := fobj18};
let (fun2 = foo) {use(fun2)};
let (fun3 = bar) {use(fun3)};
let (fun4 = zoo) {use(fun4)}
"
"{let (foo = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
{let (bar = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
{let (zoo = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
{let (fun2 = foo) {use(fun2)};
let (fun3 = bar) {use(fun3)};
let (fun4 = zoo) {use(fun4)}}}}}
");
"js function patterns nested" >::
(cmp "let (foo = undefined){
{let (fobj16 =
{let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){
let (bar = undefined){
{let (fobj17 = {let (proto={[]})
let (parent=context)
let (thisfunc16 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc16];
thisfunc16}})
bar := fobj17};
use(bar)}
}]})
{proto['constructor' = thisfunc15]; thisfunc15}})
foo := fobj16};
let (fun2 = foo)
use(fun2)}"
"let (foo = undefined){
let (foo =
{let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){
let (bar = undefined){
{let (bar = {let (proto={[]})
let (parent=context)
let (thisfunc16 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc16];
thisfunc16}})
use(bar)}}
}]})
{proto['constructor' = thisfunc15]; thisfunc15}})
let (fun2 = foo)
use(fun2)}"
);
"js function patterns. A program is a function" >::
(no_change
"let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16");
(* a in b's scope is the top-level a *)
"variable scope" >::
(no_change
"let (a = 1) {
let (b = func() {a}) {
a := 2;
b();
a
} }");
"variable scope" >::
(no_change
"let (a = 1) {
let (b = func() {a:=2}) {
a := 3;
b();
a
} }");
"variable scope" >::
(cmp
"let (a = 1) let (f = undefined) {
{let (b = func() {a:=2}) f:=b};
a := 3;
f();
a
}"
"let (a = 1) let (f = undefined) {
let (f = func() {a:=2}) {
a := 3;
f();
a}
}"
);
]
let _ =
run_test_tt_main suite
| null | https://raw.githubusercontent.com/brownplt/LambdaS5/f0bf5c7baf1daa4ead4e398ba7d430bedb7de9cf/src/optimization/test/convert_assignment_test.ml | ocaml | todo setbang and recursive function
a in b's scope is the top-level a | open Prelude
open Util
open OUnit2
open Ljs_convert_assignment
let suite =
let cmp before after = cmp before convert_assignment after in
let no_change code = no_change code convert_assignment in
"Test Less Mutation" >:::
[
"transform SetBang to Let" >::
(cmp
"x := 2; let (y = x) y"
"let (x = 2) let (y=x) y"
);
"lift assignment" >::
(cmp
"let (x = 1)
{x := 2; x}"
"let (x = 1)
let (x = 2)
x");
"pattern1: let(a=..) x:=a. ">::
(cmp
"let (x = undefined) {
{let (a = 1)
x:=a};
1}"
"let (x = undefined) {
{let (x = 1)
1}}");
"pattern1 again: let(a=..) x:=a. ">::
(no_change
"let (x = undefined) {
{let (a = 1)
let (b = 2)
x:=a};
1}");
"the setbang x and the usage of x are in different seq" >::
(no_change
"let (x = undefined) {
{let (y = 2) x := prim('+', y, 1)}; x
}"
);
"the setbang x and the usage of x are in different seq" >::
(cmp
"let (x = undefined) {
{let (y = 2) x := y; x};
x
}"
"let (x = undefined) {
{let (x = 2) {
x;x
}}
}"
);
"setbang x in let x_v" >::
(cmp
"let (x = {let (y = 3)
let (w = 4)
let (z = {let (q = w) {T:=w; w}})
z})
{T := 12; T}"
"let (x = {let (y = 3)
let (w = 4)
let (z = {let (q = w) {T:=w; w}})
z})
{let (T = 12) T}"
);
"setbang in function" >::
(no_change
"let (T = undefined)
let (bar = func() {T})
let (foo = func() { T:=3; undefined })
{
foo();
bar()
}"
);
"let shadow" >::
NOTE : after arrange the sequence , the answer is no long
" let ( x = undefined ) {
{ let ( x = 2 ) x } ;
{ let ( x = 3 ) x }
} "
"let (x = undefined) {
{let (x = 2) x};
{let (x = 3) x}
}" *)
(cmp
"let (x = undefined) {
{x := 2; x};
{let (x = 3) x}
}"
"let (x = undefined) {
let (x = 2) {
x;
{let (x = 3) x}}
}"
);
"js func pattern" >::
(cmp
"let (foo = undefined)
{let (#strict=true)
{'use strict';
{let (%fobj = {let (x = 1) x})
foo := %fobj};
use(foo)}}"
"let (foo = undefined)
{let (#strict=true)
{'use strict';
{let (foo = {let (x = 1) x})
use(foo)}}}"
);
"js func pattern 2: function with strict mode" >::
(cmp
"let (foo = undefined)
{let (#strict=true)
{let (%fobj = {let (x = 1) x}) foo := %fobj};
use(foo)}"
"let (foo = undefined)
{let (#strict=true)
{let (foo = {let (x = 1) x})
use(foo)}}"
);
"js function pattern" >::
(cmp "{let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16};
let (fun2 = foo)
use(fun2)"
"let (foo = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
let (fun2 = foo)
use(fun2)");
"js recursive function pattern" >::
(no_change
"{let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(this, args){
foo(undefined, mkargs())
}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16};
let (fun2 = foo)
use(fun2)"
);
"js function patterns" >::
(cmp "{let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16};
{let (fobj17 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
bar := fobj17};
{let (fobj18 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
zoo := fobj18};
let (fun2 = foo) {use(fun2)};
let (fun3 = bar) {use(fun3)};
let (fun4 = zoo) {use(fun4)}
"
"{let (foo = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
{let (bar = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
{let (zoo = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
{let (fun2 = foo) {use(fun2)};
let (fun3 = bar) {use(fun3)};
let (fun4 = zoo) {use(fun4)}}}}}
");
"js function patterns nested" >::
(cmp "let (foo = undefined){
{let (fobj16 =
{let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){
let (bar = undefined){
{let (fobj17 = {let (proto={[]})
let (parent=context)
let (thisfunc16 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc16];
thisfunc16}})
bar := fobj17};
use(bar)}
}]})
{proto['constructor' = thisfunc15]; thisfunc15}})
foo := fobj16};
let (fun2 = foo)
use(fun2)}"
"let (foo = undefined){
let (foo =
{let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){
let (bar = undefined){
{let (bar = {let (proto={[]})
let (parent=context)
let (thisfunc16 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc16];
thisfunc16}})
use(bar)}}
}]})
{proto['constructor' = thisfunc15]; thisfunc15}})
let (fun2 = foo)
use(fun2)}"
);
"js function patterns. A program is a function" >::
(no_change
"let (fobj16 = {let (proto={[]})
let (parent=context)
let (thisfunc15 = {[#code: func(){1}]}) {
proto['constructor' = thisfunc15];
thisfunc15}})
foo := fobj16");
"variable scope" >::
(no_change
"let (a = 1) {
let (b = func() {a}) {
a := 2;
b();
a
} }");
"variable scope" >::
(no_change
"let (a = 1) {
let (b = func() {a:=2}) {
a := 3;
b();
a
} }");
"variable scope" >::
(cmp
"let (a = 1) let (f = undefined) {
{let (b = func() {a:=2}) f:=b};
a := 3;
f();
a
}"
"let (a = 1) let (f = undefined) {
let (f = func() {a:=2}) {
a := 3;
f();
a}
}"
);
]
let _ =
run_test_tt_main suite
|
af3a6ae08aa9a7d3032ab9aef0f6273518f41d56d915675c61b020f089fb3ebd | HunterYIboHu/htdp2-solution | ex98-answer.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex98-answer) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t write repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
; physical constants
(define WORLD-WIDTH 200)
(define WORLD-HEIGHT (* 3/4 WORLD-WIDTH))
(define TANK-HEIGHT 8)
(define TANK-SPEED 5)
(define UFO-SPEED 8)
(define MISSLE-SPEED (* 2 UFO-SPEED))
; graphical constants
(define BKG (empty-scene WORLD-WIDTH WORLD-HEIGHT))
(define UFO
(underlay (rectangle 40 10 "solid" "orange")
(ellipse 15 15 "solid" "red")))
(define TANK (rectangle 20 TANK-HEIGHT "solid" "green"))
(define MISSILE
(rotate 180 (polygon (list (make-posn 0 0)
(make-posn 5 5)
(make-posn 10 0)
(make-posn 5 10))
"solid"
"blue")))
(define-struct aim [ufo tank])
(define-struct fired [ufo tank missile])
; A UFO is Posn
; interpreattion (make-posn x y) is the UFO's current location
; examples:
(define ufo-n (make-posn 50 50))
(define ufo-e (make-posn 79 34))
(define-struct tank [loc vel])
; A Tank is (make-tank Number Number)
; interpretation (make-tank x dx) means the tank is at position
; (x, TANK-HEIGHT) and that it moves dx per clock tick
; examples:
(define tank-n (make-tank 20 TANK-SPEED))
(define tank-e (make-tank 35 TANK-SPEED))
A is Posn
; interpretation (make-posn x y) is the missile's current
; location
; examples:
(define missile-n (make-posn 22 103))
(define missile-e (make-posn 56 23))
A SIGS is one of :
; - (make-aim UFO Tank)
; - (make-fired UFO Tank Missle)
; interpretation represents the state of the space invader game
(define before-fired
(make-aim (make-posn 20 10)
(make-tank 28 -3)))
(define just-fired
(make-fired (make-posn 20 10)
(make-tank 28 -3)
(make-posn 28 (- WORLD-HEIGHT TANK-HEIGHT 10))))
(define shotted
(make-fired (make-posn 20 100)
(make-tank 100 3)
(make-posn 22 103)))
; Tank Image -> Image
; adds t to the given image im
; examples:
(check-expect (tank-render tank-n BKG)
(place-image TANK
(tank-loc tank-n)
WORLD-HEIGHT
BKG))
(check-expect (tank-render tank-e BKG)
(place-image TANK
(tank-loc tank-e)
WORLD-HEIGHT
BKG))
(define (tank-render t im)
(place-image TANK (tank-loc t) WORLD-HEIGHT im))
; UFO Image -> Image
; adds u to the given image im
; examples:
(check-expect (ufo-render ufo-n BKG)
(place-image UFO
(posn-x ufo-n)
(posn-y ufo-n)
BKG))
(check-expect (ufo-render ufo-e BKG)
(place-image UFO
(posn-x ufo-e)
(posn-y ufo-e)
BKG))
(define (ufo-render u im)
(place-image UFO (posn-x u) (posn-y u) im))
; Missile Image -> Image
; adds m to the given image im
; examples:
(check-expect (missile-render missile-n BKG)
(place-image MISSILE
(posn-x missile-n)
(posn-y missile-n)
BKG))
(check-expect (missile-render missile-e BKG)
(place-image MISSILE
(posn-x missile-e)
(posn-y missile-e)
BKG))
(define (missile-render m im)
(place-image MISSILE (posn-x m) (posn-y m) im))
; SIGS -> Image
ads TANK , UFO , and possibly the MISSILE to BKG
; examples:
(check-expect (si-render before-fired)
(tank-render (aim-tank before-fired)
(ufo-render (aim-ufo before-fired)
BKG)))
(check-expect (si-render just-fired)
(tank-render (fired-tank just-fired)
(ufo-render (fired-ufo just-fired)
(missile-render (fired-missile just-fired)
BKG))))
(check-expect (si-render shotted)
(tank-render (fired-tank shotted)
(ufo-render (fired-ufo shotted)
(missile-render (fired-missile shotted)
BKG))))
(define (si-render s)
(cond [(aim? s)
(tank-render (aim-tank s)
(ufo-render (aim-ufo s) BKG))]
[(fired? s)
(tank-render (fired-tank s)
(ufo-render (fired-ufo s)
(missile-render (fired-missile s)
BKG)))]))
; Answer to ex98
the answer determine by the position of tank and ufo .
if ufo 's position is diffierent from tank 's , that is same .
; else will be different.
; Because the nested order will determine which picture is above the other.
; 练习98的答案
; 这题的答案取决于坦克和UFO的位置。如果这两个东西的位置没有重合,则顺序的改变没有影响;
; 如果其位置重合,则会有影响。这是因为:函数嵌套的顺序决定了那张图像在其他图像之上。 | null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter1/Section6/ex98-answer.rkt | racket | about the language level of this file in a form that our tools can easily process.
physical constants
graphical constants
A UFO is Posn
interpreattion (make-posn x y) is the UFO's current location
examples:
A Tank is (make-tank Number Number)
interpretation (make-tank x dx) means the tank is at position
(x, TANK-HEIGHT) and that it moves dx per clock tick
examples:
interpretation (make-posn x y) is the missile's current
location
examples:
- (make-aim UFO Tank)
- (make-fired UFO Tank Missle)
interpretation represents the state of the space invader game
Tank Image -> Image
adds t to the given image im
examples:
UFO Image -> Image
adds u to the given image im
examples:
Missile Image -> Image
adds m to the given image im
examples:
SIGS -> Image
examples:
Answer to ex98
else will be different.
Because the nested order will determine which picture is above the other.
练习98的答案
这题的答案取决于坦克和UFO的位置。如果这两个东西的位置没有重合,则顺序的改变没有影响;
如果其位置重合,则会有影响。这是因为:函数嵌套的顺序决定了那张图像在其他图像之上。 | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex98-answer) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t write repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
(define WORLD-WIDTH 200)
(define WORLD-HEIGHT (* 3/4 WORLD-WIDTH))
(define TANK-HEIGHT 8)
(define TANK-SPEED 5)
(define UFO-SPEED 8)
(define MISSLE-SPEED (* 2 UFO-SPEED))
(define BKG (empty-scene WORLD-WIDTH WORLD-HEIGHT))
(define UFO
(underlay (rectangle 40 10 "solid" "orange")
(ellipse 15 15 "solid" "red")))
(define TANK (rectangle 20 TANK-HEIGHT "solid" "green"))
(define MISSILE
(rotate 180 (polygon (list (make-posn 0 0)
(make-posn 5 5)
(make-posn 10 0)
(make-posn 5 10))
"solid"
"blue")))
(define-struct aim [ufo tank])
(define-struct fired [ufo tank missile])
(define ufo-n (make-posn 50 50))
(define ufo-e (make-posn 79 34))
(define-struct tank [loc vel])
(define tank-n (make-tank 20 TANK-SPEED))
(define tank-e (make-tank 35 TANK-SPEED))
A is Posn
(define missile-n (make-posn 22 103))
(define missile-e (make-posn 56 23))
A SIGS is one of :
(define before-fired
(make-aim (make-posn 20 10)
(make-tank 28 -3)))
(define just-fired
(make-fired (make-posn 20 10)
(make-tank 28 -3)
(make-posn 28 (- WORLD-HEIGHT TANK-HEIGHT 10))))
(define shotted
(make-fired (make-posn 20 100)
(make-tank 100 3)
(make-posn 22 103)))
(check-expect (tank-render tank-n BKG)
(place-image TANK
(tank-loc tank-n)
WORLD-HEIGHT
BKG))
(check-expect (tank-render tank-e BKG)
(place-image TANK
(tank-loc tank-e)
WORLD-HEIGHT
BKG))
(define (tank-render t im)
(place-image TANK (tank-loc t) WORLD-HEIGHT im))
(check-expect (ufo-render ufo-n BKG)
(place-image UFO
(posn-x ufo-n)
(posn-y ufo-n)
BKG))
(check-expect (ufo-render ufo-e BKG)
(place-image UFO
(posn-x ufo-e)
(posn-y ufo-e)
BKG))
(define (ufo-render u im)
(place-image UFO (posn-x u) (posn-y u) im))
(check-expect (missile-render missile-n BKG)
(place-image MISSILE
(posn-x missile-n)
(posn-y missile-n)
BKG))
(check-expect (missile-render missile-e BKG)
(place-image MISSILE
(posn-x missile-e)
(posn-y missile-e)
BKG))
(define (missile-render m im)
(place-image MISSILE (posn-x m) (posn-y m) im))
ads TANK , UFO , and possibly the MISSILE to BKG
(check-expect (si-render before-fired)
(tank-render (aim-tank before-fired)
(ufo-render (aim-ufo before-fired)
BKG)))
(check-expect (si-render just-fired)
(tank-render (fired-tank just-fired)
(ufo-render (fired-ufo just-fired)
(missile-render (fired-missile just-fired)
BKG))))
(check-expect (si-render shotted)
(tank-render (fired-tank shotted)
(ufo-render (fired-ufo shotted)
(missile-render (fired-missile shotted)
BKG))))
(define (si-render s)
(cond [(aim? s)
(tank-render (aim-tank s)
(ufo-render (aim-ufo s) BKG))]
[(fired? s)
(tank-render (fired-tank s)
(ufo-render (fired-ufo s)
(missile-render (fired-missile s)
BKG)))]))
the answer determine by the position of tank and ufo .
if ufo 's position is diffierent from tank 's , that is same .
|
60114bae7dbaa6792163aaff46e155d270af88100e5d15738112d9b140e94923 | Ferada/cl-mock | package.lisp | -*- mode : lisp ; syntax : common - lisp ; coding : utf-8 - unix ; package : cl - user ; -*-
(in-package #:cl-user)
(defpackage #:cl-mock
(:use #:closer-common-lisp #:alexandria)
(:export
;; regular functions
#:maybe-fdefinition
#:set-fdefinition
#:set-or-unbind-fdefinition
#:call-with-function-bindings
#:progf
#:dflet
;; mocking of regular functions
#:*previous*
#:*arguments*
#:call-previous
#:register-mock
#:invocations
#:if-called
#:unhandled
#:answer
#:call-with-mocks
#:with-mocks
;; mocking of generic functions
))
| null | https://raw.githubusercontent.com/Ferada/cl-mock/714a4122970d836e65a5a33f6bf6c6d99116325c/src/package.lisp | lisp | syntax : common - lisp ; coding : utf-8 - unix ; package : cl - user ; -*-
regular functions
mocking of regular functions
mocking of generic functions |
(in-package #:cl-user)
(defpackage #:cl-mock
(:use #:closer-common-lisp #:alexandria)
(:export
#:maybe-fdefinition
#:set-fdefinition
#:set-or-unbind-fdefinition
#:call-with-function-bindings
#:progf
#:dflet
#:*previous*
#:*arguments*
#:call-previous
#:register-mock
#:invocations
#:if-called
#:unhandled
#:answer
#:call-with-mocks
#:with-mocks
))
|
543649be8a675a453831c1796d4638125460d57d31370061064ac839f9b8f3fe | malyn/lein-whimrepl | whimrepl.clj | (ns leiningen.whimrepl
"Start a repl session in a Vim-targetable server."
(:import (com.michaelalynmiller.jnaplatext.win32 CmdExeTyper ProcessUtils)
(com.michaelalynmiller.vimserver IVimDataHandler VimServer))
(:require [leiningen.repl :as lein.repl]
[whimrepl.version]))
List of ' repl ' modes that are safe to use with whimrepl .
(def safe-leinrepl-modes [":connect"])
(defn should-start-whimrepl?
"Returns true if the given 'lein repl' args are compatible with
whimrepl (or nil), else false."
[args]
(let [flag (first args)]
(or (nil? flag)
(some (partial = flag) safe-leinrepl-modes))))
(defn replname?
"Returns true if the given string is a valid whimrepl name,
else false."
[s]
(and (seq s)
(not= \: (first s))))
(defn split-replname-from-args
"Returns a vector of [replname args] given the command line
arguments to 'lein repl'. replname will be nil if a whimrepl
name was not provided on the command line."
[args]
(let [maybe-replname (first args)]
(if (replname? maybe-replname)
[maybe-replname (rest args)]
[nil (vec args)])))
(defn find-cmdexe-pid
"Returns the process id for our cmd.exe ancestor."
[]
(->> (ProcessUtils/getProcessAncestors)
(filter #(.endsWith (.getImageName %) "cmd.exe"))
first
.getProcessId))
(defn start-whimrepl
"Starts a whimrepl server with the given target replname."
[replname]
(let [typer (CmdExeTyper. (find-cmdexe-pid))
relay (proxy [IVimDataHandler] []
(handleReceivedText [text] (.write typer text)))
vimserver (VimServer. replname)]
(.start vimserver relay)
(printf "whimrepl %s available at %s\n" whimrepl.version/string replname)))
(defn ^:no-project-needed whimrepl
"Start a repl session in a Vim-targetable server.
USAGE: lein whimrepl [lein repl args]
This will start a whimrepl server and Leiningen REPL. The whimrepl
server will be given the same name as the current Leiningen project.
If whimrepl was started outside of a Leiningen project then the
default name of 'whimrepl' will be used. Either way, the final name
will be displayed at startup.
Arguments to 'lein repl' may also be supplied and will be passed
straight through without modification. Note that whimrepl will only
activate itself in 'lein repl' modes that are known to be compatible
with the Vim server environment -- it wouldn't make sense to start
whimrepl when using :headless mode, for example.
USAGE: lein whimrepl [replname] [lein repl args]
Same as above, but the supplied replname will be used instead of the
default name."
[project & args]
(let [[replname args] (split-replname-from-args args)
replname (or replname (:name project) "whimrepl")]
(if (should-start-whimrepl? args) (start-whimrepl replname))
(apply lein.repl/repl project args)))
| null | https://raw.githubusercontent.com/malyn/lein-whimrepl/69d69f6bc260a1d1f44d69a8a8f84f593ed7a1ae/src/leiningen/whimrepl.clj | clojure | (ns leiningen.whimrepl
"Start a repl session in a Vim-targetable server."
(:import (com.michaelalynmiller.jnaplatext.win32 CmdExeTyper ProcessUtils)
(com.michaelalynmiller.vimserver IVimDataHandler VimServer))
(:require [leiningen.repl :as lein.repl]
[whimrepl.version]))
List of ' repl ' modes that are safe to use with whimrepl .
(def safe-leinrepl-modes [":connect"])
(defn should-start-whimrepl?
"Returns true if the given 'lein repl' args are compatible with
whimrepl (or nil), else false."
[args]
(let [flag (first args)]
(or (nil? flag)
(some (partial = flag) safe-leinrepl-modes))))
(defn replname?
"Returns true if the given string is a valid whimrepl name,
else false."
[s]
(and (seq s)
(not= \: (first s))))
(defn split-replname-from-args
"Returns a vector of [replname args] given the command line
arguments to 'lein repl'. replname will be nil if a whimrepl
name was not provided on the command line."
[args]
(let [maybe-replname (first args)]
(if (replname? maybe-replname)
[maybe-replname (rest args)]
[nil (vec args)])))
(defn find-cmdexe-pid
"Returns the process id for our cmd.exe ancestor."
[]
(->> (ProcessUtils/getProcessAncestors)
(filter #(.endsWith (.getImageName %) "cmd.exe"))
first
.getProcessId))
(defn start-whimrepl
"Starts a whimrepl server with the given target replname."
[replname]
(let [typer (CmdExeTyper. (find-cmdexe-pid))
relay (proxy [IVimDataHandler] []
(handleReceivedText [text] (.write typer text)))
vimserver (VimServer. replname)]
(.start vimserver relay)
(printf "whimrepl %s available at %s\n" whimrepl.version/string replname)))
(defn ^:no-project-needed whimrepl
"Start a repl session in a Vim-targetable server.
USAGE: lein whimrepl [lein repl args]
This will start a whimrepl server and Leiningen REPL. The whimrepl
server will be given the same name as the current Leiningen project.
If whimrepl was started outside of a Leiningen project then the
default name of 'whimrepl' will be used. Either way, the final name
will be displayed at startup.
Arguments to 'lein repl' may also be supplied and will be passed
straight through without modification. Note that whimrepl will only
activate itself in 'lein repl' modes that are known to be compatible
with the Vim server environment -- it wouldn't make sense to start
whimrepl when using :headless mode, for example.
USAGE: lein whimrepl [replname] [lein repl args]
Same as above, but the supplied replname will be used instead of the
default name."
[project & args]
(let [[replname args] (split-replname-from-args args)
replname (or replname (:name project) "whimrepl")]
(if (should-start-whimrepl? args) (start-whimrepl replname))
(apply lein.repl/repl project args)))
| |
dd66b30a03695e896be9055864f5aa14790ea24a299839a48da69a991a36c8af | hercules-ci/hercules-ci-agent | Vector.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# OPTIONS_GHC -fno - warn - unused - matches #
-- | @std::vector@
--
Original author
module Hercules.CNix.Std.Vector
( stdVectorCtx,
instanceStdVector,
instanceStdVectorCopyable,
CStdVector,
StdVector (StdVector),
Hercules.CNix.Std.Vector.new,
size,
toVector,
toVectorP,
toListP,
toListFP,
Hercules.CNix.Std.Vector.toList,
Hercules.CNix.Std.Vector.fromList,
fromListFP,
pushBack,
pushBackP,
pushBackFP,
)
where
import Control.Exception (mask_)
import Data.Coerce (Coercible, coerce)
import Data.Foldable
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import Foreign
import Foreign.C
import Hercules.CNix.Encapsulation
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Unsafe as CU
import Language.Haskell.TH
import Prelude
data CStdVector a
stdVectorCtx :: C.Context
stdVectorCtx = C.cppCtx `mappend` C.cppTypePairs [("std::vector", [t|CStdVector|])]
newtype StdVector a = StdVector (ForeignPtr (CStdVector a))
instance HasStdVector a => HasEncapsulation (CStdVector a) (StdVector a) where
moveToForeignPtrWrapper x = StdVector <$> newForeignPtr cDelete x
class HasStdVector a where
cNew :: IO (Ptr (CStdVector a))
cDelete :: FunPtr (Ptr (CStdVector a) -> IO ())
cSize :: Ptr (CStdVector a) -> IO CSize
cCopies :: Ptr (CStdVector a) -> Ptr (Ptr a) -> IO ()
cPushBackByPtr :: Ptr a -> Ptr (CStdVector a) -> IO ()
class HasStdVector a => HasStdVectorCopyable a where
cCopyTo :: Ptr (CStdVector a) -> Ptr a -> IO ()
cPushBack :: a -> Ptr (CStdVector a) -> IO ()
-- | Helper for defining templated instances
roll :: String -> Q [Dec] -> Q [Dec]
roll cType d =
concat
<$> sequence
[ C.include "<vector>",
C.include "<algorithm>",
C.substitute
[ ("T", const cType),
("VEC", \var -> "$(std::vector<" ++ cType ++ ">* " ++ var ++ ")")
]
d
]
instanceStdVector :: String -> DecsQ
instanceStdVector cType =
roll
cType
[d|
instance HasStdVector $(C.getHaskellType False cType) where
cNew = [CU.exp| std::vector<@T()>* { new std::vector<@T()>() } |]
cDelete = [C.funPtr| void deleteStdVector(std::vector<@T()>* vec) { delete vec; } |]
cSize vec = [CU.exp| size_t { @VEC(vec)->size() } |]
cCopies vec dstPtr =
[CU.block| void {
const std::vector<@T()>& vec = *@VEC(vec);
@T()** aim = $(@T()** dstPtr);
for (auto item : vec) {
*aim = new @T()(item);
aim++;
}
}|]
cPushBackByPtr ptr vec = [CU.exp| void { @VEC(vec)->push_back(*$(@T() *ptr)) } |]
|]
instanceStdVectorCopyable :: String -> DecsQ
instanceStdVectorCopyable cType =
roll
cType
[d|
instance HasStdVectorCopyable $(C.getHaskellType False cType) where
cCopyTo vec dstPtr =
[CU.block| void {
const std::vector<@T()>* vec = @VEC(vec);
std::copy(vec->begin(), vec->end(), $(@T()* dstPtr));
} |]
cPushBack value vec = [CU.exp| void { @VEC(vec)->push_back($(@T() value)) } |]
|]
new :: forall a. HasStdVector a => IO (StdVector a)
new = mask_ $ do
ptr <- cNew @a
StdVector <$> newForeignPtr cDelete ptr
size :: HasStdVector a => StdVector a -> IO Int
size (StdVector fptr) = fromIntegral <$> withForeignPtr fptr cSize
toVector :: (HasStdVectorCopyable a, Storable a) => StdVector a -> IO (VS.Vector a)
toVector stdVec@(StdVector stdVecFPtr) = do
vecSize <- size stdVec
hsVec <- VSM.new vecSize
withForeignPtr stdVecFPtr $ \stdVecPtr ->
VSM.unsafeWith hsVec $ \hsVecPtr ->
cCopyTo stdVecPtr hsVecPtr
VS.unsafeFreeze hsVec
toVectorP :: HasStdVector a => StdVector a -> IO (VS.Vector (Ptr a))
toVectorP stdVec@(StdVector stdVecFPtr) = do
vecSize <- size stdVec
hsVec <- VSM.new vecSize
withForeignPtr stdVecFPtr $ \stdVecPtr ->
VSM.unsafeWith hsVec $ \hsVecPtr ->
cCopies stdVecPtr hsVecPtr
VS.unsafeFreeze hsVec
fromList :: HasStdVectorCopyable a => [a] -> IO (StdVector a)
fromList as = do
vec <- Hercules.CNix.Std.Vector.new
for_ as $ \a -> pushBack vec a
pure vec
fromListFP :: (Coercible a' (ForeignPtr a), HasStdVector a) => [a'] -> IO (StdVector a)
fromListFP as = do
vec <- Hercules.CNix.Std.Vector.new
for_ as $ \a -> pushBackFP vec a
pure vec
toList :: (HasStdVectorCopyable a, Storable a) => StdVector a -> IO [a]
toList vec = VS.toList <$> toVector vec
toListP :: (HasStdVector a) => StdVector a -> IO [Ptr a]
toListP vec = VS.toList <$> toVectorP vec
toListFP :: (HasEncapsulation a b, HasStdVector a) => StdVector a -> IO [b]
toListFP vec = traverse moveToForeignPtrWrapper =<< toListP vec
pushBack :: HasStdVectorCopyable a => StdVector a -> a -> IO ()
pushBack (StdVector fptr) value = withForeignPtr fptr (cPushBack value)
pushBackP :: HasStdVector a => StdVector a -> Ptr a -> IO ()
pushBackP (StdVector fptr) valueP = withForeignPtr fptr (cPushBackByPtr valueP)
pushBackFP :: (Coercible a' (ForeignPtr a), HasStdVector a) => StdVector a -> a' -> IO ()
pushBackFP vec vfptr = withForeignPtr (coerce vfptr) (pushBackP vec)
| null | https://raw.githubusercontent.com/hercules-ci/hercules-ci-agent/ff5ab7f4db65a339849d91120c6028a5d2f13e34/hercules-ci-cnix-store/src/Hercules/CNix/Std/Vector.hs | haskell | # LANGUAGE OverloadedStrings #
| @std::vector@
| Helper for defining templated instances | # LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# OPTIONS_GHC -fno - warn - unused - matches #
Original author
module Hercules.CNix.Std.Vector
( stdVectorCtx,
instanceStdVector,
instanceStdVectorCopyable,
CStdVector,
StdVector (StdVector),
Hercules.CNix.Std.Vector.new,
size,
toVector,
toVectorP,
toListP,
toListFP,
Hercules.CNix.Std.Vector.toList,
Hercules.CNix.Std.Vector.fromList,
fromListFP,
pushBack,
pushBackP,
pushBackFP,
)
where
import Control.Exception (mask_)
import Data.Coerce (Coercible, coerce)
import Data.Foldable
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import Foreign
import Foreign.C
import Hercules.CNix.Encapsulation
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Unsafe as CU
import Language.Haskell.TH
import Prelude
data CStdVector a
stdVectorCtx :: C.Context
stdVectorCtx = C.cppCtx `mappend` C.cppTypePairs [("std::vector", [t|CStdVector|])]
newtype StdVector a = StdVector (ForeignPtr (CStdVector a))
instance HasStdVector a => HasEncapsulation (CStdVector a) (StdVector a) where
moveToForeignPtrWrapper x = StdVector <$> newForeignPtr cDelete x
class HasStdVector a where
cNew :: IO (Ptr (CStdVector a))
cDelete :: FunPtr (Ptr (CStdVector a) -> IO ())
cSize :: Ptr (CStdVector a) -> IO CSize
cCopies :: Ptr (CStdVector a) -> Ptr (Ptr a) -> IO ()
cPushBackByPtr :: Ptr a -> Ptr (CStdVector a) -> IO ()
class HasStdVector a => HasStdVectorCopyable a where
cCopyTo :: Ptr (CStdVector a) -> Ptr a -> IO ()
cPushBack :: a -> Ptr (CStdVector a) -> IO ()
roll :: String -> Q [Dec] -> Q [Dec]
roll cType d =
concat
<$> sequence
[ C.include "<vector>",
C.include "<algorithm>",
C.substitute
[ ("T", const cType),
("VEC", \var -> "$(std::vector<" ++ cType ++ ">* " ++ var ++ ")")
]
d
]
instanceStdVector :: String -> DecsQ
instanceStdVector cType =
roll
cType
[d|
instance HasStdVector $(C.getHaskellType False cType) where
cNew = [CU.exp| std::vector<@T()>* { new std::vector<@T()>() } |]
cDelete = [C.funPtr| void deleteStdVector(std::vector<@T()>* vec) { delete vec; } |]
cSize vec = [CU.exp| size_t { @VEC(vec)->size() } |]
cCopies vec dstPtr =
[CU.block| void {
const std::vector<@T()>& vec = *@VEC(vec);
@T()** aim = $(@T()** dstPtr);
for (auto item : vec) {
*aim = new @T()(item);
aim++;
}
}|]
cPushBackByPtr ptr vec = [CU.exp| void { @VEC(vec)->push_back(*$(@T() *ptr)) } |]
|]
instanceStdVectorCopyable :: String -> DecsQ
instanceStdVectorCopyable cType =
roll
cType
[d|
instance HasStdVectorCopyable $(C.getHaskellType False cType) where
cCopyTo vec dstPtr =
[CU.block| void {
const std::vector<@T()>* vec = @VEC(vec);
std::copy(vec->begin(), vec->end(), $(@T()* dstPtr));
} |]
cPushBack value vec = [CU.exp| void { @VEC(vec)->push_back($(@T() value)) } |]
|]
new :: forall a. HasStdVector a => IO (StdVector a)
new = mask_ $ do
ptr <- cNew @a
StdVector <$> newForeignPtr cDelete ptr
size :: HasStdVector a => StdVector a -> IO Int
size (StdVector fptr) = fromIntegral <$> withForeignPtr fptr cSize
toVector :: (HasStdVectorCopyable a, Storable a) => StdVector a -> IO (VS.Vector a)
toVector stdVec@(StdVector stdVecFPtr) = do
vecSize <- size stdVec
hsVec <- VSM.new vecSize
withForeignPtr stdVecFPtr $ \stdVecPtr ->
VSM.unsafeWith hsVec $ \hsVecPtr ->
cCopyTo stdVecPtr hsVecPtr
VS.unsafeFreeze hsVec
toVectorP :: HasStdVector a => StdVector a -> IO (VS.Vector (Ptr a))
toVectorP stdVec@(StdVector stdVecFPtr) = do
vecSize <- size stdVec
hsVec <- VSM.new vecSize
withForeignPtr stdVecFPtr $ \stdVecPtr ->
VSM.unsafeWith hsVec $ \hsVecPtr ->
cCopies stdVecPtr hsVecPtr
VS.unsafeFreeze hsVec
fromList :: HasStdVectorCopyable a => [a] -> IO (StdVector a)
fromList as = do
vec <- Hercules.CNix.Std.Vector.new
for_ as $ \a -> pushBack vec a
pure vec
fromListFP :: (Coercible a' (ForeignPtr a), HasStdVector a) => [a'] -> IO (StdVector a)
fromListFP as = do
vec <- Hercules.CNix.Std.Vector.new
for_ as $ \a -> pushBackFP vec a
pure vec
toList :: (HasStdVectorCopyable a, Storable a) => StdVector a -> IO [a]
toList vec = VS.toList <$> toVector vec
toListP :: (HasStdVector a) => StdVector a -> IO [Ptr a]
toListP vec = VS.toList <$> toVectorP vec
toListFP :: (HasEncapsulation a b, HasStdVector a) => StdVector a -> IO [b]
toListFP vec = traverse moveToForeignPtrWrapper =<< toListP vec
pushBack :: HasStdVectorCopyable a => StdVector a -> a -> IO ()
pushBack (StdVector fptr) value = withForeignPtr fptr (cPushBack value)
pushBackP :: HasStdVector a => StdVector a -> Ptr a -> IO ()
pushBackP (StdVector fptr) valueP = withForeignPtr fptr (cPushBackByPtr valueP)
pushBackFP :: (Coercible a' (ForeignPtr a), HasStdVector a) => StdVector a -> a' -> IO ()
pushBackFP vec vfptr = withForeignPtr (coerce vfptr) (pushBackP vec)
|
008f26e74d93dee9afe9b29a1295936fd3a35ead2428f6879de3c1ee0a77bacd | adventuring/tootsville.net | messaging.lisp | ;;;; -*- lisp -*-
;;;
src / messaging.lisp is part of
;;;
Copyright © 2008 - 2017 Bruce - Robert Pocock ; © 2018 - 2021 The
Corporation for Inter - World Tourism and Adventuring ( ciwta.org ) .
;;;
This program is Free Software : you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License
as published by the Free Software Foundation ; either version 3 of
the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; Affero General Public License for more details.
;;;
You should have received a copy of the GNU Affero General Public
;;; License along with this program. If not, see
;;; </>.
;;;
;;; You can reach CIWTA at /, or write to us at:
;;;
PO Box 23095
Oakland Park , FL 33307 - 3095
USA
(in-package :Tootsville)
(defvar *banhammer* (make-hash-table :test 'equal)
"A list of IP addresses which are banned from connecting.")
(defun broadcast (message &key near except)
"Broadcast MESSAGE to all ∞ Mode listeners connected who are near NEAR.
NEAR is a Toot character who is the epicenter of the message, which is
currently ignored.
EXCEPT is a user or Toot who does not need to receive the broadcast
message (usually the originator)"
(ws-broadcast *infinity-websocket-resource* message
:near near
:except (user-stream except))
(tcp-broadcast message)
(robot-broadcast message near :except except))
(defun unicast (message &optional (user (active-player)))
"Send MESSAGE directly to USER (which may be a Person or Toot)"
(if-let ((client (user-stream user)))
(with-websocket-disconnections (client)
(ws-unicast message client))
(if (robotp user)
(robot-unicast message user)
(v:warn :stream "Unable to transmit unicast message to ~a: not connected"
user))))
(defmethod peer-address ((Toot Toot))
"Get the peer address of TOOT.
Returns inet: + the Internet protocol address, for connected
users. For robots, returns robot:, and otherwise returns unknown:"
(if-let (stream (user-stream Toot))
(peer-address stream)
(if-let (robot (gethash (Toot-name Toot) *robots*))
"robot:"
"unknown:")))
(defmethod peer-address (stream)
"Get the Internet address of STREAM (a Hunchensocket stream)"
(format nil "inet:~a"
(string-trim " " (second (split-sequence #\: (second (split-sequence #\, (second (split-sequence #\" (format nil "~s" (slot-value stream 'hunchensocket::input-stream)))))))))))
(defun find-thread (name)
"Find any thread whose name includes NAME"
(remove-if-not (lambda (thread) (search name (thread-name thread)))
(all-threads)))
| null | https://raw.githubusercontent.com/adventuring/tootsville.net/985c11a91dd1a21b77d7378362d86cf1c031b22c/src/messaging.lisp | lisp | -*- lisp -*-
© 2018 - 2021 The
either version 3 of
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Affero General Public License for more details.
License along with this program. If not, see
</>.
You can reach CIWTA at /, or write to us at:
| src / messaging.lisp is part of
Corporation for Inter - World Tourism and Adventuring ( ciwta.org ) .
This program is Free Software : you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License
the License , or ( at your option ) any later version .
You should have received a copy of the GNU Affero General Public
PO Box 23095
Oakland Park , FL 33307 - 3095
USA
(in-package :Tootsville)
(defvar *banhammer* (make-hash-table :test 'equal)
"A list of IP addresses which are banned from connecting.")
(defun broadcast (message &key near except)
"Broadcast MESSAGE to all ∞ Mode listeners connected who are near NEAR.
NEAR is a Toot character who is the epicenter of the message, which is
currently ignored.
EXCEPT is a user or Toot who does not need to receive the broadcast
message (usually the originator)"
(ws-broadcast *infinity-websocket-resource* message
:near near
:except (user-stream except))
(tcp-broadcast message)
(robot-broadcast message near :except except))
(defun unicast (message &optional (user (active-player)))
"Send MESSAGE directly to USER (which may be a Person or Toot)"
(if-let ((client (user-stream user)))
(with-websocket-disconnections (client)
(ws-unicast message client))
(if (robotp user)
(robot-unicast message user)
(v:warn :stream "Unable to transmit unicast message to ~a: not connected"
user))))
(defmethod peer-address ((Toot Toot))
"Get the peer address of TOOT.
Returns inet: + the Internet protocol address, for connected
users. For robots, returns robot:, and otherwise returns unknown:"
(if-let (stream (user-stream Toot))
(peer-address stream)
(if-let (robot (gethash (Toot-name Toot) *robots*))
"robot:"
"unknown:")))
(defmethod peer-address (stream)
"Get the Internet address of STREAM (a Hunchensocket stream)"
(format nil "inet:~a"
(string-trim " " (second (split-sequence #\: (second (split-sequence #\, (second (split-sequence #\" (format nil "~s" (slot-value stream 'hunchensocket::input-stream)))))))))))
(defun find-thread (name)
"Find any thread whose name includes NAME"
(remove-if-not (lambda (thread) (search name (thread-name thread)))
(all-threads)))
|
e35debdde6b1a971b14f0e9cbf77487803e9d0358d9af5d0e469b854574fed3a | mikeball/foundation | foundation_test.clj | (ns taoclj.foundation-test
(:require [clojure.test :refer :all]
[taoclj.foundation :refer :all]
[taoclj.foundation.tests-config :refer [tests-db]]
[taoclj.foundation.execution :refer [execute]]))
(deftest can-connect
(is (= (with-open [cnx (.getConnection tests-db)]
(execute cnx "select 'ehlo' as msg;"))
'(({:msg "ehlo"})))))
(deftest qry->single-statements
(is (= '({:msg "ehlo"})
(qry-> tests-db
(execute "select 'ehlo' as msg;")))))
(deftest trx->single-statements
(is (= '({:msg "ehlo"})
(trx-> tests-db
(execute "select 'ehlo' as msg;")))))
(deftest qry->multiple-statements
(is (= '[({:msg1 "ehlo1"}) ({:msg2 "ehlo2"})]
(qry-> tests-db
(execute "select 'ehlo1' as msg1;")
(execute "select 'ehlo2' as msg2;")))))
(deftest trx->multiple-statements
(is (= '[({:msg3 "ehlo3"}) ({:msg4 "ehlo4"})]
(trx-> tests-db
(execute "select 'ehlo3' as msg3;")
(execute "select 'ehlo4' as msg4;")))))
(deftest qry->no-result
(is (= nil ; nil may be bad choice for result on nothing found.
(qry-> tests-db
(execute "select 'ehlo' where true=false;")))))
(deftest trx->no-result
(is (= nil ; nil may be bad choice for result on nothing found.
(trx-> tests-db
(execute "select 'ehlo' where true=false;")))))
(deftest multiple-results-in-single-statement-returned
(is (= '(({:msg1 "ehlo1"}) ({:msg2 "ehlo2"}))
(qry-> tests-db
(execute "select 'ehlo1' as msg1; select 'ehlo2' as msg2;")))))
; qry-> returns false on errors
; ********** Select Tests ***********************
; (run-tests *ns*)
; (run-tests 'taoclj.foundation-test)
;; (trx-> datasource
( insert : users { : name " " : username " bob " : password " abc123 " } )
;; (insert :user-roles (with-rs 1 {:user-id (first rs)
;; :role-id item})))
;; (qry-> tests-db
;; (execute "SELECT id, name FROM insert_single_record;")
( first - result )
{ : : read - commited }
;; )
; insert multiple records with same fields
; insert multiple records with different fields
; error-is-thrown-when-mixing-maps-and-vectors
| null | https://raw.githubusercontent.com/mikeball/foundation/a0376f49759b1552f2f70e7585029b592b6fb346/test/taoclj/foundation_test.clj | clojure | nil may be bad choice for result on nothing found.
nil may be bad choice for result on nothing found.
qry-> returns false on errors
********** Select Tests ***********************
(run-tests *ns*)
(run-tests 'taoclj.foundation-test)
(trx-> datasource
(insert :user-roles (with-rs 1 {:user-id (first rs)
:role-id item})))
(qry-> tests-db
(execute "SELECT id, name FROM insert_single_record;")
)
insert multiple records with same fields
insert multiple records with different fields
error-is-thrown-when-mixing-maps-and-vectors | (ns taoclj.foundation-test
(:require [clojure.test :refer :all]
[taoclj.foundation :refer :all]
[taoclj.foundation.tests-config :refer [tests-db]]
[taoclj.foundation.execution :refer [execute]]))
(deftest can-connect
(is (= (with-open [cnx (.getConnection tests-db)]
(execute cnx "select 'ehlo' as msg;"))
'(({:msg "ehlo"})))))
(deftest qry->single-statements
(is (= '({:msg "ehlo"})
(qry-> tests-db
(execute "select 'ehlo' as msg;")))))
(deftest trx->single-statements
(is (= '({:msg "ehlo"})
(trx-> tests-db
(execute "select 'ehlo' as msg;")))))
(deftest qry->multiple-statements
(is (= '[({:msg1 "ehlo1"}) ({:msg2 "ehlo2"})]
(qry-> tests-db
(execute "select 'ehlo1' as msg1;")
(execute "select 'ehlo2' as msg2;")))))
(deftest trx->multiple-statements
(is (= '[({:msg3 "ehlo3"}) ({:msg4 "ehlo4"})]
(trx-> tests-db
(execute "select 'ehlo3' as msg3;")
(execute "select 'ehlo4' as msg4;")))))
(deftest qry->no-result
(qry-> tests-db
(execute "select 'ehlo' where true=false;")))))
(deftest trx->no-result
(trx-> tests-db
(execute "select 'ehlo' where true=false;")))))
(deftest multiple-results-in-single-statement-returned
(is (= '(({:msg1 "ehlo1"}) ({:msg2 "ehlo2"}))
(qry-> tests-db
(execute "select 'ehlo1' as msg1; select 'ehlo2' as msg2;")))))
( insert : users { : name " " : username " bob " : password " abc123 " } )
( first - result )
{ : : read - commited }
|
cfde3bc8e0f029ae47ef7c9514549608730e16186e79d8971dd4be551917b460 | imrehg/ypsilon | color.scm | #!nobacktrace
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (ypsilon pango color)
(export pango_color_copy
pango_color_free
pango_color_get_type
pango_color_parse
pango_color_to_string)
(import (rnrs) (ypsilon ffi))
(define lib-name
(cond (on-linux "libpango-1.0.so.0")
(on-sunos "libpango-1.0.so.0")
(on-freebsd "libpango-1.0.so.0")
(on-openbsd "libpango-1.0.so.0")
(on-darwin "Gtk.framework/Gtk")
(on-windows "libpango-1.0-0.dll")
(else
(assertion-violation #f "can not locate Pango library, unknown operating system"))))
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
;; PangoColor* pango_color_copy (const PangoColor* src)
(define-function void* pango_color_copy (void*))
;; void pango_color_free (PangoColor* color)
(define-function void pango_color_free (void*))
GType pango_color_get_type ( void )
(define-function unsigned-long pango_color_get_type ())
;; gboolean pango_color_parse (PangoColor* color, const char* spec)
(define-function int pango_color_parse (void* char*))
;; gchar* pango_color_to_string(const PangoColor* color)
(define-function char* pango_color_to_string (void*))
) ;[end]
| null | https://raw.githubusercontent.com/imrehg/ypsilon/e57a06ef5c66c1a88905b2be2fa791fa29848514/sitelib/ypsilon/pango/color.scm | scheme | PangoColor* pango_color_copy (const PangoColor* src)
void pango_color_free (PangoColor* color)
gboolean pango_color_parse (PangoColor* color, const char* spec)
gchar* pango_color_to_string(const PangoColor* color)
[end] | #!nobacktrace
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (ypsilon pango color)
(export pango_color_copy
pango_color_free
pango_color_get_type
pango_color_parse
pango_color_to_string)
(import (rnrs) (ypsilon ffi))
(define lib-name
(cond (on-linux "libpango-1.0.so.0")
(on-sunos "libpango-1.0.so.0")
(on-freebsd "libpango-1.0.so.0")
(on-openbsd "libpango-1.0.so.0")
(on-darwin "Gtk.framework/Gtk")
(on-windows "libpango-1.0-0.dll")
(else
(assertion-violation #f "can not locate Pango library, unknown operating system"))))
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
(define-function void* pango_color_copy (void*))
(define-function void pango_color_free (void*))
GType pango_color_get_type ( void )
(define-function unsigned-long pango_color_get_type ())
(define-function int pango_color_parse (void* char*))
(define-function char* pango_color_to_string (void*))
|
35bfeaff7ce6c445574587d75bd1ad3f319a80889d3e64bd14338f74ac863d6e | FlowForwarding/loom | simple_ne_logic.erl | %%------------------------------------------------------------------------------
Copyright 2014 FlowForwarding.org
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%-----------------------------------------------------------------------------
@author Erlang Solutions Ltd. < >
2014 FlowForwarding.org
%%% @doc
Simple Network executive logic .
%%% @end
-module(simple_ne_logic).
-behaviour(gen_server).
-define(SERVER, ?MODULE).
-define(STATE, simple_ne_logic_state).
-include("simple_ne_logger.hrl").
-include_lib("ofs_handler/include/ofs_handler.hrl").
-include_lib("of_protocol/include/of_protocol.hrl").
-record(?STATE, {
switches_table
}).
%% ------------------------------------------------------------------
%% API Function Exports
%% ------------------------------------------------------------------
-export([start_link/0]).
%% ------------------------------------------------------------------
gen_server Function Exports
%% ------------------------------------------------------------------
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
%% ------------------------------------------------------------------
%% API Function Definitions
%% ------------------------------------------------------------------
-export([
ofsh_init/5,
ofsh_connect/6,
ofsh_disconnect/2,
ofsh_failover/0,
ofsh_handle_message/2,
ofsh_handle_error/2,
ofsh_terminate/1,
switches/0,
send/2,
sync_send/2,
subscribe/2
]).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
%% ----------------------------------------------------------------------------
%% Callback API
%% ----------------------------------------------------------------------------
% These functions are called from simple_ne_ofsh.erl.
-spec ofsh_init(handler_mode(), ipaddress(), datapath_id(), of_version(), connection()) -> ok.
ofsh_init(active, IpAddr, DatapathId, Version, Connection) ->
% new main connection
?INFO("new active connection: ~p ~p~n", [IpAddr, DatapathId]),
ok = gen_server:call(?MODULE, {init, IpAddr, DatapathId, Version, Connection}),
ok;
ofsh_init(standby, IpAddr, DatapathId, _Version, _Connection) ->
% new main connection
% TODO: this will never happen, failover not implemented ofs_handler.
?INFO("new standby connection: ~p ~p~n", [IpAddr, DatapathId]),
ok.
-spec ofsh_connect(handler_mode(), ipaddress(), datapath_id(), of_version(), connection(), auxid()) -> ok.
ofsh_connect(active, _IpAddr, DatapathId, _Version, _Connection, AuxId) ->
% new auxiliary connection
% The simple network executive doesn't need to capture the auxiliary
% connections, so they are not passed to the simple_ne_logic pid.
?INFO("new active aux connection: ~p ~p~n", [AuxId, DatapathId]),
ok;
ofsh_connect(standby, _IpAddr, DatapathId, _Version, _Connection, AuxId) ->
% new auxiliary connection
% TODO: this will never happen, failover not implemented ofs_handler.
?INFO("new standby aux connection: ~p ~p~n", [AuxId, DatapathId]),
ok.
-spec ofsh_disconnect(auxid(), datapath_id()) -> ok.
ofsh_disconnect(AuxId, DatapathId) ->
% lost an auxiliary connection
% The simple network executive is not tracking the auxiliary
% connections, so they are not passed to the simple_ne_logic pid.
?INFO("disconnect aux connection: ~p ~p~n", [AuxId, DatapathId]),
ok.
-spec ofsh_failover() -> ok.
ofsh_failover() ->
% ofs_handler failover
% TODO: this will never happen, failover not implemented ofs_handler.
?INFO("failover"),
ok.
-spec ofsh_handle_message(datapath_id(), ofp_message()) -> ok.
ofsh_handle_message(DatapathId, Msg) ->
% process a message from the switch.
% the simple network executive doesn't process any messages
% from the switch, so they are not passed to the simple_ne_logic pid.
?INFO("message in: ~p ~p~n", [DatapathId, Msg]),
ok.
-spec ofsh_handle_error(datapath_id(), error_reason()) -> ok.
ofsh_handle_error(DatapathId, Reason) ->
% Error on connection.
?INFO("rror in: ~p ~p~n", [DatapathId, Reason]),
ok.
-spec ofsh_terminate(datapath_id()) -> ok.
ofsh_terminate(DatapathId) ->
% lost the main connection
?INFO("disconnect main connection: ~p~n", [DatapathId]),
ok = gen_server:call(?MODULE, {terminate, DatapathId}),
ok.
%% ----------------------------------------------------------------------------
%% Utility API
%% ----------------------------------------------------------------------------
%% @doc
%% Returns the list of connected switches. The returned tuples have
%% the IP address of the switch (for calling simple_ne_logic
%% functions), the datapath id (for calling ofs_handler),
%% the open flow version number (for calling of_msg_lib), the
%% connection (for calling of_driver).
%% @end
-spec switches() -> [{ipaddress(), datapath_id(), of_version(), connection()}].
switches() ->
gen_server:call(?SERVER, switches).
%% @doc
%% Send ``Msg'' to the switch connected from ``IpAddr''. Returns
` ` not_found '' if there is no switch connected from ` ` IpAddrr '' , ` ` ok ''
%% if the message is sent successfully, or ``error'' if there was an error
%% sending the request to the switch.
%% @end
-spec send(ipaddress(), ofp_message()) -> not_found | ok | {error, error_reason()}.
send(IpAddr, Msg) ->
gen_server:call(?SERVER, {send, IpAddr, Msg}).
%% @doc
%% Send ``Msg'' to the switch connected from ``IpAddr'' and wait
%% for any replies. Returns
` ` not_found '' if there is no switch connected from ` ` IpAddrr '' ,
%% ``{ok, Reply}''
%% if the message is sent successfully, or ``error'' if there was an error
%% sending the request to the switch. ``Reply'' is ``no_reply'' if there
%% was no reply to the request, or ``Reply'' is an ``ofp_message'' record
%% that may be decoded with ``of_msg_lib:decode/1''.
%% @end
-spec sync_send(ipaddress(), ofp_message()) -> not_found | {ok, no_reply | ofp_message()} | {error, error_reason()}.
sync_send(IpAddr, Msg) ->
gen_server:call(?SERVER, {sync_send, IpAddr, Msg}).
%% @doc
Subscribe to messages received from ` ` IpAddr '' .
%% @end
-spec subscribe(ipaddress(), subscription_item()) -> ok.
subscribe(IpAddr, MsgType) ->
gen_server:call(?SERVER, {subscribe, IpAddr, MsgType}).
%% ------------------------------------------------------------------
gen_server Function Definitions
%% ------------------------------------------------------------------
init([]) ->
SwitchesTable = ets:new(switches, [bag, protected]),
State = #?STATE{switches_table = SwitchesTable},
{ok, State}.
handle_call({init, IpAddr, DatapathId, Version, Connection}, _From, State) ->
% Got the main connection, remember tha mapping between the ip address
% and the datapath id
ok = register_switch(IpAddr, DatapathId, Version, Connection, State),
{reply, ok, State};
handle_call({terminate, DatapathId}, _From, State) ->
ok = deregister_switch(DatapathId, State),
{reply, ok, State};
handle_call({sync_send, IpAddr, Msg}, _From, State) ->
Reply = do_sync_send(IpAddr, Msg, State),
{reply, Reply, State};
handle_call({send, IpAddr, Msg}, _From, State) ->
Reply = do_send(IpAddr, Msg, State),
{reply, Reply, State};
handle_call({subscribe, IpAddr, MsgType}, _From, State) ->
ok = do_subscribe(IpAddr, MsgType, State),
{reply, ok, State};
handle_call(switches, _From, State) ->
Reply = do_get_switches(State),
{reply, Reply, State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
register_switch(IpAddr, DatapathId, Version, Connection, #?STATE{switches_table = Switches}) ->
true = ets:insert(Switches, {IpAddr, DatapathId, Version, Connection}),
ok.
deregister_switch(DatapathId, #?STATE{switches_table = Switches}) ->
[Object] = ets:match_object(Switches, {'_', DatapathId, '_', '_'}),
true = ets:delete_object(Switches, Object),
ok.
do_get_switches(#?STATE{switches_table = Switches}) ->
ets:tab2list(Switches).
find_switch(IpAddr, #?STATE{switches_table = Switches}) ->
% All LINC logical switches on the same capable switch connect
% with the IP address of the capable switch. There may be
% duplicates. The sne API doesn't really accomodate this, so
cheat by returning the first logical switch with the IP address .
case ets:lookup(Switches, IpAddr) of
[] -> not_found;
[{_, DatapathId, _, _}|_] -> DatapathId
end.
do_sync_send(IpAddr, Msg, State) ->
case find_switch(IpAddr, State) of
not_found -> not_found;
DatapathId ->
ofs_handler:sync_send(DatapathId, Msg)
end.
do_send(IpAddr, Msg, State) ->
case find_switch(IpAddr, State) of
not_found -> not_found;
DatapathId ->
ofs_handler:send(DatapathId, Msg)
end.
do_subscribe(IpAddr, MsgType, State) ->
case find_switch(IpAddr, State) of
not_found -> not_found;
DatapathId ->
% use our callback module to receive the handle_message.
ofs_handler:subscribe(DatapathId, simple_ne_ofsh, MsgType)
end.
| null | https://raw.githubusercontent.com/FlowForwarding/loom/86a9c5aa8b7d4776062365716c9a3dbbf3330bc5/simple_ne/apps/simple_ne/src/simple_ne_logic.erl | erlang | ------------------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------
API Function Exports
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
API Function Definitions
------------------------------------------------------------------
----------------------------------------------------------------------------
Callback API
----------------------------------------------------------------------------
These functions are called from simple_ne_ofsh.erl.
new main connection
new main connection
TODO: this will never happen, failover not implemented ofs_handler.
new auxiliary connection
The simple network executive doesn't need to capture the auxiliary
connections, so they are not passed to the simple_ne_logic pid.
new auxiliary connection
TODO: this will never happen, failover not implemented ofs_handler.
lost an auxiliary connection
The simple network executive is not tracking the auxiliary
connections, so they are not passed to the simple_ne_logic pid.
ofs_handler failover
TODO: this will never happen, failover not implemented ofs_handler.
process a message from the switch.
the simple network executive doesn't process any messages
from the switch, so they are not passed to the simple_ne_logic pid.
Error on connection.
lost the main connection
----------------------------------------------------------------------------
Utility API
----------------------------------------------------------------------------
@doc
Returns the list of connected switches. The returned tuples have
the IP address of the switch (for calling simple_ne_logic
functions), the datapath id (for calling ofs_handler),
the open flow version number (for calling of_msg_lib), the
connection (for calling of_driver).
@end
@doc
Send ``Msg'' to the switch connected from ``IpAddr''. Returns
if the message is sent successfully, or ``error'' if there was an error
sending the request to the switch.
@end
@doc
Send ``Msg'' to the switch connected from ``IpAddr'' and wait
for any replies. Returns
``{ok, Reply}''
if the message is sent successfully, or ``error'' if there was an error
sending the request to the switch. ``Reply'' is ``no_reply'' if there
was no reply to the request, or ``Reply'' is an ``ofp_message'' record
that may be decoded with ``of_msg_lib:decode/1''.
@end
@doc
@end
------------------------------------------------------------------
------------------------------------------------------------------
Got the main connection, remember tha mapping between the ip address
and the datapath id
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------
All LINC logical switches on the same capable switch connect
with the IP address of the capable switch. There may be
duplicates. The sne API doesn't really accomodate this, so
use our callback module to receive the handle_message. | Copyright 2014 FlowForwarding.org
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author Erlang Solutions Ltd. < >
2014 FlowForwarding.org
Simple Network executive logic .
-module(simple_ne_logic).
-behaviour(gen_server).
-define(SERVER, ?MODULE).
-define(STATE, simple_ne_logic_state).
-include("simple_ne_logger.hrl").
-include_lib("ofs_handler/include/ofs_handler.hrl").
-include_lib("of_protocol/include/of_protocol.hrl").
-record(?STATE, {
switches_table
}).
-export([start_link/0]).
gen_server Function Exports
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-export([
ofsh_init/5,
ofsh_connect/6,
ofsh_disconnect/2,
ofsh_failover/0,
ofsh_handle_message/2,
ofsh_handle_error/2,
ofsh_terminate/1,
switches/0,
send/2,
sync_send/2,
subscribe/2
]).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
-spec ofsh_init(handler_mode(), ipaddress(), datapath_id(), of_version(), connection()) -> ok.
ofsh_init(active, IpAddr, DatapathId, Version, Connection) ->
?INFO("new active connection: ~p ~p~n", [IpAddr, DatapathId]),
ok = gen_server:call(?MODULE, {init, IpAddr, DatapathId, Version, Connection}),
ok;
ofsh_init(standby, IpAddr, DatapathId, _Version, _Connection) ->
?INFO("new standby connection: ~p ~p~n", [IpAddr, DatapathId]),
ok.
-spec ofsh_connect(handler_mode(), ipaddress(), datapath_id(), of_version(), connection(), auxid()) -> ok.
ofsh_connect(active, _IpAddr, DatapathId, _Version, _Connection, AuxId) ->
?INFO("new active aux connection: ~p ~p~n", [AuxId, DatapathId]),
ok;
ofsh_connect(standby, _IpAddr, DatapathId, _Version, _Connection, AuxId) ->
?INFO("new standby aux connection: ~p ~p~n", [AuxId, DatapathId]),
ok.
-spec ofsh_disconnect(auxid(), datapath_id()) -> ok.
ofsh_disconnect(AuxId, DatapathId) ->
?INFO("disconnect aux connection: ~p ~p~n", [AuxId, DatapathId]),
ok.
-spec ofsh_failover() -> ok.
ofsh_failover() ->
?INFO("failover"),
ok.
-spec ofsh_handle_message(datapath_id(), ofp_message()) -> ok.
ofsh_handle_message(DatapathId, Msg) ->
?INFO("message in: ~p ~p~n", [DatapathId, Msg]),
ok.
-spec ofsh_handle_error(datapath_id(), error_reason()) -> ok.
ofsh_handle_error(DatapathId, Reason) ->
?INFO("rror in: ~p ~p~n", [DatapathId, Reason]),
ok.
-spec ofsh_terminate(datapath_id()) -> ok.
ofsh_terminate(DatapathId) ->
?INFO("disconnect main connection: ~p~n", [DatapathId]),
ok = gen_server:call(?MODULE, {terminate, DatapathId}),
ok.
-spec switches() -> [{ipaddress(), datapath_id(), of_version(), connection()}].
switches() ->
gen_server:call(?SERVER, switches).
` ` not_found '' if there is no switch connected from ` ` IpAddrr '' , ` ` ok ''
-spec send(ipaddress(), ofp_message()) -> not_found | ok | {error, error_reason()}.
send(IpAddr, Msg) ->
gen_server:call(?SERVER, {send, IpAddr, Msg}).
` ` not_found '' if there is no switch connected from ` ` IpAddrr '' ,
-spec sync_send(ipaddress(), ofp_message()) -> not_found | {ok, no_reply | ofp_message()} | {error, error_reason()}.
sync_send(IpAddr, Msg) ->
gen_server:call(?SERVER, {sync_send, IpAddr, Msg}).
Subscribe to messages received from ` ` IpAddr '' .
-spec subscribe(ipaddress(), subscription_item()) -> ok.
subscribe(IpAddr, MsgType) ->
gen_server:call(?SERVER, {subscribe, IpAddr, MsgType}).
gen_server Function Definitions
init([]) ->
SwitchesTable = ets:new(switches, [bag, protected]),
State = #?STATE{switches_table = SwitchesTable},
{ok, State}.
handle_call({init, IpAddr, DatapathId, Version, Connection}, _From, State) ->
ok = register_switch(IpAddr, DatapathId, Version, Connection, State),
{reply, ok, State};
handle_call({terminate, DatapathId}, _From, State) ->
ok = deregister_switch(DatapathId, State),
{reply, ok, State};
handle_call({sync_send, IpAddr, Msg}, _From, State) ->
Reply = do_sync_send(IpAddr, Msg, State),
{reply, Reply, State};
handle_call({send, IpAddr, Msg}, _From, State) ->
Reply = do_send(IpAddr, Msg, State),
{reply, Reply, State};
handle_call({subscribe, IpAddr, MsgType}, _From, State) ->
ok = do_subscribe(IpAddr, MsgType, State),
{reply, ok, State};
handle_call(switches, _From, State) ->
Reply = do_get_switches(State),
{reply, Reply, State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
register_switch(IpAddr, DatapathId, Version, Connection, #?STATE{switches_table = Switches}) ->
true = ets:insert(Switches, {IpAddr, DatapathId, Version, Connection}),
ok.
deregister_switch(DatapathId, #?STATE{switches_table = Switches}) ->
[Object] = ets:match_object(Switches, {'_', DatapathId, '_', '_'}),
true = ets:delete_object(Switches, Object),
ok.
do_get_switches(#?STATE{switches_table = Switches}) ->
ets:tab2list(Switches).
find_switch(IpAddr, #?STATE{switches_table = Switches}) ->
cheat by returning the first logical switch with the IP address .
case ets:lookup(Switches, IpAddr) of
[] -> not_found;
[{_, DatapathId, _, _}|_] -> DatapathId
end.
do_sync_send(IpAddr, Msg, State) ->
case find_switch(IpAddr, State) of
not_found -> not_found;
DatapathId ->
ofs_handler:sync_send(DatapathId, Msg)
end.
do_send(IpAddr, Msg, State) ->
case find_switch(IpAddr, State) of
not_found -> not_found;
DatapathId ->
ofs_handler:send(DatapathId, Msg)
end.
do_subscribe(IpAddr, MsgType, State) ->
case find_switch(IpAddr, State) of
not_found -> not_found;
DatapathId ->
ofs_handler:subscribe(DatapathId, simple_ne_ofsh, MsgType)
end.
|
987c036bed6b75f87adea296e1382601db4883c74744acc9cf9544e41999d8d8 | rescript-association/genType | ext_bytes.mli | Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
external unsafe_blit_string : string -> int -> bytes -> int -> int -> unit
= "caml_blit_string"
[@@noalloc]
(** Port the {!Bytes.escaped} from trunk to make it not locale sensitive *)
val escaped : bytes -> bytes
| null | https://raw.githubusercontent.com/rescript-association/genType/c44251e969fb10d27a38d2bdeff6a5f4d778594f/src/compiler-libs-406/ext_bytes.mli | ocaml | * Port the {!Bytes.escaped} from trunk to make it not locale sensitive | Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
external unsafe_blit_string : string -> int -> bytes -> int -> int -> unit
= "caml_blit_string"
[@@noalloc]
val escaped : bytes -> bytes
|
c7055f391fdce4c6a4c3dd3b14a4ac0f835ddb87edfccc8c8665fc125f63be09 | haskell/process | Common.hs | # LANGUAGE CPP #
# LANGUAGE RecordWildCards #
module System.Process.Common
( CreateProcess (..)
, CmdSpec (..)
, StdStream (..)
, ProcessHandle(..)
, ProcessHandle__(..)
, ProcRetHandles (..)
, withFilePathException
, PHANDLE
, GroupID
, UserID
, modifyProcessHandle
, withProcessHandle
, fd_stdin
, fd_stdout
, fd_stderr
, mbFd
, mbPipe
, pfdToHandle
Avoid a warning on Windows
#ifdef WINDOWS
, CGid (..)
#else
, CGid
#endif
WINIO is only available on GHC 8.12 and up .
#if defined(__IO_MANAGER_WINIO__)
, HANDLE
, mbHANDLE
, mbPipeHANDLE
#endif
) where
import Control.Concurrent
import Control.Exception
import Data.String
import Foreign.Ptr
import Foreign.Storable
import System.Posix.Internals
import GHC.IO.Exception
import GHC.IO.Encoding
import qualified GHC.IO.FD as FD
import GHC.IO.Device
#if defined(__IO_MANAGER_WINIO__)
import GHC.IO.Handle.Windows
import GHC.IO.Windows.Handle (fromHANDLE, Io(), NativeHandle())
#endif
import GHC.IO.Handle.FD
import GHC.IO.Handle.Internals
import GHC.IO.Handle.Types hiding (ClosedHandle)
import System.IO.Error
import Data.Typeable
import System.IO (IOMode)
We do a minimal amount of CPP here to provide uniform data types across
Windows and POSIX .
#ifdef WINDOWS
import Data.Word (Word32)
import System.Win32.DebugApi (PHANDLE)
#if defined(__IO_MANAGER_WINIO__)
import System.Win32.Types (HANDLE)
#endif
#else
import System.Posix.Types
#endif
#ifdef WINDOWS
-- Define some missing types for Windows compatibility. Note that these values
-- will never actually be used, as the setuid/setgid system calls are not
applicable on Windows . No value of this type will ever exist .
newtype CGid = CGid Word32
deriving (Show, Eq)
type GroupID = CGid
type UserID = CGid
#else
type PHANDLE = CPid
#endif
data CreateProcess = CreateProcess{
^ Executable & arguments , or shell command . If ' cwd ' is ' Nothing ' , relative paths are resolved with respect to the current working directory . If ' cwd ' is provided , it is implementation - dependent whether relative paths are resolved with respect to ' cwd ' or the current working directory , so absolute paths should be used to ensure portability .
cwd :: Maybe FilePath, -- ^ Optional path to the working directory for the new process
env :: Maybe [(String,String)], -- ^ Optional environment (otherwise inherit from the current process)
^ How to determine stdin
std_out :: StdStream, -- ^ How to determine stdout
std_err :: StdStream, -- ^ How to determine stderr
^ Close all file descriptors except stdin , stdout and stderr in the new process ( on Windows , only works if std_in , std_out , and std_err are all Inherit ) . This implementation will call close on every fd from 3 to the maximum of open files , which can be slow for high maximum of open files .
create_group :: Bool, -- ^ Create a new process group
delegate_ctlc:: Bool, -- ^ Delegate control-C handling. Use this for interactive console processes to let them handle control-C themselves (see below for details).
--
On Windows this has no effect .
--
@since 1.2.0.0
detach_console :: Bool, -- ^ Use the windows DETACHED_PROCESS flag when creating the process; does nothing on other platforms.
--
@since 1.3.0.0
create_new_console :: Bool, -- ^ Use the windows CREATE_NEW_CONSOLE flag when creating the process; does nothing on other platforms.
--
-- Default: @False@
--
@since 1.3.0.0
^ Use to start the new process in a new session ; does nothing on other platforms .
--
@since 1.3.0.0
child_group :: Maybe GroupID, -- ^ Use posix setgid to set child process's group id; does nothing on other platforms.
--
-- Default: @Nothing@
--
-- @since 1.4.0.0
child_user :: Maybe UserID, -- ^ Use posix setuid to set child process's user id; does nothing on other platforms.
--
-- Default: @Nothing@
--
-- @since 1.4.0.0
^ On Windows systems this flag indicates that we should wait for the entire process tree
-- to finish before unblocking. On POSIX systems this flag is ignored. See $exec-on-windows for details.
--
-- Default: @False@
--
@since 1.5.0.0
} deriving (Show, Eq)
-- | contains the handles returned by a call to createProcess_Internal
data ProcRetHandles
= ProcRetHandles { hStdInput :: Maybe Handle
, hStdOutput :: Maybe Handle
, hStdError :: Maybe Handle
, procHandle :: ProcessHandle
}
data CmdSpec
= ShellCommand String
-- ^ A command line to execute using the shell
| RawCommand FilePath [String]
-- ^ The name of an executable with a list of arguments
--
The ' FilePath ' argument names the executable , and is interpreted
-- according to the platform's standard policy for searching for
-- executables. Specifically:
--
-- * on Unix systems the
-- < execvp(3)>
-- semantics is used, where if the executable filename does not
-- contain a slash (@/@) then the @PATH@ environment variable is
-- searched for the executable.
--
* on Windows systems the Win32 @CreateProcess@ semantics is used .
-- Briefly: if the filename does not contain a path, then the
-- directory containing the parent executable is searched, followed
-- by the current directory, then some standard locations, and
finally the current @PATH@. An @.exe@ extension is added if the
-- filename does not already have an extension. For full details
-- see the
-- <-us/library/windows/desktop/aa365527%28v=vs.85%29.aspx documentation>
for the Windows @SearchPath@ API .
deriving (Show, Eq)
| construct a ` ShellCommand ` from a string literal
--
@since 1.2.1.0
instance IsString CmdSpec where
fromString = ShellCommand
data StdStream
= Inherit -- ^ Inherit Handle from parent
| UseHandle Handle -- ^ Use the supplied Handle
| CreatePipe -- ^ Create a new pipe. The returned
-- @Handle@ will use the default encoding
-- and newline translation mode (just
like @Handle@s created by @openFile@ ) .
| NoStream -- ^ Close the stream's file descriptor without
-- passing a Handle. On POSIX systems this may
-- lead to strange behavior in the child process
-- because attempting to read or write after the
-- file has been closed throws an error. This
-- should only be used with child processes that
-- don't use the file descriptor at all. If you
-- wish to ignore the child process's output you
-- should either create a pipe and drain it
-- manually or pass a @Handle@ that writes to
-- @\/dev\/null@.
deriving (Eq, Show)
-- ----------------------------------------------------------------------------
-- ProcessHandle type
data ProcessHandle__ = OpenHandle { phdlProcessHandle :: PHANDLE }
-- | 'OpenExtHandle' is only applicable for
Windows platform . It represents [ Job
Objects]( / en - us / windows / win32 / procthread / job - objects ) .
| OpenExtHandle { phdlProcessHandle :: PHANDLE
-- ^ the process
, phdlJobHandle :: PHANDLE
-- ^ the job containing the process and
-- its subprocesses
}
| ClosedHandle ExitCode
| A handle to a process , which can be used to wait for termination
of the process using ' System . Process.waitForProcess ' .
None of the process - creation functions in this library wait for
termination : they all return a ' ProcessHandle ' which may be used
to wait for the process later .
On Windows a second wait method can be used to block for event
completion . This requires two handles . A process job handle and
a events handle to monitor .
of the process using 'System.Process.waitForProcess'.
None of the process-creation functions in this library wait for
termination: they all return a 'ProcessHandle' which may be used
to wait for the process later.
On Windows a second wait method can be used to block for event
completion. This requires two handles. A process job handle and
a events handle to monitor.
-}
data ProcessHandle
= ProcessHandle { phandle :: !(MVar ProcessHandle__)
, mb_delegate_ctlc :: !Bool
, waitpidLock :: !(MVar ())
}
withFilePathException :: FilePath -> IO a -> IO a
withFilePathException fpath act = handle mapEx act
where
mapEx ex = ioError (ioeSetFileName ex fpath)
modifyProcessHandle
:: ProcessHandle
-> (ProcessHandle__ -> IO (ProcessHandle__, a))
-> IO a
modifyProcessHandle (ProcessHandle m _ _) io = modifyMVar m io
withProcessHandle
:: ProcessHandle
-> (ProcessHandle__ -> IO a)
-> IO a
withProcessHandle (ProcessHandle m _ _) io = withMVar m io
fd_stdin, fd_stdout, fd_stderr :: FD
fd_stdin = 0
fd_stdout = 1
fd_stderr = 2
mbFd :: String -> FD -> StdStream -> IO FD
mbFd _ _std CreatePipe = return (-1)
mbFd _fun std Inherit = return std
mbFd _fn _std NoStream = return (-2)
mbFd fun _std (UseHandle hdl) =
withHandle fun hdl $ \Handle__{haDevice=dev,..} ->
case cast dev of
Just fd -> do
clear the O_NONBLOCK flag on this FD , if it is set , since
we 're exposing it externally ( see # 3316 )
fd' <- FD.setNonBlockingMode fd False
return (Handle__{haDevice=fd',..}, FD.fdFD fd')
Nothing ->
ioError (mkIOError illegalOperationErrorType
"createProcess" (Just hdl) Nothing
`ioeSetErrorString` "handle is not a file descriptor")
mbPipe :: StdStream -> Ptr FD -> IOMode -> IO (Maybe Handle)
mbPipe CreatePipe pfd mode = fmap Just (pfdToHandle pfd mode)
mbPipe _std _pfd _mode = return Nothing
pfdToHandle :: Ptr FD -> IOMode -> IO Handle
pfdToHandle pfd mode = do
fd <- peek pfd
let filepath = "fd:" ++ show fd
(fD,fd_type) <- FD.mkFD (fromIntegral fd) mode
avoid calling fstat ( )
False {-is_socket-}
False {-non-blocking-}
see # 3316
#if __GLASGOW_HASKELL__ >= 704
enc <- getLocaleEncoding
#else
let enc = localeEncoding
#endif
mkHandleFromFD fD' fd_type filepath mode False {-is_socket-} (Just enc)
#if defined(__IO_MANAGER_WINIO__)
-- It is not completely safe to pass the values -1 and -2 as HANDLE as it's an
unsigned type . -1 additionally is also the value for INVALID_HANDLE . However
-- it should be safe in this case since an invalid handle would be an error here
-- anyway and the chances of us getting a handle with a value of -2 is
-- astronomical. However, sometime in the future process should really use a
-- proper structure here.
mbHANDLE :: HANDLE -> StdStream -> IO HANDLE
mbHANDLE _std CreatePipe = return $ intPtrToPtr (-1)
mbHANDLE std Inherit = return std
mbHANDLE _std NoStream = return $ intPtrToPtr (-2)
mbHANDLE _std (UseHandle hdl) = handleToHANDLE hdl
mbPipeHANDLE :: StdStream -> Ptr HANDLE -> IOMode -> IO (Maybe Handle)
mbPipeHANDLE CreatePipe pfd mode =
do raw_handle <- peek pfd
let hwnd = fromHANDLE raw_handle :: Io NativeHandle
ident = "hwnd:" ++ show raw_handle
enc <- fmap Just getLocaleEncoding
Just <$> mkHandleFromHANDLE hwnd Stream ident mode enc
mbPipeHANDLE _std _pfd _mode = return Nothing
#endif
| null | https://raw.githubusercontent.com/haskell/process/e8fb187e793a6d47b34766c4a6b30c20c45e2a00/System/Process/Common.hs | haskell | Define some missing types for Windows compatibility. Note that these values
will never actually be used, as the setuid/setgid system calls are not
^ Optional path to the working directory for the new process
^ Optional environment (otherwise inherit from the current process)
^ How to determine stdout
^ How to determine stderr
^ Create a new process group
^ Delegate control-C handling. Use this for interactive console processes to let them handle control-C themselves (see below for details).
^ Use the windows DETACHED_PROCESS flag when creating the process; does nothing on other platforms.
^ Use the windows CREATE_NEW_CONSOLE flag when creating the process; does nothing on other platforms.
Default: @False@
^ Use posix setgid to set child process's group id; does nothing on other platforms.
Default: @Nothing@
@since 1.4.0.0
^ Use posix setuid to set child process's user id; does nothing on other platforms.
Default: @Nothing@
@since 1.4.0.0
to finish before unblocking. On POSIX systems this flag is ignored. See $exec-on-windows for details.
Default: @False@
| contains the handles returned by a call to createProcess_Internal
^ A command line to execute using the shell
^ The name of an executable with a list of arguments
according to the platform's standard policy for searching for
executables. Specifically:
* on Unix systems the
< execvp(3)>
semantics is used, where if the executable filename does not
contain a slash (@/@) then the @PATH@ environment variable is
searched for the executable.
Briefly: if the filename does not contain a path, then the
directory containing the parent executable is searched, followed
by the current directory, then some standard locations, and
filename does not already have an extension. For full details
see the
<-us/library/windows/desktop/aa365527%28v=vs.85%29.aspx documentation>
^ Inherit Handle from parent
^ Use the supplied Handle
^ Create a new pipe. The returned
@Handle@ will use the default encoding
and newline translation mode (just
^ Close the stream's file descriptor without
passing a Handle. On POSIX systems this may
lead to strange behavior in the child process
because attempting to read or write after the
file has been closed throws an error. This
should only be used with child processes that
don't use the file descriptor at all. If you
wish to ignore the child process's output you
should either create a pipe and drain it
manually or pass a @Handle@ that writes to
@\/dev\/null@.
----------------------------------------------------------------------------
ProcessHandle type
| 'OpenExtHandle' is only applicable for
^ the process
^ the job containing the process and
its subprocesses
is_socket
non-blocking
is_socket
It is not completely safe to pass the values -1 and -2 as HANDLE as it's an
it should be safe in this case since an invalid handle would be an error here
anyway and the chances of us getting a handle with a value of -2 is
astronomical. However, sometime in the future process should really use a
proper structure here. | # LANGUAGE CPP #
# LANGUAGE RecordWildCards #
module System.Process.Common
( CreateProcess (..)
, CmdSpec (..)
, StdStream (..)
, ProcessHandle(..)
, ProcessHandle__(..)
, ProcRetHandles (..)
, withFilePathException
, PHANDLE
, GroupID
, UserID
, modifyProcessHandle
, withProcessHandle
, fd_stdin
, fd_stdout
, fd_stderr
, mbFd
, mbPipe
, pfdToHandle
Avoid a warning on Windows
#ifdef WINDOWS
, CGid (..)
#else
, CGid
#endif
WINIO is only available on GHC 8.12 and up .
#if defined(__IO_MANAGER_WINIO__)
, HANDLE
, mbHANDLE
, mbPipeHANDLE
#endif
) where
import Control.Concurrent
import Control.Exception
import Data.String
import Foreign.Ptr
import Foreign.Storable
import System.Posix.Internals
import GHC.IO.Exception
import GHC.IO.Encoding
import qualified GHC.IO.FD as FD
import GHC.IO.Device
#if defined(__IO_MANAGER_WINIO__)
import GHC.IO.Handle.Windows
import GHC.IO.Windows.Handle (fromHANDLE, Io(), NativeHandle())
#endif
import GHC.IO.Handle.FD
import GHC.IO.Handle.Internals
import GHC.IO.Handle.Types hiding (ClosedHandle)
import System.IO.Error
import Data.Typeable
import System.IO (IOMode)
We do a minimal amount of CPP here to provide uniform data types across
Windows and POSIX .
#ifdef WINDOWS
import Data.Word (Word32)
import System.Win32.DebugApi (PHANDLE)
#if defined(__IO_MANAGER_WINIO__)
import System.Win32.Types (HANDLE)
#endif
#else
import System.Posix.Types
#endif
#ifdef WINDOWS
applicable on Windows . No value of this type will ever exist .
newtype CGid = CGid Word32
deriving (Show, Eq)
type GroupID = CGid
type UserID = CGid
#else
type PHANDLE = CPid
#endif
data CreateProcess = CreateProcess{
^ Executable & arguments , or shell command . If ' cwd ' is ' Nothing ' , relative paths are resolved with respect to the current working directory . If ' cwd ' is provided , it is implementation - dependent whether relative paths are resolved with respect to ' cwd ' or the current working directory , so absolute paths should be used to ensure portability .
^ How to determine stdin
^ Close all file descriptors except stdin , stdout and stderr in the new process ( on Windows , only works if std_in , std_out , and std_err are all Inherit ) . This implementation will call close on every fd from 3 to the maximum of open files , which can be slow for high maximum of open files .
On Windows this has no effect .
@since 1.2.0.0
@since 1.3.0.0
@since 1.3.0.0
^ Use to start the new process in a new session ; does nothing on other platforms .
@since 1.3.0.0
^ On Windows systems this flag indicates that we should wait for the entire process tree
@since 1.5.0.0
} deriving (Show, Eq)
data ProcRetHandles
= ProcRetHandles { hStdInput :: Maybe Handle
, hStdOutput :: Maybe Handle
, hStdError :: Maybe Handle
, procHandle :: ProcessHandle
}
data CmdSpec
= ShellCommand String
| RawCommand FilePath [String]
The ' FilePath ' argument names the executable , and is interpreted
* on Windows systems the Win32 @CreateProcess@ semantics is used .
finally the current @PATH@. An @.exe@ extension is added if the
for the Windows @SearchPath@ API .
deriving (Show, Eq)
| construct a ` ShellCommand ` from a string literal
@since 1.2.1.0
instance IsString CmdSpec where
fromString = ShellCommand
data StdStream
like @Handle@s created by @openFile@ ) .
deriving (Eq, Show)
data ProcessHandle__ = OpenHandle { phdlProcessHandle :: PHANDLE }
Windows platform . It represents [ Job
Objects]( / en - us / windows / win32 / procthread / job - objects ) .
| OpenExtHandle { phdlProcessHandle :: PHANDLE
, phdlJobHandle :: PHANDLE
}
| ClosedHandle ExitCode
| A handle to a process , which can be used to wait for termination
of the process using ' System . Process.waitForProcess ' .
None of the process - creation functions in this library wait for
termination : they all return a ' ProcessHandle ' which may be used
to wait for the process later .
On Windows a second wait method can be used to block for event
completion . This requires two handles . A process job handle and
a events handle to monitor .
of the process using 'System.Process.waitForProcess'.
None of the process-creation functions in this library wait for
termination: they all return a 'ProcessHandle' which may be used
to wait for the process later.
On Windows a second wait method can be used to block for event
completion. This requires two handles. A process job handle and
a events handle to monitor.
-}
data ProcessHandle
= ProcessHandle { phandle :: !(MVar ProcessHandle__)
, mb_delegate_ctlc :: !Bool
, waitpidLock :: !(MVar ())
}
withFilePathException :: FilePath -> IO a -> IO a
withFilePathException fpath act = handle mapEx act
where
mapEx ex = ioError (ioeSetFileName ex fpath)
modifyProcessHandle
:: ProcessHandle
-> (ProcessHandle__ -> IO (ProcessHandle__, a))
-> IO a
modifyProcessHandle (ProcessHandle m _ _) io = modifyMVar m io
withProcessHandle
:: ProcessHandle
-> (ProcessHandle__ -> IO a)
-> IO a
withProcessHandle (ProcessHandle m _ _) io = withMVar m io
fd_stdin, fd_stdout, fd_stderr :: FD
fd_stdin = 0
fd_stdout = 1
fd_stderr = 2
mbFd :: String -> FD -> StdStream -> IO FD
mbFd _ _std CreatePipe = return (-1)
mbFd _fun std Inherit = return std
mbFd _fn _std NoStream = return (-2)
mbFd fun _std (UseHandle hdl) =
withHandle fun hdl $ \Handle__{haDevice=dev,..} ->
case cast dev of
Just fd -> do
clear the O_NONBLOCK flag on this FD , if it is set , since
we 're exposing it externally ( see # 3316 )
fd' <- FD.setNonBlockingMode fd False
return (Handle__{haDevice=fd',..}, FD.fdFD fd')
Nothing ->
ioError (mkIOError illegalOperationErrorType
"createProcess" (Just hdl) Nothing
`ioeSetErrorString` "handle is not a file descriptor")
mbPipe :: StdStream -> Ptr FD -> IOMode -> IO (Maybe Handle)
mbPipe CreatePipe pfd mode = fmap Just (pfdToHandle pfd mode)
mbPipe _std _pfd _mode = return Nothing
pfdToHandle :: Ptr FD -> IOMode -> IO Handle
pfdToHandle pfd mode = do
fd <- peek pfd
let filepath = "fd:" ++ show fd
(fD,fd_type) <- FD.mkFD (fromIntegral fd) mode
avoid calling fstat ( )
see # 3316
#if __GLASGOW_HASKELL__ >= 704
enc <- getLocaleEncoding
#else
let enc = localeEncoding
#endif
#if defined(__IO_MANAGER_WINIO__)
unsigned type . -1 additionally is also the value for INVALID_HANDLE . However
mbHANDLE :: HANDLE -> StdStream -> IO HANDLE
mbHANDLE _std CreatePipe = return $ intPtrToPtr (-1)
mbHANDLE std Inherit = return std
mbHANDLE _std NoStream = return $ intPtrToPtr (-2)
mbHANDLE _std (UseHandle hdl) = handleToHANDLE hdl
mbPipeHANDLE :: StdStream -> Ptr HANDLE -> IOMode -> IO (Maybe Handle)
mbPipeHANDLE CreatePipe pfd mode =
do raw_handle <- peek pfd
let hwnd = fromHANDLE raw_handle :: Io NativeHandle
ident = "hwnd:" ++ show raw_handle
enc <- fmap Just getLocaleEncoding
Just <$> mkHandleFromHANDLE hwnd Stream ident mode enc
mbPipeHANDLE _std _pfd _mode = return Nothing
#endif
|
5b7c42b2faad8efeb1c18ca3c3ab698eb260c746308db2e8d39ea17626a4e592 | Haskell-Things/ImplicitCAD | ArgParser.hs | {- ORMOLU_DISABLE -}
Implicit CAD . Copyright ( C ) 2011 , ( )
Copyright ( C ) 2016 , ( )
-- Released under the GNU AGPLV3+, see LICENSE
-- FIXME: why is this required?
# LANGUAGE ScopedTypeVariables #
-- Allow us to use string literals for Text
{-# LANGUAGE OverloadedStrings #-}
module Graphics.Implicit.ExtOpenScad.Util.ArgParser (argument, doc, defaultTo, example, test, eulerCharacteristic, argMap) where
imported twice , once qualified . null from Data . Map conflicts with null from Prelude .
import Prelude(String, Maybe(Just, Nothing), ($), (<>), show, return, fmap, snd, filter, (.), fst, foldl1, not, (&&), (<$>), maybe)
import qualified Prelude as P (null)
import Graphics.Implicit.ExtOpenScad.Definitions (ArgParser(AP, APTest, APBranch, APTerminator, APFail, APExample), OVal (OError), TestInvariant(EulerCharacteristic), Symbol, VarLookup(VarLookup))
import Graphics.Implicit.ExtOpenScad.Util.OVal (fromOObj, toOObj, OTypeMirror)
import Graphics.Implicit.Definitions(ℕ)
imported twice , once qualified . null from Data . Map conflicts with null from Prelude .
import Data.Map (fromList, lookup, delete)
import qualified Data.Map as DM (null)
import Data.Maybe (isNothing, fromJust, isJust)
import Data.Text.Lazy (Text, pack, unpack)
import Control.Arrow (first)
* ArgParser building functions
-- ** argument and combinators
-- | Builds an argparser for the type that is expected from it.
-- FIXME: make a version of this that accepts multiple symbol names, so we can have h= and height=
argument :: forall desiredType. (OTypeMirror desiredType) => Symbol -> ArgParser desiredType
argument name =
AP name Nothing "" $ \oObjVal -> do
let
val :: Maybe desiredType
val = fromOObj oObjVal
errmsg :: Text
errmsg = case oObjVal of
OError err -> "error in computing value for argument " <> pack (show name)
<> ": " <> err
_ -> "arg " <> pack (show oObjVal) <> " not compatible with " <> pack (show name)
maybe (APFail errmsg) APTerminator val
# INLINABLE argument #
-- | Inline documentation.
doc :: forall a. ArgParser a -> Text -> ArgParser a
doc (AP name defMaybeVal _ next) newDoc = AP name defMaybeVal newDoc next
doc _ _ = APFail "Impossible! doc"
-- | An inline default value.
defaultTo :: forall a. (OTypeMirror a) => ArgParser a -> a -> ArgParser a
defaultTo (AP name _ doc' next) newDefVal =
AP name (Just $ toOObj newDefVal) doc' next
defaultTo _ _ = APFail "Impossible! defaultTo"
-- | An inline example.
example :: Text -> ArgParser ()
example str = APExample str (return ())
-- | Inline test and combinators.
test :: Text -> ArgParser ()
test str = APTest str [] (return ())
eulerCharacteristic :: ArgParser a -> ℕ -> ArgParser a
eulerCharacteristic (APTest str tests child) χ =
APTest str (EulerCharacteristic χ : tests) child
eulerCharacteristic _ _ = APFail "Impossible! eulerCharacteristic"
* Tools for handeling ArgParsers
| Apply arguments to an ArgParser
argMap ::
[(Maybe Symbol, OVal)] -- ^ arguments
^ ArgParser to apply them to
-> (Maybe a, [String]) -- ^ (result, error messages)
argMap args = argMap2 unnamedArgs (VarLookup $ fromList namedArgs) where
unnamedArgs = snd <$> filter (isNothing . fst) args
namedArgs = first fromJust <$> filter (isJust . fst) args
argMap2 :: [OVal] -> VarLookup -> ArgParser a -> (Maybe a, [String])
argMap2 unnamedArgs namedArgs (APBranch branches) =
foldl1 merge solutions where
solutions = fmap (argMap2 unnamedArgs namedArgs) branches
merge :: forall a. (Maybe a, [String]) -> (Maybe a, [String]) -> (Maybe a, [String])
merge a@(Just _, []) _ = a
merge _ b@(Just _, []) = b
merge a@(Just _, _) _ = a
merge (Nothing, _) a = a
FIXME : do n't use delete directly here , wrap it in StateC.hs
-- FIXME: generate a warning.
argMap2 unnamedArgs (VarLookup namedArgs) (AP name fallback _ f) =
case lookup name namedArgs of
Just a -> argMap2
unnamedArgs
(VarLookup $ delete name namedArgs)
(f a)
Nothing -> case unnamedArgs of
x:xs -> argMap2 xs (VarLookup namedArgs) (f x)
[] -> case fallback of
Just b -> argMap2 [] (VarLookup namedArgs) (f b)
Nothing -> (Nothing, ["No value and no default for argument " <> show name])
FIXME : do n't use map.null here , wrap it in StateC.hs .
-- FIXME: generate a warning.
argMap2 a (VarLookup b) (APTerminator val) =
(Just val, ["Unused arguments" | not (P.null a && DM.null b)])
argMap2 _ _ (APFail err) = (Nothing, [unpack err])
argMap2 a b (APExample _ child) = argMap2 a b child
argMap2 a b (APTest _ _ child) = argMap2 a b child
| null | https://raw.githubusercontent.com/Haskell-Things/ImplicitCAD/87f2aee4b3c958d11e988022f512d065b812f6b0/Graphics/Implicit/ExtOpenScad/Util/ArgParser.hs | haskell | ORMOLU_DISABLE
Released under the GNU AGPLV3+, see LICENSE
FIXME: why is this required?
Allow us to use string literals for Text
# LANGUAGE OverloadedStrings #
** argument and combinators
| Builds an argparser for the type that is expected from it.
FIXME: make a version of this that accepts multiple symbol names, so we can have h= and height=
| Inline documentation.
| An inline default value.
| An inline example.
| Inline test and combinators.
^ arguments
^ (result, error messages)
FIXME: generate a warning.
FIXME: generate a warning. | Implicit CAD . Copyright ( C ) 2011 , ( )
Copyright ( C ) 2016 , ( )
# LANGUAGE ScopedTypeVariables #
module Graphics.Implicit.ExtOpenScad.Util.ArgParser (argument, doc, defaultTo, example, test, eulerCharacteristic, argMap) where
imported twice , once qualified . null from Data . Map conflicts with null from Prelude .
import Prelude(String, Maybe(Just, Nothing), ($), (<>), show, return, fmap, snd, filter, (.), fst, foldl1, not, (&&), (<$>), maybe)
import qualified Prelude as P (null)
import Graphics.Implicit.ExtOpenScad.Definitions (ArgParser(AP, APTest, APBranch, APTerminator, APFail, APExample), OVal (OError), TestInvariant(EulerCharacteristic), Symbol, VarLookup(VarLookup))
import Graphics.Implicit.ExtOpenScad.Util.OVal (fromOObj, toOObj, OTypeMirror)
import Graphics.Implicit.Definitions(ℕ)
imported twice , once qualified . null from Data . Map conflicts with null from Prelude .
import Data.Map (fromList, lookup, delete)
import qualified Data.Map as DM (null)
import Data.Maybe (isNothing, fromJust, isJust)
import Data.Text.Lazy (Text, pack, unpack)
import Control.Arrow (first)
* ArgParser building functions
argument :: forall desiredType. (OTypeMirror desiredType) => Symbol -> ArgParser desiredType
argument name =
AP name Nothing "" $ \oObjVal -> do
let
val :: Maybe desiredType
val = fromOObj oObjVal
errmsg :: Text
errmsg = case oObjVal of
OError err -> "error in computing value for argument " <> pack (show name)
<> ": " <> err
_ -> "arg " <> pack (show oObjVal) <> " not compatible with " <> pack (show name)
maybe (APFail errmsg) APTerminator val
# INLINABLE argument #
doc :: forall a. ArgParser a -> Text -> ArgParser a
doc (AP name defMaybeVal _ next) newDoc = AP name defMaybeVal newDoc next
doc _ _ = APFail "Impossible! doc"
defaultTo :: forall a. (OTypeMirror a) => ArgParser a -> a -> ArgParser a
defaultTo (AP name _ doc' next) newDefVal =
AP name (Just $ toOObj newDefVal) doc' next
defaultTo _ _ = APFail "Impossible! defaultTo"
example :: Text -> ArgParser ()
example str = APExample str (return ())
test :: Text -> ArgParser ()
test str = APTest str [] (return ())
eulerCharacteristic :: ArgParser a -> ℕ -> ArgParser a
eulerCharacteristic (APTest str tests child) χ =
APTest str (EulerCharacteristic χ : tests) child
eulerCharacteristic _ _ = APFail "Impossible! eulerCharacteristic"
* Tools for handeling ArgParsers
| Apply arguments to an ArgParser
argMap ::
^ ArgParser to apply them to
argMap args = argMap2 unnamedArgs (VarLookup $ fromList namedArgs) where
unnamedArgs = snd <$> filter (isNothing . fst) args
namedArgs = first fromJust <$> filter (isJust . fst) args
argMap2 :: [OVal] -> VarLookup -> ArgParser a -> (Maybe a, [String])
argMap2 unnamedArgs namedArgs (APBranch branches) =
foldl1 merge solutions where
solutions = fmap (argMap2 unnamedArgs namedArgs) branches
merge :: forall a. (Maybe a, [String]) -> (Maybe a, [String]) -> (Maybe a, [String])
merge a@(Just _, []) _ = a
merge _ b@(Just _, []) = b
merge a@(Just _, _) _ = a
merge (Nothing, _) a = a
FIXME : do n't use delete directly here , wrap it in StateC.hs
argMap2 unnamedArgs (VarLookup namedArgs) (AP name fallback _ f) =
case lookup name namedArgs of
Just a -> argMap2
unnamedArgs
(VarLookup $ delete name namedArgs)
(f a)
Nothing -> case unnamedArgs of
x:xs -> argMap2 xs (VarLookup namedArgs) (f x)
[] -> case fallback of
Just b -> argMap2 [] (VarLookup namedArgs) (f b)
Nothing -> (Nothing, ["No value and no default for argument " <> show name])
FIXME : do n't use map.null here , wrap it in StateC.hs .
argMap2 a (VarLookup b) (APTerminator val) =
(Just val, ["Unused arguments" | not (P.null a && DM.null b)])
argMap2 _ _ (APFail err) = (Nothing, [unpack err])
argMap2 a b (APExample _ child) = argMap2 a b child
argMap2 a b (APTest _ _ child) = argMap2 a b child
|
40e2265bd99ddae28b04100879fe74aae74d513124c8dc6f67631bf088a5d255 | camsaul/toucan2 | select.clj | (ns toucan2.select
"Implementation of [[select]] and variations.
The args spec used by [[select]] lives in [[toucan2.query]], specifically `:toucan2.query/default-args`.
Code for building Honey SQL for a SELECT lives in [[toucan2.map-backend.honeysql2]].
### Functions that return primary keys
Functions that return primary keys such as [[select-pks-set]] determine which primary keys to return by
calling [[toucan2.model/select-pks-fn]], which is based on the model's implementation
of [[toucan2.model/primary-keys]]. Models with just a single primary key column will return primary keys 'unwrapped',
i.e., the values of that column will be returned directly. Models with compound primary keys (i.e., primary keys
consisting of more than one column) will be returned in vectors as if by calling `juxt`.
```clj
A model with a one - column primary key , : i d
(t2/select-pks-vec :models/venues :category \"bar\")
= > [ 1 2 ]
;; A model with a compound primary key, [:id :name]
(t2/select-pks-vec :models/venues.compound-key :category \"bar\")
= > [ [ 1 \"Tempest\ " ] [ 2 \"Ho 's Tavern\ " ] ]
```"
(:refer-clojure :exclude [count])
(:require
[clojure.spec.alpha :as s]
[toucan2.log :as log]
[toucan2.model :as model]
[toucan2.pipeline :as pipeline]
[toucan2.types :as types]))
(comment s/keep-me
types/keep-me)
(defn reducible-select
"Like [[select]], but returns an `IReduceInit`."
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/reducible-unparsed :toucan.query-type/select.instances unparsed-args))
(defn select
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/transduce-unparsed-with-default-rf :toucan.query-type/select.instances unparsed-args))
(defn select-one
"Like [[select]], but only fetches a single row, and returns only that row."
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(let [query-type :toucan.query-type/select.instances
rf (pipeline/default-rf query-type)
xform (pipeline/first-result-xform-fn query-type)]
(pipeline/transduce-unparsed (xform rf) query-type unparsed-args)))
(defn select-fn-reducible
"Like [[reducible-select]], but returns a reducible sequence of results of `(f row)`."
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(eduction
(map f)
(pipeline/reducible-unparsed :toucan.query-type/select.instances.fns unparsed-args)))
(defn select-fn-set
"Like [[select]], but returns a *set* of values of `(f instance)` for the results. Returns `nil` if the set is empty.
```clj
(t2/select-fn-set (comp str/upper-case :category) :models/venues :category \"bar\")
;; =>
#{\"BAR\"}
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(let [rf (pipeline/with-init conj #{})
xform (map f)]
(not-empty (pipeline/transduce-unparsed (xform rf) :toucan.query-type/select.instances.fns unparsed-args))))
(defn select-fn-vec
"Like [[select]], but returns a *vector* of values of `(f instance)` for the results. Returns `nil` if the vector is
empty.
```clj
(t2/select-fn-vec (comp str/upper-case :category) :models/venues :category \"bar\")
;; =>
[\"BAR\" \"BAR\"]
```
NOTE: If your query does not specify an `:order-by` clause (or equivalent), the results are like indeterminate. Keep
this in mind!"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(let [rf (pipeline/with-init conj [])
xform (map f)]
(not-empty (pipeline/transduce-unparsed (xform rf) :toucan.query-type/select.instances.fns unparsed-args))))
(defn select-one-fn
"Like [[select-one]], but applies `f` to the result.
```clj
(t2/select-one-fn :id :models/people :name \"Cam\")
= > 1
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(let [query-type :toucan.query-type/select.instances.fns
rf (pipeline/with-init conj [])
xform (comp (map f)
(pipeline/first-result-xform-fn query-type))]
(pipeline/transduce-unparsed (xform rf) query-type unparsed-args)))
(defn select-pks-reducible
"Returns a reducible sequence of all primary keys
```clj
(into [] (t2/select-pks-reducible :models/venues :category \"bar\"))
= > [ 1 2 ]
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-fn-reducible (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-pks-set
"Returns a *set* of all primary keys (as determined by [[toucan2.model/primary-keys]]
and [[toucan2.model/select-pks-fn]]) of instances matching the query. Models with just a single primary key columns
will be 'unwrapped' (i.e., the values of that column will be returned); models with compound primary keys (i.e., more
than one column) will be returned in vectors as if by calling `juxt`.
```clj
(t2/select-pks-set :models/venues :category \"bar\")
= > # { 1 2 }
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-fn-set (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-pks-vec
"Returns a *vector* of all primary keys (as determined by [[toucan2.model/primary-keys]]
and [[toucan2.model/select-pks-fn]]) of instances matching the query. Models with just a single primary key columns
will be 'unwrapped' (i.e., the values of that column will be returned); models with compound primary keys (i.e., more
than one column) will be returned in vectors as if by calling `juxt`.
```clj
(t2/select-pks-vec :models/venues :category \"bar\")
= > [ 1 2 ]
```
NOTE: If your query does not specify an `:order-by` clause (or equivalent), the results are like indeterminate. Keep
this in mind!"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-fn-vec (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-one-pk
"Return the primary key of the first row matching the query. Models with just a single primary key columns will be
models with compound primary keys ( i.e. , more than one
column) will be returned in vectors as if by calling `juxt`.
```clj
(t2/select-one-pk :models/people :name \"Cam\")
= > 1
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-one-fn (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-fn->fn
"Return a map of `(f1 instance)` -> `(f2 instance)` for instances matching the query.
```clj
(t2/select-fn->fn :id (comp str/upper-case :name) :models/people)
= > { 1 \"CAM\ " , 2 \"SAM\ " , 3 \"PAM\ " , 4 \"TAM\ " }
```"
{:arglists '([f1 f2 modelable-columns & kv-args? query?]
[f1 f2 :conn connectable modelable-columns & kv-args? query?])}
[f1 f2 & unparsed-args]
(let [rf (pipeline/with-init conj {})
xform (map (juxt f1 f2))]
(pipeline/transduce-unparsed (xform rf) :toucan.query-type/select.instances unparsed-args)))
(defn select-fn->pk
"The inverse of [[select-pk->fn]]. Return a map of `(f instance)` -> *primary key* for instances matching the query.
```clj
(t2/select-fn->pk (comp str/upper-case :name) :models/people)
= > { \"CAM\ " 1 , \"SAM\ " 2 , \"PAM\ " 3 , \"TAM\ " 4 }
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f modelable & args]
(let [pks-fn (model/select-pks-fn modelable)]
(apply select-fn->fn f pks-fn modelable args)))
(defn select-pk->fn
"The inverse of [[select-fn->pk]]. Return a map of *primary key* -> `(f instance)` for instances matching the query.
```clj
(t2/select-pk->fn (comp str/upper-case :name) :models/people)
= > { 1 \"CAM\ " , 2 \"SAM\ " , 3 \"PAM\ " , 4 \"TAM\ " }
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f modelable & args]
(let [pks-fn (model/select-pks-fn modelable)]
(apply select-fn->fn pks-fn f modelable args)))
(defn- count-rf []
(let [logged-warning? (atom false)
log-warning (fn []
(when-not @logged-warning?
(log/warnf :results "Warning: inefficient count query. See documentation for toucan2.select/count.")
(reset! logged-warning? true)))]
(fn count-rf*
([] 0)
([acc] acc)
([acc row]
(if (:count row)
(+ acc (:count row))
(do (log-warning)
(inc acc)))))))
(defn count
"Like [[select]], but returns the number of rows that match in an efficient way.
### Implementation note:
The default Honey SQL 2 map query compilation backend builds an efficient
```sql
SELECT count(*) AS \"count\" FROM ...
```
query. Custom query compilation backends should do the equivalent by implementing [[toucan2.pipeline/build]] for the
query type `:toucan.query-type/select.count` and build a query that returns the key `:count`, If an efficient
implementation does not exist, this will fall back to simply counting all matching rows."
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/transduce-unparsed (count-rf) :toucan.query-type/select.count unparsed-args))
(defn- exists?-rf
([] false)
([acc] acc)
([_acc row]
(if (contains? row :exists)
(let [exists (:exists row)
result (if (integer? exists)
(pos? exists)
(boolean exists))]
(if (true? result)
(reduced true)
false))
(do
(log/warnf :results "Warning: inefficient exists? query. See documentation for toucan2.select/exists?.")
(reduced true)))))
(defn exists?
"Like [[select]], but returns whether or not *any* rows match in an efficient way.
### Implementation note:
The default Honey SQL 2 map query compilation backend builds an efficient
```sql
SELECT exists(SELECT 1 FROM ... WHERE ...) AS exists
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/transduce-unparsed exists?-rf :toucan.query-type/select.exists unparsed-args))
| null | https://raw.githubusercontent.com/camsaul/toucan2/ac4bf71dc5ae34207d08197ec269af5043a657a1/src/toucan2/select.clj | clojure | A model with a compound primary key, [:id :name]
=>
=>
models with compound primary keys (i.e., more
models with compound primary keys (i.e., more | (ns toucan2.select
"Implementation of [[select]] and variations.
The args spec used by [[select]] lives in [[toucan2.query]], specifically `:toucan2.query/default-args`.
Code for building Honey SQL for a SELECT lives in [[toucan2.map-backend.honeysql2]].
### Functions that return primary keys
Functions that return primary keys such as [[select-pks-set]] determine which primary keys to return by
calling [[toucan2.model/select-pks-fn]], which is based on the model's implementation
of [[toucan2.model/primary-keys]]. Models with just a single primary key column will return primary keys 'unwrapped',
i.e., the values of that column will be returned directly. Models with compound primary keys (i.e., primary keys
consisting of more than one column) will be returned in vectors as if by calling `juxt`.
```clj
A model with a one - column primary key , : i d
(t2/select-pks-vec :models/venues :category \"bar\")
= > [ 1 2 ]
(t2/select-pks-vec :models/venues.compound-key :category \"bar\")
= > [ [ 1 \"Tempest\ " ] [ 2 \"Ho 's Tavern\ " ] ]
```"
(:refer-clojure :exclude [count])
(:require
[clojure.spec.alpha :as s]
[toucan2.log :as log]
[toucan2.model :as model]
[toucan2.pipeline :as pipeline]
[toucan2.types :as types]))
(comment s/keep-me
types/keep-me)
(defn reducible-select
"Like [[select]], but returns an `IReduceInit`."
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/reducible-unparsed :toucan.query-type/select.instances unparsed-args))
(defn select
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/transduce-unparsed-with-default-rf :toucan.query-type/select.instances unparsed-args))
(defn select-one
"Like [[select]], but only fetches a single row, and returns only that row."
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(let [query-type :toucan.query-type/select.instances
rf (pipeline/default-rf query-type)
xform (pipeline/first-result-xform-fn query-type)]
(pipeline/transduce-unparsed (xform rf) query-type unparsed-args)))
(defn select-fn-reducible
"Like [[reducible-select]], but returns a reducible sequence of results of `(f row)`."
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(eduction
(map f)
(pipeline/reducible-unparsed :toucan.query-type/select.instances.fns unparsed-args)))
(defn select-fn-set
"Like [[select]], but returns a *set* of values of `(f instance)` for the results. Returns `nil` if the set is empty.
```clj
(t2/select-fn-set (comp str/upper-case :category) :models/venues :category \"bar\")
#{\"BAR\"}
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(let [rf (pipeline/with-init conj #{})
xform (map f)]
(not-empty (pipeline/transduce-unparsed (xform rf) :toucan.query-type/select.instances.fns unparsed-args))))
(defn select-fn-vec
"Like [[select]], but returns a *vector* of values of `(f instance)` for the results. Returns `nil` if the vector is
empty.
```clj
(t2/select-fn-vec (comp str/upper-case :category) :models/venues :category \"bar\")
[\"BAR\" \"BAR\"]
```
NOTE: If your query does not specify an `:order-by` clause (or equivalent), the results are like indeterminate. Keep
this in mind!"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(let [rf (pipeline/with-init conj [])
xform (map f)]
(not-empty (pipeline/transduce-unparsed (xform rf) :toucan.query-type/select.instances.fns unparsed-args))))
(defn select-one-fn
"Like [[select-one]], but applies `f` to the result.
```clj
(t2/select-one-fn :id :models/people :name \"Cam\")
= > 1
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f & unparsed-args]
(let [query-type :toucan.query-type/select.instances.fns
rf (pipeline/with-init conj [])
xform (comp (map f)
(pipeline/first-result-xform-fn query-type))]
(pipeline/transduce-unparsed (xform rf) query-type unparsed-args)))
(defn select-pks-reducible
"Returns a reducible sequence of all primary keys
```clj
(into [] (t2/select-pks-reducible :models/venues :category \"bar\"))
= > [ 1 2 ]
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-fn-reducible (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-pks-set
"Returns a *set* of all primary keys (as determined by [[toucan2.model/primary-keys]]
and [[toucan2.model/select-pks-fn]]) of instances matching the query. Models with just a single primary key columns
than one column) will be returned in vectors as if by calling `juxt`.
```clj
(t2/select-pks-set :models/venues :category \"bar\")
= > # { 1 2 }
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-fn-set (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-pks-vec
"Returns a *vector* of all primary keys (as determined by [[toucan2.model/primary-keys]]
and [[toucan2.model/select-pks-fn]]) of instances matching the query. Models with just a single primary key columns
than one column) will be returned in vectors as if by calling `juxt`.
```clj
(t2/select-pks-vec :models/venues :category \"bar\")
= > [ 1 2 ]
```
NOTE: If your query does not specify an `:order-by` clause (or equivalent), the results are like indeterminate. Keep
this in mind!"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-fn-vec (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-one-pk
"Return the primary key of the first row matching the query. Models with just a single primary key columns will be
models with compound primary keys ( i.e. , more than one
column) will be returned in vectors as if by calling `juxt`.
```clj
(t2/select-one-pk :models/people :name \"Cam\")
= > 1
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[modelable & unparsed-args]
(apply select-one-fn (model/select-pks-fn modelable) modelable unparsed-args))
(defn select-fn->fn
"Return a map of `(f1 instance)` -> `(f2 instance)` for instances matching the query.
```clj
(t2/select-fn->fn :id (comp str/upper-case :name) :models/people)
= > { 1 \"CAM\ " , 2 \"SAM\ " , 3 \"PAM\ " , 4 \"TAM\ " }
```"
{:arglists '([f1 f2 modelable-columns & kv-args? query?]
[f1 f2 :conn connectable modelable-columns & kv-args? query?])}
[f1 f2 & unparsed-args]
(let [rf (pipeline/with-init conj {})
xform (map (juxt f1 f2))]
(pipeline/transduce-unparsed (xform rf) :toucan.query-type/select.instances unparsed-args)))
(defn select-fn->pk
"The inverse of [[select-pk->fn]]. Return a map of `(f instance)` -> *primary key* for instances matching the query.
```clj
(t2/select-fn->pk (comp str/upper-case :name) :models/people)
= > { \"CAM\ " 1 , \"SAM\ " 2 , \"PAM\ " 3 , \"TAM\ " 4 }
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f modelable & args]
(let [pks-fn (model/select-pks-fn modelable)]
(apply select-fn->fn f pks-fn modelable args)))
(defn select-pk->fn
"The inverse of [[select-fn->pk]]. Return a map of *primary key* -> `(f instance)` for instances matching the query.
```clj
(t2/select-pk->fn (comp str/upper-case :name) :models/people)
= > { 1 \"CAM\ " , 2 \"SAM\ " , 3 \"PAM\ " , 4 \"TAM\ " }
```"
{:arglists '([f modelable-columns & kv-args? query?]
[f :conn connectable modelable-columns & kv-args? query?])}
[f modelable & args]
(let [pks-fn (model/select-pks-fn modelable)]
(apply select-fn->fn pks-fn f modelable args)))
(defn- count-rf []
(let [logged-warning? (atom false)
log-warning (fn []
(when-not @logged-warning?
(log/warnf :results "Warning: inefficient count query. See documentation for toucan2.select/count.")
(reset! logged-warning? true)))]
(fn count-rf*
([] 0)
([acc] acc)
([acc row]
(if (:count row)
(+ acc (:count row))
(do (log-warning)
(inc acc)))))))
(defn count
"Like [[select]], but returns the number of rows that match in an efficient way.
### Implementation note:
The default Honey SQL 2 map query compilation backend builds an efficient
```sql
SELECT count(*) AS \"count\" FROM ...
```
query. Custom query compilation backends should do the equivalent by implementing [[toucan2.pipeline/build]] for the
query type `:toucan.query-type/select.count` and build a query that returns the key `:count`, If an efficient
implementation does not exist, this will fall back to simply counting all matching rows."
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/transduce-unparsed (count-rf) :toucan.query-type/select.count unparsed-args))
(defn- exists?-rf
([] false)
([acc] acc)
([_acc row]
(if (contains? row :exists)
(let [exists (:exists row)
result (if (integer? exists)
(pos? exists)
(boolean exists))]
(if (true? result)
(reduced true)
false))
(do
(log/warnf :results "Warning: inefficient exists? query. See documentation for toucan2.select/exists?.")
(reduced true)))))
(defn exists?
"Like [[select]], but returns whether or not *any* rows match in an efficient way.
### Implementation note:
The default Honey SQL 2 map query compilation backend builds an efficient
```sql
SELECT exists(SELECT 1 FROM ... WHERE ...) AS exists
```"
{:arglists '([modelable-columns & kv-args? query?]
[:conn connectable modelable-columns & kv-args? query?])}
[& unparsed-args]
(pipeline/transduce-unparsed exists?-rf :toucan.query-type/select.exists unparsed-args))
|
3dc96dfee632f5b64a6e3405ee9ea0ed5219b5e3c094cfcf45485cf785b9a97e | VisionsGlobalEmpowerment/webchange | views.cljs | (ns webchange.admin.pages.class-profile.views
(:require
[re-frame.core :as re-frame]
[webchange.admin.pages.class-profile.state :as state]
[webchange.admin.pages.class-profile.course-assign.views :refer [assign-class-course]]
[webchange.admin.pages.class-profile.students-add.views :refer [class-students-add]]
[webchange.admin.pages.class-profile.students-list.views :refer [class-students-list]]
[webchange.admin.pages.class-profile.teacher-edit.views :refer [class-teacher-edit]]
[webchange.admin.pages.class-profile.teachers-add.views :refer [class-teachers-add]]
[webchange.admin.pages.class-profile.teachers-list.views :refer [class-teachers-list]]
[webchange.admin.widgets.class-form.views :refer [class-edit-form]]
[webchange.admin.widgets.no-data.views :refer [no-data]]
[webchange.admin.widgets.page.counter.views :refer [counter]]
[webchange.admin.widgets.page.views :as page]))
(defn- class-counter
[]
(let [{:keys [students teachers]} @(re-frame/subscribe [::state/class-stats])
courses @(re-frame/subscribe [::state/school-courses-number])
class-course @(re-frame/subscribe [::state/class-course])
readonly? @(re-frame/subscribe [::state/readonly?])
handle-add-student-click #(re-frame/dispatch [::state/open-add-student-form])
handle-manage-students-click #(re-frame/dispatch [::state/open-students-list])
handle-add-teacher-click #(re-frame/dispatch [::state/open-add-teacher-form])
handle-manage-teachers-click #(re-frame/dispatch [::state/open-teachers-list])
handle-manage-courses-click #(re-frame/dispatch [::state/open-assign-course-form])
handle-student-activities-click #(re-frame/dispatch [::state/open-students-activities])
add-button-props {:color "blue-1"
:chip "plus"
:chip-color "yellow-1"}]
[counter {:data [{:text "Teachers"
:icon "teachers"
:counter teachers
:actions [{:text "Manage Teachers"
:on-click handle-manage-teachers-click}
(when-not readonly?
(merge add-button-props
{:text "Add Teacher"
:on-click handle-add-teacher-click}))]}
{:text "Students"
:icon "students"
:counter students
:actions [{:text "Manage Students"
:on-click handle-manage-students-click}
(when-not readonly?
(merge add-button-props
{:text "Add Student"
:on-click handle-add-student-click}))]}
{:text "Students Activities"
:icon "games"
:counter students
:background "blue-2"
:actions [{:text "Students Activities"
:on-click handle-student-activities-click}]}
{:text (:name class-course)
:icon "courses"
:background "green-2"
:counter courses
:actions (when-not readonly?
[{:text "Manage Courses"
:on-click handle-manage-courses-click}])}]}]))
(defn- statistics
[]
[page/block {:title "Statistics"
:icon "statistics"}
[no-data]])
(defn- side-bar-class-form
[{:keys [class-id school-id]}]
(let [form-editable? @(re-frame/subscribe [::state/form-editable?])
readonly? @(re-frame/subscribe [::state/readonly?])
handle-edit-click #(re-frame/dispatch [::state/set-form-editable true])
handle-cancel-click #(re-frame/dispatch [::state/handle-class-edit-cancel])
handle-delete-click #(re-frame/dispatch [::state/handle-class-deleted])
handle-data-save #(re-frame/dispatch [::state/update-class-data %])]
[page/side-bar {:title "Class Info"
:icon "info"
:focused? form-editable?
:actions (when-not readonly?
(cond-> []
form-editable? (conj {:icon "close"
:on-click handle-cancel-click})
(not form-editable?) (conj {:icon "edit"
:on-click handle-edit-click})))}
[class-edit-form {:class-id class-id
:school-id school-id
:editable? form-editable?
:on-save handle-data-save
:on-cancel handle-cancel-click
:on-delete handle-delete-click}]]))
(defn- side-bar
[side-bar-props]
(let [{:keys [component props]} @(re-frame/subscribe [::state/side-bar])
props (merge side-bar-props props)]
(case component
:assign-course [assign-class-course props]
:class-form [side-bar-class-form props]
:students-add [class-students-add props]
:students-list [class-students-list props]
:teacher-edit [class-teacher-edit props]
:teachers-add [class-teachers-add props]
:teachers-list [class-teachers-list props]
nil)))
(defn page
[props]
(re-frame/dispatch [::state/init props])
(fn [props]
[page/page
[page/content {:title "Class Profile"
:icon "classes"}
[class-counter]
[statistics]]
[side-bar props]]))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/2b14cfa0b116034312a382763e6aebd67e2f25a7/src/cljs/webchange/admin/pages/class_profile/views.cljs | clojure | (ns webchange.admin.pages.class-profile.views
(:require
[re-frame.core :as re-frame]
[webchange.admin.pages.class-profile.state :as state]
[webchange.admin.pages.class-profile.course-assign.views :refer [assign-class-course]]
[webchange.admin.pages.class-profile.students-add.views :refer [class-students-add]]
[webchange.admin.pages.class-profile.students-list.views :refer [class-students-list]]
[webchange.admin.pages.class-profile.teacher-edit.views :refer [class-teacher-edit]]
[webchange.admin.pages.class-profile.teachers-add.views :refer [class-teachers-add]]
[webchange.admin.pages.class-profile.teachers-list.views :refer [class-teachers-list]]
[webchange.admin.widgets.class-form.views :refer [class-edit-form]]
[webchange.admin.widgets.no-data.views :refer [no-data]]
[webchange.admin.widgets.page.counter.views :refer [counter]]
[webchange.admin.widgets.page.views :as page]))
(defn- class-counter
[]
(let [{:keys [students teachers]} @(re-frame/subscribe [::state/class-stats])
courses @(re-frame/subscribe [::state/school-courses-number])
class-course @(re-frame/subscribe [::state/class-course])
readonly? @(re-frame/subscribe [::state/readonly?])
handle-add-student-click #(re-frame/dispatch [::state/open-add-student-form])
handle-manage-students-click #(re-frame/dispatch [::state/open-students-list])
handle-add-teacher-click #(re-frame/dispatch [::state/open-add-teacher-form])
handle-manage-teachers-click #(re-frame/dispatch [::state/open-teachers-list])
handle-manage-courses-click #(re-frame/dispatch [::state/open-assign-course-form])
handle-student-activities-click #(re-frame/dispatch [::state/open-students-activities])
add-button-props {:color "blue-1"
:chip "plus"
:chip-color "yellow-1"}]
[counter {:data [{:text "Teachers"
:icon "teachers"
:counter teachers
:actions [{:text "Manage Teachers"
:on-click handle-manage-teachers-click}
(when-not readonly?
(merge add-button-props
{:text "Add Teacher"
:on-click handle-add-teacher-click}))]}
{:text "Students"
:icon "students"
:counter students
:actions [{:text "Manage Students"
:on-click handle-manage-students-click}
(when-not readonly?
(merge add-button-props
{:text "Add Student"
:on-click handle-add-student-click}))]}
{:text "Students Activities"
:icon "games"
:counter students
:background "blue-2"
:actions [{:text "Students Activities"
:on-click handle-student-activities-click}]}
{:text (:name class-course)
:icon "courses"
:background "green-2"
:counter courses
:actions (when-not readonly?
[{:text "Manage Courses"
:on-click handle-manage-courses-click}])}]}]))
(defn- statistics
[]
[page/block {:title "Statistics"
:icon "statistics"}
[no-data]])
(defn- side-bar-class-form
[{:keys [class-id school-id]}]
(let [form-editable? @(re-frame/subscribe [::state/form-editable?])
readonly? @(re-frame/subscribe [::state/readonly?])
handle-edit-click #(re-frame/dispatch [::state/set-form-editable true])
handle-cancel-click #(re-frame/dispatch [::state/handle-class-edit-cancel])
handle-delete-click #(re-frame/dispatch [::state/handle-class-deleted])
handle-data-save #(re-frame/dispatch [::state/update-class-data %])]
[page/side-bar {:title "Class Info"
:icon "info"
:focused? form-editable?
:actions (when-not readonly?
(cond-> []
form-editable? (conj {:icon "close"
:on-click handle-cancel-click})
(not form-editable?) (conj {:icon "edit"
:on-click handle-edit-click})))}
[class-edit-form {:class-id class-id
:school-id school-id
:editable? form-editable?
:on-save handle-data-save
:on-cancel handle-cancel-click
:on-delete handle-delete-click}]]))
(defn- side-bar
[side-bar-props]
(let [{:keys [component props]} @(re-frame/subscribe [::state/side-bar])
props (merge side-bar-props props)]
(case component
:assign-course [assign-class-course props]
:class-form [side-bar-class-form props]
:students-add [class-students-add props]
:students-list [class-students-list props]
:teacher-edit [class-teacher-edit props]
:teachers-add [class-teachers-add props]
:teachers-list [class-teachers-list props]
nil)))
(defn page
[props]
(re-frame/dispatch [::state/init props])
(fn [props]
[page/page
[page/content {:title "Class Profile"
:icon "classes"}
[class-counter]
[statistics]]
[side-bar props]]))
| |
9fd360666b7da7bc8dfeb8c3a2b600e9158f429ac3e8c1f010c35232674d95f5 | stabilized/clojurescript | closure.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns cljs.closure
"Compile ClojureScript to JavaScript with optimizations from Google
Closure Compiler producing runnable JavaScript.
The Closure Compiler (compiler.jar) must be on the classpath.
Use the 'build' function for end-to-end compilation.
build = compile -> add-dependencies -> optimize -> output
Two protocols are defined: IJavaScript and Compilable. The
Compilable protocol is satisfied by something which can return one
or more IJavaScripts.
With IJavaScript objects in hand, calling add-dependencies will
produce a sequence of IJavaScript objects which includes all
required dependencies from the Closure library and ClojureScript,
in dependency order. This function replaces the closurebuilder
tool.
The optimize function converts one or more IJavaScripts into a
single string of JavaScript source code using the Closure Compiler
API.
The produced output is either a single string of optimized
JavaScript or a deps file for use during development.
"
(:refer-clojure :exclude [compile])
(:require [cljs.util :as util]
[cljs.compiler :as comp]
[cljs.analyzer :as ana]
[cljs.source-map :as sm]
[cljs.env :as env]
[cljs.js-deps :as deps]
[clojure.java.io :as io]
[clojure.string :as string]
[clojure.data.json :as json])
(:import [java.io File BufferedInputStream]
[java.net URL]
[java.util.logging Level]
[java.util List Random]
[com.google.javascript.jscomp CompilerOptions CompilationLevel
CompilerOptions$LanguageMode SourceMap$Format
SourceMap$DetailLevel ClosureCodingConvention SourceFile
Result JSError CheckLevel DiagnosticGroups
CommandLineRunner AnonymousFunctionNamingPolicy]
[java.security MessageDigest]
[javax.xml.bind DatatypeConverter]
[java.nio.file Path Paths Files StandardWatchEventKinds WatchKey
WatchEvent FileVisitor FileVisitResult]
[com.sun.nio.file SensitivityWatchEventModifier]))
(def name-chars (map char (concat (range 48 57) (range 65 90) (range 97 122))))
(defn random-char []
(nth name-chars (.nextInt (Random.) (count name-chars))))
(defn random-string [length]
(apply str (take length (repeatedly random-char))))
;; Closure API
;; ===========
(defmulti js-source-file (fn [_ source] (class source)))
(defmethod js-source-file String [^String name ^String source]
(SourceFile/fromCode name source))
(defmethod js-source-file File [_ ^File source]
(SourceFile/fromFile source))
(defmethod js-source-file BufferedInputStream [^String name ^BufferedInputStream source]
(SourceFile/fromInputStream name source))
(defn set-options
"TODO: Add any other options that we would like to support."
[opts ^CompilerOptions compiler-options]
(when (contains? opts :pretty-print)
(set! (.prettyPrint compiler-options) (:pretty-print opts)))
(when (contains? opts :pseudo-names)
(set! (.generatePseudoNames compiler-options) (:pseudo-names opts)))
(when (contains? opts :anon-fn-naming-policy)
(let [policy (:anon-fn-naming-policy opts)]
(set! (.anonymousFunctionNaming compiler-options)
(case policy
:off AnonymousFunctionNamingPolicy/OFF
:unmapped AnonymousFunctionNamingPolicy/UNMAPPED
:mapped AnonymousFunctionNamingPolicy/MAPPED
(throw (IllegalArgumentException. (str "Invalid :anon-fn-naming-policy value " policy " - only :off, :unmapped, :mapped permitted")))))))
(when (contains? opts :language-in)
(case (:language-in opts)
:ecmascript5 (.setLanguageIn compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5)
:ecmascript5-strict (.setLanguageIn compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5_STRICT)
:ecmascript3 (.setLanguageIn compiler-options CompilerOptions$LanguageMode/ECMASCRIPT3)))
(when (contains? opts :language-out)
(case (:language-out opts)
:ecmascript5 (.setLanguageOut compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5)
:ecmascript5-strict (.setLanguageOut compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5_STRICT)
:ecmascript3 (.setLanguageOut compiler-options CompilerOptions$LanguageMode/ECMASCRIPT3)))
(when (contains? opts :print-input-delimiter)
(set! (.printInputDelimiter compiler-options)
(:print-input-delimiter opts))))
(def check-level
{:error CheckLevel/ERROR
:warning CheckLevel/WARNING
:off CheckLevel/OFF})
(def warning-types
{:access-controls DiagnosticGroups/ACCESS_CONTROLS
:ambiguous-function-decl DiagnosticGroups/AMBIGUOUS_FUNCTION_DECL
:debugger-statement-present DiagnosticGroups/DEBUGGER_STATEMENT_PRESENT
:check-regexp DiagnosticGroups/CHECK_REGEXP
:check-types DiagnosticGroups/CHECK_TYPES
:check-useless-code DiagnosticGroups/CHECK_USELESS_CODE
:check-variables DiagnosticGroups/CHECK_VARIABLES
:const DiagnosticGroups/CONST
:constant-property DiagnosticGroups/CONSTANT_PROPERTY
:deprecated DiagnosticGroups/DEPRECATED
:duplicate-message DiagnosticGroups/DUPLICATE_MESSAGE
:es5-strict DiagnosticGroups/ES5_STRICT
:externs-validation DiagnosticGroups/EXTERNS_VALIDATION
:fileoverview-jsdoc DiagnosticGroups/FILEOVERVIEW_JSDOC
:global-this DiagnosticGroups/GLOBAL_THIS
:internet-explorer-checks DiagnosticGroups/INTERNET_EXPLORER_CHECKS
:invalid-casts DiagnosticGroups/INVALID_CASTS
:missing-properties DiagnosticGroups/MISSING_PROPERTIES
:non-standard-jsdoc DiagnosticGroups/NON_STANDARD_JSDOC
:strict-module-dep-check DiagnosticGroups/STRICT_MODULE_DEP_CHECK
:tweaks DiagnosticGroups/TWEAKS
:undefined-names DiagnosticGroups/UNDEFINED_NAMES
:undefined-variables DiagnosticGroups/UNDEFINED_VARIABLES
:unknown-defines DiagnosticGroups/UNKNOWN_DEFINES
:visiblity DiagnosticGroups/VISIBILITY})
(defn ^CompilerOptions make-options
"Create a CompilerOptions object and set options from opts map."
[opts]
(let [level (case (:optimizations opts)
:advanced CompilationLevel/ADVANCED_OPTIMIZATIONS
:whitespace CompilationLevel/WHITESPACE_ONLY
:simple CompilationLevel/SIMPLE_OPTIMIZATIONS)
compiler-options (doto (CompilerOptions.)
(.setCodingConvention (ClosureCodingConvention.)))]
(doseq [[key val] (:closure-defines opts)]
(let [key (name key)]
(cond
(string? val) (.setDefineToStringLiteral compiler-options key val)
(integer? val) (.setDefineToIntegerLiteral compiler-options key val)
(float? val) (.setDefineToDoubleLiteral compiler-options key val)
(or (true? val)
(false? val)) (.setDefineToBooleanLiteral compiler-options key val)
:else (println "value for" key "must be string, int, float, or bool"))))
(doseq [[type level] (:closure-warnings opts)]
(. compiler-options
(setWarningLevel (type warning-types) (level check-level))))
(if-let [extra-annotations (:closure-extra-annotations opts)]
(. compiler-options (setExtraAnnotationNames (map name extra-annotations))))
(when (contains? opts :source-map)
(set! (.sourceMapOutputPath compiler-options)
(:source-map opts))
(set! (.sourceMapDetailLevel compiler-options)
SourceMap$DetailLevel/ALL)
(set! (.sourceMapFormat compiler-options)
SourceMap$Format/V3))
(do
(.setOptionsForCompilationLevel level compiler-options)
(set-options opts compiler-options)
compiler-options)))
(defn load-externs
"Externs are JavaScript files which contain empty definitions of
functions which will be provided by the environment. Any function in
an extern file will not be renamed during optimization.
Options may contain an :externs key with a list of file paths to
load. The :use-only-custom-externs flag may be used to indicate that
the default externs should be excluded."
[{:keys [externs use-only-custom-externs target ups-externs]}]
(let [filter-cp-js (fn [paths]
(for [p paths u (deps/find-js-classpath p)] u))
filter-js (fn [paths]
(for [p paths u (deps/find-js-resources p)] u))
add-target (fn [ext]
(cons (io/resource "cljs/externs.js")
(if (= :nodejs target)
(cons (io/resource "cljs/nodejs_externs.js")
(or ext []))
ext)))
load-js (fn [ext]
(map #(js-source-file (.getFile %) (slurp %)) ext))]
(let [js-sources (-> externs filter-js add-target load-js)
ups-sources (-> ups-externs filter-cp-js load-js)
all-sources (concat js-sources ups-sources)]
(if use-only-custom-externs
all-sources
(into all-sources (CommandLineRunner/getDefaultExterns))))))
(defn ^com.google.javascript.jscomp.Compiler make-closure-compiler []
(let [compiler (com.google.javascript.jscomp.Compiler.)]
(com.google.javascript.jscomp.Compiler/setLoggingLevel Level/WARNING)
compiler))
(defn report-failure [^Result result]
(let [errors (.errors result)
warnings (.warnings result)]
(doseq [next (seq errors)]
(println "ERROR:" (.toString ^JSError next)))
(doseq [next (seq warnings)]
(println "WARNING:" (.toString ^JSError next)))))
Protocols for IJavaScript and Compilable
;; ========================================
(defprotocol ISourceMap
(-source-url [this] "Return the CLJS source url")
(-source-map [this] "Return the CLJS compiler generated JS source mapping"))
(extend-protocol deps/IJavaScript
String
(-foreign? [this] false)
(-url [this] nil)
(-provides [this] (:provides (deps/parse-js-ns (string/split-lines this))))
(-requires [this] (:requires (deps/parse-js-ns (string/split-lines this))))
(-source [this] this)
clojure.lang.IPersistentMap
(-foreign? [this] (:foreign this))
(-url [this] (or (:url this)
(deps/to-url (:file this))))
(-provides [this] (map name (:provides this)))
(-requires [this] (map name (:requires this)))
(-source [this] (if-let [s (:source this)]
s (with-open [reader (io/reader (deps/-url this))]
(slurp reader)))))
(defrecord JavaScriptFile [foreign ^URL url ^URL source-url provides requires lines source-map]
deps/IJavaScript
(-foreign? [this] foreign)
(-url [this] url)
(-provides [this] provides)
(-requires [this] requires)
(-source [this]
(with-open [reader (io/reader url)]
(slurp reader)))
ISourceMap
(-source-url [this] source-url)
(-source-map [this] source-map))
(defn javascript-file
([foreign ^URL url provides requires]
(javascript-file foreign url nil provides requires nil nil))
([foreign ^URL url source-url provides requires lines source-map]
(assert (first provides) (str source-url " does not provide a namespace"))
(JavaScriptFile. foreign url source-url (map name provides) (map name requires) lines source-map)))
(defn map->javascript-file [m]
(javascript-file
(:foreign m)
(when-let [f (:file m)]
(deps/to-url f))
(when-let [sf (:source-file m)]
(deps/to-url sf))
(:provides m)
(:requires m)
(:lines m)
(:source-map m)))
(defn read-js
"Read a JavaScript file returning a map of file information."
[f]
(let [source (slurp f)
m (deps/parse-js-ns (string/split-lines source))]
(map->javascript-file (assoc m :file f))))
;; Compile
;; =======
(defprotocol Compilable
(-compile [this opts] "Returns one or more IJavaScripts."))
(defn compile-form-seq
"Compile a sequence of forms to a JavaScript source string."
[forms]
(comp/with-core-cljs nil
(fn []
(with-out-str
(binding [ana/*cljs-ns* 'cljs.user]
(doseq [form forms]
(comp/emit (ana/analyze (ana/empty-env) form))))))))
(defn compiled-file
"Given a map with at least a :file key, return a map with
{:file .. :provides .. :requires ..}.
Compiled files are cached so they will only be read once."
[m]
(let [path (.getPath (.toURL ^File (:file m)))
js (if (:provides m)
(map->javascript-file m)
(if-let [js (get-in @env/*compiler* [::compiled-cljs path])]
js
(read-js (:file m))))]
(swap! env/*compiler* update-in [::compiled-cljs] assoc path js)
js))
(defn compile
"Given a Compilable, compile it and return an IJavaScript."
[compilable opts]
(-compile compilable opts))
(defn compile-file
"Compile a single cljs file. If no output-file is specified, returns
a string of compiled JavaScript. With an output-file option, the
compiled JavaScript will written to this location and the function
returns a JavaScriptFile. In either case the return value satisfies
IJavaScript."
[^File file {:keys [output-file] :as opts}]
(if output-file
(let [out-file (io/file (util/output-directory opts) output-file)]
(compiled-file (comp/compile-file file out-file opts)))
(binding [ana/*cljs-file* (.getPath ^File file)]
(compile-form-seq (ana/forms-seq file)))))
(defn compile-dir
"Recursively compile all cljs files under the given source
directory. Return a list of JavaScriptFiles."
[^File src-dir opts]
(let [out-dir (util/output-directory opts)]
(map compiled-file
(comp/compile-root src-dir out-dir opts))))
(defn ^String path-from-jarfile
"Given the URL of a file within a jar, return the path of the file
from the root of the jar."
[^URL url]
(last (string/split (.getFile url) #"\.jar!/")))
(defn jar-file-to-disk
"Copy a file contained within a jar to disk. Return the created file."
[url out-dir]
(let [out-file (io/file out-dir (path-from-jarfile url))
content (with-open [reader (io/reader url)]
(slurp reader))]
(util/mkdirs out-file)
(spit out-file content)
out-file))
;; TODO: it would be nice if we could consolidate requires-compilation?
logic -
(defn compile-from-jar
"Compile a file from a jar."
[this {:keys [output-file] :as opts}]
(or (when output-file
(let [out-file (io/file (util/output-directory opts) output-file)]
(when (and (.exists out-file)
(= (util/compiled-by-version out-file)
(util/clojurescript-version)))
(compile-file
(io/file (util/output-directory opts)
(last (string/split (.getPath ^URL this) #"\.jar!/")))
opts))))
(let [file-on-disk (jar-file-to-disk this (util/output-directory opts))]
(-compile file-on-disk opts))))
(extend-protocol Compilable
File
(-compile [this opts]
(if (.isDirectory this)
(compile-dir this opts)
(compile-file this opts)))
URL
(-compile [this opts]
(case (.getProtocol this)
"file" (-compile (io/file this) opts)
"jar" (compile-from-jar this opts)))
clojure.lang.PersistentList
(-compile [this opts]
(compile-form-seq [this]))
String
(-compile [this opts] (-compile (io/file this) opts))
clojure.lang.PersistentVector
(-compile [this opts] (compile-form-seq this))
)
(comment
;; compile a file in memory
(-compile "samples/hello/src/hello/core.cljs" {})
;; compile a file to disk - see file @ 'out/clojure/set.js'
(-compile (io/resource "clojure/set.cljs") {:output-file "clojure/set.js"})
;; compile a project
(-compile (io/file "samples/hello/src") {})
;; compile a project with a custom output directory
(-compile (io/file "samples/hello/src") {:output-dir "my-output"})
;; compile a form
(-compile '(defn plus-one [x] (inc x)) {})
;; compile a vector of forms
(-compile '[(ns test.app (:require [goog.array :as array]))
(defn plus-one [x] (inc x))]
{})
)
(defn js-dependencies
"Given a sequence of Closure namespace strings, return the list of
all dependencies. The returned list includes all Google and
third-party library dependencies.
Third-party libraries are configured using the :libs option where
the value is a list of directories containing third-party
libraries."
[opts requires]
(loop [requires requires
visited (set requires)
deps #{}]
(if (seq requires)
(let [node (or (get (@env/*compiler* :js-dependency-index) (first requires))
(deps/find-classpath-lib (first requires)))
new-req (remove #(contains? visited %) (:requires node))]
(recur (into (rest requires) new-req)
(into visited new-req)
(conj deps node)))
(remove nil? deps))))
(comment
;; find dependencies
(js-dependencies {} ["goog.array"])
;; find dependencies in an external library
(js-dependencies {:libs ["closure/library/third_party/closure"]} ["goog.dom.query"])
)
(defn get-compiled-cljs
"Return an IJavaScript for this file. Compiled output will be
written to the working directory."
[opts {:keys [relative-path uri]}]
(let [js-file (comp/rename-to-js relative-path)]
(-compile uri (merge opts {:output-file js-file}))))
(defn cljs-source-for-namespace
"Returns a map containing :relative-path, :uri referring to the resource that
should contain the source for the given namespace name."
[ns]
(as-> (munge ns) %
(string/replace % \. \/)
(str % ".cljs")
{:relative-path % :uri (io/resource %)}))
(defn source-for-namespace
[ns compiler-env]
(let [ns-str (str (comp/munge ns))
path (string/replace ns-str \. \/)
relpath (str path ".cljs")]
(if-let [cljs-res (io/resource relpath)]
{:relative-path relpath :uri cljs-res}
(let [relpath (:file (get-in @compiler-env [:js-dependency-index ns-str]))]
(if-let [js-res (io/resource relpath)]
{:relative-path relpath :uri js-res}
(throw
(IllegalArgumentException. (str "Namespace " ns " does not exist"))))))))
(defn cljs-dependencies
"Given a list of all required namespaces, return a list of
IJavaScripts which are the cljs dependencies. The returned list will
not only include the explicitly required files but any transitive
dependencies as well. JavaScript files will be compiled to the
working directory if they do not already exist.
Only load dependencies from the classpath."
[opts requires]
(let [cljs-deps (fn [lib-names]
(->> (remove #(or ((@env/*compiler* :js-dependency-index) %)
(deps/find-classpath-lib %))
lib-names)
(map cljs-source-for-namespace)
(remove (comp nil? :uri))))]
(loop [required-files (cljs-deps requires)
visited (set required-files)
js-deps #{}]
(if (seq required-files)
(let [next-file (first required-files)
js (get-compiled-cljs opts next-file)
new-req (remove #(contains? visited %) (cljs-deps (deps/-requires js)))]
(recur (into (rest required-files) new-req)
(into visited new-req)
(conj js-deps js)))
(remove nil? js-deps)))))
(comment
only get
(cljs-dependencies {} ["goog.string" "cljs.core"])
get transitive
(cljs-dependencies {} ["clojure.string"])
;; don't get cljs.core twice
(cljs-dependencies {} ["cljs.core" "clojure.string"])
)
(defn add-dependencies
"Given one or more IJavaScript objects in dependency order, produce
a new sequence of IJavaScript objects which includes the input list
plus all dependencies in dependency order."
[opts & inputs]
(let [requires (mapcat deps/-requires inputs)
required-cljs (remove (set inputs) (cljs-dependencies opts requires))
required-js (js-dependencies opts (set (concat (mapcat deps/-requires required-cljs) requires)))
provided (mapcat deps/-provides (concat inputs required-cljs required-js))
unprovided (clojure.set/difference (set requires) (set provided) #{"constants-table"})]
(when (seq unprovided)
(ana/warning :unprovided @env/*compiler* {:unprovided (sort unprovided)}))
(cons
(javascript-file nil (deps/goog-resource "goog/base.js") ["goog"] nil)
(deps/dependency-order
(concat
(map
(fn [{:keys [foreign url file provides requires] :as js-map}]
(let [url (or url (io/resource file))]
(merge
(javascript-file foreign url provides requires)
js-map)))
required-js)
[(when (-> @env/*compiler* :opts :emit-constants)
(let [url (deps/to-url (str (util/output-directory opts) "/constants_table.js"))]
(javascript-file nil url url ["constants-table"] ["cljs.core"] nil nil)))]
required-cljs
inputs)))))
(defn preamble-from-paths [paths]
(when-let [missing (seq (remove io/resource paths))]
(ana/warning :preamble-missing @env/*compiler* {:missing (sort missing)}))
(let [resources (remove nil? (map io/resource paths))]
(str (string/join "\n" (map slurp resources)) "\n")))
(defn make-preamble [{:keys [target preamble hashbang]}]
(str (when (and (= :nodejs target) (not (false? hashbang)))
(str "#!" (or hashbang "/usr/bin/env node") "\n"))
(when preamble (preamble-from-paths preamble))))
(comment
;; add dependencies to literal js
(add-dependencies {} "goog.provide('test.app');\ngoog.require('cljs.core');")
(add-dependencies {} "goog.provide('test.app');\ngoog.require('goog.array');")
(add-dependencies {} (str "goog.provide('test.app');\n"
"goog.require('goog.array');\n"
"goog.require('clojure.set');"))
;; add dependencies with external lib
(add-dependencies {:libs ["closure/library/third_party/closure"]}
(str "goog.provide('test.app');\n"
"goog.require('goog.array');\n"
"goog.require('goog.dom.query');"))
;; add dependencies with foreign lib
(add-dependencies {:foreign-libs [{:file "samples/hello/src/hello/core.cljs"
:provides ["example.lib"]}]}
(str "goog.provide('test.app');\n"
"goog.require('example.lib');\n"))
add dependencies to a JavaScriptFile record
(add-dependencies {} (javascript-file false
(deps/to-url "samples/hello/src/hello/core.cljs")
["hello.core"]
["goog.array"]))
)
;; Optimize
;; ========
(defmulti javascript-name class)
(defmethod javascript-name URL [^URL url]
(if url (.getPath url) "cljs/user.js"))
(defmethod javascript-name String [s]
(if-let [name (first (deps/-provides s))] name "cljs/user.js"))
(defmethod javascript-name JavaScriptFile [js] (javascript-name (deps/-url js)))
(defn build-provides
"Given a vector of provides, builds required goog.provide statements"
[provides]
(apply str (map #(str "goog.provide('" % "');\n") provides)))
(defmethod js-source-file JavaScriptFile [_ js]
(when-let [url (deps/-url js)]
(js-source-file (javascript-name url) (io/input-stream url))))
(defn optimize
"Use the Closure Compiler to optimize one or more JavaScript files."
[opts & sources]
(let [closure-compiler (make-closure-compiler)
^List externs (load-externs opts)
compiler-options (make-options opts)
sources (if (= :whitespace (:optimizations opts))
(cons "var CLOSURE_NO_DEPS = true;" sources)
sources)
^List inputs (map #(js-source-file (javascript-name %) %) sources)
result ^Result (.compile closure-compiler externs inputs compiler-options)
preamble (make-preamble opts)
preamble-line-count (- (count (.split #"\r?\n" preamble -1)) 1)]
(if (.success result)
;; compiler.getSourceMap().reset()
(let [source (.toSource closure-compiler)]
(when-let [name (:source-map opts)]
(with-open [out (io/writer name)]
(.appendTo (.getSourceMap closure-compiler) out name))
(let [sm-json (-> (io/file name) slurp
(json/read-str :key-fn keyword))
closure-source-map (sm/decode-reverse sm-json)]
(loop [sources (seq sources)
relpaths {}
merged (sorted-map-by
(sm/source-compare
(remove nil?
(map (fn [source]
(if-let [^URL source-url (:source-url source)]
(.getPath source-url)
(if-let [^URL url (:url source)]
(.getPath url))))
sources))))]
(if sources
(let [source (first sources)]
(recur
(next sources)
(let [{:keys [provides source-url]} source]
(if (and provides source-url)
(assoc relpaths (.getPath ^URL source-url)
(util/ns->relpath (first provides)))
relpaths))
(if-let [url (:url source)]
(let [path (.getPath ^URL url)]
(if-let [compiled (get-in @env/*compiler* [::comp/compiled-cljs path])]
(if-let [source-url (:source-url source)]
(assoc merged (.getPath ^URL source-url)
(sm/merge-source-maps
(:source-map compiled)
(get closure-source-map path)))
merged)
(assoc merged path (get closure-source-map path))))
merged)))
(spit
(io/file name)
(sm/encode merged
{:preamble-line-count (+ preamble-line-count
(or (:foreign-deps-line-count opts) 0))
:lines (+ (:lineCount sm-json) preamble-line-count 2)
:file (:file sm-json)
:output-dir (util/output-directory opts)
:source-map (:source-map opts)
:source-map-path (:source-map-path opts)
:source-map-timestamp (:source-map-timestamp opts)
:source-map-pretty-print (:source-map-pretty-print opts)
:relpaths relpaths}))))))
source)
(report-failure result))))
(comment
optimize JavaScript strings
(optimize {:optimizations :whitespace} "var x = 3 + 2; alert(x);")
= > " var x=3 + 2;alert(x ) ; "
(optimize {:optimizations :simple} "var x = 3 + 2; alert(x);")
= > " var x=5;alert(x ) ; "
(optimize {:optimizations :advanced} "var x = 3 + 2; alert(x);")
;; => "alert(5);"
;; optimize a ClojureScript form
(optimize {:optimizations :simple} (-compile '(def x 3) {}))
;; optimize a project
(println (->> (-compile "samples/hello/src" {})
(apply add-dependencies {})
(apply optimize {:optimizations :simple :pretty-print true})))
)
;; Output
;; ======
;;
The result of a build is always a single string of JavaScript . The
;; build process may produce files on disk but a single string is
;; always output. What this string contains depends on whether the
;; input has been optimized or not. If the :output-to option is set
;; then this string will be written to the specified file. If not, it
;; will be returned.
;;
;; The :output-dir option can be used to set the working directory
;; where any files will be written to disk. By default this directory
;; is 'out'.
;;
;; If inputs are optimized then the output string will be the complete
;; application with all dependencies included.
;;
;; For unoptimized output, the string will be a Closure deps file
describing where the JavaScript files are on disk and their
;; dependencies. All JavaScript files will be located in the working
;; directory, including any dependencies from the Closure library.
;;
Unoptimized mode is faster because the Closure Compiler is not
;; run. It also makes debugging much simpler because each file is
;; loaded in its own script tag.
;;
;; When working with uncompiled files, you will need to add additional
script tags to the hosting HTML file : one which pulls in Closure
;; library's base.js and one which calls goog.require to load your
;; code. See samples/hello/hello-dev.html for an example.
(defn ^String path-relative-to
"Generate a string which is the path to the input IJavaScript relative
to the specified base file."
[^File base input]
(let [base-path (util/path-seq (.getCanonicalPath base))
input-path (util/path-seq (.getCanonicalPath (io/file ^URL (deps/-url input))))
count-base (count base-path)
common (count (take-while true? (map #(= %1 %2) base-path input-path)))
prefix (repeat (- count-base common 1) "..")]
(if (= count-base common)
(last input-path) ;; same file
(util/to-path (concat prefix (drop common input-path)) "/"))))
(defn add-dep-string
  "Return a goog.addDependency string for an input."
  [opts input]
  (let [base-js (io/file (util/output-directory opts) "goog" "base.js")
        quoted-ns-list (fn [nss]
                         ;; nil for an empty seq, otherwise 'a.b', 'c.d', ...
                         (when (seq nss)
                           (string/join ", " (map #(str "'" (comp/munge %) "'") nss))))]
    (str "goog.addDependency(\""
         (path-relative-to base-js input)
         "\", ["
         (quoted-ns-list (deps/-provides input))
         "], ["
         (quoted-ns-list (deps/-requires input))
         "]);")))
(defn deps-file
  "Return a deps file string for a sequence of inputs."
  [opts sources]
  ;; one goog.addDependency line per source, newline separated
  (string/join "\n" (map (fn [source] (add-dep-string opts source)) sources)))
(comment
(path-relative-to (io/file "out/goog/base.js") {:url (deps/to-url "out/cljs/core.js")})
(add-dep-string {} {:url (deps/to-url "out/cljs/core.js") :requires ["goog.string"] :provides ["cljs.core"]})
(deps-file {} [{:url (deps/to-url "out/cljs/core.js") :requires ["goog.string"] :provides ["cljs.core"]}])
)
(defn output-one-file
  "Deliver a single JavaScript string: return it when no :output-to is
  set, write it to the named file when :output-to is a string, and
  print it otherwise (e.g. :output-to :print)."
  [{:keys [output-to] :as opts} js]
  (if (nil? output-to)
    js
    (if (string? output-to)
      (spit output-to js)
      (println js))))
(defn output-deps-file
  "Write (or return) the goog dependency file for sources, honoring :output-to."
  [opts sources]
  (output-one-file opts (deps-file opts sources)))

(defn output-main-file
  "Emit the entry-point bootstrap script for unoptimized builds. For the
  :nodejs target this loads the Node bootstrap and deps file via require;
  otherwise it document.writes script tags that load goog/base.js, the
  deps file, and finally goog.require the :main namespace."
  [opts]
  ;; script paths are resolved relative to :asset-path when given,
  ;; falling back to the output directory
  (let [asset-path (or (:asset-path opts)
                       (util/output-directory opts))]
    (case (:target opts)
      :nodejs
      (output-one-file opts
        (str "var path = require(\"path\");\n"
             "try {\n"
             ;; source-map-support is optional: ignore the error if absent
             "    require(\"source-map-support\").install();\n"
             "} catch(err) {\n"
             "}\n"
             "require(path.join(path.resolve(\".\"),\"" asset-path "\",\"goog\",\"bootstrap\",\"nodejs.js\"));\n"
             "require(path.join(path.resolve(\".\"),\"" asset-path "\",\"cljs_deps.js\"));\n"
             "goog.require(\"" (comp/munge (:main opts)) "\");\n"
             "goog.require(\"cljs.nodejscli\");\n"))
      ;; default (browser) target
      (output-one-file opts
        (str "if(typeof goog == \"undefined\") document.write('<script src=\"" asset-path "/goog/base.js\"></script>');\n"
             "document.write('<script src=\"" asset-path "/cljs_deps.js\"></script>');\n"
             "document.write('<script>if (typeof goog != \"undefined\") { goog.require(\"" (comp/munge (:main opts))
             "\"); } else { console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\"); };</script>');\n")))))
(defn ^String rel-output-path
  "Given an IJavaScript which is either in memory, in a jar file,
  or is a foreign lib, return the path relative to the output
  directory."
  [js]
  (let [url (deps/-url js)]
    (cond
      ;; file-backed: foreign libs keep just their file name, jar
      ;; entries keep their path within the jar
      url
      (if (deps/-foreign? js)
        (util/get-name url)
        (path-from-jarfile url))

      ;; in-memory source string: derive a stable name from a
      ;; truncated SHA-1 of the content so identical sources reuse
      ;; the same output file
      (string? js)
      (let [digest (MessageDigest/getInstance "SHA-1")]
        (.reset digest)
        (.update digest (.getBytes ^String js "utf8"))
        (str
          (->> (DatatypeConverter/printHexBinary (.digest digest))
            (take 7)
            (apply str))
          ".js"))

      ;; last resort: random name (not reproducible across runs)
      :else (str (random-string 5) ".js"))))
(defn write-javascript
  "Write or copy a JavaScript file to output directory. Only write if the file
  does not already exist. Return IJavaScript for the file on disk at the new
  location."
  [opts js]
  (let [out-dir (io/file (util/output-directory opts))
        out-name (rel-output-path js)
        out-file (io/file out-dir out-name)
        ;; metadata describing the file at its new on-disk location
        ijs {:url (deps/to-url out-file)
             :requires (deps/-requires js)
             :provides (deps/-provides js)
             :group (:group js)}]
    ;; existing files are assumed up to date and never overwritten
    (when-not (.exists out-file)
      (util/mkdirs out-file)
      (spit out-file (deps/-source js)))
    ;; preserve any extra keys carried on a map-form IJavaScript
    (if (map? js)
      (merge js ijs)
      ijs)))

(defn write-js?
  "Returns true if IJavaScript instance needs to be written/copied to output
  directory. True when in memory, in a JAR, or if foreign library."
  [js]
  (let [url ^URL (deps/-url js)]
    (or (not url)                      ;; in-memory source, no backing file
        (= (.getProtocol url) "jar")   ;; inside a jar, not directly loadable
        (deps/-foreign? js))))
(defn source-on-disk
  "Ensure that the given IJavaScript exists on disk in the output directory.
  Return updated IJavaScript with the new location if necessary."
  [opts js]
  (if (write-js? js)
    (write-javascript opts js)
    ;; always copy original ClojureScript sources to the output directory
    ;; when source maps enabled
    (let [out-file (if-let [ns (and (:source-map opts) (first (:provides js)))]
                     (io/file (io/file (util/output-directory opts))
                       (util/ns->relpath ns)))
          source-url (:source-url js)]
      ;; copy only when the original source is newer than (or missing from)
      ;; the output directory
      ;; NOTE(review): no mkdirs here — presumably the compile step has
      ;; already created the namespace's directory under :output-dir; confirm
      (when (and out-file source-url
                 (or (not (.exists ^File out-file))
                     (> (.lastModified (io/file source-url))
                        (.lastModified out-file))))
        (spit out-file (slurp source-url)))
      js)))
(comment
(write-javascript {} "goog.provide('demo');\nalert('hello');\n")
;; write something from a jar file to disk
(source-on-disk {}
{:url (deps/goog-resource "goog/base.js")
:source (with-open [reader (io/reader (deps/goog-resource "goog/base.js"))]
(slurp reader))})
;; doesn't write a file that is already on disk
(source-on-disk {} {:url (io/resource "cljs/core.cljs")})
)
(defn output-unoptimized
  "Ensure that all JavaScript source files are on disk (not in jars),
  write the goog deps file including only the libraries that are being
  used and write the deps file for the current project.
  The deps file for the current project will include third-party
  libraries."
  [opts & sources]
  ;; materialize every source to disk; Closure-library files are served
  ;; from their own deps.js, so exclude the :goog group from ours
  (let [disk-sources (remove #(= (:group %) :goog)
                       (map #(source-on-disk opts %) sources))
        goog-deps (io/file (util/output-directory opts)
                    "goog" "deps.js")
        main (:main opts)]
    (util/mkdirs goog-deps)
    (spit goog-deps (slurp (io/resource "goog/deps.js")))
    (if main
      ;; with a :main namespace, write the deps to a fixed cljs_deps.js
      ;; and emit a bootstrap file that loads it (see output-main-file)
      (do
        (output-deps-file
          (assoc opts :output-to
            (str (util/output-directory opts)
                 File/separator "cljs_deps.js"))
          disk-sources)
        (output-main-file opts))
      (output-deps-file opts disk-sources))))
(comment
;; output unoptimized alone
(output-unoptimized {} "goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n")
;; output unoptimized with all dependencies
(apply output-unoptimized {}
(add-dependencies {}
"goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n"))
;; output unoptimized with external library
(apply output-unoptimized {}
(add-dependencies {:libs ["closure/library/third_party/closure"]}
"goog.provide('test');\ngoog.require('cljs.core');\ngoog.require('goog.dom.query');\n"))
output unoptimized and write file to ' out / test.js '
(output-unoptimized {:output-to "out/test.js"}
"goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n")
)
(defn get-upstream-deps*
  "returns a merged map containing all upstream dependencies defined
  by libraries on the classpath. Should be run in the main thread. If
  not, pass (java.lang.ClassLoader/getSystemClassLoader) to use the
  system classloader."
  ([]
   (get-upstream-deps* (. (Thread/currentThread) (getContextClassLoader))))
  ([classloader]
   ;; each dependency jar may ship a deps.cljs resource containing an
   ;; EDN map of :libs / :foreign-libs / :externs entries
   (let [upstream-deps (map #(read-string (slurp %)) (enumeration-seq (. classloader (findResources "deps.cljs"))))]
     #_(doseq [dep upstream-deps]
         (println (str "Upstream deps.cljs found on classpath. " dep " This is an EXPERIMENTAL FEATURE and is not guarenteed to remain stable in future versions.")))
     ;; concat values so multiple jars can contribute to the same key
     (apply merge-with concat upstream-deps))))

;; memoized: the classpath does not change during a build, so scan once
(def get-upstream-deps (memoize get-upstream-deps*))

(defn add-header
  "Prepend the rendered :preamble files to the compiled JavaScript."
  [opts js]
  (str (make-preamble opts) js))
(defn foreign-deps-str
  "Concatenate the contents of the foreign-lib sources, newline separated
  and newline terminated. Under :advanced optimizations a :url-min
  (minified) variant is preferred when present."
  [opts sources]
  (letfn [(source-str [ijs]
            (let [minified (when (= (:optimizations opts) :advanced)
                             (:url-min ijs))]
              (slurp (if minified minified (:url ijs)))))]
    (str (string/join "\n" (map source-str sources)) "\n")))
(defn add-wrapper
  "When :output-wrapper is set, wrap the compiled output in an IIFE so
  top-level names stay out of the global scope; otherwise return js
  unchanged."
  [{:keys [output-wrapper] :as opts} js]
  (if-not output-wrapper
    js
    (str ";(function(){\n" js "\n})();\n")))
(defn add-source-map-link
  "When :source-map is set, append a //# sourceMappingURL comment to js.
  The URL is the raw :source-map value when printing, otherwise a path
  relative to the :output-to file."
  [{:keys [source-map output-to] :as opts} js]
  (cond
    (not source-map)
    js

    (= output-to :print)
    (str js "\n//# sourceMappingURL=" source-map)

    :else
    (str js "\n//# sourceMappingURL="
         (path-relative-to (io/file output-to) {:url source-map}))))
(defn absolute-path?
  "True when path names an absolute filesystem location."
  [path]
  (let [^java.io.File f (io/file path)]
    (.isAbsolute f)))

(defn absolute-parent
  "Absolute path of path's parent directory."
  [path]
  (-> (io/file path) .getAbsoluteFile .getParent))
(defn in-same-dir?
  "Checks that path-1 and path-2 are siblings in the same logical directory."
  ;; BUG FIX: the docstring was placed after the argument vector, making it a
  ;; discarded body expression rather than attached documentation.
  [path-1 path-2]
  (= (absolute-parent path-1)
     (absolute-parent path-2)))
(defn same-or-subdirectory-of?
  "Checks that path names a file or directory that is the dir or a subdirectory there of."
  ;; BUG FIX: the docstring was placed after the argument vector, making it a
  ;; discarded body expression rather than attached documentation.
  [dir path]
  ;; prefix comparison on absolute path strings
  (let [dir-path (.getAbsolutePath (io/file dir))
        path-path (.getAbsolutePath (io/file path))]
    (.startsWith path-path dir-path)))
(defn check-output-to
  "Validate the :output-to compiler option: when present it must be a
  string (a file path) or the keyword :print. Always returns true;
  throws AssertionError on an invalid value."
  [{:keys [output-to] :as opts}]
  (when (contains? opts :output-to)
    (assert (or (= :print output-to)
                (string? output-to))
            (format ":output-to %s must specify a file or be :print"
                    (pr-str output-to))))
  true)
(defn check-output-dir
  "Validate the :output-dir compiler option: when present it must be a
  string naming a directory. Always returns true; throws AssertionError
  on an invalid value."
  [opts]
  (let [dir (:output-dir opts)]
    (when (contains? opts :output-dir)
      (assert (string? dir)
              (format ":output-dir %s must specify a directory"
                      (pr-str dir)))))
  true)
(defn check-source-map
  "Validate the :source-map compiler option. When :source-map is set and an
  optimization level other than :none is applied, :output-to and :output-dir
  must also be set, :source-map must name a file in the same directory as
  :output-to, and :output-dir must live under :output-to's parent directory.
  Always returns true; throws AssertionError on invalid combinations."
  ;; BUG FIX: the (truncated) docstring was placed after the argument vector,
  ;; making it a discarded body expression rather than documentation. Also
  ;; fixed the "specied" typo in the first assertion message.
  [{:keys [output-to source-map output-dir] :as opts}]
  (when (and (contains? opts :source-map)
             (not (= (:optimizations opts) :none)))
    (assert (and (contains? opts :output-to)
                 (contains? opts :output-dir))
      ":source-map cannot be specified without also specifying :output-to and :output-dir if optimization setting applied")
    (assert (string? source-map)
      (format ":source-map %s must specify a file in the same directory as :output-to %s if optimization setting applied"
        (pr-str source-map)
        (pr-str output-to)))
    (assert (in-same-dir? source-map output-to)
      (format ":source-map %s must specify a file in the same directory as :output-to %s if optimization setting applied"
        (pr-str source-map)
        (pr-str output-to)))
    (assert (same-or-subdirectory-of? (absolute-parent output-to) output-dir)
      (format ":output-dir %s must specify a directory in :output-to's parent %s if optimization setting applied"
        (pr-str output-dir)
        (pr-str (absolute-parent output-to)))))
  true)
(defn check-source-map-path
  "Validate the :source-map-path compiler option: when present it must be
  a string, and under any optimization level other than :none it also
  requires :output-to and :source-map. Always returns true; throws
  AssertionError on invalid combinations."
  [opts]
  (let [smp (:source-map-path opts)]
    (when (contains? opts :source-map-path)
      (assert (string? smp)
              (format ":source-map-path %s must be a directory" smp))
      (when-not (= (:optimizations opts) :none)
        (assert (and (contains? opts :output-to)
                     (contains? opts :source-map))
                ":source-map-path cannot be specified without also specifying :output-to and :source-map if optimization setting applied"))))
  true)
(defn check-output-wrapper
  "Assert that :output-wrapper is not combined with :whitespace
  optimizations (the wrapper would break whitespace-only output).
  Returns true; throws AssertionError on an invalid combination."
  ;; CONSISTENCY: the sibling check-* validators all return true; these two
  ;; previously returned the (nil) value of assert.
  [{:keys [output-wrapper optimizations]}]
  (assert (not (and output-wrapper (= :whitespace optimizations)))
          ":output-wrapper cannot be combined with :optimizations :whitespace")
  true)

(defn check-node-target
  "Assert that the :nodejs target is not combined with :whitespace
  optimizations. Returns true; throws AssertionError on an invalid
  combination."
  [{:keys [target optimizations] :as opts}]
  (assert (not (and (= target :nodejs) (= optimizations :whitespace)))
          (format ":nodejs target not compatible with :whitespace optimizations"))
  true)
(defn foreign-source?
  "True when js satisfies IJavaScript and reports itself as a foreign
  library (excluded from Closure optimization)."
  [js]
  (and (satisfies? deps/IJavaScript js)
       (deps/-foreign? js)))
(defn build
  "Given a source which can be compiled, produce runnable JavaScript."
  ([source opts]
   ;; reuse an ambient compiler env if one is bound, else make a fresh one
   (build source opts
     (if-not (nil? env/*compiler*)
       env/*compiler*
       (env/default-compiler-env opts))))
  ([source opts compiler-env]
   (env/with-compiler-env compiler-env
     (let [compiler-stats (:compiler-stats opts)
           ups-deps (get-upstream-deps)
           ;; thread classpath (upstream) deps and the mandatory imul.js
           ;; preamble into the effective option map
           all-opts (-> opts
                      (assoc
                        :ups-libs (:libs ups-deps)
                        :ups-foreign-libs (:foreign-libs ups-deps)
                        :ups-externs (:externs ups-deps))
                      (update-in [:preamble] #(into (or % []) ["cljs/imul.js"])))
           ;; constants table is emitted by default under :advanced unless
           ;; explicitly disabled, or when :optimize-constants is set
           emit-constants (or (and (= (:optimizations opts) :advanced)
                                   (not (false? (:optimize-constants opts))))
                              (:optimize-constants opts))]
       ;; validate option combinations up front (each throws AssertionError)
       (check-output-to opts)
       (check-output-dir opts)
       (check-source-map opts)
       (check-source-map-path opts)
       (check-output-wrapper opts)
       (check-node-target opts)
       (swap! compiler-env
         #(-> %
           (assoc-in [:opts :emit-constants] emit-constants)
           (assoc :target (:target opts))
           (assoc :js-dependency-index (deps/js-dependency-index all-opts))))
       ;; configure the analyzer/compiler dynamic vars for this build
       (binding [comp/*dependents* (when-not (false? (:recompile-dependents opts))
                                     (atom {:recompile #{} :visited #{}}))
                 ana/*cljs-static-fns*
                 (or (and (= (:optimizations opts) :advanced)
                          (not (false? (:static-fns opts))))
                     (:static-fns opts)
                     ana/*cljs-static-fns*)
                 *assert* (not= (:elide-asserts opts) true)
                 ana/*load-tests* (not= (:load-tests opts) false)
                 ana/*cljs-warnings*
                 ;; :warnings may be a boolean (toggle a default set) or a
                 ;; map of individual warning flags
                 (let [warnings (opts :warnings true)]
                   (merge
                     ana/*cljs-warnings*
                     (if (or (true? warnings)
                             (false? warnings))
                       (zipmap
                         [:unprovided :undeclared-var
                          :undeclared-ns :undeclared-ns-form]
                         (repeat warnings))
                       warnings)))
                 ana/*verbose* (:verbose opts)]
         (let [compiled (util/measure compiler-stats
                          "Compile basic sources"
                          (doall (-compile source all-opts)))
               ;; the constants_table.js file is not used directly here, is picked up by
               ;; add-dependencies below
               _ (when emit-constants
                   (comp/emit-constants-table-to-file
                     (::ana/constant-table @env/*compiler*)
                     (str (util/output-directory all-opts) "/constants_table.js")))
               js-sources (util/measure compiler-stats
                            "Add dependencies"
                            (doall
                              (concat
                                (apply add-dependencies all-opts
                                  (concat
                                    (if (coll? compiled) compiled [compiled])
                                    ;; Node target pulls in the nodejs shims
                                    (when (= :nodejs (:target all-opts))
                                      [(-compile (io/resource "cljs/nodejs.cljs") all-opts)])))
                                (when (= :nodejs (:target all-opts))
                                  [(-compile (io/resource "cljs/nodejscli.cljs") all-opts)]))))
               optim (:optimizations all-opts)
               ret (if (and optim (not= optim :none))
                     ;; optimized build: foreign deps are concatenated as-is
                     ;; in front of the Closure-optimized output
                     (let [fdeps-str (foreign-deps-str all-opts
                                       (filter foreign-source? js-sources))
                           all-opts (assoc all-opts
                                      :foreign-deps-line-count
                                      (- (count (.split #"\r?\n" fdeps-str -1)) 1))]
                       (when-let [fname (:source-map all-opts)]
                         (assert (string? fname)
                           (str ":source-map must name a file when using :whitespace, "
                                ":simple, or :advanced optimizations"))
                         ;; originals must be on disk for the source map links
                         (doall (map #(source-on-disk all-opts %) js-sources)))
                       (->>
                         (util/measure compiler-stats
                           "Optimize sources"
                           (apply optimize all-opts
                             (remove foreign-source? js-sources)))
                         (add-wrapper all-opts)
                         (add-source-map-link all-opts)
                         (str fdeps-str)
                         (add-header all-opts)
                         (output-one-file all-opts)))
                     (apply output-unoptimized all-opts js-sources))]
           ;; emit Node.js bootstrap script for :none & :whitespace optimizations
           (when (and (= (:target opts) :nodejs)
                      (#{:none :whitespace} (:optimizations opts)))
             (let [outfile (io/file (util/output-directory opts)
                             "goog" "bootstrap" "nodejs.js")]
               (util/mkdirs outfile)
               (spit outfile (slurp (io/resource "cljs/bootstrap_node.js")))))
           ret))))))
(defn watch
  "Given a source directory, produce runnable JavaScript. Watch the source
  directory for changes rebuilding when necessary. Takes the same arguments as
  cljs.closure/build."
  ([source opts]
   (watch source opts
     (if-not (nil? env/*compiler*)
       env/*compiler*
       (env/default-compiler-env opts))))
  ([source opts compiler-env]
   (let [path (Paths/get (.toURI (io/file source)))
         fs (.getFileSystem path)
         service (.newWatchService fs)]
     (letfn [(buildf []
               ;; run one timed build, reusing the same compiler env so
               ;; incremental analysis state carries across rebuilds
               (let [start (System/nanoTime)]
                 (build source opts compiler-env)
                 (println "... done. Elapsed"
                   (/ (unchecked-subtract (System/nanoTime) start) 1e9) "seconds")
                 (flush)))
             (watch-all [^Path root]
               ;; register every directory under root with the watch service
               (Files/walkFileTree root
                 (reify
                   FileVisitor
                   (preVisitDirectory [_ dir _]
                     (let [^Path dir dir]
                       (. dir
                         (register service
                           (into-array [StandardWatchEventKinds/ENTRY_CREATE
                                        StandardWatchEventKinds/ENTRY_DELETE
                                        StandardWatchEventKinds/ENTRY_MODIFY])
                           ;; HIGH sensitivity: faster change detection on
                           ;; platforms that poll (e.g. OS X)
                           (into-array [SensitivityWatchEventModifier/HIGH]))))
                     FileVisitResult/CONTINUE)
                   (postVisitDirectory [_ dir exc]
                     FileVisitResult/CONTINUE)
                   (visitFile [_ file attrs]
                     FileVisitResult/CONTINUE)
                   (visitFileFailed [_ file exc]
                     FileVisitResult/CONTINUE))))]
       (println "Building...")
       (flush)
       (buildf)
       (println "Watching path:" source)
       (watch-all path)
       ;; block on the watch service; rebuild when any .cljs/.js changes
       (loop [key nil]
         (when (or (nil? key) (. ^WatchKey key reset))
           (let [key (. service take)]
             (when (some (fn [^WatchEvent e]
                           (or (.. (. e context) toString (endsWith "cljs"))
                               (.. (. e context) toString (endsWith "js"))))
                     (seq (.pollEvents key)))
               (println "Change detected, recompiling...")
               (flush)
               ;; keep watching even if a rebuild throws
               (try
                 (buildf)
                 (catch Exception e
                   (.printStackTrace e))))
             (recur key))))))))
(comment
(watch "samples/hello/src"
{:optimizations :none
:output-to "samples/hello/out/hello.js"
:output-dir "samples/hello/out"
:cache-analysis true
:source-map true
:verbose true})
)
;; =============================================================================
Utilities
;; for backwards compatibility
(defn output-directory
  "Backwards-compatible alias for util/output-directory."
  [opts]
  (util/output-directory opts))

(defn parse-js-ns
  "Parse the goog.provide/goog.require declarations from JavaScript file f."
  [f]
  (deps/parse-js-ns (line-seq (io/reader f))))

(defn ^File src-file->target-file
  "Return the output .js File a ClojureScript source file compiles to,
  honoring :output-dir when present in opts."
  ([src] (src-file->target-file src nil))
  ([src opts]
   (util/to-target-file
     (when (:output-dir opts)
       (util/output-directory opts))
     (ana/parse-ns src))))
(defn ^String src-file->goog-require
  "Return the goog.require expression (or bare namespace name) for a
  ClojureScript or JavaScript source file. With :wrap (the default for
  the 1-arity) the namespace is wrapped in a goog.require(...) call;
  with :all-provides a JS file yields the vector of all its provides."
  ([src] (src-file->goog-require src {:wrap true}))
  ;; BUG FIX: the options map was destructured as
  ;; {:keys [wrap all-provides :as options]} which puts :as inside the
  ;; :keys vector — binding stray `as`/`options` symbols instead of
  ;; creating an alias. The unused alias is dropped.
  ([src {:keys [wrap all-provides]}]
   (let [goog-ns
         (case (util/ext src)
           "cljs" (comp/munge (:ns (ana/parse-ns src)))
           "js" (cond-> (:provides (parse-js-ns src))
                  (not all-provides) first)
           (throw
             (IllegalArgumentException.
               (str "Can't create goog.require expression for " src))))]
     (if (and (not all-provides) wrap)
       (str "goog.require(\"" goog-ns "\");")
       (if (vector? goog-ns)
         goog-ns
         (str goog-ns))))))
(comment
(println (build '[(ns hello.core)
(defn ^{:export greet} greet [n] (str "Hola " n))
(defn ^:export sum [xs] 42)]
{:optimizations :simple :pretty-print true}))
;; build a project with optimizations
(build "samples/hello/src" {:optimizations :advanced})
(build "samples/hello/src" {:optimizations :advanced :output-to "samples/hello/hello.js"})
;; open 'samples/hello/hello.html' to see the result in action
;; build a project without optimizations
(build "samples/hello/src" {:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"})
;; open 'samples/hello/hello-dev.html' to see the result in action
;; notice how each script was loaded individually
;; build unoptimized from raw ClojureScript
(build '[(ns hello.core)
(defn ^{:export greet} greet [n] (str "Hola " n))
(defn ^:export sum [xs] 42)]
{:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"})
;; open 'samples/hello/hello-dev.html' to see the result in action
)
| null | https://raw.githubusercontent.com/stabilized/clojurescript/f38f141525576b2a89cde190f25f9cf2fc4c418a/src/clj/cljs/closure.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
Closure API
===========
========================================
Compile
=======
TODO: it would be nice if we could consolidate requires-compilation?
compile a file in memory
compile a file to disk - see file @ 'out/clojure/set.js'
compile a project
compile a project with a custom output directory
compile a form
compile a vector of forms
find dependencies
find dependencies in an external library
don't get cljs.core twice
add dependencies to literal js
add dependencies with external lib
add dependencies with foreign lib
Optimize
========
compiler.getSourceMap().reset()
=> "alert(5);"
optimize a ClojureScript form
optimize a project
Output
======
build process may produce files on disk but a single string is
always output. What this string contains depends on whether the
input has been optimized or not. If the :output-to option is set
then this string will be written to the specified file. If not, it
will be returned.
The :output-dir option can be used to set the working directory
where any files will be written to disk. By default this directory
is 'out'.
If inputs are optimized then the output string will be the complete
application with all dependencies included.
For unoptimized output, the string will be a Closure deps file
dependencies. All JavaScript files will be located in the working
directory, including any dependencies from the Closure library.
run. It also makes debugging much simpler because each file is
loaded in its own script tag.
When working with uncompiled files, you will need to add additional
library's base.js and one which calls goog.require to load your
code. See samples/hello/hello-dev.html for an example.
same file
} else { console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\"); };</script>');\n")))))
always copy original ClojureScript sources to the output directory
when source maps enabled
write something from a jar file to disk
doesn't write a file that is already on disk
output unoptimized alone
output unoptimized with all dependencies
output unoptimized with external library
the constants_table.js file is not used directly here, is picked up by
add-dependencies below
emit Node.js bootstrap script for :none & :whitespace optimizations
=============================================================================
for backwards compatibility
build a project with optimizations
open 'samples/hello/hello.html' to see the result in action
build a project without optimizations
open 'samples/hello/hello-dev.html' to see the result in action
notice how each script was loaded individually
build unoptimized from raw ClojureScript
open 'samples/hello/hello-dev.html' to see the result in action | Copyright ( c ) . All rights reserved .
(ns cljs.closure
"Compile ClojureScript to JavaScript with optimizations from Google
Closure Compiler producing runnable JavaScript.
The Closure Compiler (compiler.jar) must be on the classpath.
Use the 'build' function for end-to-end compilation.
build = compile -> add-dependencies -> optimize -> output
Two protocols are defined: IJavaScript and Compilable. The
Compilable protocol is satisfied by something which can return one
or more IJavaScripts.
With IJavaScript objects in hand, calling add-dependencies will
produce a sequence of IJavaScript objects which includes all
required dependencies from the Closure library and ClojureScript,
in dependency order. This function replaces the closurebuilder
tool.
The optimize function converts one or more IJavaScripts into a
single string of JavaScript source code using the Closure Compiler
API.
The produced output is either a single string of optimized
JavaScript or a deps file for use during development.
"
(:refer-clojure :exclude [compile])
(:require [cljs.util :as util]
[cljs.compiler :as comp]
[cljs.analyzer :as ana]
[cljs.source-map :as sm]
[cljs.env :as env]
[cljs.js-deps :as deps]
[clojure.java.io :as io]
[clojure.string :as string]
[clojure.data.json :as json])
(:import [java.io File BufferedInputStream]
[java.net URL]
[java.util.logging Level]
[java.util List Random]
[com.google.javascript.jscomp CompilerOptions CompilationLevel
CompilerOptions$LanguageMode SourceMap$Format
SourceMap$DetailLevel ClosureCodingConvention SourceFile
Result JSError CheckLevel DiagnosticGroups
CommandLineRunner AnonymousFunctionNamingPolicy]
[java.security MessageDigest]
[javax.xml.bind DatatypeConverter]
[java.nio.file Path Paths Files StandardWatchEventKinds WatchKey
WatchEvent FileVisitor FileVisitResult]
[com.sun.nio.file SensitivityWatchEventModifier]))
;; Alphanumeric alphabet for generated file names: 0-9, A-Z, a-z.
;; BUG FIX: range's end is exclusive, so the original (range 48 57),
;; (range 65 90) and (range 97 122) silently dropped '9', 'Z' and 'z'.
;; Held as a vector for constant-time random access.
(def name-chars (vec (map char (concat (range 48 58) (range 65 91) (range 97 123)))))

(defn random-char
  "Return a uniformly random character from name-chars."
  []
  ;; rand-nth reuses Clojure's RNG instead of allocating a new
  ;; java.util.Random on every call as the original did
  (rand-nth name-chars))

(defn random-string
  "Return a random alphanumeric string of the given length."
  [length]
  (apply str (repeatedly length random-char)))
;; Build a Closure SourceFile from a named source, dispatching on the
;; concrete type of the source (in-memory string, file on disk, or stream).
(defmulti js-source-file (fn [_ source] (class source)))

(defmethod js-source-file String [^String name ^String source]
  (SourceFile/fromCode name source))

(defmethod js-source-file File [_ ^File source]
  ;; the File carries its own name; the name argument is ignored
  (SourceFile/fromFile source))

(defmethod js-source-file BufferedInputStream [^String name ^BufferedInputStream source]
  (SourceFile/fromInputStream name source))
(defn set-options
  "Mutate compiler-options in place from the opts map: :pretty-print,
  :pseudo-names, :anon-fn-naming-policy, :language-in, :language-out and
  :print-input-delimiter. TODO: Add any other options that we would like
  to support."
  [opts ^CompilerOptions compiler-options]
  (when (contains? opts :pretty-print)
    (set! (.prettyPrint compiler-options) (:pretty-print opts)))
  (when (contains? opts :pseudo-names)
    (set! (.generatePseudoNames compiler-options) (:pseudo-names opts)))
  (when (contains? opts :anon-fn-naming-policy)
    (let [policy (:anon-fn-naming-policy opts)]
      (set! (.anonymousFunctionNaming compiler-options)
        (case policy
          :off AnonymousFunctionNamingPolicy/OFF
          :unmapped AnonymousFunctionNamingPolicy/UNMAPPED
          :mapped AnonymousFunctionNamingPolicy/MAPPED
          ;; unknown policies fail fast rather than being ignored
          (throw (IllegalArgumentException. (str "Invalid :anon-fn-naming-policy value " policy " - only :off, :unmapped, :mapped permitted")))))))
  (when (contains? opts :language-in)
    (case (:language-in opts)
      :ecmascript5 (.setLanguageIn compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5)
      :ecmascript5-strict (.setLanguageIn compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5_STRICT)
      :ecmascript3 (.setLanguageIn compiler-options CompilerOptions$LanguageMode/ECMASCRIPT3)))
  (when (contains? opts :language-out)
    (case (:language-out opts)
      :ecmascript5 (.setLanguageOut compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5)
      :ecmascript5-strict (.setLanguageOut compiler-options CompilerOptions$LanguageMode/ECMASCRIPT5_STRICT)
      :ecmascript3 (.setLanguageOut compiler-options CompilerOptions$LanguageMode/ECMASCRIPT3)))
  (when (contains? opts :print-input-delimiter)
    (set! (.printInputDelimiter compiler-options)
      (:print-input-delimiter opts))))
;; Map of :closure-warnings severity keywords to Closure CheckLevel values.
(def check-level
  {:error CheckLevel/ERROR
   :warning CheckLevel/WARNING
   :off CheckLevel/OFF})
;; Map of :closure-warnings keywords to Closure DiagnosticGroups.
;; BUG FIX: the visibility group was only reachable via the misspelled
;; :visiblity key; the correctly spelled :visibility is added while the
;; old key is retained for backward compatibility.
(def warning-types
  {:access-controls DiagnosticGroups/ACCESS_CONTROLS
   :ambiguous-function-decl DiagnosticGroups/AMBIGUOUS_FUNCTION_DECL
   :debugger-statement-present DiagnosticGroups/DEBUGGER_STATEMENT_PRESENT
   :check-regexp DiagnosticGroups/CHECK_REGEXP
   :check-types DiagnosticGroups/CHECK_TYPES
   :check-useless-code DiagnosticGroups/CHECK_USELESS_CODE
   :check-variables DiagnosticGroups/CHECK_VARIABLES
   :const DiagnosticGroups/CONST
   :constant-property DiagnosticGroups/CONSTANT_PROPERTY
   :deprecated DiagnosticGroups/DEPRECATED
   :duplicate-message DiagnosticGroups/DUPLICATE_MESSAGE
   :es5-strict DiagnosticGroups/ES5_STRICT
   :externs-validation DiagnosticGroups/EXTERNS_VALIDATION
   :fileoverview-jsdoc DiagnosticGroups/FILEOVERVIEW_JSDOC
   :global-this DiagnosticGroups/GLOBAL_THIS
   :internet-explorer-checks DiagnosticGroups/INTERNET_EXPLORER_CHECKS
   :invalid-casts DiagnosticGroups/INVALID_CASTS
   :missing-properties DiagnosticGroups/MISSING_PROPERTIES
   :non-standard-jsdoc DiagnosticGroups/NON_STANDARD_JSDOC
   :strict-module-dep-check DiagnosticGroups/STRICT_MODULE_DEP_CHECK
   :tweaks DiagnosticGroups/TWEAKS
   :undefined-names DiagnosticGroups/UNDEFINED_NAMES
   :undefined-variables DiagnosticGroups/UNDEFINED_VARIABLES
   :unknown-defines DiagnosticGroups/UNKNOWN_DEFINES
   :visibility DiagnosticGroups/VISIBILITY
   ;; deprecated misspelling kept so existing configs keep working
   :visiblity DiagnosticGroups/VISIBILITY})
(defn ^CompilerOptions make-options
  "Create a CompilerOptions object and set options from opts map."
  [opts]
  (let [level (case (:optimizations opts)
                :advanced CompilationLevel/ADVANCED_OPTIMIZATIONS
                :whitespace CompilationLevel/WHITESPACE_ONLY
                :simple CompilationLevel/SIMPLE_OPTIMIZATIONS)
        compiler-options (doto (CompilerOptions.)
                           (.setCodingConvention (ClosureCodingConvention.)))]
    ;; forward :closure-defines as typed @define overrides
    (doseq [[key val] (:closure-defines opts)]
      (let [key (name key)]
        (cond
          (string? val) (.setDefineToStringLiteral compiler-options key val)
          (integer? val) (.setDefineToIntegerLiteral compiler-options key val)
          (float? val) (.setDefineToDoubleLiteral compiler-options key val)
          (or (true? val)
              (false? val)) (.setDefineToBooleanLiteral compiler-options key val)
          ;; unsupported define types are reported, not thrown
          :else (println "value for" key "must be string, int, float, or bool"))))
    ;; per-group warning levels, keyed through warning-types/check-level
    (doseq [[type level] (:closure-warnings opts)]
      (. compiler-options
        (setWarningLevel (type warning-types) (level check-level))))
    (if-let [extra-annotations (:closure-extra-annotations opts)]
      (. compiler-options (setExtraAnnotationNames (map name extra-annotations))))
    (when (contains? opts :source-map)
      (set! (.sourceMapOutputPath compiler-options)
        (:source-map opts))
      (set! (.sourceMapDetailLevel compiler-options)
        SourceMap$DetailLevel/ALL)
      (set! (.sourceMapFormat compiler-options)
        SourceMap$Format/V3))
    ;; apply the optimization level last, then user overrides on top
    (do
      (.setOptionsForCompilationLevel level compiler-options)
      (set-options opts compiler-options)
      compiler-options)))
(defn load-externs
  "Externs are JavaScript files which contain empty definitions of
  functions which will be provided by the environment. Any function in
  an extern file will not be renamed during optimization.
  Options may contain an :externs key with a list of file paths to
  load. The :use-only-custom-externs flag may be used to indicate that
  the default externs should be excluded."
  [{:keys [externs use-only-custom-externs target ups-externs]}]
  (let [;; upstream externs are resolved from the classpath...
        filter-cp-js (fn [paths]
                       (for [p paths u (deps/find-js-classpath p)] u))
        ;; ...project externs from files/resources
        filter-js (fn [paths]
                    (for [p paths u (deps/find-js-resources p)] u))
        ;; always include the CLJS core externs; Node target adds its own
        add-target (fn [ext]
                     (cons (io/resource "cljs/externs.js")
                       (if (= :nodejs target)
                         (cons (io/resource "cljs/nodejs_externs.js")
                           (or ext []))
                         ext)))
        load-js (fn [ext]
                  (map #(js-source-file (.getFile %) (slurp %)) ext))]
    (let [js-sources (-> externs filter-js add-target load-js)
          ups-sources (-> ups-externs filter-cp-js load-js)
          all-sources (concat js-sources ups-sources)]
      (if use-only-custom-externs
        all-sources
        ;; default environment externs (browser DOM etc.) unless opted out
        (into all-sources (CommandLineRunner/getDefaultExterns))))))
(defn ^com.google.javascript.jscomp.Compiler make-closure-compiler
  "Construct a Closure Compiler instance with logging limited to WARNING."
  []
  (let [compiler (com.google.javascript.jscomp.Compiler.)]
    ;; setLoggingLevel is static/global: affects all compiler instances
    (com.google.javascript.jscomp.Compiler/setLoggingLevel Level/WARNING)
    compiler))

(defn report-failure
  "Print every error and warning from a Closure compilation Result to *out*."
  [^Result result]
  (let [errors (.errors result)
        warnings (.warnings result)]
    (doseq [next (seq errors)]
      (println "ERROR:" (.toString ^JSError next)))
    (doseq [next (seq warnings)]
      (println "WARNING:" (.toString ^JSError next)))))
Protocols for IJavaScript and Compilable
;; Access to the original CLJS source and the generated source mapping
;; for a compiled JavaScript artifact.
(defprotocol ISourceMap
  (-source-url [this] "Return the CLJS source url")
  (-source-map [this] "Return the CLJS compiler generated JS source mapping"))

;; Raw JavaScript source strings and plain maps both participate in the
;; IJavaScript protocol so the pipeline can treat them uniformly.
(extend-protocol deps/IJavaScript

  String
  ;; a bare string is in-memory, never foreign, and has no URL;
  ;; provides/requires are parsed from its goog declarations
  (-foreign? [this] false)
  (-url [this] nil)
  (-provides [this] (:provides (deps/parse-js-ns (string/split-lines this))))
  (-requires [this] (:requires (deps/parse-js-ns (string/split-lines this))))
  (-source [this] this)

  clojure.lang.IPersistentMap
  ;; map form carries its metadata explicitly; source is either inline
  ;; under :source or slurped lazily from the URL
  (-foreign? [this] (:foreign this))
  (-url [this] (or (:url this)
                   (deps/to-url (:file this))))
  (-provides [this] (map name (:provides this)))
  (-requires [this] (map name (:requires this)))
  (-source [this] (if-let [s (:source this)]
                    s (with-open [reader (io/reader (deps/-url this))]
                        (slurp reader)))))
;; On-disk compiled JavaScript artifact: url points at the compiled JS,
;; source-url at the original CLJS (when known), plus source-map data.
(defrecord JavaScriptFile [foreign ^URL url ^URL source-url provides requires lines source-map]
  deps/IJavaScript
  (-foreign? [this] foreign)
  (-url [this] url)
  (-provides [this] provides)
  (-requires [this] requires)
  (-source [this]
    (with-open [reader (io/reader url)]
      (slurp reader)))
  ISourceMap
  (-source-url [this] source-url)
  (-source-map [this] source-map))

(defn javascript-file
  "Construct a JavaScriptFile, normalizing provides/requires to strings.
  Asserts that at least one namespace is provided."
  ([foreign ^URL url provides requires]
   (javascript-file foreign url nil provides requires nil nil))
  ([foreign ^URL url source-url provides requires lines source-map]
   (assert (first provides) (str source-url " does not provide a namespace"))
   (JavaScriptFile. foreign url source-url (map name provides) (map name requires) lines source-map)))

(defn map->javascript-file
  "Build a JavaScriptFile from a compilation-result map (:file,
  :source-file, :provides, :requires, :lines, :source-map)."
  [m]
  (javascript-file
    (:foreign m)
    (when-let [f (:file m)]
      (deps/to-url f))
    (when-let [sf (:source-file m)]
      (deps/to-url sf))
    (:provides m)
    (:requires m)
    (:lines m)
    (:source-map m)))

(defn read-js
  "Read a JavaScript file returning a map of file information."
  [f]
  (let [source (slurp f)
        m (deps/parse-js-ns (string/split-lines source))]
    (map->javascript-file (assoc m :file f))))
;; Anything that can be turned into JavaScript: files, directories,
;; URLs, source strings, or literal forms (see extend-protocol below).
(defprotocol Compilable
  (-compile [this opts] "Returns one or more IJavaScripts."))

(defn compile-form-seq
  "Compile a sequence of forms to a JavaScript source string."
  [forms]
  ;; ensure cljs.core has been analyzed before emitting user forms
  (comp/with-core-cljs nil
    (fn []
      (with-out-str
        ;; forms compile in a throwaway cljs.user namespace
        (binding [ana/*cljs-ns* 'cljs.user]
          (doseq [form forms]
            (comp/emit (ana/analyze (ana/empty-env) form))))))))
(defn compiled-file
  "Given a map with at least a :file key, return a map with
  {:file .. :provides .. :requires ..}.
  Compiled files are cached so they will only be read once."
  [m]
  (let [path (.getPath (.toURL ^File (:file m)))
        js (if (:provides m)
             ;; result already carries its metadata
             (map->javascript-file m)
             ;; otherwise consult the per-build cache before re-reading
             (if-let [js (get-in @env/*compiler* [::compiled-cljs path])]
               js
               (read-js (:file m))))]
    (swap! env/*compiler* update-in [::compiled-cljs] assoc path js)
    js))

(defn compile
  "Given a Compilable, compile it and return an IJavaScript."
  [compilable opts]
  (-compile compilable opts))

(defn compile-file
  "Compile a single cljs file. If no output-file is specified, returns
  a string of compiled JavaScript. With an output-file option, the
  compiled JavaScript will be written to this location and the function
  returns a JavaScriptFile. In either case the return value satisfies
  IJavaScript."
  [^File file {:keys [output-file] :as opts}]
  (if output-file
    (let [out-file (io/file (util/output-directory opts) output-file)]
      (compiled-file (comp/compile-file file out-file opts)))
    ;; no output file: compile the forms in memory to a string
    (binding [ana/*cljs-file* (.getPath ^File file)]
      (compile-form-seq (ana/forms-seq file)))))

(defn compile-dir
  "Recursively compile all cljs files under the given source
  directory. Return a list of JavaScriptFiles."
  [^File src-dir opts]
  (let [out-dir (util/output-directory opts)]
    (map compiled-file
      (comp/compile-root src-dir out-dir opts))))
(defn ^String path-from-jarfile
  "Given the URL of a file within a jar, return the path of the file
  from the root of the jar (everything after the '.jar!/' separator)."
  [^URL url]
  (-> (.getFile url)
      (string/split #"\.jar!/")
      last))
(defn jar-file-to-disk
"Copy a file contained within a jar to disk. Return the created file."
[url out-dir]
(let [out-file (io/file out-dir (path-from-jarfile url))
content (with-open [reader (io/reader url)]
(slurp reader))]
(util/mkdirs out-file)
(spit out-file content)
out-file))
logic -
(defn compile-from-jar
"Compile a file from a jar."
[this {:keys [output-file] :as opts}]
(or (when output-file
(let [out-file (io/file (util/output-directory opts) output-file)]
(when (and (.exists out-file)
(= (util/compiled-by-version out-file)
(util/clojurescript-version)))
(compile-file
(io/file (util/output-directory opts)
(last (string/split (.getPath ^URL this) #"\.jar!/")))
opts))))
(let [file-on-disk (jar-file-to-disk this (util/output-directory opts))]
(-compile file-on-disk opts))))
(extend-protocol Compilable
File
(-compile [this opts]
(if (.isDirectory this)
(compile-dir this opts)
(compile-file this opts)))
URL
(-compile [this opts]
(case (.getProtocol this)
"file" (-compile (io/file this) opts)
"jar" (compile-from-jar this opts)))
clojure.lang.PersistentList
(-compile [this opts]
(compile-form-seq [this]))
String
(-compile [this opts] (-compile (io/file this) opts))
clojure.lang.PersistentVector
(-compile [this opts] (compile-form-seq this))
)
(comment
(-compile "samples/hello/src/hello/core.cljs" {})
(-compile (io/resource "clojure/set.cljs") {:output-file "clojure/set.js"})
(-compile (io/file "samples/hello/src") {})
(-compile (io/file "samples/hello/src") {:output-dir "my-output"})
(-compile '(defn plus-one [x] (inc x)) {})
(-compile '[(ns test.app (:require [goog.array :as array]))
(defn plus-one [x] (inc x))]
{})
)
(defn js-dependencies
"Given a sequence of Closure namespace strings, return the list of
all dependencies. The returned list includes all Google and
third-party library dependencies.
Third-party libraries are configured using the :libs option where
the value is a list of directories containing third-party
libraries."
[opts requires]
(loop [requires requires
visited (set requires)
deps #{}]
(if (seq requires)
(let [node (or (get (@env/*compiler* :js-dependency-index) (first requires))
(deps/find-classpath-lib (first requires)))
new-req (remove #(contains? visited %) (:requires node))]
(recur (into (rest requires) new-req)
(into visited new-req)
(conj deps node)))
(remove nil? deps))))
(comment
(js-dependencies {} ["goog.array"])
(js-dependencies {:libs ["closure/library/third_party/closure"]} ["goog.dom.query"])
)
(defn get-compiled-cljs
"Return an IJavaScript for this file. Compiled output will be
written to the working directory."
[opts {:keys [relative-path uri]}]
(let [js-file (comp/rename-to-js relative-path)]
(-compile uri (merge opts {:output-file js-file}))))
(defn cljs-source-for-namespace
"Returns a map containing :relative-path, :uri referring to the resource that
should contain the source for the given namespace name."
[ns]
(as-> (munge ns) %
(string/replace % \. \/)
(str % ".cljs")
{:relative-path % :uri (io/resource %)}))
(defn source-for-namespace
[ns compiler-env]
(let [ns-str (str (comp/munge ns))
path (string/replace ns-str \. \/)
relpath (str path ".cljs")]
(if-let [cljs-res (io/resource relpath)]
{:relative-path relpath :uri cljs-res}
(let [relpath (:file (get-in @compiler-env [:js-dependency-index ns-str]))]
(if-let [js-res (io/resource relpath)]
{:relative-path relpath :uri js-res}
(throw
(IllegalArgumentException. (str "Namespace " ns " does not exist"))))))))
(defn cljs-dependencies
"Given a list of all required namespaces, return a list of
IJavaScripts which are the cljs dependencies. The returned list will
not only include the explicitly required files but any transitive
dependencies as well. JavaScript files will be compiled to the
working directory if they do not already exist.
Only load dependencies from the classpath."
[opts requires]
(let [cljs-deps (fn [lib-names]
(->> (remove #(or ((@env/*compiler* :js-dependency-index) %)
(deps/find-classpath-lib %))
lib-names)
(map cljs-source-for-namespace)
(remove (comp nil? :uri))))]
(loop [required-files (cljs-deps requires)
visited (set required-files)
js-deps #{}]
(if (seq required-files)
(let [next-file (first required-files)
js (get-compiled-cljs opts next-file)
new-req (remove #(contains? visited %) (cljs-deps (deps/-requires js)))]
(recur (into (rest required-files) new-req)
(into visited new-req)
(conj js-deps js)))
(remove nil? js-deps)))))
(comment
only get
(cljs-dependencies {} ["goog.string" "cljs.core"])
get transitive
(cljs-dependencies {} ["clojure.string"])
(cljs-dependencies {} ["cljs.core" "clojure.string"])
)
(defn add-dependencies
"Given one or more IJavaScript objects in dependency order, produce
a new sequence of IJavaScript objects which includes the input list
plus all dependencies in dependency order."
[opts & inputs]
(let [requires (mapcat deps/-requires inputs)
required-cljs (remove (set inputs) (cljs-dependencies opts requires))
required-js (js-dependencies opts (set (concat (mapcat deps/-requires required-cljs) requires)))
provided (mapcat deps/-provides (concat inputs required-cljs required-js))
unprovided (clojure.set/difference (set requires) (set provided) #{"constants-table"})]
(when (seq unprovided)
(ana/warning :unprovided @env/*compiler* {:unprovided (sort unprovided)}))
(cons
(javascript-file nil (deps/goog-resource "goog/base.js") ["goog"] nil)
(deps/dependency-order
(concat
(map
(fn [{:keys [foreign url file provides requires] :as js-map}]
(let [url (or url (io/resource file))]
(merge
(javascript-file foreign url provides requires)
js-map)))
required-js)
[(when (-> @env/*compiler* :opts :emit-constants)
(let [url (deps/to-url (str (util/output-directory opts) "/constants_table.js"))]
(javascript-file nil url url ["constants-table"] ["cljs.core"] nil nil)))]
required-cljs
inputs)))))
(defn preamble-from-paths [paths]
(when-let [missing (seq (remove io/resource paths))]
(ana/warning :preamble-missing @env/*compiler* {:missing (sort missing)}))
(let [resources (remove nil? (map io/resource paths))]
(str (string/join "\n" (map slurp resources)) "\n")))
(defn make-preamble [{:keys [target preamble hashbang]}]
(str (when (and (= :nodejs target) (not (false? hashbang)))
(str "#!" (or hashbang "/usr/bin/env node") "\n"))
(when preamble (preamble-from-paths preamble))))
(comment
(add-dependencies {} "goog.provide('test.app');\ngoog.require('cljs.core');")
(add-dependencies {} "goog.provide('test.app');\ngoog.require('goog.array');")
(add-dependencies {} (str "goog.provide('test.app');\n"
"goog.require('goog.array');\n"
"goog.require('clojure.set');"))
(add-dependencies {:libs ["closure/library/third_party/closure"]}
(str "goog.provide('test.app');\n"
"goog.require('goog.array');\n"
"goog.require('goog.dom.query');"))
(add-dependencies {:foreign-libs [{:file "samples/hello/src/hello/core.cljs"
:provides ["example.lib"]}]}
(str "goog.provide('test.app');\n"
"goog.require('example.lib');\n"))
add dependencies to a JavaScriptFile record
(add-dependencies {} (javascript-file false
(deps/to-url "samples/hello/src/hello/core.cljs")
["hello.core"]
["goog.array"]))
)
(defmulti javascript-name class)
(defmethod javascript-name URL [^URL url]
(if url (.getPath url) "cljs/user.js"))
(defmethod javascript-name String [s]
(if-let [name (first (deps/-provides s))] name "cljs/user.js"))
(defmethod javascript-name JavaScriptFile [js] (javascript-name (deps/-url js)))
(defn build-provides
"Given a vector of provides, builds required goog.provide statements"
[provides]
(apply str (map #(str "goog.provide('" % "');\n") provides)))
(defmethod js-source-file JavaScriptFile [_ js]
(when-let [url (deps/-url js)]
(js-source-file (javascript-name url) (io/input-stream url))))
(defn optimize
"Use the Closure Compiler to optimize one or more JavaScript files."
[opts & sources]
(let [closure-compiler (make-closure-compiler)
^List externs (load-externs opts)
compiler-options (make-options opts)
sources (if (= :whitespace (:optimizations opts))
(cons "var CLOSURE_NO_DEPS = true;" sources)
sources)
^List inputs (map #(js-source-file (javascript-name %) %) sources)
result ^Result (.compile closure-compiler externs inputs compiler-options)
preamble (make-preamble opts)
preamble-line-count (- (count (.split #"\r?\n" preamble -1)) 1)]
(if (.success result)
(let [source (.toSource closure-compiler)]
(when-let [name (:source-map opts)]
(with-open [out (io/writer name)]
(.appendTo (.getSourceMap closure-compiler) out name))
(let [sm-json (-> (io/file name) slurp
(json/read-str :key-fn keyword))
closure-source-map (sm/decode-reverse sm-json)]
(loop [sources (seq sources)
relpaths {}
merged (sorted-map-by
(sm/source-compare
(remove nil?
(map (fn [source]
(if-let [^URL source-url (:source-url source)]
(.getPath source-url)
(if-let [^URL url (:url source)]
(.getPath url))))
sources))))]
(if sources
(let [source (first sources)]
(recur
(next sources)
(let [{:keys [provides source-url]} source]
(if (and provides source-url)
(assoc relpaths (.getPath ^URL source-url)
(util/ns->relpath (first provides)))
relpaths))
(if-let [url (:url source)]
(let [path (.getPath ^URL url)]
(if-let [compiled (get-in @env/*compiler* [::comp/compiled-cljs path])]
(if-let [source-url (:source-url source)]
(assoc merged (.getPath ^URL source-url)
(sm/merge-source-maps
(:source-map compiled)
(get closure-source-map path)))
merged)
(assoc merged path (get closure-source-map path))))
merged)))
(spit
(io/file name)
(sm/encode merged
{:preamble-line-count (+ preamble-line-count
(or (:foreign-deps-line-count opts) 0))
:lines (+ (:lineCount sm-json) preamble-line-count 2)
:file (:file sm-json)
:output-dir (util/output-directory opts)
:source-map (:source-map opts)
:source-map-path (:source-map-path opts)
:source-map-timestamp (:source-map-timestamp opts)
:source-map-pretty-print (:source-map-pretty-print opts)
:relpaths relpaths}))))))
source)
(report-failure result))))
(comment
optimize JavaScript strings
(optimize {:optimizations :whitespace} "var x = 3 + 2; alert(x);")
= > " var x=3 + 2;alert(x ) ; "
(optimize {:optimizations :simple} "var x = 3 + 2; alert(x);")
= > " var x=5;alert(x ) ; "
(optimize {:optimizations :advanced} "var x = 3 + 2; alert(x);")
(optimize {:optimizations :simple} (-compile '(def x 3) {}))
(println (->> (-compile "samples/hello/src" {})
(apply add-dependencies {})
(apply optimize {:optimizations :simple :pretty-print true})))
)
The result of a build is always a single string of JavaScript . The
describing where the JavaScript files are on disk and their
Unoptimized mode is faster because the Closure Compiler is not
script tags to the hosting HTML file : one which pulls in Closure
(defn ^String path-relative-to
"Generate a string which is the path to the input IJavaScript relative
to the specified base file."
[^File base input]
(let [base-path (util/path-seq (.getCanonicalPath base))
input-path (util/path-seq (.getCanonicalPath (io/file ^URL (deps/-url input))))
count-base (count base-path)
common (count (take-while true? (map #(= %1 %2) base-path input-path)))
prefix (repeat (- count-base common 1) "..")]
(if (= count-base common)
(util/to-path (concat prefix (drop common input-path)) "/"))))
(defn add-dep-string
"Return a goog.addDependency string for an input."
[opts input]
(letfn [(ns-list [coll] (when (seq coll) (apply str (interpose ", " (map #(str "'" (comp/munge %) "'") coll)))))]
(str "goog.addDependency(\""
(path-relative-to
(io/file (util/output-directory opts) "goog" "base.js") input)
"\", ["
(ns-list (deps/-provides input))
"], ["
(ns-list (deps/-requires input))
"]);")))
(defn deps-file
"Return a deps file string for a sequence of inputs."
[opts sources]
(apply str (interpose "\n" (map #(add-dep-string opts %) sources))))
(comment
(path-relative-to (io/file "out/goog/base.js") {:url (deps/to-url "out/cljs/core.js")})
(add-dep-string {} {:url (deps/to-url "out/cljs/core.js") :requires ["goog.string"] :provides ["cljs.core"]})
(deps-file {} [{:url (deps/to-url "out/cljs/core.js") :requires ["goog.string"] :provides ["cljs.core"]}])
)
(defn output-one-file [{:keys [output-to] :as opts} js]
(cond
(nil? output-to) js
(string? output-to)
(spit output-to js)
:else (println js)))
(defn output-deps-file [opts sources]
(output-one-file opts (deps-file opts sources)))
(defn output-main-file [opts]
(let [asset-path (or (:asset-path opts)
(util/output-directory opts))]
(case (:target opts)
:nodejs
(output-one-file opts
(str "var path = require(\"path\");\n"
"try {\n"
" require(\"source-map-support\").install();\n"
"} catch(err) {\n"
"}\n"
"require(path.join(path.resolve(\".\"),\"" asset-path "\",\"goog\",\"bootstrap\",\"nodejs.js\"));\n"
"require(path.join(path.resolve(\".\"),\"" asset-path "\",\"cljs_deps.js\"));\n"
"goog.require(\"" (comp/munge (:main opts)) "\");\n"
"goog.require(\"cljs.nodejscli\");\n"))
(output-one-file opts
(str "if(typeof goog == \"undefined\") document.write('<script src=\"" asset-path "/goog/base.js\"></script>');\n"
"document.write('<script src=\"" asset-path "/cljs_deps.js\"></script>');\n"
"document.write('<script>if (typeof goog != \"undefined\") { goog.require(\"" (comp/munge (:main opts))
(defn ^String rel-output-path
"Given an IJavaScript which is either in memory, in a jar file,
or is a foreign lib, return the path relative to the output
directory."
[js]
(let [url (deps/-url js)]
(cond
url
(if (deps/-foreign? js)
(util/get-name url)
(path-from-jarfile url))
(string? js)
(let [digest (MessageDigest/getInstance "SHA-1")]
(.reset digest)
(.update digest (.getBytes ^String js "utf8"))
(str
(->> (DatatypeConverter/printHexBinary (.digest digest))
(take 7)
(apply str))
".js"))
:else (str (random-string 5) ".js"))))
(defn write-javascript
"Write or copy a JavaScript file to output directory. Only write if the file
does not already exist. Return IJavaScript for the file on disk at the new
location."
[opts js]
(let [out-dir (io/file (util/output-directory opts))
out-name (rel-output-path js)
out-file (io/file out-dir out-name)
ijs {:url (deps/to-url out-file)
:requires (deps/-requires js)
:provides (deps/-provides js)
:group (:group js)}]
(when-not (.exists out-file)
(util/mkdirs out-file)
(spit out-file (deps/-source js)))
(if (map? js)
(merge js ijs)
ijs)))
(defn write-js?
"Returns true if IJavaScript instance needs to be written/copied to output
directory. True when in memory, in a JAR, or if foreign library."
[js]
(let [url ^URL (deps/-url js)]
(or (not url)
(= (.getProtocol url) "jar")
(deps/-foreign? js))))
(defn source-on-disk
"Ensure that the given IJavaScript exists on disk in the output directory.
Return updated IJavaScript with the new location if necessary."
[opts js]
(if (write-js? js)
(write-javascript opts js)
(let [out-file (if-let [ns (and (:source-map opts) (first (:provides js)))]
(io/file (io/file (util/output-directory opts))
(util/ns->relpath ns)))
source-url (:source-url js)]
(when (and out-file source-url
(or (not (.exists ^File out-file))
(> (.lastModified (io/file source-url))
(.lastModified out-file))))
(spit out-file (slurp source-url)))
js)))
(comment
(write-javascript {} "goog.provide('demo');\nalert('hello');\n")
(source-on-disk {}
{:url (deps/goog-resource "goog/base.js")
:source (with-open [reader (io/reader (deps/goog-resource "goog/base.js"))]
(slurp reader))})
(source-on-disk {} {:url (io/resource "cljs/core.cljs")})
)
(defn output-unoptimized
"Ensure that all JavaScript source files are on disk (not in jars),
write the goog deps file including only the libraries that are being
used and write the deps file for the current project.
The deps file for the current project will include third-party
libraries."
[opts & sources]
(let [disk-sources (remove #(= (:group %) :goog)
(map #(source-on-disk opts %) sources))
goog-deps (io/file (util/output-directory opts)
"goog" "deps.js")
main (:main opts)]
(util/mkdirs goog-deps)
(spit goog-deps (slurp (io/resource "goog/deps.js")))
(if main
(do
(output-deps-file
(assoc opts :output-to
(str (util/output-directory opts)
File/separator "cljs_deps.js"))
disk-sources)
(output-main-file opts))
(output-deps-file opts disk-sources))))
(comment
(output-unoptimized {} "goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n")
(apply output-unoptimized {}
(add-dependencies {}
"goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n"))
(apply output-unoptimized {}
(add-dependencies {:libs ["closure/library/third_party/closure"]}
"goog.provide('test');\ngoog.require('cljs.core');\ngoog.require('goog.dom.query');\n"))
output unoptimized and write file to ' out / test.js '
(output-unoptimized {:output-to "out/test.js"}
"goog.provide('test');\ngoog.require('cljs.core');\nalert('hello');\n")
)
(defn get-upstream-deps*
"returns a merged map containing all upstream dependencies defined
by libraries on the classpath. Should be run in the main thread. If
not, pass (java.lang.ClassLoader/getSystemClassLoader) to use the
system classloader."
([]
(get-upstream-deps* (. (Thread/currentThread) (getContextClassLoader))))
([classloader]
(let [upstream-deps (map #(read-string (slurp %)) (enumeration-seq (. classloader (findResources "deps.cljs"))))]
#_(doseq [dep upstream-deps]
(println (str "Upstream deps.cljs found on classpath. " dep " This is an EXPERIMENTAL FEATURE and is not guarenteed to remain stable in future versions.")))
(apply merge-with concat upstream-deps))))
(def get-upstream-deps (memoize get-upstream-deps*))
(defn add-header [opts js]
(str (make-preamble opts) js))
(defn foreign-deps-str [opts sources]
(letfn [(to-js-str [ijs]
(let [url (or (and (= (:optimizations opts) :advanced)
(:url-min ijs))
(:url ijs))]
(slurp url)))]
(str (string/join "\n" (map to-js-str sources)) "\n")))
(defn add-wrapper [{:keys [output-wrapper] :as opts} js]
(if output-wrapper
(str ";(function(){\n" js "\n})();\n")
js))
(defn add-source-map-link [{:keys [source-map output-to] :as opts} js]
(if source-map
(if (= output-to :print)
(str js "\n//# sourceMappingURL=" source-map)
(str js "\n//# sourceMappingURL=" (path-relative-to (io/file output-to) {:url source-map})))
js))
(defn absolute-path? [path]
(.isAbsolute (io/file path)))
(defn absolute-parent [path]
(.getParent (.getAbsoluteFile (io/file path))))
(defn in-same-dir? [path-1 path-2]
"Checks that path-1 and path-2 are siblings in the same logical directory."
(= (absolute-parent path-1)
(absolute-parent path-2)))
(defn same-or-subdirectory-of? [dir path]
"Checks that path names a file or directory that is the dir or a subdirectory there of."
(let [dir-path (.getAbsolutePath (io/file dir))
path-path (.getAbsolutePath (io/file path))]
(.startsWith path-path dir-path)))
(defn check-output-to [{:keys [output-to] :as opts}]
(when (contains? opts :output-to)
(assert (or (string? output-to)
(= :print output-to))
(format ":output-to %s must specify a file or be :print"
(pr-str output-to))))
true)
(defn check-output-dir [{:keys [output-dir] :as opts}]
(when (contains? opts :output-dir)
(assert (string? output-dir)
(format ":output-dir %s must specify a directory"
(pr-str output-dir))))
true)
(defn check-source-map [{:keys [output-to source-map output-dir] :as opts}]
"When :source-map is specified in opts, "
(when (and (contains? opts :source-map)
(not (= (:optimizations opts) :none)))
(assert (and (contains? opts :output-to)
(contains? opts :output-dir))
":source-map cannot be specied without also specifying :output-to and :output-dir if optimization setting applied")
(assert (string? source-map)
(format ":source-map %s must specify a file in the same directory as :output-to %s if optimization setting applied"
(pr-str source-map)
(pr-str output-to)))
(assert (in-same-dir? source-map output-to)
(format ":source-map %s must specify a file in the same directory as :output-to %s if optimization setting applied"
(pr-str source-map)
(pr-str output-to)))
(assert (same-or-subdirectory-of? (absolute-parent output-to) output-dir)
(format ":output-dir %s must specify a directory in :output-to's parent %s if optimization setting applied"
(pr-str output-dir)
(pr-str (absolute-parent output-to)))))
true)
(defn check-source-map-path [{:keys [source-map-path] :as opts}]
(when (contains? opts :source-map-path)
(assert (string? source-map-path)
(format ":source-map-path %s must be a directory"
source-map-path))
(when-not (= (:optimizations opts) :none)
(assert (and (contains? opts :output-to)
(contains? opts :source-map))
":source-map-path cannot be specified without also specifying :output-to and :source-map if optimization setting applied")))
true)
(defn check-output-wrapper [{:keys [output-wrapper optimizations]}]
(assert (not (and output-wrapper (= :whitespace optimizations)))
":output-wrapper cannot be combined with :optimizations :whitespace"))
(defn check-node-target [{:keys [target optimizations] :as opts}]
(assert (not (and (= target :nodejs) (= optimizations :whitespace)))
(format ":nodejs target not compatible with :whitespace optimizations")))
(defn foreign-source? [js]
(and (satisfies? deps/IJavaScript js)
(deps/-foreign? js)))
(defn build
"Given a source which can be compiled, produce runnable JavaScript."
([source opts]
(build source opts
(if-not (nil? env/*compiler*)
env/*compiler*
(env/default-compiler-env opts))))
([source opts compiler-env]
(env/with-compiler-env compiler-env
(let [compiler-stats (:compiler-stats opts)
ups-deps (get-upstream-deps)
all-opts (-> opts
(assoc
:ups-libs (:libs ups-deps)
:ups-foreign-libs (:foreign-libs ups-deps)
:ups-externs (:externs ups-deps))
(update-in [:preamble] #(into (or % []) ["cljs/imul.js"])))
emit-constants (or (and (= (:optimizations opts) :advanced)
(not (false? (:optimize-constants opts))))
(:optimize-constants opts))]
(check-output-to opts)
(check-output-dir opts)
(check-source-map opts)
(check-source-map-path opts)
(check-output-wrapper opts)
(check-node-target opts)
(swap! compiler-env
#(-> %
(assoc-in [:opts :emit-constants] emit-constants)
(assoc :target (:target opts))
(assoc :js-dependency-index (deps/js-dependency-index all-opts))))
(binding [comp/*dependents* (when-not (false? (:recompile-dependents opts))
(atom {:recompile #{} :visited #{}}))
ana/*cljs-static-fns*
(or (and (= (:optimizations opts) :advanced)
(not (false? (:static-fns opts))))
(:static-fns opts)
ana/*cljs-static-fns*)
*assert* (not= (:elide-asserts opts) true)
ana/*load-tests* (not= (:load-tests opts) false)
ana/*cljs-warnings*
(let [warnings (opts :warnings true)]
(merge
ana/*cljs-warnings*
(if (or (true? warnings)
(false? warnings))
(zipmap
[:unprovided :undeclared-var
:undeclared-ns :undeclared-ns-form]
(repeat warnings))
warnings)))
ana/*verbose* (:verbose opts)]
(let [compiled (util/measure compiler-stats
"Compile basic sources"
(doall (-compile source all-opts)))
_ (when emit-constants
(comp/emit-constants-table-to-file
(::ana/constant-table @env/*compiler*)
(str (util/output-directory all-opts) "/constants_table.js")))
js-sources (util/measure compiler-stats
"Add dependencies"
(doall
(concat
(apply add-dependencies all-opts
(concat
(if (coll? compiled) compiled [compiled])
(when (= :nodejs (:target all-opts))
[(-compile (io/resource "cljs/nodejs.cljs") all-opts)])))
(when (= :nodejs (:target all-opts))
[(-compile (io/resource "cljs/nodejscli.cljs") all-opts)]))))
optim (:optimizations all-opts)
ret (if (and optim (not= optim :none))
(let [fdeps-str (foreign-deps-str all-opts
(filter foreign-source? js-sources))
all-opts (assoc all-opts
:foreign-deps-line-count
(- (count (.split #"\r?\n" fdeps-str -1)) 1))]
(when-let [fname (:source-map all-opts)]
(assert (string? fname)
(str ":source-map must name a file when using :whitespace, "
":simple, or :advanced optimizations"))
(doall (map #(source-on-disk all-opts %) js-sources)))
(->>
(util/measure compiler-stats
"Optimize sources"
(apply optimize all-opts
(remove foreign-source? js-sources)))
(add-wrapper all-opts)
(add-source-map-link all-opts)
(str fdeps-str)
(add-header all-opts)
(output-one-file all-opts)))
(apply output-unoptimized all-opts js-sources))]
(when (and (= (:target opts) :nodejs)
(#{:none :whitespace} (:optimizations opts)))
(let [outfile (io/file (util/output-directory opts)
"goog" "bootstrap" "nodejs.js")]
(util/mkdirs outfile)
(spit outfile (slurp (io/resource "cljs/bootstrap_node.js")))))
ret))))))
(defn watch
"Given a source directory, produce runnable JavaScript. Watch the source
directory for changes rebuliding when necessary. Takes the same arguments as
cljs.closure/build."
([source opts]
(watch source opts
(if-not (nil? env/*compiler*)
env/*compiler*
(env/default-compiler-env opts))))
([source opts compiler-env]
(let [path (Paths/get (.toURI (io/file source)))
fs (.getFileSystem path)
service (.newWatchService fs)]
(letfn [(buildf []
(let [start (System/nanoTime)]
(build source opts compiler-env)
(println "... done. Elapsed"
(/ (unchecked-subtract (System/nanoTime) start) 1e9) "seconds")
(flush)))
(watch-all [^Path root]
(Files/walkFileTree root
(reify
FileVisitor
(preVisitDirectory [_ dir _]
(let [^Path dir dir]
(. dir
(register service
(into-array [StandardWatchEventKinds/ENTRY_CREATE
StandardWatchEventKinds/ENTRY_DELETE
StandardWatchEventKinds/ENTRY_MODIFY])
(into-array [SensitivityWatchEventModifier/HIGH]))))
FileVisitResult/CONTINUE)
(postVisitDirectory [_ dir exc]
FileVisitResult/CONTINUE)
(visitFile [_ file attrs]
FileVisitResult/CONTINUE)
(visitFileFailed [_ file exc]
FileVisitResult/CONTINUE))))]
(println "Building...")
(flush)
(buildf)
(println "Watching path:" source)
(watch-all path)
(loop [key nil]
(when (or (nil? key) (. ^WatchKey key reset))
(let [key (. service take)]
(when (some (fn [^WatchEvent e]
(or (.. (. e context) toString (endsWith "cljs"))
(.. (. e context) toString (endsWith "js"))))
(seq (.pollEvents key)))
(println "Change detected, recompiling...")
(flush)
(try
(buildf)
(catch Exception e
(.printStackTrace e))))
(recur key))))))))
(comment
(watch "samples/hello/src"
{:optimizations :none
:output-to "samples/hello/out/hello.js"
:output-dir "samples/hello/out"
:cache-analysis true
:source-map true
:verbose true})
)
Utilities
(defn output-directory [opts]
(util/output-directory opts))
(defn parse-js-ns [f]
(deps/parse-js-ns (line-seq (io/reader f))))
(defn ^File src-file->target-file
([src] (src-file->target-file src nil))
([src opts]
(util/to-target-file
(when (:output-dir opts)
(util/output-directory opts))
(ana/parse-ns src))))
(defn ^String src-file->goog-require
([src] (src-file->goog-require src {:wrap true}))
([src {:keys [wrap all-provides :as options]}]
(let [goog-ns
(case (util/ext src)
"cljs" (comp/munge (:ns (ana/parse-ns src)))
"js" (cond-> (:provides (parse-js-ns src))
(not all-provides) first)
(throw
(IllegalArgumentException.
(str "Can't create goog.require expression for " src))))]
(if (and (not all-provides) wrap)
(str "goog.require(\"" goog-ns "\");")
(if (vector? goog-ns)
goog-ns
(str goog-ns))))))
(comment
(println (build '[(ns hello.core)
(defn ^{:export greet} greet [n] (str "Hola " n))
(defn ^:export sum [xs] 42)]
{:optimizations :simple :pretty-print true}))
(build "samples/hello/src" {:optimizations :advanced})
(build "samples/hello/src" {:optimizations :advanced :output-to "samples/hello/hello.js"})
(build "samples/hello/src" {:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"})
(build '[(ns hello.core)
(defn ^{:export greet} greet [n] (str "Hola " n))
(defn ^:export sum [xs] 42)]
{:output-dir "samples/hello/out" :output-to "samples/hello/hello.js"})
)
|
5c86a5ca4723063b61a3a944c9bf2aaa8cf18728f6fa33a9615c7491b6094514 | amosr/folderol | Conduit.hs | module Bench.Append2.Conduit where
import Bench.Plumbing.Conduit
import qualified Data.Conduit as C
runAppend2 :: FilePath -> FilePath -> FilePath -> IO Int
runAppend2 in1 in2 out =
C.runConduit (sources C..| sinks)
where
sources = sourceFile in1 >> sourceFile in2
sinks = do
(i,_) <- C.fuseBoth (counting 0) (sinkFile out)
return i
counting i = do
e <- C.await
case e of
Nothing -> return i
Just v -> do
C.yield v
counting (i + 1)
| null | https://raw.githubusercontent.com/amosr/folderol/9b8c0cd30cfb798dadaa404cc66404765b1fc4fe/bench/Bench/Append2/Conduit.hs | haskell | module Bench.Append2.Conduit where
import Bench.Plumbing.Conduit
import qualified Data.Conduit as C
runAppend2 :: FilePath -> FilePath -> FilePath -> IO Int
runAppend2 in1 in2 out =
C.runConduit (sources C..| sinks)
where
sources = sourceFile in1 >> sourceFile in2
sinks = do
(i,_) <- C.fuseBoth (counting 0) (sinkFile out)
return i
counting i = do
e <- C.await
case e of
Nothing -> return i
Just v -> do
C.yield v
counting (i + 1)
| |
e9474a2cfba18bf6f9cbdeea324a234b77a3d55ff189fd69c83f518410eaaa39 | ejgallego/coq-serapi | ser_proof_bullet.ml | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * INRIA , CNRS and contributors - Copyright 1999 - 2018
(* <O___,, * (see CREDITS file for the list of authors) *)
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
(************************************************************************)
(* Coq serialization API/Plugin *)
Copyright 2016 - 2018 MINES ParisTech -- Dual License LGPL 2.1 / GPL3 +
(************************************************************************)
(* Status: Very Experimental *)
(************************************************************************)
open Ppx_hash_lib.Std.Hash.Builtin
open Ppx_compare_lib.Builtin
open Sexplib.Conv
type t =
[%import: Proof_bullet.t]
[@@deriving sexp,yojson,hash,compare]
| null | https://raw.githubusercontent.com/ejgallego/coq-serapi/61d2a5c092c1918312b8a92f43a374639d1786f9/serlib/ser_proof_bullet.ml | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
<O___,, * (see CREDITS file for the list of authors)
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
**********************************************************************
Coq serialization API/Plugin
**********************************************************************
Status: Very Experimental
********************************************************************** | v * INRIA , CNRS and contributors - Copyright 1999 - 2018
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
Copyright 2016 - 2018 MINES ParisTech -- Dual License LGPL 2.1 / GPL3 +
open Ppx_hash_lib.Std.Hash.Builtin
open Ppx_compare_lib.Builtin
open Sexplib.Conv
type t =
[%import: Proof_bullet.t]
[@@deriving sexp,yojson,hash,compare]
|
1b635e94e2b825ff6b8546f0dfac7bfd9724169d95bcbc96f6f74527f95f1ba7 | OCamlPro/ezjs | ezjs_tyxml.ml | This file is part of Learn - OCaml .
*
* Copyright ( C ) 2015 OCamlPro : , .
* Copyright ( C ) 2012 , ( for the ' Manip ' module )
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Library General Public License for more details .
*
* You should have received a copy of the GNU Library General Public License
* along with this program . If not , see < / > .
*
* Copyright (C) 2015 OCamlPro: Grégoire Henry, Çağdaş Bozman.
* Copyright (C) 2012 Vincent Balat, Benedikt Becker (for the 'Manip' module)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program. If not, see </>. *)
(* Short aliases for the Js_of_ocaml and Tyxml_js modules used below. *)
module Js = Js_of_ocaml.Js
module Url = Js_of_ocaml.Url
module Dom_html = Js_of_ocaml.Dom_html
module Firebug = Js_of_ocaml.Firebug
module File = Js_of_ocaml.File
module Dom = Js_of_ocaml.Dom
module Typed_array = Js_of_ocaml.Typed_array
module Regexp = Js_of_ocaml.Regexp
module Html = Js_of_ocaml_tyxml.Tyxml_js.Html5
module Of_dom = Js_of_ocaml_tyxml.Tyxml_js.Of_dom
module To_dom = Js_of_ocaml_tyxml.Tyxml_js.To_dom
module Xml = Js_of_ocaml_tyxml.Tyxml_js.Xml

(* A Tyxml element, re-exported for convenience. *)
type 'a elt = 'a Html.elt

open Js

(* [open Js] shadows [Error] with the JavaScript error exception; keep it
   reachable under an unambiguous name. *)
exception JsError = Error

(* Re-export [Stdlib.result] since [open Js] brings its own names into scope. *)
type ('a, 'b) result = ('a, 'b) Stdlib.result = Ok of 'a | Error of 'b

(* The current document and window. *)
let doc = Dom_html.document
let window = Dom_html.window
(* let loc = Js.Unsafe.variable "location" *)
(* [alert s] shows a blocking browser alert dialog with message [s]. *)
let alert s = window##(alert (string s))
(* [confirm s] shows a blocking OK/Cancel dialog; returns [true] on OK. *)
let confirm s = to_bool (window##(confirm (string s)))
(* Log an arbitrary JavaScript value to the browser console at the
   corresponding severity level. *)
let js_log obj = Firebug.console##log obj
let js_debug obj = Firebug.console##debug obj
let js_warn obj = Firebug.console##warn obj
let js_error obj = Firebug.console##error obj
(** [log fmt ...] formats its arguments like [Printf.printf] and writes the
    resulting string to the browser console at "log" level.

    Implementation note: the previous version funnelled everything through
    the shared [Format.str_formatter] buffer, which is not reentrant — any
    content left in (or added to) that global buffer by other code would be
    flushed into the message.  [Format.kasprintf] formats into a private
    buffer instead, with identical formatting semantics. *)
let log fmt =
  Format.kasprintf (fun s -> Firebug.console##(log (string s))) fmt

(** Like {!log}, at "debug" level. *)
let debug fmt =
  Format.kasprintf (fun s -> Firebug.console##(debug (string s))) fmt

(** Like {!log}, at "warn" level. *)
let warn fmt =
  Format.kasprintf (fun s -> Firebug.console##(warn (string s))) fmt

(** Like {!log}, at "error" level. *)
let error fmt =
  Format.kasprintf (fun s -> Firebug.console##(error (string s))) fmt
(*
let is_hidden div =
div##style##display = string "none"
*)
let reload () = window##.location##reload
module Manip = struct
  (* [option_map f o] maps [f] over an option (local helper; predates
     [Option.map] availability). *)
  let option_map f = function None -> None | Some x -> Some (f x)

  (* Raised by every Manip operation that receives an unsuitable node. *)
  exception Error of string

  (* [manip_error fmt ...] logs the formatted message to the console, then
     raises {!Error} with the same message. *)
  let manip_error fmt =
    Format.ksprintf
      (fun s -> debug "%s" s; raise (Error s))
      fmt

  let id x = x

  (* Convert a Tyxml element to its underlying DOM node. *)
  let get_node = Html.toelt

  (* [get_elt name elt] coerces [elt] to an HTML element, or raises
     {!Error} mentioning the calling operation [name]. *)
  let get_elt name elt : Dom_html.element t =
    Opt.case
      (Dom_html.CoerceTo.element (Html.toelt elt))
      (fun () ->
        manip_error
          "Cannot call %s on a node which is not an element"
          name)
      id

  (* The JavaScript [HTMLDocument] constructor, used for [instanceof]
     tests below. *)
  let html_doc_constr : Dom_html.document constr =
    Unsafe.global##._HTMLDocument

  (* [document elt] walks up the parent chain from [elt] until it reaches
     the owning [HTMLDocument]; if the chain ends first, the last node
     reached is returned (unsafely cast) instead. *)
  let document elt =
    let elt = get_elt "document" elt in
    let rec loop (elt : Dom.node t) =
      if instanceof elt html_doc_constr
      then (Obj.magic elt : Dom_html.document t)  (* safe: instanceof checked *)
      else
        Opt.case
          (elt##.parentNode)
          (fun () -> (Obj.magic elt : Dom_html.document t))
          loop
    in
    loop (elt : Dom_html.element t :> Dom.node t)

  (* [window elt] is the window owning [elt]'s document
     (via the document's [defaultView] property). *)
  let window elt =
    let doc = document elt in
    (Obj.magic doc)##.defaultView

  (* [clone ?deep elt] clones the node; [deep] clones the subtree too. *)
  let clone ?(deep=false) elt =
    let elt = get_elt "clone" elt in
    Obj.magic (elt##(cloneNode (bool deep)))

  (* [setInnerHtml elt s] replaces [elt]'s content with the raw HTML [s].
     NOTE(review): [s] is injected unescaped — callers must not pass
     untrusted input. *)
  let setInnerHtml elt s =
    let elt = get_elt "setInnerHtml" elt in
    elt##.innerHTML := string s
  (* [addClass elt s] adds CSS class [s] to [elt]'s classList. *)
  let addClass elt s =
    let elt = get_elt "addClass" elt in
    elt##.classList##(add (string s))

  (* [removeClass elt s] removes CSS class [s] from [elt]'s classList. *)
  let removeClass elt s =
    let elt = get_elt "removeClass" elt in
    elt##.classList##(remove (string s))

  (* [containsClass elt s] tests whether [elt] carries CSS class [s]. *)
  let containsClass elt s =
    let elt = get_elt "containsClass" elt in
    to_bool elt##.classList##(contains (string s))

  (* [setAttribute elt key value] sets HTML attribute [key] to [value]. *)
  let setAttribute elt key value =
    let elt = get_elt "setAttribute" elt in
    elt##(setAttribute (string key) (string value))

  (* [removeAttribute elt key] removes HTML attribute [key]. *)
  let removeAttribute elt key =
    let elt = get_elt "removeAttribute" elt in
    elt##(removeAttribute (string key))
  (* [raw_appendChild ?before node elt2] appends the DOM node of [elt2] to
     [node]; when [before] is given, inserts it just before that element
     instead. *)
  let raw_appendChild ?before node elt2 =
    match before with
    | None -> ignore(node##(appendChild (get_node elt2)))
    | Some elt3 ->
      let node3 = get_node elt3 in
      ignore(node##(insertBefore (get_node elt2) (some node3)))

  (* Same as {!raw_appendChild}, for a list of elements (kept in order). *)
  let raw_appendChildren ?before node elts =
    match before with
    | None ->
      List.iter (fun elt2 -> ignore(node##(appendChild (get_node elt2)))) elts
    | Some elt3 ->
      let node3 = get_node elt3 in
      List.iter (fun elt2 -> ignore(node##(insertBefore (get_node elt2) (some node3)))) elts

  (* [raw_insertChildAfter node1 node2 elt3] inserts [elt3] right after the
     child [node2] of [node1]; appends when [node2] is the last child. *)
  let raw_insertChildAfter node1 node2 elt3 =
    Opt.case
      (node2##.nextSibling)
      (fun () ->
        ignore(node1##(appendChild (get_node elt3))))
      (fun node2 ->
        ignore(node1##(insertBefore (get_node elt3) (some node2))))
let raw_insertChildrenAfter node1 node2 elts =
Opt.case
(node2##.nextSibling)
(fun () ->
List.iter (fun elt3 ->
ignore(node1##(appendChild (get_node elt3)))))
(fun node2 ->
List.iter (fun elt3 ->
ignore(node1##(insertBefore (get_node elt3) (some node2)))))
elts
  (* [raw_removeChild node1 elt2] removes child [elt2] from [node1]. *)
  let raw_removeChild node1 elt2 =
    let node2 = get_node elt2 in
    ignore(node1##(removeChild node2))

  (* [raw_replaceChild node1 elt2 elt3] replaces child [elt3] of [node1]
     with [elt2]. *)
  let raw_replaceChild node1 elt2 elt3 =
    let node2 = get_node elt2 in
    ignore(node1##(replaceChild node2 (get_node elt3)))

  (* [raw_removeChildren node] removes every child of [node].  The child
     list is snapshotted first, since removing mutates the live NodeList. *)
  let raw_removeChildren node =
    let childrens = Dom.list_of_nodeList (node##.childNodes) in
    List.iter (fun c -> ignore(node##(removeChild c))) childrens

  (* [raw_replaceChildren node elts] replaces all children of [node] with
     [elts]. *)
  let raw_replaceChildren node elts =
    raw_removeChildren node;
    List.iter (fun elt -> ignore(node##(appendChild (get_node elt)))) elts
  (* [nth elt n] is the [n]-th child of [elt] as a Tyxml element, or [None]
     when the index is out of range or the child is not an element node. *)
  let nth elt n =
    let node = get_node elt in
    let res = Opt.bind (node##.childNodes##(item n)) (fun node ->
        Opt.map (Dom.CoerceTo.element node) (fun node ->
            Of_dom.of_element (Dom_html.element node)
          )
      ) in
    Opt.to_option res

  (* [by_id n] looks up the element with DOM id [n] in the current
     document; [None] when absent. *)
  let by_id n =
    let res = Opt.bind (Dom_html.window##.document##(getElementById (string n))) (fun node ->
        Opt.map (Dom.CoerceTo.element node) (fun node ->
            Of_dom.of_element (Dom_html.element node)
          )
      ) in
    Opt.to_option res

  (* [by_class n] is the list of elements carrying CSS class [n], in
     document order. *)
  let by_class n =
    let nl = Dom_html.window##.document##(getElementsByClassName (string n)) in
    Array.init (nl##.length) (fun i ->
        let node = nl##(item i) in
        (* [item i] cannot fail for i < length of a live collection. *)
        let node = Opt.get node (fun _ -> assert false) in
        Of_dom.of_element (Dom_html.element node)
      )
    |> Array.to_list

  (* [by_tag n] is the list of elements with tag name [n], in document
     order. *)
  let by_tag n =
    let nl = Dom_html.window##.document##(getElementsByTagName (string n)) in
    Array.init (nl##.length) (fun i ->
        let node = nl##(item i) in
        let node = Opt.get node (fun _ -> assert false) in
        Of_dom.of_element (Dom_html.element node)
      )
    |> Array.to_list
  (* [childLength elt] is the number of child nodes (of any kind). *)
  let childLength elt =
    let node = get_node elt in
    node##.childNodes##.length

  (* Tyxml-level wrappers around the [raw_*] primitives above. *)
  let appendChild ?before elt1 elt2 =
    let node = get_node elt1 in
    raw_appendChild ?before node elt2

  let appendChildren ?before elt1 elts =
    let node = get_node elt1 in
    raw_appendChildren ?before node elts

  let insertChildAfter elt1 elt2 elt3 =
    let node1 = get_node elt1 in
    let node2 = get_node elt2 in
    raw_insertChildAfter node1 node2 elt3

  let insertChildrenAfter elt1 elt2 elts =
    let node1 = get_node elt1 in
    let node2 = get_node elt2 in
    raw_insertChildrenAfter node1 node2 elts

  let removeChild elt1 elt2 =
    let node1 = get_node elt1 in
    raw_removeChild node1 elt2

  (* [removeSelf elt] detaches [elt] from its parent element, if it has
     one; a no-op otherwise. *)
  let removeSelf elt =
    let node = get_node elt in
    let res = Opt.bind (node##.parentNode) (fun node ->
        Opt.map (Dom.CoerceTo.element node) (fun node ->
            Of_dom.of_element (Dom_html.element node)
          )
      ) in
    Opt.iter res (fun p -> removeChild p elt)

  (* [appendChildFirst p c] inserts [c] as the first child of [p]. *)
  let appendChildFirst p c =
    let before = nth p 0 in
    appendChild ?before p c

  let replaceChild elt1 elt2 elt3 =
    let node1 = get_node elt1 in
    raw_replaceChild node1 elt2 elt3

  let removeChildren elt =
    let node = get_node elt in
    raw_removeChildren node

  let replaceChildren elt elts =
    let node = get_node elt in
    raw_replaceChildren node elts

  (* [children elt] is the list of child nodes as Tyxml elements. *)
  let children elt =
    let node = get_node elt in
    List.map Html.tot (Dom.list_of_nodeList (node##.childNodes))

  (* [parent elt] is the parent node, if any, as a Tyxml element. *)
  let parent elt =
    let node = get_node elt in
    Opt.case (node##.parentNode)
      (fun () -> None)
      (fun elt -> Some (Html.tot elt))

  (* [appendToBody ?before elt2] appends [elt2] to the document body. *)
  let appendToBody ?before elt2 =
    let body = (Of_dom.of_body Dom_html.window##.document##.body) in
    appendChild ?before body elt2
  (* [get_elt_input name elt] coerces [elt] to an <input> element;
     fails (with [Failure]) mentioning the calling operation [name]. *)
  let get_elt_input name elt : Dom_html.inputElement t =
    Opt.case
      (Dom_html.CoerceTo.input (get_elt name elt))
      (fun () -> failwith (Printf.sprintf "Non 'input' node (%s)" name))
      id

  (* Same, for <select> elements. *)
  let get_elt_select name elt : Dom_html.selectElement t =
    Opt.case
      (Dom_html.CoerceTo.select (get_elt name elt))
      (fun () -> failwith (Printf.sprintf "Non 'select' node (%s)" name))
      id
let get_elt_textarea name elt : Dom_html.textAreaElement t =
Opt.case
(Dom_html.CoerceTo.textarea (get_elt name elt))
(fun () -> failwith (Printf.sprintf "Non element node (%s)" name))
id
let get_elt_img name elt : Dom_html.imageElement t =
Opt.case
(Dom_html.CoerceTo.img (get_elt name elt))
(fun () -> failwith (Printf.sprintf "Non element node (%s)" name))
id
let scrollIntoView ?(bottom = false) elt =
let elt = get_elt "Css.background" elt in
elt##(scrollIntoView (bool (not bottom)))
  (* Structural view of any DOM object exposing a [disabled] property. *)
  type disable = < disabled: bool t prop >

  (* [get_disable_elt name elt] unsafely casts [elt] to {!disable}, after
     checking that a [disabled] property actually exists. *)
  let get_disable_elt name elt : disable t =
    if undefined == (Unsafe.coerce @@ Html.toelt elt)##.disabled then
      manip_error
        "Cannot call %s on a node without a 'disable' property"
        name;
    Unsafe.coerce @@ Html.toelt elt

  (* [disable elt] / [enable elt] set / clear the [disabled] property. *)
  let disable elt =
    let elt = get_disable_elt "disable" elt in
    elt##.disabled := _true

  let enable elt =
    let elt = get_disable_elt "enable" elt in
    elt##.disabled := _false

  (* Structural view of any DOM object exposing a [focus] method. *)
  type focus = < focus: unit meth >

  let get_focus_elt name elt : focus t =
    if undefined == (Unsafe.coerce @@ Html.toelt elt)##.focus then
      manip_error
        "Cannot call %s on a node without a 'focus' property"
        name;
    Unsafe.coerce @@ Html.toelt elt

  (* [focus elt] gives keyboard focus to [elt]. *)
  let focus elt =
    let elt = get_focus_elt "focus" elt in
    elt##focus

  (* Structural view of any DOM object exposing a [blur] method. *)
  type blur = < blur: unit meth >

  let get_blur_elt name elt : blur t =
    if undefined == (Unsafe.coerce @@ Html.toelt elt)##.blur then
      manip_error
        "Cannot call %s on a node without a 'blur' property"
        name;
    Unsafe.coerce @@ Html.toelt elt

  (* [blur elt] removes keyboard focus from [elt]. *)
  let blur elt =
    let elt = get_blur_elt "blur" elt in
    elt##blur

  (* Structural view of any DOM object exposing a [value] property. *)
  type value = < value: js_string t prop >

  let get_value_elt name elt : value t =
    if undefined == (Unsafe.coerce @@ Html.toelt elt)##.value then
      manip_error
        "Cannot call %s on a node without a 'value' property"
        name;
    Unsafe.coerce @@ Html.toelt elt

  (* [value elt] reads the [value] property (e.g. of an input) as an
     OCaml string; [set_value elt s] writes it. *)
  let value elt =
    let elt = get_value_elt "value" elt in
    to_string elt##.value

  let set_value elt s =
    let elt = get_value_elt "value" elt in
    elt##.value := (string s)
  (* Structural view of any DOM object exposing a [files] list
     (file inputs). *)
  type files = < files: File.fileList t optdef readonly_prop >

  (* [get_files_elt name elt] unsafely casts [elt] to {!files}, after
     checking that a [files] property actually exists. *)
  let get_files_elt name elt : files t =
    if undefined == (Unsafe.coerce @@ Html.toelt elt)##.files then
      manip_error
        "Cannot call %s on a node without a 'files' property"
        name;
    Unsafe.coerce @@ Html.toelt elt
let files elt =
let elt = get_files_elt "files" elt in
let files = elt##.files in
Optdef.case files
(fun () -> [])
(fun files ->
let rec list_init n f = match n with
| i when i<=0 -> []
| i -> f (i-1) :: (list_init (i-1) f) in
let l = list_init (files##.length) (fun i -> files##(item i)) in
List.rev @@ List.fold_left (fun acc file ->
Opt.case file
(fun () -> acc)
(fun file -> file :: acc)) [] l)
  (* [upload_input ?btoa ?encoding elt post] reads every file selected in
     the file input [elt] with a [FileReader] and passes each file's
     content to [post]:
     - by default ([btoa = true]) the content is base64-encoded via the
       browser's [btoa] before being passed;
     - with [~btoa:false] the raw content string is passed;
     - with [~encoding:e] the file is read as text in encoding [e],
       otherwise as a binary string.
     Reading is asynchronous: [post] runs later, from the [onloadend]
     handler, once per file.  Always returns [true]. *)
  let upload_input ?(btoa=true) ?encoding elt post =
    let files = files elt in
    List.iter (fun file ->
        let reader = new%js File.fileReader in
        reader##.onloadend :=
          Dom.handler (fun _evt ->
              if reader##.readyState = File.DONE then
                Opt.case (File.CoerceTo.string (reader##.result))
                  (* [result] is a string for both read modes used below. *)
                  (fun () -> assert false)
                  (fun s ->
                     if not btoa then post (to_string s)
                     else
                       let s = to_string (Dom_html.window##(btoa s)) in
                       post s);
              _true
            );
        match encoding with
        | None -> reader##(readAsBinaryString file)
        | Some e -> reader##(readAsText_withEncoding file (Js.string e))
      ) files;
    true
  module Elt = struct
    (* The document body as a Tyxml element.  Evaluated once at module
       initialisation; in contexts without a document (web workers) the
       access raises and an unsafe [undefined] placeholder is stored
       instead — do not dereference [body] there. *)
    let body =
      try Of_dom.of_body (Dom_html.window##.document##.body)
      with _ -> Obj.magic undefined (* For workers... *)
    (* [active ()] is the document's currently focused element
       ([document.activeElement]). *)
    let active () =
      (Unsafe.coerce Dom_html.window##.document)##.activeElement
  end
module Ev = struct
type ('a, 'b) ev = 'a Html.elt -> ('b t -> bool) -> unit
type ('a,'b) ev_unit = 'a Html.elt -> ('b t -> unit) -> unit
let bool_cb f = Dom_html.handler (fun e -> bool (f e))
let onkeyup elt f =
let elt = get_elt "Ev.onkeyup" elt in
elt##.onkeyup := (bool_cb f)
let onkeydown elt f =
let elt = get_elt "Ev.onkeydown" elt in
elt##.onkeydown := (bool_cb f)
let onmouseup elt f =
let elt = get_elt "Ev.onmouseup" elt in
elt##.onmouseup := (bool_cb f)
let onmousedown elt f =
let elt = get_elt "Ev.onmousedown" elt in
elt##.onmousedown := (bool_cb f)
let onmouseout elt f =
let elt = get_elt "Ev.onmouseout" elt in
elt##.onmouseout := (bool_cb f)
let onmouseover elt f =
let elt = get_elt "Ev.onmouseover" elt in
elt##.onmouseover := (bool_cb f)
let onclick elt f =
let elt = get_elt "Ev.onclick" elt in
elt##.onclick := (bool_cb f)
let ondblclick elt f =
let elt = get_elt "Ev.ondblclick" elt in
elt##.ondblclick := (bool_cb f)
let onmousemove elt f =
let elt = get_elt "Ev.onmousemove" elt in
elt##.onmousemove := (bool_cb f)
let onload elt f =
let elt = get_elt_img "Ev.onload" elt in
elt##.onload := (bool_cb f)
let onerror elt f =
let elt = get_elt_img "Ev.onerror" elt in
elt##.onerror := (bool_cb f)
let onabort elt f =
let elt = get_elt_img "Ev.onabort" elt in
elt##.onabort := (bool_cb f)
let onfocus elt f =
let elt = get_elt_input "Ev.onfocus" elt in
elt##.onfocus := (bool_cb f)
let onblur elt f =
let elt = get_elt_input "Ev.onblur" elt in
elt##.onblur := (bool_cb f)
let onfocus_textarea elt f =
let elt = get_elt_textarea "Ev.onfocus" elt in
elt##.onfocus := (bool_cb f)
let onblur_textarea elt f =
let elt = get_elt_textarea "Ev.onblur" elt in
elt##.onblur := (bool_cb f)
let onscroll elt f =
let elt = get_elt "Ev.onscroll" elt in
elt##.onscroll := (bool_cb f)
let onreturn elt f =
let f ev =
let key = ev##.keyCode in
if key = 13 then f ev;
true in
onkeydown elt f
let onchange elt f =
let elt = get_elt_input "Ev.onchange" elt in
elt##.onchange := (bool_cb f)
let onchange_select elt f =
let elt = get_elt_select "Ev.onchange_select" elt in
elt##.onchange := (bool_cb f)
let oninput elt f =
let elt = get_elt_input "Ev.oninput" elt in
elt##.oninput := (bool_cb f)
end
module Attr = struct
let clientWidth elt =
let elt = get_elt "Attr.clientWidth" elt in
elt##.clientWidth
let clientHeight elt =
let elt = get_elt "Attr.clientHeight" elt in
elt##.clientHeight
let offsetWidth elt =
let elt = get_elt "Attr.offsetWidth" elt in
elt##.offsetWidth
let offsetHeight elt =
let elt = get_elt "Attr.offsetHeight" elt in
elt##.offsetHeight
let clientLeft elt =
let elt = get_elt "Attr.clientLeft" elt in
elt##.clientLeft
let clientTop elt =
let elt = get_elt "Attr.clientTop" elt in
elt##.clientTop
end
module Css = struct
let background elt =
let elt = get_elt "Css.background" elt in
to_bytestring (elt##.style##.background)
let backgroundAttachment elt =
let elt = get_elt "Css.backgroundAttachment" elt in
to_bytestring (elt##.style##.backgroundAttachment)
let backgroundColor elt =
let elt = get_elt "Css.backgroundColor" elt in
to_bytestring (elt##.style##.backgroundColor)
let backgroundImage elt =
let elt = get_elt "Css.backgroundImage" elt in
to_bytestring (elt##.style##.backgroundImage)
let backgroundPosition elt =
let elt = get_elt "Css.backgroundPosition" elt in
to_bytestring (elt##.style##.backgroundPosition)
let backgroundRepeat elt =
let elt = get_elt "Css.backgroundRepeat" elt in
to_bytestring (elt##.style##.backgroundRepeat)
let border elt =
let elt = get_elt "Css.border" elt in
to_bytestring (elt##.style##.border)
let borderBottom elt =
let elt = get_elt "Css.borderBottom" elt in
to_bytestring (elt##.style##.borderBottom)
let borderBottomColor elt =
let elt = get_elt "Css.borderBottomColor" elt in
to_bytestring (elt##.style##.borderBottomColor)
let borderBottomStyle elt =
let elt = get_elt "Css.borderBottomStyle" elt in
to_bytestring (elt##.style##.borderBottomStyle)
let borderBottomWidth elt =
let elt = get_elt "Css.borderBottomWidth" elt in
to_bytestring (elt##.style##.borderBottomWidth)
let borderBottomWidthPx elt =
let elt = get_elt "Css.borderBottomWidthPx" elt in
parseInt (elt##.style##.borderBottomWidth)
let borderCollapse elt =
let elt = get_elt "Css.borderCollapse" elt in
to_bytestring (elt##.style##.borderCollapse)
let borderColor elt =
let elt = get_elt "Css.borderColor" elt in
to_bytestring (elt##.style##.borderColor)
let borderLeft elt =
let elt = get_elt "Css.borderLeft" elt in
to_bytestring (elt##.style##.borderLeft)
let borderLeftColor elt =
let elt = get_elt "Css.borderLeftColor" elt in
to_bytestring (elt##.style##.borderLeftColor)
let borderLeftStyle elt =
let elt = get_elt "Css.borderLeftStyle" elt in
to_bytestring (elt##.style##.borderLeftStyle)
let borderLeftWidth elt =
let elt = get_elt "Css.borderLeftWidth" elt in
to_bytestring (elt##.style##.borderLeftWidth)
let borderLeftWidthPx elt =
let elt = get_elt "Css.borderLeftWidthPx" elt in
parseInt (elt##.style##.borderLeftWidth)
let borderRight elt =
let elt = get_elt "Css.borderRight" elt in
to_bytestring (elt##.style##.borderRight)
let borderRightColor elt =
let elt = get_elt "Css.borderRightColor" elt in
to_bytestring (elt##.style##.borderRightColor)
let borderRightStyle elt =
let elt = get_elt "Css.borderRightStyle" elt in
to_bytestring (elt##.style##.borderRightStyle)
let borderRightWidth elt =
let elt = get_elt "Css.borderRightWidth" elt in
to_bytestring (elt##.style##.borderRightWidth)
let borderRightWidthPx elt =
let elt = get_elt "Css.borderRightWidthPx" elt in
parseInt (elt##.style##.borderRightWidth)
let borderSpacing elt =
let elt = get_elt "Css.borderSpacing" elt in
to_bytestring (elt##.style##.borderSpacing)
let borderStyle elt =
let elt = get_elt "Css.borderStyle" elt in
to_bytestring (elt##.style##.borderStyle)
let borderTop elt =
let elt = get_elt "Css.borderTop" elt in
to_bytestring (elt##.style##.borderTop)
let borderTopColor elt =
let elt = get_elt "Css.borderTopColor" elt in
to_bytestring (elt##.style##.borderTopColor)
let borderTopStyle elt =
let elt = get_elt "Css.borderTopStyle" elt in
to_bytestring (elt##.style##.borderTopStyle)
let borderTopWidth elt =
let elt = get_elt "Css.borderTopWidth" elt in
to_bytestring (elt##.style##.borderTopWidth)
let borderTopWidthPx elt =
let elt = get_elt "Css.borderTopWidthPx" elt in
parseInt (elt##.style##.borderTopWidth)
let borderWidth elt =
let elt = get_elt "Css.borderWidth" elt in
to_bytestring (elt##.style##.borderWidth)
let borderWidthPx elt =
let elt = get_elt "Css.borderWidthPx" elt in
parseInt (elt##.style##.borderWidth)
let borderRadius elt =
let elt = get_elt "Css.borderRadius" elt in
to_bytestring (elt##.style##.borderRadius)
let bottom elt =
let elt = get_elt "Css.bottom" elt in
to_bytestring (elt##.style##.bottom)
let captionSide elt =
let elt = get_elt "Css.captionSide" elt in
to_bytestring (elt##.style##.captionSide)
let clear elt =
let elt = get_elt "Css.clear" elt in
to_bytestring (elt##.style##.clear)
let clip elt =
let elt = get_elt "Css.clip" elt in
to_bytestring (elt##.style##.clip)
let color elt =
let elt = get_elt "Css.color" elt in
to_bytestring (elt##.style##.color)
let content elt =
let elt = get_elt "Css.content" elt in
to_bytestring (elt##.style##.content)
let counterIncrement elt =
let elt = get_elt "Css.counterIncrement" elt in
to_bytestring (elt##.style##.counterIncrement)
let counterReset elt =
let elt = get_elt "Css.counterReset" elt in
to_bytestring (elt##.style##.counterReset)
let cssFloat elt =
let elt = get_elt "Css.cssFloat" elt in
to_bytestring (elt##.style##.cssFloat)
let cssText elt =
let elt = get_elt "Css.cssText" elt in
to_bytestring (elt##.style##.cssText)
let cursor elt =
let elt = get_elt "Css.cursor" elt in
to_bytestring (elt##.style##.cursor)
let direction elt =
let elt = get_elt "Css.direction" elt in
to_bytestring (elt##.style##.direction)
let display elt =
let elt = get_elt "Css.display" elt in
to_bytestring (elt##.style##.display)
let emptyCells elt =
let elt = get_elt "Css.emptyCells" elt in
to_bytestring (elt##.style##.emptyCells)
let font elt =
let elt = get_elt "Css.font" elt in
to_bytestring (elt##.style##.font)
let fontFamily elt =
let elt = get_elt "Css.fontFamily" elt in
to_bytestring (elt##.style##.fontFamily)
let fontSize elt =
let elt = get_elt "Css.fontSize" elt in
to_bytestring (elt##.style##.fontSize)
let fontStyle elt =
let elt = get_elt "Css.fontStyle" elt in
to_bytestring (elt##.style##.fontStyle)
let fontVariant elt =
let elt = get_elt "Css.fontVariant" elt in
to_bytestring (elt##.style##.fontVariant)
let fontWeight elt =
let elt = get_elt "Css.fontWeight" elt in
to_bytestring (elt##.style##.fontWeight)
let height elt =
let elt = get_elt "Css.height" elt in
to_bytestring (elt##.style##.height)
let heightPx elt =
let elt = get_elt "Css.heightPx" elt in
parseInt (elt##.style##.height)
let left elt =
let elt = get_elt "Css.left" elt in
to_bytestring (elt##.style##.left)
let leftPx elt =
let elt = get_elt "Css.leftPx" elt in
parseInt (elt##.style##.left)
let letterSpacing elt =
let elt = get_elt "Css.letterSpacing" elt in
to_bytestring (elt##.style##.letterSpacing)
let lineHeight elt =
let elt = get_elt "Css.lineHeight" elt in
to_bytestring (elt##.style##.lineHeight)
let listStyle elt =
let elt = get_elt "Css.listStyle" elt in
to_bytestring (elt##.style##.listStyle)
let listStyleImage elt =
let elt = get_elt "Css.listStyleImage" elt in
to_bytestring (elt##.style##.listStyleImage)
let listStylePosition elt =
let elt = get_elt "Css.listStylePosition" elt in
to_bytestring (elt##.style##.listStylePosition)
let listStyleType elt =
let elt = get_elt "Css.listStyleType" elt in
to_bytestring (elt##.style##.listStyleType)
let margin elt =
let elt = get_elt "Css.margin" elt in
to_bytestring (elt##.style##.margin)
let marginBottom elt =
let elt = get_elt "Css.marginBottom" elt in
to_bytestring (elt##.style##.marginBottom)
let marginBottomPx elt =
let elt = get_elt "Css.marginBottomPx" elt in
parseInt (elt##.style##.marginBottom)
let marginLeft elt =
let elt = get_elt "Css.marginLeft" elt in
to_bytestring (elt##.style##.marginLeft)
let marginLeftPx elt =
let elt = get_elt "Css.marginLeftPx" elt in
parseInt (elt##.style##.marginLeft)
let marginRight elt =
let elt = get_elt "Css.marginRight" elt in
to_bytestring (elt##.style##.marginRight)
let marginRightPx elt =
let elt = get_elt "Css.marginRightPx" elt in
parseInt (elt##.style##.marginRight)
let marginTop elt =
let elt = get_elt "Css.marginTop" elt in
to_bytestring (elt##.style##.marginTop)
let marginTopPx elt =
let elt = get_elt "Css.marginTopPx" elt in
parseInt (elt##.style##.marginTop)
let maxHeight elt =
let elt = get_elt "Css.maxHeight" elt in
to_bytestring (elt##.style##.maxHeight)
let maxHeightPx elt =
let elt = get_elt "Css.maxHeightPx" elt in
parseInt (elt##.style##.maxHeight)
let maxWidth elt =
let elt = get_elt "Css.maxWidth" elt in
to_bytestring (elt##.style##.maxWidth)
let maxWidthPx elt =
let elt = get_elt "Css.maxWidthPx" elt in
parseInt (elt##.style##.maxWidth)
let minHeight elt =
let elt = get_elt "Css.minHeight" elt in
to_bytestring (elt##.style##.minHeight)
let minHeightPx elt =
let elt = get_elt "Css.minHeightPx" elt in
parseInt (elt##.style##.minHeight)
let minWidth elt =
let elt = get_elt "Css.minWidth" elt in
to_bytestring (elt##.style##.minWidth)
let minWidthPx elt =
let elt = get_elt "Css.minWidthPx" elt in
parseInt (elt##.style##.minWidth)
let opacity elt =
let elt = get_elt "Css.opacity" elt in
option_map to_bytestring (Optdef.to_option (elt##.style##.opacity))
let outline elt =
let elt = get_elt "Css.outline" elt in
to_bytestring (elt##.style##.outline)
let outlineColor elt =
let elt = get_elt "Css.outlineColor" elt in
to_bytestring (elt##.style##.outlineColor)
let outlineOffset elt =
let elt = get_elt "Css.outlineOffset" elt in
to_bytestring (elt##.style##.outlineOffset)
let outlineStyle elt =
let elt = get_elt "Css.outlineStyle" elt in
to_bytestring (elt##.style##.outlineStyle)
let outlineWidth elt =
let elt = get_elt "Css.outlineWidth" elt in
to_bytestring (elt##.style##.outlineWidth)
let overflow elt =
let elt = get_elt "Css.overflow" elt in
to_bytestring (elt##.style##.overflow)
let overflowX elt =
let elt = get_elt "Css.overflowX" elt in
to_bytestring (elt##.style##.overflowX)
let overflowY elt =
let elt = get_elt "Css.overflowY" elt in
to_bytestring (elt##.style##.overflowY)
let padding elt =
let elt = get_elt "Css.padding" elt in
to_bytestring (elt##.style##.padding)
let paddingBottom elt =
let elt = get_elt "Css.paddingBottom" elt in
to_bytestring (elt##.style##.paddingBottom)
let paddingBottomPx elt =
let elt = get_elt "Css.paddingBottomPx" elt in
parseInt (elt##.style##.paddingBottom)
let paddingLeft elt =
let elt = get_elt "Css.paddingLeft" elt in
to_bytestring (elt##.style##.paddingLeft)
let paddingLeftPx elt =
let elt = get_elt "Css.paddingLeftPx" elt in
parseInt (elt##.style##.paddingLeft)
let paddingRight elt =
let elt = get_elt "Css.paddingRight" elt in
to_bytestring (elt##.style##.paddingRight)
let paddingRightPx elt =
let elt = get_elt "Css.paddingRightPx" elt in
parseInt (elt##.style##.paddingRight)
let paddingTop elt =
let elt = get_elt "Css.paddingTop" elt in
to_bytestring (elt##.style##.paddingTop)
let paddingTopPx elt =
let elt = get_elt "Css.paddingTopPx" elt in
parseInt (elt##.style##.paddingTop)
let pageBreakAfter elt =
let elt = get_elt "Css.pageBreakAfter" elt in
to_bytestring (elt##.style##.pageBreakAfter)
let pageBreakBefore elt =
let elt = get_elt "Css.pageBreakBefore" elt in
to_bytestring (elt##.style##.pageBreakBefore)
let position elt =
let elt = get_elt "Css.position" elt in
to_bytestring (elt##.style##.position)
let right elt =
let elt = get_elt "Css.right" elt in
to_bytestring (elt##.style##.right)
let rightPx elt =
let elt = get_elt "Css.rightPx" elt in
parseInt (elt##.style##.right)
let tableLayout elt =
let elt = get_elt "Css.tableLayout" elt in
to_bytestring (elt##.style##.tableLayout)
let textAlign elt =
let elt = get_elt "Css.textAlign" elt in
to_bytestring (elt##.style##.textAlign)
let textDecoration elt =
let elt = get_elt "Css.textDecoration" elt in
to_bytestring (elt##.style##.textDecoration)
let textIndent elt =
let elt = get_elt "Css.textIndent" elt in
to_bytestring (elt##.style##.textIndent)
let textTransform elt =
let elt = get_elt "Css.textTransform" elt in
to_bytestring (elt##.style##.textTransform)
let top elt =
let elt = get_elt "Css.top" elt in
to_bytestring (elt##.style##.top)
let topPx elt =
let elt = get_elt "Css.topPx" elt in
parseInt (elt##.style##.top)
let verticalAlign elt =
let elt = get_elt "Css.verticalAlign" elt in
to_bytestring (elt##.style##.verticalAlign)
let visibility elt =
let elt = get_elt "Css.visibility" elt in
to_bytestring (elt##.style##.visibility)
let whiteSpace elt =
let elt = get_elt "Css.whiteSpace" elt in
to_bytestring (elt##.style##.whiteSpace)
let width elt =
let elt = get_elt "Css.width" elt in
to_bytestring (elt##.style##.width)
let widthPx elt =
let elt = get_elt "Css.widthPx" elt in
parseInt (elt##.style##.width)
let wordSpacing elt =
let elt = get_elt "Css.wordSpacing" elt in
to_bytestring (elt##.style##.wordSpacing)
let zIndex elt =
let elt = get_elt "Css.zIndex" elt in
to_bytestring (elt##.style##.zIndex)
end
module SetCss = struct
let background elt v =
let elt = get_elt "SetCss.background" elt in
elt##.style##.background := bytestring v
let backgroundAttachment elt v =
let elt = get_elt "SetCss.backgroundAttachment" elt in
elt##.style##.backgroundAttachment := bytestring v
let backgroundColor elt v =
let elt = get_elt "SetCss.backgroundColor" elt in
elt##.style##.backgroundColor := bytestring v
let backgroundImage elt v =
let elt = get_elt "SetCss.backgroundImage" elt in
elt##.style##.backgroundImage := bytestring v
let backgroundPosition elt v =
let elt = get_elt "SetCss.backgroundPosition" elt in
elt##.style##.backgroundPosition := bytestring v
let backgroundRepeat elt v =
let elt = get_elt "SetCss.backgroundRepeat" elt in
elt##.style##.backgroundRepeat := bytestring v
let border elt v =
let elt = get_elt "SetCss.border" elt in
elt##.style##.border := bytestring v
let borderBottom elt v =
let elt = get_elt "SetCss.borderBottom" elt in
elt##.style##.borderBottom := bytestring v
let borderBottomColor elt v =
let elt = get_elt "SetCss.borderBottomColor" elt in
elt##.style##.borderBottomColor := bytestring v
let borderBottomStyle elt v =
let elt = get_elt "SetCss.borderBottomStyle" elt in
elt##.style##.borderBottomStyle := bytestring v
let borderBottomWidth elt v =
let elt = get_elt "SetCss.borderBottomWidth" elt in
elt##.style##.borderBottomWidth := bytestring v
let borderBottomWidthPx elt v = borderBottomWidth elt (Printf.sprintf "%dpx" v)
let borderCollapse elt v =
let elt = get_elt "SetCss.borderCollapse" elt in
elt##.style##.borderCollapse := bytestring v
let borderColor elt v =
let elt = get_elt "SetCss.borderColor" elt in
elt##.style##.borderColor := bytestring v
let borderLeft elt v =
let elt = get_elt "SetCss.borderLeft" elt in
elt##.style##.borderLeft := bytestring v
let borderLeftColor elt v =
let elt = get_elt "SetCss.borderLeftColor" elt in
elt##.style##.borderLeftColor := bytestring v
let borderLeftStyle elt v =
let elt = get_elt "SetCss.borderLeftStyle" elt in
elt##.style##.borderLeftStyle := bytestring v
let borderLeftWidth elt v =
let elt = get_elt "SetCss.borderLeftWidth" elt in
elt##.style##.borderLeftWidth := bytestring v
let borderLeftWidthPx elt v = borderLeftWidth elt (Printf.sprintf "%dpx" v)
let borderRight elt v =
let elt = get_elt "SetCss.borderRight" elt in
elt##.style##.borderRight := bytestring v
let borderRightColor elt v =
let elt = get_elt "SetCss.borderRightColor" elt in
elt##.style##.borderRightColor := bytestring v
let borderRightStyle elt v =
let elt = get_elt "SetCss.borderRightStyle" elt in
elt##.style##.borderRightStyle := bytestring v
let borderRightWidth elt v =
let elt = get_elt "SetCss.borderRightWidth" elt in
elt##.style##.borderRightWidth := bytestring v
let borderRightWidthPx elt v = borderRightWidth elt (Printf.sprintf "%dpx" v)
let borderSpacing elt v =
let elt = get_elt "SetCss.borderSpacing" elt in
elt##.style##.borderSpacing := bytestring v
let borderStyle elt v =
let elt = get_elt "SetCss.borderStyle" elt in
elt##.style##.borderStyle := bytestring v
let borderTop elt v =
let elt = get_elt "SetCss.borderTop" elt in
elt##.style##.borderTop := bytestring v
let borderTopColor elt v =
let elt = get_elt "SetCss.borderTopColor" elt in
elt##.style##.borderTopColor := bytestring v
let borderTopStyle elt v =
let elt = get_elt "SetCss.borderTopStyle" elt in
elt##.style##.borderTopStyle := bytestring v
let borderTopWidth elt v =
let elt = get_elt "SetCss.borderTopWidth" elt in
elt##.style##.borderTopWidth := bytestring v
let borderTopWidthPx elt v = borderTopWidth elt (Printf.sprintf "%dpx" v)
let borderWidth elt v =
let elt = get_elt "SetCss.borderWidth" elt in
elt##.style##.borderWidth := bytestring v
let borderRadius elt v =
let elt = get_elt "SetCss.borderRadius" elt in
elt##.style##.borderRadius := bytestring v
let bottom elt v =
let elt = get_elt "SetCss.bottom" elt in
elt##.style##.bottom := bytestring v
let bottomPx elt v = bottom elt (Printf.sprintf "%dpx" v)
let captionSide elt v =
let elt = get_elt "SetCss.captionSide" elt in
elt##.style##.captionSide := bytestring v
let clear elt v =
let elt = get_elt "SetCss.clear" elt in
elt##.style##.clear := bytestring v
let clip elt v =
let elt = get_elt "SetCss.clip" elt in
elt##.style##.clip := bytestring v
let color elt v =
let elt = get_elt "SetCss.color" elt in
elt##.style##.color := bytestring v
let content elt v =
let elt = get_elt "SetCss.content" elt in
elt##.style##.content := bytestring v
let counterIncrement elt v =
let elt = get_elt "SetCss.counterIncrement" elt in
elt##.style##.counterIncrement := bytestring v
let counterReset elt v =
let elt = get_elt "SetCss.counterReset" elt in
elt##.style##.counterReset := bytestring v
let cssFloat elt v =
let elt = get_elt "SetCss.cssFloat" elt in
elt##.style##.cssFloat := bytestring v
let cssText elt v =
let elt = get_elt "SetCss.cssText" elt in
elt##.style##.cssText := bytestring v
let cursor elt v =
let elt = get_elt "SetCss.cursor" elt in
elt##.style##.cursor := bytestring v
let direction elt v =
let elt = get_elt "SetCss.direction" elt in
elt##.style##.direction := bytestring v
let display elt v =
let elt = get_elt "SetCss.display" elt in
elt##.style##.display := bytestring v
let emptyCells elt v =
let elt = get_elt "SetCss.emptyCells" elt in
elt##.style##.emptyCells := bytestring v
let font elt v =
let elt = get_elt "SetCss.font" elt in
elt##.style##.font := bytestring v
let fontFamily elt v =
let elt = get_elt "SetCss.fontFamily" elt in
elt##.style##.fontFamily := bytestring v
let fontSize elt v =
let elt = get_elt "SetCss.fontSize" elt in
elt##.style##.fontSize := bytestring v
let fontStyle elt v =
let elt = get_elt "SetCss.fontStyle" elt in
elt##.style##.fontStyle := bytestring v
let fontVariant elt v =
let elt = get_elt "SetCss.fontVariant" elt in
elt##.style##.fontVariant := bytestring v
let fontWeight elt v =
let elt = get_elt "SetCss.fontWeight" elt in
elt##.style##.fontWeight := bytestring v
let height elt v =
let elt = get_elt "SetCss.height" elt in
elt##.style##.height := bytestring v
let heightPx elt v = height elt (Printf.sprintf "%dpx" v)
let left elt v =
let elt = get_elt "SetCss.left" elt in
elt##.style##.left := bytestring v
let leftPx elt v = left elt (Printf.sprintf "%dpx" v)
let letterSpacing elt v =
let elt = get_elt "SetCss.letterSpacing" elt in
elt##.style##.letterSpacing := bytestring v
let lineHeight elt v =
let elt = get_elt "SetCss.lineHeight" elt in
elt##.style##.lineHeight := bytestring v
let listStyle elt v =
let elt = get_elt "SetCss.listStyle" elt in
elt##.style##.listStyle := bytestring v
let listStyleImage elt v =
let elt = get_elt "SetCss.listStyleImage" elt in
elt##.style##.listStyleImage := bytestring v
let listStylePosition elt v =
let elt = get_elt "SetCss.listStylePosition" elt in
elt##.style##.listStylePosition := bytestring v
let listStyleType elt v =
let elt = get_elt "SetCss.listStyleType" elt in
elt##.style##.listStyleType := bytestring v
let margin elt v =
let elt = get_elt "SetCss.margin" elt in
elt##.style##.margin := bytestring v
let marginBottom elt v =
let elt = get_elt "SetCss.marginBottom" elt in
elt##.style##.marginBottom := bytestring v
let marginBottomPx elt v = marginBottom elt (Printf.sprintf "%dpx" v)
let marginLeft elt v =
let elt = get_elt "SetCss.marginLeft" elt in
elt##.style##.marginLeft := bytestring v
let marginLeftPx elt v = marginLeft elt (Printf.sprintf "%dpx" v)
let marginRight elt v =
let elt = get_elt "SetCss.marginRight" elt in
elt##.style##.marginRight := bytestring v
let marginRightPx elt v = marginRight elt (Printf.sprintf "%dpx" v)
let marginTop elt v =
let elt = get_elt "SetCss.marginTop" elt in
elt##.style##.marginTop := bytestring v
let marginTopPx elt v = marginTop elt (Printf.sprintf "%dpx" v)
let maxHeight elt v =
let elt = get_elt "SetCss.maxHeight" elt in
elt##.style##.maxHeight := bytestring v
let maxHeightPx elt v = maxHeight elt (Printf.sprintf "%dpx" v)
let maxWidth elt v =
let elt = get_elt "SetCss.maxWidth" elt in
elt##.style##.maxWidth := bytestring v
let maxWidthPx elt v = maxWidth elt (Printf.sprintf "%dpx" v)
let minHeight elt v =
let elt = get_elt "SetCss.minHeight" elt in
elt##.style##.minHeight := bytestring v
let minHeightPx elt v = minHeight elt (Printf.sprintf "%dpx" v)
let minWidth elt v =
let elt = get_elt "SetCss.minWidth" elt in
elt##.style##.minWidth := bytestring v
let minWidthPx elt v = minWidth elt (Printf.sprintf "%dpx" v)
let opacity elt v =
let elt = get_elt "SetCss.opacity" elt in
elt##.style##.opacity := match v with None -> undefined | Some v -> def (bytestring v)
let outline elt v =
let elt = get_elt "SetCss.outline" elt in
elt##.style##.outline := bytestring v
let outlineColor elt v =
let elt = get_elt "SetCss.outlineColor" elt in
elt##.style##.outlineColor := bytestring v
let outlineOffset elt v =
let elt = get_elt "SetCss.outlineOffset" elt in
elt##.style##.outlineOffset := bytestring v
let outlineStyle elt v =
let elt = get_elt "SetCss.outlineStyle" elt in
elt##.style##.outlineStyle := bytestring v
let outlineWidth elt v =
let elt = get_elt "SetCss.outlineWidth" elt in
elt##.style##.outlineWidth := bytestring v
let overflow elt v =
let elt = get_elt "SetCss.overflow" elt in
elt##.style##.overflow := bytestring v
let overflowX elt v =
let elt = get_elt "SetCss.overflowX" elt in
elt##.style##.overflowX := bytestring v
let overflowY elt v =
let elt = get_elt "SetCss.overflowY" elt in
elt##.style##.overflowY := bytestring v
let padding elt v =
let elt = get_elt "SetCss.padding" elt in
elt##.style##.padding := bytestring v
let paddingBottom elt v =
let elt = get_elt "SetCss.paddingBottom" elt in
elt##.style##.paddingBottom := bytestring v
let paddingBottomPx elt v = paddingBottom elt (Printf.sprintf "%dpx" v)
let paddingLeft elt v =
let elt = get_elt "SetCss.paddingLeft" elt in
elt##.style##.paddingLeft := bytestring v
let paddingLeftPx elt v = paddingLeft elt (Printf.sprintf "%dpx" v)
let paddingRight elt v =
let elt = get_elt "SetCss.paddingRight" elt in
elt##.style##.paddingRight := bytestring v
let paddingRightPx elt v = paddingRight elt (Printf.sprintf "%dpx" v)
let paddingTop elt v =
let elt = get_elt "SetCss.paddingTop" elt in
elt##.style##.paddingTop := bytestring v
let paddingTopPx elt v = paddingTop elt (Printf.sprintf "%dpx" v)
let pageBreakAfter elt v =
let elt = get_elt "SetCss.pageBreakAfter" elt in
elt##.style##.pageBreakAfter := bytestring v
let pageBreakBefore elt v =
let elt = get_elt "SetCss.pageBreakBefore" elt in
elt##.style##.pageBreakBefore := bytestring v
let position elt v =
let elt = get_elt "SetCss.position" elt in
elt##.style##.position := bytestring v
let right elt v =
let elt = get_elt "SetCss.right" elt in
elt##.style##.right := bytestring v
let rightPx elt v = right elt (Printf.sprintf "%dpx" v)
let tableLayout elt v =
let elt = get_elt "SetCss.tableLayout" elt in
elt##.style##.tableLayout := bytestring v
let textAlign elt v =
let elt = get_elt "SetCss.textAlign" elt in
elt##.style##.textAlign := bytestring v
let textDecoration elt v =
let elt = get_elt "SetCss.textDecoration" elt in
elt##.style##.textDecoration := bytestring v
let textIndent elt v =
let elt = get_elt "SetCss.textIndent" elt in
elt##.style##.textIndent := bytestring v
let textTransform elt v =
let elt = get_elt "SetCss.textTransform" elt in
elt##.style##.textTransform := bytestring v
let top elt v =
let elt = get_elt "SetCss.top" elt in
elt##.style##.top := bytestring v
let topPx elt v = top elt (Printf.sprintf "%dpx" v)
let verticalAlign elt v =
let elt = get_elt "SetCss.verticalAlign" elt in
elt##.style##.verticalAlign := bytestring v
let visibility elt v =
let elt = get_elt "SetCss.visibility" elt in
elt##.style##.visibility := bytestring v
let whiteSpace elt v =
let elt = get_elt "SetCss.whiteSpace" elt in
elt##.style##.whiteSpace := bytestring v
let width elt v =
let elt = get_elt "SetCss.width" elt in
elt##.style##.width := bytestring v
let widthPx elt v = width elt (Printf.sprintf "%dpx" v)
let wordSpacing elt v =
let elt = get_elt "SetCss.wordSpacing" elt in
elt##.style##.wordSpacing := bytestring v
let zIndex elt v =
let elt = get_elt "SetCss.zIndex" elt in
elt##.style##.zIndex := bytestring v
end
end
(** [hide elt] removes [elt] from the page flow by setting its CSS
    [display] property to ["none"]. The element stays in the DOM. *)
let hide elt = Manip.SetCss.display elt "none"

(** [show elt] undoes {!hide} by resetting [display] to the empty string,
    which lets the element fall back to its stylesheet-defined display
    value (not necessarily ["block"]). *)
let show elt = Manip.SetCss.display elt ""
(** [window_open ?features url name] calls the browser's [window.open]
    with [url], the window [name], and, when given, the [features]
    string; when [features] is [None], JavaScript [null] is passed. *)
let window_open ?features url name =
  let features = Opt.map (Opt.option features) string in
  window##(open_ (string url) (string name) features)
(** Helpers over a [Dom_html.window] object. Handler-installing functions
    default to the current [Dom_html.window] and overwrite any previously
    installed handler for the same event. *)
module Window = struct
  (* Ask the browser to close [win]. *)
  let close win = win##close
  (* [body win] / [head win] return the window document's <body>/<head>
     as TyXML elements. *)
  let body win = Of_dom.of_body win##.document##.body
  let head win = Of_dom.of_head win##.document##.head
  (* Install [f] as the window's "unload" handler; [f]'s boolean result
     becomes the DOM handler's return value. *)
  let onunload ?(win = Dom_html.window) f =
    win##.onunload := Dom_html.handler (fun ev -> bool (f ev))
  (* Same wiring as [onunload], for the "resize" event. *)
  let onresize ?(win = Dom_html.window) f =
    win##.onresize := Dom_html.handler (fun ev -> bool (f ev))
  (* [prompt msg] shows a browser prompt pre-filled with [value];
     returns the user's input, or [""] when the prompt is cancelled. *)
  let prompt ?(win = Dom_html.window) ?(value = "") msg =
    Opt.case
      (win##(prompt (string msg) (string value)))
      (fun () -> "")
      to_string
  (* Same wiring as [onunload], for the "hashchange" event. *)
  let onhashchange ?(win = Dom_html.window) f =
    win##.onhashchange := Dom_html.handler (fun ev -> bool (f ev))
end
(** Helpers over the current document. *)
module Document = struct
  (* [uri ()] is the document's URL as an OCaml string. *)
  let uri () = to_string (doc##._URL)
end
(** [parse_fragment ()] parses the current URL fragment as an
    ["&"]-separated (or percent-encoded ["%26"]-separated) list of
    [name=value] pairs and returns them as an association list, in order.
    A bare [name] with no ["="] yields [(name, "")].

    NOTE: both regexps contain a capture group, and JavaScript's [split]
    keeps captured separators in the result list — that is why separator
    items ["&"]/["%26"] are filtered out below, and why the pattern
    [name :: _ :: value] drops exactly one matched separator.
    [String.concat "" value] re-joins the remainder, so any later
    separators stay embedded in the value (e.g. ["a=b=c"] gives
    [("a", "b=c")]). *)
let parse_fragment () =
  let elts =
    Regexp.(split (regexp "(&|%26)") (Url.Current.get_fragment ())) in
  List.fold_right
    (fun elt acc ->
       (* Skip retained separators and empty fields. *)
       if elt = "&" || elt = "%26" || elt = "" then acc else
         match Regexp.(split (regexp "(=|%3D)") elt) with
         | [name] -> (name, "") :: acc
         | name :: _ :: value -> (name, String.concat "" value) :: acc
         (* [split] never returns an empty list, so this is unreachable. *)
         | _ -> assert false)
    elts []
(** [set_fragment args] encodes the association list [args] as
    ["n1=v1&n2=v2&..."] and installs it as the current URL fragment.
    Names and values are used verbatim (no percent-encoding here). *)
let set_fragment args =
  let encode (n, v) = n ^ "=" ^ v in
  Url.Current.set_fragment (String.concat "&" (List.map encode args))
(** [find_component id] returns the element with DOM id [id].
    @raise Failure if no element with that id exists in the document. *)
let find_component id =
  let missing () = failwith ("Cannot find id " ^ id) in
  match Manip.by_id id with
  | Some div -> div
  | None -> missing ()
(** Programmatic clipboard writing via the document "copy" event.
    Usage: call [set_copy] once to install the interception handler,
    then [copy s] to place [s] on the clipboard. [copy] arms the shared
    [clipboard] state and triggers the browser's copy command; the
    handler installed by [set_copy] then substitutes [clipboard.data]
    for the default selection. Not reentrant: a single mutable cell is
    shared by all callers. *)
module Clipboard = struct
  (* Mutable interception state shared between [copy] and the handler. *)
  type t = {
    mutable intercept : bool ;  (* next "copy" event should be hijacked *)
    mutable data : string ;     (* text to place on the clipboard *)
  }
  let clipboard =
    { intercept = false ; data = "" }
  (* Install the document-level "copy" listener. When armed, it writes
     [clipboard.data] as "text/plain", cancels the default copy, and
     disarms itself; otherwise it lets the event proceed normally. *)
  let set_copy () =
    Dom.addEventListener
      doc
      (Dom.Event.make "copy")
      (Dom.handler (fun e ->
           if clipboard.intercept then begin
             (e##.clipboardData##(setData (string "text/plain") (string clipboard.data)));
             Dom.preventDefault e;
             clipboard.intercept <- false ;
             clipboard.data <- ""
           end;
           _true))
      _true |> ignore
  (* Arm the handler with [value] and fire the browser "copy" command.
     Requires [set_copy] to have been called beforehand. *)
  let copy value : unit =
    clipboard.intercept <- true;
    clipboard.data <- value;
    Dom_html.document##(execCommand (string "copy")
                          (_false) (null))
end
| null | https://raw.githubusercontent.com/OCamlPro/ezjs/5e9ce77f1fc0efbb13f649f03d8d4f982f21b6d1/libs/utils/ezjs_tyxml.ml | ocaml | let loc = Js.Unsafe.variable "location"
let is_hidden div =
div##style##display = string "none"
For workers... | This file is part of Learn - OCaml .
*
* Copyright ( C ) 2015 OCamlPro : , .
* Copyright ( C ) 2012 , ( for the ' Manip ' module )
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Library General Public License for more details .
*
* You should have received a copy of the GNU Library General Public License
* along with this program . If not , see < / > .
*
* Copyright (C) 2015 OCamlPro: Grégoire Henry, Çağdaş Bozman.
* Copyright (C) 2012 Vincent Balat, Benedikt Becker (for the 'Manip' module)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program. If not, see <>. *)
module Js = Js_of_ocaml.Js
module Url = Js_of_ocaml.Url
module Dom_html = Js_of_ocaml.Dom_html
module Firebug = Js_of_ocaml.Firebug
module File = Js_of_ocaml.File
module Dom = Js_of_ocaml.Dom
module Typed_array = Js_of_ocaml.Typed_array
module Regexp = Js_of_ocaml.Regexp
module Html = Js_of_ocaml_tyxml.Tyxml_js.Html5
module Of_dom = Js_of_ocaml_tyxml.Tyxml_js.Of_dom
module To_dom = Js_of_ocaml_tyxml.Tyxml_js.To_dom
module Xml = Js_of_ocaml_tyxml.Tyxml_js.Xml
type 'a elt = 'a Html.elt
open Js
exception JsError = Error
type ('a, 'b) result = ('a, 'b) Stdlib.result = Ok of 'a | Error of 'b
let doc = Dom_html.document
let window = Dom_html.window
let alert s = window##(alert (string s))
let confirm s = to_bool (window##(confirm (string s)))
let js_log obj = Firebug.console##log obj
let js_debug obj = Firebug.console##debug obj
let js_warn obj = Firebug.console##warn obj
let js_error obj = Firebug.console##error obj
(** [log fmt ...] formats its arguments like [Format.printf] and writes
    the resulting string to the browser console at the "log" level.

    Implementation note: these used to funnel through the shared global
    [Format.str_formatter]; that is not reentrant and could prepend any
    stale, unflushed [str_formatter] content to the message.
    [Format.kasprintf] formats into a private buffer and has the same
    external type, so callers are unaffected. *)
let log fmt =
  Format.kasprintf (fun s -> Firebug.console##(log (string s))) fmt

(** [debug fmt ...] — same as {!log}, at the console "debug" level. *)
let debug fmt =
  Format.kasprintf (fun s -> Firebug.console##(debug (string s))) fmt

(** [warn fmt ...] — same as {!log}, at the console "warning" level. *)
let warn fmt =
  Format.kasprintf (fun s -> Firebug.console##(warn (string s))) fmt

(** [error fmt ...] — same as {!log}, at the console "error" level. *)
let error fmt =
  Format.kasprintf (fun s -> Firebug.console##(error (string s))) fmt
let reload () = window##.location##reload
module Manip = struct
let option_map f = function None -> None | Some x -> Some (f x)
exception Error of string
let manip_error fmt =
Format.ksprintf
(fun s -> debug "%s" s; raise (Error s))
fmt
let id x = x
let get_node = Html.toelt
let get_elt name elt : Dom_html.element t =
Opt.case
(Dom_html.CoerceTo.element (Html.toelt elt))
(fun () ->
manip_error
"Cannot call %s on a node which is not an element"
name)
id
let html_doc_constr : Dom_html.document constr =
Unsafe.global##._HTMLDocument
let document elt =
let elt = get_elt "document" elt in
let rec loop (elt : Dom.node t) =
if instanceof elt html_doc_constr
then (Obj.magic elt : Dom_html.document t)
else
Opt.case
(elt##.parentNode)
(fun () -> (Obj.magic elt : Dom_html.document t))
loop
in
loop (elt : Dom_html.element t :> Dom.node t)
let window elt =
let doc = document elt in
(Obj.magic doc)##.defaultView
let clone ?(deep=false) elt =
let elt = get_elt "clone" elt in
Obj.magic (elt##(cloneNode (bool deep)))
let setInnerHtml elt s =
let elt = get_elt "setInnerHtml" elt in
elt##.innerHTML := string s
let addClass elt s =
let elt = get_elt "addClass" elt in
elt##.classList##(add (string s))
let removeClass elt s =
let elt = get_elt "removeClass" elt in
elt##.classList##(remove (string s))
let containsClass elt s =
let elt = get_elt "containsClass" elt in
to_bool elt##.classList##(contains (string s))
let setAttribute elt key value =
let elt = get_elt "setAttribute" elt in
elt##(setAttribute (string key) (string value))
let removeAttribute elt key =
let elt = get_elt "removeAttribute" elt in
elt##(removeAttribute (string key))
let raw_appendChild ?before node elt2 =
match before with
| None -> ignore(node##(appendChild (get_node elt2)))
| Some elt3 ->
let node3 = get_node elt3 in
ignore(node##(insertBefore (get_node elt2) (some node3)))
let raw_appendChildren ?before node elts =
match before with
| None ->
List.iter (fun elt2 -> ignore(node##(appendChild (get_node elt2)))) elts
| Some elt3 ->
let node3 = get_node elt3 in
List.iter (fun elt2 -> ignore(node##(insertBefore (get_node elt2) (some node3)))) elts
let raw_insertChildAfter node1 node2 elt3 =
Opt.case
(node2##.nextSibling)
(fun () ->
ignore(node1##(appendChild (get_node elt3))))
(fun node2 ->
ignore(node1##(insertBefore (get_node elt3) (some node2))))
let raw_insertChildrenAfter node1 node2 elts =
Opt.case
(node2##.nextSibling)
(fun () ->
List.iter (fun elt3 ->
ignore(node1##(appendChild (get_node elt3)))))
(fun node2 ->
List.iter (fun elt3 ->
ignore(node1##(insertBefore (get_node elt3) (some node2)))))
elts
let raw_removeChild node1 elt2 =
let node2 = get_node elt2 in
ignore(node1##(removeChild node2))
let raw_replaceChild node1 elt2 elt3 =
let node2 = get_node elt2 in
ignore(node1##(replaceChild node2 (get_node elt3)))
let raw_removeChildren node =
let childrens = Dom.list_of_nodeList (node##.childNodes) in
List.iter (fun c -> ignore(node##(removeChild c))) childrens
let raw_replaceChildren node elts =
raw_removeChildren node;
List.iter (fun elt -> ignore(node##(appendChild (get_node elt)))) elts
let nth elt n =
let node = get_node elt in
let res = Opt.bind (node##.childNodes##(item n)) (fun node ->
Opt.map (Dom.CoerceTo.element node) (fun node ->
Of_dom.of_element (Dom_html.element node)
)
) in
Opt.to_option res
let by_id n =
let res = Opt.bind (Dom_html.window##.document##(getElementById (string n))) (fun node ->
Opt.map (Dom.CoerceTo.element node) (fun node ->
Of_dom.of_element (Dom_html.element node)
)
) in
Opt.to_option res
let by_class n =
let nl = Dom_html.window##.document##(getElementsByClassName (string n)) in
Array.init (nl##.length) (fun i ->
let node = nl##(item i) in
let node = Opt.get node (fun _ -> assert false) in
Of_dom.of_element (Dom_html.element node)
)
|> Array.to_list
let by_tag n =
let nl = Dom_html.window##.document##(getElementsByTagName (string n)) in
Array.init (nl##.length) (fun i ->
let node = nl##(item i) in
let node = Opt.get node (fun _ -> assert false) in
Of_dom.of_element (Dom_html.element node)
)
|> Array.to_list
let childLength elt =
let node = get_node elt in
node##.childNodes##.length
let appendChild ?before elt1 elt2 =
let node = get_node elt1 in
raw_appendChild ?before node elt2
let appendChildren ?before elt1 elts =
let node = get_node elt1 in
raw_appendChildren ?before node elts
let insertChildAfter elt1 elt2 elt3 =
let node1 = get_node elt1 in
let node2 = get_node elt2 in
raw_insertChildAfter node1 node2 elt3
let insertChildrenAfter elt1 elt2 elts =
let node1 = get_node elt1 in
let node2 = get_node elt2 in
raw_insertChildrenAfter node1 node2 elts
let removeChild elt1 elt2 =
let node1 = get_node elt1 in
raw_removeChild node1 elt2
let removeSelf elt =
let node = get_node elt in
let res = Opt.bind (node##.parentNode) (fun node ->
Opt.map (Dom.CoerceTo.element node) (fun node ->
Of_dom.of_element (Dom_html.element node)
)
) in
Opt.iter res (fun p -> removeChild p elt)
let appendChildFirst p c =
let before = nth p 0 in
appendChild ?before p c
let replaceChild elt1 elt2 elt3 =
let node1 = get_node elt1 in
raw_replaceChild node1 elt2 elt3
let removeChildren elt =
let node = get_node elt in
raw_removeChildren node
let replaceChildren elt elts =
let node = get_node elt in
raw_replaceChildren node elts
let children elt =
let node = get_node elt in
List.map Html.tot (Dom.list_of_nodeList (node##.childNodes))
let parent elt =
let node = get_node elt in
Opt.case (node##.parentNode)
(fun () -> None)
(fun elt -> Some (Html.tot elt))
let appendToBody ?before elt2 =
let body = (Of_dom.of_body Dom_html.window##.document##.body) in
appendChild ?before body elt2
let get_elt_input name elt : Dom_html.inputElement t =
Opt.case
(Dom_html.CoerceTo.input (get_elt name elt))
(fun () -> failwith (Printf.sprintf "Non 'input' node (%s)" name))
id
let get_elt_select name elt : Dom_html.selectElement t =
Opt.case
(Dom_html.CoerceTo.select (get_elt name elt))
(fun () -> failwith (Printf.sprintf "Non 'select' node (%s)" name))
id
let get_elt_textarea name elt : Dom_html.textAreaElement t =
Opt.case
(Dom_html.CoerceTo.textarea (get_elt name elt))
(fun () -> failwith (Printf.sprintf "Non element node (%s)" name))
id
let get_elt_img name elt : Dom_html.imageElement t =
Opt.case
(Dom_html.CoerceTo.img (get_elt name elt))
(fun () -> failwith (Printf.sprintf "Non element node (%s)" name))
id
let scrollIntoView ?(bottom = false) elt =
let elt = get_elt "Css.background" elt in
elt##(scrollIntoView (bool (not bottom)))
type disable = < disabled: bool t prop >
let get_disable_elt name elt : disable t =
if undefined == (Unsafe.coerce @@ Html.toelt elt)##.disabled then
manip_error
"Cannot call %s on a node without a 'disable' property"
name;
Unsafe.coerce @@ Html.toelt elt
let disable elt =
let elt = get_disable_elt "disable" elt in
elt##.disabled := _true
let enable elt =
let elt = get_disable_elt "enable" elt in
elt##.disabled := _false
type focus = < focus: unit meth >
let get_focus_elt name elt : focus t =
if undefined == (Unsafe.coerce @@ Html.toelt elt)##.focus then
manip_error
"Cannot call %s on a node without a 'focus' property"
name;
Unsafe.coerce @@ Html.toelt elt
let focus elt =
let elt = get_focus_elt "focus" elt in
elt##focus
type blur = < blur: unit meth >
let get_blur_elt name elt : blur t =
if undefined == (Unsafe.coerce @@ Html.toelt elt)##.blur then
manip_error
"Cannot call %s on a node without a 'blur' property"
name;
Unsafe.coerce @@ Html.toelt elt
let blur elt =
let elt = get_blur_elt "blur" elt in
elt##blur
type value = < value: js_string t prop >
let get_value_elt name elt : value t =
if undefined == (Unsafe.coerce @@ Html.toelt elt)##.value then
manip_error
"Cannot call %s on a node without a 'value' property"
name;
Unsafe.coerce @@ Html.toelt elt
let value elt =
let elt = get_value_elt "value" elt in
to_string elt##.value
let set_value elt s =
let elt = get_value_elt "value" elt in
elt##.value := (string s)
type files = < files: File.fileList t optdef readonly_prop >
let get_files_elt name elt : files t =
if undefined == (Unsafe.coerce @@ Html.toelt elt)##.files then
manip_error
"Cannot call %s on a node without a 'files' property"
name;
Unsafe.coerce @@ Html.toelt elt
(** [files elt] returns the [File.file] objects selected in the file
    [input] element [elt], dropping any null entries of the underlying
    [FileList]. Returns [[]] when the node exposes no [files] property
    value.

    NOTE(review): the result appears to come out in *reverse* index
    order — [list_init] builds indices from [n-1] down to [0], and the
    [fold_left]-cons followed by [List.rev] preserves that order.
    Confirm whether callers rely on this before changing it.
    @raise Error (via [get_files_elt]) if [elt] has no 'files' property. *)
let files elt =
  let elt = get_files_elt "files" elt in
  let files = elt##.files in
  Optdef.case files
    (fun () -> [])
    (fun files ->
       (* Local init: yields [f (n-1); f (n-2); ...; f 0]. *)
       let rec list_init n f = match n with
         | i when i<=0 -> []
         | i -> f (i-1) :: (list_init (i-1) f) in
       let l = list_init (files##.length) (fun i -> files##(item i)) in
       (* Keep only non-null items; cons-then-rev keeps [l]'s order. *)
       List.rev @@ List.fold_left (fun acc file ->
           Opt.case file
             (fun () -> acc)
             (fun file -> file :: acc)) [] l)
let upload_input ?(btoa=true) ?encoding elt post =
let files = files elt in
List.iter (fun file ->
let reader = new%js File.fileReader in
reader##.onloadend :=
Dom.handler (fun _evt ->
if reader##.readyState = File.DONE then
Opt.case (File.CoerceTo.string (reader##.result))
(fun () -> assert false)
(fun s ->
if not btoa then post (to_string s)
else
let s = to_string (Dom_html.window##(btoa s)) in
post s);
_true
);
match encoding with
| None -> reader##(readAsBinaryString file)
| Some e -> reader##(readAsText_withEncoding file (Js.string e))
) files;
true
module Elt = struct
let body =
try Of_dom.of_body (Dom_html.window##.document##.body)
let active () =
(Unsafe.coerce Dom_html.window##.document)##.activeElement
end
module Ev = struct
type ('a, 'b) ev = 'a Html.elt -> ('b t -> bool) -> unit
type ('a,'b) ev_unit = 'a Html.elt -> ('b t -> unit) -> unit
let bool_cb f = Dom_html.handler (fun e -> bool (f e))
let onkeyup elt f =
let elt = get_elt "Ev.onkeyup" elt in
elt##.onkeyup := (bool_cb f)
let onkeydown elt f =
let elt = get_elt "Ev.onkeydown" elt in
elt##.onkeydown := (bool_cb f)
let onmouseup elt f =
let elt = get_elt "Ev.onmouseup" elt in
elt##.onmouseup := (bool_cb f)
let onmousedown elt f =
let elt = get_elt "Ev.onmousedown" elt in
elt##.onmousedown := (bool_cb f)
let onmouseout elt f =
let elt = get_elt "Ev.onmouseout" elt in
elt##.onmouseout := (bool_cb f)
let onmouseover elt f =
let elt = get_elt "Ev.onmouseover" elt in
elt##.onmouseover := (bool_cb f)
let onclick elt f =
let elt = get_elt "Ev.onclick" elt in
elt##.onclick := (bool_cb f)
let ondblclick elt f =
let elt = get_elt "Ev.ondblclick" elt in
elt##.ondblclick := (bool_cb f)
let onmousemove elt f =
let elt = get_elt "Ev.onmousemove" elt in
elt##.onmousemove := (bool_cb f)
let onload elt f =
let elt = get_elt_img "Ev.onload" elt in
elt##.onload := (bool_cb f)
let onerror elt f =
let elt = get_elt_img "Ev.onerror" elt in
elt##.onerror := (bool_cb f)
let onabort elt f =
let elt = get_elt_img "Ev.onabort" elt in
elt##.onabort := (bool_cb f)
let onfocus elt f =
let elt = get_elt_input "Ev.onfocus" elt in
elt##.onfocus := (bool_cb f)
let onblur elt f =
let elt = get_elt_input "Ev.onblur" elt in
elt##.onblur := (bool_cb f)
let onfocus_textarea elt f =
let elt = get_elt_textarea "Ev.onfocus" elt in
elt##.onfocus := (bool_cb f)
let onblur_textarea elt f =
let elt = get_elt_textarea "Ev.onblur" elt in
elt##.onblur := (bool_cb f)
let onscroll elt f =
let elt = get_elt "Ev.onscroll" elt in
elt##.onscroll := (bool_cb f)
let onreturn elt f =
let f ev =
let key = ev##.keyCode in
if key = 13 then f ev;
true in
onkeydown elt f
let onchange elt f =
let elt = get_elt_input "Ev.onchange" elt in
elt##.onchange := (bool_cb f)
let onchange_select elt f =
let elt = get_elt_select "Ev.onchange_select" elt in
elt##.onchange := (bool_cb f)
let oninput elt f =
let elt = get_elt_input "Ev.oninput" elt in
elt##.oninput := (bool_cb f)
end
module Attr = struct
let clientWidth elt =
let elt = get_elt "Attr.clientWidth" elt in
elt##.clientWidth
let clientHeight elt =
let elt = get_elt "Attr.clientHeight" elt in
elt##.clientHeight
let offsetWidth elt =
let elt = get_elt "Attr.offsetWidth" elt in
elt##.offsetWidth
let offsetHeight elt =
let elt = get_elt "Attr.offsetHeight" elt in
elt##.offsetHeight
let clientLeft elt =
let elt = get_elt "Attr.clientLeft" elt in
elt##.clientLeft
let clientTop elt =
let elt = get_elt "Attr.clientTop" elt in
elt##.clientTop
end
module Css = struct
let background elt =
let elt = get_elt "Css.background" elt in
to_bytestring (elt##.style##.background)
let backgroundAttachment elt =
let elt = get_elt "Css.backgroundAttachment" elt in
to_bytestring (elt##.style##.backgroundAttachment)
let backgroundColor elt =
let elt = get_elt "Css.backgroundColor" elt in
to_bytestring (elt##.style##.backgroundColor)
let backgroundImage elt =
let elt = get_elt "Css.backgroundImage" elt in
to_bytestring (elt##.style##.backgroundImage)
let backgroundPosition elt =
let elt = get_elt "Css.backgroundPosition" elt in
to_bytestring (elt##.style##.backgroundPosition)
let backgroundRepeat elt =
let elt = get_elt "Css.backgroundRepeat" elt in
to_bytestring (elt##.style##.backgroundRepeat)
let border elt =
let elt = get_elt "Css.border" elt in
to_bytestring (elt##.style##.border)
let borderBottom elt =
let elt = get_elt "Css.borderBottom" elt in
to_bytestring (elt##.style##.borderBottom)
let borderBottomColor elt =
let elt = get_elt "Css.borderBottomColor" elt in
to_bytestring (elt##.style##.borderBottomColor)
let borderBottomStyle elt =
let elt = get_elt "Css.borderBottomStyle" elt in
to_bytestring (elt##.style##.borderBottomStyle)
let borderBottomWidth elt =
let elt = get_elt "Css.borderBottomWidth" elt in
to_bytestring (elt##.style##.borderBottomWidth)
let borderBottomWidthPx elt =
let elt = get_elt "Css.borderBottomWidthPx" elt in
parseInt (elt##.style##.borderBottomWidth)
let borderCollapse elt =
let elt = get_elt "Css.borderCollapse" elt in
to_bytestring (elt##.style##.borderCollapse)
let borderColor elt =
let elt = get_elt "Css.borderColor" elt in
to_bytestring (elt##.style##.borderColor)
let borderLeft elt =
let elt = get_elt "Css.borderLeft" elt in
to_bytestring (elt##.style##.borderLeft)
let borderLeftColor elt =
let elt = get_elt "Css.borderLeftColor" elt in
to_bytestring (elt##.style##.borderLeftColor)
let borderLeftStyle elt =
let elt = get_elt "Css.borderLeftStyle" elt in
to_bytestring (elt##.style##.borderLeftStyle)
let borderLeftWidth elt =
let elt = get_elt "Css.borderLeftWidth" elt in
to_bytestring (elt##.style##.borderLeftWidth)
let borderLeftWidthPx elt =
let elt = get_elt "Css.borderLeftWidthPx" elt in
parseInt (elt##.style##.borderLeftWidth)
let borderRight elt =
let elt = get_elt "Css.borderRight" elt in
to_bytestring (elt##.style##.borderRight)
let borderRightColor elt =
let elt = get_elt "Css.borderRightColor" elt in
to_bytestring (elt##.style##.borderRightColor)
let borderRightStyle elt =
let elt = get_elt "Css.borderRightStyle" elt in
to_bytestring (elt##.style##.borderRightStyle)
let borderRightWidth elt =
let elt = get_elt "Css.borderRightWidth" elt in
to_bytestring (elt##.style##.borderRightWidth)
let borderRightWidthPx elt =
let elt = get_elt "Css.borderRightWidthPx" elt in
parseInt (elt##.style##.borderRightWidth)
let borderSpacing elt =
let elt = get_elt "Css.borderSpacing" elt in
to_bytestring (elt##.style##.borderSpacing)
let borderStyle elt =
let elt = get_elt "Css.borderStyle" elt in
to_bytestring (elt##.style##.borderStyle)
let borderTop elt =
let elt = get_elt "Css.borderTop" elt in
to_bytestring (elt##.style##.borderTop)
let borderTopColor elt =
let elt = get_elt "Css.borderTopColor" elt in
to_bytestring (elt##.style##.borderTopColor)
let borderTopStyle elt =
let elt = get_elt "Css.borderTopStyle" elt in
to_bytestring (elt##.style##.borderTopStyle)
let borderTopWidth elt =
let elt = get_elt "Css.borderTopWidth" elt in
to_bytestring (elt##.style##.borderTopWidth)
let borderTopWidthPx elt =
let elt = get_elt "Css.borderTopWidthPx" elt in
parseInt (elt##.style##.borderTopWidth)
let borderWidth elt =
let elt = get_elt "Css.borderWidth" elt in
to_bytestring (elt##.style##.borderWidth)
let borderWidthPx elt =
let elt = get_elt "Css.borderWidthPx" elt in
parseInt (elt##.style##.borderWidth)
let borderRadius elt =
let elt = get_elt "Css.borderRadius" elt in
to_bytestring (elt##.style##.borderRadius)
let bottom elt =
let elt = get_elt "Css.bottom" elt in
to_bytestring (elt##.style##.bottom)
let captionSide elt =
let elt = get_elt "Css.captionSide" elt in
to_bytestring (elt##.style##.captionSide)
let clear elt =
let elt = get_elt "Css.clear" elt in
to_bytestring (elt##.style##.clear)
let clip elt =
let elt = get_elt "Css.clip" elt in
to_bytestring (elt##.style##.clip)
let color elt =
let elt = get_elt "Css.color" elt in
to_bytestring (elt##.style##.color)
let content elt =
let elt = get_elt "Css.content" elt in
to_bytestring (elt##.style##.content)
let counterIncrement elt =
let elt = get_elt "Css.counterIncrement" elt in
to_bytestring (elt##.style##.counterIncrement)
let counterReset elt =
let elt = get_elt "Css.counterReset" elt in
to_bytestring (elt##.style##.counterReset)
let cssFloat elt =
let elt = get_elt "Css.cssFloat" elt in
to_bytestring (elt##.style##.cssFloat)
let cssText elt =
let elt = get_elt "Css.cssText" elt in
to_bytestring (elt##.style##.cssText)
let cursor elt =
let elt = get_elt "Css.cursor" elt in
to_bytestring (elt##.style##.cursor)
let direction elt =
let elt = get_elt "Css.direction" elt in
to_bytestring (elt##.style##.direction)
let display elt =
let elt = get_elt "Css.display" elt in
to_bytestring (elt##.style##.display)
let emptyCells elt =
let elt = get_elt "Css.emptyCells" elt in
to_bytestring (elt##.style##.emptyCells)
let font elt =
let elt = get_elt "Css.font" elt in
to_bytestring (elt##.style##.font)
let fontFamily elt =
let elt = get_elt "Css.fontFamily" elt in
to_bytestring (elt##.style##.fontFamily)
let fontSize elt =
let elt = get_elt "Css.fontSize" elt in
to_bytestring (elt##.style##.fontSize)
let fontStyle elt =
let elt = get_elt "Css.fontStyle" elt in
to_bytestring (elt##.style##.fontStyle)
let fontVariant elt =
let elt = get_elt "Css.fontVariant" elt in
to_bytestring (elt##.style##.fontVariant)
let fontWeight elt =
let elt = get_elt "Css.fontWeight" elt in
to_bytestring (elt##.style##.fontWeight)
let height elt =
let elt = get_elt "Css.height" elt in
to_bytestring (elt##.style##.height)
let heightPx elt =
let elt = get_elt "Css.heightPx" elt in
parseInt (elt##.style##.height)
let left elt =
let elt = get_elt "Css.left" elt in
to_bytestring (elt##.style##.left)
let leftPx elt =
let elt = get_elt "Css.leftPx" elt in
parseInt (elt##.style##.left)
let letterSpacing elt =
let elt = get_elt "Css.letterSpacing" elt in
to_bytestring (elt##.style##.letterSpacing)
let lineHeight elt =
let elt = get_elt "Css.lineHeight" elt in
to_bytestring (elt##.style##.lineHeight)
let listStyle elt =
let elt = get_elt "Css.listStyle" elt in
to_bytestring (elt##.style##.listStyle)
let listStyleImage elt =
let elt = get_elt "Css.listStyleImage" elt in
to_bytestring (elt##.style##.listStyleImage)
let listStylePosition elt =
let elt = get_elt "Css.listStylePosition" elt in
to_bytestring (elt##.style##.listStylePosition)
let listStyleType elt =
let elt = get_elt "Css.listStyleType" elt in
to_bytestring (elt##.style##.listStyleType)
let margin elt =
let elt = get_elt "Css.margin" elt in
to_bytestring (elt##.style##.margin)
let marginBottom elt =
let elt = get_elt "Css.marginBottom" elt in
to_bytestring (elt##.style##.marginBottom)
let marginBottomPx elt =
let elt = get_elt "Css.marginBottomPx" elt in
parseInt (elt##.style##.marginBottom)
let marginLeft elt =
let elt = get_elt "Css.marginLeft" elt in
to_bytestring (elt##.style##.marginLeft)
let marginLeftPx elt =
let elt = get_elt "Css.marginLeftPx" elt in
parseInt (elt##.style##.marginLeft)
let marginRight elt =
let elt = get_elt "Css.marginRight" elt in
to_bytestring (elt##.style##.marginRight)
let marginRightPx elt =
let elt = get_elt "Css.marginRightPx" elt in
parseInt (elt##.style##.marginRight)
let marginTop elt =
let elt = get_elt "Css.marginTop" elt in
to_bytestring (elt##.style##.marginTop)
let marginTopPx elt =
let elt = get_elt "Css.marginTopPx" elt in
parseInt (elt##.style##.marginTop)
let maxHeight elt =
let elt = get_elt "Css.maxHeight" elt in
to_bytestring (elt##.style##.maxHeight)
let maxHeightPx elt =
let elt = get_elt "Css.maxHeightPx" elt in
parseInt (elt##.style##.maxHeight)
let maxWidth elt =
let elt = get_elt "Css.maxWidth" elt in
to_bytestring (elt##.style##.maxWidth)
let maxWidthPx elt =
let elt = get_elt "Css.maxWidthPx" elt in
parseInt (elt##.style##.maxWidth)
let minHeight elt =
let elt = get_elt "Css.minHeight" elt in
to_bytestring (elt##.style##.minHeight)
let minHeightPx elt =
let elt = get_elt "Css.minHeightPx" elt in
parseInt (elt##.style##.minHeight)
let minWidth elt =
let elt = get_elt "Css.minWidth" elt in
to_bytestring (elt##.style##.minWidth)
let minWidthPx elt =
let elt = get_elt "Css.minWidthPx" elt in
parseInt (elt##.style##.minWidth)
let opacity elt =
let elt = get_elt "Css.opacity" elt in
option_map to_bytestring (Optdef.to_option (elt##.style##.opacity))
let outline elt =
let elt = get_elt "Css.outline" elt in
to_bytestring (elt##.style##.outline)
let outlineColor elt =
let elt = get_elt "Css.outlineColor" elt in
to_bytestring (elt##.style##.outlineColor)
let outlineOffset elt =
let elt = get_elt "Css.outlineOffset" elt in
to_bytestring (elt##.style##.outlineOffset)
let outlineStyle elt =
let elt = get_elt "Css.outlineStyle" elt in
to_bytestring (elt##.style##.outlineStyle)
let outlineWidth elt =
let elt = get_elt "Css.outlineWidth" elt in
to_bytestring (elt##.style##.outlineWidth)
let overflow elt =
let elt = get_elt "Css.overflow" elt in
to_bytestring (elt##.style##.overflow)
let overflowX elt =
let elt = get_elt "Css.overflowX" elt in
to_bytestring (elt##.style##.overflowX)
let overflowY elt =
let elt = get_elt "Css.overflowY" elt in
to_bytestring (elt##.style##.overflowY)
let padding elt =
let elt = get_elt "Css.padding" elt in
to_bytestring (elt##.style##.padding)
let paddingBottom elt =
let elt = get_elt "Css.paddingBottom" elt in
to_bytestring (elt##.style##.paddingBottom)
let paddingBottomPx elt =
let elt = get_elt "Css.paddingBottomPx" elt in
parseInt (elt##.style##.paddingBottom)
let paddingLeft elt =
let elt = get_elt "Css.paddingLeft" elt in
to_bytestring (elt##.style##.paddingLeft)
let paddingLeftPx elt =
let elt = get_elt "Css.paddingLeftPx" elt in
parseInt (elt##.style##.paddingLeft)
let paddingRight elt =
let elt = get_elt "Css.paddingRight" elt in
to_bytestring (elt##.style##.paddingRight)
let paddingRightPx elt =
let elt = get_elt "Css.paddingRightPx" elt in
parseInt (elt##.style##.paddingRight)
let paddingTop elt =
let elt = get_elt "Css.paddingTop" elt in
to_bytestring (elt##.style##.paddingTop)
let paddingTopPx elt =
let elt = get_elt "Css.paddingTopPx" elt in
parseInt (elt##.style##.paddingTop)
let pageBreakAfter elt =
let elt = get_elt "Css.pageBreakAfter" elt in
to_bytestring (elt##.style##.pageBreakAfter)
let pageBreakBefore elt =
let elt = get_elt "Css.pageBreakBefore" elt in
to_bytestring (elt##.style##.pageBreakBefore)
let position elt =
let elt = get_elt "Css.position" elt in
to_bytestring (elt##.style##.position)
let right elt =
let elt = get_elt "Css.right" elt in
to_bytestring (elt##.style##.right)
let rightPx elt =
let elt = get_elt "Css.rightPx" elt in
parseInt (elt##.style##.right)
let tableLayout elt =
let elt = get_elt "Css.tableLayout" elt in
to_bytestring (elt##.style##.tableLayout)
let textAlign elt =
let elt = get_elt "Css.textAlign" elt in
to_bytestring (elt##.style##.textAlign)
let textDecoration elt =
let elt = get_elt "Css.textDecoration" elt in
to_bytestring (elt##.style##.textDecoration)
let textIndent elt =
let elt = get_elt "Css.textIndent" elt in
to_bytestring (elt##.style##.textIndent)
let textTransform elt =
let elt = get_elt "Css.textTransform" elt in
to_bytestring (elt##.style##.textTransform)
let top elt =
let elt = get_elt "Css.top" elt in
to_bytestring (elt##.style##.top)
let topPx elt =
let elt = get_elt "Css.topPx" elt in
parseInt (elt##.style##.top)
let verticalAlign elt =
let elt = get_elt "Css.verticalAlign" elt in
to_bytestring (elt##.style##.verticalAlign)
let visibility elt =
let elt = get_elt "Css.visibility" elt in
to_bytestring (elt##.style##.visibility)
let whiteSpace elt =
let elt = get_elt "Css.whiteSpace" elt in
to_bytestring (elt##.style##.whiteSpace)
let width elt =
let elt = get_elt "Css.width" elt in
to_bytestring (elt##.style##.width)
let widthPx elt =
let elt = get_elt "Css.widthPx" elt in
parseInt (elt##.style##.width)
let wordSpacing elt =
let elt = get_elt "Css.wordSpacing" elt in
to_bytestring (elt##.style##.wordSpacing)
let zIndex elt =
let elt = get_elt "Css.zIndex" elt in
to_bytestring (elt##.style##.zIndex)
end
(* Typed setters for an element's inline CSS, mirroring the getters in
   [Css].  Every setter has the same shape:
   - [get_elt "SetCss.<prop>" elt] unwraps the element, failing with
     the given label when there is no underlying DOM node;
   - the js_of_ocaml property [elt##.style##.<prop>] is assigned the
     string [v] (converted with [bytestring]).
   The [<prop>Px] variants take an [int] and delegate to the string
   setter after formatting the value as "<n>px". *)
module SetCss = struct
let background elt v =
let elt = get_elt "SetCss.background" elt in
elt##.style##.background := bytestring v
let backgroundAttachment elt v =
let elt = get_elt "SetCss.backgroundAttachment" elt in
elt##.style##.backgroundAttachment := bytestring v
let backgroundColor elt v =
let elt = get_elt "SetCss.backgroundColor" elt in
elt##.style##.backgroundColor := bytestring v
let backgroundImage elt v =
let elt = get_elt "SetCss.backgroundImage" elt in
elt##.style##.backgroundImage := bytestring v
let backgroundPosition elt v =
let elt = get_elt "SetCss.backgroundPosition" elt in
elt##.style##.backgroundPosition := bytestring v
let backgroundRepeat elt v =
let elt = get_elt "SetCss.backgroundRepeat" elt in
elt##.style##.backgroundRepeat := bytestring v
let border elt v =
let elt = get_elt "SetCss.border" elt in
elt##.style##.border := bytestring v
let borderBottom elt v =
let elt = get_elt "SetCss.borderBottom" elt in
elt##.style##.borderBottom := bytestring v
let borderBottomColor elt v =
let elt = get_elt "SetCss.borderBottomColor" elt in
elt##.style##.borderBottomColor := bytestring v
let borderBottomStyle elt v =
let elt = get_elt "SetCss.borderBottomStyle" elt in
elt##.style##.borderBottomStyle := bytestring v
let borderBottomWidth elt v =
let elt = get_elt "SetCss.borderBottomWidth" elt in
elt##.style##.borderBottomWidth := bytestring v
let borderBottomWidthPx elt v = borderBottomWidth elt (Printf.sprintf "%dpx" v)
let borderCollapse elt v =
let elt = get_elt "SetCss.borderCollapse" elt in
elt##.style##.borderCollapse := bytestring v
let borderColor elt v =
let elt = get_elt "SetCss.borderColor" elt in
elt##.style##.borderColor := bytestring v
let borderLeft elt v =
let elt = get_elt "SetCss.borderLeft" elt in
elt##.style##.borderLeft := bytestring v
let borderLeftColor elt v =
let elt = get_elt "SetCss.borderLeftColor" elt in
elt##.style##.borderLeftColor := bytestring v
let borderLeftStyle elt v =
let elt = get_elt "SetCss.borderLeftStyle" elt in
elt##.style##.borderLeftStyle := bytestring v
let borderLeftWidth elt v =
let elt = get_elt "SetCss.borderLeftWidth" elt in
elt##.style##.borderLeftWidth := bytestring v
let borderLeftWidthPx elt v = borderLeftWidth elt (Printf.sprintf "%dpx" v)
let borderRight elt v =
let elt = get_elt "SetCss.borderRight" elt in
elt##.style##.borderRight := bytestring v
let borderRightColor elt v =
let elt = get_elt "SetCss.borderRightColor" elt in
elt##.style##.borderRightColor := bytestring v
let borderRightStyle elt v =
let elt = get_elt "SetCss.borderRightStyle" elt in
elt##.style##.borderRightStyle := bytestring v
let borderRightWidth elt v =
let elt = get_elt "SetCss.borderRightWidth" elt in
elt##.style##.borderRightWidth := bytestring v
let borderRightWidthPx elt v = borderRightWidth elt (Printf.sprintf "%dpx" v)
let borderSpacing elt v =
let elt = get_elt "SetCss.borderSpacing" elt in
elt##.style##.borderSpacing := bytestring v
let borderStyle elt v =
let elt = get_elt "SetCss.borderStyle" elt in
elt##.style##.borderStyle := bytestring v
let borderTop elt v =
let elt = get_elt "SetCss.borderTop" elt in
elt##.style##.borderTop := bytestring v
let borderTopColor elt v =
let elt = get_elt "SetCss.borderTopColor" elt in
elt##.style##.borderTopColor := bytestring v
let borderTopStyle elt v =
let elt = get_elt "SetCss.borderTopStyle" elt in
elt##.style##.borderTopStyle := bytestring v
let borderTopWidth elt v =
let elt = get_elt "SetCss.borderTopWidth" elt in
elt##.style##.borderTopWidth := bytestring v
let borderTopWidthPx elt v = borderTopWidth elt (Printf.sprintf "%dpx" v)
let borderWidth elt v =
let elt = get_elt "SetCss.borderWidth" elt in
elt##.style##.borderWidth := bytestring v
let borderRadius elt v =
let elt = get_elt "SetCss.borderRadius" elt in
elt##.style##.borderRadius := bytestring v
let bottom elt v =
let elt = get_elt "SetCss.bottom" elt in
elt##.style##.bottom := bytestring v
let bottomPx elt v = bottom elt (Printf.sprintf "%dpx" v)
let captionSide elt v =
let elt = get_elt "SetCss.captionSide" elt in
elt##.style##.captionSide := bytestring v
let clear elt v =
let elt = get_elt "SetCss.clear" elt in
elt##.style##.clear := bytestring v
let clip elt v =
let elt = get_elt "SetCss.clip" elt in
elt##.style##.clip := bytestring v
let color elt v =
let elt = get_elt "SetCss.color" elt in
elt##.style##.color := bytestring v
let content elt v =
let elt = get_elt "SetCss.content" elt in
elt##.style##.content := bytestring v
let counterIncrement elt v =
let elt = get_elt "SetCss.counterIncrement" elt in
elt##.style##.counterIncrement := bytestring v
let counterReset elt v =
let elt = get_elt "SetCss.counterReset" elt in
elt##.style##.counterReset := bytestring v
let cssFloat elt v =
let elt = get_elt "SetCss.cssFloat" elt in
elt##.style##.cssFloat := bytestring v
let cssText elt v =
let elt = get_elt "SetCss.cssText" elt in
elt##.style##.cssText := bytestring v
let cursor elt v =
let elt = get_elt "SetCss.cursor" elt in
elt##.style##.cursor := bytestring v
let direction elt v =
let elt = get_elt "SetCss.direction" elt in
elt##.style##.direction := bytestring v
let display elt v =
let elt = get_elt "SetCss.display" elt in
elt##.style##.display := bytestring v
let emptyCells elt v =
let elt = get_elt "SetCss.emptyCells" elt in
elt##.style##.emptyCells := bytestring v
let font elt v =
let elt = get_elt "SetCss.font" elt in
elt##.style##.font := bytestring v
let fontFamily elt v =
let elt = get_elt "SetCss.fontFamily" elt in
elt##.style##.fontFamily := bytestring v
let fontSize elt v =
let elt = get_elt "SetCss.fontSize" elt in
elt##.style##.fontSize := bytestring v
let fontStyle elt v =
let elt = get_elt "SetCss.fontStyle" elt in
elt##.style##.fontStyle := bytestring v
let fontVariant elt v =
let elt = get_elt "SetCss.fontVariant" elt in
elt##.style##.fontVariant := bytestring v
let fontWeight elt v =
let elt = get_elt "SetCss.fontWeight" elt in
elt##.style##.fontWeight := bytestring v
let height elt v =
let elt = get_elt "SetCss.height" elt in
elt##.style##.height := bytestring v
let heightPx elt v = height elt (Printf.sprintf "%dpx" v)
let left elt v =
let elt = get_elt "SetCss.left" elt in
elt##.style##.left := bytestring v
let leftPx elt v = left elt (Printf.sprintf "%dpx" v)
let letterSpacing elt v =
let elt = get_elt "SetCss.letterSpacing" elt in
elt##.style##.letterSpacing := bytestring v
let lineHeight elt v =
let elt = get_elt "SetCss.lineHeight" elt in
elt##.style##.lineHeight := bytestring v
let listStyle elt v =
let elt = get_elt "SetCss.listStyle" elt in
elt##.style##.listStyle := bytestring v
let listStyleImage elt v =
let elt = get_elt "SetCss.listStyleImage" elt in
elt##.style##.listStyleImage := bytestring v
let listStylePosition elt v =
let elt = get_elt "SetCss.listStylePosition" elt in
elt##.style##.listStylePosition := bytestring v
let listStyleType elt v =
let elt = get_elt "SetCss.listStyleType" elt in
elt##.style##.listStyleType := bytestring v
let margin elt v =
let elt = get_elt "SetCss.margin" elt in
elt##.style##.margin := bytestring v
let marginBottom elt v =
let elt = get_elt "SetCss.marginBottom" elt in
elt##.style##.marginBottom := bytestring v
let marginBottomPx elt v = marginBottom elt (Printf.sprintf "%dpx" v)
let marginLeft elt v =
let elt = get_elt "SetCss.marginLeft" elt in
elt##.style##.marginLeft := bytestring v
let marginLeftPx elt v = marginLeft elt (Printf.sprintf "%dpx" v)
let marginRight elt v =
let elt = get_elt "SetCss.marginRight" elt in
elt##.style##.marginRight := bytestring v
let marginRightPx elt v = marginRight elt (Printf.sprintf "%dpx" v)
let marginTop elt v =
let elt = get_elt "SetCss.marginTop" elt in
elt##.style##.marginTop := bytestring v
let marginTopPx elt v = marginTop elt (Printf.sprintf "%dpx" v)
let maxHeight elt v =
let elt = get_elt "SetCss.maxHeight" elt in
elt##.style##.maxHeight := bytestring v
let maxHeightPx elt v = maxHeight elt (Printf.sprintf "%dpx" v)
let maxWidth elt v =
let elt = get_elt "SetCss.maxWidth" elt in
elt##.style##.maxWidth := bytestring v
let maxWidthPx elt v = maxWidth elt (Printf.sprintf "%dpx" v)
let minHeight elt v =
let elt = get_elt "SetCss.minHeight" elt in
elt##.style##.minHeight := bytestring v
let minHeightPx elt v = minHeight elt (Printf.sprintf "%dpx" v)
let minWidth elt v =
let elt = get_elt "SetCss.minWidth" elt in
elt##.style##.minWidth := bytestring v
let minWidthPx elt v = minWidth elt (Printf.sprintf "%dpx" v)
(* [opacity] takes a [string option]: [None] clears the property
   (assigns [undefined]), [Some v] assigns the string value. *)
let opacity elt v =
let elt = get_elt "SetCss.opacity" elt in
elt##.style##.opacity := match v with None -> undefined | Some v -> def (bytestring v)
let outline elt v =
let elt = get_elt "SetCss.outline" elt in
elt##.style##.outline := bytestring v
let outlineColor elt v =
let elt = get_elt "SetCss.outlineColor" elt in
elt##.style##.outlineColor := bytestring v
let outlineOffset elt v =
let elt = get_elt "SetCss.outlineOffset" elt in
elt##.style##.outlineOffset := bytestring v
let outlineStyle elt v =
let elt = get_elt "SetCss.outlineStyle" elt in
elt##.style##.outlineStyle := bytestring v
let outlineWidth elt v =
let elt = get_elt "SetCss.outlineWidth" elt in
elt##.style##.outlineWidth := bytestring v
let overflow elt v =
let elt = get_elt "SetCss.overflow" elt in
elt##.style##.overflow := bytestring v
let overflowX elt v =
let elt = get_elt "SetCss.overflowX" elt in
elt##.style##.overflowX := bytestring v
let overflowY elt v =
let elt = get_elt "SetCss.overflowY" elt in
elt##.style##.overflowY := bytestring v
let padding elt v =
let elt = get_elt "SetCss.padding" elt in
elt##.style##.padding := bytestring v
let paddingBottom elt v =
let elt = get_elt "SetCss.paddingBottom" elt in
elt##.style##.paddingBottom := bytestring v
let paddingBottomPx elt v = paddingBottom elt (Printf.sprintf "%dpx" v)
let paddingLeft elt v =
let elt = get_elt "SetCss.paddingLeft" elt in
elt##.style##.paddingLeft := bytestring v
let paddingLeftPx elt v = paddingLeft elt (Printf.sprintf "%dpx" v)
let paddingRight elt v =
let elt = get_elt "SetCss.paddingRight" elt in
elt##.style##.paddingRight := bytestring v
let paddingRightPx elt v = paddingRight elt (Printf.sprintf "%dpx" v)
let paddingTop elt v =
let elt = get_elt "SetCss.paddingTop" elt in
elt##.style##.paddingTop := bytestring v
let paddingTopPx elt v = paddingTop elt (Printf.sprintf "%dpx" v)
let pageBreakAfter elt v =
let elt = get_elt "SetCss.pageBreakAfter" elt in
elt##.style##.pageBreakAfter := bytestring v
let pageBreakBefore elt v =
let elt = get_elt "SetCss.pageBreakBefore" elt in
elt##.style##.pageBreakBefore := bytestring v
let position elt v =
let elt = get_elt "SetCss.position" elt in
elt##.style##.position := bytestring v
let right elt v =
let elt = get_elt "SetCss.right" elt in
elt##.style##.right := bytestring v
let rightPx elt v = right elt (Printf.sprintf "%dpx" v)
let tableLayout elt v =
let elt = get_elt "SetCss.tableLayout" elt in
elt##.style##.tableLayout := bytestring v
let textAlign elt v =
let elt = get_elt "SetCss.textAlign" elt in
elt##.style##.textAlign := bytestring v
let textDecoration elt v =
let elt = get_elt "SetCss.textDecoration" elt in
elt##.style##.textDecoration := bytestring v
let textIndent elt v =
let elt = get_elt "SetCss.textIndent" elt in
elt##.style##.textIndent := bytestring v
let textTransform elt v =
let elt = get_elt "SetCss.textTransform" elt in
elt##.style##.textTransform := bytestring v
let top elt v =
let elt = get_elt "SetCss.top" elt in
elt##.style##.top := bytestring v
let topPx elt v = top elt (Printf.sprintf "%dpx" v)
let verticalAlign elt v =
let elt = get_elt "SetCss.verticalAlign" elt in
elt##.style##.verticalAlign := bytestring v
let visibility elt v =
let elt = get_elt "SetCss.visibility" elt in
elt##.style##.visibility := bytestring v
let whiteSpace elt v =
let elt = get_elt "SetCss.whiteSpace" elt in
elt##.style##.whiteSpace := bytestring v
let width elt v =
let elt = get_elt "SetCss.width" elt in
elt##.style##.width := bytestring v
let widthPx elt v = width elt (Printf.sprintf "%dpx" v)
let wordSpacing elt v =
let elt = get_elt "SetCss.wordSpacing" elt in
elt##.style##.wordSpacing := bytestring v
let zIndex elt v =
let elt = get_elt "SetCss.zIndex" elt in
elt##.style##.zIndex := bytestring v
end
end
(* [hide elt] hides [elt] by setting its inline [display] to "none";
   [show elt] clears the inline value so the stylesheet default applies
   again. *)
let hide elt = Manip.SetCss.display elt "none"
let show elt = Manip.SetCss.display elt ""
(* [window_open ?features url name] calls [window.open] with [url] and
   the window [name]; [?features] is the optional comma-separated
   feature string, passed as JS [null] when omitted. *)
let window_open ?features url name =
  let feats =
    match features with
    | Some s -> some (string s)
    | None -> null
  in
  window##(open_ (string url) (string name) feats)
(* Thin wrappers around a js_of_ocaml window object. *)
module Window = struct
(* Close the given window. *)
let close win = win##close
(* Typed views of the window's document body / head. *)
let body win = Of_dom.of_body win##.document##.body
let head win = Of_dom.of_head win##.document##.head
(* Install [f] as the window's "unload" handler; the handler's boolean
   result is converted back to a JS bool.  Defaults to the current
   window. *)
let onunload ?(win = Dom_html.window) f =
win##.onunload := Dom_html.handler (fun ev -> bool (f ev))
(* Same, for the "resize" event. *)
let onresize ?(win = Dom_html.window) f =
win##.onresize := Dom_html.handler (fun ev -> bool (f ev))
(* [prompt ?win ?value msg] shows a prompt dialog pre-filled with
   [value]; returns "" when the user cancels (the JS result is null). *)
let prompt ?(win = Dom_html.window) ?(value = "") msg =
Opt.case
(win##(prompt (string msg) (string value)))
(fun () -> "")
to_string
(* Same as [onunload]/[onresize], for the "hashchange" event. *)
let onhashchange ?(win = Dom_html.window) f =
win##.onhashchange := Dom_html.handler (fun ev -> bool (f ev))
end
module Document = struct
(* [uri ()] returns the URL of the current document as an OCaml string. *)
let uri () = to_string (doc##._URL)
end
(* [parse_fragment ()] parses the current URL fragment as a list of
   [name=value] pairs separated by "&" (or the percent-encoded forms
   "%26" / "%3D").  Because the split regexps use a capture group, the
   separators themselves appear in the split result and are filtered
   out.  A piece without "=" yields [(name, "")]. *)
let parse_fragment () =
  let pieces =
    Regexp.(split (regexp "(&|%26)") (Url.Current.get_fragment ())) in
  let add piece acc =
    if piece = "&" || piece = "%26" || piece = "" then acc
    else
      match Regexp.(split (regexp "(=|%3D)") piece) with
      | [name] -> (name, "") :: acc
      | name :: _sep :: value -> (name, String.concat "" value) :: acc
      | [] -> assert false
  in
  List.fold_right add pieces []
(* [set_fragment args] serializes [(name, value)] pairs as
   "name=value" joined by "&" and stores the result in the current URL
   fragment (inverse of [parse_fragment]). *)
let set_fragment args =
  args
  |> List.map (fun (n, v) -> n ^ "=" ^ v)
  |> String.concat "&"
  |> Url.Current.set_fragment
(* [find_component id] looks an element up by DOM id and fails with an
   explicit message when it is missing. *)
let find_component id =
  match Manip.by_id id with
  | None -> failwith ("Cannot find id " ^ id)
  | Some elt -> elt
(* Programmatic clipboard writing via one-shot interception of the
   document "copy" event: [set_copy] installs a listener that, when the
   [intercept] flag is set, replaces the event's clipboard payload with
   the stored [data]; [copy] arms that state and triggers a copy with
   [execCommand "copy"]. *)
module Clipboard = struct
(* Mutable interception state shared between [set_copy] and [copy]. *)
type t = {
mutable intercept : bool ;
mutable data : string ;
}
(* Single global state cell; starts disarmed. *)
let clipboard =
{ intercept = false ; data = "" }
(* Register the document-level "copy" listener.  Must be called once
   before [copy] can have any effect.  When armed, the handler writes
   [clipboard.data] as "text/plain", cancels the default copy, and
   disarms/clears the state; otherwise it lets the copy proceed. *)
let set_copy () =
Dom.addEventListener
doc
(Dom.Event.make "copy")
(Dom.handler (fun e ->
if clipboard.intercept then begin
(e##.clipboardData##(setData (string "text/plain") (string clipboard.data)));
Dom.preventDefault e;
clipboard.intercept <- false ;
clipboard.data <- ""
end;
_true))
_true |> ignore
(* [copy value] arms the interception state with [value] and issues an
   [execCommand "copy"]; the listener installed by [set_copy] then
   supplies [value] as the clipboard contents. *)
let copy value : unit =
clipboard.intercept <- true;
clipboard.data <- value;
Dom_html.document##(execCommand (string "copy")
(_false) (null))
end
|
a50ab3158a7974c8941305e453df2aa8754d7cba9d221a9686045cb70a5e85a1 | jeapostrophe/exp | foo-stx.rkt | #lang racket/base
(require syntax/parse
(for-template racket/base))
(struct method (id)
#:property prop:procedure
(λ (m stx)
(with-syntax ([m-id (method-id m)])
(syntax-parse
stx
[f:id
; XXX keywords
(syntax/loc stx (λ (obj . args) (apply obj m-id args)))]
[(f:id obj:expr . args)
(syntax/loc stx (obj m-id . args))]))))
(provide (struct-out method)) | null | https://raw.githubusercontent.com/jeapostrophe/exp/43615110fd0439d2ef940c42629fcdc054c370f9/foo/foo-stx.rkt | racket | XXX keywords | #lang racket/base
(require syntax/parse
(for-template racket/base))
(struct method (id)
#:property prop:procedure
(λ (m stx)
(with-syntax ([m-id (method-id m)])
(syntax-parse
stx
[f:id
(syntax/loc stx (λ (obj . args) (apply obj m-id args)))]
[(f:id obj:expr . args)
(syntax/loc stx (obj m-id . args))]))))
(provide (struct-out method)) |
433f644fb66636868484e38dd3f2826b4fa0577d7e8d3e0346f09e4f6e4ef783 | bakul/s9fes | mean.scm | Scheme 9 from Empty Space , Function Library
By Nils M Holm, 2015
; Placed in the Public Domain
;
; (mean list) ==> real
;
; (load-from-library "mean.scm")
;
; Mean (average) function, returns the arithmetic mean value of a
; list of reals.
;
Example : ( mean ' ( 1 2 3 4 5 6 ) ) = = > 3.5
(define (mean set)
(/ (apply + set) (length set)))
| null | https://raw.githubusercontent.com/bakul/s9fes/74c14c0db5f07f5bc6d94131e9e4ee15a29275aa/lib/mean.scm | scheme | Placed in the Public Domain
(mean list) ==> real
(load-from-library "mean.scm")
Mean (average) function, returns the arithmetic mean value of a
list of reals.
| Scheme 9 from Empty Space , Function Library
By , 2015
Example : ( mean ' ( 1 2 3 4 5 6 ) ) = = > 3.5
(define (mean set)
(/ (apply + set) (length set)))
|
e3524c73c3513b123d27d988c71e3ce8ed988919e9b6d54b56e8db79ce49bcdf | lukexi/halive | TestCompileExpr.hs | {-# LANGUAGE OverloadedStrings #-}
import Control.Concurrent.STM
import Halive
main :: IO ()
main = do
ghc <- startGHC defaultGHCSessionConfig
resultChan <- compileExpression ghc
"main = print 123456789"
"main"
result <- atomically (readTChan resultChan)
putStrLn "Got result:"
print result
| null | https://raw.githubusercontent.com/lukexi/halive/cae5b327730bcea5ef25bb05e5a12a283eade97d/test/TestCompileExpr.hs | haskell | # LANGUAGE OverloadedStrings # | import Control.Concurrent.STM
import Halive
main :: IO ()
main = do
ghc <- startGHC defaultGHCSessionConfig
resultChan <- compileExpression ghc
"main = print 123456789"
"main"
result <- atomically (readTChan resultChan)
putStrLn "Got result:"
print result
|
a3629e4823cc6c59e5bf477486cda92373d072be1f349c6d43c8e2813b09053c | daveliepmann/vdquil | figure1.clj | , Chapter 4 ( Time Series ) , figure 1 :
One set of points over time
Converted from Processing to Quil as an exercise by
(ns vdquil.chapter4.figure1
(:use [quil.core]
[vdquil.util]
[vdquil.chapter4.ch4data]))
(def year-min (apply min (map first (rest milk-tea-coffee-data))))
(def year-max (apply max (map first (rest milk-tea-coffee-data))))
(def data-min (apply min (mapcat rest (rest milk-tea-coffee-data))))
(def data-max (apply max (mapcat rest (rest milk-tea-coffee-data))))
(defn setup []
(smooth))
(defn draw-plot-area
"Render the plot area as a white box"
[]
(background 224)
(fill 255)
(no-stroke)
(rect-mode :corners)
(rect 50 60 (- (width) 50) (- (height) 60)))
(defn draw-data-point [[year milk tea coffee]]
(point (map-range year year-min year-max 50 (- (width) 50))
(map-range milk data-min data-max (- (height) 60) 60)))
(defn draw []
(draw-plot-area)
(stroke-weight 5)
(stroke (hex-to-color "#5679C1"))
(doseq [row (rest milk-tea-coffee-data)]
(draw-data-point row)))
(defsketch mtc
:title "Milk, Tea, Coffee"
:setup setup
:draw draw
:size [720 400])
| null | https://raw.githubusercontent.com/daveliepmann/vdquil/f40788ff7634870a9a5f1dc4ca3df8543beaf00b/src/vdquil/chapter4/figure1.clj | clojure | , Chapter 4 ( Time Series ) , figure 1 :
One set of points over time
Converted from Processing to Quil as an exercise by
(ns vdquil.chapter4.figure1
(:use [quil.core]
[vdquil.util]
[vdquil.chapter4.ch4data]))
(def year-min (apply min (map first (rest milk-tea-coffee-data))))
(def year-max (apply max (map first (rest milk-tea-coffee-data))))
(def data-min (apply min (mapcat rest (rest milk-tea-coffee-data))))
(def data-max (apply max (mapcat rest (rest milk-tea-coffee-data))))
(defn setup []
(smooth))
(defn draw-plot-area
"Render the plot area as a white box"
[]
(background 224)
(fill 255)
(no-stroke)
(rect-mode :corners)
(rect 50 60 (- (width) 50) (- (height) 60)))
(defn draw-data-point [[year milk tea coffee]]
(point (map-range year year-min year-max 50 (- (width) 50))
(map-range milk data-min data-max (- (height) 60) 60)))
(defn draw []
(draw-plot-area)
(stroke-weight 5)
(stroke (hex-to-color "#5679C1"))
(doseq [row (rest milk-tea-coffee-data)]
(draw-data-point row)))
(defsketch mtc
:title "Milk, Tea, Coffee"
:setup setup
:draw draw
:size [720 400])
| |
cc91279d2eb3f4fb91f23079ef970ecd5a0f13caacda207e889808b9d2220bfc | CIFASIS/QuickFuzz | Utils.hs | module Utils
( module Utils.Generation
, module Utils.Patch
, module Utils.Console
, module Utils.Shrink
, module Utils.Unique
) where
import Utils.Generation
import Utils.Decoding
import Utils.Mutation
import Utils.Patch
import Utils.Console
import Utils.Shrink
import Utils.Unique
| null | https://raw.githubusercontent.com/CIFASIS/QuickFuzz/a1c69f028b0960c002cb83e8145f039ecc0e0a23/app/Utils.hs | haskell | module Utils
( module Utils.Generation
, module Utils.Patch
, module Utils.Console
, module Utils.Shrink
, module Utils.Unique
) where
import Utils.Generation
import Utils.Decoding
import Utils.Mutation
import Utils.Patch
import Utils.Console
import Utils.Shrink
import Utils.Unique
| |
80897302e2fac3e611eeb8277b31ff57304a047bd55c55fb78530ae7770cb381 | NoRedInk/haskell-libraries | Settings.hs | module Kafka.Worker.Settings
( Settings (..),
decoder,
MaxMsgsPerSecondPerPartition (..),
MaxMsgsPerPartitionBufferedLocally (..),
MaxPollIntervalMs (..),
SkipOrNot (..),
)
where
import qualified Environment
import qualified Kafka.Consumer as Consumer
import qualified Kafka.Settings.Internal as Internal
import qualified Observability
import qualified Prelude
-- | Settings required to process kafka messages
data Settings = Settings
{ -- | broker addresses. See hw-kafka's documentation for more info
brokerAddresses :: [Consumer.BrokerAddress],
| Worker will poll for new messages . This is the timeout
pollingTimeout :: Consumer.Timeout,
| Used for throttling . Turn this down to give a speed limit .
maxMsgsPerSecondPerPartition :: MaxMsgsPerSecondPerPartition,
logLevel :: Internal.KafkaLogLevel,
observability :: Observability.Settings,
-- | Provides backpressure from message-workers to the queue-reader worker.
-- Ensures that the thread responsible for pulling messages off of kafka
-- doesn't race ahead / steal resources from the threads executing messages.
maxMsgsPerPartitionBufferedLocally :: MaxMsgsPerPartitionBufferedLocally,
pollBatchSize :: Consumer.BatchSize,
-- | Time between polling
maxPollIntervalMs :: MaxPollIntervalMs,
-- | This option provides us the possibility to skip messages on failure.
Useful for testing worker . DoNotSkip is a reasonable default !
onProcessMessageSkip :: SkipOrNot,
-- | Compression codec used for topics
compressionCodec :: Internal.KafkaCompressionCodec
}
-- | This option provides us the possibility to skip messages on failure.
Useful for testing worker . DoNotSkip is a reasonable default !
data SkipOrNot = Skip | DoNotSkip
| Used for throttling . Turn this down to give a speed limit .
data MaxMsgsPerSecondPerPartition = ThrottleAt Int | DontThrottle
-- | Provides backpressure from message-workers to the queue-reader worker.
-- Ensures that the thread responsible for pulling messages off of kafka
-- doesn't race ahead / steal resources from the threads executing messages.
newtype MaxMsgsPerPartitionBufferedLocally = MaxMsgsPerPartitionBufferedLocally {unMaxMsgsPerPartitionBufferedLocally :: Int}
-- | Time between polling
newtype MaxPollIntervalMs = MaxPollIntervalMs {unMaxPollIntervalMs :: Int}
-- | decodes Settings from environmental variables
-- Also consumes Observability env variables (see nri-observability)
KAFKA_BROKER_ADDRESSES = localhost:9092 # comma delimeted list
-- KAFKA_LOG_LEVEL=Debug
-- KAFKA_POLLING_TIMEOUT=1000
KAFKA_MAX_MESSAGES_PER_SECOND_PER_PARTITION=0 ( disabled )
-- KAFKA_MAX_POLL_INTERVAL_MS=300000
KAFKA_MAX_MSGS_PER_PARTITION_BUFFERED_LOCALLY=100
-- KAFKA_SKIP_ON_PROCESS_MESSAGE_FAILURE=0
-- KAFKA_GROUP_ID=0
decoder :: Environment.Decoder Settings
decoder =
Prelude.pure Settings
|> andMap Internal.decoderBrokerAddresses
|> andMap decoderPollingTimeout
|> andMap decoderMaxMessagesPerSecondPerPartition
|> andMap Internal.decoderKafkaLogLevel
|> andMap Observability.decoder
|> andMap decoderMaxMsgsPerPartitionBufferedLocally
|> andMap decoderPollBatchSize
|> andMap decoderMaxPollIntervalMs
|> andMap decoderOnProcessMessageFailure
|> andMap Internal.decoderCompressionCodec
decoderPollingTimeout :: Environment.Decoder Consumer.Timeout
decoderPollingTimeout =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_POLLING_TIMEOUT",
Environment.description = "Polling timout for consumers",
Environment.defaultValue = "1000"
}
(map Consumer.Timeout Environment.int)
decoderMaxMessagesPerSecondPerPartition :: Environment.Decoder MaxMsgsPerSecondPerPartition
decoderMaxMessagesPerSecondPerPartition =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_MAX_MESSAGES_PER_SECOND_PER_PARTITION",
Environment.description = "This is how we throttle workers. Sets the maximum amount of messages this worker should process per second per partition. 0 is disabled.",
Environment.defaultValue = "0"
}
( map
( \maxPerSecond ->
( if maxPerSecond == 0
then DontThrottle
else ThrottleAt maxPerSecond
)
)
Environment.int
)
decoderMaxPollIntervalMs :: Environment.Decoder MaxPollIntervalMs
decoderMaxPollIntervalMs =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_MAX_POLL_INTERVAL_MS",
Environment.description = "This is used to set max.poll.interval.ms",
Environment.defaultValue = "300000"
}
(map MaxPollIntervalMs Environment.int)
decoderMaxMsgsPerPartitionBufferedLocally :: Environment.Decoder MaxMsgsPerPartitionBufferedLocally
decoderMaxMsgsPerPartitionBufferedLocally =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_MAX_MSGS_PER_PARTITION_BUFFERED_LOCALLY",
Environment.description = "Pausing reading from kafka when we have this many messages queued up but not yet processed",
Environment.defaultValue = "100"
}
(map MaxMsgsPerPartitionBufferedLocally Environment.int)
decoderPollBatchSize :: Environment.Decoder Consumer.BatchSize
decoderPollBatchSize =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_POLL_BATCH_SIZE",
Environment.description = "The amount of messages we request in a single poll request to Kafka",
Environment.defaultValue = "100"
}
(map Consumer.BatchSize Environment.int)
decoderOnProcessMessageFailure :: Environment.Decoder SkipOrNot
decoderOnProcessMessageFailure =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_SKIP_ON_PROCESS_MESSAGE_FAILURE",
Environment.description = "Whether to skip message that are failing processing. 1 means on, 0 means off.",
Environment.defaultValue = "0"
}
( Environment.custom
Environment.int
( \int ->
if int >= 1
then Ok Skip
else Ok DoNotSkip
)
)
| null | https://raw.githubusercontent.com/NoRedInk/haskell-libraries/fd3482318d0ae7e54ad753b473aef24951de7d2f/nri-kafka/src/Kafka/Worker/Settings.hs | haskell | | Settings required to process kafka messages
| broker addresses. See hw-kafka's documentation for more info
| Provides backpressure from message-workers to the queue-reader worker.
Ensures that the thread responsible for pulling messages off of kafka
doesn't race ahead / steal resources from the threads executing messages.
| Time between polling
| This option provides us the possibility to skip messages on failure.
| Compression codec used for topics
| This option provides us the possibility to skip messages on failure.
| Provides backpressure from message-workers to the queue-reader worker.
Ensures that the thread responsible for pulling messages off of kafka
doesn't race ahead / steal resources from the threads executing messages.
| Time between polling
| decodes Settings from environmental variables
Also consumes Observability env variables (see nri-observability)
KAFKA_LOG_LEVEL=Debug
KAFKA_POLLING_TIMEOUT=1000
KAFKA_MAX_POLL_INTERVAL_MS=300000
KAFKA_SKIP_ON_PROCESS_MESSAGE_FAILURE=0
KAFKA_GROUP_ID=0 | module Kafka.Worker.Settings
( Settings (..),
decoder,
MaxMsgsPerSecondPerPartition (..),
MaxMsgsPerPartitionBufferedLocally (..),
MaxPollIntervalMs (..),
SkipOrNot (..),
)
where
import qualified Environment
import qualified Kafka.Consumer as Consumer
import qualified Kafka.Settings.Internal as Internal
import qualified Observability
import qualified Prelude
data Settings = Settings
brokerAddresses :: [Consumer.BrokerAddress],
| Worker will poll for new messages . This is the timeout
pollingTimeout :: Consumer.Timeout,
| Used for throttling . Turn this down to give a speed limit .
maxMsgsPerSecondPerPartition :: MaxMsgsPerSecondPerPartition,
logLevel :: Internal.KafkaLogLevel,
observability :: Observability.Settings,
maxMsgsPerPartitionBufferedLocally :: MaxMsgsPerPartitionBufferedLocally,
pollBatchSize :: Consumer.BatchSize,
maxPollIntervalMs :: MaxPollIntervalMs,
Useful for testing worker . DoNotSkip is a reasonable default !
onProcessMessageSkip :: SkipOrNot,
compressionCodec :: Internal.KafkaCompressionCodec
}
Useful for testing worker . DoNotSkip is a reasonable default !
data SkipOrNot = Skip | DoNotSkip
| Used for throttling . Turn this down to give a speed limit .
data MaxMsgsPerSecondPerPartition = ThrottleAt Int | DontThrottle
newtype MaxMsgsPerPartitionBufferedLocally = MaxMsgsPerPartitionBufferedLocally {unMaxMsgsPerPartitionBufferedLocally :: Int}
newtype MaxPollIntervalMs = MaxPollIntervalMs {unMaxPollIntervalMs :: Int}
KAFKA_BROKER_ADDRESSES = localhost:9092 # comma delimeted list
KAFKA_MAX_MESSAGES_PER_SECOND_PER_PARTITION=0 ( disabled )
KAFKA_MAX_MSGS_PER_PARTITION_BUFFERED_LOCALLY=100
decoder :: Environment.Decoder Settings
decoder =
Prelude.pure Settings
|> andMap Internal.decoderBrokerAddresses
|> andMap decoderPollingTimeout
|> andMap decoderMaxMessagesPerSecondPerPartition
|> andMap Internal.decoderKafkaLogLevel
|> andMap Observability.decoder
|> andMap decoderMaxMsgsPerPartitionBufferedLocally
|> andMap decoderPollBatchSize
|> andMap decoderMaxPollIntervalMs
|> andMap decoderOnProcessMessageFailure
|> andMap Internal.decoderCompressionCodec
decoderPollingTimeout :: Environment.Decoder Consumer.Timeout
decoderPollingTimeout =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_POLLING_TIMEOUT",
Environment.description = "Polling timout for consumers",
Environment.defaultValue = "1000"
}
(map Consumer.Timeout Environment.int)
decoderMaxMessagesPerSecondPerPartition :: Environment.Decoder MaxMsgsPerSecondPerPartition
decoderMaxMessagesPerSecondPerPartition =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_MAX_MESSAGES_PER_SECOND_PER_PARTITION",
Environment.description = "This is how we throttle workers. Sets the maximum amount of messages this worker should process per second per partition. 0 is disabled.",
Environment.defaultValue = "0"
}
( map
( \maxPerSecond ->
( if maxPerSecond == 0
then DontThrottle
else ThrottleAt maxPerSecond
)
)
Environment.int
)
decoderMaxPollIntervalMs :: Environment.Decoder MaxPollIntervalMs
decoderMaxPollIntervalMs =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_MAX_POLL_INTERVAL_MS",
Environment.description = "This is used to set max.poll.interval.ms",
Environment.defaultValue = "300000"
}
(map MaxPollIntervalMs Environment.int)
decoderMaxMsgsPerPartitionBufferedLocally :: Environment.Decoder MaxMsgsPerPartitionBufferedLocally
decoderMaxMsgsPerPartitionBufferedLocally =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_MAX_MSGS_PER_PARTITION_BUFFERED_LOCALLY",
Environment.description = "Pausing reading from kafka when we have this many messages queued up but not yet processed",
Environment.defaultValue = "100"
}
(map MaxMsgsPerPartitionBufferedLocally Environment.int)
decoderPollBatchSize :: Environment.Decoder Consumer.BatchSize
decoderPollBatchSize =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_POLL_BATCH_SIZE",
Environment.description = "The amount of messages we request in a single poll request to Kafka",
Environment.defaultValue = "100"
}
(map Consumer.BatchSize Environment.int)
decoderOnProcessMessageFailure :: Environment.Decoder SkipOrNot
decoderOnProcessMessageFailure =
Environment.variable
Environment.Variable
{ Environment.name = "KAFKA_SKIP_ON_PROCESS_MESSAGE_FAILURE",
Environment.description = "Whether to skip message that are failing processing. 1 means on, 0 means off.",
Environment.defaultValue = "0"
}
( Environment.custom
Environment.int
( \int ->
if int >= 1
then Ok Skip
else Ok DoNotSkip
)
)
|
176b57bcae17d6b603096260d3b6aa3e91f488ba3871d5a44f6a9dc8795d7b35 | ndmitchell/rattle | Shared.hs |
module Development.Rattle.Shared(
Shared, withShared,
getSpeculate, setSpeculate,
getFile, setFile,
getCmdTraces, addCmdTrace,
nextRun, lastRun,
dump
) where
import General.Extra
import Development.Rattle.Types
import Development.Rattle.Hash
import General.FileName
import System.Directory.Extra
import System.FilePath
import System.IO.Extra
import Data.Maybe
import Data.List
import Control.Monad.Extra
import Control.Concurrent.Extra
import qualified Data.ByteString as BS
import General.FileInfo
import General.Binary
import Data.Monoid
import Prelude
---------------------------------------------------------------------
-- PRIMITIVES
data Shared = Shared Lock FilePath Bool
withShared :: FilePath -> Bool -> (Shared -> IO a) -> IO a
withShared dir multiple act = do
lock <- newLock
createDirectoryRecursive dir
act $ Shared lock dir multiple
filenameHash :: Hash -> String
filenameHash str = let (a:b:cs) = hashHex str in [a,b] </> cs
filenameValue :: BinaryEx a => a -> String
filenameValue = filenameHash . hashByteString . runBuilder . putEx
getList :: (BinaryEx a, BinaryEx b) => String -> Shared -> a -> IO [b]
getList typ (Shared lock dir _) name = withLock lock $ do
let file = dir </> typ </> filenameValue name
b <- doesFileExist file
if not b then pure [] else map getEx . getExList <$> BS.readFile file
setList :: (Show a, BinaryEx a, BinaryEx b) => String -> IOMode -> Shared -> a -> [b] -> IO ()
setList typ mode (Shared lock dir multiple) name vals = withLock lock $ do
let mode2 = if multiple then mode else WriteMode
let file = dir </> typ </> filenameValue name
createDirectoryRecursive $ takeDirectory file
unlessM (doesFileExist $ file <.> "txt") $
writeFile (file <.> "txt") $ show name
withFile file mode2 $ \h -> do
hSetEncoding h utf8
BS.hPutStr h $ runBuilder $ putExList $ map putEx vals
---------------------------------------------------------------------
-- SPECIAL SUPPORT FOR FILES
getFile :: Shared -> Hash -> IO (Maybe (FileName -> IO ()))
getFile (Shared lock dir _) hash = do
let file = dir </> "files" </> filenameHash hash
b <- doesFileExist file
pure $ if not b then Nothing else Just $ \out -> do
let x = fileNameToString out
createDirectoryRecursive $ takeDirectory x
copyFile file x
setFile :: Shared -> FileName -> Hash -> IO Bool -> IO ()
setFile (Shared lock dir _) source hash check = do
let file = dir </> "files" </> filenameHash hash
unlessM (doesFileExist file) $ withLock lock $ do
createDirectoryRecursive $ takeDirectory file
copyFile (fileNameToString source) (file <.> "tmp")
good <- check
if not good then
removeFile $ file <.> "tmp"
else
renameFile (file <.> "tmp") file
---------------------------------------------------------------------
-- TYPE SAFE WRAPPERS
nextRun :: Shared -> String -> IO RunIndex
nextRun share name = do
t <- maybe runIndex0 nextRunIndex . listToMaybe <$> getList "run" share name
setList "run" WriteMode share name [t]
pure t
lastRun :: Shared -> String -> IO (Maybe RunIndex)
lastRun share name = listToMaybe <$> getList "run" share name
getSpeculate :: Shared -> String -> IO [Cmd]
getSpeculate = getList "speculate"
setSpeculate :: Shared -> String -> [Cmd] -> IO ()
setSpeculate = setList "speculate" WriteMode
-- Intermediate data type which puts spaces in the right places to get better
-- word orientated diffs when looking at the output in a text editor
data File = File FileName ModTime Hash
deriving (Show)
instance BinaryEx File where
getEx x = File (byteStringToFileName a) b (getEx c)
where (b,ca) = binarySplit x
(c,a) = BS.splitAt hashLength ca
putEx (File a b c) = putExStorable b <> putEx c <> putEx (fileNameToByteString a)
First trace in list is earliest one ; last is latest one .
getCmdTraces :: Shared -> Cmd -> IO [Trace (FileName, ModTime, Hash)]
getCmdTraces shared cmd = map (fmap fromFile) <$> getList "command" shared cmd
where fromFile (File path mt x) = (path, mt, x)
addCmdTrace :: Shared -> Cmd -> Trace (FileName, ModTime, Hash) -> IO ()
addCmdTrace share cmd t = setList "command" AppendMode share cmd [fmap toFile t]
where toFile (path, mt, x) = File path mt x
---------------------------------------------------------------------
-- DUMPING
dumpList :: (String -> IO ()) -> FilePath -> String -> IO ()
dumpList out dir name = do
out ""
out $ "## " ++ name
dirs <- listDirectories $ dir </> name
forM_ dirs $ \x -> do
files <- filter (".txt" `isSuffixOf`) <$> listFiles x
forM_ files $ \file -> do
out ""
name <- readFileUTF8' file
out $ "### " ++ name
out =<< readFileUTF8' (dropExtension file)
dump :: (String -> IO ()) -> FilePath -> IO ()
dump out dir = do
out $ "# Rattle dump: " ++ dir
mapM_ (dumpList out dir) ["run","speculate","command"]
| null | https://raw.githubusercontent.com/ndmitchell/rattle/f1f10504ef175dd005c8affdddfc1fb615c040f2/src/Development/Rattle/Shared.hs | haskell | -------------------------------------------------------------------
PRIMITIVES
-------------------------------------------------------------------
SPECIAL SUPPORT FOR FILES
-------------------------------------------------------------------
TYPE SAFE WRAPPERS
Intermediate data type which puts spaces in the right places to get better
word orientated diffs when looking at the output in a text editor
-------------------------------------------------------------------
DUMPING |
module Development.Rattle.Shared(
Shared, withShared,
getSpeculate, setSpeculate,
getFile, setFile,
getCmdTraces, addCmdTrace,
nextRun, lastRun,
dump
) where
import General.Extra
import Development.Rattle.Types
import Development.Rattle.Hash
import General.FileName
import System.Directory.Extra
import System.FilePath
import System.IO.Extra
import Data.Maybe
import Data.List
import Control.Monad.Extra
import Control.Concurrent.Extra
import qualified Data.ByteString as BS
import General.FileInfo
import General.Binary
import Data.Monoid
import Prelude
data Shared = Shared Lock FilePath Bool
withShared :: FilePath -> Bool -> (Shared -> IO a) -> IO a
withShared dir multiple act = do
lock <- newLock
createDirectoryRecursive dir
act $ Shared lock dir multiple
filenameHash :: Hash -> String
filenameHash str = let (a:b:cs) = hashHex str in [a,b] </> cs
filenameValue :: BinaryEx a => a -> String
filenameValue = filenameHash . hashByteString . runBuilder . putEx
getList :: (BinaryEx a, BinaryEx b) => String -> Shared -> a -> IO [b]
getList typ (Shared lock dir _) name = withLock lock $ do
let file = dir </> typ </> filenameValue name
b <- doesFileExist file
if not b then pure [] else map getEx . getExList <$> BS.readFile file
setList :: (Show a, BinaryEx a, BinaryEx b) => String -> IOMode -> Shared -> a -> [b] -> IO ()
setList typ mode (Shared lock dir multiple) name vals = withLock lock $ do
let mode2 = if multiple then mode else WriteMode
let file = dir </> typ </> filenameValue name
createDirectoryRecursive $ takeDirectory file
unlessM (doesFileExist $ file <.> "txt") $
writeFile (file <.> "txt") $ show name
withFile file mode2 $ \h -> do
hSetEncoding h utf8
BS.hPutStr h $ runBuilder $ putExList $ map putEx vals
getFile :: Shared -> Hash -> IO (Maybe (FileName -> IO ()))
getFile (Shared lock dir _) hash = do
let file = dir </> "files" </> filenameHash hash
b <- doesFileExist file
pure $ if not b then Nothing else Just $ \out -> do
let x = fileNameToString out
createDirectoryRecursive $ takeDirectory x
copyFile file x
setFile :: Shared -> FileName -> Hash -> IO Bool -> IO ()
setFile (Shared lock dir _) source hash check = do
let file = dir </> "files" </> filenameHash hash
unlessM (doesFileExist file) $ withLock lock $ do
createDirectoryRecursive $ takeDirectory file
copyFile (fileNameToString source) (file <.> "tmp")
good <- check
if not good then
removeFile $ file <.> "tmp"
else
renameFile (file <.> "tmp") file
nextRun :: Shared -> String -> IO RunIndex
nextRun share name = do
t <- maybe runIndex0 nextRunIndex . listToMaybe <$> getList "run" share name
setList "run" WriteMode share name [t]
pure t
lastRun :: Shared -> String -> IO (Maybe RunIndex)
lastRun share name = listToMaybe <$> getList "run" share name
getSpeculate :: Shared -> String -> IO [Cmd]
getSpeculate = getList "speculate"
setSpeculate :: Shared -> String -> [Cmd] -> IO ()
setSpeculate = setList "speculate" WriteMode
data File = File FileName ModTime Hash
deriving (Show)
instance BinaryEx File where
getEx x = File (byteStringToFileName a) b (getEx c)
where (b,ca) = binarySplit x
(c,a) = BS.splitAt hashLength ca
putEx (File a b c) = putExStorable b <> putEx c <> putEx (fileNameToByteString a)
First trace in list is earliest one ; last is latest one .
getCmdTraces :: Shared -> Cmd -> IO [Trace (FileName, ModTime, Hash)]
getCmdTraces shared cmd = map (fmap fromFile) <$> getList "command" shared cmd
where fromFile (File path mt x) = (path, mt, x)
addCmdTrace :: Shared -> Cmd -> Trace (FileName, ModTime, Hash) -> IO ()
addCmdTrace share cmd t = setList "command" AppendMode share cmd [fmap toFile t]
where toFile (path, mt, x) = File path mt x
dumpList :: (String -> IO ()) -> FilePath -> String -> IO ()
dumpList out dir name = do
out ""
out $ "## " ++ name
dirs <- listDirectories $ dir </> name
forM_ dirs $ \x -> do
files <- filter (".txt" `isSuffixOf`) <$> listFiles x
forM_ files $ \file -> do
out ""
name <- readFileUTF8' file
out $ "### " ++ name
out =<< readFileUTF8' (dropExtension file)
dump :: (String -> IO ()) -> FilePath -> IO ()
dump out dir = do
out $ "# Rattle dump: " ++ dir
mapM_ (dumpList out dir) ["run","speculate","command"]
|
1569cd64314849426591533bf902960bbf36d56488dd6a60b4a35c8bf07b7c6c | danlentz/clj-uuid | util.clj | (ns clj-uuid.util
(:import (java.util UUID)))
(defn indexed
"Returns a lazy sequence of [index, item] pairs, where items come
from 's' and indexes count up from zero.
(indexed '(a b c d)) => ([0 a] [1 b] [2 c] [3 d])"
[s]
;; (map vector (iterate inc 0) s))
(map #(clojure.lang.MapEntry. %1 %2) (range) s))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
PROG1 but with more idiomatic clojure name
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmacro returning
"Compute a return value, then execute other forms for side effects.
Like prog1 in common lisp, or a (do) that returns the first form."
[value & forms]
`(let [value# ~value]
~@forms
value#))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Conditional Compilation
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn java6? []
(neg? (compare (System/getProperty "java.version") "1.7")))
(defmacro compile-if
"Evaluate `exp` and if it returns logical true and doesn't error, expand to
`then` otherwise expand to `else`.
credit: <clojure/src/clj/clojure/core/reducers.clj#L24>
(compile-if (Class/forName \"java.util.concurrent.ForkJoinTask\")
(do-cool-stuff-with-fork-join)
(fall-back-to-executor-services))"
[exp then else]
(if (try (eval exp)
(catch Throwable _ false))
`(do ~then)
`(do ~else)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Timing and Performance Metric
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmacro with-timing
"Same as clojure.core/time but returns a vector of a the result of
the code and the milliseconds rather than printing a string. Runs
the code in an implicit do."
[& body]
`(let [start# (System/nanoTime) ret# ~(cons 'do body)]
[ret# (/ (double (- (System/nanoTime) start#)) 1000000.0)]))
(defmacro run-and-measure-timing [expr]
`(let [start-time# (System/currentTimeMillis)
response# ~expr
end-time# (System/currentTimeMillis)]
{:time-taken (- end-time# start-time#)
:response response#
:start-time start-time#
:end-time end-time#}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Debugging
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmacro wrap-fn [name args & body]
`(let [old-fn# (var-get (var ~name))
new-fn# (fn [& p#]
(let [~args p#]
(do ~@body)))
wrapper# (fn [& params#]
(if (= ~(count args) (count params#))
(apply new-fn# params#)
(apply old-fn# params#)))]
(alter-var-root (var ~name) (constantly wrapper#))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; IO
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmacro with-temp-file [f-sym & body]
`(let [prefix# (.toString (UUID/randomUUID))
postfix# (.toString (UUID/randomUUID))
~f-sym (java.io.File/createTempFile prefix# postfix#)]
(try
(do ~@body)
(finally
(.delete ~f-sym)))))
(defn lines-of-file [^String file-name]
(line-seq
(java.io.BufferedReader.
(java.io.InputStreamReader.
(java.io.FileInputStream. file-name)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Condition Handling
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmacro exception [& [param & more :as params]]
(if (class? param)
`(throw (new ~param (str ~@(interpose " " more))))
`(throw (Exception. (str ~@(interpose " " params))))))
| null | https://raw.githubusercontent.com/danlentz/clj-uuid/5cbd2662dd3fce90ec0b8354e794ca6a1b5ce44c/src/clj_uuid/util.clj | clojure | (map vector (iterate inc 0) s))
Conditional Compilation
Timing and Performance Metric
Debugging
IO
Condition Handling
| (ns clj-uuid.util
(:import (java.util UUID)))
(defn indexed
"Returns a lazy sequence of [index, item] pairs, where items come
from 's' and indexes count up from zero.
(indexed '(a b c d)) => ([0 a] [1 b] [2 c] [3 d])"
[s]
(map #(clojure.lang.MapEntry. %1 %2) (range) s))
PROG1 but with more idiomatic clojure name
(defmacro returning
"Compute a return value, then execute other forms for side effects.
Like prog1 in common lisp, or a (do) that returns the first form."
[value & forms]
`(let [value# ~value]
~@forms
value#))
(defn java6? []
(neg? (compare (System/getProperty "java.version") "1.7")))
(defmacro compile-if
"Evaluate `exp` and if it returns logical true and doesn't error, expand to
`then` otherwise expand to `else`.
credit: <clojure/src/clj/clojure/core/reducers.clj#L24>
(compile-if (Class/forName \"java.util.concurrent.ForkJoinTask\")
(do-cool-stuff-with-fork-join)
(fall-back-to-executor-services))"
[exp then else]
(if (try (eval exp)
(catch Throwable _ false))
`(do ~then)
`(do ~else)))
(defmacro with-timing
"Same as clojure.core/time but returns a vector of a the result of
the code and the milliseconds rather than printing a string. Runs
the code in an implicit do."
[& body]
`(let [start# (System/nanoTime) ret# ~(cons 'do body)]
[ret# (/ (double (- (System/nanoTime) start#)) 1000000.0)]))
(defmacro run-and-measure-timing [expr]
`(let [start-time# (System/currentTimeMillis)
response# ~expr
end-time# (System/currentTimeMillis)]
{:time-taken (- end-time# start-time#)
:response response#
:start-time start-time#
:end-time end-time#}))
(defmacro wrap-fn [name args & body]
`(let [old-fn# (var-get (var ~name))
new-fn# (fn [& p#]
(let [~args p#]
(do ~@body)))
wrapper# (fn [& params#]
(if (= ~(count args) (count params#))
(apply new-fn# params#)
(apply old-fn# params#)))]
(alter-var-root (var ~name) (constantly wrapper#))))
(defmacro with-temp-file [f-sym & body]
`(let [prefix# (.toString (UUID/randomUUID))
postfix# (.toString (UUID/randomUUID))
~f-sym (java.io.File/createTempFile prefix# postfix#)]
(try
(do ~@body)
(finally
(.delete ~f-sym)))))
(defn lines-of-file [^String file-name]
(line-seq
(java.io.BufferedReader.
(java.io.InputStreamReader.
(java.io.FileInputStream. file-name)))))
(defmacro exception [& [param & more :as params]]
(if (class? param)
`(throw (new ~param (str ~@(interpose " " more))))
`(throw (Exception. (str ~@(interpose " " params))))))
|
c5dc0b5fa9abe216825d9a4a726462a4a188420aa071139c28068b0e499c2bd2 | chef/chef-server | bksw_conf.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
%% ex: ts=4 sw=4 et
@author < >
Copyright Chef Software , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
-module(bksw_conf).
%% API
-export([get_wm_configuration/0,
get_context/1,
access_key_id/1,
disk_store/0,
storage_type/0,
reset_dispatch/0,
secret_access_key/1,
stream_download/0,
summarize_config/0]).
%% Exported for common test
-ifdef(TEST).
-export([port/0, ip/0, keys/0]).
-endif.
-include("internal.hrl").
%%%===================================================================
%%% types
%%%===================================================================
-type context() :: #context{}.
%%%===================================================================
%%% API
%%%===================================================================
-spec get_context(proplists:proplist()) -> context().
get_context(Config) ->
#context{auth_check_disabled = proplists:get_value(auth_check_disabled, Config),
access_key_id = proplists:get_value(access_key_id, Config),
secret_access_key = proplists:get_value(secret_access_key, Config),
stream_download = proplists:get_value(stream_download, Config),
reqid_header_name = proplists:get_value(reqid_header_name, Config),
sql_retry_delay = proplists:get_value(sql_retry_delay, Config),
sql_retry_count = proplists:get_value(sql_retry_count, Config)
}.
-spec summarize_config() -> proplists:proplist().
summarize_config() ->
{KeyId, _Secret} = keys(),
[{ip, ip()},
{port, port()},
{log_dir, log_dir()},
{storage_type, storage_type()},
{disk_store, disk_store()},
{stream_download, stream_download()},
{auth_check_disabled, auth_check_disabled()},
{reqid_header_name, reqid_header_name()},
{access_key_id, KeyId},
{sql_retry_delay, sql_retry_delay()},
{sql_retry_count, sql_retry_count()}].
-spec get_wm_configuration() -> list().
get_wm_configuration() ->
[{ip, ip()},
{port, port()},
{dispatch, dispatch()},
{log_dir, log_dir()},
{ssl, ssl()},
{ssl_opts, ssl_opts()}].
-spec access_key_id(context()) -> binary().
access_key_id(#context{access_key_id=AccessKeyId}) ->
AccessKeyId.
-spec secret_access_key(context()) -> binary().
secret_access_key(#context{secret_access_key=SecretAccessKey}) ->
SecretAccessKey.
-spec disk_store() -> string().
-ifdef(EUNIT_TEST).
disk_store() ->
"/tmp/".
-else.
disk_store() ->
case envy:get(bookshelf, disk_store, undefined, any) of
undefined ->
error({missing_config, {bookshelf, disk_store}});
Path when is_list(Path) ->
case ends_with($/, Path) of
true ->
Path;
false ->
Path ++ "/"
end
end.
ends_with(Char, String) ->
lists:last(String) =:= Char.
-endif.
reset_dispatch() ->
Dispatch = dispatch(),
error_logger:info_msg("resetting webmachine dispatch_list: ~p~n", [Dispatch]),
application:set_env(webmachine, dispatch_list, Dispatch).
%%%===================================================================
Internal functions
%%%===================================================================
dispatch() ->
{AccessKeyId, SecretAccessKey} = keys(),
Config = [{stream_download, stream_download()},
{auth_check_disabled, auth_check_disabled()},
{access_key_id, AccessKeyId},
{secret_access_key, SecretAccessKey},
{sql_retry_count, sql_retry_count()},
{sql_retry_delay, sql_retry_delay()}],
%% per wm docs, init args for resources should be a list
dispatch_by_storage(storage_type(), Config).
dispatch_by_storage(filesystem, Config) ->
[{[bucket, obj_part, '*'], bksw_wm_object, Config},
{[bucket], bksw_wm_bucket, Config},
{[], bksw_wm_index, Config}];
dispatch_by_storage(sql, Config) ->
[{[bucket, obj_part, '*'], bksw_wm_sql_object, Config},
{[bucket], bksw_wm_sql_bucket, Config},
{[], bksw_wm_sql_index, Config}].
ip() ->
envy:get(bookshelf, ip, "127.0.0.1", string).
port() ->
envy:get(bookshelf, port, 4321, positive_integer).
ssl() ->
envy:get(bookshelf, ssl, false, boolean).
ssl_opts() ->
envy:get(bookshelf, ssl_opts, [], list).
keys() ->
{ok, AWSAccessKey} = chef_secrets:get(<<"bookshelf">>, <<"access_key_id">>),
{ok, SecretKey} = chef_secrets:get(<<"bookshelf">>, <<"secret_access_key">>),
{AWSAccessKey, SecretKey}.
log_dir() ->
envy:get(bookshelf, log_dir, code:priv_dir(bookshelf), any).
auth_check_disabled() ->
envy:get(bookshelf, auth_check_disabled, false, boolean).
storage_type() ->
envy:get(bookshelf, storage_type, filesystem, atom).
reqid_header_name() ->
envy:get(bookshelf, reqid_header_name, undefined, string).
stream_download() ->
envy:get(bookshelf, stream_download, false, boolean).
sql_retry_delay() ->
envy:get(bookshelf, sql_retry_delay, ?PGSQL_RETRY_INTERVAL, integer).
sql_retry_count() ->
envy:get(bookshelf, sql_retry_count, 0, integer).
| null | https://raw.githubusercontent.com/chef/chef-server/6d31841ecd73d984d819244add7ad6ebac284323/src/bookshelf/src/bksw_conf.erl | erlang | ex: ts=4 sw=4 et
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
API
Exported for common test
===================================================================
types
===================================================================
===================================================================
API
===================================================================
===================================================================
===================================================================
per wm docs, init args for resources should be a list | -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
@author < >
Copyright Chef Software , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(bksw_conf).
-export([get_wm_configuration/0,
get_context/1,
access_key_id/1,
disk_store/0,
storage_type/0,
reset_dispatch/0,
secret_access_key/1,
stream_download/0,
summarize_config/0]).
-ifdef(TEST).
-export([port/0, ip/0, keys/0]).
-endif.
-include("internal.hrl").
-type context() :: #context{}.
-spec get_context(proplists:proplist()) -> context().
get_context(Config) ->
#context{auth_check_disabled = proplists:get_value(auth_check_disabled, Config),
access_key_id = proplists:get_value(access_key_id, Config),
secret_access_key = proplists:get_value(secret_access_key, Config),
stream_download = proplists:get_value(stream_download, Config),
reqid_header_name = proplists:get_value(reqid_header_name, Config),
sql_retry_delay = proplists:get_value(sql_retry_delay, Config),
sql_retry_count = proplists:get_value(sql_retry_count, Config)
}.
-spec summarize_config() -> proplists:proplist().
summarize_config() ->
{KeyId, _Secret} = keys(),
[{ip, ip()},
{port, port()},
{log_dir, log_dir()},
{storage_type, storage_type()},
{disk_store, disk_store()},
{stream_download, stream_download()},
{auth_check_disabled, auth_check_disabled()},
{reqid_header_name, reqid_header_name()},
{access_key_id, KeyId},
{sql_retry_delay, sql_retry_delay()},
{sql_retry_count, sql_retry_count()}].
-spec get_wm_configuration() -> list().
get_wm_configuration() ->
[{ip, ip()},
{port, port()},
{dispatch, dispatch()},
{log_dir, log_dir()},
{ssl, ssl()},
{ssl_opts, ssl_opts()}].
-spec access_key_id(context()) -> binary().
access_key_id(#context{access_key_id=AccessKeyId}) ->
AccessKeyId.
-spec secret_access_key(context()) -> binary().
secret_access_key(#context{secret_access_key=SecretAccessKey}) ->
SecretAccessKey.
-spec disk_store() -> string().
-ifdef(EUNIT_TEST).
disk_store() ->
"/tmp/".
-else.
disk_store() ->
case envy:get(bookshelf, disk_store, undefined, any) of
undefined ->
error({missing_config, {bookshelf, disk_store}});
Path when is_list(Path) ->
case ends_with($/, Path) of
true ->
Path;
false ->
Path ++ "/"
end
end.
ends_with(Char, String) ->
lists:last(String) =:= Char.
-endif.
reset_dispatch() ->
Dispatch = dispatch(),
error_logger:info_msg("resetting webmachine dispatch_list: ~p~n", [Dispatch]),
application:set_env(webmachine, dispatch_list, Dispatch).
Internal functions
dispatch() ->
{AccessKeyId, SecretAccessKey} = keys(),
Config = [{stream_download, stream_download()},
{auth_check_disabled, auth_check_disabled()},
{access_key_id, AccessKeyId},
{secret_access_key, SecretAccessKey},
{sql_retry_count, sql_retry_count()},
{sql_retry_delay, sql_retry_delay()}],
dispatch_by_storage(storage_type(), Config).
dispatch_by_storage(filesystem, Config) ->
[{[bucket, obj_part, '*'], bksw_wm_object, Config},
{[bucket], bksw_wm_bucket, Config},
{[], bksw_wm_index, Config}];
dispatch_by_storage(sql, Config) ->
[{[bucket, obj_part, '*'], bksw_wm_sql_object, Config},
{[bucket], bksw_wm_sql_bucket, Config},
{[], bksw_wm_sql_index, Config}].
ip() ->
envy:get(bookshelf, ip, "127.0.0.1", string).
port() ->
envy:get(bookshelf, port, 4321, positive_integer).
ssl() ->
envy:get(bookshelf, ssl, false, boolean).
ssl_opts() ->
envy:get(bookshelf, ssl_opts, [], list).
keys() ->
{ok, AWSAccessKey} = chef_secrets:get(<<"bookshelf">>, <<"access_key_id">>),
{ok, SecretKey} = chef_secrets:get(<<"bookshelf">>, <<"secret_access_key">>),
{AWSAccessKey, SecretKey}.
log_dir() ->
envy:get(bookshelf, log_dir, code:priv_dir(bookshelf), any).
auth_check_disabled() ->
envy:get(bookshelf, auth_check_disabled, false, boolean).
storage_type() ->
envy:get(bookshelf, storage_type, filesystem, atom).
reqid_header_name() ->
envy:get(bookshelf, reqid_header_name, undefined, string).
stream_download() ->
envy:get(bookshelf, stream_download, false, boolean).
sql_retry_delay() ->
envy:get(bookshelf, sql_retry_delay, ?PGSQL_RETRY_INTERVAL, integer).
sql_retry_count() ->
envy:get(bookshelf, sql_retry_count, 0, integer).
|
0250269376d36b97663e3e9eb885522fd2ed4abd8444ed6795aee6545d9d2246 | ninenines/cowboy | upload_h.erl | %% Feel free to use, reuse and abuse the code in this file.
%% @doc Upload handler.
-module(upload_h).
-export([init/2]).
init(Req, Opts) ->
{ok, Headers, Req2} = cowboy_req:read_part(Req),
{ok, Data, Req3} = cowboy_req:read_part_body(Req2),
{file, <<"inputfile">>, Filename, ContentType}
= cow_multipart:form_data(Headers),
io:format("Received file ~p of content-type ~p as follow:~n~p~n~n",
[Filename, ContentType, Data]),
{ok, Req3, Opts}.
| null | https://raw.githubusercontent.com/ninenines/cowboy/8795233c57f1f472781a22ffbf186ce38cc5b049/examples/upload/src/upload_h.erl | erlang | Feel free to use, reuse and abuse the code in this file.
@doc Upload handler. |
-module(upload_h).
-export([init/2]).
init(Req, Opts) ->
{ok, Headers, Req2} = cowboy_req:read_part(Req),
{ok, Data, Req3} = cowboy_req:read_part_body(Req2),
{file, <<"inputfile">>, Filename, ContentType}
= cow_multipart:form_data(Headers),
io:format("Received file ~p of content-type ~p as follow:~n~p~n~n",
[Filename, ContentType, Data]),
{ok, Req3, Opts}.
|
67e004992bb567d785f971431edfa11270cbf015dc902fd05246389cdb453553 | basho/basho_bench | basho_bench_driver_shortcut.erl | %% -------------------------------------------------------------------
%%
%% basho_bench: Benchmarking Suite
%%
Copyright ( c ) 2009 - 2012 Basho Techonologies
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(basho_bench_driver_shortcut).
-export([new/1,
run/4]).
-export([count_eleveldb_keys/1, calc_bkey_to_prefidxes/4]).
-include("basho_bench.hrl").
-record(state, { id :: integer(),
backend :: 'bitcask' | 'eleveldb' | 'hanoidb',
backend_flags :: list(),
data_dir :: string(),
n_val :: integer(),
ring :: term(),
bucket :: binary(),
store_riak_obj :: boolean(),
idxes_to_do :: list(integer()),
idx :: integer(),
handle :: term()}).
%% ====================================================================
%% API
%% ====================================================================
new(Id) ->
Backend = basho_bench_config:get(shortcut_backend),
BackendFlags = basho_bench_config:get(shortcut_backend_flags),
DataDir = basho_bench_config:get(shortcut_data_dir),
os:cmd("mkdir -p " ++ DataDir),
RingSize = basho_bench_config:get(shortcut_ring_creation_size),
N = basho_bench_config:get(shortcut_n_val),
Ring = riak_core_ring:fresh(RingSize, nonode),
Bucket = basho_bench_config:get(shortcut_bucket),
StoreObj = basho_bench_config:get(shortcut_store_riak_object, false),
Idxes = [Idx || {Idx, _} <- riak_core_ring:all_owners(Ring)],
Concurrent = basho_bench_config:get(concurrent),
IdxParts = partition_work(Idxes, Concurrent),
I d starts counting at 1
MyIdxes = lists:nth(Id, IdxParts),
{ok, rotate_idx(#state{id = Id,
backend = Backend,
backend_flags = BackendFlags,
data_dir = DataDir,
n_val = N,
ring = Ring,
bucket = Bucket,
store_riak_obj = StoreObj,
idxes_to_do = MyIdxes})}.
run(put, KeyGen, ValueGen, S) ->
try
Key = filter_key_gen(KeyGen, S),
Value = ValueGen(),
do_put(Key, Value, S)
catch
throw:{stop, empty_keygen} ->
?DEBUG("Empty keygen\n", []),
NewS = rotate_idx(S),
do_put(filter_key_gen(KeyGen, NewS), ValueGen(), NewS)
end.
%% Private functions
partition_work(L, Num) ->
partition_work2(L, lists:duplicate(Num, [])).
partition_work2([], Acc) ->
[lists:reverse(L) || L <- Acc];
partition_work2([H|T], [First|Rest] = _Res) ->
partition_work2(T, Rest ++ [[H|First]]).
filter_key_gen(KeyGen, #state{ring = Ring, n_val = N, bucket = Bucket,
idx = Idx} = S) ->
Key = KeyGen(),
case ( ) of
{ sext_pair , SextBKey , PlainKey } - >
HashKey = PlainKey ,
Key = SextBKey , PlainKey ;
%% Plain ->
HashKey = Key = Plain
%% end,
PrefIdxes = calc_bkey_to_prefidxes(Bucket, Key, Ring, N),
case lists:member(Idx, PrefIdxes) of
true ->
Key;
false ->
filter_key_gen(KeyGen, S)
end.
rotate_idx(#state{idxes_to_do = []} = S) ->
%% Borrow a trick from the key generator: we are really, really done now.
stop_idx(S),
throw({stop, empty_keygen});
rotate_idx(#state{backend = Backend,
backend_flags = BackendFlags,
data_dir = DataDir,
idxes_to_do = [Idx|Idxes]} = S0) ->
S1 = stop_idx(S0),
basho_bench_keygen:reset_sequential_int_state(),
Handle = start_idx(Backend, BackendFlags, DataDir, Idx),
S1#state{idxes_to_do = Idxes,
idx = Idx,
handle = Handle}.
stop_idx(#state{backend = Backend, handle = Handle} = S) ->
try
stop_backend(Backend, Handle)
catch
X:Y ->
?ERROR("Stopping Id ~p's handle ~p -> ~p ~p: ~p\n",
[S#state.id, Handle, X, Y, erlang:get_stacktrace()])
end,
S#state{handle = undefined}.
start_idx(eleveldb, Flags0, DataDir, Idx) ->
Flags = [{create_if_missing, true}|Flags0],
{ok, Handle} = eleveldb:open(DataDir ++ "/" ++ integer_to_list(Idx), Flags),
Handle;
start_idx(bitcask, Flags0, DataDir, Idx) ->
Flags = [read_write|Flags0],
bitcask:open(DataDir ++ "/" ++ integer_to_list(Idx), Flags);
start_idx(hanoidb, Flags, DataDir, Idx) ->
{ok, Handle} = hanoidb:open(DataDir ++ "/" ++ integer_to_list(Idx), Flags),
Handle.
do_put(Key0, Value0, #state{backend = eleveldb, handle = Handle,
bucket = Bucket} = S) ->
{Key, Value} = make_riak_object_maybe(Bucket, Key0, Value0, S),
%% TODO: add an option for put options?
case eleveldb:put(Handle, Key, Value, []) of
ok ->
{ok, S};
{error, Reason} ->
{error, Reason, S}
end;
do_put(Key0, Value0, #state{backend = bitcask, handle = Handle,
bucket = Bucket} = S) ->
{Key, Value} = make_riak_object_maybe(Bucket, Key0, Value0, S),
case bitcask:put(Handle, Key, Value) of
ok ->
{ok, S};
{error, Reason} ->
{error, Reason, S}
end;
do_put(Key0, Value0, #state{backend = hanoidb, handle = Handle,
bucket = Bucket} = S) ->
{Key, Value} = make_riak_object_maybe(Bucket, Key0, Value0, S),
case hanoidb:put(Handle, Key, Value) of
ok ->
{ok, S};
{error, Reason} ->
{error, Reason, S}
end.
stop_backend(_, undefined) ->
ok;
stop_backend(eleveldb, _Handle) ->
Key = < < 66:2048 > > ,
ok = : put(Handle , Key , < < > > , [ ] ) ,
ok = : delete(Handle , Key , [ { sync , true } ] ) ,
ok;
stop_backend(bitcask, Handle) ->
ok = bitcask:close(Handle);
stop_backend(hanoidb, Handle) ->
ok = hanoidb:close(Handle).
count_eleveldb_keys(Dir) ->
[{File, begin
{ok, L1} = eleveldb:open(Dir ++ "/" ++ File, []),
eleveldb:fold_keys(L1, fun(_, Acc) -> Acc + 1 end, 0, [])
end} || File <- filelib:wildcard("*", Dir)].
make_riak_object_maybe(_Bucket, Key, Value, #state{store_riak_obj = false}) ->
{Key, Value};
make_riak_object_maybe(Bucket, Key, Value, #state{store_riak_obj = true,
backend = eleveldb}) ->
new_object(sext:encode({o, Bucket, Key}), Bucket, Key, Value);
make_riak_object_maybe(Bucket, Key, Value, #state{store_riak_obj = true,
backend = bitcask}) ->
new_object(term_to_binary({Bucket, Key}), Bucket, Key, Value);
make_riak_object_maybe(Bucket, Key, Value, #state{store_riak_obj = true,
backend = hanoidb}) ->
new_object(sext:encode({o, Bucket, Key}), Bucket, Key, Value).
new_object(EncodedKey, Bucket, Key, Value) ->
%% MD stuff stolen from riak_kv_put_fsm.erl
Now = erlang:now(),
<<HashAsNum:128/integer>> = basho_bench:md5(term_to_binary({node(), Now})),
VT = riak_core_util:integer_to_list(HashAsNum,62),
NewMD = dict:store(<<"X-Riak-VTag">>, VT,
dict:store(<<"X-Riak-Last-Modified">>, Now, dict:new())),
{EncodedKey,
term_to_binary(
riak_object:increment_vclock(riak_object:new(Bucket, Key, Value,
NewMD),
<<42:32/big>>))}.
calc_bkey_to_prefidxes(Bucket, Key, RingSize, N) when is_integer(RingSize) ->
calc_bkey_to_prefidxes(Bucket, Key, riak_core_ring:fresh(RingSize, nonode),
N);
calc_bkey_to_prefidxes(Bucket, Key, Ring, N) ->
DocIdx = riak_core_util:chash_std_keyfun({Bucket, Key}),
Preflist = lists:sublist(riak_core_ring:preflist(DocIdx, Ring), N),
[I || {I, _} <- Preflist].
| null | https://raw.githubusercontent.com/basho/basho_bench/aa66398bb6a91645dbb97e91a236f3cdcd1f188f/src/basho_bench_driver_shortcut.erl | erlang | -------------------------------------------------------------------
basho_bench: Benchmarking Suite
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
====================================================================
API
====================================================================
Private functions
Plain ->
end,
Borrow a trick from the key generator: we are really, really done now.
TODO: add an option for put options?
MD stuff stolen from riak_kv_put_fsm.erl | Copyright ( c ) 2009 - 2012 Basho Techonologies
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(basho_bench_driver_shortcut).
-export([new/1,
run/4]).
-export([count_eleveldb_keys/1, calc_bkey_to_prefidxes/4]).
-include("basho_bench.hrl").
-record(state, { id :: integer(),
backend :: 'bitcask' | 'eleveldb' | 'hanoidb',
backend_flags :: list(),
data_dir :: string(),
n_val :: integer(),
ring :: term(),
bucket :: binary(),
store_riak_obj :: boolean(),
idxes_to_do :: list(integer()),
idx :: integer(),
handle :: term()}).
new(Id) ->
Backend = basho_bench_config:get(shortcut_backend),
BackendFlags = basho_bench_config:get(shortcut_backend_flags),
DataDir = basho_bench_config:get(shortcut_data_dir),
os:cmd("mkdir -p " ++ DataDir),
RingSize = basho_bench_config:get(shortcut_ring_creation_size),
N = basho_bench_config:get(shortcut_n_val),
Ring = riak_core_ring:fresh(RingSize, nonode),
Bucket = basho_bench_config:get(shortcut_bucket),
StoreObj = basho_bench_config:get(shortcut_store_riak_object, false),
Idxes = [Idx || {Idx, _} <- riak_core_ring:all_owners(Ring)],
Concurrent = basho_bench_config:get(concurrent),
IdxParts = partition_work(Idxes, Concurrent),
I d starts counting at 1
MyIdxes = lists:nth(Id, IdxParts),
{ok, rotate_idx(#state{id = Id,
backend = Backend,
backend_flags = BackendFlags,
data_dir = DataDir,
n_val = N,
ring = Ring,
bucket = Bucket,
store_riak_obj = StoreObj,
idxes_to_do = MyIdxes})}.
run(put, KeyGen, ValueGen, S) ->
try
Key = filter_key_gen(KeyGen, S),
Value = ValueGen(),
do_put(Key, Value, S)
catch
throw:{stop, empty_keygen} ->
?DEBUG("Empty keygen\n", []),
NewS = rotate_idx(S),
do_put(filter_key_gen(KeyGen, NewS), ValueGen(), NewS)
end.
partition_work(L, Num) ->
partition_work2(L, lists:duplicate(Num, [])).
partition_work2([], Acc) ->
[lists:reverse(L) || L <- Acc];
partition_work2([H|T], [First|Rest] = _Res) ->
partition_work2(T, Rest ++ [[H|First]]).
filter_key_gen(KeyGen, #state{ring = Ring, n_val = N, bucket = Bucket,
idx = Idx} = S) ->
Key = KeyGen(),
case ( ) of
{ sext_pair , SextBKey , PlainKey } - >
HashKey = PlainKey ,
Key = SextBKey , PlainKey ;
HashKey = Key = Plain
PrefIdxes = calc_bkey_to_prefidxes(Bucket, Key, Ring, N),
case lists:member(Idx, PrefIdxes) of
true ->
Key;
false ->
filter_key_gen(KeyGen, S)
end.
rotate_idx(#state{idxes_to_do = []} = S) ->
stop_idx(S),
throw({stop, empty_keygen});
rotate_idx(#state{backend = Backend,
backend_flags = BackendFlags,
data_dir = DataDir,
idxes_to_do = [Idx|Idxes]} = S0) ->
S1 = stop_idx(S0),
basho_bench_keygen:reset_sequential_int_state(),
Handle = start_idx(Backend, BackendFlags, DataDir, Idx),
S1#state{idxes_to_do = Idxes,
idx = Idx,
handle = Handle}.
stop_idx(#state{backend = Backend, handle = Handle} = S) ->
try
stop_backend(Backend, Handle)
catch
X:Y ->
?ERROR("Stopping Id ~p's handle ~p -> ~p ~p: ~p\n",
[S#state.id, Handle, X, Y, erlang:get_stacktrace()])
end,
S#state{handle = undefined}.
start_idx(eleveldb, Flags0, DataDir, Idx) ->
Flags = [{create_if_missing, true}|Flags0],
{ok, Handle} = eleveldb:open(DataDir ++ "/" ++ integer_to_list(Idx), Flags),
Handle;
start_idx(bitcask, Flags0, DataDir, Idx) ->
Flags = [read_write|Flags0],
bitcask:open(DataDir ++ "/" ++ integer_to_list(Idx), Flags);
start_idx(hanoidb, Flags, DataDir, Idx) ->
{ok, Handle} = hanoidb:open(DataDir ++ "/" ++ integer_to_list(Idx), Flags),
Handle.
do_put(Key0, Value0, #state{backend = eleveldb, handle = Handle,
bucket = Bucket} = S) ->
{Key, Value} = make_riak_object_maybe(Bucket, Key0, Value0, S),
case eleveldb:put(Handle, Key, Value, []) of
ok ->
{ok, S};
{error, Reason} ->
{error, Reason, S}
end;
do_put(Key0, Value0, #state{backend = bitcask, handle = Handle,
bucket = Bucket} = S) ->
{Key, Value} = make_riak_object_maybe(Bucket, Key0, Value0, S),
case bitcask:put(Handle, Key, Value) of
ok ->
{ok, S};
{error, Reason} ->
{error, Reason, S}
end;
do_put(Key0, Value0, #state{backend = hanoidb, handle = Handle,
bucket = Bucket} = S) ->
{Key, Value} = make_riak_object_maybe(Bucket, Key0, Value0, S),
case hanoidb:put(Handle, Key, Value) of
ok ->
{ok, S};
{error, Reason} ->
{error, Reason, S}
end.
stop_backend(_, undefined) ->
ok;
stop_backend(eleveldb, _Handle) ->
Key = < < 66:2048 > > ,
ok = : put(Handle , Key , < < > > , [ ] ) ,
ok = : delete(Handle , Key , [ { sync , true } ] ) ,
ok;
stop_backend(bitcask, Handle) ->
ok = bitcask:close(Handle);
stop_backend(hanoidb, Handle) ->
ok = hanoidb:close(Handle).
count_eleveldb_keys(Dir) ->
[{File, begin
{ok, L1} = eleveldb:open(Dir ++ "/" ++ File, []),
eleveldb:fold_keys(L1, fun(_, Acc) -> Acc + 1 end, 0, [])
end} || File <- filelib:wildcard("*", Dir)].
make_riak_object_maybe(_Bucket, Key, Value, #state{store_riak_obj = false}) ->
{Key, Value};
make_riak_object_maybe(Bucket, Key, Value, #state{store_riak_obj = true,
backend = eleveldb}) ->
new_object(sext:encode({o, Bucket, Key}), Bucket, Key, Value);
make_riak_object_maybe(Bucket, Key, Value, #state{store_riak_obj = true,
backend = bitcask}) ->
new_object(term_to_binary({Bucket, Key}), Bucket, Key, Value);
make_riak_object_maybe(Bucket, Key, Value, #state{store_riak_obj = true,
backend = hanoidb}) ->
new_object(sext:encode({o, Bucket, Key}), Bucket, Key, Value).
new_object(EncodedKey, Bucket, Key, Value) ->
Now = erlang:now(),
<<HashAsNum:128/integer>> = basho_bench:md5(term_to_binary({node(), Now})),
VT = riak_core_util:integer_to_list(HashAsNum,62),
NewMD = dict:store(<<"X-Riak-VTag">>, VT,
dict:store(<<"X-Riak-Last-Modified">>, Now, dict:new())),
{EncodedKey,
term_to_binary(
riak_object:increment_vclock(riak_object:new(Bucket, Key, Value,
NewMD),
<<42:32/big>>))}.
calc_bkey_to_prefidxes(Bucket, Key, RingSize, N) when is_integer(RingSize) ->
calc_bkey_to_prefidxes(Bucket, Key, riak_core_ring:fresh(RingSize, nonode),
N);
calc_bkey_to_prefidxes(Bucket, Key, Ring, N) ->
DocIdx = riak_core_util:chash_std_keyfun({Bucket, Key}),
Preflist = lists:sublist(riak_core_ring:preflist(DocIdx, Ring), N),
[I || {I, _} <- Preflist].
|
85f8d6d8593b134e472451a1d955bfdb6edc3fd1effd900713a43c618e03a01f | borodust/claw | packages.lisp | (cl:defpackage :iffi
(:use :cl :alexandria)
(:export #:*allocator*
#:*allocator-expander*
#:*extricator*
#:*extricator-expander*
#:defifun
#:defitype
#:defirecord
#:defistruct
#:defiunion
#:deficlass
#:intricate-function-pointer
#:intricate-size
#:intricate-alignment
#:intricate-alloc
#:intricate-free
#:with-intricate-alloc
#:with-intricate-allocs
#:intricate-slot-value
#:with-intricate-slots
#:make-intricate-instance
#:destroy-intricate-instance
#:with-intricate-instance
#:with-intricate-instances
#:define-intricate-function-class
#:deficallback
#:make-intricate-callback
#:destroy-intricate-callback))
| null | https://raw.githubusercontent.com/borodust/claw/213ebd94c48019c62c9bf130566ec06d7ad1675a/src/iffi/packages.lisp | lisp | (cl:defpackage :iffi
(:use :cl :alexandria)
(:export #:*allocator*
#:*allocator-expander*
#:*extricator*
#:*extricator-expander*
#:defifun
#:defitype
#:defirecord
#:defistruct
#:defiunion
#:deficlass
#:intricate-function-pointer
#:intricate-size
#:intricate-alignment
#:intricate-alloc
#:intricate-free
#:with-intricate-alloc
#:with-intricate-allocs
#:intricate-slot-value
#:with-intricate-slots
#:make-intricate-instance
#:destroy-intricate-instance
#:with-intricate-instance
#:with-intricate-instances
#:define-intricate-function-class
#:deficallback
#:make-intricate-callback
#:destroy-intricate-callback))
| |
4bf0d317d27be87abfe72b2c637130cbad53d23fd1c27119fda81a5735f17293 | angavrilov/ecl-compute | thread-core.lisp | ;;; -*- mode:lisp; indent-tabs-mode: nil; -*-
(in-package fast-compute)
#+linux
(progn
(ffi:clines "#include <unistd.h>")
(defun get-cpu-count ()
(max 1
(ffi:c-inline () () :int
"sysconf(_SC_NPROCESSORS_ONLN)"
:one-liner t)))
(ffi:clines "#include <sched.h>")
(ffi:clines "#include <errno.h>")
(ffi:clines "#include <string.h>")
(defun set-cpu-affinity (cpus)
(let ((cpu-list (if (listp cpus) cpus (list cpus)))
(numcpus (get-cpu-count)))
(unless cpus
(error "Cannot set affinity to an empty list"))
(dolist (item cpu-list)
(unless (and (integerp item)
(<= 0 item)
(< item numcpus))
(error "Invalid CPU index in affinity: ~A" item)))
(ffi:c-inline (cpu-list) (:object) :void
"cpu_set_t cset;
CPU_ZERO(&cset);
cl_object lst = #0;
while (CONSP(lst)) {
CPU_SET(fixnnint(ECL_CONS_CAR(lst)), &cset);
lst = ECL_CONS_CDR(lst);
}
if (sched_setaffinity(0,sizeof(cset),&cset) < 0)
FEerror(\"Affinity setting failed: ~A\", 1,
make_base_string_copy(strerror(errno)));
"))))
#-linux
(progn
(defun get-cpu-count () 1)
(defun set-cpu-affinity (cpus) (declare (ignore cpus)) nil))
(defun get-lock-spin (lock &key max-tries)
(if max-tries
(loop for try-id from 1 to max-tries
do (when (mp:get-lock lock nil)
(return))
finally (mp:get-lock lock t))
(loop
(when (mp:get-lock lock nil)
(return)))))
(defmacro with-lock-spin ((lock &key max-tries) &body code)
(let ((lock-sym (gensym)))
`(let ((,lock-sym ,lock))
(get-lock-spin ,lock-sym :max-tries ,max-tries)
(unwind-protect
(progn ,@code)
(mp:giveup-lock ,lock-sym)))))
(defmacro condition-wait-spin ((cond mutex &key (max-tries 10000)) check)
(let ((lock-sym (gensym))
(cond-sym (gensym))
(tries-sym (gensym))
(try-sym (gensym))
(found-sym (gensym)))
`(let ((,lock-sym ,mutex)
(,cond-sym ,cond)
(,tries-sym ,max-tries)
(,found-sym ,nil))
(when (> ,tries-sym 0)
(mp:giveup-lock ,lock-sym)
(unwind-protect
(loop for ,try-sym from 1 to ,tries-sym
do (when ,check
(setf ,found-sym t)
(return)))
(get-lock-spin ,lock-sym)))
(unless (or ,found-sym ,check)
(mp:condition-variable-wait ,cond-sym ,lock-sym)))))
(defparameter *worker-cond-spins* 100000)
(defvar *worker-count* 0)
(defvar *worker-threads* ())
(defvar *worker-mutex* (mp:make-lock))
(defvar *work-start-cond* (mp:make-condition-variable))
(defvar *work-done-cond* (mp:make-condition-variable))
(defvar *working-threads* 0)
(defvar *task-id* 0)
(defvar *workers-failed* 0)
(defvar *task-function* nil)
(defvar *dispatch-lock* (mp:make-lock))
(defvar *dispatch-pos* 0)
(defvar *dispatch-limit* 0)
(defun nuke-workers ()
(setf *worker-count* 0)
(incf *task-id*)
(mp:condition-variable-broadcast *work-start-cond*)
(sleep 0.2)
(dolist (thread *worker-threads*)
(ignore-errors (mp:process-kill thread)))
(sleep 0.1)
(setf *worker-threads* nil))
(unless (find #'nuke-workers si::*exit-hooks*)
(push #'nuke-workers si::*exit-hooks*))
(defun worker-thread (idx)
(format t "Worker ~A starting.~%" idx)
(unwind-protect
(let ((last-id 0) (caught-error nil))
(loop
(multiple-value-bind (task task-id w-count)
(with-lock-spin (*worker-mutex*)
(unless (and *task-function*
(/= *task-id* last-id))
(condition-wait-spin (*work-start-cond* *worker-mutex*
:max-tries *worker-cond-spins*)
(/= *task-id* last-id)))
(when (> idx *worker-count*)
(return-from worker-thread))
(values *task-function* *task-id* *worker-count*))
(when (and task (/= task-id last-id))
(setf last-id task-id)
(setf caught-error nil)
(unwind-protect
(handler-case
(funcall task idx (1+ w-count))
(condition (err)
(setf caught-error t)
(format t "Worker ~A failed:~% ~A~%" idx err)))
(with-lock-spin (*worker-mutex*)
(incf *working-threads* -1)
(when caught-error
(incf *workers-failed*))
(when (<= *working-threads* 0)
(mp:condition-variable-broadcast *work-done-cond*))))))))
(format t "Worker ~A exited.~%" idx)))
(defun thread-dispatch (idx fun)
(loop
(funcall fun idx *dispatch-limit*)
(with-lock-spin (*dispatch-lock*)
(setf idx *dispatch-pos*)
(when (< *dispatch-pos* *dispatch-limit*)
(incf *dispatch-pos*)))
(unless (< idx *dispatch-limit*)
(return))))
(defun wrap-dispatch (fun)
#'(lambda (idx num)
(thread-dispatch idx fun)))
(defun spawn-worker-threads (num)
(mp:with-lock (*worker-mutex*)
(do ()
((<= *worker-count* num))
(incf *worker-count* -1)
(pop *worker-threads*))
(incf *task-id*)
(mp:condition-variable-broadcast *work-start-cond*)
(do ()
((>= *worker-count* num))
(incf *worker-count* 1)
(push (mp:process-run-function 'worker
#'worker-thread *worker-count*)
*worker-threads*))))
(defun run-work (fun &key (dispatch-limit 1))
(when (null fun)
(error "Cannot run a NIL task"))
(mp:with-lock (*worker-mutex*)
(when *task-function*
(error "Task already running"))
(setf *task-function* fun)
(setf *workers-failed* 0)
(setf *working-threads* *worker-count*)
(setf *dispatch-pos* (1+ *worker-count*))
(setf *dispatch-limit* (* *dispatch-pos* (max 1 dispatch-limit)))
(incf *task-id*)
(mp:condition-variable-broadcast *work-start-cond*))
(let ((success nil))
(unwind-protect
(progn
(funcall fun 0 (1+ *worker-count*))
(setf success t))
(with-lock-spin (*worker-mutex*)
(when (> *working-threads* 0)
(condition-wait-spin (*work-done-cond* *worker-mutex*
:max-tries *worker-cond-spins*)
(<= *working-threads* 0)))
(setf *task-function* nil))
;; Transfer errors from worker threads if we succeeded
(when (and success (> *workers-failed* 0))
(error "~A worker threads failed." *workers-failed*)))))
(defun set-compute-thread-count (num-threads &key (adjust-affinity t))
(let ((num-threads (or num-threads
(ignore-errors (parse-integer (si:getenv "OMP_NUM_THREADS")))
(get-cpu-count))))
(spawn-worker-threads (max (1- num-threads) 0)))
(when adjust-affinity
(let ((cpu-count (get-cpu-count)))
(run-work #'(lambda (idx num)
(set-cpu-affinity (- cpu-count 1 (mod idx cpu-count))))))))
| null | https://raw.githubusercontent.com/angavrilov/ecl-compute/466f0d287f8b6ab0e2b5c2ac03693ad4f4df6a3f/thread-core.lisp | lisp | -*- mode:lisp; indent-tabs-mode: nil; -*-
Transfer errors from worker threads if we succeeded |
(in-package fast-compute)
#+linux
(progn
(ffi:clines "#include <unistd.h>")
(defun get-cpu-count ()
(max 1
(ffi:c-inline () () :int
"sysconf(_SC_NPROCESSORS_ONLN)"
:one-liner t)))
(ffi:clines "#include <sched.h>")
(ffi:clines "#include <errno.h>")
(ffi:clines "#include <string.h>")
(defun set-cpu-affinity (cpus)
(let ((cpu-list (if (listp cpus) cpus (list cpus)))
(numcpus (get-cpu-count)))
(unless cpus
(error "Cannot set affinity to an empty list"))
(dolist (item cpu-list)
(unless (and (integerp item)
(<= 0 item)
(< item numcpus))
(error "Invalid CPU index in affinity: ~A" item)))
(ffi:c-inline (cpu-list) (:object) :void
while (CONSP(lst)) {
}
if (sched_setaffinity(0,sizeof(cset),&cset) < 0)
FEerror(\"Affinity setting failed: ~A\", 1,
"))))
#-linux
(progn
(defun get-cpu-count () 1)
(defun set-cpu-affinity (cpus) (declare (ignore cpus)) nil))
(defun get-lock-spin (lock &key max-tries)
(if max-tries
(loop for try-id from 1 to max-tries
do (when (mp:get-lock lock nil)
(return))
finally (mp:get-lock lock t))
(loop
(when (mp:get-lock lock nil)
(return)))))
(defmacro with-lock-spin ((lock &key max-tries) &body code)
(let ((lock-sym (gensym)))
`(let ((,lock-sym ,lock))
(get-lock-spin ,lock-sym :max-tries ,max-tries)
(unwind-protect
(progn ,@code)
(mp:giveup-lock ,lock-sym)))))
(defmacro condition-wait-spin ((cond mutex &key (max-tries 10000)) check)
(let ((lock-sym (gensym))
(cond-sym (gensym))
(tries-sym (gensym))
(try-sym (gensym))
(found-sym (gensym)))
`(let ((,lock-sym ,mutex)
(,cond-sym ,cond)
(,tries-sym ,max-tries)
(,found-sym ,nil))
(when (> ,tries-sym 0)
(mp:giveup-lock ,lock-sym)
(unwind-protect
(loop for ,try-sym from 1 to ,tries-sym
do (when ,check
(setf ,found-sym t)
(return)))
(get-lock-spin ,lock-sym)))
(unless (or ,found-sym ,check)
(mp:condition-variable-wait ,cond-sym ,lock-sym)))))
(defparameter *worker-cond-spins* 100000)
(defvar *worker-count* 0)
(defvar *worker-threads* ())
(defvar *worker-mutex* (mp:make-lock))
(defvar *work-start-cond* (mp:make-condition-variable))
(defvar *work-done-cond* (mp:make-condition-variable))
(defvar *working-threads* 0)
(defvar *task-id* 0)
(defvar *workers-failed* 0)
(defvar *task-function* nil)
(defvar *dispatch-lock* (mp:make-lock))
(defvar *dispatch-pos* 0)
(defvar *dispatch-limit* 0)
(defun nuke-workers ()
(setf *worker-count* 0)
(incf *task-id*)
(mp:condition-variable-broadcast *work-start-cond*)
(sleep 0.2)
(dolist (thread *worker-threads*)
(ignore-errors (mp:process-kill thread)))
(sleep 0.1)
(setf *worker-threads* nil))
(unless (find #'nuke-workers si::*exit-hooks*)
(push #'nuke-workers si::*exit-hooks*))
(defun worker-thread (idx)
(format t "Worker ~A starting.~%" idx)
(unwind-protect
(let ((last-id 0) (caught-error nil))
(loop
(multiple-value-bind (task task-id w-count)
(with-lock-spin (*worker-mutex*)
(unless (and *task-function*
(/= *task-id* last-id))
(condition-wait-spin (*work-start-cond* *worker-mutex*
:max-tries *worker-cond-spins*)
(/= *task-id* last-id)))
(when (> idx *worker-count*)
(return-from worker-thread))
(values *task-function* *task-id* *worker-count*))
(when (and task (/= task-id last-id))
(setf last-id task-id)
(setf caught-error nil)
(unwind-protect
(handler-case
(funcall task idx (1+ w-count))
(condition (err)
(setf caught-error t)
(format t "Worker ~A failed:~% ~A~%" idx err)))
(with-lock-spin (*worker-mutex*)
(incf *working-threads* -1)
(when caught-error
(incf *workers-failed*))
(when (<= *working-threads* 0)
(mp:condition-variable-broadcast *work-done-cond*))))))))
(format t "Worker ~A exited.~%" idx)))
(defun thread-dispatch (idx fun)
(loop
(funcall fun idx *dispatch-limit*)
(with-lock-spin (*dispatch-lock*)
(setf idx *dispatch-pos*)
(when (< *dispatch-pos* *dispatch-limit*)
(incf *dispatch-pos*)))
(unless (< idx *dispatch-limit*)
(return))))
(defun wrap-dispatch (fun)
#'(lambda (idx num)
(thread-dispatch idx fun)))
(defun spawn-worker-threads (num)
(mp:with-lock (*worker-mutex*)
(do ()
((<= *worker-count* num))
(incf *worker-count* -1)
(pop *worker-threads*))
(incf *task-id*)
(mp:condition-variable-broadcast *work-start-cond*)
(do ()
((>= *worker-count* num))
(incf *worker-count* 1)
(push (mp:process-run-function 'worker
#'worker-thread *worker-count*)
*worker-threads*))))
(defun run-work (fun &key (dispatch-limit 1))
(when (null fun)
(error "Cannot run a NIL task"))
(mp:with-lock (*worker-mutex*)
(when *task-function*
(error "Task already running"))
(setf *task-function* fun)
(setf *workers-failed* 0)
(setf *working-threads* *worker-count*)
(setf *dispatch-pos* (1+ *worker-count*))
(setf *dispatch-limit* (* *dispatch-pos* (max 1 dispatch-limit)))
(incf *task-id*)
(mp:condition-variable-broadcast *work-start-cond*))
(let ((success nil))
(unwind-protect
(progn
(funcall fun 0 (1+ *worker-count*))
(setf success t))
(with-lock-spin (*worker-mutex*)
(when (> *working-threads* 0)
(condition-wait-spin (*work-done-cond* *worker-mutex*
:max-tries *worker-cond-spins*)
(<= *working-threads* 0)))
(setf *task-function* nil))
(when (and success (> *workers-failed* 0))
(error "~A worker threads failed." *workers-failed*)))))
(defun set-compute-thread-count (num-threads &key (adjust-affinity t))
(let ((num-threads (or num-threads
(ignore-errors (parse-integer (si:getenv "OMP_NUM_THREADS")))
(get-cpu-count))))
(spawn-worker-threads (max (1- num-threads) 0)))
(when adjust-affinity
(let ((cpu-count (get-cpu-count)))
(run-work #'(lambda (idx num)
(set-cpu-affinity (- cpu-count 1 (mod idx cpu-count))))))))
|
f856957441871deee90564aa8a3f75df7b32098cb5c46b17cc7d98a2bc6b4e18 | nasa/Common-Metadata-Repository | project.clj | (defn get-banner
[]
(try
(str
(slurp "resources/text/banner.txt"))
;(slurp "resources/text/loading.txt")
;; If another project can't find the banner, just skip it.
(catch Exception _ "")))
(defn get-prompt
[ns]
(str "\u001B[35m[\u001B[34m"
ns
"\u001B[35m]\u001B[33m λ\u001B[m=> "))
(defproject gov.nasa.earthdata/cmr-http-kit "0.2.0"
:description "Utilities, wrappers, middleware, and components for http-kit interop"
:url "-exchange/cmr-http-kit"
:license {
:name "Apache License, Version 2.0"
:url "-2.0"}
:dependencies [[cheshire "5.8.1"]
[clojusc/trifl "0.4.2"]
[clojusc/twig "0.4.0"]
[gov.nasa.earthdata/cmr-exchange-common "0.3.3"]
[gov.nasa.earthdata/cmr-jar-plugin "0.1.2"]
[http-kit "2.5.3"]
[metosin/ring-http-response "0.9.1"]
[org.clojure/clojure "1.9.0"]
[org.clojure/data.xml "0.2.0-alpha5"]
[ring/ring-defaults "0.3.2"]
[selmer "1.12.5"]
[tolitius/xml-in "0.1.0"]]
:profiles {:ubercompile {:aot :all
:source-paths ["test"]}
:security {:plugins [[com.livingsocial/lein-dependency-check "1.1.1"]]
:dependency-check {:output-format [:all]
:suppression-file "resources/security/suppression.xml"}
:source-paths ^:replace ["src"]
:exclusions [
The following are excluded due to their being flagged as a CVE
[com.google.protobuf/protobuf-java]
[com.google.javascript/closure-compiler-unshaded]
[commons-fileupload]]
:dependencies [
;; The following pull required deps that have been either been
;; explicitly or implicitly excluded above due to CVEs and need
declare secure versions of the libs pulled in
[commons-fileupload "1.3.3"]
[commons-io "2.6"]]}
:dev {:dependencies [[clojusc/system-manager "0.3.0"]
[org.clojure/java.classpath "0.3.0"]
[org.clojure/tools.namespace "0.2.11"]
[proto-repl "0.3.1"]]
:plugins [[venantius/ultra "0.5.2"]]
:source-paths ["dev-resources/src"]
:repl-options {:init-ns cmr.http.kit.dev
:prompt ~get-prompt
:init ~(println (get-banner))}}
:lint {:source-paths ^:replace ["src"]
:test-paths ^:replace []
:plugins [[jonase/eastwood "0.3.3"]
[lein-ancient "0.6.15"]
[lein-kibit "0.1.8"]]}
:test {:dependencies [[clojusc/ltest "0.3.0"]]
:plugins [[lein-ltest "0.3.0"]]
:test-selectors {:unit #(not (or (:integration %) (:system %)))
:integration :integration
:system :system
:default (complement :system)}}}
:aliases {
;; Dev & Testing Aliases
"repl" ["do"
["clean"]
["repl"]]
"ubercompile" ["with-profile" "+ubercompile,+security" "compile"]
"check-vers" ["with-profile" "+lint" "ancient" "check" ":all"]
"check-jars" ["with-profile" "+lint" "do"
["deps" ":tree"]
["deps" ":plugin-tree"]]
"check-deps" ["do"
["check-jars"]
["check-vers"]]
"ltest" ["with-profile" "+test,+system,+security" "ltest"]
Linting
"kibit" ["with-profile" "+lint" "kibit"]
"eastwood" ["with-profile" "+lint" "eastwood" "{:namespaces [:source-paths]}"]
"lint" ["do"
["kibit"]]
;["eastwood"]
Security
"check-sec" ["with-profile" "+security" "do"
["clean"]
["dependency-check"]]
;; Build tasks
"build-jar" ["with-profile" "+security" "jar"]
"build-uberjar" ["with-profile" "+security" "uberjar"]
"build-lite" ["do"
["ltest" ":unit"]]
"build" ["do"
["clean"]
["check-vers"]
["check-sec"]
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
"build-full" ["do"
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
;; Installing
"install" ["do"
["clean"]
["ubercompile"]
["clean"]
["install"]]
;; Publishing
"publish" ["with-profile" "+security" "do"
["clean"]
["build-jar"]
["deploy" "clojars"]]})
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/63001cf021d32d61030b1dcadd8b253e4a221662/other/cmr-exchange/http-kit-support/project.clj | clojure | (slurp "resources/text/loading.txt")
If another project can't find the banner, just skip it.
The following pull required deps that have been either been
explicitly or implicitly excluded above due to CVEs and need
Dev & Testing Aliases
["eastwood"]
Build tasks
Installing
Publishing | (defn get-banner
[]
(try
(str
(slurp "resources/text/banner.txt"))
(catch Exception _ "")))
(defn get-prompt
[ns]
(str "\u001B[35m[\u001B[34m"
ns
"\u001B[35m]\u001B[33m λ\u001B[m=> "))
(defproject gov.nasa.earthdata/cmr-http-kit "0.2.0"
:description "Utilities, wrappers, middleware, and components for http-kit interop"
:url "-exchange/cmr-http-kit"
:license {
:name "Apache License, Version 2.0"
:url "-2.0"}
:dependencies [[cheshire "5.8.1"]
[clojusc/trifl "0.4.2"]
[clojusc/twig "0.4.0"]
[gov.nasa.earthdata/cmr-exchange-common "0.3.3"]
[gov.nasa.earthdata/cmr-jar-plugin "0.1.2"]
[http-kit "2.5.3"]
[metosin/ring-http-response "0.9.1"]
[org.clojure/clojure "1.9.0"]
[org.clojure/data.xml "0.2.0-alpha5"]
[ring/ring-defaults "0.3.2"]
[selmer "1.12.5"]
[tolitius/xml-in "0.1.0"]]
:profiles {:ubercompile {:aot :all
:source-paths ["test"]}
:security {:plugins [[com.livingsocial/lein-dependency-check "1.1.1"]]
:dependency-check {:output-format [:all]
:suppression-file "resources/security/suppression.xml"}
:source-paths ^:replace ["src"]
:exclusions [
The following are excluded due to their being flagged as a CVE
[com.google.protobuf/protobuf-java]
[com.google.javascript/closure-compiler-unshaded]
[commons-fileupload]]
:dependencies [
declare secure versions of the libs pulled in
[commons-fileupload "1.3.3"]
[commons-io "2.6"]]}
:dev {:dependencies [[clojusc/system-manager "0.3.0"]
[org.clojure/java.classpath "0.3.0"]
[org.clojure/tools.namespace "0.2.11"]
[proto-repl "0.3.1"]]
:plugins [[venantius/ultra "0.5.2"]]
:source-paths ["dev-resources/src"]
:repl-options {:init-ns cmr.http.kit.dev
:prompt ~get-prompt
:init ~(println (get-banner))}}
:lint {:source-paths ^:replace ["src"]
:test-paths ^:replace []
:plugins [[jonase/eastwood "0.3.3"]
[lein-ancient "0.6.15"]
[lein-kibit "0.1.8"]]}
:test {:dependencies [[clojusc/ltest "0.3.0"]]
:plugins [[lein-ltest "0.3.0"]]
:test-selectors {:unit #(not (or (:integration %) (:system %)))
:integration :integration
:system :system
:default (complement :system)}}}
:aliases {
"repl" ["do"
["clean"]
["repl"]]
"ubercompile" ["with-profile" "+ubercompile,+security" "compile"]
"check-vers" ["with-profile" "+lint" "ancient" "check" ":all"]
"check-jars" ["with-profile" "+lint" "do"
["deps" ":tree"]
["deps" ":plugin-tree"]]
"check-deps" ["do"
["check-jars"]
["check-vers"]]
"ltest" ["with-profile" "+test,+system,+security" "ltest"]
Linting
"kibit" ["with-profile" "+lint" "kibit"]
"eastwood" ["with-profile" "+lint" "eastwood" "{:namespaces [:source-paths]}"]
"lint" ["do"
["kibit"]]
Security
"check-sec" ["with-profile" "+security" "do"
["clean"]
["dependency-check"]]
"build-jar" ["with-profile" "+security" "jar"]
"build-uberjar" ["with-profile" "+security" "uberjar"]
"build-lite" ["do"
["ltest" ":unit"]]
"build" ["do"
["clean"]
["check-vers"]
["check-sec"]
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
"build-full" ["do"
["ltest" ":unit"]
["ubercompile"]
["build-uberjar"]]
"install" ["do"
["clean"]
["ubercompile"]
["clean"]
["install"]]
"publish" ["with-profile" "+security" "do"
["clean"]
["build-jar"]
["deploy" "clojars"]]})
|
cc973826ca8f461784465531ac7c3f6e1f165bcf2c20376dbcd511e24336c256 | exoscale/clojure-kubernetes-client | v1alpha1_runtime_class.clj | (ns clojure-kubernetes-client.specs.v1alpha1-runtime-class
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class-spec :refer :all]
)
(:import (java.io File)))
(declare v1alpha1-runtime-class-data v1alpha1-runtime-class)
(def v1alpha1-runtime-class-data
{
(ds/opt :apiVersion) string?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/req :spec) v1alpha1-runtime-class-spec
})
(def v1alpha1-runtime-class
(ds/spec
{:name ::v1alpha1-runtime-class
:spec v1alpha1-runtime-class-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1alpha1_runtime_class.clj | clojure | (ns clojure-kubernetes-client.specs.v1alpha1-runtime-class
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.v1alpha1-runtime-class-spec :refer :all]
)
(:import (java.io File)))
(declare v1alpha1-runtime-class-data v1alpha1-runtime-class)
(def v1alpha1-runtime-class-data
{
(ds/opt :apiVersion) string?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/req :spec) v1alpha1-runtime-class-spec
})
(def v1alpha1-runtime-class
(ds/spec
{:name ::v1alpha1-runtime-class
:spec v1alpha1-runtime-class-data}))
| |
267205e93c4bce43f86f632d22d9dbb013607b812bdc59e878ae4c197357512c | dvv/stable | cowboy_resource.erl | %%% ----------------------------------------------------------------------------
%%%
@doc for a HTTP resource .
%%%
%%% ----------------------------------------------------------------------------
-module(cowboy_resource).
-author('Vladimir Dronnikov <>').
-behaviour(cowboy_sub_protocol).
-export([
upgrade/4
]).
% -behaviour(cowboy_rest_handler).
-export([
rest_init/2,
rest_terminate/3,
% resource_available/2,
allowed_methods/2,
% malformed_request/2,
is_authorized/2,
forbidden/2,
% options/2,
resource_exists/2,
content_types_accepted/2,
content_types_provided/2,
charsets_provided/2,
delete_resource/2,
delete_completed/2
]).
-behaviour(cowboy_websocket_handler).
-export([
websocket_init/3,
websocket_handle/3,
websocket_info/3,
websocket_terminate/3
]).
% getters
-export([
get_resource/2
]).
% setters
-export([
put_form/2,
put_json/2,
put_plain/2,
rpc_json/2
]).
-type proplist() :: list({term(), term()}).
-record(state, {
method :: binary(),
params :: proplist(),
body :: proplist(),
querystring :: proplist(),
auth :: {Identity :: term(), AllowedScope :: term()},
completed = false :: boolean(),
options :: proplist(),
handler :: module()
}).
upgrade(Req, Env, Handler, Opts) ->
% enable CORS
Req3 = case lists:keyfind(cors, 1, Opts) of
{_, AllowedOrigins} ->
cors(Req, AllowedOrigins);
false ->
Req
end,
% extract request info
{Params, Req4} = cowboy_req:bindings(Req3),
{Query, Req5} = cowboy_req:qs_vals(Req4),
{Method, Req6} = cowboy_req:method(Req5),
% distinguish websocket and rest
NewProto = case cowboy_req:header(<<"upgrade">>, Req6) of
{<<"websocket">>, Req7} -> cowboy_websocket;
{_, Req7} -> cowboy_rest
end,
NewProto:upgrade(Req7, Env, ?MODULE, #state{
method = Method,
% params = Params,
params = lists:ukeymerge(1,
lists:ukeysort(1, Params),
lists:ukeysort(1, Query)),
querystring = Query,
options = Opts,
handler = Handler
}).
rest_init(Req, State) ->
{ok, Req, State}.
rest_terminate(_Reason, _Req, _State) ->
ok.
resource_available(Req , State ) - >
{ true , Req , State } .
allowed_methods(Req, State) ->
{[<<"GET">>, <<"POST">>, <<"PUT">>, <<"DELETE">>,
<<"PATCH">>, <<"HEAD">>, <<"OPTIONS">>], Req, State}.
%%
Validate GET requests . Body is not yet available and is not yet done .
%%
malformed_request(Req , State ) - >
{ false , Req , State } .
%%
%% Verify that authentication credentials provided and not forged.
Bearer or basic authorization , or ? access_token = TOKEN , or session required .
%%
is_authorized(Req, State = #state{options = Opts}) ->
case cowboy_req:parse_header(<<"authorization">>, Req) of
{ok, {<<"bearer">>, Token}, Req2} ->
{_, Secret} = lists:keyfind(token_secret, 1, Opts),
try_authorize(Req2, State, token, {Token, Secret});
{ok, {<<"basic">>, Credentials}, Req2} ->
try_authorize(Req2, State, password, Credentials);
_ ->
case cowboy_req:qs_val(<<"access_token">>, Req) of
{undefined, Req2} ->
{Session, Req3} = cowboy_session:get(Req2),
try_authorize(Req3, State, session, Session);
{Token, Req2} ->
{_, Secret} = lists:keyfind(token_secret, 1, Opts),
try_authorize(Req2, State, token, {Token, Secret})
end
end.
try_authorize(Req, State = #state{params = Params, handler = Handler},
Type, Credentials) ->
case erlang:function_exported(Handler, authorize, 3) of
true ->
case Handler:authorize(Type, Credentials, Params) of
{ok, Auth} ->
{true, Req, State#state{auth = Auth}};
{error, _} ->
{{false, <<"Bearer, Basic, Access-Token, Cookie">>}, Req, State}
end;
false ->
{true, Req, State#state{auth = none}}
end.
%%
%% Checks user is authorized to access the resource.
NB : POST carries batch RPC and access will be checked later on
%% for each request in the batch.
%%
%% @fixme should be active if Content-Type: application/rpc+json
%%
forbidden(Req , State = # state{method = < < " POST " > > } ) - >
{ false , Req , State } ;
%%
%% Other methods mean single action and access can be checked at once.
%%
forbidden(Req, State = #state{method = Method,
auth = Auth, handler = Handler}) ->
{not call_allowed(handler_for(Method), Auth, Handler), Req, State}.
handler_for(<<"GET">>) -> get;
handler_for(<<"POST">>) -> post;
handler_for(<<"PUT">>) -> put;
handler_for(<<"PATCH">>) -> patch;
handler_for(<<"DELETE">>) -> delete;
handler_for(<<"OPTIONS">>) -> options;
handler_for(<<"HEAD">>) -> get;
handler_for(Other) -> Other.
call_allowed(Method, Auth, Handler) ->
case erlang:function_exported(Handler, allowed, 2) of
true ->
Handler:allowed(Method, Auth);
false ->
true
end.
%%
%% Called on OPTIONS. Use to set custom headers. Note returned tag is 'ok'.
%%
options(Req , State ) - >
{ ok , Req , State } .
%%
%% Is resource exist?
NB : controls as well status code returned from put_resource when POST
%%
resource_exists(Req, State = #state{method = <<"POST">>}) ->
{false, Req, State};
resource_exists(Req, State) ->
{true, Req, State}.
%%
%% Enumerate content types resource may process.
%%
content_types_accepted(Req, State) ->
{[
% {{<<"application">>, <<"json">>, [
{ < < " charset " > > , < < " utf-8 " > > } ] } , ,
{{<<"application">>, <<"json">>, '*'}, put_json},
{{<<"application">>, <<"x-www-form-urlencoded">>, '*'}, put_form},
{{<<"text">>, <<"plain">>, '*'}, put_plain},
% application/rpc+json accepts batch of requests
{{<<"application">>, <<"rpc+json">>, '*'}, rpc_json}
% @todo: application/rpc+bert for -rpc.org/
], Req, State}.
%%
%% Enumerate content types resource may return.
%%
content_types_provided(Req, State) ->
{[
{{<<"application">>, <<"json">>, '*'}, get_resource},
@todo disable if application / rpc+json data was provided
{{<<"application">>, <<"x-www-form-urlencoded">>, '*'}, get_resource},
{{<<"text">>, <<"html">>, '*'}, get_resource}
], Req, State}.
%%
%% Enumerate character sets resource may return.
%%
charsets_provided(Req, State) ->
{[<<"utf-8">>], Req, State}.
%%
%% -----------------------------------------------------------------------------
%% Resource operations.
%% -----------------------------------------------------------------------------
%%
%%
Delegates actual processing to application 's get/2 handler .
Encodes response entity .
%%
%%
- { Body , Req , State } -- > 200 OK
- { halt , Req , State } -- > no further processing
%%
get_resource(Req, State = #state{
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:get(Params, [{auth, Auth} | Opts]) of
{ok, Result} ->
{serialize(Result, Req), Req, State};
{error, enoent} ->
{halt, respond(404, <<"enoent">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State};
{goto, Location = << $/, _/binary >> } ->
{halt, cowboy_req:set_resp_header(<<"location">>, Location, Req), State};
{goto, Location} ->
{BasePath, Req2} = cowboy_req:path(Req),
{halt, cowboy_req:set_resp_header(<<"location">>,
[BasePath, $/, Location], Req2), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in get/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end.
%%
%% Handle PUT/POST/PATCH requests.
%%
- { false , Req , State } -- > 422 Unprocessable Entity
- { true , Req , State } -- > 204 No Content
- { halt , Req , State } -- > no further processing
%%
The following applies only on PUT
- set response location : and { true , Req , State } -- > 201 Created
- set response body and { true , Req , State } -- > 200 OK
%%
%% The following applies only on POST
- { { true , Location } , Req , State } -- > 303 See Other
%%
put_json(Req, State) ->
@todo make it streaming
{ok, Body, Req2} = cowboy_req:body(Req),
case jsx:decode(Body, [{error_handler, fun(_, _, _) -> {error, badarg} end}])
of
{error, _} ->
{false, Req2, State};
{incomplete, _} ->
{false, Req2, State};
Data ->
put_resource(Req2, State#state{body = Data})
end.
put_form(Req, State) ->
{ok, Result, Req2} = cowboy_req:body_qs(Req),
put_resource(Req2, State#state{body = Result}).
put_plain(Req, State) ->
{ok, Result, Req2} = cowboy_req:body(Req),
put_resource(Req2, State#state{body = Result}).
%%
%% Take batch of requests from body, return batch of responses.
Requests processing delegated to application 's handle(Method , [ ] ) .
%%
%% Request is triplet array: [Method, Params, Id].
%% Response is triplet array: [null, Result, Id] | [Error, null, Id].
%%
rpc_json(Req, State) ->
@todo make it streaming
{ok, Body, Req2} = cowboy_req:body(Req),
case jsx:decode(Body, [{error_handler, fun(_, _, _) -> {error, badarg} end}])
of
{error, _} ->
{false, Req2, State};
{incomplete, _} ->
{false, Req2, State};
Data ->
State2 = State#state{body = Data},
{halt, respond(200, batch_rpc(State2), Req2), State2}
end.
%%
@todo add more body decoders here . multipart is welcome .
%%
%%
Bodyful methods delegate actual processing to application 's handlers .
%% Response entity is encoded according to Accept: header.
%%
put_resource(Req, State = #state{method = <<"POST">>, body = Data,
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:post(Data, Params, [{auth, Auth} | Opts]) of
{ok, Body} ->
{true, set_resp_body(Body, Req), State};
ok ->
{true, Req, State};
{error, eexist} ->
{halt, respond(409, <<"eexist">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State};
{goto, Location = << $/, _/binary >> } ->
{{true, Location}, Req, State};
{goto, Location} ->
{BasePath, Req2} = cowboy_req:path(Req),
{{true, [BasePath, $/, Location]}, Req2, State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in post/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end;
put_resource(Req, State = #state{method = <<"PUT">>, body = Data,
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:put(Data, Params, [{auth, Auth} | Opts]) of
ok ->
{true, Req, State};
{ok, Body} ->
{true, set_resp_body(Body, Req), State};
{error, eexist} ->
{halt, respond(409, <<"eexist">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in put/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end;
put_resource(Req, State = #state{method = <<"PATCH">>, body = Data,
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:patch(Data, Params, [{auth, Auth} | Opts]) of
ok ->
{true, Req, State};
{ok, Body} ->
{true, set_resp_body(Body, Req), State};
{error, enoent} ->
{halt, respond(404, <<"enoent">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in patch/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end.
%%
%% Delegates actual processing to application's delete/2 handler.
%%
%% It should start deleting the resource and return.
- { true , Req , State } -- > 204 No Content , unless delete_completed/2 defined
- { X = /= true , Req , State } -- > 500 Internal Server Error
- { halt , Req , State } -- > no further processing
%%
delete_resource(Req, State = #state{
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:delete(Params, [{auth, Auth} | Opts]) of
ok ->
{true, Req, State#state{completed = true}};
accepted ->
{true, Req, State#state{completed = false}};
error ->
{halt, respond(400, undefined, Req), State};
{error, enoent} ->
{halt, respond(404, <<"enoent">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in delete/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end.
%%
%% Indicates whether the resource has been deleted yet.
- { true , Req , State } -- > go ahead with 200/204
- { false , Req , State } -- > 202 Accepted
- { halt , Req , State } -- > no further processing
%%
delete_completed(Req, State = #state{completed = Completed}) ->
{Completed, Req, State}.
%%
%%------------------------------------------------------------------------------
%% RPC functions
%%------------------------------------------------------------------------------
%%
batch_rpc(#state{body = Batch,
handler = Handler, auth = Auth, options = Opts}) ->
[case call_allowed(Method, Auth, Handler) of
true ->
try Handler:call(Method, Args, Opts) of
{ok, Result} ->
[null, Result, Id];
ok ->
[null, null, Id];
% {error, Reason} when is_atom(Reason) ->
% [atom_to_binary(Reason, latin1), null, Id];
{error, Reason} ->
[Reason, null, Id];
error ->
[reason(undefined), null, Id];
accepted ->
[null, null, Id];
{goto, Location} ->
[null, Location, Id]
catch
_:function_clause ->
[<<"enoent">>, null, Id];
_:badarg ->
[<<"einval">>, null, Id];
_:badarith ->
[<<"einval">>, null, Id];
Class:Reason ->
error_logger:error_msg(
"** API RPC handler ~p terminating in handle/3~n"
" for the reason ~p:~p~n** Method was ~p~n"
"** Arguments were ~p~n** Stacktrace: ~p~n~n",
[Handler, Class, Reason, Method, Args, erlang:get_stacktrace()]),
[<<"einval">>, null, Id]
end;
false ->
[<<"eperm">>, null, Id]
end || [Method, Args, Id] <- Batch].
%%
%% -----------------------------------------------------------------------------
%% Helpers
%% -----------------------------------------------------------------------------
%%
%%
%% Error reporting.
%%
reason(undefined) ->
reason(<<"unknown">>);
reason(Reason) when is_list(Reason) ->
Reason;
reason(Reason) when is_binary(Reason); is_number(Reason) ->
reason([{error, Reason}]);
reason(Reason) when is_atom(Reason) ->
reason(atom_to_binary(Reason, latin1)).
%%
%% Response helpers
%%
respond(Status, Reason, Req) ->
{ok, Req2} = cowboy_req:reply(Status, set_resp_body(reason(Reason), Req)),
Req2.
set_resp_body(Body, Req) ->
cowboy_req:set_resp_body(serialize(Body, Req), Req).
%%
%% Setup CORS
%%
cors(Req, AllowedOrigins) ->
@todo validate
Req2 = cowboy_req:set_resp_header(<<"access-control-allow-origin">>,
AllowedOrigins, Req),
Access - Control - Allow - Methods : POST , GET , PUT , PATCH , DELETE , OPTIONS
Req3 = cowboy_req:set_resp_header(<<"access-control-allow-credentials">>,
<<"true">>, Req2),
cowboy_req:set_resp_header(<<"access-control-allow-headers">>,
<<"content-type, if-modified-since, authorization, x-requested-with">>,
Req3).
%%
%% Setup caching
%%
cache(Req) ->
@todo set right headers
@todo move to generate_etag ?
Req.
%%
%% -----------------------------------------------------------------------------
%% Serialization
%% -----------------------------------------------------------------------------
%%
serialize(Body, Req) ->
NB : we choose encoder from media_type meta , honoring Accept : header .
One may choose to always encode to one fixed format as well .
{CType, _} = cowboy_req:meta(media_type, Req),
encode(CType, Body, Req).
NB : first argument should match those of content_types_*/2
encode({<<"application">>, <<"x-www-form-urlencoded">>, _Params}, Body, _Req) ->
build_qs(Body);
encode({<<"application">>, <<"json">>, _Params}, Body, _Req) ->
jsx:encode(Body);
encode({<<"application">>, <<"rpc+json">>, _Params}, Body, _Req) ->
jsx:encode(Body);
NB : @fixme experimental template render support
%% Accept: text/html; template=foo --> foo:render(Body)
encode({<<"text">>, <<"html">>, Params}, Body, _Req) ->
case lists:keyfind(<<"template">>, 1, Params) of
false ->
jsx:encode(Body);
{_, TemplateName} ->
try
TemplateModule = binary_to_existing_atom(TemplateName, latin1),
{ok, IoList} = TemplateModule:render(Body),
IoList
catch
_:badarg ->
<<"no render module">>;
_:undef ->
<<"bad render module">>
end
end.
NB : Cowboy issue # 479
build_qs(Bin) when is_binary(Bin) ->
cowboy_http:urlencode(Bin);
build_qs(Atom) when is_atom(Atom) ->
build_qs(atom_to_binary(Atom, latin1));
build_qs(Int) when is_integer(Int) ->
NB : nothing unsafe in integers
list_to_binary(integer_to_list(Int));
build_qs({K, undefined}) ->
<< (build_qs(K))/binary, $= >>;
build_qs({K, V}) ->
<< (build_qs(K))/binary, $=, (build_qs(V))/binary >>;
build_qs([]) ->
<<>>;
build_qs(List) when is_list(List) ->
<< "&", R/binary >> = << << "&", (build_qs(X))/binary >> || X <- List >>,
R.
%%
%% -----------------------------------------------------------------------------
%% WebSocket.
%% -----------------------------------------------------------------------------
%%
websocket_init(_Transport, Req, State) ->
{ok, Req, State}.
websocket_terminate(_Reason, _Req, _State) ->
ok.
websocket_handle({text, Msg}, Req, State) ->
@todo make it streaming
case jsx:decode(Msg, [{error_handler, fun(_, _, _) -> {error, badarg} end}])
of
{error, _} ->
{reply, {text, <<"einval">>}, Req, State};
{incomplete, _} ->
{reply, {text, <<"einval">>}, Req, State};
Data ->
State2 = State#state{body = Data, auth = none},
{reply, {text, jsx:encode(batch_rpc(State2))}, Req, State}
end;
websocket_handle(_Data, Req, State) ->
{ok, Req, State}.
websocket_info({timeout, _Ref, Msg}, Req, State) ->
{reply, {text, Msg}, Req, State};
websocket_info(_Info, Req, State) ->
{ok, Req, State}.
| null | https://raw.githubusercontent.com/dvv/stable/4897e4abeec7d3c0cc65f7cfe1e5586c428babc7/src/cowboy_resource.erl | erlang | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
-behaviour(cowboy_rest_handler).
resource_available/2,
malformed_request/2,
options/2,
getters
setters
enable CORS
extract request info
distinguish websocket and rest
params = Params,
Verify that authentication credentials provided and not forged.
Checks user is authorized to access the resource.
for each request in the batch.
@fixme should be active if Content-Type: application/rpc+json
Other methods mean single action and access can be checked at once.
Called on OPTIONS. Use to set custom headers. Note returned tag is 'ok'.
Is resource exist?
Enumerate content types resource may process.
{{<<"application">>, <<"json">>, [
application/rpc+json accepts batch of requests
@todo: application/rpc+bert for -rpc.org/
Enumerate content types resource may return.
Enumerate character sets resource may return.
-----------------------------------------------------------------------------
Resource operations.
-----------------------------------------------------------------------------
Handle PUT/POST/PATCH requests.
The following applies only on POST
Take batch of requests from body, return batch of responses.
Request is triplet array: [Method, Params, Id].
Response is triplet array: [null, Result, Id] | [Error, null, Id].
Response entity is encoded according to Accept: header.
Delegates actual processing to application's delete/2 handler.
It should start deleting the resource and return.
Indicates whether the resource has been deleted yet.
------------------------------------------------------------------------------
RPC functions
------------------------------------------------------------------------------
{error, Reason} when is_atom(Reason) ->
[atom_to_binary(Reason, latin1), null, Id];
-----------------------------------------------------------------------------
Helpers
-----------------------------------------------------------------------------
Error reporting.
Response helpers
Setup CORS
Setup caching
-----------------------------------------------------------------------------
Serialization
-----------------------------------------------------------------------------
Accept: text/html; template=foo --> foo:render(Body)
-----------------------------------------------------------------------------
WebSocket.
-----------------------------------------------------------------------------
| @doc for a HTTP resource .
-module(cowboy_resource).
-author('Vladimir Dronnikov <>').
-behaviour(cowboy_sub_protocol).
-export([
upgrade/4
]).
-export([
rest_init/2,
rest_terminate/3,
allowed_methods/2,
is_authorized/2,
forbidden/2,
resource_exists/2,
content_types_accepted/2,
content_types_provided/2,
charsets_provided/2,
delete_resource/2,
delete_completed/2
]).
-behaviour(cowboy_websocket_handler).
-export([
websocket_init/3,
websocket_handle/3,
websocket_info/3,
websocket_terminate/3
]).
-export([
get_resource/2
]).
-export([
put_form/2,
put_json/2,
put_plain/2,
rpc_json/2
]).
-type proplist() :: list({term(), term()}).
-record(state, {
method :: binary(),
params :: proplist(),
body :: proplist(),
querystring :: proplist(),
auth :: {Identity :: term(), AllowedScope :: term()},
completed = false :: boolean(),
options :: proplist(),
handler :: module()
}).
upgrade(Req, Env, Handler, Opts) ->
Req3 = case lists:keyfind(cors, 1, Opts) of
{_, AllowedOrigins} ->
cors(Req, AllowedOrigins);
false ->
Req
end,
{Params, Req4} = cowboy_req:bindings(Req3),
{Query, Req5} = cowboy_req:qs_vals(Req4),
{Method, Req6} = cowboy_req:method(Req5),
NewProto = case cowboy_req:header(<<"upgrade">>, Req6) of
{<<"websocket">>, Req7} -> cowboy_websocket;
{_, Req7} -> cowboy_rest
end,
NewProto:upgrade(Req7, Env, ?MODULE, #state{
method = Method,
params = lists:ukeymerge(1,
lists:ukeysort(1, Params),
lists:ukeysort(1, Query)),
querystring = Query,
options = Opts,
handler = Handler
}).
rest_init(Req, State) ->
{ok, Req, State}.
rest_terminate(_Reason, _Req, _State) ->
ok.
resource_available(Req , State ) - >
{ true , Req , State } .
allowed_methods(Req, State) ->
{[<<"GET">>, <<"POST">>, <<"PUT">>, <<"DELETE">>,
<<"PATCH">>, <<"HEAD">>, <<"OPTIONS">>], Req, State}.
Validate GET requests . Body is not yet available and is not yet done .
malformed_request(Req , State ) - >
{ false , Req , State } .
Bearer or basic authorization , or ? access_token = TOKEN , or session required .
is_authorized(Req, State = #state{options = Opts}) ->
case cowboy_req:parse_header(<<"authorization">>, Req) of
{ok, {<<"bearer">>, Token}, Req2} ->
{_, Secret} = lists:keyfind(token_secret, 1, Opts),
try_authorize(Req2, State, token, {Token, Secret});
{ok, {<<"basic">>, Credentials}, Req2} ->
try_authorize(Req2, State, password, Credentials);
_ ->
case cowboy_req:qs_val(<<"access_token">>, Req) of
{undefined, Req2} ->
{Session, Req3} = cowboy_session:get(Req2),
try_authorize(Req3, State, session, Session);
{Token, Req2} ->
{_, Secret} = lists:keyfind(token_secret, 1, Opts),
try_authorize(Req2, State, token, {Token, Secret})
end
end.
try_authorize(Req, State = #state{params = Params, handler = Handler},
Type, Credentials) ->
case erlang:function_exported(Handler, authorize, 3) of
true ->
case Handler:authorize(Type, Credentials, Params) of
{ok, Auth} ->
{true, Req, State#state{auth = Auth}};
{error, _} ->
{{false, <<"Bearer, Basic, Access-Token, Cookie">>}, Req, State}
end;
false ->
{true, Req, State#state{auth = none}}
end.
NB : POST carries batch RPC and access will be checked later on
forbidden(Req , State = # state{method = < < " POST " > > } ) - >
{ false , Req , State } ;
forbidden(Req, State = #state{method = Method,
auth = Auth, handler = Handler}) ->
{not call_allowed(handler_for(Method), Auth, Handler), Req, State}.
handler_for(<<"GET">>) -> get;
handler_for(<<"POST">>) -> post;
handler_for(<<"PUT">>) -> put;
handler_for(<<"PATCH">>) -> patch;
handler_for(<<"DELETE">>) -> delete;
handler_for(<<"OPTIONS">>) -> options;
handler_for(<<"HEAD">>) -> get;
handler_for(Other) -> Other.
call_allowed(Method, Auth, Handler) ->
case erlang:function_exported(Handler, allowed, 2) of
true ->
Handler:allowed(Method, Auth);
false ->
true
end.
options(Req , State ) - >
{ ok , Req , State } .
NB : controls as well status code returned from put_resource when POST
resource_exists(Req, State = #state{method = <<"POST">>}) ->
{false, Req, State};
resource_exists(Req, State) ->
{true, Req, State}.
content_types_accepted(Req, State) ->
{[
{ < < " charset " > > , < < " utf-8 " > > } ] } , ,
{{<<"application">>, <<"json">>, '*'}, put_json},
{{<<"application">>, <<"x-www-form-urlencoded">>, '*'}, put_form},
{{<<"text">>, <<"plain">>, '*'}, put_plain},
{{<<"application">>, <<"rpc+json">>, '*'}, rpc_json}
], Req, State}.
content_types_provided(Req, State) ->
{[
{{<<"application">>, <<"json">>, '*'}, get_resource},
@todo disable if application / rpc+json data was provided
{{<<"application">>, <<"x-www-form-urlencoded">>, '*'}, get_resource},
{{<<"text">>, <<"html">>, '*'}, get_resource}
], Req, State}.
charsets_provided(Req, State) ->
{[<<"utf-8">>], Req, State}.
Delegates actual processing to application 's get/2 handler .
Encodes response entity .
- { Body , Req , State } -- > 200 OK
- { halt , Req , State } -- > no further processing
get_resource(Req, State = #state{
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:get(Params, [{auth, Auth} | Opts]) of
{ok, Result} ->
{serialize(Result, Req), Req, State};
{error, enoent} ->
{halt, respond(404, <<"enoent">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State};
{goto, Location = << $/, _/binary >> } ->
{halt, cowboy_req:set_resp_header(<<"location">>, Location, Req), State};
{goto, Location} ->
{BasePath, Req2} = cowboy_req:path(Req),
{halt, cowboy_req:set_resp_header(<<"location">>,
[BasePath, $/, Location], Req2), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in get/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end.
- { false , Req , State } -- > 422 Unprocessable Entity
- { true , Req , State } -- > 204 No Content
- { halt , Req , State } -- > no further processing
The following applies only on PUT
- set response location : and { true , Req , State } -- > 201 Created
- set response body and { true , Req , State } -- > 200 OK
- { { true , Location } , Req , State } -- > 303 See Other
put_json(Req, State) ->
@todo make it streaming
{ok, Body, Req2} = cowboy_req:body(Req),
case jsx:decode(Body, [{error_handler, fun(_, _, _) -> {error, badarg} end}])
of
{error, _} ->
{false, Req2, State};
{incomplete, _} ->
{false, Req2, State};
Data ->
put_resource(Req2, State#state{body = Data})
end.
put_form(Req, State) ->
{ok, Result, Req2} = cowboy_req:body_qs(Req),
put_resource(Req2, State#state{body = Result}).
put_plain(Req, State) ->
{ok, Result, Req2} = cowboy_req:body(Req),
put_resource(Req2, State#state{body = Result}).
Requests processing delegated to application 's handle(Method , [ ] ) .
rpc_json(Req, State) ->
@todo make it streaming
{ok, Body, Req2} = cowboy_req:body(Req),
case jsx:decode(Body, [{error_handler, fun(_, _, _) -> {error, badarg} end}])
of
{error, _} ->
{false, Req2, State};
{incomplete, _} ->
{false, Req2, State};
Data ->
State2 = State#state{body = Data},
{halt, respond(200, batch_rpc(State2), Req2), State2}
end.
@todo add more body decoders here . multipart is welcome .
Bodyful methods delegate actual processing to application 's handlers .
put_resource(Req, State = #state{method = <<"POST">>, body = Data,
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:post(Data, Params, [{auth, Auth} | Opts]) of
{ok, Body} ->
{true, set_resp_body(Body, Req), State};
ok ->
{true, Req, State};
{error, eexist} ->
{halt, respond(409, <<"eexist">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State};
{goto, Location = << $/, _/binary >> } ->
{{true, Location}, Req, State};
{goto, Location} ->
{BasePath, Req2} = cowboy_req:path(Req),
{{true, [BasePath, $/, Location]}, Req2, State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in post/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end;
put_resource(Req, State = #state{method = <<"PUT">>, body = Data,
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:put(Data, Params, [{auth, Auth} | Opts]) of
ok ->
{true, Req, State};
{ok, Body} ->
{true, set_resp_body(Body, Req), State};
{error, eexist} ->
{halt, respond(409, <<"eexist">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in put/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end;
put_resource(Req, State = #state{method = <<"PATCH">>, body = Data,
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:patch(Data, Params, [{auth, Auth} | Opts]) of
ok ->
{true, Req, State};
{ok, Body} ->
{true, set_resp_body(Body, Req), State};
{error, enoent} ->
{halt, respond(404, <<"enoent">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State};
error ->
{halt, respond(400, undefined, Req), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in patch/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end.
- { true , Req , State } -- > 204 No Content , unless delete_completed/2 defined
- { X = /= true , Req , State } -- > 500 Internal Server Error
- { halt , Req , State } -- > no further processing
delete_resource(Req, State = #state{
params = Params, handler = Handler, options = Opts, auth = Auth}) ->
try Handler:delete(Params, [{auth, Auth} | Opts]) of
ok ->
{true, Req, State#state{completed = true}};
accepted ->
{true, Req, State#state{completed = false}};
error ->
{halt, respond(400, undefined, Req), State};
{error, enoent} ->
{halt, respond(404, <<"enoent">>, Req), State};
{error, Reason} ->
{halt, respond(400, Reason, Req), State}
catch Class:Reason ->
error_logger:error_msg(
"** API handler ~p terminating in delete/3~n"
" for the reason ~p:~p~n** State was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Class, Reason, State, erlang:get_stacktrace()]),
{halt, respond(500, Reason, Req), State}
end.
- { true , Req , State } -- > go ahead with 200/204
- { false , Req , State } -- > 202 Accepted
- { halt , Req , State } -- > no further processing
delete_completed(Req, State = #state{completed = Completed}) ->
{Completed, Req, State}.
batch_rpc(#state{body = Batch,
handler = Handler, auth = Auth, options = Opts}) ->
[case call_allowed(Method, Auth, Handler) of
true ->
try Handler:call(Method, Args, Opts) of
{ok, Result} ->
[null, Result, Id];
ok ->
[null, null, Id];
{error, Reason} ->
[Reason, null, Id];
error ->
[reason(undefined), null, Id];
accepted ->
[null, null, Id];
{goto, Location} ->
[null, Location, Id]
catch
_:function_clause ->
[<<"enoent">>, null, Id];
_:badarg ->
[<<"einval">>, null, Id];
_:badarith ->
[<<"einval">>, null, Id];
Class:Reason ->
error_logger:error_msg(
"** API RPC handler ~p terminating in handle/3~n"
" for the reason ~p:~p~n** Method was ~p~n"
"** Arguments were ~p~n** Stacktrace: ~p~n~n",
[Handler, Class, Reason, Method, Args, erlang:get_stacktrace()]),
[<<"einval">>, null, Id]
end;
false ->
[<<"eperm">>, null, Id]
end || [Method, Args, Id] <- Batch].
reason(undefined) ->
reason(<<"unknown">>);
reason(Reason) when is_list(Reason) ->
Reason;
reason(Reason) when is_binary(Reason); is_number(Reason) ->
reason([{error, Reason}]);
reason(Reason) when is_atom(Reason) ->
reason(atom_to_binary(Reason, latin1)).
respond(Status, Reason, Req) ->
{ok, Req2} = cowboy_req:reply(Status, set_resp_body(reason(Reason), Req)),
Req2.
set_resp_body(Body, Req) ->
cowboy_req:set_resp_body(serialize(Body, Req), Req).
cors(Req, AllowedOrigins) ->
@todo validate
Req2 = cowboy_req:set_resp_header(<<"access-control-allow-origin">>,
AllowedOrigins, Req),
Access - Control - Allow - Methods : POST , GET , PUT , PATCH , DELETE , OPTIONS
Req3 = cowboy_req:set_resp_header(<<"access-control-allow-credentials">>,
<<"true">>, Req2),
cowboy_req:set_resp_header(<<"access-control-allow-headers">>,
<<"content-type, if-modified-since, authorization, x-requested-with">>,
Req3).
cache(Req) ->
@todo set right headers
@todo move to generate_etag ?
Req.
serialize(Body, Req) ->
NB : we choose encoder from media_type meta , honoring Accept : header .
One may choose to always encode to one fixed format as well .
{CType, _} = cowboy_req:meta(media_type, Req),
encode(CType, Body, Req).
NB : first argument should match those of content_types_*/2
encode({<<"application">>, <<"x-www-form-urlencoded">>, _Params}, Body, _Req) ->
build_qs(Body);
encode({<<"application">>, <<"json">>, _Params}, Body, _Req) ->
jsx:encode(Body);
encode({<<"application">>, <<"rpc+json">>, _Params}, Body, _Req) ->
jsx:encode(Body);
NB : @fixme experimental template render support
encode({<<"text">>, <<"html">>, Params}, Body, _Req) ->
case lists:keyfind(<<"template">>, 1, Params) of
false ->
jsx:encode(Body);
{_, TemplateName} ->
try
TemplateModule = binary_to_existing_atom(TemplateName, latin1),
{ok, IoList} = TemplateModule:render(Body),
IoList
catch
_:badarg ->
<<"no render module">>;
_:undef ->
<<"bad render module">>
end
end.
NB : Cowboy issue # 479
build_qs(Bin) when is_binary(Bin) ->
cowboy_http:urlencode(Bin);
build_qs(Atom) when is_atom(Atom) ->
build_qs(atom_to_binary(Atom, latin1));
build_qs(Int) when is_integer(Int) ->
NB : nothing unsafe in integers
list_to_binary(integer_to_list(Int));
build_qs({K, undefined}) ->
<< (build_qs(K))/binary, $= >>;
build_qs({K, V}) ->
<< (build_qs(K))/binary, $=, (build_qs(V))/binary >>;
build_qs([]) ->
<<>>;
build_qs(List) when is_list(List) ->
<< "&", R/binary >> = << << "&", (build_qs(X))/binary >> || X <- List >>,
R.
websocket_init(_Transport, Req, State) ->
{ok, Req, State}.
websocket_terminate(_Reason, _Req, _State) ->
ok.
websocket_handle({text, Msg}, Req, State) ->
@todo make it streaming
case jsx:decode(Msg, [{error_handler, fun(_, _, _) -> {error, badarg} end}])
of
{error, _} ->
{reply, {text, <<"einval">>}, Req, State};
{incomplete, _} ->
{reply, {text, <<"einval">>}, Req, State};
Data ->
State2 = State#state{body = Data, auth = none},
{reply, {text, jsx:encode(batch_rpc(State2))}, Req, State}
end;
websocket_handle(_Data, Req, State) ->
{ok, Req, State}.
websocket_info({timeout, _Ref, Msg}, Req, State) ->
{reply, {text, Msg}, Req, State};
websocket_info(_Info, Req, State) ->
{ok, Req, State}.
|
bfc54b739bcad16171a210a7c18482bb1f65bb4ef630a8102e95648a7f722226 | webyrd/normalization-by-evaluation | nbe.scm | (load "../faster-miniKanren/mk-vicare.scm")
(load "../faster-miniKanren/mk.scm")
(load "../faster-miniKanren/test-check.scm")
(define lookupo
(lambda (x env val)
(fresh (y v env^)
(== `((,y . ,v) . ,env^) env)
(symbolo x)
(symbolo y)
(conde
((== x y) (== v val))
((=/= x y)
(lookupo x env^ val))))))
(define eval-expro
(lambda (expr env val)
(conde
((fresh (x body)
(== `(Lam ,x ,body) expr)
(== `(Closure ,x ,body ,env) val)
(symbolo x)))
((fresh (x)
(== `(Var ,x) expr)
(symbolo x)
(lookupo x env val)))
((fresh (e1 e2 f v)
(== `(App ,e1 ,e2) expr)
(eval-expro e1 env f)
(eval-expro e2 env v)
(apply-expro f v val))))))
(define apply-expro
(lambda (f v val)
(conde
((fresh (n)
(== `(N ,n) f)
(== `(N (NApp ,n ,v)) val)))
((fresh (x body env)
(== `(Closure ,x ,body ,env) f)
(symbolo x)
(eval-expro body `((,x . ,v) . ,env) val))))))
Fast and simple fresho definition ( written with )
;; Rather than compute a renamed variable, we just describe the constraints.
(define fresho
(lambda (xs x^)
(fresh ()
(symbolo x^)
(absento x^ xs))))
(define uneval-valueo
(lambda (xs v expr)
(conde
((fresh (n)
(== `(N ,n) v)
(uneval-neutralo xs n expr)))
((fresh (x body env x^ body^ bv)
(== `(Closure ,x ,body ,env) v)
(== `(Lam ,x^ ,body^) expr)
(symbolo x)
(symbolo x^)
(fresho xs x^)
(eval-expro body `((,x . (N (NVar ,x^))) . ,env) bv)
(uneval-valueo `(,x^ . ,xs) bv body^))))))
(define uneval-neutralo
(lambda (xs n expr)
(conde
((fresh (x)
(== `(NVar ,x) n)
(== `(Var ,x) expr)))
((fresh (n^ v ne ve)
(== `(NApp ,n^ ,v) n)
(== `(App ,ne ,ve) expr)
(uneval-neutralo xs n^ ne)
(uneval-valueo xs v ve))))))
(define nfo
(lambda (t env expr)
(fresh (v)
(eval-expro t env v)
(uneval-valueo '() v expr))))
(define main
(lambda ()
(run* (result)
(fresh (id_ const_)
(eval-expro '(Lam x (Var x)) '() id_)
(eval-expro '(Lam x (Lam y (Var x))) '() const_)
(eval-expro '(App (Var const) (Var id)) `((id . ,id_) (const . ,const_)) result)))))
(test "main"
(main)
'((Closure y (Var x) ((x Closure x (Var x) ())))))
nf [ ] ( " x " ( App ( " y " ( App ( Var " x " ) ( Var " y " ) ) ) ( " x " ( Var " x " ) ) ) )
;; =>
Lam " x " ( App ( Var " x " ) ( " x ' " ( Var " x ' " ) ) )
(test "nf-0"
(run* (expr)
(nfo '(Lam x (App (Lam y (App (Var x) (Var y))) (Lam x (Var x)))) '() expr))
'(((Lam _.0 (App (Var _.0) (Lam _.1 (Var _.1))))
(=/= ((_.0 _.1)))
(sym _.0 _.1))))
| null | https://raw.githubusercontent.com/webyrd/normalization-by-evaluation/07e56763d67cfd8e84d4252a6a4d80b04cde1373/miniKanren-version/naive/nbe.scm | scheme | Rather than compute a renamed variable, we just describe the constraints.
=> | (load "../faster-miniKanren/mk-vicare.scm")
(load "../faster-miniKanren/mk.scm")
(load "../faster-miniKanren/test-check.scm")
(define lookupo
(lambda (x env val)
(fresh (y v env^)
(== `((,y . ,v) . ,env^) env)
(symbolo x)
(symbolo y)
(conde
((== x y) (== v val))
((=/= x y)
(lookupo x env^ val))))))
(define eval-expro
(lambda (expr env val)
(conde
((fresh (x body)
(== `(Lam ,x ,body) expr)
(== `(Closure ,x ,body ,env) val)
(symbolo x)))
((fresh (x)
(== `(Var ,x) expr)
(symbolo x)
(lookupo x env val)))
((fresh (e1 e2 f v)
(== `(App ,e1 ,e2) expr)
(eval-expro e1 env f)
(eval-expro e2 env v)
(apply-expro f v val))))))
(define apply-expro
(lambda (f v val)
(conde
((fresh (n)
(== `(N ,n) f)
(== `(N (NApp ,n ,v)) val)))
((fresh (x body env)
(== `(Closure ,x ,body ,env) f)
(symbolo x)
(eval-expro body `((,x . ,v) . ,env) val))))))
Fast and simple fresho definition ( written with )
(define fresho
(lambda (xs x^)
(fresh ()
(symbolo x^)
(absento x^ xs))))
(define uneval-valueo
(lambda (xs v expr)
(conde
((fresh (n)
(== `(N ,n) v)
(uneval-neutralo xs n expr)))
((fresh (x body env x^ body^ bv)
(== `(Closure ,x ,body ,env) v)
(== `(Lam ,x^ ,body^) expr)
(symbolo x)
(symbolo x^)
(fresho xs x^)
(eval-expro body `((,x . (N (NVar ,x^))) . ,env) bv)
(uneval-valueo `(,x^ . ,xs) bv body^))))))
(define uneval-neutralo
(lambda (xs n expr)
(conde
((fresh (x)
(== `(NVar ,x) n)
(== `(Var ,x) expr)))
((fresh (n^ v ne ve)
(== `(NApp ,n^ ,v) n)
(== `(App ,ne ,ve) expr)
(uneval-neutralo xs n^ ne)
(uneval-valueo xs v ve))))))
(define nfo
(lambda (t env expr)
(fresh (v)
(eval-expro t env v)
(uneval-valueo '() v expr))))
(define main
(lambda ()
(run* (result)
(fresh (id_ const_)
(eval-expro '(Lam x (Var x)) '() id_)
(eval-expro '(Lam x (Lam y (Var x))) '() const_)
(eval-expro '(App (Var const) (Var id)) `((id . ,id_) (const . ,const_)) result)))))
(test "main"
(main)
'((Closure y (Var x) ((x Closure x (Var x) ())))))
nf [ ] ( " x " ( App ( " y " ( App ( Var " x " ) ( Var " y " ) ) ) ( " x " ( Var " x " ) ) ) )
Lam " x " ( App ( Var " x " ) ( " x ' " ( Var " x ' " ) ) )
(test "nf-0"
(run* (expr)
(nfo '(Lam x (App (Lam y (App (Var x) (Var y))) (Lam x (Var x)))) '() expr))
'(((Lam _.0 (App (Var _.0) (Lam _.1 (Var _.1))))
(=/= ((_.0 _.1)))
(sym _.0 _.1))))
|
10cd8989e0a3925387fac3dda9d94afe3b6dce6e72bcb01a6350533441048b51 | 1Jajen1/Brokkr | Utils.hs | {-# LANGUAGE ImpredicativeTypes #-}
module Network.Utils (
encodePackets
) where
import qualified Data.ByteString as BS
import Network.Effect.Packet (toStrictSizePrefixedByteString)
import qualified Mason.Builder as B
encodePackets :: [[B.Builder]] -> [BS.ByteString]
encodePackets toSend = foldMap (\x -> toStrictSizePrefixedByteString 128 x) <$> toSend
| null | https://raw.githubusercontent.com/1Jajen1/Brokkr/fe56efaf450f29a5571cc34fa01f7301678f3eaf/test/Network/Utils.hs | haskell | # LANGUAGE ImpredicativeTypes # | module Network.Utils (
encodePackets
) where
import qualified Data.ByteString as BS
import Network.Effect.Packet (toStrictSizePrefixedByteString)
import qualified Mason.Builder as B
encodePackets :: [[B.Builder]] -> [BS.ByteString]
encodePackets toSend = foldMap (\x -> toStrictSizePrefixedByteString 128 x) <$> toSend
|
99f02b1921bb81e6ea558ba1b5e6c467ca4d1e119468d51f7c66a8f4a85d2215 | polymeris/cljs-aws | direct_connect.cljs | (ns cljs-aws.direct-connect
(:require [cljs-aws.base.requests])
(:require-macros [cljs-aws.base.service :refer [defservice]]))
(defservice "DirectConnect" "directconnect-2012-10-25.min.json")
| null | https://raw.githubusercontent.com/polymeris/cljs-aws/3326e7c4db4dfc36dcb80770610c14c8a7fd0d66/src/cljs_aws/direct_connect.cljs | clojure | (ns cljs-aws.direct-connect
(:require [cljs-aws.base.requests])
(:require-macros [cljs-aws.base.service :refer [defservice]]))
(defservice "DirectConnect" "directconnect-2012-10-25.min.json")
| |
944c38956fea031d64994c106e358900457c2f9d8076dba51ab1335d76e0407a | kmi/irs | emergency-gis-domain.lisp | Mode : Lisp ; Package :
File created in WebOnto
(in-package "OCML")
(in-ontology emergency-gis-domain)
;; method
(def-class method ())
(def-instance getRestCentresInRadius method)
(def-instance getHotelsInRadius method)
(def-instance getInnsInRadius method)
(def-instance getHospitalsInRadius method)
(def-instance getSupermarketsInRadius method)
;; results
(def-class rest-centres-list (list))
(def-class accommodations-list (list))
(def-class hospitals-list (list))
(def-class supermarkets-list (list))
;; results' elements
(def-class rest-centre ()
((has-latitude :type float)
(has-longitude :type float)
(has-address :type string)
(has-capacity :type integer)
(has-cooking :type string)
(has-GMS :type string)
(has-heating :type string)
(has-key-holder :type string)
(has-key-holder2 :type string)
(has-key-holder2-telephone :type string)
(has-key-holder-telephone :type string)
(has-meals :type integer)
(has-name :type string)
(has-provision :type string)
(has-remarks :type string)
(has-telephone :type string)))
(def-class accommodation ()
((has-latitude :type float)
(has-longitude :type float)
(has-address1 :type string)
(has-address2 :type string)
(has-postcode :type string)
(has-rooms :type string)
(has-telephone :type string)))
(def-class hospital ()
((has-latitude :type float)
(has-longitude :type float)
(has-address :type string)
(has-beds :type integer)
(has-location :type string)
(has-name :type string)
(has-postcode :type string)
(has-telephone :type string)))
(def-class supermarket ()
((has-latitude :type float)
(has-longitude :type float)
(has-address :type string)
(has-alcohol :type boolean)
(has-chain :type string)
(has-clothing :type boolean)
(has-fri-close :type string)
(has-fri-open :type string)
(has-mon-close :type string)
(has-mon-open :type string)
(has-petrol :type boolean)
(has-pharmacy :type boolean)
(has-postcode :type string)
(has-sat-close :type string)
(has-sat-open :type string)
(has-store :type string)
(has-sun-close :type string)
(has-sun-open :type string)
(has-telephone :type string)
(has-thu-close :type string)
(has-thu-open :type string)
(has-town :type string)
(has-tue-close :type string)
(has-tue-open :type string)
(has-wed-close :type string)
(has-wed-open :type string)))
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/apps/emerges/emergency-gis-domain.lisp | lisp | Package :
method
results
results' elements |
File created in WebOnto
(in-package "OCML")
(in-ontology emergency-gis-domain)
(def-class method ())
(def-instance getRestCentresInRadius method)
(def-instance getHotelsInRadius method)
(def-instance getInnsInRadius method)
(def-instance getHospitalsInRadius method)
(def-instance getSupermarketsInRadius method)
(def-class rest-centres-list (list))
(def-class accommodations-list (list))
(def-class hospitals-list (list))
(def-class supermarkets-list (list))
(def-class rest-centre ()
((has-latitude :type float)
(has-longitude :type float)
(has-address :type string)
(has-capacity :type integer)
(has-cooking :type string)
(has-GMS :type string)
(has-heating :type string)
(has-key-holder :type string)
(has-key-holder2 :type string)
(has-key-holder2-telephone :type string)
(has-key-holder-telephone :type string)
(has-meals :type integer)
(has-name :type string)
(has-provision :type string)
(has-remarks :type string)
(has-telephone :type string)))
(def-class accommodation ()
((has-latitude :type float)
(has-longitude :type float)
(has-address1 :type string)
(has-address2 :type string)
(has-postcode :type string)
(has-rooms :type string)
(has-telephone :type string)))
(def-class hospital ()
((has-latitude :type float)
(has-longitude :type float)
(has-address :type string)
(has-beds :type integer)
(has-location :type string)
(has-name :type string)
(has-postcode :type string)
(has-telephone :type string)))
(def-class supermarket ()
((has-latitude :type float)
(has-longitude :type float)
(has-address :type string)
(has-alcohol :type boolean)
(has-chain :type string)
(has-clothing :type boolean)
(has-fri-close :type string)
(has-fri-open :type string)
(has-mon-close :type string)
(has-mon-open :type string)
(has-petrol :type boolean)
(has-pharmacy :type boolean)
(has-postcode :type string)
(has-sat-close :type string)
(has-sat-open :type string)
(has-store :type string)
(has-sun-close :type string)
(has-sun-open :type string)
(has-telephone :type string)
(has-thu-close :type string)
(has-thu-open :type string)
(has-town :type string)
(has-tue-close :type string)
(has-tue-open :type string)
(has-wed-close :type string)
(has-wed-open :type string)))
|
76e2611c547268a4edd0ff2d23199550e816e5dc15dba7bfb672d6d7ccd73be8 | well-typed/cborg | Term.hs | # LANGUAGE CPP #
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - orphans #
module Tests.Term (
Term
, serialise
, deserialise
, toRefTerm
, fromRefTerm
, eqTerm
, canonicaliseTerm
, prop_fromToRefTerm
, prop_toFromRefTerm
) where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Data.Word
import qualified Numeric.Half as Half
import Codec.CBOR.Term
import Codec.CBOR.Read
import Codec.CBOR.Write
import Test.QuickCheck
import qualified Tests.Reference.Implementation as Ref
import Tests.Reference.Generators
( floatToWord, doubleToWord, canonicalNaN
, HalfSpecials(..), FloatSpecials(..), DoubleSpecials(..) )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Exception (throw)
------------------------------------------------------------------------------
serialise :: Term -> LBS.ByteString
serialise = toLazyByteString . encodeTerm
deserialise :: LBS.ByteString -> Term
deserialise b =
case deserialiseFromBytes decodeTerm b of
Left failure -> throw failure
Right (trailing, _) | not (LBS.null trailing)
-> error "Test.deserialise: trailing data"
Right (_, t) -> t
------------------------------------------------------------------------------
toRefTerm :: Term -> Ref.Term
toRefTerm (TInt n)
| n >= 0 = Ref.TUInt (Ref.toUInt (fromIntegral n))
| otherwise = Ref.TNInt (Ref.toUInt (fromIntegral (-1 - n)))
toRefTerm (TInteger n) -- = Ref.TBigInt n
| n >= 0 && n <= fromIntegral (maxBound :: Word64)
= Ref.TUInt (Ref.toUInt (fromIntegral n))
| n < 0 && n >= -1 - fromIntegral (maxBound :: Word64)
= Ref.TNInt (Ref.toUInt (fromIntegral (-1 - n)))
| otherwise = Ref.TBigInt n
toRefTerm (TBytes bs) = Ref.TBytes (BS.unpack bs)
toRefTerm (TBytesI bs) = Ref.TBytess (map BS.unpack (LBS.toChunks bs))
toRefTerm (TString st) = Ref.TString (T.unpack st)
toRefTerm (TStringI st) = Ref.TStrings (map T.unpack (LT.toChunks st))
toRefTerm (TList ts) = Ref.TArray (map toRefTerm ts)
toRefTerm (TListI ts) = Ref.TArrayI (map toRefTerm ts)
toRefTerm (TMap ts) = Ref.TMap [ (toRefTerm x, toRefTerm y)
| (x,y) <- ts ]
toRefTerm (TMapI ts) = Ref.TMapI [ (toRefTerm x, toRefTerm y)
| (x,y) <- ts ]
toRefTerm (TTagged w t) = Ref.TTagged (Ref.toUInt (fromIntegral w))
(toRefTerm t)
toRefTerm (TBool False) = Ref.TFalse
toRefTerm (TBool True) = Ref.TTrue
toRefTerm TNull = Ref.TNull
toRefTerm (TSimple 23) = Ref.TUndef
toRefTerm (TSimple w) = Ref.TSimple (Ref.toSimple w)
toRefTerm (THalf f) = if isNaN f
then Ref.TFloat16 canonicalNaN
else Ref.TFloat16 (HalfSpecials (Half.toHalf f))
toRefTerm (TFloat f) = if isNaN f
then Ref.TFloat16 canonicalNaN
else Ref.TFloat32 (FloatSpecials f)
toRefTerm (TDouble f) = if isNaN f
then Ref.TFloat16 canonicalNaN
else Ref.TFloat64 (DoubleSpecials f)
fromRefTerm :: Ref.Term -> Term
fromRefTerm (Ref.TUInt u)
| n <= fromIntegral (maxBound :: Int) = TInt (fromIntegral n)
| otherwise = TInteger (fromIntegral n)
where n = Ref.fromUInt u
fromRefTerm (Ref.TNInt u)
| n <= fromIntegral (maxBound :: Int) = TInt (-1 - fromIntegral n)
| otherwise = TInteger (-1 - fromIntegral n)
where n = Ref.fromUInt u
fromRefTerm (Ref.TBigInt n) = TInteger n
fromRefTerm (Ref.TBytes bs) = TBytes (BS.pack bs)
fromRefTerm (Ref.TBytess bs) = TBytesI (LBS.fromChunks (map BS.pack bs))
fromRefTerm (Ref.TString st) = TString (T.pack st)
fromRefTerm (Ref.TStrings st) = TStringI (LT.fromChunks (map T.pack st))
fromRefTerm (Ref.TArray ts) = TList (map fromRefTerm ts)
fromRefTerm (Ref.TArrayI ts) = TListI (map fromRefTerm ts)
fromRefTerm (Ref.TMap ts) = TMap [ (fromRefTerm x, fromRefTerm y)
| (x,y) <- ts ]
fromRefTerm (Ref.TMapI ts) = TMapI [ (fromRefTerm x, fromRefTerm y)
| (x,y) <- ts ]
fromRefTerm (Ref.TTagged w t) = TTagged (Ref.fromUInt w)
(fromRefTerm t)
fromRefTerm (Ref.TFalse) = TBool False
fromRefTerm (Ref.TTrue) = TBool True
fromRefTerm Ref.TNull = TNull
fromRefTerm Ref.TUndef = TSimple 23
fromRefTerm (Ref.TSimple w) = TSimple (Ref.fromSimple w)
fromRefTerm (Ref.TFloat16 f) = THalf (Half.fromHalf (getHalfSpecials f))
fromRefTerm (Ref.TFloat32 f) = TFloat (getFloatSpecials f)
fromRefTerm (Ref.TFloat64 f) = TDouble (getDoubleSpecials f)
-- | Compare terms for equality.
--
-- It does exact bit for bit equality of floats. This means we can compare
NaNs , and different NaNs do not compare equal . If you need equality
modulo different NaNs then use ' canonicaliseTerm ' .
--
If you need equality modulo different representations of ' TInt ' vs ' TInteger '
-- then use 'canonicaliseTerm'.
--
eqTerm :: Term -> Term -> Bool
eqTerm (TList ts) (TList ts') = and (zipWith eqTerm ts ts')
eqTerm (TListI ts) (TListI ts') = and (zipWith eqTerm ts ts')
eqTerm (TMap ts) (TMap ts') = and (zipWith eqTermPair ts ts')
eqTerm (TMapI ts) (TMapI ts') = and (zipWith eqTermPair ts ts')
eqTerm (TTagged w t) (TTagged w' t') = w == w' && eqTerm t t'
eqTerm (THalf f) (THalf f') = floatToWord f == floatToWord f'
eqTerm (TFloat f) (TFloat f') = floatToWord f == floatToWord f'
eqTerm (TDouble f) (TDouble f') = doubleToWord f == doubleToWord f'
eqTerm a b = a == b
eqTermPair :: (Term, Term) -> (Term, Term) -> Bool
eqTermPair (a,b) (a',b') = eqTerm a a' && eqTerm b b'
| Both ' toRefTerm ' and the encoding \/ decoding round trip canonicalises
NaNs . So tests involving these often need this in combination with
-- comparing for exact equality using 'eqTerm'.
--
canonicaliseTerm :: Term -> Term
canonicaliseTerm (THalf f) | isNaN f = canonicalTermNaN
canonicaliseTerm (TFloat f) | isNaN f = canonicalTermNaN
canonicaliseTerm (TDouble f) | isNaN f = canonicalTermNaN
canonicaliseTerm (TInteger n) | n <= fromIntegral (maxBound :: Int)
, n >= fromIntegral (minBound :: Int)
= TInt (fromIntegral n)
canonicaliseTerm (TList ts) = TList (map canonicaliseTerm ts)
canonicaliseTerm (TListI ts) = TListI (map canonicaliseTerm ts)
canonicaliseTerm (TMap ts) = TMap (map canonicaliseTermPair ts)
canonicaliseTerm (TMapI ts) = TMapI (map canonicaliseTermPair ts)
canonicaliseTerm (TTagged tag t) = TTagged tag (canonicaliseTerm t)
canonicaliseTerm t = t
canonicalTermNaN :: Term
canonicalTermNaN = THalf canonicalNaN
canonicaliseTermPair :: (Term, Term) -> (Term, Term)
canonicaliseTermPair (a,b) =
(canonicaliseTerm a, canonicaliseTerm b)
prop_fromToRefTerm :: Ref.Term -> Bool
prop_fromToRefTerm term = toRefTerm (fromRefTerm term)
== Ref.canonicaliseTerm term
prop_toFromRefTerm :: Term -> Bool
prop_toFromRefTerm term = fromRefTerm (toRefTerm term)
`eqTerm` canonicaliseTerm term
instance Arbitrary Term where
arbitrary = fromRefTerm <$> arbitrary
shrink (TInt n) = [ TInt n' | n' <- shrink n ]
shrink (TInteger n) = [ TInteger n' | n' <- shrink n ]
shrink (TBytes ws) = [ TBytes (BS.pack ws') | ws' <- shrink (BS.unpack ws) ]
shrink (TBytesI wss) = [ TBytesI (LBS.fromChunks (map BS.pack wss'))
| wss' <- shrink (map BS.unpack (LBS.toChunks wss)) ]
shrink (TString cs) = [ TString (T.pack cs') | cs' <- shrink (T.unpack cs) ]
shrink (TStringI css) = [ TStringI (LT.fromChunks (map T.pack css'))
| css' <- shrink (map T.unpack (LT.toChunks css)) ]
shrink (TList xs@[x]) = x : [ TList xs' | xs' <- shrink xs ]
shrink (TList xs) = [ TList xs' | xs' <- shrink xs ]
shrink (TListI xs@[x]) = x : [ TListI xs' | xs' <- shrink xs ]
shrink (TListI xs) = [ TListI xs' | xs' <- shrink xs ]
shrink (TMap xys@[(x,y)]) = x : y : [ TMap xys' | xys' <- shrink xys ]
shrink (TMap xys) = [ TMap xys' | xys' <- shrink xys ]
shrink (TMapI xys@[(x,y)]) = x : y : [ TMapI xys' | xys' <- shrink xys ]
shrink (TMapI xys) = [ TMapI xys' | xys' <- shrink xys ]
shrink (TTagged w t) = t : [ TTagged w' t' | (w', t') <- shrink (w, t)
, not (Ref.reservedTag (fromIntegral w')) ]
shrink (TBool _) = []
shrink TNull = []
shrink (TSimple w) = [ TSimple w' | w' <- shrink w
, Ref.unassignedSimple w || w == 23 ]
shrink (THalf _f) = []
shrink (TFloat f) = [ TFloat f' | f' <- shrink f ]
shrink (TDouble f) = [ TDouble f' | f' <- shrink f ]
| null | https://raw.githubusercontent.com/well-typed/cborg/9be3fd5437f9d2ec1df784d5d939efb9a85fd1fb/cborg/tests/Tests/Term.hs | haskell | # LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedStrings #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
= Ref.TBigInt n
| Compare terms for equality.
It does exact bit for bit equality of floats. This means we can compare
then use 'canonicaliseTerm'.
comparing for exact equality using 'eqTerm'.
| # LANGUAGE CPP #
# OPTIONS_GHC -fno - warn - orphans #
module Tests.Term (
Term
, serialise
, deserialise
, toRefTerm
, fromRefTerm
, eqTerm
, canonicaliseTerm
, prop_fromToRefTerm
, prop_toFromRefTerm
) where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Data.Word
import qualified Numeric.Half as Half
import Codec.CBOR.Term
import Codec.CBOR.Read
import Codec.CBOR.Write
import Test.QuickCheck
import qualified Tests.Reference.Implementation as Ref
import Tests.Reference.Generators
( floatToWord, doubleToWord, canonicalNaN
, HalfSpecials(..), FloatSpecials(..), DoubleSpecials(..) )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Exception (throw)
serialise :: Term -> LBS.ByteString
serialise = toLazyByteString . encodeTerm
deserialise :: LBS.ByteString -> Term
deserialise b =
case deserialiseFromBytes decodeTerm b of
Left failure -> throw failure
Right (trailing, _) | not (LBS.null trailing)
-> error "Test.deserialise: trailing data"
Right (_, t) -> t
toRefTerm :: Term -> Ref.Term
toRefTerm (TInt n)
| n >= 0 = Ref.TUInt (Ref.toUInt (fromIntegral n))
| otherwise = Ref.TNInt (Ref.toUInt (fromIntegral (-1 - n)))
| n >= 0 && n <= fromIntegral (maxBound :: Word64)
= Ref.TUInt (Ref.toUInt (fromIntegral n))
| n < 0 && n >= -1 - fromIntegral (maxBound :: Word64)
= Ref.TNInt (Ref.toUInt (fromIntegral (-1 - n)))
| otherwise = Ref.TBigInt n
toRefTerm (TBytes bs) = Ref.TBytes (BS.unpack bs)
toRefTerm (TBytesI bs) = Ref.TBytess (map BS.unpack (LBS.toChunks bs))
toRefTerm (TString st) = Ref.TString (T.unpack st)
toRefTerm (TStringI st) = Ref.TStrings (map T.unpack (LT.toChunks st))
toRefTerm (TList ts) = Ref.TArray (map toRefTerm ts)
toRefTerm (TListI ts) = Ref.TArrayI (map toRefTerm ts)
toRefTerm (TMap ts) = Ref.TMap [ (toRefTerm x, toRefTerm y)
| (x,y) <- ts ]
toRefTerm (TMapI ts) = Ref.TMapI [ (toRefTerm x, toRefTerm y)
| (x,y) <- ts ]
toRefTerm (TTagged w t) = Ref.TTagged (Ref.toUInt (fromIntegral w))
(toRefTerm t)
toRefTerm (TBool False) = Ref.TFalse
toRefTerm (TBool True) = Ref.TTrue
toRefTerm TNull = Ref.TNull
toRefTerm (TSimple 23) = Ref.TUndef
toRefTerm (TSimple w) = Ref.TSimple (Ref.toSimple w)
toRefTerm (THalf f) = if isNaN f
then Ref.TFloat16 canonicalNaN
else Ref.TFloat16 (HalfSpecials (Half.toHalf f))
toRefTerm (TFloat f) = if isNaN f
then Ref.TFloat16 canonicalNaN
else Ref.TFloat32 (FloatSpecials f)
toRefTerm (TDouble f) = if isNaN f
then Ref.TFloat16 canonicalNaN
else Ref.TFloat64 (DoubleSpecials f)
fromRefTerm :: Ref.Term -> Term
fromRefTerm (Ref.TUInt u)
| n <= fromIntegral (maxBound :: Int) = TInt (fromIntegral n)
| otherwise = TInteger (fromIntegral n)
where n = Ref.fromUInt u
fromRefTerm (Ref.TNInt u)
| n <= fromIntegral (maxBound :: Int) = TInt (-1 - fromIntegral n)
| otherwise = TInteger (-1 - fromIntegral n)
where n = Ref.fromUInt u
fromRefTerm (Ref.TBigInt n) = TInteger n
fromRefTerm (Ref.TBytes bs) = TBytes (BS.pack bs)
fromRefTerm (Ref.TBytess bs) = TBytesI (LBS.fromChunks (map BS.pack bs))
fromRefTerm (Ref.TString st) = TString (T.pack st)
fromRefTerm (Ref.TStrings st) = TStringI (LT.fromChunks (map T.pack st))
fromRefTerm (Ref.TArray ts) = TList (map fromRefTerm ts)
fromRefTerm (Ref.TArrayI ts) = TListI (map fromRefTerm ts)
fromRefTerm (Ref.TMap ts) = TMap [ (fromRefTerm x, fromRefTerm y)
| (x,y) <- ts ]
fromRefTerm (Ref.TMapI ts) = TMapI [ (fromRefTerm x, fromRefTerm y)
| (x,y) <- ts ]
fromRefTerm (Ref.TTagged w t) = TTagged (Ref.fromUInt w)
(fromRefTerm t)
fromRefTerm (Ref.TFalse) = TBool False
fromRefTerm (Ref.TTrue) = TBool True
fromRefTerm Ref.TNull = TNull
fromRefTerm Ref.TUndef = TSimple 23
fromRefTerm (Ref.TSimple w) = TSimple (Ref.fromSimple w)
fromRefTerm (Ref.TFloat16 f) = THalf (Half.fromHalf (getHalfSpecials f))
fromRefTerm (Ref.TFloat32 f) = TFloat (getFloatSpecials f)
fromRefTerm (Ref.TFloat64 f) = TDouble (getDoubleSpecials f)
NaNs , and different NaNs do not compare equal . If you need equality
modulo different NaNs then use ' canonicaliseTerm ' .
If you need equality modulo different representations of ' TInt ' vs ' TInteger '
eqTerm :: Term -> Term -> Bool
eqTerm (TList ts) (TList ts') = and (zipWith eqTerm ts ts')
eqTerm (TListI ts) (TListI ts') = and (zipWith eqTerm ts ts')
eqTerm (TMap ts) (TMap ts') = and (zipWith eqTermPair ts ts')
eqTerm (TMapI ts) (TMapI ts') = and (zipWith eqTermPair ts ts')
eqTerm (TTagged w t) (TTagged w' t') = w == w' && eqTerm t t'
eqTerm (THalf f) (THalf f') = floatToWord f == floatToWord f'
eqTerm (TFloat f) (TFloat f') = floatToWord f == floatToWord f'
eqTerm (TDouble f) (TDouble f') = doubleToWord f == doubleToWord f'
eqTerm a b = a == b
eqTermPair :: (Term, Term) -> (Term, Term) -> Bool
eqTermPair (a,b) (a',b') = eqTerm a a' && eqTerm b b'
| Both ' toRefTerm ' and the encoding \/ decoding round trip canonicalises
NaNs . So tests involving these often need this in combination with
canonicaliseTerm :: Term -> Term
canonicaliseTerm (THalf f) | isNaN f = canonicalTermNaN
canonicaliseTerm (TFloat f) | isNaN f = canonicalTermNaN
canonicaliseTerm (TDouble f) | isNaN f = canonicalTermNaN
canonicaliseTerm (TInteger n) | n <= fromIntegral (maxBound :: Int)
, n >= fromIntegral (minBound :: Int)
= TInt (fromIntegral n)
canonicaliseTerm (TList ts) = TList (map canonicaliseTerm ts)
canonicaliseTerm (TListI ts) = TListI (map canonicaliseTerm ts)
canonicaliseTerm (TMap ts) = TMap (map canonicaliseTermPair ts)
canonicaliseTerm (TMapI ts) = TMapI (map canonicaliseTermPair ts)
canonicaliseTerm (TTagged tag t) = TTagged tag (canonicaliseTerm t)
canonicaliseTerm t = t
canonicalTermNaN :: Term
canonicalTermNaN = THalf canonicalNaN
canonicaliseTermPair :: (Term, Term) -> (Term, Term)
canonicaliseTermPair (a,b) =
(canonicaliseTerm a, canonicaliseTerm b)
prop_fromToRefTerm :: Ref.Term -> Bool
prop_fromToRefTerm term = toRefTerm (fromRefTerm term)
== Ref.canonicaliseTerm term
prop_toFromRefTerm :: Term -> Bool
prop_toFromRefTerm term = fromRefTerm (toRefTerm term)
`eqTerm` canonicaliseTerm term
instance Arbitrary Term where
arbitrary = fromRefTerm <$> arbitrary
shrink (TInt n) = [ TInt n' | n' <- shrink n ]
shrink (TInteger n) = [ TInteger n' | n' <- shrink n ]
shrink (TBytes ws) = [ TBytes (BS.pack ws') | ws' <- shrink (BS.unpack ws) ]
shrink (TBytesI wss) = [ TBytesI (LBS.fromChunks (map BS.pack wss'))
| wss' <- shrink (map BS.unpack (LBS.toChunks wss)) ]
shrink (TString cs) = [ TString (T.pack cs') | cs' <- shrink (T.unpack cs) ]
shrink (TStringI css) = [ TStringI (LT.fromChunks (map T.pack css'))
| css' <- shrink (map T.unpack (LT.toChunks css)) ]
shrink (TList xs@[x]) = x : [ TList xs' | xs' <- shrink xs ]
shrink (TList xs) = [ TList xs' | xs' <- shrink xs ]
shrink (TListI xs@[x]) = x : [ TListI xs' | xs' <- shrink xs ]
shrink (TListI xs) = [ TListI xs' | xs' <- shrink xs ]
shrink (TMap xys@[(x,y)]) = x : y : [ TMap xys' | xys' <- shrink xys ]
shrink (TMap xys) = [ TMap xys' | xys' <- shrink xys ]
shrink (TMapI xys@[(x,y)]) = x : y : [ TMapI xys' | xys' <- shrink xys ]
shrink (TMapI xys) = [ TMapI xys' | xys' <- shrink xys ]
shrink (TTagged w t) = t : [ TTagged w' t' | (w', t') <- shrink (w, t)
, not (Ref.reservedTag (fromIntegral w')) ]
shrink (TBool _) = []
shrink TNull = []
shrink (TSimple w) = [ TSimple w' | w' <- shrink w
, Ref.unassignedSimple w || w == 23 ]
shrink (THalf _f) = []
shrink (TFloat f) = [ TFloat f' | f' <- shrink f ]
shrink (TDouble f) = [ TDouble f' | f' <- shrink f ]
|
13cb1c9f67bed1087047f0b1f955f4fa2f57b7e286978a68ea16b18ccc8df693 | gotthardp/lorawan-server | lorawan_backend_sup.erl | %
Copyright ( c ) 2016 - 2019 < >
% All rights reserved.
Distributed under the terms of the MIT License . See the LICENSE file .
%
-module(lorawan_backend_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-spec start_link() -> {ok, pid()}.
start_link() ->
supervisor:start_link(?MODULE, []).
init([]) ->
{ok, {{one_for_all, 2, 10}, [
{connectors,
{lorawan_connector_sup, start_link, []},
permanent, infinity, supervisor, [lorawan_connector_sup]},
{factory,
{lorawan_backend_factory, start_link, []},
permanent, 5000, worker, [lorawan_backend_factory]},
{monitor,
{lorawan_connector_monitor, start_link, []},
permanent, 5000, worker, [lorawan_connector_monitor]}
]}}.
% end of file
| null | https://raw.githubusercontent.com/gotthardp/lorawan-server/13b1409b57e079499a633bca20a33eb5acdf05ee/src/lorawan_backend_sup.erl | erlang |
All rights reserved.
end of file | Copyright ( c ) 2016 - 2019 < >
Distributed under the terms of the MIT License . See the LICENSE file .
-module(lorawan_backend_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-spec start_link() -> {ok, pid()}.
start_link() ->
supervisor:start_link(?MODULE, []).
init([]) ->
{ok, {{one_for_all, 2, 10}, [
{connectors,
{lorawan_connector_sup, start_link, []},
permanent, infinity, supervisor, [lorawan_connector_sup]},
{factory,
{lorawan_backend_factory, start_link, []},
permanent, 5000, worker, [lorawan_backend_factory]},
{monitor,
{lorawan_connector_monitor, start_link, []},
permanent, 5000, worker, [lorawan_connector_monitor]}
]}}.
|
53424a9a9fb788d913b0e37d399b9154105ad0a93825c39447b5ffbd3795d37f | ocaml-multicore/ocaml-tsan | odoc_exception.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cambium , INRIA Paris
(* *)
Copyright 2022 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(** Representation and manipulation of exceptions. *)
(** This module has an implementation although it declares only types.
This is because other modules use the let module construct ot access it
so it is needed as a real module. *)
module Name = Odoc_name
type exception_alias = {
ea_name : Name.t;
mutable ea_ex : t_exception option;
}
and t_exception = {
ex_name : Name.t;
mutable ex_info : Odoc_types.info option;
ex_args : Odoc_type.constructor_args;
ex_ret : Types.type_expr option;
ex_alias : exception_alias option;
mutable ex_loc : Odoc_types.location;
mutable ex_code : string option;
}
| null | https://raw.githubusercontent.com/ocaml-multicore/ocaml-tsan/f54002470cc6ab780963cc81b11a85a820a40819/ocamldoc/odoc_exception.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Representation and manipulation of exceptions.
* This module has an implementation although it declares only types.
This is because other modules use the let module construct ot access it
so it is needed as a real module. | , projet Cambium , INRIA Paris
Copyright 2022 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
module Name = Odoc_name
type exception_alias = {
ea_name : Name.t;
mutable ea_ex : t_exception option;
}
and t_exception = {
ex_name : Name.t;
mutable ex_info : Odoc_types.info option;
ex_args : Odoc_type.constructor_args;
ex_ret : Types.type_expr option;
ex_alias : exception_alias option;
mutable ex_loc : Odoc_types.location;
mutable ex_code : string option;
}
|
fa9ae3e2a70581056a29ecc1b1e69098b253049cc388331f6986f9459094abb3 | uim/sigscheme | test-define.scm | ;; Filename : test-define.scm
About : unit test for R5RS ' define '
;;
Copyright ( C ) 2005 - 2006 < mover AT hct.zaq.ne.jp >
Copyright ( c ) 2007 - 2008 SigScheme Project < uim - en AT googlegroups.com >
;;
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions
;; are met:
;;
1 . Redistributions of source code must retain the above copyright
;; notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
;; notice, this list of conditions and the following disclaimer in the
;; documentation and/or other materials provided with the distribution.
3 . Neither the name of authors nor the names of its contributors
;; may be used to endorse or promote products derived from this software
;; without specific prior written permission.
;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS
;; IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
;; THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
;; PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
;; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR
;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
internal definitions in ' define ' are writtin at test-define-internal.scm
;; internal definitions in 'let' variants are writtin at test-let.scm
;; see also test-begin.scm for top-level definitions
(require-extension (unittest))
(define tn test-name)
(define *test-track-progress* #f)
(tn "define invalid form")
(assert-error (tn)
(lambda ()
(define)))
(assert-error (tn)
(lambda ()
(define . a)))
(assert-error (tn)
(lambda ()
(define a)))
(assert-error (tn)
(lambda ()
(define a . 2)))
(assert-error (tn)
(lambda ()
(define a 1 'excessive)))
(assert-error (tn)
(lambda ()
(define a 1 . 'excessive)))
;; <variable> is not a symbol
(assert-error (tn)
(lambda ()
(define 1)))
(assert-error (tn)
(lambda ()
(define 1 . 1)))
(assert-error (tn)
(lambda ()
(define 1 1)))
(assert-error (tn)
(lambda ()
(define #t 1)))
(assert-error (tn)
(lambda ()
(define #f 1)))
(assert-error (tn)
(lambda ()
(define 1 1 'excessive)))
(assert-error (tn)
(lambda ()
(define 1 1 . 'excessive)))
;; function forms
(assert-error (tn)
(lambda ()
(define ())))
(assert-error (tn)
(lambda ()
(define () 1)))
(assert-error (tn)
(lambda ()
(define (f))))
(assert-error (tn)
(lambda ()
(define (f) . 1)))
(assert-error (tn)
(lambda ()
(define (f) 1 . 1)))
(assert-error (tn)
(lambda ()
(define (f x))))
(assert-error (tn)
(lambda ()
(define (f x) . 1)))
(assert-error (tn)
(lambda ()
(define (f x) 1 . 1)))
(assert-error (tn)
(lambda ()
(define (f . x))))
(assert-error (tn)
(lambda ()
(define (f . x) . 1)))
(assert-error (tn)
(lambda ()
(define (f . x) 1 . 1)))
(tn "define syntactic keywords as value")
(assert-error (tn)
(lambda ()
(eval '(define syn define)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn if)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn and)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn cond)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn begin)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn do)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn delay)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn let*)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn else)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn =>)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn quote)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn quasiquote)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn unquote)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn unquote-splicing)
(interaction-environment))))
(tn "define syntactic keywords as value internally")
(assert-error (tn)
(lambda ()
(let ()
(define syn define)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn if)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn and)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn cond)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn begin)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn do)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn delay)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn let*)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn else)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn =>)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn quote)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn quasiquote)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn unquote)
#t)))
(assert-error (tn)
(lambda ()
(let ()
(define syn unquote-splicing)
#t)))
(tn "define syntactic keyword internally")
(assert-equal? (tn) 7 ((lambda () (define else 7) else)))
(assert-equal? (tn) 8 ((lambda () (define => 8) =>)))
(assert-equal? (tn) 9 ((lambda () (define do 9) do)))
(assert-error (tn) (lambda () else))
(assert-error (tn) (lambda () =>))
(assert-error (tn) (lambda () do))
(tn "define syntactic keyword as top-level variable")
(define else 3)
(assert-equal? (tn) 3 else)
(define => 4)
(assert-equal? (tn) 4 =>)
(define do 5)
(assert-equal? (tn) 5 do)
(if (and sigscheme?
(provided? "strict-argcheck"))
(begin
(tn "define function form: boolean as an arg")
(assert-error (tn) (lambda () (define (f . #t) #t)))
(assert-error (tn) (lambda () (define (f #t) #t)))
(assert-error (tn) (lambda () (define (f x #t) #t)))
(assert-error (tn) (lambda () (define (f #t x) #t)))
(assert-error (tn) (lambda () (define (f x . #t) #t)))
(assert-error (tn) (lambda () (define (f #t . x) #t)))
(assert-error (tn) (lambda () (define (f x y #t) #t)))
(assert-error (tn) (lambda () (define (f x y . #t) #t)))
(assert-error (tn) (lambda () (define (f x #t y) #t)))
(assert-error (tn) (lambda () (define (f x #t . y) #t)))
(tn "define function form: intger as an arg")
(assert-error (tn) (lambda () (define (f . 1) #t)))
(assert-error (tn) (lambda () (define (f 1) #t)))
(assert-error (tn) (lambda () (define (f x 1) #t)))
(assert-error (tn) (lambda () (define (f 1 x) #t)))
(assert-error (tn) (lambda () (define (f x . 1) #t)))
(assert-error (tn) (lambda () (define (f 1 . x) #t)))
(assert-error (tn) (lambda () (define (f x y 1) #t)))
(assert-error (tn) (lambda () (define (f x y . 1) #t)))
(assert-error (tn) (lambda () (define (f x 1 y) #t)))
(assert-error (tn) (lambda () (define (f x 1 . y) #t)))
(tn "define function form: null as an arg")
(assert-true (tn) (define (f . ()) #t))
(assert-error (tn) (lambda () (define (f ()) #t)))
(assert-error (tn) (lambda () (define (f x ()) #t)))
(assert-error (tn) (lambda () (define (f () x) #t)))
(assert-true (tn) (define (f x . ()) #t))
(assert-error (tn) (lambda () (define (f () . x) #t)))
(assert-error (tn) (lambda () (define (f x y ()) #t)))
(assert-true (tn) (define (f x y . ()) #t))
(assert-error (tn) (lambda () (define (f x () y) #t)))
(assert-error (tn) (lambda () (define (f x () . y) #t)))
(tn "define function form: pair as an arg")
(assert-true (tn) (define (f . (a)) #t))
(assert-error (tn) (lambda () (define (f (a)) #t)))
(assert-error (tn) (lambda () (define (f x (a)) #t)))
(assert-error (tn) (lambda () (define (f (a) x) #t)))
(assert-true (tn) (define (f x . (a)) #t))
(assert-error (tn) (lambda () (define (f (a) . x) #t)))
(assert-error (tn) (lambda () (define (f x y (a)) #t)))
(assert-true (tn) (define (f x y . (a)) #t))
(assert-error (tn) (lambda () (define (f x (a) y) #t)))
(assert-error (tn) (lambda () (define (f x (a) . y) #t)))
(tn "define function form: char as an arg")
(assert-error (tn) (lambda () (define (f . #\a) #t)))
(assert-error (tn) (lambda () (define (f #\a) #t)))
(assert-error (tn) (lambda () (define (f x #\a) #t)))
(assert-error (tn) (lambda () (define (f #\a x) #t)))
(assert-error (tn) (lambda () (define (f x . #\a) #t)))
(assert-error (tn) (lambda () (define (f #\a . x) #t)))
(assert-error (tn) (lambda () (define (f x y #\a) #t)))
(assert-error (tn) (lambda () (define (f x y . #\a) #t)))
(assert-error (tn) (lambda () (define (f x #\a y) #t)))
(assert-error (tn) (lambda () (define (f x #\a . y) #t)))
(tn "define function form: string as an arg")
(assert-error (tn) (lambda () (define (f . "a") #t)))
(assert-error (tn) (lambda () (define (f "a") #t)))
(assert-error (tn) (lambda () (define (f x "a") #t)))
(assert-error (tn) (lambda () (define (f "a" x) #t)))
(assert-error (tn) (lambda () (define (f x . "a") #t)))
(assert-error (tn) (lambda () (define (f "a" . x) #t)))
(assert-error (tn) (lambda () (define (f x y "a") #t)))
(assert-error (tn) (lambda () (define (f x y . "a") #t)))
(assert-error (tn) (lambda () (define (f x "a" y) #t)))
(assert-error (tn) (lambda () (define (f x "a" . y) #t)))
(tn "define function form: vector as an arg")
(assert-error (tn) (lambda () (define (f . #(a)) #t)))
(assert-error (tn) (lambda () (define (f #(a)) #t)))
(assert-error (tn) (lambda () (define (f x #(a)) #t)))
(assert-error (tn) (lambda () (define (f #(a) x) #t)))
(assert-error (tn) (lambda () (define (f x . #(a)) #t)))
(assert-error (tn) (lambda () (define (f #(a) . x) #t)))
(assert-error (tn) (lambda () (define (f x y #(a)) #t)))
(assert-error (tn) (lambda () (define (f x y . #(a)) #t)))
(assert-error (tn) (lambda () (define (f x #(a) y) #t)))
(assert-error (tn) (lambda () (define (f x #(a) . y) #t)))))
(tn "top-level definition invalid forms")
;; top-level define cannot be placed under a non-begin structure.
;; See also test-begin.scm for top-level definitions.
(if (provided? "strict-toplevel-definitions")
(begin
(assert-error (tn)
(lambda ()
(eval '(if #t (define var0 1))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(if #f #t (define var0 1))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(and (define var0 1))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(or (define var0 1))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(cond (#t (define var0 1)))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(cond (else (define var0 1)))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(case 'key ((key) (define var0 1)))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(case 'key (else (define var0 1)))
(interaction-environment))))
(tn "ttt")
;; test being evaled at non-tail part of <sequence>
(assert-error (tn)
(lambda ()
(eval '(and (define var0 1) #t)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(or (define var0 1) #t)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(cond (#t (define var0 1) #t))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(cond (else (define var0 1) #t))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(case 'key ((key) (define var0 1) #t))
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(case 'key (else (define var0 1) #t))
(interaction-environment))))))
; basic define
(define val1 3)
(assert-equal? "basic define check" 3 val1)
; redefine
(define val1 5)
(assert-equal? "redefine check" 5 val1)
; define lambda
(define (what? x)
"DEADBEEF" x)
(assert-equal? "func define" 10 (what? 10))
(define what2?
(lambda (x)
"DEADBEEF" x))
(assert-equal? "func define" 10 (what2? 10))
(define (nullarg)
"nullarg")
(assert-equal? "nullarg test" "nullarg" (nullarg))
(define (add x y)
(+ x y))
(assert-equal? "func define" 10 (add 2 8))
; tests for dot list arguments
(define (dotarg1 . a)
a)
(assert-equal? "dot arg test 1" '(1 2) (dotarg1 1 2))
(define (dotarg2 a . b)
a)
(assert-equal? "dot arg test 2" 1 (dotarg2 1 2))
(define (dotarg3 a . b)
b)
(assert-equal? "dot arg test 3" '(2) (dotarg3 1 2))
(assert-equal? "dot arg test 4" '(2 3) (dotarg3 1 2 3))
(define (dotarg4 a b . c)
b)
(assert-equal? "dot arg test 5" 2 (dotarg4 1 2 3))
(define (dotarg5 a b . c)
c)
(assert-equal? "dot arg test 6" '(3 4) (dotarg5 1 2 3 4))
; set!
(define (set-dot a . b)
(set! b '(1 2))
b)
(assert-equal? "set dot test" '(1 2) (set-dot '()))
; test for internal define
; more comprehensive tests are written at test-define-internal.scm
(define (idefine-o a)
(define (idefine-i c)
(+ c 3))
(idefine-i a))
(assert-equal? "internal define1" 5 (idefine-o 2))
(define (idefine0 a)
(define (idefine1 . args)
(apply + args))
(define (idefine2 c)
(+ c 2))
(+ (idefine1 1 2 3 4 5) (idefine2 a)))
(assert-equal? "internal define2" 17 (idefine0 0))
(total-report)
| null | https://raw.githubusercontent.com/uim/sigscheme/ccf1f92d6c2a0f45c15d93da82e399c2a78fe5f3/test/test-define.scm | scheme | Filename : test-define.scm
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
LOSS OF USE , DATA , OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
internal definitions in 'let' variants are writtin at test-let.scm
see also test-begin.scm for top-level definitions
<variable> is not a symbol
function forms
top-level define cannot be placed under a non-begin structure.
See also test-begin.scm for top-level definitions.
test being evaled at non-tail part of <sequence>
basic define
redefine
define lambda
tests for dot list arguments
set!
test for internal define
more comprehensive tests are written at test-define-internal.scm | About : unit test for R5RS ' define '
Copyright ( C ) 2005 - 2006 < mover AT hct.zaq.ne.jp >
Copyright ( c ) 2007 - 2008 SigScheme Project < uim - en AT googlegroups.com >
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of authors nor the names of its contributors
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS
EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
internal definitions in ' define ' are writtin at test-define-internal.scm
(require-extension (unittest))
(define tn test-name)
(define *test-track-progress* #f)
(tn "define invalid form")
(assert-error (tn)
(lambda ()
(define)))
(assert-error (tn)
(lambda ()
(define . a)))
(assert-error (tn)
(lambda ()
(define a)))
(assert-error (tn)
(lambda ()
(define a . 2)))
(assert-error (tn)
(lambda ()
(define a 1 'excessive)))
(assert-error (tn)
(lambda ()
(define a 1 . 'excessive)))
(assert-error (tn)
(lambda ()
(define 1)))
(assert-error (tn)
(lambda ()
(define 1 . 1)))
(assert-error (tn)
(lambda ()
(define 1 1)))
(assert-error (tn)
(lambda ()
(define #t 1)))
(assert-error (tn)
(lambda ()
(define #f 1)))
(assert-error (tn)
(lambda ()
(define 1 1 'excessive)))
(assert-error (tn)
(lambda ()
(define 1 1 . 'excessive)))
(assert-error (tn)
(lambda ()
(define ())))
(assert-error (tn)
(lambda ()
(define () 1)))
(assert-error (tn)
(lambda ()
(define (f))))
(assert-error (tn)
(lambda ()
(define (f) . 1)))
(assert-error (tn)
(lambda ()
(define (f) 1 . 1)))
(assert-error (tn)
(lambda ()
(define (f x))))
(assert-error (tn)
(lambda ()
(define (f x) . 1)))
(assert-error (tn)
(lambda ()
(define (f x) 1 . 1)))
(assert-error (tn)
(lambda ()
(define (f . x))))
(assert-error (tn)
(lambda ()
(define (f . x) . 1)))
(assert-error (tn)
(lambda ()
(define (f . x) 1 . 1)))
(tn "define syntactic keywords as value")
(assert-error (tn)
(lambda ()
(eval '(define syn define)
(interaction-environment))))
(assert-error (tn)
(lambda ()
(eval '(define syn if)
(interaction-environment))))
;; R5RS syntactic keywords (`and', `cond', `else', ...) must not be
;; redefinable as top-level variables through eval.
(assert-error (tn)
              (lambda ()
                (eval '(define syn and)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn cond)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn begin)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn do)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn delay)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn let*)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn else)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn =>)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn quote)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn quasiquote)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn unquote)
                      (interaction-environment))))
(assert-error (tn)
              (lambda ()
                (eval '(define syn unquote-splicing)
                      (interaction-environment))))
(tn "define syntactic keywords as value internally")
;; The same keywords must not appear as the value expression of an
;; internal (body-level) define either.
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn define)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn if)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn and)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn cond)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn begin)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn do)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn delay)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn let*)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn else)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn =>)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn quote)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn quasiquote)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn unquote)
                  #t)))
(assert-error (tn)
              (lambda ()
                (let ()
                  (define syn unquote-splicing)
                  #t)))
(tn "define syntactic keyword internally")
;; Internal defines MAY shadow `else', `=>' and `do' as ordinary variables;
;; the binding is local, so the names stay unbound at top level afterwards.
(assert-equal? (tn) 7 ((lambda () (define else 7) else)))
(assert-equal? (tn) 8 ((lambda () (define => 8) =>)))
(assert-equal? (tn) 9 ((lambda () (define do 9) do)))
(assert-error (tn) (lambda () else))
(assert-error (tn) (lambda () =>))
(assert-error (tn) (lambda () do))
(tn "define syntactic keyword as top-level variable")
(define else 3)
(assert-equal? (tn) 3 else)
(define => 4)
(assert-equal? (tn) 4 =>)
(define do 5)
(assert-equal? (tn) 5 do)
;; With the "strict-argcheck" build option, a formal parameter of a
;; function-form define must be an identifier; any self-evaluating datum
;; (boolean, integer, char, string, vector) or a list in a required-argument
;; position is an error.  Note: `(f . (a))' and `(f x . (a))' read the same
;; as `(f a)' / `(f x a)', hence the assert-true cases.
(if (and sigscheme?
         (provided? "strict-argcheck"))
    (begin
      (tn "define function form: boolean as an arg")
      (assert-error (tn) (lambda () (define (f . #t) #t)))
      (assert-error (tn) (lambda () (define (f #t) #t)))
      (assert-error (tn) (lambda () (define (f x #t) #t)))
      (assert-error (tn) (lambda () (define (f #t x) #t)))
      (assert-error (tn) (lambda () (define (f x . #t) #t)))
      (assert-error (tn) (lambda () (define (f #t . x) #t)))
      (assert-error (tn) (lambda () (define (f x y #t) #t)))
      (assert-error (tn) (lambda () (define (f x y . #t) #t)))
      (assert-error (tn) (lambda () (define (f x #t y) #t)))
      (assert-error (tn) (lambda () (define (f x #t . y) #t)))
      (tn "define function form: intger as an arg")
      (assert-error (tn) (lambda () (define (f . 1) #t)))
      (assert-error (tn) (lambda () (define (f 1) #t)))
      (assert-error (tn) (lambda () (define (f x 1) #t)))
      (assert-error (tn) (lambda () (define (f 1 x) #t)))
      (assert-error (tn) (lambda () (define (f x . 1) #t)))
      (assert-error (tn) (lambda () (define (f 1 . x) #t)))
      (assert-error (tn) (lambda () (define (f x y 1) #t)))
      (assert-error (tn) (lambda () (define (f x y . 1) #t)))
      (assert-error (tn) (lambda () (define (f x 1 y) #t)))
      (assert-error (tn) (lambda () (define (f x 1 . y) #t)))
      (tn "define function form: null as an arg")
      (assert-true (tn) (define (f . ()) #t))
      (assert-error (tn) (lambda () (define (f ()) #t)))
      (assert-error (tn) (lambda () (define (f x ()) #t)))
      (assert-error (tn) (lambda () (define (f () x) #t)))
      (assert-true (tn) (define (f x . ()) #t))
      (assert-error (tn) (lambda () (define (f () . x) #t)))
      (assert-error (tn) (lambda () (define (f x y ()) #t)))
      (assert-true (tn) (define (f x y . ()) #t))
      (assert-error (tn) (lambda () (define (f x () y) #t)))
      (assert-error (tn) (lambda () (define (f x () . y) #t)))
      (tn "define function form: pair as an arg")
      (assert-true (tn) (define (f . (a)) #t))
      (assert-error (tn) (lambda () (define (f (a)) #t)))
      (assert-error (tn) (lambda () (define (f x (a)) #t)))
      (assert-error (tn) (lambda () (define (f (a) x) #t)))
      (assert-true (tn) (define (f x . (a)) #t))
      (assert-error (tn) (lambda () (define (f (a) . x) #t)))
      (assert-error (tn) (lambda () (define (f x y (a)) #t)))
      (assert-true (tn) (define (f x y . (a)) #t))
      (assert-error (tn) (lambda () (define (f x (a) y) #t)))
      (assert-error (tn) (lambda () (define (f x (a) . y) #t)))
      (tn "define function form: char as an arg")
      (assert-error (tn) (lambda () (define (f . #\a) #t)))
      (assert-error (tn) (lambda () (define (f #\a) #t)))
      (assert-error (tn) (lambda () (define (f x #\a) #t)))
      (assert-error (tn) (lambda () (define (f #\a x) #t)))
      (assert-error (tn) (lambda () (define (f x . #\a) #t)))
      (assert-error (tn) (lambda () (define (f #\a . x) #t)))
      (assert-error (tn) (lambda () (define (f x y #\a) #t)))
      (assert-error (tn) (lambda () (define (f x y . #\a) #t)))
      (assert-error (tn) (lambda () (define (f x #\a y) #t)))
      (assert-error (tn) (lambda () (define (f x #\a . y) #t)))
      (tn "define function form: string as an arg")
      (assert-error (tn) (lambda () (define (f . "a") #t)))
      (assert-error (tn) (lambda () (define (f "a") #t)))
      (assert-error (tn) (lambda () (define (f x "a") #t)))
      (assert-error (tn) (lambda () (define (f "a" x) #t)))
      (assert-error (tn) (lambda () (define (f x . "a") #t)))
      (assert-error (tn) (lambda () (define (f "a" . x) #t)))
      (assert-error (tn) (lambda () (define (f x y "a") #t)))
      (assert-error (tn) (lambda () (define (f x y . "a") #t)))
      (assert-error (tn) (lambda () (define (f x "a" y) #t)))
      (assert-error (tn) (lambda () (define (f x "a" . y) #t)))
      (tn "define function form: vector as an arg")
      (assert-error (tn) (lambda () (define (f . #(a)) #t)))
      (assert-error (tn) (lambda () (define (f #(a)) #t)))
      (assert-error (tn) (lambda () (define (f x #(a)) #t)))
      (assert-error (tn) (lambda () (define (f #(a) x) #t)))
      (assert-error (tn) (lambda () (define (f x . #(a)) #t)))
      (assert-error (tn) (lambda () (define (f #(a) . x) #t)))
      (assert-error (tn) (lambda () (define (f x y #(a)) #t)))
      (assert-error (tn) (lambda () (define (f x y . #(a)) #t)))
      (assert-error (tn) (lambda () (define (f x #(a) y) #t)))
      (assert-error (tn) (lambda () (define (f x #(a) . y) #t)))))
(tn "top-level definition invalid forms")
;; With "strict-toplevel-definitions", a define may only appear at the
;; top level or at the head of a body -- never inside an expression
;; context such as if/and/or/cond/case.
(if (provided? "strict-toplevel-definitions")
    (begin
      (assert-error (tn)
                    (lambda ()
                      (eval '(if #t (define var0 1))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(if #f #t (define var0 1))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(and (define var0 1))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(or (define var0 1))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(cond (#t (define var0 1)))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(cond (else (define var0 1)))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(case 'key ((key) (define var0 1)))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(case 'key (else (define var0 1)))
                            (interaction-environment))))
      (tn "ttt")
      (assert-error (tn)
                    (lambda ()
                      (eval '(and (define var0 1) #t)
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(or (define var0 1) #t)
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(cond (#t (define var0 1) #t))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(cond (else (define var0 1) #t))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(case 'key ((key) (define var0 1) #t))
                            (interaction-environment))))
      (assert-error (tn)
                    (lambda ()
                      (eval '(case 'key (else (define var0 1) #t))
                            (interaction-environment))))))
;; Basic top-level define and redefinition.
(define val1 3)
(assert-equal? "basic define check" 3 val1)
(define val1 5)
(assert-equal? "redefine check" 5 val1)
;; Function-form define; the leading "DEADBEEF" literal is a discarded
;; body expression (doubles as a docstring-position datum).
(define (what? x)
  "DEADBEEF" x)
(assert-equal? "func define" 10 (what? 10))
(define what2?
  (lambda (x)
    "DEADBEEF" x))
(assert-equal? "func define" 10 (what2? 10))
(define (nullarg)
  "nullarg")
(assert-equal? "nullarg test" "nullarg" (nullarg))
(define (add x y)
  (+ x y))
(assert-equal? "func define" 10 (add 2 8))
;; Dotted (rest-argument) formals.
(define (dotarg1 . a)
  a)
(assert-equal? "dot arg test 1" '(1 2) (dotarg1 1 2))
(define (dotarg2 a . b)
  a)
(assert-equal? "dot arg test 2" 1 (dotarg2 1 2))
(define (dotarg3 a . b)
  b)
(assert-equal? "dot arg test 3" '(2) (dotarg3 1 2))
(assert-equal? "dot arg test 4" '(2 3) (dotarg3 1 2 3))
(define (dotarg4 a b . c)
  b)
(assert-equal? "dot arg test 5" 2 (dotarg4 1 2 3))
(define (dotarg5 a b . c)
  c)
(assert-equal? "dot arg test 6" '(3 4) (dotarg5 1 2 3 4))
;; A rest argument is an ordinary mutable binding.
(define (set-dot a . b)
  (set! b '(1 2))
  b)
(assert-equal? "set dot test" '(1 2) (set-dot '()))
;; Internal defines at the head of a body.
(define (idefine-o a)
  (define (idefine-i c)
    (+ c 3))
  (idefine-i a))
(assert-equal? "internal define1" 5 (idefine-o 2))
(define (idefine0 a)
  (define (idefine1 . args)
    (apply + args))
  (define (idefine2 c)
    (+ c 2))
  (+ (idefine1 1 2 3 4 5) (idefine2 a)))
(assert-equal? "internal define2" 17 (idefine0 0))
(total-report)
|
cff23e62ceeda7dd96f9187a77a4933bd004f6c6aee47362e50fdb09c5f781ba | openmusic-project/OMChroma | stereo-2.lisp | (in-package :om)
;;;===================================
;;; SPAT CLASSES
;;;===================================
;; OMChroma Csound event class for the STEREO-2 orchestra: a 2-channel
;; output with one independent amplitude slot per channel (see the
;; :documentation string below for the p-field layout).
(defclass! stereo-2
  (cs-spat-evt)                         ; inherits the generic spat-event protocol
  (
   ;; Class-allocated slots, shared by all instances: orchestra source,
   ;; channel count and orchestra header data loaded from "stereo-2".
   (source-code :initform
                (load-buffer-textfile
                 (get-orc-source (get-orc "stereo-2"))
                 'textfile "append")
                :allocation :class :type textfile :accessor source-code)
   (numchan :initform (or (get-orc-channels (get-orc "stereo-2")) 2) :allocation :class :accessor numchan)
   (globals-list :initform (get-orc-globals (get-orc "stereo-2")) :allocation :class :type list :accessor globals-list)
   (macros-list :initform nil :allocation :class :type list :accessor macros-list)
   (orc-header :initform nil :allocation :class :type list :accessor orc-header)
   (InstID :initform 1 :allocation :class :accessor InstID)
   ;; Per-instance slots: the input sound file and the two channel levels.
   ( afil :type t
          :initarg :afil
          :initform nil
          :accessor afil)
   ( Channel1 :type number
              :initarg :Channel1
              :initform 1.0
              :accessor Channel1)
   ( Channel2 :type number
              :initarg :Channel2
              :initform 0.0
              :accessor Channel2)
   )
  (:documentation "
;=============================================================================
; STEREO-2.ORC
INDEPENDENT PANNING , 1 SLOT / CHANNEL
;-----------------------------------------------------------------------------
; p1 = instrument number
p2 = action time [ sec ]
p3 = duration [ sec ]
; p4 = input (file)
p5 = Ch 1 level [ 0 - 1 ]
p6 = Ch 2 level [ 0 - 1 ]
; AFIL = filename
CHANNEL1 = amplitude [ 0 - 1 , 1 ]
; CHANNEL2 = amplitude [0-1, 0]
")
  (:icon 3001)
  )
| null | https://raw.githubusercontent.com/openmusic-project/OMChroma/5ded34f22b59a1a93ea7b87e182c9dbdfa95e047/sources/om6/cs-events/csound/classes/Panning/stereo-2.lisp | lisp | ===================================
===================================
=============================================================================
STEREO-2.ORC
-----------------------------------------------------------------------------
p1 = instrument number
p4 = input (file)
AFIL = filename
CHANNEL2 = amplitude [0-1, 0] | (in-package :om)
SPAT CLASSES
(defclass! stereo-2
(cs-spat-evt)
(
(source-code :initform
(load-buffer-textfile
(get-orc-source (get-orc "stereo-2"))
'textfile "append")
:allocation :class :type textfile :accessor source-code)
(numchan :initform (or (get-orc-channels (get-orc "stereo-2")) 2) :allocation :class :accessor numchan)
(globals-list :initform (get-orc-globals (get-orc "stereo-2")) :allocation :class :type list :accessor globals-list)
(macros-list :initform nil :allocation :class :type list :accessor macros-list)
(orc-header :initform nil :allocation :class :type list :accessor orc-header)
(InstID :initform 1 :allocation :class :accessor InstID)
( afil :type t
:initarg :afil
:initform nil
:accessor afil)
( Channel1 :type number
:initarg :Channel1
:initform 1.0
:accessor Channel1)
( Channel2 :type number
:initarg :Channel2
:initform 0.0
:accessor Channel2)
)
(:documentation "
INDEPENDENT PANNING , 1 SLOT / CHANNEL
p2 = action time [ sec ]
p3 = duration [ sec ]
p5 = Ch 1 level [ 0 - 1 ]
p6 = Ch 2 level [ 0 - 1 ]
CHANNEL1 = amplitude [ 0 - 1 , 1 ]
")
(:icon 3001)
)
|
b95367988614702877ddbf6ec0f0e072cf1c8134e0bd7514f4cc1fa3c280f073 | input-output-hk/cardano-wallet | DBVar.hs | {-# OPTIONS_GHC -Wno-redundant-constraints#-}
-- We intentionally specify more constraints than necessary for some exports.
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Data.DBVar (
-- * Synopsis
-- | 'DBVar' represents a mutable variable whose value is kept in memory,
-- but which is written to the hard drive on every update.
-- This provides a convenient interface for persisting
-- values across program runs.
-- For efficient updates, delta encodings are used, see "Data.Delta".
--
    -- 'Store' represents a storage facility to which the 'DBVar'
-- is written.
-- * DBVar
DBVar
, readDBVar, updateDBVar, modifyDBVar, modifyDBMaybe
, initDBVar, loadDBVar
-- * Store
, Store (..)
, newStore
, NotInitialized (..)
-- $EitherSomeException
, embedStore, pairStores
-- * Testing
, embedStore'
, updateLoad
, newCachedStore
) where
import Prelude
import Control.Applicative
( liftA2 )
import Control.Exception
( Exception, SomeException, toException )
import Control.Monad
( join )
import Control.Monad.Class.MonadSTM
( MonadSTM
, atomically
, modifyTVar'
, newTVarIO
, readTVar
, readTVarIO
, retry
, writeTVar
)
import Control.Monad.Class.MonadThrow
( MonadEvaluate
, MonadMask
, MonadThrow
, bracket
, evaluate
, finally
, mask
, throwIO
)
import Data.Delta
( Delta (..), Embedding, Embedding' (..), Machine (..), inject, project )
{-------------------------------------------------------------------------------
    DBVar
-------------------------------------------------------------------------------}
-- | A 'DBVar'@ m delta@ is a mutable reference to a value of type @a@.
-- The type @delta@ is a delta encoding for this value type @a@,
-- that is we have @a ~ @'Base'@ delta@.
--
-- The value is cached in memory, in weak head normal form (WHNF).
-- However, whenever the value is updated, a copy of will be written
-- to persistent storage like a file or database on the hard disk;
-- any particular storage is specified by the 'Store' type.
-- For efficient updates, the delta encoding @delta@ is used in the update.
--
-- Concurrency:
--
-- * Updates are atomic and will block other updates.
-- * Reads will /not/ be blocked during an update
--   (except for a small moment where the new value atomically
--   replaces the old one).
data DBVar m delta = DBVar
    { readDBVar_ :: m (Base delta)
      -- ^ Read the cached in-memory value.
    , modifyDBMaybe_ :: forall b. (Base delta -> (Maybe delta, b)) -> m b
      -- ^ Atomically inspect the value, maybe apply a delta, and return a result.
    }
-- | Read the current value of the 'DBVar'.
readDBVar :: (Delta da, a ~ Base da) => DBVar m da -> m a
readDBVar = readDBVar_
-- | Update the value of the 'DBVar' using a delta encoding.
--
-- The new value will be evaluated to weak head normal form.
updateDBVar :: (Delta da, Monad m) => DBVar m da -> da -> m ()
updateDBVar var delta = modifyDBMaybe var $ \_ -> (Just delta,())
-- | Modify the value in a 'DBVar'.
--
-- The new value will be evaluated to weak head normal form.
modifyDBVar
    :: (Delta da, Monad m, a ~ Base da)
    => DBVar m da -> (a -> (da, b)) -> m b
modifyDBVar var f = modifyDBMaybe var $ \a -> let (da,b) = f a in (Just da,b)
-- | Maybe modify the value in a 'DBVar'
-- (a 'Nothing' delta leaves the value and the 'Store' untouched).
--
-- If updated, the new value will be evaluated to weak head normal form.
modifyDBMaybe
    :: (Delta da, Monad m, a ~ Base da)
    => DBVar m da -> (a -> (Maybe da, b)) -> m b
modifyDBMaybe = modifyDBMaybe_
-- | Initialize a new 'DBVar' for a given 'Store'.
-- The initial value is written to the store before the variable is created.
initDBVar
    :: ( MonadSTM m, MonadThrow m, MonadEvaluate m, MonadMask m
       , Delta da, a ~ Base da
       )
    => Store m da -- ^ 'Store' for writing.
    -> a -- ^ Initial value.
    -> m (DBVar m da)
initDBVar store v = do
    writeS store v
    newWithCache (updateS store . Just) v
-- | Create a 'DBVar' by loading its value from an existing 'Store'
-- Throws an exception (the 'Left' returned by 'loadS') if the value
-- cannot be loaded.
loadDBVar
    :: ( MonadSTM m, MonadThrow m, MonadEvaluate m, MonadMask m
       , Delta da
       )
    => Store m da -- ^ 'Store' for writing and for reading the initial value.
    -> m (DBVar m da)
loadDBVar store =
    loadS store >>= \case
        Left e -> throwIO e
        Right a -> newWithCache (updateS store . Just) a
-- | Create 'DBVar' from an initial value and an update function
-- using a 'TVar' as in-memory cache.
--
-- Space: The value in the 'TVar' will be evaluated to weak head normal form.
--
-- Concurrency: The update function needs to be atomic even in the presence
-- of asynchronous exceptions.
newWithCache
    :: ( MonadSTM m, MonadThrow m, MonadMask m, MonadEvaluate m
       , Delta da, a ~ Base da
       )
    => (a -> da -> m ()) -> a -> m (DBVar m da)
newWithCache update a = do
    cache <- newTVarIO a
    locked <- newTVarIO False -- lock for updating the cache
    pure $ DBVar
        { readDBVar_ = readTVarIO cache
          -- reads bypass the lock; they only see fully-written values
        , modifyDBMaybe_ = \f -> do
            -- acquire the lock and read the current value in one transaction
            let before = atomically $ do
                    readTVar locked >>= \case
                        True -> retry
                        False -> do
                            writeTVar locked True
                            readTVar cache
                -- release the lock, even if the action failed
                after _ = atomically $ writeTVar locked False
                action old = do
                    let (mdelta, b) = f old
                    case mdelta of
                        Nothing -> pure ()
                        Just delta -> do
                            new <- evaluate $ apply delta old
                            mask $ \restore -> do
                                -- We mask asynchronous exceptions here
                                -- to ensure that the TVar will be updated
                                -- whenever @update@ succeeds without exception.
                                restore $ update old delta
                                atomically $ writeTVar cache new
                    pure b
            bracket before after action
        }
{-------------------------------------------------------------------------------
Store
-------------------------------------------------------------------------------}
{- |
A ' Store ' is a storage facility for Haskell values of type @a ~@'Base'@ da@.
Typical use cases are a file or a database on the hard disk .
A ' Store ' has many similarities with an ' Embedding ' .
The main difference is that storing value in a ' Store ' has side effects .
A ' Store ' is described by three action :
* ' writeS ' writes a value to the store .
* ' ' loads a value from the store .
* ' updateS ' uses a delta encoding of type @da@ to efficiently update
the store .
In order to avoid performing an expensive ' loadS ' operation ,
the action ' updateS ' expects the value described by the store
as an argument , but no check is performed whether the provided
value matches the contents of the store .
Also , not every store inspects this argument .
A ' Store ' is characterized by the following properties :
* The store _ _ need not contain _ _ a properly formatted _ _ value _ _ :
Loading a value from the store may fail , and this is why ' loadS '
has an ' Either ' result .
For example , if the ' Store ' represents
a file on disk , then the file may corrupted or in an incompatible
file format when first opened .
In such a case of failure , the result ' ( e : : is returned , where the exception gives more information
about the failure .
However , loading a value after writing it should always succeed ,
we have
> writeS s a > > loadS s = pure ( Right a )
* The store is _ _ redundant _ _ :
Two stores with different contents may describe
the same value of type @a@.
For example , two files with different whitespace
may describe the same JSON value .
In general , we have
> loadS s > > = either ( const $ pure ( ) ) ( writeS s ) ≠ pure ( )
* Updating a store _ _ commutes with ' apply ' _ _ :
We have
> updateS s a da > > = pure $ Right $ apply a da
However , since the store is redundant , we often have
> updateS s a da ≠ writeS s ( apply a da )
* _ _ Exceptions _ _ :
It is expected that the functions ' loadS ' , ' updateS ' , ' writeS '
do not throw synchronous exceptions . In the worst case ,
' loadS ' should return ' Left ' after reading or writing
to the store was unsuccessful .
* _ _ Concurrency _ _ :
It is expected that the functions ' updateS ' and ' writeS '
are /atomic/ : Either they succeed in updating / writing
the new value in its entirety , or the old value is kept .
In particular , we expect this even when one of these
functions receives an asynchronous exception and needs to abort
normal operation .
A 'Store' is a storage facility for Haskell values of type @a ~@'Base'@ da@.
Typical use cases are a file or a database on the hard disk.
A 'Store' has many similarities with an 'Embedding'.
The main difference is that storing value in a 'Store' has side effects.
A 'Store' is described by three action:
* 'writeS' writes a value to the store.
* 'loadS' loads a value from the store.
* 'updateS' uses a delta encoding of type @da@ to efficiently update
the store.
In order to avoid performing an expensive 'loadS' operation,
the action 'updateS' expects the value described by the store
as an argument, but no check is performed whether the provided
value matches the contents of the store.
Also, not every store inspects this argument.
A 'Store' is characterized by the following properties:
* The store __need not contain__ a properly formatted __value__:
Loading a value from the store may fail, and this is why 'loadS'
has an 'Either' result.
For example, if the 'Store' represents
a file on disk, then the file may corrupted or in an incompatible
file format when first opened.
In such a case of failure, the result 'Left'@ (e :: @'SomeException'@)@
is returned, where the exception @e@ gives more information
about the failure.
However, loading a value after writing it should always succeed,
we have
> writeS s a >> loadS s = pure (Right a)
* The store is __redundant__:
Two stores with different contents may describe
the same value of type @a@.
For example, two files with different whitespace
may describe the same JSON value.
In general, we have
> loadS s >>= either (const $ pure ()) (writeS s) ≠ pure ()
* Updating a store __commutes with 'apply'__:
We have
> updateS s a da >> loadS s = pure $ Right $ apply a da
However, since the store is redundant, we often have
> updateS s a da ≠ writeS s (apply a da)
* __Exceptions__:
It is expected that the functions 'loadS', 'updateS', 'writeS'
do not throw synchronous exceptions. In the worst case,
'loadS' should return 'Left' after reading or writing
to the store was unsuccessful.
* __Concurrency__:
It is expected that the functions 'updateS' and 'writeS'
are /atomic/: Either they succeed in updating / writing
the new value in its entirety, or the old value is kept.
In particular, we expect this even when one of these
functions receives an asynchronous exception and needs to abort
normal operation.
-}
data Store m da = Store
    { loadS :: m (Either SomeException (Base da))
      -- ^ Load the value from the store, or explain why it could not be loaded.
    , writeS :: Base da -> m ()
      -- ^ Write a value to the store in its entirety.
    , updateS
        :: Maybe (Base da) -- old value, for performance
        -> da -- delta to new value
        -> m () -- write new value
    }
{- HLINT ignore "Use readTVarIO" -}
-- | An in-memory 'Store' from a mutable variable ('TVar').
-- Useful for testing.
-- Starts out empty ('loadS' yields 'Left' 'NotInitialized' until written).
newStore :: (Delta da, MonadSTM m) => m (Store m da)
newStore = do
    ref <- newTVarIO $ Left $ toException NotInitialized
    pure $ Store
        { loadS = atomically $ readTVar ref
        , writeS = atomically . writeTVar ref . Right
        , updateS = \_ -> atomically . modifyTVar' ref . fmap . apply
        }
{- $EitherSomeException
NOTE : [ EitherSomeException ]
In this version of the library , the error case returned by ' ' and ' load '
is the general ' SomeException ' type , which is a disjoint sum of all possible
error types ( that is , members of the ' Exception ' class ) .
In a future version of this library , this may be replaced by a more specific
error type , but at the price of introducing a new type parameter in the
' Store ' type .
For now , I have opted to explore a region of the design space
where the number of type parameters is kept to a minimum .
I would argue that making errors visible on the type level is not as
useful as one might hope for , because in exchange for making the types noisier ,
the amount of type - safety we gain is very small .
Specifically , if we encounter an element of the ' SomeException ' type that
we did not expect , it is entirely ok to ' throw ' it .
For example , consider the following code :
@
let ea : : Either SomeException ( )
ea = [ .. ]
in
case ea of
Right _ - > " everything is ok "
Left e - > case fromException e of
Just ( AssertionFailed _ ) - > " bad things happened "
Nothing - > throw e
@
In this example , using the more specific type @ea : : Either AssertionFailed ( ) @
would have eliminated the need to handle the ' Nothing ' case .
But as we are dealing with exceptions , this case does have a default handler ,
and there is less need to exclude it at compile as opposed to , say ,
the case of an empty list .
NOTE: [EitherSomeException]
In this version of the library, the error case returned by 'loadS' and 'load'
is the general 'SomeException' type, which is a disjoint sum of all possible
error types (that is, members of the 'Exception' class).
In a future version of this library, this may be replaced by a more specific
error type, but at the price of introducing a new type parameter @e@ in the
'Store' type.
For now, I have opted to explore a region of the design space
where the number of type parameters is kept to a minimum.
I would argue that making errors visible on the type level is not as
useful as one might hope for, because in exchange for making the types noisier,
the amount of type-safety we gain is very small.
Specifically, if we encounter an element of the 'SomeException' type that
we did not expect, it is entirely ok to 'throw' it.
For example, consider the following code:
@
let ea :: Either SomeException ()
ea = [..]
in
case ea of
Right _ -> "everything is ok"
Left e -> case fromException e of
Just (AssertionFailed _) -> "bad things happened"
Nothing -> throw e
@
In this example, using the more specific type @ea :: Either AssertionFailed ()@
would have eliminated the need to handle the 'Nothing' case.
But as we are dealing with exceptions, this case does have a default handler,
and there is less need to exclude it at compile as opposed to, say,
the case of an empty list.
-}
-- | Failure that occurs when calling 'loadS' on a 'Store' that is empty.
data NotInitialized = NotInitialized deriving (Eq, Show)
instance Exception NotInitialized
-- | Add a caching layer to a 'Store'.
--
-- Access to the underlying 'Store' is enforced to be sequential,
-- but the cache can be accessed in parallel.
-- FIXME: There is still a small race condition where the cache
-- could be written twice before it is filled.
-- In general, think about restricting the monad `m`,
-- as the `Store` operations do not compose very well.
newCachedStore
    :: forall m da. (Delta da, MonadSTM m, MonadThrow m, MonadEvaluate m)
    => Store m da -> m (Store m da)
newCachedStore Store{loadS,writeS,updateS} = do
    -- Lock that puts loadS, writeS and updateS into sequence
    islocked <- newTVarIO False
    let withLock :: forall b. m b -> m b
        withLock action = do
            atomically $ readTVar islocked >>= \case
                True -> retry
                False -> writeTVar islocked True
            action `finally` atomically (writeTVar islocked False)
    -- Cache that need not be filled in the beginning
    cache <- newTVarIO (Nothing :: Maybe (Base da))
    let writeCache ma = writeTVar cache ma
    -- Load the value from the Store only if it is not cached and
    -- nobody else is writing to the store.
    let load :: m (Either SomeException (Base da))
        load = join $ atomically $ do
            ma <- readTVar cache
            case ma of
                Nothing -> readTVar islocked >>= \case
                    True -> retry -- somebody is writing
                    False -> pure $ withLock $ do
                        ea <- loadS
                        case ea of
                            Left e -> pure $ Left e
                            Right a -> do
                                atomically $ writeCache $ Just a
                                pure $ Right a
                Just a -> pure $ pure $ Right a
    pure $ Store
        { loadS = load
        , writeS = \a -> withLock $ do
            atomically $ writeCache (Just a)
            writeS a
        , updateS = updateLoad load throwIO $ \old delta -> withLock $ do
            new <- evaluate $ apply delta old
            atomically $ writeCache $ Just new
            updateS (Just old) delta
        }
-- | Obtain a 'Store' for one type @da@ from a 'Store' for another type @db@
-- via an 'Embedding' of the first type into the second type.
-- The 'Machine' created by the 'Embedding' is cached in a 'TVar'.
embedStore :: (MonadSTM m, MonadMask m, Delta da)
    => Embedding da db -> Store m db -> m (Store m da)
embedStore embed bstore = do
    -- For reasons of efficiency, we have to store the 'Machine'
    -- that is created within the 'Embedding'.
    machine <- newTVarIO Nothing
    let readMachine = readTVarIO machine
        writeMachine = atomically . writeTVar machine . Just
    -- Operations of the result 'Store'.
    let load = loadS bstore >>= \case
            Left e -> pure $ Left e
            Right b -> case project embed b of
                Left e -> pure $ Left e
                Right (a,mab) -> do
                    writeMachine mab
                    pure $ Right a
        write a = do
            let mab = inject embed a
            mask $ \restore -> do
                restore $ writeS bstore (state_ mab)
                writeMachine mab
        update = updateLoad load throwIO $ \a da -> do
            readMachine >>= \case
                Nothing -> do -- we were missing the initial write
                    write (apply da a)
                Just mab1 -> do -- advance the machine by one step
                    let (db, mab2) = step_ mab1 (a,da)
                    mask $ \restore -> do
                        restore $ updateS bstore (Just $ state_ mab2) db
                        writeMachine mab2
    pure $ Store {loadS=load,writeS=write,updateS=update}
-- | Obtain a 'Store' for one type @a1@ from a 'Store' for another type @a2@
-- via an 'Embedding'' of the first type into the second type.
--
-- Note: This function is exported for testing and documentation only,
-- use the more efficient 'embedStore' instead.
embedStore'
    :: (Monad m, MonadThrow m)
    => Embedding' da db -> Store m db -> Store m da
embedStore' Embedding'{load,write,update} Store{loadS,writeS,updateS} =
    let
        loadL = (load =<<) <$> loadS
        updateL = \ma da -> case ma of
            Just a -> loadS >>= \case
                Left _ -> pure ()
                Right b -> updateS (Just b) (update a b da)
            Nothing -> do
                -- no old value given: load it first, then retry the update
                ea <- loadL
                case ea of
                    Left e -> throwIO e
                    Right a -> updateL (Just a) da
    in Store
        { loadS = loadL
        , writeS = writeS . write
        , updateS = updateL
        }
-- | Combine two 'Stores' into a store for pairs.
--
-- WARNING: The 'updateS' and 'writeS' functions of the result are not atomic
-- in the presence of asynchronous exceptions.
-- For example, the update of the first store may succeed while the update of
-- the second store may fail.
-- In other words, this combinator works for some monads, such as @m = @'STM',
-- but fails for others, such as @m = 'IO'@.
pairStores :: Monad m => Store m da -> Store m db -> Store m (da, db)
pairStores sa sb = Store
    { loadS = liftA2 (,) <$> loadS sa <*> loadS sb
    , writeS = \(a,b) -> writeS sa a >> writeS sb b
    , updateS = \mi (da,db) ->
        case mi of
            Nothing -> updateS sa Nothing da >> updateS sb Nothing db
            Just (a,b) -> updateS sa (Just a) da >> updateS sb (Just b) db
    }
-- | Helper for implementing `updateS`
-- for the case where a value is not yet loaded.
updateLoad :: (Exception e, Monad m)
    => m (Either e t) -- ^ How to load the value.
    -> (e -> m b) -- ^ What to do with the error when loading the value.
    -> (t -> da -> m b) -- ^ What to do with the value.
    -> Maybe t -- ^ Value, maybe loaded, maybe not.
    -> da -- ^ Delta.
    -> m b
updateLoad load handle update' Nothing da = do
    ea <- load
    case ea of
        Left e -> handle e
        Right x -> update' x da
updateLoad _load _ update' (Just x) da = update' x da
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet/10a35cbeae31779069832dff01401e4ad0e39226/lib/dbvar/src/Data/DBVar.hs | haskell | # OPTIONS_GHC -Wno-redundant-constraints#
We intentionally specify more constraints than necessary for some exports.
# LANGUAGE RankNTypes #
* Synopsis
| 'DBVar' represents a mutable variable whose value is kept in memory,
but which is written to the hard drive on every update.
This provides a convenient interface for persisting
values across program runs.
For efficient updates, delta encodings are used, see "Data.Delta".
is written.
* DBVar
* Store
$EitherSomeException
* Testing
------------------------------------------------------------------------------
DBVar
------------------------------------------------------------------------------
The type @delta@ is a delta encoding for this value type @a@,
However, whenever the value is updated, a copy of will be written
to persistent storage like a file or database on the hard disk;
For efficient updates, the delta encoding @delta@ is used in the update.
Concurrency:
* Updates are atomic and will block other updates.
(except for a small moment where the new value atomically
replaces the old one).
The new value will be evaluated to weak head normal form.
| Modify the value in a 'DBVar'.
The new value will be evaluated to weak head normal form.
| Maybe modify the value in a 'DBVar'
If updated, the new value will be evaluated to weak head normal form.
| Initialize a new 'DBVar' for a given 'Store'.
^ 'Store' for writing.
^ Initial value.
| Create a 'DBVar' by loading its value from an existing 'Store'
Throws an exception if the value cannot be loaded.
^ 'Store' for writing and for reading the initial value.
| Create 'DBVar' from an initial value and an update function
Concurrency: The update function needs to be atomic even in the presence
of asynchronous exceptions.
lock for updating the cache
We mask asynchronous exceptions here
------------------------------------------------------------------------------
Store
------------------------------------------------------------------------------
old value, for performance
delta to new value
write new value
Useful for testing.
| Add a caching layer to a 'Store'.
Access to the underlying 'Store' is enforced to be sequential,
but the cache can be accessed in parallel.
FIXME: There is still a small race condition where the cache
could be written twice before it is filled.
In general, think about restricting the monad `m`,
Cache that need not be filled in the beginning
nobody else is writing to the store.
somebody is writing
For reasons of efficiency, we have to store the 'Machine'
that is created within the 'Embedding'.
Operations of the result 'Store'.
we were missing the initial write
Note: This function is exported for testing and documentation only,
use the more efficient 'embedStore' instead.
WARNING: The 'updateS' and 'writeS' functions of the result are not atomic
in the presence of asynchronous exceptions.
but fails for others, such as @m = 'IO'@.
| Helper for implementing `updateS`
for the case where a value is not yet loaded.
^ How to load the value.
^ What to do with the error when loading the value.
^ What to do with the value.
^ Value, maybe loaded, maybe not. | # LANGUAGE LambdaCase #
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Data.DBVar (
' Store ' represent a storage facility to which the ' DBVar '
DBVar
, readDBVar, updateDBVar, modifyDBVar, modifyDBMaybe
, initDBVar, loadDBVar
, Store (..)
, newStore
, NotInitialized (..)
, embedStore, pairStores
, embedStore'
, updateLoad
, newCachedStore
) where
import Prelude
import Control.Applicative
( liftA2 )
import Control.Exception
( Exception, SomeException, toException )
import Control.Monad
( join )
import Control.Monad.Class.MonadSTM
( MonadSTM
, atomically
, modifyTVar'
, newTVarIO
, readTVar
, readTVarIO
, retry
, writeTVar
)
import Control.Monad.Class.MonadThrow
( MonadEvaluate
, MonadMask
, MonadThrow
, bracket
, evaluate
, finally
, mask
, throwIO
)
import Data.Delta
( Delta (..), Embedding, Embedding' (..), Machine (..), inject, project )
| A ' DBVar'@ m delta@ is a mutable reference to a value of type
that is we have @a ~ @'Base'@ delta@.
The value is cached in memory , in weak head normal form ( WHNF ) .
any particular storage is specified by the ' Store ' type .
* Reads will /not/ be blocked during an update
data DBVar m delta = DBVar
{ readDBVar_ :: m (Base delta)
, modifyDBMaybe_ :: forall b. (Base delta -> (Maybe delta, b)) -> m b
}
-- | Read the current value of the 'DBVar'.
readDBVar :: (Delta da, a ~ Base da) => DBVar m da -> m a
readDBVar = readDBVar_
-- | Update the value of the 'DBVar' using a delta encoding.
-- Implemented via 'modifyDBMaybe', which always applies the delta.
updateDBVar :: (Delta da, Monad m) => DBVar m da -> da -> m ()
updateDBVar var delta = modifyDBMaybe var $ \_ -> (Just delta,())
-- | Modify the value in a 'DBVar'.
-- The function computes both a delta to apply and a result @b@ to return.
modifyDBVar
    :: (Delta da, Monad m, a ~ Base da)
    => DBVar m da -> (a -> (da, b)) -> m b
modifyDBVar var f = modifyDBMaybe var $ \a -> let (da,b) = f a in (Just da,b)
-- | Maybe modify the value in a 'DBVar':
-- returning 'Nothing' for the delta leaves the stored value unchanged,
-- while the result @b@ is returned either way.
modifyDBMaybe
    :: (Delta da, Monad m, a ~ Base da)
    => DBVar m da -> (a -> (Maybe da, b)) -> m b
modifyDBMaybe = modifyDBMaybe_
initDBVar
:: ( MonadSTM m, MonadThrow m, MonadEvaluate m, MonadMask m
, Delta da, a ~ Base da
)
-> m (DBVar m da)
initDBVar store v = do
writeS store v
newWithCache (updateS store . Just) v
loadDBVar
:: ( MonadSTM m, MonadThrow m, MonadEvaluate m, MonadMask m
, Delta da
)
-> m (DBVar m da)
loadDBVar store =
loadS store >>= \case
Left e -> throwIO e
Right a -> newWithCache (updateS store . Just) a
using a ' TVar ' as in - memory cache .
Space : The value in the ' TVar ' will be evaluated to weak head normal form .
newWithCache
:: ( MonadSTM m, MonadThrow m, MonadMask m, MonadEvaluate m
, Delta da, a ~ Base da
)
=> (a -> da -> m ()) -> a -> m (DBVar m da)
newWithCache update a = do
cache <- newTVarIO a
pure $ DBVar
{ readDBVar_ = readTVarIO cache
, modifyDBMaybe_ = \f -> do
let before = atomically $ do
readTVar locked >>= \case
True -> retry
False -> do
writeTVar locked True
readTVar cache
after _ = atomically $ writeTVar locked False
action old = do
let (mdelta, b) = f old
case mdelta of
Nothing -> pure ()
Just delta -> do
new <- evaluate $ apply delta old
mask $ \restore -> do
to ensure that the TVar will be updated
whenever @update@ succeeds without exception .
restore $ update old delta
atomically $ writeTVar cache new
pure b
bracket before after action
}
|
A ' Store ' is a storage facility for Haskell values of type @a ~@'Base'@ da@.
Typical use cases are a file or a database on the hard disk .
A ' Store ' has many similarities with an ' Embedding ' .
The main difference is that storing value in a ' Store ' has side effects .
A ' Store ' is described by three action :
* ' writeS ' writes a value to the store .
* ' ' loads a value from the store .
* ' updateS ' uses a delta encoding of type @da@ to efficiently update
the store .
In order to avoid performing an expensive ' loadS ' operation ,
the action ' updateS ' expects the value described by the store
as an argument , but no check is performed whether the provided
value matches the contents of the store .
Also , not every store inspects this argument .
A ' Store ' is characterized by the following properties :
* The store _ _ need not contain _ _ a properly formatted _ _ value _ _ :
Loading a value from the store may fail , and this is why ' loadS '
has an ' Either ' result .
For example , if the ' Store ' represents
a file on disk , then the file may corrupted or in an incompatible
file format when first opened .
In such a case of failure , the result ' ( e : : is returned , where the exception gives more information
about the failure .
However , loading a value after writing it should always succeed ,
we have
> writeS s a > > loadS s = pure ( Right a )
* The store is _ _ redundant _ _ :
Two stores with different contents may describe
the same value of type @a@.
For example , two files with different whitespace
may describe the same JSON value .
In general , we have
> loadS s > > = either ( const $ pure ( ) ) ( writeS s ) ≠ pure ( )
* Updating a store _ _ commutes with ' apply ' _ _ :
We have
> updateS s a da > > = pure $ Right $ apply a da
However , since the store is redundant , we often have
> updateS s a da ≠ writeS s ( apply a da )
* _ _ Exceptions _ _ :
It is expected that the functions ' loadS ' , ' updateS ' , ' writeS '
do not throw synchronous exceptions . In the worst case ,
' loadS ' should return ' Left ' after reading or writing
to the store was unsuccessful .
* _ _ Concurrency _ _ :
It is expected that the functions ' updateS ' and ' writeS '
are /atomic/ : Either they succeed in updating / writing
the new value in its entirety , or the old value is kept .
In particular , we expect this even when one of these
functions receives an asynchronous exception and needs to abort
normal operation .
A 'Store' is a storage facility for Haskell values of type @a ~@'Base'@ da@.
Typical use cases are a file or a database on the hard disk.
A 'Store' has many similarities with an 'Embedding'.
The main difference is that storing value in a 'Store' has side effects.
A 'Store' is described by three action:
* 'writeS' writes a value to the store.
* 'loadS' loads a value from the store.
* 'updateS' uses a delta encoding of type @da@ to efficiently update
the store.
In order to avoid performing an expensive 'loadS' operation,
the action 'updateS' expects the value described by the store
as an argument, but no check is performed whether the provided
value matches the contents of the store.
Also, not every store inspects this argument.
A 'Store' is characterized by the following properties:
* The store __need not contain__ a properly formatted __value__:
Loading a value from the store may fail, and this is why 'loadS'
has an 'Either' result.
For example, if the 'Store' represents
a file on disk, then the file may corrupted or in an incompatible
file format when first opened.
In such a case of failure, the result 'Left'@ (e :: @'SomeException'@)@
is returned, where the exception @e@ gives more information
about the failure.
However, loading a value after writing it should always succeed,
we have
> writeS s a >> loadS s = pure (Right a)
* The store is __redundant__:
Two stores with different contents may describe
the same value of type @a@.
For example, two files with different whitespace
may describe the same JSON value.
In general, we have
> loadS s >>= either (const $ pure ()) (writeS s) ≠ pure ()
* Updating a store __commutes with 'apply'__:
We have
> updateS s a da >> loadS s = pure $ Right $ apply a da
However, since the store is redundant, we often have
> updateS s a da ≠ writeS s (apply a da)
* __Exceptions__:
It is expected that the functions 'loadS', 'updateS', 'writeS'
do not throw synchronous exceptions. In the worst case,
'loadS' should return 'Left' after reading or writing
to the store was unsuccessful.
* __Concurrency__:
It is expected that the functions 'updateS' and 'writeS'
are /atomic/: Either they succeed in updating / writing
the new value in its entirety, or the old value is kept.
In particular, we expect this even when one of these
functions receives an asynchronous exception and needs to abort
normal operation.
-}
data Store m da = Store
{ loadS :: m (Either SomeException (Base da))
, writeS :: Base da -> m ()
, updateS
}
HLINT ignore " Use readTVarIO "
-- | An in-memory 'Store' backed by a mutable variable ('TVar').
-- The store starts out empty: 'loadS' returns @Left@ 'NotInitialized'
-- until a value has been written with 'writeS'.
newStore :: (Delta da, MonadSTM m) => m (Store m da)
newStore = do
    ref <- newTVarIO $ Left $ toException NotInitialized
    pure $ Store
        { loadS = atomically $ readTVar ref
        , writeS = atomically . writeTVar ref . Right
        -- updateS ignores the "old value" hint and applies the delta
        -- directly to whatever Right value is in the TVar (if any).
        , updateS = \_ -> atomically . modifyTVar' ref . fmap . apply
        }
| $ EitherSomeException
NOTE : [ EitherSomeException ]
In this version of the library , the error case returned by ' ' and ' load '
is the general ' SomeException ' type , which is a disjoint sum of all possible
error types ( that is , members of the ' Exception ' class ) .
In a future version of this library , this may be replaced by a more specific
error type , but at the price of introducing a new type parameter in the
' Store ' type .
For now , I have opted to explore a region of the design space
where the number of type parameters is kept to a minimum .
I would argue that making errors visible on the type level is not as
useful as one might hope for , because in exchange for making the types noisier ,
the amount of type - safety we gain is very small .
Specifically , if we encounter an element of the ' SomeException ' type that
we did not expect , it is entirely ok to ' throw ' it .
For example , consider the following code :
@
let ea : : Either SomeException ( )
ea = [ .. ]
in
case ea of
Right _ - > " everything is ok "
Left e - > case fromException e of
Just ( AssertionFailed _ ) - > " bad things happened "
Nothing - > throw e
@
In this example , using the more specific type @ea : : Either AssertionFailed ( ) @
would have eliminated the need to handle the ' Nothing ' case .
But as we are dealing with exceptions , this case does have a default handler ,
and there is less need to exclude it at compile as opposed to , say ,
the case of an empty list .
NOTE: [EitherSomeException]
In this version of the library, the error case returned by 'loadS' and 'load'
is the general 'SomeException' type, which is a disjoint sum of all possible
error types (that is, members of the 'Exception' class).
In a future version of this library, this may be replaced by a more specific
error type, but at the price of introducing a new type parameter @e@ in the
'Store' type.
For now, I have opted to explore a region of the design space
where the number of type parameters is kept to a minimum.
I would argue that making errors visible on the type level is not as
useful as one might hope for, because in exchange for making the types noisier,
the amount of type-safety we gain is very small.
Specifically, if we encounter an element of the 'SomeException' type that
we did not expect, it is entirely ok to 'throw' it.
For example, consider the following code:
@
let ea :: Either SomeException ()
ea = [..]
in
case ea of
Right _ -> "everything is ok"
Left e -> case fromException e of
Just (AssertionFailed _) -> "bad things happened"
Nothing -> throw e
@
In this example, using the more specific type @ea :: Either AssertionFailed ()@
would have eliminated the need to handle the 'Nothing' case.
But as we are dealing with exceptions, this case does have a default handler,
and there is less need to exclude it at compile as opposed to, say,
the case of an empty list.
-}
| Failure that occurs when calling ' ' on a ' ' that is empty .
data NotInitialized = NotInitialized deriving (Eq, Show)
instance Exception NotInitialized
as the ` Store ` operations do not compose very well .
newCachedStore
:: forall m da. (Delta da, MonadSTM m, MonadThrow m, MonadEvaluate m)
=> Store m da -> m (Store m da)
newCachedStore Store{loadS,writeS,updateS} = do
Lock that puts , writeS and updateS into sequence
islocked <- newTVarIO False
let withLock :: forall b. m b -> m b
withLock action = do
atomically $ readTVar islocked >>= \case
True -> retry
False -> writeTVar islocked True
action `finally` atomically (writeTVar islocked False)
cache <- newTVarIO (Nothing :: Maybe (Base da))
let writeCache ma = writeTVar cache ma
Load the value from the Store only if it is not cached and
let load :: m (Either SomeException (Base da))
load = join $ atomically $ do
ma <- readTVar cache
case ma of
Nothing -> readTVar islocked >>= \case
False -> pure $ withLock $ do
ea <- loadS
case ea of
Left e -> pure $ Left e
Right a -> do
atomically $ writeCache $ Just a
pure $ Right a
Just a -> pure $ pure $ Right a
pure $ Store
{ loadS = load
, writeS = \a -> withLock $ do
atomically $ writeCache (Just a)
writeS a
, updateS = updateLoad load throwIO $ \old delta -> withLock $ do
new <- evaluate $ apply delta old
atomically $ writeCache $ Just new
updateS (Just old) delta
}
embedStore :: (MonadSTM m, MonadMask m, Delta da)
=> Embedding da db -> Store m db -> m (Store m da)
embedStore embed bstore = do
machine <- newTVarIO Nothing
let readMachine = readTVarIO machine
writeMachine = atomically . writeTVar machine . Just
let load = loadS bstore >>= \case
Left e -> pure $ Left e
Right b -> case project embed b of
Left e -> pure $ Left e
Right (a,mab) -> do
writeMachine mab
pure $ Right a
write a = do
let mab = inject embed a
mask $ \restore -> do
restore $ writeS bstore (state_ mab)
writeMachine mab
update = updateLoad load throwIO $ \a da -> do
readMachine >>= \case
write (apply da a)
advance the machine by one step
let (db, mab2) = step_ mab1 (a,da)
mask $ \restore -> do
restore $ updateS bstore (Just $ state_ mab2) db
writeMachine mab2
pure $ Store {loadS=load,writeS=write,updateS=update}
| Obtain a ' Store ' for one type @a1@ from a ' Store ' for another type @a2@
via an ' Embedding '' of the first type into the second type .
embedStore'
:: (Monad m, MonadThrow m)
=> Embedding' da db -> Store m db -> Store m da
embedStore' Embedding'{load,write,update} Store{loadS,writeS,updateS} =
let
loadL = (load =<<) <$> loadS
updateL = \ma da -> case ma of
Just a -> loadS >>= \case
Left _ -> pure ()
Right b -> updateS (Just b) (update a b da)
Nothing -> do
ea <- loadL
case ea of
Left e -> throwIO e
Right a -> updateL (Just a) da
in Store
{ loadS = loadL
, writeS = writeS . write
, updateS = updateL
}
| Combine two ' Stores ' into a store for pairs .
For example , the update of the first store may succeed while the update of
the second store may fail .
In other words , this combinator works for some monads , such as @m = @'STM ' ,
pairStores :: Monad m => Store m da -> Store m db -> Store m (da, db)
pairStores sa sb = Store
{ loadS = liftA2 (,) <$> loadS sa <*> loadS sb
, writeS = \(a,b) -> writeS sa a >> writeS sb b
, updateS = \mi (da,db) ->
case mi of
Nothing -> updateS sa Nothing da >> updateS sb Nothing db
Just (a,b) -> updateS sa (Just a) da >> updateS sb (Just b) db
}
updateLoad :: (Exception e, Monad m)
^ Delta .
-> m b
updateLoad load handle update' Nothing da = do
ea <- load
case ea of
Left e -> handle e
Right x -> update' x da
updateLoad _load _ update' (Just x) da = update' x da
|
fe8fd8708c39476df01a54e43ba46e3a563b568f60b7d0b366a8702d6d38b404 | janestreet/bonsai | main.ml | open! Core
open Bonsai_web
open Bonsai.Let_syntax
(* Demo data model: a user has a display name and a small integer id. *)
module User_info = struct
  type t =
    { name : string
    ; int_id : int
    }
  [@@deriving compare, equal, fields, sexp]

  (* Nine sample users: every pairing of a base name with an environment
     suffix, keyed by the combined name, e.g. "bonsai-prod". *)
  let sample_data =
    List.mapi [ "prod"; "dev"; "test" ] ~f:(fun i suffix ->
      List.mapi [ "bonsai"; "incremental"; "app" ] ~f:(fun j name ->
        let name = String.concat ~sep:"-" [ name; suffix ] in
        let int_id = (10 * i) + j in
        name, Fields.create ~name ~int_id))
    |> List.concat
    |> String.Map.of_alist_exn
  ;;
end
(* Wrapper around [Bonsai_web_ui_search_bar] specialised to usernames. *)
module Search_bar = struct
  (* The search bar's item type: a single username string. *)
  module Username = struct
    type t = { username : string } [@@deriving compare, equal, fields, sexp]

    let of_user_info user_info = Fields.create ~username:(User_info.name user_info)
    let to_string t = username t

    (* Any string parses as a (possibly unknown) username. *)
    let of_string username = Fields.create ~username |> Option.some
  end

  module Input = struct
    include Bonsai_web_ui_search_bar.Input

    let create = Fields.create
  end

  (* The search-bar component: shows up to 5 results, and loads 2 more
     results per click on the "more" affordance. *)
  let component =
    Bonsai_web_ui_search_bar.create
      (module Username)
      ~of_string:Username.of_string
      ~additional_query_results_on_click:2
      ~max_query_results:5
      ()
  ;;
end
(* Top-level input to the app: the full directory of users. *)
module Input = struct
  type t = { all_users : User_info.t String.Map.t } [@@deriving fields]

  let default () = { all_users = User_info.sample_data }
end
(* Renders the currently selected user, or a placeholder when none is
   selected. *)
let selected_display selected_user =
  match%arr selected_user with
  | None -> Vdom.Node.div [ Vdom.Node.text "No user selected" ]
  | Some ({ name; int_id } : User_info.t) ->
    Vdom.Node.div
      [ Vdom.Node.text "Selected user"
      ; Vdom.Node.br ()
      ; Vdom.Node.textf "name : %s , id %d" name int_id
      ]
;;
(* State component holding the currently selected user (initially none). *)
let set_model_component =
  let module User_opt = struct
    type t = User_info.t option [@@deriving equal, sexp]
  end
  in
  Bonsai.state (module User_opt) ~default_model:None
;;
(* Combines the selection state with the user directory, producing the
   current selection together with the search bar's input (choices plus an
   on-select callback). *)
let to_server_input input =
  let%sub set_model = set_model_component in
  let%arr current_user, inject_set_model = set_model
  and all_users = input >>| Input.all_users in
  let choices = all_users |> Map.data |> List.map ~f:Search_bar.Username.of_user_info in
  (* Selecting a username looks the user up and stores the result (which is
     [None] if the name is unknown) in the model. *)
  let on_select username =
    username |> Search_bar.Username.to_string |> Map.find all_users |> inject_set_model
  in
  current_user, Search_bar.Input.create ~choices ~on_select
;;
(* Top-level component: a search bar on top and the selected-user display
   below it. *)
let component input =
  let%sub current_user, search_bar_input = to_server_input input in
  let%sub selected = selected_display current_user in
  let%sub search_bar = Search_bar.component search_bar_input in
  let%arr selected = selected
  and search_bar = search_bar in
  Vdom.Node.div [ search_bar; selected ]
;;
(* Entry point: start the app with the sample data as its (static) input. *)
let () =
  let input = Bonsai.Var.create (Input.default ()) in
  Bonsai_web.Start.start (component (Bonsai.Var.value input))
;;
| null | https://raw.githubusercontent.com/janestreet/bonsai/782fecd000a1f97b143a3f24b76efec96e36a398/examples/search_bar/main.ml | ocaml | open! Core
open Bonsai_web
open Bonsai.Let_syntax
module User_info = struct
type t =
{ name : string
; int_id : int
}
[@@deriving compare, equal, fields, sexp]
let sample_data =
List.mapi [ "prod"; "dev"; "test" ] ~f:(fun i suffix ->
List.mapi [ "bonsai"; "incremental"; "app" ] ~f:(fun j name ->
let name = String.concat ~sep:"-" [ name; suffix ] in
let int_id = (10 * i) + j in
name, Fields.create ~name ~int_id))
|> List.concat
|> String.Map.of_alist_exn
;;
end
module Search_bar = struct
module Username = struct
type t = { username : string } [@@deriving compare, equal, fields, sexp]
let of_user_info user_info = Fields.create ~username:(User_info.name user_info)
let to_string t = username t
let of_string username = Fields.create ~username |> Option.some
end
module Input = struct
include Bonsai_web_ui_search_bar.Input
let create = Fields.create
end
let component =
Bonsai_web_ui_search_bar.create
(module Username)
~of_string:Username.of_string
~additional_query_results_on_click:2
~max_query_results:5
()
;;
end
module Input = struct
type t = { all_users : User_info.t String.Map.t } [@@deriving fields]
let default () = { all_users = User_info.sample_data }
end
let selected_display selected_user =
match%arr selected_user with
| None -> Vdom.Node.div [ Vdom.Node.text "No user selected" ]
| Some ({ name; int_id } : User_info.t) ->
Vdom.Node.div
[ Vdom.Node.text "Selected user"
; Vdom.Node.br ()
; Vdom.Node.textf "name : %s , id %d" name int_id
]
;;
let set_model_component =
let module User_opt = struct
type t = User_info.t option [@@deriving equal, sexp]
end
in
Bonsai.state (module User_opt) ~default_model:None
;;
let to_server_input input =
let%sub set_model = set_model_component in
let%arr current_user, inject_set_model = set_model
and all_users = input >>| Input.all_users in
let choices = all_users |> Map.data |> List.map ~f:Search_bar.Username.of_user_info in
let on_select username =
username |> Search_bar.Username.to_string |> Map.find all_users |> inject_set_model
in
current_user, Search_bar.Input.create ~choices ~on_select
;;
let component input =
let%sub current_user, search_bar_input = to_server_input input in
let%sub selected = selected_display current_user in
let%sub search_bar = Search_bar.component search_bar_input in
let%arr selected = selected
and search_bar = search_bar in
Vdom.Node.div [ search_bar; selected ]
;;
let () =
let input = Bonsai.Var.create (Input.default ()) in
Bonsai_web.Start.start (component (Bonsai.Var.value input))
;;
| |
bc2b5a01bbebda8ca00ccdf489bda0867ccf724e706beac4a6930422c2a93012 | ntoronto/drbayes | split.rkt | #lang typed/racket/base
(require racket/list
"../set.rkt"
"../flonum.rkt")
(provide (all-defined-out))
;; Split a nonempty probability interval at its midpoint, returning the two
;; halves, or the empty list when splitting is not useful (a degenerate half
;; of zero width, or a half that is empty / cannot be sampled).
(: interval-split (-> Nonempty-Prob-Interval (Listof Nonempty-Prob-Interval)))
(define (interval-split I)
  (define-values (a b a? b?) (prob-interval-fields I))
  ;; c is the midpoint; m1/m2 are the widths of the lower/upper halves.
  (define c (prob-midpoint a b))
  (define m1 (prob- c a))
  (define m2 (prob- b c))
  (cond [(and (prob? m1) (not (prob-0? m1))
              (prob? m2) (not (prob-0? m2)))
         ;; Lower half keeps the original left endpoint flag and is closed
         ;; at c; upper half is open at c and keeps the right endpoint flag,
         ;; so the halves partition I without overlap at c.
         (define I1 (prob-interval a c a? #t))
         (define I2 (prob-interval c b #f b?))
         (cond [(and (not (empty-prob-set? I1)) (prob-interval-can-sample? I1)
                     (not (empty-prob-set? I2)) (prob-interval-can-sample? I2))
                (list I1 I2)]
               [else empty])]
        [else empty]))
| null | https://raw.githubusercontent.com/ntoronto/drbayes/e59eb7c7867118bf4c77ca903e133c7530e612a3/drbayes/private/search/split.rkt | racket | #lang typed/racket/base
(require racket/list
"../set.rkt"
"../flonum.rkt")
(provide (all-defined-out))
(: interval-split (-> Nonempty-Prob-Interval (Listof Nonempty-Prob-Interval)))
(define (interval-split I)
(define-values (a b a? b?) (prob-interval-fields I))
(define c (prob-midpoint a b))
(define m1 (prob- c a))
(define m2 (prob- b c))
(cond [(and (prob? m1) (not (prob-0? m1))
(prob? m2) (not (prob-0? m2)))
(define I1 (prob-interval a c a? #t))
(define I2 (prob-interval c b #f b?))
(cond [(and (not (empty-prob-set? I1)) (prob-interval-can-sample? I1)
(not (empty-prob-set? I2)) (prob-interval-can-sample? I2))
(list I1 I2)]
[else empty])]
[else empty]))
| |
7f87498cbb2bd7b811a9b5191bdda29f17f093e974780256f568ea677aa8ef07 | vlstill/hsExprTest | hiord-compiled.q.hs | -- @ compiled: true
-- Name of the expression the checker should evaluate.
expr = "f"

-- | A left fold over a list with a homogeneous combining function.
f :: (a -> a -> a) -> a -> [a] -> a
f combine seed xs = foldl combine seed xs
| null | https://raw.githubusercontent.com/vlstill/hsExprTest/391fc823c1684ec248ac8f76412fefeffb791865/test/hiord-compiled.q.hs | haskell | @ compiled: true |
expr = "f"
f :: (a -> a -> a) -> a -> [a] -> a
f = foldl
|
18de158aa74a1cf70077f943b870b422f840bbef67ad5eae493654baa5ecaefb | jeluard/cljc-ethereum | json.cljc | (ns ethereum.utils.json
"JSON utils functions"
#?(:clj (:require [jsonista.core :as jsonista])))
(defn map->json
  "Converts a map into a JSON string.
   On the JVM this delegates to jsonista; in ClojureScript it converts the
   value to a JS object and uses the host JSON serializer."
  [o]
  #?(:clj (jsonista/write-value-as-string o)
     :cljs (js/JSON.stringify (clj->js o))))
(defn json->map
"Converts a JSON string into a map. Keys are strings."
[o]
#?(:clj (jsonista/read-value o)
:cljs (js->clj (js/JSON.parse o)))) | null | https://raw.githubusercontent.com/jeluard/cljc-ethereum/41fb6e1fc5cd870eca7daa690c2a19e4db08db9c/src/ethereum/utils/json.cljc | clojure | (ns ethereum.utils.json
"JSON utils functions"
#?(:clj (:require [jsonista.core :as jsonista])))
(defn map->json
"Converts a map into a JSON string."
[o]
#?(:clj (jsonista/write-value-as-string o)
:cljs (js/JSON.stringify (clj->js o))))
(defn json->map
"Converts a JSON string into a map. Keys are strings."
[o]
#?(:clj (jsonista/read-value o)
:cljs (js->clj (js/JSON.parse o)))) | |
4d27211ce57a99fddba4a5c75d061076d36f91d64040726515b726b89c78388a | GRACeFUL-project/haskelzinc | FZSolutionParser.hs | # LANGUAGE FlexibleInstances #
|
Module : FZSolutionParser
Description : FlatZinc solutions parser
License : < >
Stability : experimental
This module defines a parser for the default format of the output of the two solvers
integrated in haskelzinc ( G12 / FD and choco3 ) . It also provides modular parsers for
entities that constitute a solution , such as MiniZinc variable names and values ,
solutions ' separator in case of multiple solutions , etc . These modular parsers can be
used in building a parser for a solver 's output , the format of which is specified by a
MiniZinc @output@ item differs from the default one .
Module : FZSolutionParser
Description : FlatZinc solutions parser
License : BSD3
Maintainer : Klara Marntirosian <>
Stability : experimental
This module defines a parser for the default format of the output of the two solvers
integrated in haskelzinc (G12/FD and choco3). It also provides modular parsers for
entities that constitute a solution, such as MiniZinc variable names and values,
solutions' separator in case of multiple solutions, etc. These modular parsers can be
used in building a parser for a solver's output, the format of which is specified by a
MiniZinc @output@ item differs from the default one.
-}
module Interfaces.FZSolutionParser (
MValue(..), Solution,
-- * Parsing values
valueM,
intM, boolM, floatM, stringM, setM,
setRange, arrayM,
-- * Solutions
varName, simpleVarName, quotedVarName,
comment, comments,
-- ** Default parsers
defaultNameValuePair,
defaultUnsat, defaultSolution,
trySolutionsDefault,
getAllSolutionsDefault, getDefaultSolutionsFromFile,
-- ** Custom
getAllSolutions, trySolutions,
-- | The following functions can be used when a MiniZinc @output@ item, which alters the
-- default output format of the solver, is present in the model.
nameValuePair,
allSolutions, takeSolutionsWithParser
) where
import Data.Char
import Control.Applicative
import Data.Set (Set, fromDistinctAscList)
import qualified Text.Parsec as P
import qualified Text.Parsec.Char as C
import Text.Parsec.String (Parser)
Next two modules for testing only
import
import Control .
-- | A Solution consists of a list of pairs. Each pair represents an
-- assignment of a value to a decision variable of the constraint model.
type Solution = [(String, MValue)]

-- | Representation of returned values.
data MValue = MError String      -- ^ Error value carrying a message
            | MInt Int           -- ^ MiniZinc integer
            | MFloat Float       -- ^ MiniZinc float
            | MBool Bool         -- ^ MiniZinc boolean
            | MString String     -- ^ MiniZinc string
            | MArray [MValue]    -- ^ MiniZinc (possibly nested) array
            | MSet (Set MValue)  -- ^ MiniZinc set
            deriving Show
--deriving (Show, Generic, NFData)
-- | Returns either a parse error or a list of solutions of the constraint model, parsed
-- from the file where they are printed. The length of the list is specified by the
second argument of the function .
-- | Returns either a parse error or a list of solutions of the constraint
-- model, parsed from the file where they are printed. For @n > 0@ at most
-- @n@ solutions are returned; otherwise all solutions are returned.
getDefaultSolutionsFromFile :: FilePath -> Int -> IO (Either P.ParseError [Solution])
getDefaultSolutionsFromFile path n = do
    output <- readFile path
    -- BUG FIX: the previous version ignored @n@ entirely, contradicting its
    -- own documentation; honour it by truncating the parsed solution list
    -- (sign convention matches 'takeSolutions': non-positive means "all").
    let limit = if n > 0 then take n else id
    return $ fmap limit (getAllSolutionsDefault output)
{-
-- | Same as 'getSolutionFromFile' but parses the string argument of the function instead
-- of the contents of a file.
getDefaultSolutions :: Int -> String -> Either P.ParseError [Solution]
getDefaultSolutions = takeSolutions trySolutionsDefault
-}
-- | Run a custom solutions parser on the complete solver output.
getAllSolutions :: Parser [Solution] -> String -> Either P.ParseError [Solution]
getAllSolutions = runParser

-- | Parse all solutions printed in the solvers' default output format.
getAllSolutionsDefault :: String -> Either P.ParseError [Solution]
getAllSolutionsDefault = getAllSolutions trySolutionsDefault

-- | A custom version of 'getDefaultSolutions'. This function accepts a custom
-- parser to parse the solutions. The custom parser must be parametrized by an
-- integer, for specifying the number of solutions to be returned.
takeSolutionsWithParser :: (Int -> Parser [Solution]) -> Int -> String -> Either P.ParseError [Solution]
takeSolutionsWithParser p n = runParser (p n)

-- NOTE(review): identical to 'getAllSolutions'; apparently kept as an
-- internal synonym — consider removing one of the two.
allSolutions' :: Parser [Solution] -> String -> Either P.ParseError [Solution]
allSolutions' = runParser
-- Auxiliary definitions
-- Auxiliary definitions: Parsec combinators re-exported at the local
-- 'Parser' type, so the module body stays free of qualified names.

digit :: Parser Char
digit = C.digit

anyChar :: Parser Char
anyChar = C.anyChar

char :: Char -> Parser Char
char = C.char

sepBy :: Parser a -> Parser b -> Parser [a]
sepBy = P.sepBy

between :: Parser a -> Parser b -> Parser c -> Parser c
between = P.between

manyTill :: Parser a -> Parser b -> Parser [a]
manyTill = P.manyTill

many1 :: Parser a -> Parser [a]
many1 = P.many1

skipMany :: Parser a -> Parser ()
skipMany = P.skipMany

-- NOTE(review): no type signature on this alias, unlike its siblings.
anyToken = P.anyToken

eof :: Parser ()
eof = P.eof

endOfLine :: Parser Char
endOfLine = C.endOfLine

string :: String -> Parser String
string = C.string

spaces :: Parser ()
spaces = C.spaces

parseAll :: Parser a -> P.SourceName -> String -> Either P.ParseError a
parseAll = P.parse

count :: Int -> Parser a -> Parser [a]
count = P.count

try :: Parser a -> Parser a
try = P.try
-----------------------
-- Defaults
unsatMSG = "=====UNSATISFIABLE=====" -- Unsatisfiable-model message
eoSMSG = "==========" -- End-of-solutions message
eosMSG = "----------" -- End-of-solution message
-----------------------
-- Run a parser against the complete input; the whole input must be consumed.
runParser :: Parser a -> String -> Either P.ParseError a
runParser p = parseAll (p <* eof) ""

-- | Tries to parse all solutions in the default format; if that fails,
-- tries 'defaultUnsat' and yields a single empty solution.
trySolutionsDefault :: Parser [Solution]
trySolutionsDefault = trySolutions allSolutionsDefault defaultUnsat

-- | @trySolutions p u@ applies @p@ and returns the solutions. If that fails,
-- tries to parse an /Unsatisfiable/ message by applying @u@ and returns a
-- list containing one empty solution.
trySolutions :: Parser [Solution] -- Custom solutions parser
             -> Parser String -- Custom /Unsatisfiable/ message parser
             -> Parser [Solution]
trySolutions p u = try $ p <|> (u >> return [[]])

-- | Parses the default message for a model with no solutions:
-- @=====UNSATISFIABLE=====@, with comment lines allowed before and after.
defaultUnsat :: Parser String
defaultUnsat = skipMany comment *> (string unsatMSG) <* endOfLine <* many comment
-- | @takeSolutions p n@ parses exactly @n@ solutions with @p@ when @n > 0@;
-- otherwise it parses all available solutions.
takeSolutions :: Parser Solution -> Int -> Parser [Solution]
takeSolutions p n
    -- Idiom: boolean guard instead of @case (n > 0) of True/_@.
    | n > 0     = count n p
    | otherwise = allSolutions p
-- | 'takeSolutions' specialised to the default single-solution format.
takeSolutionsDefault :: Int -> Parser [Solution]
takeSolutionsDefault = takeSolutions defaultSolution

-- | Repeatedly applies the given solution parser until the optional
-- end-of-solutions marker (@==========@) and end of input.
allSolutions :: Parser Solution -> Parser [Solution]
allSolutions p = manyTill p (optional (string eoSMSG *> endOfLine) *> eof)

-- | Parses all the returned solutions.
allSolutionsDefault :: Parser [Solution]
allSolutionsDefault = allSolutions defaultSolution

-- | Parses a single solution with the default output format from the set of
-- returned solutions: name-value pairs terminated by a @----------@ line.
defaultSolution :: Parser Solution
defaultSolution = P.many (comments *> defaultNameValuePair)
                  <* string eosMSG <* endOfLine

-- | Parses a comment in the solutions (a @%@-prefixed line) and returns the
-- content.
comment :: Parser String
comment = char '%' *> spaces *> (manyTill anyToken endOfLine)

-- | Parses a sequence of commented lines in the solutions and returns their
-- content, one comment per line.
comments :: Parser String
comments = unlines <$> P.many comment

-- | Parses a MiniZinc variable name-value pair in a solution with the
-- default output format, e.g. @x = 3;@ followed by a newline.
defaultNameValuePair :: Parser (String, MValue)
defaultNameValuePair = nameValuePair (spaces *> (string "=") <* spaces)
                       <* ((: []) <$> (char ';' *> endOfLine))
-- | Used to parse a MiniZinc variable name-value pair in a solution.
-- @nameValuePair s@ parses successfully if sequential parsing of 'varName',
-- @s@ and 'valueM' is successful. Returns the MiniZinc name-value pair and
-- /forgets/ the result of parser @s@.
nameValuePair :: Parser String -- ^ Value-name separator
              -> Parser (String, MValue)
nameValuePair p1 = do
    name <- varName
    p1
    value <- valueM
    return (name, value)

-- | Parses a conventional MiniZinc variable identifier, that is, a string
-- of the form @[A-Za-z][A-Za-z0-9_]*@.
simpleVarName :: Parser String
simpleVarName = do
    first <- C.letter
    rest <- P.many (C.alphaNum <|> char '_')
    return (first : rest)

-- | Parses a quoted MiniZinc identifier; the surrounding single quotes are
-- kept in the returned string.
quotedVarName :: Parser String
quotedVarName = do
    lq <- char '\''
    name <- manyTill anyChar (char '\'')
    return (lq : (name ++ "\'"))

-- | Parses a MiniZinc variable name by trying 'simpleVarName' and
-- 'quotedVarName'.
varName :: Parser String
varName = simpleVarName <|> quotedVarName

-- | Parses a MiniZinc value. Tries 'floatM', 'intM', 'boolM', 'setM',
-- 'arrayM' and 'stringM' in this order.
valueM :: Parser MValue
valueM = try floatM <|> intM <|> boolM <|> (setM scalar) <|> (arrayM scalar) <|> stringM
| Parses a MiniZinc integer value .
intM :: Parser MValue
intM = MInt <$> int
| Parses a MiniZinc boolean value .
boolM :: Parser MValue
boolM = MBool <$> bool
| Parses a MiniZinc float value .
floatM :: Parser MValue
floatM = MFloat <$> float
| Parses a MiniZinc string value .
stringM :: Parser MValue
stringM = MString <$> (string "\"" *> manyTill anyChar (string "\""))
| Parses a MiniZinc set value .
setM :: Parser MValue -> Parser MValue
setM p = (MSet <$> fromDistinctAscList <$> (set p)) <|> setRange
int :: Parser Int
int = (char '-' >> opposite ) <|> natural
bool :: Parser Bool
bool = string "true" >> return True <|> (string "false" >> return False)
float :: Parser Float
float = do
ipart <- many1 digit
char '.'
dpart <- many1 digit
let a = read (ipart ++ "." ++ dpart) :: Float in
return a
set :: Parser a -> Parser [a]
set p = between (char '{') (char '}') (sepBy p (string "," >> spaces))
| Parses a MiniZinc set value defined with the use of the MiniZinc range operator
-- (@..@).
setRange :: Parser MValue
setRange = MSet <$> fromDistinctAscList <$> do
v1 <- int
string ".."
v2 <- int
return (map MInt (take (v2 - v1 + 1) (iterate ((+) 1) v1)))
-- | Parses MiniZinc 1-dimensional or multi-dimensional array values.
arrayM :: Parser MValue -> Parser MValue
arrayM p = do
string "array"
manyTill anyChar (char '(')
ls <- arraySizes
es <- extract p
string ")"
return (fixDims ls es)
natural :: Parser Int
natural = P.chainl1 digitValue ascendDecimal
opposite :: Parser Int
opposite = (0 - ) <$> natural
digitValue :: Parser Int
digitValue = do
d <- digit
return $ ord(d) - ord('0')
ascendDecimal :: Parser (Int -> Int -> Int)
ascendDecimal = do
return $ \x y -> x*10 + y
indexRange :: Parser Int
indexRange = do
a <- int
string ".."
b <- int
return (b - a + 1)
arraySizes :: Parser [Int]
arraySizes = P.sepEndBy1 indexRange (string "," >> spaces)
extract :: Parser MValue -> Parser [MValue]
extract p = between (char '[') (char ']') (sepBy p (string "," >> spaces))
fixDims :: [Int] -> [MValue] -> MValue
fixDims [] _ = MError "Array dimensions error: fixDims applied on empty list"
fixDims [d] ms = MArray $ ms
fixDims ds ms = fixDims (init ds) (fix1Dim (last ds) ms)
fix1Dim :: Int -> [MValue] -> [MValue]
fix1Dim _ [] = []
fix1Dim d ms = MArray (take d ms) : (fix1Dim d (drop d ms))
scalar :: Parser MValue
scalar = try floatM <|> intM <|> boolM <|> stringM
-- for testing purposes
parseWithLeftOver :: Parser a -> String -> Either P.ParseError (a,String)
parseWithLeftOver p = parseAll ((,) <$> p <*> leftOver) ""
where leftOver = manyTill anyToken eof
| null | https://raw.githubusercontent.com/GRACeFUL-project/haskelzinc/e00684428f4e2072c9b1912c7036f686314b1824/src/Interfaces/FZSolutionParser.hs | haskell | * Parsing values
* Solutions
** Default parsers
** Custom
| The following functions can be used when a MiniZinc @output@ item, which alters the
default output format of the solver, is present in the model.
| A Solution consists of a list of pairs. Each pair represents an assignment of a
value to a decision variable of the constraint model.
| Representation of returned values.
deriving (Show, Generic, NFData)
| Returns either a parse error or a list of solutions of the constraint model, parsed
from the file where they are printed. The length of the list is specified by the
-- | Same as 'getSolutionFromFile' but parses the string argument of the function instead
-- of the contents of a file.
getDefaultSolutions :: Int -> String -> Either P.ParseError [Solution]
getDefaultSolutions = takeSolutions trySolutionsDefault
| A custom version of 'getDefaultSolutions'. This function accepts a custom parser to
parse the solutions. The custom parser must be parametrized by an integer, for
specifying the number of solutions to be returned.
Auxiliary definitions
---------------------
Defaults
Unsatisfiable-model message
End-of-solutions message
End-of-solution message
---------------------
| @tryDefaultSolutions n@ tries to parse the solutions and, if it succeeds, returns
custom parser must be parametrized by an integer, for specifying the number of
solutions to be returned.
Custom solutions parser
Custom /Unsatisfiable/ message parser
| Parses the default message for a model with no solutions: @=====UNSATISFIABLE=====@,
surrounded by commented lines before and after.
| Parses all the returned solutions.
| Parses a single solution with the default output format from the set of returned
solutions.
| Parses a comment in the solutions and returns the content.
| Parses a sequence of commented lines in the solutions and returns their content.
| Parses a MiniZinc variable name-value pair in a solution with the default output
format.
@nameValuePair s@ parses succesfully if sequential parsing of 'varName', @s@ and
/forgets/ the result of parser @s@.
^ Value-name separator
| Parses a MiniZinc variable name by trying 'simpleVarName' and 'quotedVarName'.
| Parses a MiniZinc value. Tries 'floatM', 'intM', 'boolM', 'setM', 'arrayM' and
'stringM' in this order.
(@..@).
| Parses MiniZinc 1-dimensional or multi-dimensional array values.
for testing purposes | # LANGUAGE FlexibleInstances #
|
Module : FZSolutionParser
Description : FlatZinc solutions parser
License : < >
Stability : experimental
This module defines a parser for the default format of the output of the two solvers
integrated in haskelzinc ( G12 / FD and choco3 ) . It also provides modular parsers for
entities that constitute a solution , such as MiniZinc variable names and values ,
solutions ' separator in case of multiple solutions , etc . These modular parsers can be
used in building a parser for a solver 's output , the format of which is specified by a
MiniZinc @output@ item differs from the default one .
Module : FZSolutionParser
Description : FlatZinc solutions parser
License : BSD3
Maintainer : Klara Marntirosian <>
Stability : experimental
This module defines a parser for the default format of the output of the two solvers
integrated in haskelzinc (G12/FD and choco3). It also provides modular parsers for
entities that constitute a solution, such as MiniZinc variable names and values,
solutions' separator in case of multiple solutions, etc. These modular parsers can be
used in building a parser for a solver's output, the format of which is specified by a
MiniZinc @output@ item differs from the default one.
-}
module Interfaces.FZSolutionParser (
MValue(..), Solution,
valueM,
intM, boolM, floatM, stringM, setM,
setRange, arrayM,
varName, simpleVarName, quotedVarName,
comment, comments,
defaultNameValuePair,
defaultUnsat, defaultSolution,
trySolutionsDefault,
getAllSolutionsDefault, getDefaultSolutionsFromFile,
getAllSolutions, trySolutions,
nameValuePair,
allSolutions, takeSolutionsWithParser
) where
import Data.Char
import Control.Applicative
import Data.Set (Set, fromDistinctAscList)
import qualified Text.Parsec as P
import qualified Text.Parsec.Char as C
import Text.Parsec.String (Parser)
Next two modules for testing only
import
import Control .
type Solution = [(String, MValue)]
data MValue = MError String
| MInt Int
| MFloat Float
| MBool Bool
| MString String
| MArray [MValue]
| MSet (Set MValue)
deriving Show
second argument of the function .
getDefaultSolutionsFromFile :: FilePath -> Int -> IO (Either P.ParseError [Solution])
getDefaultSolutionsFromFile path n = do
output <- readFile path
return $ getAllSolutionsDefault output
getAllSolutions :: Parser [Solution] -> String -> Either P.ParseError [Solution]
getAllSolutions = runParser
getAllSolutionsDefault :: String -> Either P.ParseError [Solution]
getAllSolutionsDefault = getAllSolutions trySolutionsDefault
takeSolutionsWithParser :: (Int -> Parser [Solution]) -> Int -> String -> Either P.ParseError [Solution]
takeSolutionsWithParser p n = runParser (p n)
allSolutions' :: Parser [Solution] -> String -> Either P.ParseError [Solution]
allSolutions' = runParser
digit :: Parser Char
digit = C.digit
anyChar :: Parser Char
anyChar = C.anyChar
char :: Char -> Parser Char
char = C.char
sepBy :: Parser a -> Parser b -> Parser [a]
sepBy = P.sepBy
between :: Parser a -> Parser b -> Parser c -> Parser c
between = P.between
manyTill :: Parser a -> Parser b -> Parser [a]
manyTill = P.manyTill
many1 :: Parser a -> Parser [a]
many1 = P.many1
skipMany :: Parser a -> Parser ()
skipMany = P.skipMany
anyToken = P.anyToken
eof :: Parser ()
eof = P.eof
endOfLine :: Parser Char
endOfLine = C.endOfLine
string :: String -> Parser String
string = C.string
spaces :: Parser ()
spaces = C.spaces
parseAll :: Parser a -> P.SourceName -> String -> Either P.ParseError a
parseAll = P.parse
count :: Int -> Parser a -> Parser [a]
count = P.count
try :: Parser a -> Parser a
try = P.try
runParser :: Parser a -> String -> Either P.ParseError a
runParser p = parseAll (p <* eof) ""
the first @n@. Else , tries ' defaultUnsat ' and returns an empty list .
trySolutionsDefault :: Parser [Solution]
trySolutionsDefault = trySolutions allSolutionsDefault defaultUnsat
| @trySolutions f p n@ applies @f n@ and returns the solutions . If that fails , tries
to parse an /Unsatisfiable/ message by applying and returns an empty list . The
-> Parser [Solution]
trySolutions p u = try $ p <|> (u >> return [[]])
defaultUnsat :: Parser String
defaultUnsat = skipMany comment *> (string unsatMSG) <* endOfLine <* many comment
takeSolutions :: Parser Solution -> Int -> Parser [Solution]
takeSolutions p n = case (n > 0) of
True -> count n p
_ -> allSolutions p
takeSolutionsDefault :: Int -> Parser [Solution]
takeSolutionsDefault = takeSolutions defaultSolution
allSolutions :: Parser Solution -> Parser [Solution]
allSolutions p = manyTill p (optional (string eoSMSG *> endOfLine) *> eof)
allSolutionsDefault :: Parser [Solution]
allSolutionsDefault = allSolutions defaultSolution
defaultSolution :: Parser Solution
defaultSolution = P.many (comments *> defaultNameValuePair)
<* string eosMSG <* endOfLine
comment :: Parser String
comment = char '%' *> spaces *> (manyTill anyToken endOfLine)
comments :: Parser String
comments = unlines <$> P.many comment
defaultNameValuePair :: Parser (String, MValue)
defaultNameValuePair = nameValuePair (spaces *> (string "=") <* spaces)
<* ((: []) <$> (char ';' *> endOfLine))
| Used to parse a MiniZinc variable name - value pair in a solution .
' valueM ' is succesfull . Returns the MiniZinc name - value pair in a pair and
-> Parser (String, MValue)
nameValuePair p1 = do
name <- varName
p1
value <- valueM
return (name, value)
| Parses a conventional MiniZinc variable identifier . That is , a string of the form
@[A - Za - z][A - Za - z0 - 9_]*@.
simpleVarName :: Parser String
simpleVarName = do
first <- C.letter
rest <- P.many (C.alphaNum <|> char '_')
return (first : rest)
| Parses a quoted MiniZinc identifier .
quotedVarName :: Parser String
quotedVarName = do
lq <- char '\''
name <- manyTill anyChar (char '\'')
return (lq : (name ++ "\'"))
varName :: Parser String
varName = simpleVarName <|> quotedVarName
valueM :: Parser MValue
valueM = try floatM <|> intM <|> boolM <|> (setM scalar) <|> (arrayM scalar) <|> stringM
| Parses a MiniZinc integer value .
intM :: Parser MValue
intM = MInt <$> int
| Parses a MiniZinc boolean value .
boolM :: Parser MValue
boolM = MBool <$> bool
| Parses a MiniZinc float value .
floatM :: Parser MValue
floatM = MFloat <$> float
| Parses a MiniZinc string value .
stringM :: Parser MValue
stringM = MString <$> (string "\"" *> manyTill anyChar (string "\""))
| Parses a MiniZinc set value .
setM :: Parser MValue -> Parser MValue
setM p = (MSet <$> fromDistinctAscList <$> (set p)) <|> setRange
int :: Parser Int
int = (char '-' >> opposite ) <|> natural
bool :: Parser Bool
bool = string "true" >> return True <|> (string "false" >> return False)
float :: Parser Float
float = do
ipart <- many1 digit
char '.'
dpart <- many1 digit
let a = read (ipart ++ "." ++ dpart) :: Float in
return a
set :: Parser a -> Parser [a]
set p = between (char '{') (char '}') (sepBy p (string "," >> spaces))
| Parses a MiniZinc set value defined with the use of the MiniZinc range operator
setRange :: Parser MValue
setRange = MSet <$> fromDistinctAscList <$> do
v1 <- int
string ".."
v2 <- int
return (map MInt (take (v2 - v1 + 1) (iterate ((+) 1) v1)))
arrayM :: Parser MValue -> Parser MValue
arrayM p = do
string "array"
manyTill anyChar (char '(')
ls <- arraySizes
es <- extract p
string ")"
return (fixDims ls es)
natural :: Parser Int
natural = P.chainl1 digitValue ascendDecimal
opposite :: Parser Int
opposite = (0 - ) <$> natural
digitValue :: Parser Int
digitValue = do
d <- digit
return $ ord(d) - ord('0')
ascendDecimal :: Parser (Int -> Int -> Int)
ascendDecimal = do
return $ \x y -> x*10 + y
indexRange :: Parser Int
indexRange = do
a <- int
string ".."
b <- int
return (b - a + 1)
arraySizes :: Parser [Int]
arraySizes = P.sepEndBy1 indexRange (string "," >> spaces)
extract :: Parser MValue -> Parser [MValue]
extract p = between (char '[') (char ']') (sepBy p (string "," >> spaces))
fixDims :: [Int] -> [MValue] -> MValue
fixDims [] _ = MError "Array dimensions error: fixDims applied on empty list"
fixDims [d] ms = MArray $ ms
fixDims ds ms = fixDims (init ds) (fix1Dim (last ds) ms)
fix1Dim :: Int -> [MValue] -> [MValue]
fix1Dim _ [] = []
fix1Dim d ms = MArray (take d ms) : (fix1Dim d (drop d ms))
scalar :: Parser MValue
scalar = try floatM <|> intM <|> boolM <|> stringM
parseWithLeftOver :: Parser a -> String -> Either P.ParseError (a,String)
parseWithLeftOver p = parseAll ((,) <$> p <*> leftOver) ""
where leftOver = manyTill anyToken eof
|
1ea85227e0698d1c3945199aca7198573f2f30a33b2908b05916d68533df405b | gedge-platform/gedge-platform | amqp_channel_sup.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2021 VMware , Inc. or its affiliates . All rights reserved .
%%
@private
-module(amqp_channel_sup).
-include("amqp_client_internal.hrl").
-behaviour(supervisor2).
-export([start_link/6]).
-export([init/1]).
%%---------------------------------------------------------------------------
Interface
%%---------------------------------------------------------------------------
start_link(Type, Connection, ConnName, InfraArgs, ChNumber,
Consumer = {_, _}) ->
Identity = {ConnName, ChNumber},
{ok, Sup} = supervisor2:start_link(?MODULE, [Consumer, Identity]),
[{gen_consumer, ConsumerPid, _, _}] = supervisor2:which_children(Sup),
{ok, ChPid} = supervisor2:start_child(
Sup, {channel,
{amqp_channel, start_link,
[Type, Connection, ChNumber, ConsumerPid, Identity]},
intrinsic, ?WORKER_WAIT, worker, [amqp_channel]}),
case start_writer(Sup, Type, InfraArgs, ConnName, ChNumber, ChPid) of
{ok, Writer} ->
amqp_channel:set_writer(ChPid, Writer),
{ok, AState} = init_command_assembler(Type),
{ok, Sup, {ChPid, AState}};
{error, _}=Error ->
Error
end.
%%---------------------------------------------------------------------------
Internal plumbing
%%---------------------------------------------------------------------------
1 GB
-define(DEFAULT_GC_THRESHOLD, 1000000000).
start_writer(_Sup, direct, [ConnPid, Node, User, VHost, Collector, AmqpParams],
ConnName, ChNumber, ChPid) ->
case rpc:call(Node, rabbit_direct, start_channel,
[ChNumber, ChPid, ConnPid, ConnName, ?PROTOCOL, User,
VHost, ?CLIENT_CAPABILITIES, Collector, AmqpParams], ?DIRECT_OPERATION_TIMEOUT) of
{ok, _Writer} = Reply ->
Reply;
{badrpc, Reason} ->
{error, {Reason, Node}};
Error ->
Error
end;
start_writer(Sup, network, [Sock, FrameMax], ConnName, ChNumber, ChPid) ->
GCThreshold = application:get_env(amqp_client, writer_gc_threshold, ?DEFAULT_GC_THRESHOLD),
supervisor2:start_child(
Sup,
{writer, {rabbit_writer, start_link,
[Sock, ChNumber, FrameMax, ?PROTOCOL, ChPid,
{ConnName, ChNumber}, false, GCThreshold]},
transient, ?WORKER_WAIT, worker, [rabbit_writer]}).
init_command_assembler(direct) -> {ok, none};
init_command_assembler(network) -> rabbit_command_assembler:init(?PROTOCOL).
%%---------------------------------------------------------------------------
supervisor2 callbacks
%%---------------------------------------------------------------------------
init([{ConsumerModule, ConsumerArgs}, Identity]) ->
{ok, {{one_for_all, 0, 1},
[{gen_consumer, {amqp_gen_consumer, start_link,
[ConsumerModule, ConsumerArgs, Identity]},
intrinsic, ?WORKER_WAIT, worker, [amqp_gen_consumer]}]}}.
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/amqp_client/src/amqp_channel_sup.erl | erlang |
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
--------------------------------------------------------------------------- | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2021 VMware , Inc. or its affiliates . All rights reserved .
@private
-module(amqp_channel_sup).
-include("amqp_client_internal.hrl").
-behaviour(supervisor2).
-export([start_link/6]).
-export([init/1]).
Interface
start_link(Type, Connection, ConnName, InfraArgs, ChNumber,
Consumer = {_, _}) ->
Identity = {ConnName, ChNumber},
{ok, Sup} = supervisor2:start_link(?MODULE, [Consumer, Identity]),
[{gen_consumer, ConsumerPid, _, _}] = supervisor2:which_children(Sup),
{ok, ChPid} = supervisor2:start_child(
Sup, {channel,
{amqp_channel, start_link,
[Type, Connection, ChNumber, ConsumerPid, Identity]},
intrinsic, ?WORKER_WAIT, worker, [amqp_channel]}),
case start_writer(Sup, Type, InfraArgs, ConnName, ChNumber, ChPid) of
{ok, Writer} ->
amqp_channel:set_writer(ChPid, Writer),
{ok, AState} = init_command_assembler(Type),
{ok, Sup, {ChPid, AState}};
{error, _}=Error ->
Error
end.
Internal plumbing
1 GB
-define(DEFAULT_GC_THRESHOLD, 1000000000).
start_writer(_Sup, direct, [ConnPid, Node, User, VHost, Collector, AmqpParams],
ConnName, ChNumber, ChPid) ->
case rpc:call(Node, rabbit_direct, start_channel,
[ChNumber, ChPid, ConnPid, ConnName, ?PROTOCOL, User,
VHost, ?CLIENT_CAPABILITIES, Collector, AmqpParams], ?DIRECT_OPERATION_TIMEOUT) of
{ok, _Writer} = Reply ->
Reply;
{badrpc, Reason} ->
{error, {Reason, Node}};
Error ->
Error
end;
start_writer(Sup, network, [Sock, FrameMax], ConnName, ChNumber, ChPid) ->
GCThreshold = application:get_env(amqp_client, writer_gc_threshold, ?DEFAULT_GC_THRESHOLD),
supervisor2:start_child(
Sup,
{writer, {rabbit_writer, start_link,
[Sock, ChNumber, FrameMax, ?PROTOCOL, ChPid,
{ConnName, ChNumber}, false, GCThreshold]},
transient, ?WORKER_WAIT, worker, [rabbit_writer]}).
init_command_assembler(direct) -> {ok, none};
init_command_assembler(network) -> rabbit_command_assembler:init(?PROTOCOL).
supervisor2 callbacks
init([{ConsumerModule, ConsumerArgs}, Identity]) ->
{ok, {{one_for_all, 0, 1},
[{gen_consumer, {amqp_gen_consumer, start_link,
[ConsumerModule, ConsumerArgs, Identity]},
intrinsic, ?WORKER_WAIT, worker, [amqp_gen_consumer]}]}}.
|
bcb7bcdc3f471408d7dc8e7e07f2db753064862723fa3e078e6544d5ebfa9d3e | 2600hz/kazoo | kzd_fax_box.erl | %%%-----------------------------------------------------------------------------
( C ) 2014 - 2020 , 2600Hz
%%% @doc Device document manipulation
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(kzd_fax_box).
-export([new/0
,type/0
,owner_id/1, owner_id/2
,timezone/1, timezone/2
,retries/1, retries/2
]).
-include("kz_documents.hrl").
-type doc() :: kz_json:object().
-export_type([doc/0]).
-define(KEY_OWNER_ID, <<"owner_id">>).
-define(KEY_TIMEZONE, <<"fax_timezone">>).
-define(KEY_RETRIES, <<"retries">>).
-define(PVT_TYPE, <<"faxbox">>).
-spec new() -> doc().
new() ->
kz_json:from_list([{<<"pvt_type">>, type()}]).
-spec type() -> kz_term:ne_binary().
type() -> ?PVT_TYPE.
-spec owner_id(doc()) -> kz_term:api_binary().
owner_id(Box) ->
owner_id(Box, 'undefined').
-spec owner_id(doc(), Default) -> kz_term:ne_binary() | Default.
owner_id(Box, Default) ->
kz_json:get_value(?KEY_OWNER_ID, Box, Default).
-spec timezone(doc()) -> kz_term:ne_binary().
timezone(Box) ->
timezone(Box, 'undefined').
-spec timezone(doc(), Default) -> kz_term:ne_binary() | Default.
timezone(Box, Default) ->
case kz_json:get_value(?KEY_TIMEZONE, Box) of
'undefined' -> owner_timezone(Box, Default);
UI-1808
TZ -> TZ
end.
-spec owner_timezone(doc(), Default) -> kz_term:ne_binary() | Default.
owner_timezone(Box, Default) ->
case kzd_users:fetch(kz_doc:account_db(Box), owner_id(Box)) of
{'ok', OwnerJObj} -> kzd_users:timezone(OwnerJObj, Default);
{'error', _} -> kzd_accounts:timezone(kz_doc:account_id(Box), Default)
end.
-spec retries(doc()) -> kz_term:api_integer().
retries(Box) ->
retries(Box, 'undefined').
-spec retries(doc(), Default) -> integer() | Default.
retries(Box, Default) ->
kz_json:get_integer_value(?KEY_RETRIES, Box, Default).
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_documents/src/kzd_fax_box.erl | erlang | -----------------------------------------------------------------------------
@doc Device document manipulation
@end
----------------------------------------------------------------------------- | ( C ) 2014 - 2020 , 2600Hz
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(kzd_fax_box).
-export([new/0
,type/0
,owner_id/1, owner_id/2
,timezone/1, timezone/2
,retries/1, retries/2
]).
-include("kz_documents.hrl").
-type doc() :: kz_json:object().
-export_type([doc/0]).
-define(KEY_OWNER_ID, <<"owner_id">>).
-define(KEY_TIMEZONE, <<"fax_timezone">>).
-define(KEY_RETRIES, <<"retries">>).
-define(PVT_TYPE, <<"faxbox">>).
-spec new() -> doc().
new() ->
kz_json:from_list([{<<"pvt_type">>, type()}]).
-spec type() -> kz_term:ne_binary().
type() -> ?PVT_TYPE.
-spec owner_id(doc()) -> kz_term:api_binary().
owner_id(Box) ->
owner_id(Box, 'undefined').
-spec owner_id(doc(), Default) -> kz_term:ne_binary() | Default.
owner_id(Box, Default) ->
kz_json:get_value(?KEY_OWNER_ID, Box, Default).
-spec timezone(doc()) -> kz_term:ne_binary().
timezone(Box) ->
timezone(Box, 'undefined').
-spec timezone(doc(), Default) -> kz_term:ne_binary() | Default.
timezone(Box, Default) ->
case kz_json:get_value(?KEY_TIMEZONE, Box) of
'undefined' -> owner_timezone(Box, Default);
UI-1808
TZ -> TZ
end.
-spec owner_timezone(doc(), Default) -> kz_term:ne_binary() | Default.
owner_timezone(Box, Default) ->
case kzd_users:fetch(kz_doc:account_db(Box), owner_id(Box)) of
{'ok', OwnerJObj} -> kzd_users:timezone(OwnerJObj, Default);
{'error', _} -> kzd_accounts:timezone(kz_doc:account_id(Box), Default)
end.
-spec retries(doc()) -> kz_term:api_integer().
retries(Box) ->
retries(Box, 'undefined').
-spec retries(doc(), Default) -> integer() | Default.
retries(Box, Default) ->
kz_json:get_integer_value(?KEY_RETRIES, Box, Default).
|
6e37b6d7f17bf78866806f3bcfcdea87e936c58af5159a7ee9e706ded809e815 | hunt-framework/hunt | Crawler.hs | # OPTIONS #
-- ------------------------------------------------------------
module Holumbus.Crawler
( module Holumbus.Crawler.BasicTypes
, module Holumbus.Crawler.Constants
, module Holumbus.Crawler.Core
, module Holumbus.Crawler.Html
, module Holumbus.Crawler.Logger
, module Holumbus.Crawler.Robots
, module Holumbus.Crawler.RobotTypes
, module Holumbus.Crawler.Types
, module Holumbus.Crawler.URIs
, module Holumbus.Crawler.Util
, module Holumbus.Crawler.XmlArrows
)
where
import Holumbus.Crawler.BasicTypes
import Holumbus.Crawler.Constants
import Holumbus.Crawler.Core
import Holumbus.Crawler.Html
import Holumbus.Crawler.Logger
import Holumbus.Crawler.Robots
import Holumbus.Crawler.RobotTypes
import Holumbus.Crawler.Types
import Holumbus.Crawler.URIs
import Holumbus.Crawler.Util
import Holumbus.Crawler.XmlArrows
-- ------------------------------------------------------------
| null | https://raw.githubusercontent.com/hunt-framework/hunt/d692aae756b7bdfb4c99f5a3951aec12893649a8/hunt-crawler/src/Holumbus/Crawler.hs | haskell | ------------------------------------------------------------
------------------------------------------------------------ | # OPTIONS #
module Holumbus.Crawler
( module Holumbus.Crawler.BasicTypes
, module Holumbus.Crawler.Constants
, module Holumbus.Crawler.Core
, module Holumbus.Crawler.Html
, module Holumbus.Crawler.Logger
, module Holumbus.Crawler.Robots
, module Holumbus.Crawler.RobotTypes
, module Holumbus.Crawler.Types
, module Holumbus.Crawler.URIs
, module Holumbus.Crawler.Util
, module Holumbus.Crawler.XmlArrows
)
where
import Holumbus.Crawler.BasicTypes
import Holumbus.Crawler.Constants
import Holumbus.Crawler.Core
import Holumbus.Crawler.Html
import Holumbus.Crawler.Logger
import Holumbus.Crawler.Robots
import Holumbus.Crawler.RobotTypes
import Holumbus.Crawler.Types
import Holumbus.Crawler.URIs
import Holumbus.Crawler.Util
import Holumbus.Crawler.XmlArrows
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.