_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
9b5d09725527ec5def504a1881a23bf175dae0c759247fe29777621f6ffb48b9 | RunOrg/RunOrg | db.mli | (* © 2014 RunOrg *)
(** A database, in the RunOrg sense, is the space where all other objects live
(with a few server-level exceptions, such as server-admins). *)
open Std
(** Create a new database, with the provided name. *)
val create : [`ServerAdmin] Token.I.id -> string -> (# Cqrs.ctx, Id.t * Cqrs.Clock.t) Run.t
(** Return the number of created, not-yet-deleted databases on the server. *)
val count : [`ServerAdmin] Token.I.id -> (# Cqrs.ctx, int) Run.t
(** Return the Persona audiences available for a database. *)
val persona_audience : unit -> (#Cqrs.ctx, String.Url.t list) Run.t
* Return a subset of all databases , ordered by identifier . Maximum count is [ 100000 ] .
val all :
limit:int ->
offset:int ->
[`ServerAdmin] Token.I.id ->
(# Cqrs.ctx, < id : Id.t ; label : string ; created : Time.t > list) Run.t
(** Returns a context for the specified database, after checking that the database
exists. *)
val ctx : Id.t -> (# Cqrs.ctx as 'ctx, 'ctx option) Run.t
| null | https://raw.githubusercontent.com/RunOrg/RunOrg/b53ee2357f4bcb919ac48577426d632dffc25062/server/db.mli | ocaml | © 2014 RunOrg
* A database, in the RunOrg sense, is the space where all other objects live
(with a few server-level exceptions, such as server-admins).
* Create a new database, with the provided name.
* Return the number of created, not-yet-deleted databases on the server.
* Return the Persona audiences available for a database.
* Returns a context for the specified database, after checking that the database
exists. |
open Std
val create : [`ServerAdmin] Token.I.id -> string -> (# Cqrs.ctx, Id.t * Cqrs.Clock.t) Run.t
val count : [`ServerAdmin] Token.I.id -> (# Cqrs.ctx, int) Run.t
val persona_audience : unit -> (#Cqrs.ctx, String.Url.t list) Run.t
* Return a subset of all databases , ordered by identifier . Maximum count is [ 100000 ] .
val all :
limit:int ->
offset:int ->
[`ServerAdmin] Token.I.id ->
(# Cqrs.ctx, < id : Id.t ; label : string ; created : Time.t > list) Run.t
val ctx : Id.t -> (# Cqrs.ctx as 'ctx, 'ctx option) Run.t
|
6f2a738622b8c6bb9ae1b9069537c472e78a8dcf93fd7fbf0cef376d653aba24 | overminder/YAC | CPSTrans.hs | module Frontend.Scheme.CPSTrans (
runCPSTrans,
transformToplevel
) where
import Control.Monad
import Control.Monad.Trans
import Frontend.Scheme.AST
import Util.Temp
type CPSTransGen = TempGen
runCPSTrans :: CPSTransGen a -> TempGen a
runCPSTrans = id
gensym :: String -> CPSTransGen String
gensym s = do
i <- nextTemp
return $ s ++ show i
-- Turn a toplevel lambda into cps form
transformToplevel :: Expr -> CPSTransGen Expr
transformToplevel (ELambda upvals args body) = do
k <- gensym "$ScCont_"
newBody <- contWith (EVar k) body
return $ ELambda upvals (args ++ [k]) newBody
contWith :: Expr -> Expr -> CPSTransGen Expr
contWith k expr = case expr of
EAp func args ->
let funArgs = func:args
in if all isAtom funArgs
then do
-- (fun arg1 ... argN) -> (fun arg1 ... argN k)
return $ EAp func (args ++ [k])
else do
-- (fun (g arg) ... argN) ->
( g arg ( lambda ( $ ApCont_0 )
-- (fun $ApCont_0 ... argN k)))
hole <- gensym "$ApCont_"
let (fun':args', cExpr) = swapComplex funArgs (EVar hole)
e' <- contWith k (EAp fun' args')
contWith (ELambda [] [hole] e') cExpr
EIf cond ifTrue ifFalse ->
if isAtom cond
then do
-- (if atomCond (f x) y) ->
-- (if atomCond (f x k) (k y))
ifTrue' <- contWith k ifTrue
ifFalse' <- contWith k ifFalse
return $ EIf cond ifTrue' ifFalse'
else do
-- (if (f x) (g y) z ->
-- (f x (lambda ($IfCont_0)
-- (if $IfCont_0 (g y k) (k z))))
hole <- gensym "$IfCont_"
e' <- contWith k (EIf (EVar hole) ifTrue ifFalse)
contWith (ELambda [] [hole] e') cond
ESeq es ->
case es of
x:ys@(_:_) -> if isAtom x
then do
-- (begin
var1
-- (g x)) ->
-- ((lambda (_)
-- (g x k)) var1)
hole <- gensym "$Unused_"
rest <- contWith k (ESeq ys)
return $ EAp (ELambda [] [hole] rest) [x]
else do
-- (begin
-- (f x)
-- (g x)) ->
-- (f x (lambda (_)
-- (g x k)))
hole <- gensym "$Unused_"
rest <- contWith k (ESeq ys)
let newK = ELambda [] [hole] rest
contWith newK x
[x] -> contWith k x
[] -> return $ EAp k [EUnspecified] -- Correct?
EDefine name expr ->
if isAtom expr
then do
( define a 1 ) - > ( begin ( define a 1 ) ( k # < unspecified > ) )
return $ ESeq [EDefine name expr, EAp k [EUnspecified]]
else do
-- (define a (f x)) ->
-- (begin
( define a # < unbound > )
-- (f x (lambda ($Res)
-- (begin
-- (set! a $Res)
-- (k #<unspecified>)))))
hole <- gensym "$DefCont_"
let newK = ELambda [] [hole] (ESeq [ESete name (EVar hole),
EAp k [EUnspecified]])
rest <- contWith newK expr
return $ ESeq [EDefine name EUnbound, rest]
ESete name expr ->
if isAtom expr
then do
( set ! a 1 ) - > ( begin ( set ! a 1 ) ( k # < unspecified > ) )
return $ ESeq [ESete name expr, EAp k [EUnspecified]]
else do
-- (set! a (f x)) ->
-- (f x (lambda ($Res)
-- (begin
-- (set! a $res)
-- (k #<unspecified>))))
hole <- gensym "$SetCont_"
let newK = ELambda [] [hole] (ESeq [ESete name (EVar hole),
EAp k [EUnspecified]])
contWith newK expr
ELambda upvals args body -> do
-- (lambda (x y)
-- (+ x y)) ->
-- (k (lambda (x y $LamCont)
-- (+ x y $LamCont)))
lamK <- gensym "$LamCont_"
newBody <- contWith (EVar lamK) body
return $ EAp k [ELambda upvals (args ++ [lamK]) newBody]
_ ->
if isAtom expr
-- Atoms
-- v -> (k v)
-- XXX: do beta reduction here?
then
return $ EAp k [expr]
else
error $ "contWith: Unknown expr: " ++ show expr
mergeESeq :: Expr -> Expr -> Expr
mergeESeq e1 e2 = merged
where
flat1 = flattenESeq e1
flat2 = flattenESeq e2
flat = flat1 ++ flat2
merged = case flat of
[] -> ESeq []
[x] -> x
_ -> ESeq flat
swapComplex :: [Expr] -> Expr -> ([Expr], Expr)
swapComplex orig hole = (lhs ++ (hole:rhs), x)
where
(lhs, x:rhs) = span isAtom orig
| null | https://raw.githubusercontent.com/overminder/YAC/4a2633a81bc2d5880d1e7948b0cce0037ed788b1/Frontend/Scheme/CPSTrans.hs | haskell | Turn a toplevel lambda into cps form
(fun arg1 ... argN) -> (fun arg1 ... argN k)
(fun (g arg) ... argN) ->
(fun $ApCont_0 ... argN k)))
(if atomCond (f x) y) ->
(if atomCond (f x k) (k y))
(if (f x) (g y) z ->
(f x (lambda ($IfCont_0)
(if $IfCont_0 (g y k) (k z))))
(begin
(g x)) ->
((lambda (_)
(g x k)) var1)
(begin
(f x)
(g x)) ->
(f x (lambda (_)
(g x k)))
Correct?
(define a (f x)) ->
(begin
(f x (lambda ($Res)
(begin
(set! a $Res)
(k #<unspecified>)))))
(set! a (f x)) ->
(f x (lambda ($Res)
(begin
(set! a $res)
(k #<unspecified>))))
(lambda (x y)
(+ x y)) ->
(k (lambda (x y $LamCont)
(+ x y $LamCont)))
Atoms
v -> (k v)
XXX: do beta reduction here? | module Frontend.Scheme.CPSTrans (
runCPSTrans,
transformToplevel
) where
import Control.Monad
import Control.Monad.Trans
import Frontend.Scheme.AST
import Util.Temp
type CPSTransGen = TempGen
runCPSTrans :: CPSTransGen a -> TempGen a
runCPSTrans = id
gensym :: String -> CPSTransGen String
gensym s = do
i <- nextTemp
return $ s ++ show i
transformToplevel :: Expr -> CPSTransGen Expr
transformToplevel (ELambda upvals args body) = do
k <- gensym "$ScCont_"
newBody <- contWith (EVar k) body
return $ ELambda upvals (args ++ [k]) newBody
contWith :: Expr -> Expr -> CPSTransGen Expr
contWith k expr = case expr of
EAp func args ->
let funArgs = func:args
in if all isAtom funArgs
then do
return $ EAp func (args ++ [k])
else do
( g arg ( lambda ( $ ApCont_0 )
hole <- gensym "$ApCont_"
let (fun':args', cExpr) = swapComplex funArgs (EVar hole)
e' <- contWith k (EAp fun' args')
contWith (ELambda [] [hole] e') cExpr
EIf cond ifTrue ifFalse ->
if isAtom cond
then do
ifTrue' <- contWith k ifTrue
ifFalse' <- contWith k ifFalse
return $ EIf cond ifTrue' ifFalse'
else do
hole <- gensym "$IfCont_"
e' <- contWith k (EIf (EVar hole) ifTrue ifFalse)
contWith (ELambda [] [hole] e') cond
ESeq es ->
case es of
x:ys@(_:_) -> if isAtom x
then do
var1
hole <- gensym "$Unused_"
rest <- contWith k (ESeq ys)
return $ EAp (ELambda [] [hole] rest) [x]
else do
hole <- gensym "$Unused_"
rest <- contWith k (ESeq ys)
let newK = ELambda [] [hole] rest
contWith newK x
[x] -> contWith k x
EDefine name expr ->
if isAtom expr
then do
( define a 1 ) - > ( begin ( define a 1 ) ( k # < unspecified > ) )
return $ ESeq [EDefine name expr, EAp k [EUnspecified]]
else do
( define a # < unbound > )
hole <- gensym "$DefCont_"
let newK = ELambda [] [hole] (ESeq [ESete name (EVar hole),
EAp k [EUnspecified]])
rest <- contWith newK expr
return $ ESeq [EDefine name EUnbound, rest]
ESete name expr ->
if isAtom expr
then do
( set ! a 1 ) - > ( begin ( set ! a 1 ) ( k # < unspecified > ) )
return $ ESeq [ESete name expr, EAp k [EUnspecified]]
else do
hole <- gensym "$SetCont_"
let newK = ELambda [] [hole] (ESeq [ESete name (EVar hole),
EAp k [EUnspecified]])
contWith newK expr
ELambda upvals args body -> do
lamK <- gensym "$LamCont_"
newBody <- contWith (EVar lamK) body
return $ EAp k [ELambda upvals (args ++ [lamK]) newBody]
_ ->
if isAtom expr
then
return $ EAp k [expr]
else
error $ "contWith: Unknown expr: " ++ show expr
mergeESeq :: Expr -> Expr -> Expr
mergeESeq e1 e2 = merged
where
flat1 = flattenESeq e1
flat2 = flattenESeq e2
flat = flat1 ++ flat2
merged = case flat of
[] -> ESeq []
[x] -> x
_ -> ESeq flat
swapComplex :: [Expr] -> Expr -> ([Expr], Expr)
swapComplex orig hole = (lhs ++ (hole:rhs), x)
where
(lhs, x:rhs) = span isAtom orig
|
8f2dd8530471c0f2eb8ae39f4d62b622de47af99e5b31c88f33f43e58b411ee8 | mnxn/jsonoo | jsonoo.mli | include Jsonoo_intf.Jsonoo
* @inline
| null | https://raw.githubusercontent.com/mnxn/jsonoo/7e4d0b66e6188e58106908b9d64a61e93dfeece9/jsonoo.mli | ocaml | include Jsonoo_intf.Jsonoo
* @inline
| |
bd273a5a15e6f00d01eabe9c9f5e3c98077d72d70c896027fb0b4b8aafb4f795 | hemmi/coq2scala | environ.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Names
open Term
open Declarations
open Sign
(** Unsafe environments. We define here a datatype for environments.
Since typing is not yet defined, it is not possible to check the
informations added in environments, and that is why we speak here
of ``unsafe'' environments. *)
* Environments have the following components :
- a context for variables
- a context for variables vm values
- a context for section variables and goal assumptions
- a context for section variables and goal assumptions vm values
- a context for global constants and axioms
- a context for inductive definitions
- a set of universe constraints
- a flag telling if Set is , can be , or can not be set impredicative
- a context for de Bruijn variables
- a context for de Bruijn variables vm values
- a context for section variables and goal assumptions
- a context for section variables and goal assumptions vm values
- a context for global constants and axioms
- a context for inductive definitions
- a set of universe constraints
- a flag telling if Set is, can be, or cannot be set impredicative *)
type env
val pre_env : env -> Pre_env.env
val env_of_pre_env : Pre_env.env -> env
type named_context_val
val eq_named_context_val : named_context_val -> named_context_val -> bool
val empty_env : env
val universes : env -> Univ.universes
val rel_context : env -> rel_context
val named_context : env -> named_context
val named_context_val : env -> named_context_val
val engagement : env -> engagement option
(** is the local context empty *)
val empty_context : env -> bool
* { 5 Context of de Bruijn variables ( [ rel_context ] ) }
val nb_rel : env -> int
val push_rel : rel_declaration -> env -> env
val push_rel_context : rel_context -> env -> env
val push_rec_types : rec_declaration -> env -> env
(** Looks up in the context of local vars referred by indice ([rel_context])
raises [Not_found] if the index points out of the context *)
val lookup_rel : int -> env -> rel_declaration
val evaluable_rel : int -> env -> bool
* { 6 Recurrence on [ rel_context ] }
val fold_rel_context :
(env -> rel_declaration -> 'a -> 'a) -> env -> init:'a -> 'a
* { 5 Context of variables ( section variables and goal assumptions ) }
val named_context_of_val : named_context_val -> named_context
val named_vals_of_val : named_context_val -> Pre_env.named_vals
val val_of_named_context : named_context -> named_context_val
val empty_named_context_val : named_context_val
(** [map_named_val f ctxt] apply [f] to the body and the type of
each declarations.
*** /!\ *** [f t] should be convertible with t *)
val map_named_val :
(constr -> constr) -> named_context_val -> named_context_val
val push_named : named_declaration -> env -> env
val push_named_context_val :
named_declaration -> named_context_val -> named_context_val
(** Looks up in the context of local vars referred by names ([named_context])
raises [Not_found] if the identifier is not found *)
val lookup_named : variable -> env -> named_declaration
val lookup_named_val : variable -> named_context_val -> named_declaration
val evaluable_named : variable -> env -> bool
val named_type : variable -> env -> types
val named_body : variable -> env -> constr option
* { 6 Recurrence on [ named_context ] : older declarations processed first }
val fold_named_context :
(env -> named_declaration -> 'a -> 'a) -> env -> init:'a -> 'a
* Recurrence on [ named_context ] starting from younger
val fold_named_context_reverse :
('a -> named_declaration -> 'a) -> init:'a -> env -> 'a
(** This forgets named and rel contexts *)
val reset_context : env -> env
(** This forgets rel context and sets a new named context *)
val reset_with_named_context : named_context_val -> env -> env
* { 5 Global constants }
{ 6 Add entries to global environment }
{6 Add entries to global environment } *)
val add_constant : constant -> constant_body -> env -> env
(** Looks up in the context of global constant names
raises [Not_found] if the required path is not found *)
val lookup_constant : constant -> env -> constant_body
val evaluable_constant : constant -> env -> bool
* { 6 ... }
* [ constant_value env c ] raises [ NotEvaluableConst Opaque ] if
[ c ] is opaque and [ ] if it has no
body and [ Not_found ] if it does not exist in [ env ]
[c] is opaque and [NotEvaluableConst NoBody] if it has no
body and [Not_found] if it does not exist in [env] *)
type const_evaluation_result = NoBody | Opaque
exception NotEvaluableConst of const_evaluation_result
val constant_value : env -> constant -> constr
val constant_type : env -> constant -> constant_type
val constant_opt_value : env -> constant -> constr option
* { 5 Inductive types }
val add_mind : mutual_inductive -> mutual_inductive_body -> env -> env
(** Looks up in the context of global inductive names
raises [Not_found] if the required path is not found *)
val lookup_mind : mutual_inductive -> env -> mutual_inductive_body
* { 5 Modules }
val add_modtype : module_path -> module_type_body -> env -> env
(** [shallow_add_module] does not add module components *)
val shallow_add_module : module_path -> module_body -> env -> env
val lookup_module : module_path -> env -> module_body
val lookup_modtype : module_path -> env -> module_type_body
* { 5 Universe constraints }
val add_constraints : Univ.constraints -> env -> env
val set_engagement : engagement -> env -> env
* { 6 Sets of referred section variables }
[ global_vars_set env c ] returns the list of [ i d ] 's occurring either
directly as [ Var i d ] in [ c ] or indirectly as a section variable
dependent in a global reference occurring in [ c ]
[global_vars_set env c] returns the list of [id]'s occurring either
directly as [Var id] in [c] or indirectly as a section variable
dependent in a global reference occurring in [c] *)
val global_vars_set : env -> constr -> Idset.t
(** the constr must be a global reference *)
val vars_of_global : env -> constr -> identifier list
val keep_hyps : env -> Idset.t -> section_context
* { 5 Unsafe judgments . }
We introduce here the pre - type of judgments , which is
actually only a datatype to store a term with its type and the type of its
type .
We introduce here the pre-type of judgments, which is
actually only a datatype to store a term with its type and the type of its
type. *)
type unsafe_judgment = {
uj_val : constr;
uj_type : types }
val make_judge : constr -> types -> unsafe_judgment
val j_val : unsafe_judgment -> constr
val j_type : unsafe_judgment -> types
type unsafe_type_judgment = {
utj_val : constr;
utj_type : sorts }
(** {6 Compilation of global declaration } *)
val compile_constant_body : env -> constant_def -> Cemitcodes.body_code
exception Hyp_not_found
(** [apply_to_hyp sign id f] split [sign] into [tail::(id,_,_)::head] and
return [tail::(f head (id,_,_) (rev tail))::head].
the value associated to id should not change *)
val apply_to_hyp : named_context_val -> variable ->
(named_context -> named_declaration -> named_context -> named_declaration) ->
named_context_val
(** [apply_to_hyp_and_dependent_on sign id f g] split [sign] into
[tail::(id,_,_)::head] and
return [(g tail)::(f (id,_,_))::head]. *)
val apply_to_hyp_and_dependent_on : named_context_val -> variable ->
(named_declaration -> named_context_val -> named_declaration) ->
(named_declaration -> named_context_val -> named_declaration) ->
named_context_val
val insert_after_hyp : named_context_val -> variable ->
named_declaration ->
(named_context -> unit) -> named_context_val
val remove_hyps : identifier list -> (named_declaration -> named_declaration) -> (Pre_env.lazy_val -> Pre_env.lazy_val) -> named_context_val -> named_context_val
open Retroknowledge
(** functions manipulating the retroknowledge
@author spiwack *)
val retroknowledge : (retroknowledge->'a) -> env -> 'a
val registered : env -> field -> bool
val unregister : env -> field -> env
val register : env -> field -> Retroknowledge.entry -> env
| null | https://raw.githubusercontent.com/hemmi/coq2scala/d10f441c18146933a99bf2088116bd213ac3648d/coq-8.4pl2-old/kernel/environ.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Unsafe environments. We define here a datatype for environments.
Since typing is not yet defined, it is not possible to check the
informations added in environments, and that is why we speak here
of ``unsafe'' environments.
* is the local context empty
* Looks up in the context of local vars referred by indice ([rel_context])
raises [Not_found] if the index points out of the context
* [map_named_val f ctxt] apply [f] to the body and the type of
each declarations.
*** /!\ *** [f t] should be convertible with t
* Looks up in the context of local vars referred by names ([named_context])
raises [Not_found] if the identifier is not found
* This forgets named and rel contexts
* This forgets rel context and sets a new named context
* Looks up in the context of global constant names
raises [Not_found] if the required path is not found
* Looks up in the context of global inductive names
raises [Not_found] if the required path is not found
* [shallow_add_module] does not add module components
* the constr must be a global reference
* {6 Compilation of global declaration }
* [apply_to_hyp sign id f] split [sign] into [tail::(id,_,_)::head] and
return [tail::(f head (id,_,_) (rev tail))::head].
the value associated to id should not change
* [apply_to_hyp_and_dependent_on sign id f g] split [sign] into
[tail::(id,_,_)::head] and
return [(g tail)::(f (id,_,_))::head].
* functions manipulating the retroknowledge
@author spiwack | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Names
open Term
open Declarations
open Sign
* Environments have the following components :
- a context for variables
- a context for variables vm values
- a context for section variables and goal assumptions
- a context for section variables and goal assumptions vm values
- a context for global constants and axioms
- a context for inductive definitions
- a set of universe constraints
- a flag telling if Set is , can be , or can not be set impredicative
- a context for de Bruijn variables
- a context for de Bruijn variables vm values
- a context for section variables and goal assumptions
- a context for section variables and goal assumptions vm values
- a context for global constants and axioms
- a context for inductive definitions
- a set of universe constraints
- a flag telling if Set is, can be, or cannot be set impredicative *)
type env
val pre_env : env -> Pre_env.env
val env_of_pre_env : Pre_env.env -> env
type named_context_val
val eq_named_context_val : named_context_val -> named_context_val -> bool
val empty_env : env
val universes : env -> Univ.universes
val rel_context : env -> rel_context
val named_context : env -> named_context
val named_context_val : env -> named_context_val
val engagement : env -> engagement option
val empty_context : env -> bool
* { 5 Context of de Bruijn variables ( [ rel_context ] ) }
val nb_rel : env -> int
val push_rel : rel_declaration -> env -> env
val push_rel_context : rel_context -> env -> env
val push_rec_types : rec_declaration -> env -> env
val lookup_rel : int -> env -> rel_declaration
val evaluable_rel : int -> env -> bool
* { 6 Recurrence on [ rel_context ] }
val fold_rel_context :
(env -> rel_declaration -> 'a -> 'a) -> env -> init:'a -> 'a
* { 5 Context of variables ( section variables and goal assumptions ) }
val named_context_of_val : named_context_val -> named_context
val named_vals_of_val : named_context_val -> Pre_env.named_vals
val val_of_named_context : named_context -> named_context_val
val empty_named_context_val : named_context_val
val map_named_val :
(constr -> constr) -> named_context_val -> named_context_val
val push_named : named_declaration -> env -> env
val push_named_context_val :
named_declaration -> named_context_val -> named_context_val
val lookup_named : variable -> env -> named_declaration
val lookup_named_val : variable -> named_context_val -> named_declaration
val evaluable_named : variable -> env -> bool
val named_type : variable -> env -> types
val named_body : variable -> env -> constr option
* { 6 Recurrence on [ named_context ] : older declarations processed first }
val fold_named_context :
(env -> named_declaration -> 'a -> 'a) -> env -> init:'a -> 'a
* Recurrence on [ named_context ] starting from younger
val fold_named_context_reverse :
('a -> named_declaration -> 'a) -> init:'a -> env -> 'a
val reset_context : env -> env
val reset_with_named_context : named_context_val -> env -> env
* { 5 Global constants }
{ 6 Add entries to global environment }
{6 Add entries to global environment } *)
val add_constant : constant -> constant_body -> env -> env
val lookup_constant : constant -> env -> constant_body
val evaluable_constant : constant -> env -> bool
* { 6 ... }
* [ constant_value env c ] raises [ NotEvaluableConst Opaque ] if
[ c ] is opaque and [ ] if it has no
body and [ Not_found ] if it does not exist in [ env ]
[c] is opaque and [NotEvaluableConst NoBody] if it has no
body and [Not_found] if it does not exist in [env] *)
type const_evaluation_result = NoBody | Opaque
exception NotEvaluableConst of const_evaluation_result
val constant_value : env -> constant -> constr
val constant_type : env -> constant -> constant_type
val constant_opt_value : env -> constant -> constr option
* { 5 Inductive types }
val add_mind : mutual_inductive -> mutual_inductive_body -> env -> env
val lookup_mind : mutual_inductive -> env -> mutual_inductive_body
* { 5 Modules }
val add_modtype : module_path -> module_type_body -> env -> env
val shallow_add_module : module_path -> module_body -> env -> env
val lookup_module : module_path -> env -> module_body
val lookup_modtype : module_path -> env -> module_type_body
* { 5 Universe constraints }
val add_constraints : Univ.constraints -> env -> env
val set_engagement : engagement -> env -> env
* { 6 Sets of referred section variables }
[ global_vars_set env c ] returns the list of [ i d ] 's occurring either
directly as [ Var i d ] in [ c ] or indirectly as a section variable
dependent in a global reference occurring in [ c ]
[global_vars_set env c] returns the list of [id]'s occurring either
directly as [Var id] in [c] or indirectly as a section variable
dependent in a global reference occurring in [c] *)
val global_vars_set : env -> constr -> Idset.t
val vars_of_global : env -> constr -> identifier list
val keep_hyps : env -> Idset.t -> section_context
* { 5 Unsafe judgments . }
We introduce here the pre - type of judgments , which is
actually only a datatype to store a term with its type and the type of its
type .
We introduce here the pre-type of judgments, which is
actually only a datatype to store a term with its type and the type of its
type. *)
type unsafe_judgment = {
uj_val : constr;
uj_type : types }
val make_judge : constr -> types -> unsafe_judgment
val j_val : unsafe_judgment -> constr
val j_type : unsafe_judgment -> types
type unsafe_type_judgment = {
utj_val : constr;
utj_type : sorts }
val compile_constant_body : env -> constant_def -> Cemitcodes.body_code
exception Hyp_not_found
val apply_to_hyp : named_context_val -> variable ->
(named_context -> named_declaration -> named_context -> named_declaration) ->
named_context_val
val apply_to_hyp_and_dependent_on : named_context_val -> variable ->
(named_declaration -> named_context_val -> named_declaration) ->
(named_declaration -> named_context_val -> named_declaration) ->
named_context_val
val insert_after_hyp : named_context_val -> variable ->
named_declaration ->
(named_context -> unit) -> named_context_val
val remove_hyps : identifier list -> (named_declaration -> named_declaration) -> (Pre_env.lazy_val -> Pre_env.lazy_val) -> named_context_val -> named_context_val
open Retroknowledge
val retroknowledge : (retroknowledge->'a) -> env -> 'a
val registered : env -> field -> bool
val unregister : env -> field -> env
val register : env -> field -> Retroknowledge.entry -> env
|
c848bf4d2dcad7e65d2e5a4b16ebe3b69bc4162af711d74d92531b38ebbde2f0 | cedlemo/OCaml-GI-ctypes-bindings-generator | Corner_type.mli | open Ctypes
type t = Top_left | Bottom_left | Top_right | Bottom_right
val of_value:
Unsigned.uint32 -> t
val to_value:
t -> Unsigned.uint32
val t_view: t typ
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Corner_type.mli | ocaml | open Ctypes
type t = Top_left | Bottom_left | Top_right | Bottom_right
val of_value:
Unsigned.uint32 -> t
val to_value:
t -> Unsigned.uint32
val t_view: t typ
| |
a72468630cfeb52e6f76e848f297725520aca1775a110f83956a9407b9ba732c | unison-code/unison | Main.hs | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
-}
Main authors :
< >
Contributing authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
Contributing authors:
Daniel Lundén <>
This file is part of Unison, see -code.github.io
-}
module Main (main) where
import SpecsGen.Driver
main = runSpecsGen id (\_ _ -> return ())
| null | https://raw.githubusercontent.com/unison-code/unison/9f8caf78230f956a57b50a327f8d1dca5839bf64/src/unison-specsgen/exec/specsgen/Main.hs | haskell | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
-}
Main authors :
< >
Contributing authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
Contributing authors:
Daniel Lundén <>
This file is part of Unison, see -code.github.io
-}
module Main (main) where
import SpecsGen.Driver
main = runSpecsGen id (\_ _ -> return ())
| |
ba148fa250da441582d166a8d9d24610e27cd51cfc890643cd91d1d12c12dbfd | S8A/htdp-exercises | ex477.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex477) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
; [List-of X] -> [List-of [List-of X]]
; creates a list of all rearrangements of the items in w
; by prepending each item in w to the arrangements of w
; without said item
termination the function removes one item from w on each
; recursive call, therefore it will reach '() at some point
(define (arrangements w)
(cond
[(empty? w) '(())]
[else
(foldr (lambda (item others)
(local ((define without-item
(arrangements (remove item w)))
(define add-item-to-front
(map (lambda (a) (cons item a))
without-item)))
(append add-item-to-front others)))
'()
w)]))
(define (all-words-from-rat? w)
(and (member (explode "rat") w)
(member (explode "art") w)
(member (explode "tar") w)))
(check-satisfied (arrangements '("r" "a" "t"))
all-words-from-rat?)
; Q: What is a trivially solvable problem?
; A: Producing the arrangements of an empty list.
; Q: How are trivial solutions solved?
; A: An empty list has no arrangements (returns the empty list).
; Q: How does the algorithm generate new problems that are more easily
solvable than the original one ? Is there one new problem that we generate or
; are there several?
A : We have two new problems : finding the arrangements of w without a
; given item and adding the given item to the beginning of each of those
arrangements . This must be done for each item in w.
Q : Is the solution of the given problem the same as the solution of ( one of )
; the new problems? Or, do we need to combine the solutions to create a
; solution for the original problem? And, if so, do we need anything from
; the original problem data?
A : As explained above , we need to solve two problems for each item in w and
; then combine their solutions in a list. Therefore we use foldr to append
the final solutions one after the other . To solve the first problem
; we just have to recursively call arrangements on w with the given item
removed ( using a built - in function ) . To solve the second problem we use
the map function to cons the given item into the result of the first problem .
| null | https://raw.githubusercontent.com/S8A/htdp-exercises/578e49834a9513f29ef81b7589b28081c5e0b69f/ex477.rkt | racket | about the language level of this file in a form that our tools can easily process.
[List-of X] -> [List-of [List-of X]]
creates a list of all rearrangements of the items in w
by prepending each item in w to the arrangements of w
without said item
recursive call, therefore it will reach '() at some point
Q: What is a trivially solvable problem?
A: Producing the arrangements of an empty list.
Q: How are trivial solutions solved?
A: An empty list has no arrangements (returns the empty list).
Q: How does the algorithm generate new problems that are more easily
are there several?
given item and adding the given item to the beginning of each of those
the new problems? Or, do we need to combine the solutions to create a
solution for the original problem? And, if so, do we need anything from
the original problem data?
then combine their solutions in a list. Therefore we use foldr to append
we just have to recursively call arrangements on w with the given item | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex477) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
termination the function removes one item from w on each
(define (arrangements w)
(cond
[(empty? w) '(())]
[else
(foldr (lambda (item others)
(local ((define without-item
(arrangements (remove item w)))
(define add-item-to-front
(map (lambda (a) (cons item a))
without-item)))
(append add-item-to-front others)))
'()
w)]))
(define (all-words-from-rat? w)
(and (member (explode "rat") w)
(member (explode "art") w)
(member (explode "tar") w)))
(check-satisfied (arrangements '("r" "a" "t"))
all-words-from-rat?)
solvable than the original one ? Is there one new problem that we generate or
A : We have two new problems : finding the arrangements of w without a
arrangements . This must be done for each item in w.
Q : Is the solution of the given problem the same as the solution of ( one of )
A : As explained above , we need to solve two problems for each item in w and
the final solutions one after the other . To solve the first problem
removed ( using a built - in function ) . To solve the second problem we use
the map function to cons the given item into the result of the first problem .
|
2a4ecfb13643ba6e18d559d20d74d4b8319a92f4830bc5538584274f1417d8fd | qkrgud55/ocamlmulti | topdirs.ml | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : topdirs.ml 12661 2012 - 07 - 07 11:41:17Z scherer $
Toplevel directives
open Format
open Misc
open Longident
open Path
open Types
open Cmo_format
open Trace
open Toploop
(* The standard output formatter *)
let std_out = std_formatter
(* To quit *)
let dir_quit () = exit 0
let _ = Hashtbl.add directive_table "quit" (Directive_none dir_quit)
(* To add a directory to the load path *)
let dir_directory s =
let d = expand_directory Config.standard_library s in
Config.load_path := d :: !Config.load_path;
Dll.add_path [d]
let _ = Hashtbl.add directive_table "directory" (Directive_string dir_directory)
(* To remove a directory from the load path *)
let dir_remove_directory s =
let d = expand_directory Config.standard_library s in
Config.load_path := List.filter (fun d' -> d' <> d) !Config.load_path;
Dll.remove_path [d]
let _ =
Hashtbl.add directive_table "remove_directory"
(Directive_string dir_remove_directory)
(* To change the current directory *)
let dir_cd s = Sys.chdir s
let _ = Hashtbl.add directive_table "cd" (Directive_string dir_cd)
(* Load in-core a .cmo file *)
exception Load_failed
let check_consistency ppf filename cu =
try
List.iter
(fun (name, crc) -> Consistbl.check Env.crc_units name crc filename)
cu.cu_imports
with Consistbl.Inconsistency(name, user, auth) ->
fprintf ppf "@[<hv 0>The files %s@ and %s@ \
disagree over interface %s@]@."
user auth name;
raise Load_failed
let load_compunit ic filename ppf compunit =
check_consistency ppf filename compunit;
seek_in ic compunit.cu_pos;
let code_size = compunit.cu_codesize + 8 in
let code = Meta.static_alloc code_size in
unsafe_really_input ic code 0 compunit.cu_codesize;
String.unsafe_set code compunit.cu_codesize (Char.chr Opcodes.opRETURN);
String.unsafe_blit "\000\000\000\001\000\000\000" 0
code (compunit.cu_codesize + 1) 7;
let initial_symtable = Symtable.current_state() in
Symtable.patch_object code compunit.cu_reloc;
Symtable.update_global_table();
begin try
may_trace := true;
ignore((Meta.reify_bytecode code code_size) ());
may_trace := false;
with exn ->
may_trace := false;
Symtable.restore_state initial_symtable;
print_exception_outcome ppf exn;
raise Load_failed
end
let rec load_file recursive ppf name =
let filename = try Some (find_in_path !Config.load_path name) with Not_found -> None in
match filename with
| None -> fprintf ppf "Cannot find file %s.@." name; false
| Some filename ->
let ic = open_in_bin filename in
try
let success = really_load_file recursive ppf name filename ic in
close_in ic;
success
with exn ->
close_in ic;
raise exn
and really_load_file recursive ppf name filename ic =
let ic = open_in_bin filename in
let buffer = Misc.input_bytes ic (String.length Config.cmo_magic_number) in
try
if buffer = Config.cmo_magic_number then begin
let compunit_pos = input_binary_int ic in (* Go to descriptor *)
seek_in ic compunit_pos;
let cu : compilation_unit = input_value ic in
if recursive then
List.iter
(function
| (Reloc_getglobal id, _) when not (Symtable.is_global_defined id) ->
let file = Ident.name id ^ ".cmo" in
begin match try Some (Misc.find_in_path_uncap !Config.load_path file) with Not_found -> None with
| None -> ()
| Some file -> if not (load_file recursive ppf file) then raise Load_failed
end
| _ -> ()
)
cu.cu_reloc;
load_compunit ic filename ppf cu;
true
end else
if buffer = Config.cma_magic_number then begin
let toc_pos = input_binary_int ic in (* Go to table of contents *)
seek_in ic toc_pos;
let lib = (input_value ic : library) in
List.iter
(fun dllib ->
let name = Dll.extract_dll_name dllib in
try Dll.open_dlls Dll.For_execution [name]
with Failure reason ->
fprintf ppf
"Cannot load required shared library %s.@.Reason: %s.@."
name reason;
raise Load_failed)
lib.lib_dllibs;
List.iter (load_compunit ic filename ppf) lib.lib_units;
true
end else begin
fprintf ppf "File %s is not a bytecode object file.@." name;
false
end
with Load_failed -> false
let dir_load ppf name = ignore (load_file false ppf name)
let _ = Hashtbl.add directive_table "load" (Directive_string (dir_load std_out))
let dir_load_rec ppf name = ignore (load_file true ppf name)
let _ = Hashtbl.add directive_table "load_rec" (Directive_string (dir_load_rec std_out))
let load_file = load_file false
(* Load commands from a file *)
let dir_use ppf name = ignore(Toploop.use_file ppf name)
let _ = Hashtbl.add directive_table "use" (Directive_string (dir_use std_out))
(* Install, remove a printer *)
type 'a printer_type_new = Format.formatter -> 'a -> unit
type 'a printer_type_old = 'a -> unit
let match_printer_type ppf desc typename =
let (printer_type, _) =
try
Env.lookup_type (Ldot(Lident "Topdirs", typename)) !toplevel_env
with Not_found ->
fprintf ppf "Cannot find type Topdirs.%s.@." typename;
raise Exit in
Ctype.init_def(Ident.current_time());
Ctype.begin_def();
let ty_arg = Ctype.newvar() in
Ctype.unify !toplevel_env
(Ctype.newconstr printer_type [ty_arg])
(Ctype.instance_def desc.val_type);
Ctype.end_def();
Ctype.generalize ty_arg;
ty_arg
let find_printer_type ppf lid =
try
let (path, desc) = Env.lookup_value lid !toplevel_env in
let (ty_arg, is_old_style) =
try
(match_printer_type ppf desc "printer_type_new", false)
with Ctype.Unify _ ->
(match_printer_type ppf desc "printer_type_old", true) in
(ty_arg, path, is_old_style)
with
| Not_found ->
fprintf ppf "Unbound value %a.@." Printtyp.longident lid;
raise Exit
| Ctype.Unify _ ->
fprintf ppf "%a has a wrong type for a printing function.@."
Printtyp.longident lid;
raise Exit
let dir_install_printer ppf lid =
try
let (ty_arg, path, is_old_style) = find_printer_type ppf lid in
let v = eval_path path in
let print_function =
if is_old_style then
(fun formatter repr -> Obj.obj v (Obj.obj repr))
else
(fun formatter repr -> Obj.obj v formatter (Obj.obj repr)) in
install_printer path ty_arg print_function
with Exit -> ()
let dir_remove_printer ppf lid =
try
let (ty_arg, path, is_old_style) = find_printer_type ppf lid in
begin try
remove_printer path
with Not_found ->
fprintf ppf "No printer named %a.@." Printtyp.longident lid
end
with Exit -> ()
let _ = Hashtbl.add directive_table "install_printer"
(Directive_ident (dir_install_printer std_out))
let _ = Hashtbl.add directive_table "remove_printer"
(Directive_ident (dir_remove_printer std_out))
(* The trace *)
external current_environment: unit -> Obj.t = "caml_get_current_environment"
let tracing_function_ptr =
get_code_pointer
(Obj.repr (fun arg -> Trace.print_trace (current_environment()) arg))
let dir_trace ppf lid =
try
let (path, desc) = Env.lookup_value lid !toplevel_env in
(* Check if this is a primitive *)
match desc.val_kind with
| Val_prim p ->
fprintf ppf "%a is an external function and cannot be traced.@."
Printtyp.longident lid
| _ ->
let clos = eval_path path in
(* Nothing to do if it's not a closure *)
if Obj.is_block clos
&& (Obj.tag clos = Obj.closure_tag || Obj.tag clos = Obj.infix_tag)
then begin
match is_traced clos with
| Some opath ->
fprintf ppf "%a is already traced (under the name %a).@."
Printtyp.path path
Printtyp.path opath
| None ->
(* Instrument the old closure *)
traced_functions :=
{ path = path;
closure = clos;
actual_code = get_code_pointer clos;
instrumented_fun =
instrument_closure !toplevel_env lid ppf desc.val_type }
:: !traced_functions;
(* Redirect the code field of the closure to point
to the instrumentation function *)
set_code_pointer clos tracing_function_ptr;
fprintf ppf "%a is now traced.@." Printtyp.longident lid
end else fprintf ppf "%a is not a function.@." Printtyp.longident lid
with
| Not_found -> fprintf ppf "Unbound value %a.@." Printtyp.longident lid
let dir_untrace ppf lid =
try
let (path, desc) = Env.lookup_value lid !toplevel_env in
let rec remove = function
| [] ->
fprintf ppf "%a was not traced.@." Printtyp.longident lid;
[]
| f :: rem ->
if Path.same f.path path then begin
set_code_pointer f.closure f.actual_code;
fprintf ppf "%a is no longer traced.@." Printtyp.longident lid;
rem
end else f :: remove rem in
traced_functions := remove !traced_functions
with
| Not_found -> fprintf ppf "Unbound value %a.@." Printtyp.longident lid
let dir_untrace_all ppf () =
List.iter
(fun f ->
set_code_pointer f.closure f.actual_code;
fprintf ppf "%a is no longer traced.@." Printtyp.path f.path)
!traced_functions;
traced_functions := []
let parse_warnings ppf iserr s =
try Warnings.parse_options iserr s
with Arg.Bad err -> fprintf ppf "%s.@." err
let _ =
Hashtbl.add directive_table "trace" (Directive_ident (dir_trace std_out));
Hashtbl.add directive_table "untrace" (Directive_ident (dir_untrace std_out));
Hashtbl.add directive_table
"untrace_all" (Directive_none (dir_untrace_all std_out));
(* Control the printing of values *)
Hashtbl.add directive_table "print_depth"
(Directive_int(fun n -> max_printer_depth := n));
Hashtbl.add directive_table "print_length"
(Directive_int(fun n -> max_printer_steps := n));
(* Set various compiler flags *)
Hashtbl.add directive_table "labels"
(Directive_bool(fun b -> Clflags.classic := not b));
Hashtbl.add directive_table "principal"
(Directive_bool(fun b -> Clflags.principal := b));
Hashtbl.add directive_table "rectypes"
(Directive_none(fun () -> Clflags.recursive_types := true));
Hashtbl.add directive_table "warnings"
(Directive_string (parse_warnings std_out false));
Hashtbl.add directive_table "warn_error"
(Directive_string (parse_warnings std_out true))
| null | https://raw.githubusercontent.com/qkrgud55/ocamlmulti/74fe84df0ce7be5ee03fb4ac0520fb3e9f4b6d1f/toplevel/topdirs.ml | ocaml | *********************************************************************
OCaml
*********************************************************************
The standard output formatter
To quit
To add a directory to the load path
To remove a directory from the load path
To change the current directory
Load in-core a .cmo file
Go to descriptor
Go to table of contents
Load commands from a file
Install, remove a printer
The trace
Check if this is a primitive
Nothing to do if it's not a closure
Instrument the old closure
Redirect the code field of the closure to point
to the instrumentation function
Control the printing of values
Set various compiler flags | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : topdirs.ml 12661 2012 - 07 - 07 11:41:17Z scherer $
Toplevel directives
open Format
open Misc
open Longident
open Path
open Types
open Cmo_format
open Trace
open Toploop
let std_out = std_formatter
let dir_quit () = exit 0
let _ = Hashtbl.add directive_table "quit" (Directive_none dir_quit)
let dir_directory s =
let d = expand_directory Config.standard_library s in
Config.load_path := d :: !Config.load_path;
Dll.add_path [d]
let _ = Hashtbl.add directive_table "directory" (Directive_string dir_directory)
let dir_remove_directory s =
let d = expand_directory Config.standard_library s in
Config.load_path := List.filter (fun d' -> d' <> d) !Config.load_path;
Dll.remove_path [d]
let _ =
Hashtbl.add directive_table "remove_directory"
(Directive_string dir_remove_directory)
let dir_cd s = Sys.chdir s
let _ = Hashtbl.add directive_table "cd" (Directive_string dir_cd)
exception Load_failed
let check_consistency ppf filename cu =
try
List.iter
(fun (name, crc) -> Consistbl.check Env.crc_units name crc filename)
cu.cu_imports
with Consistbl.Inconsistency(name, user, auth) ->
fprintf ppf "@[<hv 0>The files %s@ and %s@ \
disagree over interface %s@]@."
user auth name;
raise Load_failed
let load_compunit ic filename ppf compunit =
check_consistency ppf filename compunit;
seek_in ic compunit.cu_pos;
let code_size = compunit.cu_codesize + 8 in
let code = Meta.static_alloc code_size in
unsafe_really_input ic code 0 compunit.cu_codesize;
String.unsafe_set code compunit.cu_codesize (Char.chr Opcodes.opRETURN);
String.unsafe_blit "\000\000\000\001\000\000\000" 0
code (compunit.cu_codesize + 1) 7;
let initial_symtable = Symtable.current_state() in
Symtable.patch_object code compunit.cu_reloc;
Symtable.update_global_table();
begin try
may_trace := true;
ignore((Meta.reify_bytecode code code_size) ());
may_trace := false;
with exn ->
may_trace := false;
Symtable.restore_state initial_symtable;
print_exception_outcome ppf exn;
raise Load_failed
end
let rec load_file recursive ppf name =
let filename = try Some (find_in_path !Config.load_path name) with Not_found -> None in
match filename with
| None -> fprintf ppf "Cannot find file %s.@." name; false
| Some filename ->
let ic = open_in_bin filename in
try
let success = really_load_file recursive ppf name filename ic in
close_in ic;
success
with exn ->
close_in ic;
raise exn
and really_load_file recursive ppf name filename ic =
let ic = open_in_bin filename in
let buffer = Misc.input_bytes ic (String.length Config.cmo_magic_number) in
try
if buffer = Config.cmo_magic_number then begin
seek_in ic compunit_pos;
let cu : compilation_unit = input_value ic in
if recursive then
List.iter
(function
| (Reloc_getglobal id, _) when not (Symtable.is_global_defined id) ->
let file = Ident.name id ^ ".cmo" in
begin match try Some (Misc.find_in_path_uncap !Config.load_path file) with Not_found -> None with
| None -> ()
| Some file -> if not (load_file recursive ppf file) then raise Load_failed
end
| _ -> ()
)
cu.cu_reloc;
load_compunit ic filename ppf cu;
true
end else
if buffer = Config.cma_magic_number then begin
seek_in ic toc_pos;
let lib = (input_value ic : library) in
List.iter
(fun dllib ->
let name = Dll.extract_dll_name dllib in
try Dll.open_dlls Dll.For_execution [name]
with Failure reason ->
fprintf ppf
"Cannot load required shared library %s.@.Reason: %s.@."
name reason;
raise Load_failed)
lib.lib_dllibs;
List.iter (load_compunit ic filename ppf) lib.lib_units;
true
end else begin
fprintf ppf "File %s is not a bytecode object file.@." name;
false
end
with Load_failed -> false
let dir_load ppf name = ignore (load_file false ppf name)
let _ = Hashtbl.add directive_table "load" (Directive_string (dir_load std_out))
let dir_load_rec ppf name = ignore (load_file true ppf name)
let _ = Hashtbl.add directive_table "load_rec" (Directive_string (dir_load_rec std_out))
let load_file = load_file false
let dir_use ppf name = ignore(Toploop.use_file ppf name)
let _ = Hashtbl.add directive_table "use" (Directive_string (dir_use std_out))
type 'a printer_type_new = Format.formatter -> 'a -> unit
type 'a printer_type_old = 'a -> unit
let match_printer_type ppf desc typename =
let (printer_type, _) =
try
Env.lookup_type (Ldot(Lident "Topdirs", typename)) !toplevel_env
with Not_found ->
fprintf ppf "Cannot find type Topdirs.%s.@." typename;
raise Exit in
Ctype.init_def(Ident.current_time());
Ctype.begin_def();
let ty_arg = Ctype.newvar() in
Ctype.unify !toplevel_env
(Ctype.newconstr printer_type [ty_arg])
(Ctype.instance_def desc.val_type);
Ctype.end_def();
Ctype.generalize ty_arg;
ty_arg
let find_printer_type ppf lid =
try
let (path, desc) = Env.lookup_value lid !toplevel_env in
let (ty_arg, is_old_style) =
try
(match_printer_type ppf desc "printer_type_new", false)
with Ctype.Unify _ ->
(match_printer_type ppf desc "printer_type_old", true) in
(ty_arg, path, is_old_style)
with
| Not_found ->
fprintf ppf "Unbound value %a.@." Printtyp.longident lid;
raise Exit
| Ctype.Unify _ ->
fprintf ppf "%a has a wrong type for a printing function.@."
Printtyp.longident lid;
raise Exit
let dir_install_printer ppf lid =
try
let (ty_arg, path, is_old_style) = find_printer_type ppf lid in
let v = eval_path path in
let print_function =
if is_old_style then
(fun formatter repr -> Obj.obj v (Obj.obj repr))
else
(fun formatter repr -> Obj.obj v formatter (Obj.obj repr)) in
install_printer path ty_arg print_function
with Exit -> ()
let dir_remove_printer ppf lid =
try
let (ty_arg, path, is_old_style) = find_printer_type ppf lid in
begin try
remove_printer path
with Not_found ->
fprintf ppf "No printer named %a.@." Printtyp.longident lid
end
with Exit -> ()
let _ = Hashtbl.add directive_table "install_printer"
(Directive_ident (dir_install_printer std_out))
let _ = Hashtbl.add directive_table "remove_printer"
(Directive_ident (dir_remove_printer std_out))
external current_environment: unit -> Obj.t = "caml_get_current_environment"
let tracing_function_ptr =
get_code_pointer
(Obj.repr (fun arg -> Trace.print_trace (current_environment()) arg))
let dir_trace ppf lid =
try
let (path, desc) = Env.lookup_value lid !toplevel_env in
match desc.val_kind with
| Val_prim p ->
fprintf ppf "%a is an external function and cannot be traced.@."
Printtyp.longident lid
| _ ->
let clos = eval_path path in
if Obj.is_block clos
&& (Obj.tag clos = Obj.closure_tag || Obj.tag clos = Obj.infix_tag)
then begin
match is_traced clos with
| Some opath ->
fprintf ppf "%a is already traced (under the name %a).@."
Printtyp.path path
Printtyp.path opath
| None ->
traced_functions :=
{ path = path;
closure = clos;
actual_code = get_code_pointer clos;
instrumented_fun =
instrument_closure !toplevel_env lid ppf desc.val_type }
:: !traced_functions;
set_code_pointer clos tracing_function_ptr;
fprintf ppf "%a is now traced.@." Printtyp.longident lid
end else fprintf ppf "%a is not a function.@." Printtyp.longident lid
with
| Not_found -> fprintf ppf "Unbound value %a.@." Printtyp.longident lid
let dir_untrace ppf lid =
try
let (path, desc) = Env.lookup_value lid !toplevel_env in
let rec remove = function
| [] ->
fprintf ppf "%a was not traced.@." Printtyp.longident lid;
[]
| f :: rem ->
if Path.same f.path path then begin
set_code_pointer f.closure f.actual_code;
fprintf ppf "%a is no longer traced.@." Printtyp.longident lid;
rem
end else f :: remove rem in
traced_functions := remove !traced_functions
with
| Not_found -> fprintf ppf "Unbound value %a.@." Printtyp.longident lid
let dir_untrace_all ppf () =
List.iter
(fun f ->
set_code_pointer f.closure f.actual_code;
fprintf ppf "%a is no longer traced.@." Printtyp.path f.path)
!traced_functions;
traced_functions := []
let parse_warnings ppf iserr s =
try Warnings.parse_options iserr s
with Arg.Bad err -> fprintf ppf "%s.@." err
let _ =
Hashtbl.add directive_table "trace" (Directive_ident (dir_trace std_out));
Hashtbl.add directive_table "untrace" (Directive_ident (dir_untrace std_out));
Hashtbl.add directive_table
"untrace_all" (Directive_none (dir_untrace_all std_out));
Hashtbl.add directive_table "print_depth"
(Directive_int(fun n -> max_printer_depth := n));
Hashtbl.add directive_table "print_length"
(Directive_int(fun n -> max_printer_steps := n));
Hashtbl.add directive_table "labels"
(Directive_bool(fun b -> Clflags.classic := not b));
Hashtbl.add directive_table "principal"
(Directive_bool(fun b -> Clflags.principal := b));
Hashtbl.add directive_table "rectypes"
(Directive_none(fun () -> Clflags.recursive_types := true));
Hashtbl.add directive_table "warnings"
(Directive_string (parse_warnings std_out false));
Hashtbl.add directive_table "warn_error"
(Directive_string (parse_warnings std_out true))
|
438249971dc4f10057bcdf07b1fd68af477778db76e920bf9e8e754343092220 | senapk/funcional_arcade | solver.hs | myrepeat x = x:myrepeat x | null | https://raw.githubusercontent.com/senapk/funcional_arcade/89625b4559b3e590d88dd70a27b6cbdc07cc2988/base/072/solver.hs | haskell | myrepeat x = x:myrepeat x | |
9aa5eecc8b73109d25f11bd5617c86db6cbcf50426fb96e941483f768c2e1173 | diagrams/diagrams-cairo | Cairo.hs | # LANGUAGE CPP #
# LANGUAGE TypeFamilies #
-----------------------------------------------------------------------------
-- |
Module : Diagrams . Backend . Cairo
Copyright : ( c ) 2011 Diagrams - cairo team ( see LICENSE )
-- License : BSD-style (see LICENSE)
-- Maintainer :
--
-- A full-featured rendering backend for diagrams using the
cairo rendering engine .
--
To invoke the cairo backend , you have three options .
--
* You can use the " Diagrams . Backend . Cairo . CmdLine " module to create
-- standalone executables which output images when invoked.
--
-- * You can use the 'renderCairo' function provided by this module,
-- which gives you more flexible programmatic control over when and
-- how images are output (making it easy to, for example, write a
-- single program that outputs multiple images, or one that outputs
-- images dynamically based on user input, and so on).
--
-- * Finally, for the most flexibility, you can directly
-- use methods from the
' Diagrams . Core . Types . Backend ' instance for @Cairo@. In particular ,
-- 'Diagrams.Core.Types.renderDia' has the generic type
--
-- > renderDia :: b -> Options b v n -> QDiagram b v n m -> Result b v n
--
( omitting a few type class constraints ) . represents the
backend type , @v@ the vector space , @n@ the numeric field , and @m@
-- the type of monoidal query annotations on the diagram. 'Options'
-- and 'Result' are associated data and type families, respectively,
-- which yield the type of option records and rendering results
-- specific to any particular backend. For @b ~ Cairo@, @v ~ V2@, and
-- @n ~ Double@, we have
--
-- > data family Options Cairo V2 Double = CairoOptions
-- > { _cairoFileName :: String -- ^ The name of the file you want generated
> , _ cairoSizeSpec : : SizeSpec V2 Double -- ^ The requested size of the output
> , _ cairoOutputType : : OutputType -- ^ the output format and associated options
> , _ cairoBypassAdjust : : -- ^ Should the ' adjustDia ' step be bypassed during rendering ?
-- > }
--
-- @
type family Result Cairo V2 Double = ( IO ( ) , ' Graphics . Rendering . Cairo . Render ' ( ) )
-- @
--
-- So the type of 'renderDia' resolves to
--
-- @
renderDia : : Cairo - > Options Cairo V2 Double - > QQDiagram Cairo V2 Double Any m - > ( IO ( ) , ' Graphics . Rendering . Cairo . Render ' ( ) )
-- @
--
-- which you could call like so:
--
-- @
renderDia Cairo ( CairoOptions \"foo.png\ " ( Width 250 ) PNG False ) ( myDiagram : : QDiagram Cairo V2 Double Any )
-- @
--
This would return a pair ; the first element is an @IO ( ) @ action
which will write out @foo.png@ to disk , and the second is a cairo
-- rendering action which can be used, for example, to directly draw
to a Gtk window . Note the type annotation on @myDiagram@ which may
-- be necessary to fix the type variable @m@; this example uses the
type synonym @Diagram b = QDiagram b ( V b ) ( N b ) Any@ to fix @m = Any@
and fix @v@ and @n@ to backend specific types .
--
-----------------------------------------------------------------------------
module Diagrams.Backend.Cairo
( -- * Rendering
renderCairo
* Cairo - supported output formats
, OutputType(..)
* Cairo - specific options
-- $CairoOptions
The below CPP hack is needed because GHC 7.0.x has a bug regarding
-- (re?)export of data family constructors; in particular the below
-- export causes the error "Not in scope: type constructor or class
-- `Options'" even though
-- #Import_and_export
seems to indicate it should be supported . When using 7.0.x one
must import Diagrams . Backend . Cairo . Internal in order to bring
CairoOptions into scope .
GHC 7.4.0 regression ?
#if __GLASGOW_HASKELL__ >= 702 && __GLASGOW_HASKELL__ < 704
, Options(..)
#endif
-- * Backend token
, Cairo(..)
, B
) where
import System.FilePath (takeExtension)
import Diagrams.Backend.Cairo.Internal
import Diagrams.Prelude
-- $CairoOptions
--
Unfortunately , does not yet support documentation for
-- associated data families, so we must just provide it manually.
-- This module defines
--
-- > data family Options Cairo V2 Double = CairoOptions
-- > { _cairoFileName :: String -- ^ The name of the file you want generated
> , _ cairoSizeSpec : : SizeSpec V2 Double -- ^ The requested size of the output
> , _ cairoOutputType : : OutputType -- ^ the output format and associated options
-- > , _cairoBypassAdjust :: Bool -- ^ Should the 'adjustDia' step be bypassed during rendering?
-- > }
--
See the documentation at the top of " Diagrams . Backend . Cairo " for
-- information on how to make use of this.
| Render a diagram using the cairo backend , writing to the given
-- output file and using the requested size. The output type (PNG,
PS , PDF , or SVG ) is determined automatically from the output file
-- extension.
--
-- This function is provided as a convenience; if you need more
-- flexibility than it provides, you can call 'renderDia' directly,
-- as described above.
renderCairo :: FilePath -> SizeSpec V2 Double -> QDiagram Cairo V2 Double Any -> IO ()
renderCairo outFile sizeSpec d
= fst (renderDia Cairo (CairoOptions outFile sizeSpec outTy False) d)
where
outTy =
case takeExtension outFile of
".png" -> PNG
".ps" -> PS
".pdf" -> PDF
".svg" -> SVG
_ -> PNG
| null | https://raw.githubusercontent.com/diagrams/diagrams-cairo/533e4f4f18f961543bb1d78493c750dec45fd4a3/src/Diagrams/Backend/Cairo.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style (see LICENSE)
Maintainer :
A full-featured rendering backend for diagrams using the
standalone executables which output images when invoked.
* You can use the 'renderCairo' function provided by this module,
which gives you more flexible programmatic control over when and
how images are output (making it easy to, for example, write a
single program that outputs multiple images, or one that outputs
images dynamically based on user input, and so on).
* Finally, for the most flexibility, you can directly
use methods from the
'Diagrams.Core.Types.renderDia' has the generic type
> renderDia :: b -> Options b v n -> QDiagram b v n m -> Result b v n
the type of monoidal query annotations on the diagram. 'Options'
and 'Result' are associated data and type families, respectively,
which yield the type of option records and rendering results
specific to any particular backend. For @b ~ Cairo@, @v ~ V2@, and
@n ~ Double@, we have
> data family Options Cairo V2 Double = CairoOptions
> { _cairoFileName :: String -- ^ The name of the file you want generated
^ The requested size of the output
^ the output format and associated options
^ Should the ' adjustDia ' step be bypassed during rendering ?
> }
@
@
So the type of 'renderDia' resolves to
@
@
which you could call like so:
@
@
rendering action which can be used, for example, to directly draw
be necessary to fix the type variable @m@; this example uses the
---------------------------------------------------------------------------
* Rendering
$CairoOptions
(re?)export of data family constructors; in particular the below
export causes the error "Not in scope: type constructor or class
`Options'" even though
#Import_and_export
* Backend token
$CairoOptions
associated data families, so we must just provide it manually.
This module defines
> data family Options Cairo V2 Double = CairoOptions
> { _cairoFileName :: String -- ^ The name of the file you want generated
^ The requested size of the output
^ the output format and associated options
> , _cairoBypassAdjust :: Bool -- ^ Should the 'adjustDia' step be bypassed during rendering?
> }
information on how to make use of this.
output file and using the requested size. The output type (PNG,
extension.
This function is provided as a convenience; if you need more
flexibility than it provides, you can call 'renderDia' directly,
as described above. | # LANGUAGE CPP #
# LANGUAGE TypeFamilies #
Module : Diagrams . Backend . Cairo
Copyright : ( c ) 2011 Diagrams - cairo team ( see LICENSE )
cairo rendering engine .
To invoke the cairo backend , you have three options .
* You can use the " Diagrams . Backend . Cairo . CmdLine " module to create
' Diagrams . Core . Types . Backend ' instance for @Cairo@. In particular ,
( omitting a few type class constraints ) . represents the
backend type , @v@ the vector space , @n@ the numeric field , and @m@
type family Result Cairo V2 Double = ( IO ( ) , ' Graphics . Rendering . Cairo . Render ' ( ) )
renderDia : : Cairo - > Options Cairo V2 Double - > QQDiagram Cairo V2 Double Any m - > ( IO ( ) , ' Graphics . Rendering . Cairo . Render ' ( ) )
renderDia Cairo ( CairoOptions \"foo.png\ " ( Width 250 ) PNG False ) ( myDiagram : : QDiagram Cairo V2 Double Any )
This would return a pair ; the first element is an @IO ( ) @ action
which will write out @foo.png@ to disk , and the second is a cairo
to a Gtk window . Note the type annotation on @myDiagram@ which may
type synonym @Diagram b = QDiagram b ( V b ) ( N b ) Any@ to fix @m = Any@
and fix @v@ and @n@ to backend specific types .
module Diagrams.Backend.Cairo
renderCairo
* Cairo - supported output formats
, OutputType(..)
* Cairo - specific options
The below CPP hack is needed because GHC 7.0.x has a bug regarding
seems to indicate it should be supported . When using 7.0.x one
must import Diagrams . Backend . Cairo . Internal in order to bring
CairoOptions into scope .
GHC 7.4.0 regression ?
#if __GLASGOW_HASKELL__ >= 702 && __GLASGOW_HASKELL__ < 704
, Options(..)
#endif
, Cairo(..)
, B
) where
import System.FilePath (takeExtension)
import Diagrams.Backend.Cairo.Internal
import Diagrams.Prelude
Unfortunately , does not yet support documentation for
See the documentation at the top of " Diagrams . Backend . Cairo " for
| Render a diagram using the cairo backend , writing to the given
PS , PDF , or SVG ) is determined automatically from the output file
renderCairo :: FilePath -> SizeSpec V2 Double -> QDiagram Cairo V2 Double Any -> IO ()
renderCairo outFile sizeSpec d
= fst (renderDia Cairo (CairoOptions outFile sizeSpec outTy False) d)
where
outTy =
case takeExtension outFile of
".png" -> PNG
".ps" -> PS
".pdf" -> PDF
".svg" -> SVG
_ -> PNG
|
a285b84df76a1fd535905b5ff86a428b04715fb2154919f802130d0d3e622914 | ocaml-multicore/parafuzz | test.ml | TEST
files = " a.ml b.ml c.ml main.ml main_ok.ml "
* setup - ocamlc.byte - build - env
* * script
script = " mkdir -p subdir "
* * * script
script = " cp $ { test_source_directory}/subdir / m.ml subdir "
* * * * ocamlc.byte
module = " subdir / m.ml "
* * * * * ocamlc.byte
flags = " -I subdir "
module = " a.ml "
* * * * * * ocamlc.byte
module = " b.ml "
* * * * * * * ocamlc.byte
module = " c.ml "
* * * * * * * * ocamlc.byte
flags = " "
module = " main_ok.ml "
* * * * * * * * * ocamlc.byte
module = " main.ml "
ocamlc_byte_exit_status = " 2 "
* * * * * * * * * * check - ocamlc.byte - output
files = "a.ml b.ml c.ml main.ml main_ok.ml"
* setup-ocamlc.byte-build-env
** script
script = "mkdir -p subdir"
*** script
script = "cp ${test_source_directory}/subdir/m.ml subdir"
**** ocamlc.byte
module = "subdir/m.ml"
***** ocamlc.byte
flags = "-I subdir"
module = "a.ml"
****** ocamlc.byte
module = "b.ml"
******* ocamlc.byte
module = "c.ml"
******** ocamlc.byte
flags = ""
module = "main_ok.ml"
********* ocamlc.byte
module = "main.ml"
ocamlc_byte_exit_status = "2"
********** check-ocamlc.byte-output
*)
| null | https://raw.githubusercontent.com/ocaml-multicore/parafuzz/6a92906f1ba03287ffcb433063bded831a644fd5/testsuite/tests/typing-missing-cmi/test.ml | ocaml | TEST
files = " a.ml b.ml c.ml main.ml main_ok.ml "
* setup - ocamlc.byte - build - env
* * script
script = " mkdir -p subdir "
* * * script
script = " cp $ { test_source_directory}/subdir / m.ml subdir "
* * * * ocamlc.byte
module = " subdir / m.ml "
* * * * * ocamlc.byte
flags = " -I subdir "
module = " a.ml "
* * * * * * ocamlc.byte
module = " b.ml "
* * * * * * * ocamlc.byte
module = " c.ml "
* * * * * * * * ocamlc.byte
flags = " "
module = " main_ok.ml "
* * * * * * * * * ocamlc.byte
module = " main.ml "
ocamlc_byte_exit_status = " 2 "
* * * * * * * * * * check - ocamlc.byte - output
files = "a.ml b.ml c.ml main.ml main_ok.ml"
* setup-ocamlc.byte-build-env
** script
script = "mkdir -p subdir"
*** script
script = "cp ${test_source_directory}/subdir/m.ml subdir"
**** ocamlc.byte
module = "subdir/m.ml"
***** ocamlc.byte
flags = "-I subdir"
module = "a.ml"
****** ocamlc.byte
module = "b.ml"
******* ocamlc.byte
module = "c.ml"
******** ocamlc.byte
flags = ""
module = "main_ok.ml"
********* ocamlc.byte
module = "main.ml"
ocamlc_byte_exit_status = "2"
********** check-ocamlc.byte-output
*)
| |
b464e184aec7b4bcf2853f007fecf2a6b4e5c323bc01120052661a8c54878087 | hyperledger-labs/fabric-chaincode-haskell | Policies.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
# LANGUAGE TypeApplications #
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - matches #
-- | Generated by Haskell protocol buffer compiler. DO NOT EDIT!
module Common.Policies where
import qualified Prelude as Hs
import qualified Proto3.Suite.Class as HsProtobuf
import qualified Proto3.Suite.DotProto as HsProtobuf
import qualified Proto3.Suite.JSONPB as HsJSONPB
import Proto3.Suite.JSONPB ((.=), (.:))
import qualified Proto3.Suite.Types as HsProtobuf
import qualified Proto3.Wire as HsProtobuf
import qualified Control.Applicative as Hs
import Control.Applicative ((<*>), (<|>), (<$>))
import qualified Control.DeepSeq as Hs
import qualified Control.Monad as Hs
import qualified Data.ByteString as Hs
import qualified Data.Coerce as Hs
import qualified Data.Int as Hs (Int16, Int32, Int64)
import qualified Data.List.NonEmpty as Hs (NonEmpty(..))
import qualified Data.Map as Hs (Map, mapKeysMonotonic)
import qualified Data.Proxy as Proxy
import qualified Data.String as Hs (fromString)
import qualified Data.Text.Lazy as Hs (Text)
import qualified Data.Vector as Hs (Vector)
import qualified Data.Word as Hs (Word16, Word32, Word64)
import qualified GHC.Enum as Hs
import qualified GHC.Generics as Hs
import qualified Unsafe.Coerce as Hs
import qualified Msp.MspPrincipal
data Policy = Policy{policyType :: Hs.Int32,
policyValue :: Hs.ByteString}
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named Policy where
nameOf _ = (Hs.fromString "Policy")
instance HsProtobuf.HasDefault Policy
instance HsProtobuf.Message Policy where
encodeMessage _
Policy{policyType = policyType, policyValue = policyValue}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
policyType),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
policyValue)])
decodeMessage _
= (Hs.pure Policy) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.Int32)
(HsProtobuf.Single "type")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Prim HsProtobuf.Bytes)
(HsProtobuf.Single "value")
[]
"")]
instance HsJSONPB.ToJSONPB Policy where
toJSONPB (Policy f1 f2)
= (HsJSONPB.object ["type" .= f1, "value" .= f2])
toEncodingPB (Policy f1 f2)
= (HsJSONPB.pairs ["type" .= f1, "value" .= f2])
instance HsJSONPB.FromJSONPB Policy where
parseJSONPB
= (HsJSONPB.withObject "Policy"
(\ obj -> (Hs.pure Policy) <*> obj .: "type" <*> obj .: "value"))
instance HsJSONPB.ToJSON Policy where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON Policy where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema Policy where
declareNamedSchema _
= do let declare_type = HsJSONPB.declareSchemaRef
policyType <- declare_type Proxy.Proxy
let declare_value = HsJSONPB.declareSchemaRef
policyValue <- declare_value Proxy.Proxy
let _ = Hs.pure Policy <*> HsJSONPB.asProxy declare_type <*>
HsJSONPB.asProxy declare_value
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName = Hs.Just "Policy",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("type", policyType),
("value", policyValue)]}})
data Policy_PolicyType = Policy_PolicyTypeUNKNOWN
| Policy_PolicyTypeSIGNATURE
| Policy_PolicyTypeMSP
| Policy_PolicyTypeIMPLICIT_META
deriving (Hs.Show, Hs.Eq, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named Policy_PolicyType where
nameOf _ = (Hs.fromString "Policy_PolicyType")
instance HsProtobuf.HasDefault Policy_PolicyType
instance Hs.Bounded Policy_PolicyType where
minBound = Policy_PolicyTypeUNKNOWN
maxBound = Policy_PolicyTypeIMPLICIT_META
instance Hs.Ord Policy_PolicyType where
compare x y
= Hs.compare (HsProtobuf.fromProtoEnum x)
(HsProtobuf.fromProtoEnum y)
instance HsProtobuf.ProtoEnum Policy_PolicyType where
toProtoEnumMay 0 = Hs.Just Policy_PolicyTypeUNKNOWN
toProtoEnumMay 1 = Hs.Just Policy_PolicyTypeSIGNATURE
toProtoEnumMay 2 = Hs.Just Policy_PolicyTypeMSP
toProtoEnumMay 3 = Hs.Just Policy_PolicyTypeIMPLICIT_META
toProtoEnumMay _ = Hs.Nothing
fromProtoEnum (Policy_PolicyTypeUNKNOWN) = 0
fromProtoEnum (Policy_PolicyTypeSIGNATURE) = 1
fromProtoEnum (Policy_PolicyTypeMSP) = 2
fromProtoEnum (Policy_PolicyTypeIMPLICIT_META) = 3
instance HsJSONPB.ToJSONPB Policy_PolicyType where
toJSONPB x _ = HsJSONPB.enumFieldString x
toEncodingPB x _ = HsJSONPB.enumFieldEncoding x
instance HsJSONPB.FromJSONPB Policy_PolicyType where
parseJSONPB (HsJSONPB.String "UNKNOWN")
= Hs.pure Policy_PolicyTypeUNKNOWN
parseJSONPB (HsJSONPB.String "SIGNATURE")
= Hs.pure Policy_PolicyTypeSIGNATURE
parseJSONPB (HsJSONPB.String "MSP") = Hs.pure Policy_PolicyTypeMSP
parseJSONPB (HsJSONPB.String "IMPLICIT_META")
= Hs.pure Policy_PolicyTypeIMPLICIT_META
parseJSONPB v = (HsJSONPB.typeMismatch "Policy_PolicyType" v)
instance HsJSONPB.ToJSON Policy_PolicyType where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON Policy_PolicyType where
parseJSON = HsJSONPB.parseJSONPB
instance HsProtobuf.Finite Policy_PolicyType
data SignaturePolicyEnvelope = SignaturePolicyEnvelope{signaturePolicyEnvelopeVersion
:: Hs.Int32,
signaturePolicyEnvelopeRule ::
Hs.Maybe Common.Policies.SignaturePolicy,
signaturePolicyEnvelopeIdentities ::
Hs.Vector Msp.MspPrincipal.MSPPrincipal}
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named SignaturePolicyEnvelope where
nameOf _ = (Hs.fromString "SignaturePolicyEnvelope")
instance HsProtobuf.HasDefault SignaturePolicyEnvelope
instance HsProtobuf.Message SignaturePolicyEnvelope where
encodeMessage _
SignaturePolicyEnvelope{signaturePolicyEnvelopeVersion =
signaturePolicyEnvelopeVersion,
signaturePolicyEnvelopeRule = signaturePolicyEnvelopeRule,
signaturePolicyEnvelopeIdentities =
signaturePolicyEnvelopeIdentities}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
signaturePolicyEnvelopeVersion),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
(Hs.coerce @(Hs.Maybe Common.Policies.SignaturePolicy)
@(HsProtobuf.Nested Common.Policies.SignaturePolicy)
signaturePolicyEnvelopeRule)),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 3)
(Hs.coerce @(Hs.Vector Msp.MspPrincipal.MSPPrincipal)
@(HsProtobuf.NestedVec Msp.MspPrincipal.MSPPrincipal)
signaturePolicyEnvelopeIdentities))])
decodeMessage _
= (Hs.pure SignaturePolicyEnvelope) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(Hs.coerce @(_ (HsProtobuf.Nested Common.Policies.SignaturePolicy))
@(_ (Hs.Maybe Common.Policies.SignaturePolicy))
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2)))
<*>
(Hs.coerce
@(_ (HsProtobuf.NestedVec Msp.MspPrincipal.MSPPrincipal))
@(_ (Hs.Vector Msp.MspPrincipal.MSPPrincipal))
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 3)))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.Int32)
(HsProtobuf.Single "version")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Prim
(HsProtobuf.Named (HsProtobuf.Single "SignaturePolicy")))
(HsProtobuf.Single "rule")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 3)
(HsProtobuf.Repeated
(HsProtobuf.Named (HsProtobuf.Single "MSPPrincipal")))
(HsProtobuf.Single "identities")
[]
"")]
instance HsJSONPB.ToJSONPB SignaturePolicyEnvelope where
toJSONPB (SignaturePolicyEnvelope f1 f2 f3)
= (HsJSONPB.object
["version" .= f1, "rule" .= f2, "identities" .= f3])
toEncodingPB (SignaturePolicyEnvelope f1 f2 f3)
= (HsJSONPB.pairs
["version" .= f1, "rule" .= f2, "identities" .= f3])
instance HsJSONPB.FromJSONPB SignaturePolicyEnvelope where
parseJSONPB
= (HsJSONPB.withObject "SignaturePolicyEnvelope"
(\ obj ->
(Hs.pure SignaturePolicyEnvelope) <*> obj .: "version" <*>
obj .: "rule"
<*> obj .: "identities"))
instance HsJSONPB.ToJSON SignaturePolicyEnvelope where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON SignaturePolicyEnvelope where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema SignaturePolicyEnvelope where
declareNamedSchema _
= do let declare_version = HsJSONPB.declareSchemaRef
signaturePolicyEnvelopeVersion <- declare_version Proxy.Proxy
let declare_rule = HsJSONPB.declareSchemaRef
signaturePolicyEnvelopeRule <- declare_rule Proxy.Proxy
let declare_identities = HsJSONPB.declareSchemaRef
signaturePolicyEnvelopeIdentities <- declare_identities Proxy.Proxy
let _ = Hs.pure SignaturePolicyEnvelope <*>
HsJSONPB.asProxy declare_version
<*> HsJSONPB.asProxy declare_rule
<*> HsJSONPB.asProxy declare_identities
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicyEnvelope",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("version", signaturePolicyEnvelopeVersion),
("rule", signaturePolicyEnvelopeRule),
("identities",
signaturePolicyEnvelopeIdentities)]}})
newtype SignaturePolicy = SignaturePolicy{signaturePolicyType ::
Hs.Maybe SignaturePolicyType}
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named SignaturePolicy where
nameOf _ = (Hs.fromString "SignaturePolicy")
instance HsProtobuf.HasDefault SignaturePolicy
instance HsProtobuf.Message SignaturePolicy where
encodeMessage _
SignaturePolicy{signaturePolicyType = signaturePolicyType}
= (Hs.mconcat
[case signaturePolicyType of
Hs.Nothing -> Hs.mempty
Hs.Just x
-> case x of
SignaturePolicyTypeSignedBy y
-> (HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
(HsProtobuf.ForceEmit y))
SignaturePolicyTypeNOutOf y
-> (HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
(Hs.coerce @(Hs.Maybe Common.Policies.SignaturePolicy_NOutOf)
@(HsProtobuf.Nested Common.Policies.SignaturePolicy_NOutOf)
(Hs.Just y)))])
decodeMessage _
= (Hs.pure SignaturePolicy) <*>
(HsProtobuf.oneof Hs.Nothing
[((HsProtobuf.FieldNumber 1),
(Hs.pure (Hs.Just Hs.. SignaturePolicyTypeSignedBy)) <*>
HsProtobuf.decodeMessageField),
((HsProtobuf.FieldNumber 2),
(Hs.pure (Hs.fmap SignaturePolicyTypeNOutOf)) <*>
(Hs.coerce
@(_ (HsProtobuf.Nested Common.Policies.SignaturePolicy_NOutOf))
@(_ (Hs.Maybe Common.Policies.SignaturePolicy_NOutOf))
HsProtobuf.decodeMessageField))])
dotProto _ = []
instance HsJSONPB.ToJSONPB SignaturePolicy where
toJSONPB (SignaturePolicy f1_or_f2)
= (HsJSONPB.object
[(let encodeType
= (case f1_or_f2 of
Hs.Just (SignaturePolicyTypeSignedBy f1)
-> (HsJSONPB.pair "signed_by" f1)
Hs.Just (SignaturePolicyTypeNOutOf f2)
-> (HsJSONPB.pair "n_out_of" f2)
Hs.Nothing -> Hs.mempty)
in
\ options ->
if HsJSONPB.optEmitNamedOneof options then
("Type" .= (HsJSONPB.objectOrNull [encodeType] options)) options
else encodeType options)])
toEncodingPB (SignaturePolicy f1_or_f2)
= (HsJSONPB.pairs
[(let encodeType
= (case f1_or_f2 of
Hs.Just (SignaturePolicyTypeSignedBy f1)
-> (HsJSONPB.pair "signed_by" f1)
Hs.Just (SignaturePolicyTypeNOutOf f2)
-> (HsJSONPB.pair "n_out_of" f2)
Hs.Nothing -> Hs.mempty)
in
\ options ->
if HsJSONPB.optEmitNamedOneof options then
("Type" .= (HsJSONPB.pairsOrNull [encodeType] options)) options
else encodeType options)])
instance HsJSONPB.FromJSONPB SignaturePolicy where
parseJSONPB
= (HsJSONPB.withObject "SignaturePolicy"
(\ obj ->
(Hs.pure SignaturePolicy) <*>
(let parseType parseObj
= Hs.msum
[Hs.Just Hs.. SignaturePolicyTypeSignedBy <$>
(HsJSONPB.parseField parseObj "signed_by"),
Hs.Just Hs.. SignaturePolicyTypeNOutOf <$>
(HsJSONPB.parseField parseObj "n_out_of"),
Hs.pure Hs.Nothing]
in
((obj .: "Type") Hs.>>= (HsJSONPB.withObject "Type" parseType)) <|>
(parseType obj))))
instance HsJSONPB.ToJSON SignaturePolicy where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON SignaturePolicy where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema SignaturePolicy where
declareNamedSchema _
= do let declare_Type = HsJSONPB.declareSchemaRef
signaturePolicyType <- declare_Type Proxy.Proxy
let _ = Hs.pure SignaturePolicy <*> HsJSONPB.asProxy declare_Type
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicy",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("Type", signaturePolicyType)]}})
data SignaturePolicy_NOutOf = SignaturePolicy_NOutOf{signaturePolicy_NOutOfN
:: Hs.Int32,
signaturePolicy_NOutOfRules ::
Hs.Vector Common.Policies.SignaturePolicy}
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named SignaturePolicy_NOutOf where
nameOf _ = (Hs.fromString "SignaturePolicy_NOutOf")
instance HsProtobuf.HasDefault SignaturePolicy_NOutOf
instance HsProtobuf.Message SignaturePolicy_NOutOf where
encodeMessage _
SignaturePolicy_NOutOf{signaturePolicy_NOutOfN =
signaturePolicy_NOutOfN,
signaturePolicy_NOutOfRules = signaturePolicy_NOutOfRules}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
signaturePolicy_NOutOfN),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
(Hs.coerce @(Hs.Vector Common.Policies.SignaturePolicy)
@(HsProtobuf.NestedVec Common.Policies.SignaturePolicy)
signaturePolicy_NOutOfRules))])
decodeMessage _
= (Hs.pure SignaturePolicy_NOutOf) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(Hs.coerce
@(_ (HsProtobuf.NestedVec Common.Policies.SignaturePolicy))
@(_ (Hs.Vector Common.Policies.SignaturePolicy))
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2)))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.Int32)
(HsProtobuf.Single "n")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Repeated
(HsProtobuf.Named (HsProtobuf.Single "SignaturePolicy")))
(HsProtobuf.Single "rules")
[]
"")]
instance HsJSONPB.ToJSONPB SignaturePolicy_NOutOf where
toJSONPB (SignaturePolicy_NOutOf f1 f2)
= (HsJSONPB.object ["n" .= f1, "rules" .= f2])
toEncodingPB (SignaturePolicy_NOutOf f1 f2)
= (HsJSONPB.pairs ["n" .= f1, "rules" .= f2])
instance HsJSONPB.FromJSONPB SignaturePolicy_NOutOf where
parseJSONPB
= (HsJSONPB.withObject "SignaturePolicy_NOutOf"
(\ obj ->
(Hs.pure SignaturePolicy_NOutOf) <*> obj .: "n" <*>
obj .: "rules"))
instance HsJSONPB.ToJSON SignaturePolicy_NOutOf where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON SignaturePolicy_NOutOf where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema SignaturePolicy_NOutOf where
declareNamedSchema _
= do let declare_n = HsJSONPB.declareSchemaRef
signaturePolicy_NOutOfN <- declare_n Proxy.Proxy
let declare_rules = HsJSONPB.declareSchemaRef
signaturePolicy_NOutOfRules <- declare_rules Proxy.Proxy
let _ = Hs.pure SignaturePolicy_NOutOf <*>
HsJSONPB.asProxy declare_n
<*> HsJSONPB.asProxy declare_rules
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicy_NOutOf",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("n", signaturePolicy_NOutOfN),
("rules", signaturePolicy_NOutOfRules)]}})
data SignaturePolicyType = SignaturePolicyTypeSignedBy Hs.Int32
| SignaturePolicyTypeNOutOf Common.Policies.SignaturePolicy_NOutOf
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named SignaturePolicyType where
nameOf _ = (Hs.fromString "SignaturePolicyType")
instance HsJSONPB.ToSchema SignaturePolicyType where
declareNamedSchema _
= do let declare_signed_by = HsJSONPB.declareSchemaRef
signaturePolicyTypeSignedBy <- declare_signed_by Proxy.Proxy
let _ = Hs.pure SignaturePolicyTypeSignedBy <*>
HsJSONPB.asProxy declare_signed_by
let declare_n_out_of = HsJSONPB.declareSchemaRef
signaturePolicyTypeNOutOf <- declare_n_out_of Proxy.Proxy
let _ = Hs.pure SignaturePolicyTypeNOutOf <*>
HsJSONPB.asProxy declare_n_out_of
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicyType",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("signed_by", signaturePolicyTypeSignedBy),
("n_out_of", signaturePolicyTypeNOutOf)],
HsJSONPB._schemaMinProperties = Hs.Just 1,
HsJSONPB._schemaMaxProperties = Hs.Just 1}})
data ImplicitMetaPolicy = ImplicitMetaPolicy{implicitMetaPolicySubPolicy
:: Hs.Text,
implicitMetaPolicyRule ::
HsProtobuf.Enumerated
Common.Policies.ImplicitMetaPolicy_Rule}
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named ImplicitMetaPolicy where
nameOf _ = (Hs.fromString "ImplicitMetaPolicy")
instance HsProtobuf.HasDefault ImplicitMetaPolicy
instance HsProtobuf.Message ImplicitMetaPolicy where
encodeMessage _
ImplicitMetaPolicy{implicitMetaPolicySubPolicy =
implicitMetaPolicySubPolicy,
implicitMetaPolicyRule = implicitMetaPolicyRule}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
implicitMetaPolicySubPolicy),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
implicitMetaPolicyRule)])
decodeMessage _
= (Hs.pure ImplicitMetaPolicy) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.String)
(HsProtobuf.Single "sub_policy")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Prim (HsProtobuf.Named (HsProtobuf.Single "Rule")))
(HsProtobuf.Single "rule")
[]
"")]
instance HsJSONPB.ToJSONPB ImplicitMetaPolicy where
toJSONPB (ImplicitMetaPolicy f1 f2)
= (HsJSONPB.object ["sub_policy" .= f1, "rule" .= f2])
toEncodingPB (ImplicitMetaPolicy f1 f2)
= (HsJSONPB.pairs ["sub_policy" .= f1, "rule" .= f2])
instance HsJSONPB.FromJSONPB ImplicitMetaPolicy where
parseJSONPB
= (HsJSONPB.withObject "ImplicitMetaPolicy"
(\ obj ->
(Hs.pure ImplicitMetaPolicy) <*> obj .: "sub_policy" <*>
obj .: "rule"))
instance HsJSONPB.ToJSON ImplicitMetaPolicy where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON ImplicitMetaPolicy where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema ImplicitMetaPolicy where
declareNamedSchema _
= do let declare_sub_policy = HsJSONPB.declareSchemaRef
implicitMetaPolicySubPolicy <- declare_sub_policy Proxy.Proxy
let declare_rule = HsJSONPB.declareSchemaRef
implicitMetaPolicyRule <- declare_rule Proxy.Proxy
let _ = Hs.pure ImplicitMetaPolicy <*>
HsJSONPB.asProxy declare_sub_policy
<*> HsJSONPB.asProxy declare_rule
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "ImplicitMetaPolicy",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("sub_policy", implicitMetaPolicySubPolicy),
("rule", implicitMetaPolicyRule)]}})
data ImplicitMetaPolicy_Rule = ImplicitMetaPolicy_RuleANY
| ImplicitMetaPolicy_RuleALL
| ImplicitMetaPolicy_RuleMAJORITY
deriving (Hs.Show, Hs.Eq, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named ImplicitMetaPolicy_Rule where
nameOf _ = (Hs.fromString "ImplicitMetaPolicy_Rule")
instance HsProtobuf.HasDefault ImplicitMetaPolicy_Rule
instance Hs.Bounded ImplicitMetaPolicy_Rule where
minBound = ImplicitMetaPolicy_RuleANY
maxBound = ImplicitMetaPolicy_RuleMAJORITY
instance Hs.Ord ImplicitMetaPolicy_Rule where
compare x y
= Hs.compare (HsProtobuf.fromProtoEnum x)
(HsProtobuf.fromProtoEnum y)
instance HsProtobuf.ProtoEnum ImplicitMetaPolicy_Rule where
toProtoEnumMay 0 = Hs.Just ImplicitMetaPolicy_RuleANY
toProtoEnumMay 1 = Hs.Just ImplicitMetaPolicy_RuleALL
toProtoEnumMay 2 = Hs.Just ImplicitMetaPolicy_RuleMAJORITY
toProtoEnumMay _ = Hs.Nothing
fromProtoEnum (ImplicitMetaPolicy_RuleANY) = 0
fromProtoEnum (ImplicitMetaPolicy_RuleALL) = 1
fromProtoEnum (ImplicitMetaPolicy_RuleMAJORITY) = 2
instance HsJSONPB.ToJSONPB ImplicitMetaPolicy_Rule where
toJSONPB x _ = HsJSONPB.enumFieldString x
toEncodingPB x _ = HsJSONPB.enumFieldEncoding x
instance HsJSONPB.FromJSONPB ImplicitMetaPolicy_Rule where
parseJSONPB (HsJSONPB.String "ANY")
= Hs.pure ImplicitMetaPolicy_RuleANY
parseJSONPB (HsJSONPB.String "ALL")
= Hs.pure ImplicitMetaPolicy_RuleALL
parseJSONPB (HsJSONPB.String "MAJORITY")
= Hs.pure ImplicitMetaPolicy_RuleMAJORITY
parseJSONPB v = (HsJSONPB.typeMismatch "ImplicitMetaPolicy_Rule" v)
instance HsJSONPB.ToJSON ImplicitMetaPolicy_Rule where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON ImplicitMetaPolicy_Rule where
parseJSON = HsJSONPB.parseJSONPB
instance HsProtobuf.Finite ImplicitMetaPolicy_Rule | null | https://raw.githubusercontent.com/hyperledger-labs/fabric-chaincode-haskell/9594e82371a9a805ad812e38fd58e063df5ca4e7/protos-hs/Common/Policies.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
| Generated by Haskell protocol buffer compiler. DO NOT EDIT! | # LANGUAGE DeriveGeneric #
# LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - matches #
module Common.Policies where
import qualified Prelude as Hs
import qualified Proto3.Suite.Class as HsProtobuf
import qualified Proto3.Suite.DotProto as HsProtobuf
import qualified Proto3.Suite.JSONPB as HsJSONPB
import Proto3.Suite.JSONPB ((.=), (.:))
import qualified Proto3.Suite.Types as HsProtobuf
import qualified Proto3.Wire as HsProtobuf
import qualified Control.Applicative as Hs
import Control.Applicative ((<*>), (<|>), (<$>))
import qualified Control.DeepSeq as Hs
import qualified Control.Monad as Hs
import qualified Data.ByteString as Hs
import qualified Data.Coerce as Hs
import qualified Data.Int as Hs (Int16, Int32, Int64)
import qualified Data.List.NonEmpty as Hs (NonEmpty(..))
import qualified Data.Map as Hs (Map, mapKeysMonotonic)
import qualified Data.Proxy as Proxy
import qualified Data.String as Hs (fromString)
import qualified Data.Text.Lazy as Hs (Text)
import qualified Data.Vector as Hs (Vector)
import qualified Data.Word as Hs (Word16, Word32, Word64)
import qualified GHC.Enum as Hs
import qualified GHC.Generics as Hs
import qualified Unsafe.Coerce as Hs
import qualified Msp.MspPrincipal
data Policy = Policy{policyType :: Hs.Int32,
policyValue :: Hs.ByteString}
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named Policy where
nameOf _ = (Hs.fromString "Policy")
instance HsProtobuf.HasDefault Policy
instance HsProtobuf.Message Policy where
encodeMessage _
Policy{policyType = policyType, policyValue = policyValue}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
policyType),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
policyValue)])
decodeMessage _
= (Hs.pure Policy) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.Int32)
(HsProtobuf.Single "type")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Prim HsProtobuf.Bytes)
(HsProtobuf.Single "value")
[]
"")]
instance HsJSONPB.ToJSONPB Policy where
toJSONPB (Policy f1 f2)
= (HsJSONPB.object ["type" .= f1, "value" .= f2])
toEncodingPB (Policy f1 f2)
= (HsJSONPB.pairs ["type" .= f1, "value" .= f2])
instance HsJSONPB.FromJSONPB Policy where
parseJSONPB
= (HsJSONPB.withObject "Policy"
(\ obj -> (Hs.pure Policy) <*> obj .: "type" <*> obj .: "value"))
instance HsJSONPB.ToJSON Policy where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON Policy where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema Policy where
declareNamedSchema _
= do let declare_type = HsJSONPB.declareSchemaRef
policyType <- declare_type Proxy.Proxy
let declare_value = HsJSONPB.declareSchemaRef
policyValue <- declare_value Proxy.Proxy
let _ = Hs.pure Policy <*> HsJSONPB.asProxy declare_type <*>
HsJSONPB.asProxy declare_value
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName = Hs.Just "Policy",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("type", policyType),
("value", policyValue)]}})
data Policy_PolicyType = Policy_PolicyTypeUNKNOWN
| Policy_PolicyTypeSIGNATURE
| Policy_PolicyTypeMSP
| Policy_PolicyTypeIMPLICIT_META
deriving (Hs.Show, Hs.Eq, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named Policy_PolicyType where
nameOf _ = (Hs.fromString "Policy_PolicyType")
instance HsProtobuf.HasDefault Policy_PolicyType
instance Hs.Bounded Policy_PolicyType where
minBound = Policy_PolicyTypeUNKNOWN
maxBound = Policy_PolicyTypeIMPLICIT_META
instance Hs.Ord Policy_PolicyType where
compare x y
= Hs.compare (HsProtobuf.fromProtoEnum x)
(HsProtobuf.fromProtoEnum y)
instance HsProtobuf.ProtoEnum Policy_PolicyType where
toProtoEnumMay 0 = Hs.Just Policy_PolicyTypeUNKNOWN
toProtoEnumMay 1 = Hs.Just Policy_PolicyTypeSIGNATURE
toProtoEnumMay 2 = Hs.Just Policy_PolicyTypeMSP
toProtoEnumMay 3 = Hs.Just Policy_PolicyTypeIMPLICIT_META
toProtoEnumMay _ = Hs.Nothing
fromProtoEnum (Policy_PolicyTypeUNKNOWN) = 0
fromProtoEnum (Policy_PolicyTypeSIGNATURE) = 1
fromProtoEnum (Policy_PolicyTypeMSP) = 2
fromProtoEnum (Policy_PolicyTypeIMPLICIT_META) = 3
instance HsJSONPB.ToJSONPB Policy_PolicyType where
toJSONPB x _ = HsJSONPB.enumFieldString x
toEncodingPB x _ = HsJSONPB.enumFieldEncoding x
instance HsJSONPB.FromJSONPB Policy_PolicyType where
parseJSONPB (HsJSONPB.String "UNKNOWN")
= Hs.pure Policy_PolicyTypeUNKNOWN
parseJSONPB (HsJSONPB.String "SIGNATURE")
= Hs.pure Policy_PolicyTypeSIGNATURE
parseJSONPB (HsJSONPB.String "MSP") = Hs.pure Policy_PolicyTypeMSP
parseJSONPB (HsJSONPB.String "IMPLICIT_META")
= Hs.pure Policy_PolicyTypeIMPLICIT_META
parseJSONPB v = (HsJSONPB.typeMismatch "Policy_PolicyType" v)
instance HsJSONPB.ToJSON Policy_PolicyType where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON Policy_PolicyType where
parseJSON = HsJSONPB.parseJSONPB
instance HsProtobuf.Finite Policy_PolicyType
data SignaturePolicyEnvelope = SignaturePolicyEnvelope{signaturePolicyEnvelopeVersion
:: Hs.Int32,
signaturePolicyEnvelopeRule ::
Hs.Maybe Common.Policies.SignaturePolicy,
signaturePolicyEnvelopeIdentities ::
Hs.Vector Msp.MspPrincipal.MSPPrincipal}
deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named SignaturePolicyEnvelope where
nameOf _ = (Hs.fromString "SignaturePolicyEnvelope")
instance HsProtobuf.HasDefault SignaturePolicyEnvelope
instance HsProtobuf.Message SignaturePolicyEnvelope where
encodeMessage _
SignaturePolicyEnvelope{signaturePolicyEnvelopeVersion =
signaturePolicyEnvelopeVersion,
signaturePolicyEnvelopeRule = signaturePolicyEnvelopeRule,
signaturePolicyEnvelopeIdentities =
signaturePolicyEnvelopeIdentities}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
signaturePolicyEnvelopeVersion),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
(Hs.coerce @(Hs.Maybe Common.Policies.SignaturePolicy)
@(HsProtobuf.Nested Common.Policies.SignaturePolicy)
signaturePolicyEnvelopeRule)),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 3)
(Hs.coerce @(Hs.Vector Msp.MspPrincipal.MSPPrincipal)
@(HsProtobuf.NestedVec Msp.MspPrincipal.MSPPrincipal)
signaturePolicyEnvelopeIdentities))])
decodeMessage _
= (Hs.pure SignaturePolicyEnvelope) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(Hs.coerce @(_ (HsProtobuf.Nested Common.Policies.SignaturePolicy))
@(_ (Hs.Maybe Common.Policies.SignaturePolicy))
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2)))
<*>
(Hs.coerce
@(_ (HsProtobuf.NestedVec Msp.MspPrincipal.MSPPrincipal))
@(_ (Hs.Vector Msp.MspPrincipal.MSPPrincipal))
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 3)))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.Int32)
(HsProtobuf.Single "version")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Prim
(HsProtobuf.Named (HsProtobuf.Single "SignaturePolicy")))
(HsProtobuf.Single "rule")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 3)
(HsProtobuf.Repeated
(HsProtobuf.Named (HsProtobuf.Single "MSPPrincipal")))
(HsProtobuf.Single "identities")
[]
"")]
instance HsJSONPB.ToJSONPB SignaturePolicyEnvelope where
toJSONPB (SignaturePolicyEnvelope f1 f2 f3)
= (HsJSONPB.object
["version" .= f1, "rule" .= f2, "identities" .= f3])
toEncodingPB (SignaturePolicyEnvelope f1 f2 f3)
= (HsJSONPB.pairs
["version" .= f1, "rule" .= f2, "identities" .= f3])
instance HsJSONPB.FromJSONPB SignaturePolicyEnvelope where
parseJSONPB
= (HsJSONPB.withObject "SignaturePolicyEnvelope"
(\ obj ->
(Hs.pure SignaturePolicyEnvelope) <*> obj .: "version" <*>
obj .: "rule"
<*> obj .: "identities"))
instance HsJSONPB.ToJSON SignaturePolicyEnvelope where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON SignaturePolicyEnvelope where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema SignaturePolicyEnvelope where
declareNamedSchema _
= do let declare_version = HsJSONPB.declareSchemaRef
signaturePolicyEnvelopeVersion <- declare_version Proxy.Proxy
let declare_rule = HsJSONPB.declareSchemaRef
signaturePolicyEnvelopeRule <- declare_rule Proxy.Proxy
let declare_identities = HsJSONPB.declareSchemaRef
signaturePolicyEnvelopeIdentities <- declare_identities Proxy.Proxy
let _ = Hs.pure SignaturePolicyEnvelope <*>
HsJSONPB.asProxy declare_version
<*> HsJSONPB.asProxy declare_rule
<*> HsJSONPB.asProxy declare_identities
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicyEnvelope",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("version", signaturePolicyEnvelopeVersion),
("rule", signaturePolicyEnvelopeRule),
("identities",
signaturePolicyEnvelopeIdentities)]}})
-- | Generated wrapper for the proto3 @SignaturePolicy@ message
-- (proto3-suite-style output; regenerate from the .proto rather than
-- hand-editing).  The single field carries the message's @oneof Type@
-- payload; 'Hs.Nothing' means the oneof was unset on the wire (see the
-- encode/decode instances below).
newtype SignaturePolicy = SignaturePolicy{signaturePolicyType ::
                                          Hs.Maybe SignaturePolicyType}
                            deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
-- Protobuf type name, used for error reporting and .proto rendering.
instance HsProtobuf.Named SignaturePolicy where
        nameOf _ = (Hs.fromString "SignaturePolicy")
-- proto3 default value: every field at its zero value (oneof unset).
instance HsProtobuf.HasDefault SignaturePolicy
instance HsProtobuf.Message SignaturePolicy where
encodeMessage _
SignaturePolicy{signaturePolicyType = signaturePolicyType}
= (Hs.mconcat
[case signaturePolicyType of
Hs.Nothing -> Hs.mempty
Hs.Just x
-> case x of
SignaturePolicyTypeSignedBy y
-> (HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
(HsProtobuf.ForceEmit y))
SignaturePolicyTypeNOutOf y
-> (HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
(Hs.coerce @(Hs.Maybe Common.Policies.SignaturePolicy_NOutOf)
@(HsProtobuf.Nested Common.Policies.SignaturePolicy_NOutOf)
(Hs.Just y)))])
decodeMessage _
= (Hs.pure SignaturePolicy) <*>
(HsProtobuf.oneof Hs.Nothing
[((HsProtobuf.FieldNumber 1),
(Hs.pure (Hs.Just Hs.. SignaturePolicyTypeSignedBy)) <*>
HsProtobuf.decodeMessageField),
((HsProtobuf.FieldNumber 2),
(Hs.pure (Hs.fmap SignaturePolicyTypeNOutOf)) <*>
(Hs.coerce
@(_ (HsProtobuf.Nested Common.Policies.SignaturePolicy_NOutOf))
@(_ (Hs.Maybe Common.Policies.SignaturePolicy_NOutOf))
HsProtobuf.decodeMessageField))])
dotProto _ = []
instance HsJSONPB.ToJSONPB SignaturePolicy where
toJSONPB (SignaturePolicy f1_or_f2)
= (HsJSONPB.object
[(let encodeType
= (case f1_or_f2 of
Hs.Just (SignaturePolicyTypeSignedBy f1)
-> (HsJSONPB.pair "signed_by" f1)
Hs.Just (SignaturePolicyTypeNOutOf f2)
-> (HsJSONPB.pair "n_out_of" f2)
Hs.Nothing -> Hs.mempty)
in
\ options ->
if HsJSONPB.optEmitNamedOneof options then
("Type" .= (HsJSONPB.objectOrNull [encodeType] options)) options
else encodeType options)])
toEncodingPB (SignaturePolicy f1_or_f2)
= (HsJSONPB.pairs
[(let encodeType
= (case f1_or_f2 of
Hs.Just (SignaturePolicyTypeSignedBy f1)
-> (HsJSONPB.pair "signed_by" f1)
Hs.Just (SignaturePolicyTypeNOutOf f2)
-> (HsJSONPB.pair "n_out_of" f2)
Hs.Nothing -> Hs.mempty)
in
\ options ->
if HsJSONPB.optEmitNamedOneof options then
("Type" .= (HsJSONPB.pairsOrNull [encodeType] options)) options
else encodeType options)])
instance HsJSONPB.FromJSONPB SignaturePolicy where
parseJSONPB
= (HsJSONPB.withObject "SignaturePolicy"
(\ obj ->
(Hs.pure SignaturePolicy) <*>
(let parseType parseObj
= Hs.msum
[Hs.Just Hs.. SignaturePolicyTypeSignedBy <$>
(HsJSONPB.parseField parseObj "signed_by"),
Hs.Just Hs.. SignaturePolicyTypeNOutOf <$>
(HsJSONPB.parseField parseObj "n_out_of"),
Hs.pure Hs.Nothing]
in
((obj .: "Type") Hs.>>= (HsJSONPB.withObject "Type" parseType)) <|>
(parseType obj))))
instance HsJSONPB.ToJSON SignaturePolicy where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON SignaturePolicy where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema SignaturePolicy where
declareNamedSchema _
= do let declare_Type = HsJSONPB.declareSchemaRef
signaturePolicyType <- declare_Type Proxy.Proxy
let _ = Hs.pure SignaturePolicy <*> HsJSONPB.asProxy declare_Type
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicy",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("Type", signaturePolicyType)]}})
-- | Generated record for the nested proto3 @SignaturePolicy.NOutOf@
-- message: a threshold rule over a vector of sub-policies.
-- NOTE(review): the "satisfied when at least n of rules hold" reading
-- comes from the upstream proto's comments, not from this file --
-- confirm against common/policies.proto.
data SignaturePolicy_NOutOf = SignaturePolicy_NOutOf{signaturePolicy_NOutOfN
                                                     :: Hs.Int32,
                                                     signaturePolicy_NOutOfRules ::
                                                     Hs.Vector Common.Policies.SignaturePolicy}
                                deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named SignaturePolicy_NOutOf where
        nameOf _ = (Hs.fromString "SignaturePolicy_NOutOf")
instance HsProtobuf.HasDefault SignaturePolicy_NOutOf
instance HsProtobuf.Message SignaturePolicy_NOutOf where
encodeMessage _
SignaturePolicy_NOutOf{signaturePolicy_NOutOfN =
signaturePolicy_NOutOfN,
signaturePolicy_NOutOfRules = signaturePolicy_NOutOfRules}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
signaturePolicy_NOutOfN),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
(Hs.coerce @(Hs.Vector Common.Policies.SignaturePolicy)
@(HsProtobuf.NestedVec Common.Policies.SignaturePolicy)
signaturePolicy_NOutOfRules))])
decodeMessage _
= (Hs.pure SignaturePolicy_NOutOf) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(Hs.coerce
@(_ (HsProtobuf.NestedVec Common.Policies.SignaturePolicy))
@(_ (Hs.Vector Common.Policies.SignaturePolicy))
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2)))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.Int32)
(HsProtobuf.Single "n")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Repeated
(HsProtobuf.Named (HsProtobuf.Single "SignaturePolicy")))
(HsProtobuf.Single "rules")
[]
"")]
instance HsJSONPB.ToJSONPB SignaturePolicy_NOutOf where
toJSONPB (SignaturePolicy_NOutOf f1 f2)
= (HsJSONPB.object ["n" .= f1, "rules" .= f2])
toEncodingPB (SignaturePolicy_NOutOf f1 f2)
= (HsJSONPB.pairs ["n" .= f1, "rules" .= f2])
instance HsJSONPB.FromJSONPB SignaturePolicy_NOutOf where
parseJSONPB
= (HsJSONPB.withObject "SignaturePolicy_NOutOf"
(\ obj ->
(Hs.pure SignaturePolicy_NOutOf) <*> obj .: "n" <*>
obj .: "rules"))
instance HsJSONPB.ToJSON SignaturePolicy_NOutOf where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON SignaturePolicy_NOutOf where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema SignaturePolicy_NOutOf where
declareNamedSchema _
= do let declare_n = HsJSONPB.declareSchemaRef
signaturePolicy_NOutOfN <- declare_n Proxy.Proxy
let declare_rules = HsJSONPB.declareSchemaRef
signaturePolicy_NOutOfRules <- declare_rules Proxy.Proxy
let _ = Hs.pure SignaturePolicy_NOutOf <*>
HsJSONPB.asProxy declare_n
<*> HsJSONPB.asProxy declare_rules
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicy_NOutOf",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("n", signaturePolicy_NOutOfN),
("rules", signaturePolicy_NOutOfRules)]}})
-- | Generated sum type for @SignaturePolicy@'s @oneof Type@: a direct
-- signer reference (wire field 1, @signed_by@) or a recursive
-- n-out-of threshold rule (wire field 2, @n_out_of@) -- field numbers
-- per the Message instance for 'SignaturePolicy' above.
-- NOTE(review): the Int32 presumably indexes the enclosing envelope's
-- identities list; confirm against the upstream .proto.
data SignaturePolicyType = SignaturePolicyTypeSignedBy Hs.Int32
                         | SignaturePolicyTypeNOutOf Common.Policies.SignaturePolicy_NOutOf
                             deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named SignaturePolicyType where
        nameOf _ = (Hs.fromString "SignaturePolicyType")
instance HsJSONPB.ToSchema SignaturePolicyType where
declareNamedSchema _
= do let declare_signed_by = HsJSONPB.declareSchemaRef
signaturePolicyTypeSignedBy <- declare_signed_by Proxy.Proxy
let _ = Hs.pure SignaturePolicyTypeSignedBy <*>
HsJSONPB.asProxy declare_signed_by
let declare_n_out_of = HsJSONPB.declareSchemaRef
signaturePolicyTypeNOutOf <- declare_n_out_of Proxy.Proxy
let _ = Hs.pure SignaturePolicyTypeNOutOf <*>
HsJSONPB.asProxy declare_n_out_of
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "SignaturePolicyType",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("signed_by", signaturePolicyTypeSignedBy),
("n_out_of", signaturePolicyTypeNOutOf)],
HsJSONPB._schemaMinProperties = Hs.Just 1,
HsJSONPB._schemaMaxProperties = Hs.Just 1}})
-- | Generated record for the proto3 @ImplicitMetaPolicy@ message:
-- @sub_policy@ (wire field 1, string) names the policy to aggregate,
-- @rule@ (wire field 2, enum) picks the aggregation mode -- field
-- numbers per the Message instance below.  'HsProtobuf.Enumerated'
-- preserves unrecognized enum wire values instead of failing
-- (proto3-suite convention).
data ImplicitMetaPolicy = ImplicitMetaPolicy{implicitMetaPolicySubPolicy
                                             :: Hs.Text,
                                             implicitMetaPolicyRule ::
                                             HsProtobuf.Enumerated
                                               Common.Policies.ImplicitMetaPolicy_Rule}
                            deriving (Hs.Show, Hs.Eq, Hs.Ord, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named ImplicitMetaPolicy where
        nameOf _ = (Hs.fromString "ImplicitMetaPolicy")
instance HsProtobuf.HasDefault ImplicitMetaPolicy
instance HsProtobuf.Message ImplicitMetaPolicy where
encodeMessage _
ImplicitMetaPolicy{implicitMetaPolicySubPolicy =
implicitMetaPolicySubPolicy,
implicitMetaPolicyRule = implicitMetaPolicyRule}
= (Hs.mconcat
[(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 1)
implicitMetaPolicySubPolicy),
(HsProtobuf.encodeMessageField (HsProtobuf.FieldNumber 2)
implicitMetaPolicyRule)])
decodeMessage _
= (Hs.pure ImplicitMetaPolicy) <*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 1))
<*>
(HsProtobuf.at HsProtobuf.decodeMessageField
(HsProtobuf.FieldNumber 2))
dotProto _
= [(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 1)
(HsProtobuf.Prim HsProtobuf.String)
(HsProtobuf.Single "sub_policy")
[]
""),
(HsProtobuf.DotProtoField (HsProtobuf.FieldNumber 2)
(HsProtobuf.Prim (HsProtobuf.Named (HsProtobuf.Single "Rule")))
(HsProtobuf.Single "rule")
[]
"")]
instance HsJSONPB.ToJSONPB ImplicitMetaPolicy where
toJSONPB (ImplicitMetaPolicy f1 f2)
= (HsJSONPB.object ["sub_policy" .= f1, "rule" .= f2])
toEncodingPB (ImplicitMetaPolicy f1 f2)
= (HsJSONPB.pairs ["sub_policy" .= f1, "rule" .= f2])
instance HsJSONPB.FromJSONPB ImplicitMetaPolicy where
parseJSONPB
= (HsJSONPB.withObject "ImplicitMetaPolicy"
(\ obj ->
(Hs.pure ImplicitMetaPolicy) <*> obj .: "sub_policy" <*>
obj .: "rule"))
instance HsJSONPB.ToJSON ImplicitMetaPolicy where
toJSON = HsJSONPB.toAesonValue
toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON ImplicitMetaPolicy where
parseJSON = HsJSONPB.parseJSONPB
instance HsJSONPB.ToSchema ImplicitMetaPolicy where
declareNamedSchema _
= do let declare_sub_policy = HsJSONPB.declareSchemaRef
implicitMetaPolicySubPolicy <- declare_sub_policy Proxy.Proxy
let declare_rule = HsJSONPB.declareSchemaRef
implicitMetaPolicyRule <- declare_rule Proxy.Proxy
let _ = Hs.pure ImplicitMetaPolicy <*>
HsJSONPB.asProxy declare_sub_policy
<*> HsJSONPB.asProxy declare_rule
Hs.return
(HsJSONPB.NamedSchema{HsJSONPB._namedSchemaName =
Hs.Just "ImplicitMetaPolicy",
HsJSONPB._namedSchemaSchema =
Hs.mempty{HsJSONPB._schemaParamSchema =
Hs.mempty{HsJSONPB._paramSchemaType =
Hs.Just HsJSONPB.SwaggerObject},
HsJSONPB._schemaProperties =
HsJSONPB.insOrdFromList
[("sub_policy", implicitMetaPolicySubPolicy),
("rule", implicitMetaPolicyRule)]}})
-- | Generated enum for @ImplicitMetaPolicy.Rule@ with wire values
-- ANY = 0, ALL = 1, MAJORITY = 2 (see 'fromProtoEnum' below).
data ImplicitMetaPolicy_Rule = ImplicitMetaPolicy_RuleANY
                             | ImplicitMetaPolicy_RuleALL
                             | ImplicitMetaPolicy_RuleMAJORITY
                                 deriving (Hs.Show, Hs.Eq, Hs.Generic, Hs.NFData)
instance HsProtobuf.Named ImplicitMetaPolicy_Rule where
        nameOf _ = (Hs.fromString "ImplicitMetaPolicy_Rule")
instance HsProtobuf.HasDefault ImplicitMetaPolicy_Rule
-- Bounds follow the lowest/highest wire numbers.
instance Hs.Bounded ImplicitMetaPolicy_Rule where
        minBound = ImplicitMetaPolicy_RuleANY
        maxBound = ImplicitMetaPolicy_RuleMAJORITY
-- Ordering is defined by the wire number rather than a derived Ord, so
-- it stays stable if constructors are ever reordered in the source.
instance Hs.Ord ImplicitMetaPolicy_Rule where
        compare x y
          = Hs.compare (HsProtobuf.fromProtoEnum x)
              (HsProtobuf.fromProtoEnum y)
-- Wire <-> constructor mapping; unknown wire values decode to Nothing
-- (callers keep them via 'HsProtobuf.Enumerated').
instance HsProtobuf.ProtoEnum ImplicitMetaPolicy_Rule where
        toProtoEnumMay 0 = Hs.Just ImplicitMetaPolicy_RuleANY
        toProtoEnumMay 1 = Hs.Just ImplicitMetaPolicy_RuleALL
        toProtoEnumMay 2 = Hs.Just ImplicitMetaPolicy_RuleMAJORITY
        toProtoEnumMay _ = Hs.Nothing
        fromProtoEnum (ImplicitMetaPolicy_RuleANY) = 0
        fromProtoEnum (ImplicitMetaPolicy_RuleALL) = 1
        fromProtoEnum (ImplicitMetaPolicy_RuleMAJORITY) = 2
-- JSONPB renders the enum as its bare constructor-suffix string
-- ("ANY" / "ALL" / "MAJORITY"), per the proto3 JSON mapping.
instance HsJSONPB.ToJSONPB ImplicitMetaPolicy_Rule where
        toJSONPB x _ = HsJSONPB.enumFieldString x
        toEncodingPB x _ = HsJSONPB.enumFieldEncoding x
-- Only the three exact strings are accepted; anything else (including
-- numeric enum values) is rejected via 'typeMismatch'.
instance HsJSONPB.FromJSONPB ImplicitMetaPolicy_Rule where
        parseJSONPB (HsJSONPB.String "ANY")
          = Hs.pure ImplicitMetaPolicy_RuleANY
        parseJSONPB (HsJSONPB.String "ALL")
          = Hs.pure ImplicitMetaPolicy_RuleALL
        parseJSONPB (HsJSONPB.String "MAJORITY")
          = Hs.pure ImplicitMetaPolicy_RuleMAJORITY
        parseJSONPB v = (HsJSONPB.typeMismatch "ImplicitMetaPolicy_Rule" v)
-- Plain aeson instances delegate to the JSONPB ones.
instance HsJSONPB.ToJSON ImplicitMetaPolicy_Rule where
        toJSON = HsJSONPB.toAesonValue
        toEncoding = HsJSONPB.toAesonEncoding
instance HsJSONPB.FromJSON ImplicitMetaPolicy_Rule where
        parseJSON = HsJSONPB.parseJSONPB
-- Marker instance; methods presumably come from class defaults /
-- Generic (confirm in proto3-wire).
instance HsProtobuf.Finite ImplicitMetaPolicy_Rule
2704f156a9712048387a8ad4cd2d975e32545c94dc4739becffeae2de8156e26 | kdltr/chicken-core | chicken-profile.scm | ;;;; chicken-profile.scm - Formatted display of profile outputs - felix -*- Scheme -*-
;
Copyright ( c ) 2008 - 2021 , The CHICKEN Team
Copyright ( c ) 2000 - 2007 ,
; All rights reserved.
;
; Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
; conditions are met:
;
; Redistributions of source code must retain the above copyright notice, this list of conditions and the following
; disclaimer.
; Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
; disclaimer in the documentation and/or other materials provided with the distribution.
; Neither the name of the author nor the names of its contributors may be used to endorse or promote
; products derived from this software without specific prior written permission.
;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS
; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
; AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
; OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
; POSSIBILITY OF SUCH DAMAGE.
(declare (block))
(module main ()
(import scheme
chicken.base
chicken.file
chicken.file.posix
chicken.fixnum
chicken.internal
chicken.platform
chicken.process-context
chicken.sort
chicken.string)
(include "mini-srfi-1.scm")
;; Mutable configuration, filled in by `run' from the command line.
(define symbol-table-size 3001)  ; bucket count for the profile hash table
(define sort-by #f)              ; row comparator; defaulted to sort-by-time below
(define file #f)                 ; profile file name; #f = auto-pick newest PROFILE.*
(define no-unused #f)            ; when #t, drop procedures with zero calls
(define seconds-digits 3)        ; decimals shown in the "seconds" column
(define average-digits 3)        ; decimals shown in the "average" column
(define percent-digits 3)        ; decimals shown in the "percent" column
(define top 0)                   ; show only the first N rows; 0 = show all
;; Print the help text (a #<# here-doc; the #{...} escapes interpolate
;; the current -decimals defaults) and terminate with exit status 64
;; (conventionally EX_USAGE).  Also used as the error exit when a
;; second FILENAME argument is seen.
(define (print-usage)
  (display #<#EOF
Usage: chicken-profile [OPTION ...] [FILENAME ...]
 -sort-by-calls sort output by call frequency
 -sort-by-time sort output by procedure execution time
 -sort-by-avg sort output by average procedure execution time
 -sort-by-name sort output alphabetically by procedure name
 -decimals DDD set number of decimals for seconds, average and
 percent columns (three digits, default: #{seconds-digits}#{average-digits}#{percent-digits})
 -no-unused remove procedures that are never called
 -top N display only the top N entries
 -help show this text and exit
 -version show version and exit
 -release show release number and exit
FILENAME defaults to the `PROFILE.<number>', selecting the one with
the highest modification time, in case multiple profiles exist.
EOF
;|
)
  (exit 64) )
;; Parse the command line, mutating the configuration variables above,
;; then produce the report.  Options may appear in any order; the first
;; non-option argument names the profile file and a second one is
;; rejected via print-usage.
(define (run args)
  (let loop ([args args])
    (if (null? args)
        (begin
          ;; No explicit FILENAME: pick the most recently modified PROFILE.*
          (unless file
            (set! file
              (let ((fs (glob "PROFILE.*")))
                (if (null? fs)
                    (error "no PROFILEs found")
                    (first (sort fs
                                 (lambda (f1 f2)
                                   (> (file-modification-time f1)
                                      (file-modification-time f2))) ) ) ) ) ) )
          (write-profile) )
        (let ([arg (car args)]
              [rest (cdr args)] )
          ;; Consume the following argument as this option's value
          ;; (shrinks `rest', which the tail call below recurses on).
          (define (next-arg)
            (if (null? rest)
                (error "missing argument to option" arg)
                (let ((narg (car rest)))
                  (set! rest (cdr rest))
                  narg)))
          ;; Like next-arg, but the value must parse as a positive number.
          (define (next-number)
            (let ((n (string->number (next-arg))))
              (if (and n (> n 0)) n (error "invalid argument to option" arg))))
          (cond
           [(member arg '("-h" "-help" "--help")) (print-usage)]
           [(string=? arg "-version")
            (print "chicken-profile - Version " (chicken-version))
            (exit) ]
           [(string=? arg "-release")
            (print (chicken-version))
            (exit) ]
           [(string=? arg "-no-unused") (set! no-unused #t)]
           [(string=? arg "-top") (set! top (next-number))]
           [(string=? arg "-sort-by-calls") (set! sort-by sort-by-calls)]
           [(string=? arg "-sort-by-time") (set! sort-by sort-by-time)]
           [(string=? arg "-sort-by-avg") (set! sort-by sort-by-avg)]
           [(string=? arg "-sort-by-name") (set! sort-by sort-by-name)]
           [(string=? arg "-decimals") (set-decimals (next-arg))]
           ;; Anything else starting with "-" is an unknown option.
           [(and (> (string-length arg) 1) (char=? #\- (string-ref arg 0)))
            (error "invalid option" arg) ]
           ;; A second bare argument: the file was already chosen.
           [file (print-usage)]
           [else (set! file arg)] )
          (loop rest) ) ) ) )
;; Comparator for -sort-by-calls: order entries by call count,
;; descending, breaking ties by total time.  A count of #f means the
;; counter overflowed (rendered as "overflow" in the table) and is
;; treated as larger than any numeric count.
;;
;; Fix: the previous code returned #t whenever either count was #f, so
;; for a (number, overflow) pair both (x,y) and (y,x) compared "less" --
;; a non-antisymmetric predicate, which `sort' must not be given.
(define (sort-by-calls x y)
  (let ([c1 (second x)]
        [c2 (second y)] )
    (if (eqv? c1 c2)
        (> (third x) (third y))
        ;; Counts differ; an overflowed (#f) count outranks any number.
        (if (and c1 c2)
            (> c1 c2)
            (not c1) ) ) ) )
;; Comparator for -sort-by-time: order by total time, descending,
;; breaking ties by call count.  NOTE(review): the tie-break assumes
;; both counts are numbers; a #f ("overflow") count in a time-tied pair
;; would make (> #f ...) raise a type error -- confirm overflowed
;; counts cannot reach this comparator, or guard as sort-by-calls does.
(define (sort-by-time x y)
  (let ([c1 (third x)]
        [c2 (third y)] )
    (if (= c1 c2)
        (> (second x) (second y))
        (> c1 c2) ) ) )
;; Comparator for -sort-by-avg: order by average time per call (the
;; fourth list element, precomputed in write-profile), descending,
;; breaking ties by total time.
(define (sort-by-avg x y)
  (let ([c1 (cadddr x)]
        [c2 (cadddr y)] )
    (if (eqv? c1 c2)
        (> (third x) (third y))
        (> c1 c2) ) ) )
;; Comparator for -sort-by-name: plain ascending alphabetical order on
;; the procedure's symbol name (the first element of each entry).
(define (sort-by-name x y)
  (string<? (symbol->string (car x))
            (symbol->string (car y))))
;; Default ordering when no -sort-by-* option is given.
(set! sort-by sort-by-time)
;; Parse the -decimals argument: exactly three digit characters giving
;; the precision of the seconds, average and percent columns (e.g.
;; "323").  Anything else is rejected.
(define (set-decimals arg)
  ;; Digit at position N of ARG, clamped so format-real's fixnum
  ;; arithmetic cannot overflow.
  (define (arg-digit n)
    (let ((n (- (char->integer (string-ref arg n))
                (char->integer #\0))))
      (if (<= 0 n 9)
          (if (= n 9) 8 n) ; 9 => overflow in format-real
          (error "invalid argument to -decimals option" arg))))
  (if (= (string-length arg) 3)
      (begin
        (set! seconds-digits (arg-digit 0))
        (set! average-digits (arg-digit 1))
        (set! percent-digits (arg-digit 2)))
      (error "invalid argument to -decimals option" arg)))
;; Fresh bucket vector for accumulating profile entries, used with the
;; chicken.internal hash-table-set!/ref/for-each procedures below.
(define (make-symbol-table)
  (make-vector symbol-table-size '()))
;; Read a profile from the current input port.  An optional leading
;; symbol names the profile type; when absent the profile is treated as
;; 'instrumented.  Each following datum is (NAME COUNT TIME); entries
;; with the same NAME are merged by element-wise addition, where a #f
;; component (counter overflow) is contagious via the (and x y ...)
;; guard.  Returns (TYPE . ALIST) with ALIST mapping NAME -> (COUNT TIME).
(define (read-profile)
  (let* ((hash (make-symbol-table))
         (header (read))
         (type (if (symbol? header) header 'instrumented)))
    (do ((line (if (symbol? header) (read) header) (read)))
        ((eof-object? line))
      (hash-table-set!
       hash (first line)
       (map (lambda (x y) (and x y (+ x y)))
            (or (hash-table-ref hash (first line)) '(0 0))
            (cdr line))))
    ;; Flatten the table into an association list.
    (let ((alist '()))
      (hash-table-for-each
       (lambda (sym counts)
         (set! alist (alist-cons sym counts alist)))
       hash)
      (cons type alist))))
;; Pad STR with PADC out to at least COLS characters; pad on the left
;; when RIGHT is given (right-aligned column), otherwise on the right.
;; Strings already COLS wide or wider come back unchanged.
(define (format-string str cols #!optional right (padc #\space))
  (let ((pad (make-string (fxmax 0 (fx- cols (string-length str))) padc)))
    (if right
        (string-append pad str)
        (string-append str pad))))
;; Render N with exactly D decimal places, truncating (not rounding)
;; toward zero.  The decimals are produced via the "leading 1" trick:
;; (n - trunc(n) + 1) lies in [1,2), so multiplying by 10^d and
;; truncating yields a (d+1)-digit integer whose first digit is the
;; sentinel "1"; the substring drops it, preserving leading zeros.
;; D must be <= 8 (set-decimals clamps 9), presumably so 2*10^d stays
;; within the 32-bit fixnum range -- confirm on 32-bit builds.
(define (format-real n d)
  (let ((exact-value (inexact->exact (truncate n))))
    (string-append
     (number->string exact-value)
     (if (> d 0) "." "")
     (substring
      (number->string
       (inexact->exact
        (truncate
         (* (- n exact-value -1) (expt 10 d)))))
      1 (+ d 1)))))
;; Read `file', derive per-entry averages and percentages, sort, trim,
;; stringify and print the aligned report table to standard output.
(define (write-profile)
  (print "reading `" file "' ...\n")
  (let* ((type&data0 (with-input-from-file file read-profile))
         (type (car type&data0))
         (data0 (cdr type&data0))
         ;; Instrumented profiling results in total runtime being
         ;; counted for the outermost "main" procedure, while
         ;; statistical counts time spent only inside the procedure
         ;; itself. Ideally we'd have both, but that's tricky to do.
         ;; Hence: instrumented total = max of entries, statistical
         ;; total = sum of entries.
         (total-t (foldl (if (eq? type 'instrumented)
                             (lambda (r t) (max r (third t)))
                             (lambda (r t) (+ r (third t))))
                         0 data0))
         ;; Extend each (name count time) with (avg percent), then sort.
         (data (sort (map
                      (lambda (t)
                        (append
                         t
                         (let ((c (second t)) ; count
                               (t (third t))) ; time tallied to procedure
                           (list (or (and c (> c 0) (/ t c)) ; time / count
                                     0)
                                 (or (and (> total-t 0) (* (/ t total-t) 100)) ; % of total-time
                                     0)
                                 ))))
                      data0)
                     sort-by)))
    ;; Honor -top N (0 means "no limit").
    (if (< 0 top (length data))
        (set! data (take data top)))
    ;; Turn each entry into a row of display strings.
    (set! data (map (lambda (entry)
                      (let ((c (second entry)) ; count
                            (t (third entry)) ; total time
                            (a (fourth entry)) ; average time
                            (p (fifth entry)) ) ; % of max time
                        (list (##sys#symbol->string (first entry))
                              (if (not c) "overflow" (number->string c))
                              (format-real (/ t 1000) seconds-digits)
                              (format-real (/ a 1000) average-digits)
                              (format-real p percent-digits))))
                    (if no-unused
                        ;; NOTE(review): a #f ("overflow") count would make
                        ;; (> #f 0) raise here when -no-unused is given --
                        ;; confirm overflow entries can't occur at this point.
                        (filter (lambda (entry) (> (second entry) 0)) data)
                        data)))
    ;; Compute column widths over header + rows, then print the table.
    (let* ((headers (list "procedure" "calls" "seconds" "average" "percent"))
           (alignments (list #f #t #t #t #t))
           (spacing 2)
           (spacer (make-string spacing #\space))
           (column-widths (foldl
                           (lambda (max-widths row)
                             (map max (map string-length row) max-widths))
                           (list 0 0 0 0 0)
                           (cons headers data))))
      (define (print-row row)
        (print (string-intersperse (map format-string row column-widths alignments) spacer)))
      (print-row headers)
      ;; Separator line spanning all columns plus the inter-column gaps.
      (print (make-string (+ (foldl + 0 column-widths)
                             (* spacing (- (length alignments) 1)))
                          #\-))
      (for-each print-row data))))
(run (command-line-arguments))
)
| null | https://raw.githubusercontent.com/kdltr/chicken-core/b2e6c5243dd469064bec947cb3b49dafaa1514e5/chicken-profile.scm | scheme | chicken-profile.scm - Formatted display of profile outputs - felix -*- Scheme -*-
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the author nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
|
9 => overflow in format-real
counted for the outermost "main" procedure, while
statistical counts time spent only inside the procedure
itself. Ideally we'd have both, but that's tricky to do.
count
time tallied to procedure
time / count
% of total-time
count
total time
average time
% of max time | Copyright ( c ) 2008 - 2021 , The CHICKEN Team
Copyright ( c ) 2000 - 2007 ,
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS
CONTRIBUTORS BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
(declare (block))
(module main ()
(import scheme
chicken.base
chicken.file
chicken.file.posix
chicken.fixnum
chicken.internal
chicken.platform
chicken.process-context
chicken.sort
chicken.string)
(include "mini-srfi-1.scm")
(define symbol-table-size 3001)
(define sort-by #f)
(define file #f)
(define no-unused #f)
(define seconds-digits 3)
(define average-digits 3)
(define percent-digits 3)
(define top 0)
(define (print-usage)
(display #<#EOF
Usage: chicken-profile [OPTION ...] [FILENAME ...]
-sort-by-calls sort output by call frequency
-sort-by-time sort output by procedure execution time
-sort-by-avg sort output by average procedure execution time
-sort-by-name sort output alphabetically by procedure name
-decimals DDD set number of decimals for seconds, average and
percent columns (three digits, default: #{seconds-digits}#{average-digits}#{percent-digits})
-no-unused remove procedures that are never called
-top N display only the top N entries
-help show this text and exit
-version show version and exit
-release show release number and exit
FILENAME defaults to the `PROFILE.<number>', selecting the one with
the highest modification time, in case multiple profiles exist.
EOF
)
(exit 64) )
(define (run args)
(let loop ([args args])
(if (null? args)
(begin
(unless file
(set! file
(let ((fs (glob "PROFILE.*")))
(if (null? fs)
(error "no PROFILEs found")
(first (sort fs
(lambda (f1 f2)
(> (file-modification-time f1)
(file-modification-time f2))) ) ) ) ) ) )
(write-profile) )
(let ([arg (car args)]
[rest (cdr args)] )
(define (next-arg)
(if (null? rest)
(error "missing argument to option" arg)
(let ((narg (car rest)))
(set! rest (cdr rest))
narg)))
(define (next-number)
(let ((n (string->number (next-arg))))
(if (and n (> n 0)) n (error "invalid argument to option" arg))))
(cond
[(member arg '("-h" "-help" "--help")) (print-usage)]
[(string=? arg "-version")
(print "chicken-profile - Version " (chicken-version))
(exit) ]
[(string=? arg "-release")
(print (chicken-version))
(exit) ]
[(string=? arg "-no-unused") (set! no-unused #t)]
[(string=? arg "-top") (set! top (next-number))]
[(string=? arg "-sort-by-calls") (set! sort-by sort-by-calls)]
[(string=? arg "-sort-by-time") (set! sort-by sort-by-time)]
[(string=? arg "-sort-by-avg") (set! sort-by sort-by-avg)]
[(string=? arg "-sort-by-name") (set! sort-by sort-by-name)]
[(string=? arg "-decimals") (set-decimals (next-arg))]
[(and (> (string-length arg) 1) (char=? #\- (string-ref arg 0)))
(error "invalid option" arg) ]
[file (print-usage)]
[else (set! file arg)] )
(loop rest) ) ) ) )
(define (sort-by-calls x y)
(let ([c1 (second x)]
[c2 (second y)] )
(if (eqv? c1 c2)
(> (third x) (third y))
(if c1 (if c2 (> c1 c2) #t) #t) ) ) )
(define (sort-by-time x y)
(let ([c1 (third x)]
[c2 (third y)] )
(if (= c1 c2)
(> (second x) (second y))
(> c1 c2) ) ) )
(define (sort-by-avg x y)
(let ([c1 (cadddr x)]
[c2 (cadddr y)] )
(if (eqv? c1 c2)
(> (third x) (third y))
(> c1 c2) ) ) )
(define (sort-by-name x y)
(string<? (symbol->string (first x)) (symbol->string (first y))) )
(set! sort-by sort-by-time)
(define (set-decimals arg)
(define (arg-digit n)
(let ((n (- (char->integer (string-ref arg n))
(char->integer #\0))))
(if (<= 0 n 9)
(error "invalid argument to -decimals option" arg))))
(if (= (string-length arg) 3)
(begin
(set! seconds-digits (arg-digit 0))
(set! average-digits (arg-digit 1))
(set! percent-digits (arg-digit 2)))
(error "invalid argument to -decimals option" arg)))
(define (make-symbol-table)
(make-vector symbol-table-size '()))
(define (read-profile)
(let* ((hash (make-symbol-table))
(header (read))
(type (if (symbol? header) header 'instrumented)))
(do ((line (if (symbol? header) (read) header) (read)))
((eof-object? line))
(hash-table-set!
hash (first line)
(map (lambda (x y) (and x y (+ x y)))
(or (hash-table-ref hash (first line)) '(0 0))
(cdr line))))
(let ((alist '()))
(hash-table-for-each
(lambda (sym counts)
(set! alist (alist-cons sym counts alist)))
hash)
(cons type alist))))
(define (format-string str cols #!optional right (padc #\space))
(let* ((len (string-length str))
(pad (make-string (fxmax 0 (fx- cols len)) padc)) )
(if right
(string-append pad str)
(string-append str pad) ) ) )
(define (format-real n d)
(let ((exact-value (inexact->exact (truncate n))))
(string-append
(number->string exact-value)
(if (> d 0) "." "")
(substring
(number->string
(inexact->exact
(truncate
(* (- n exact-value -1) (expt 10 d)))))
1 (+ d 1)))))
(define (write-profile)
(print "reading `" file "' ...\n")
(let* ((type&data0 (with-input-from-file file read-profile))
(type (car type&data0))
(data0 (cdr type&data0))
Instrumented profiling results in total runtime being
(total-t (foldl (if (eq? type 'instrumented)
(lambda (r t) (max r (third t)))
(lambda (r t) (+ r (third t))))
0 data0))
(data (sort (map
(lambda (t)
(append
t
0)
0)
))))
data0)
sort-by)))
(if (< 0 top (length data))
(set! data (take data top)))
(set! data (map (lambda (entry)
(list (##sys#symbol->string (first entry))
(if (not c) "overflow" (number->string c))
(format-real (/ t 1000) seconds-digits)
(format-real (/ a 1000) average-digits)
(format-real p percent-digits))))
(if no-unused
(filter (lambda (entry) (> (second entry) 0)) data)
data)))
(let* ((headers (list "procedure" "calls" "seconds" "average" "percent"))
(alignments (list #f #t #t #t #t))
(spacing 2)
(spacer (make-string spacing #\space))
(column-widths (foldl
(lambda (max-widths row)
(map max (map string-length row) max-widths))
(list 0 0 0 0 0)
(cons headers data))))
(define (print-row row)
(print (string-intersperse (map format-string row column-widths alignments) spacer)))
(print-row headers)
(print (make-string (+ (foldl + 0 column-widths)
(* spacing (- (length alignments) 1)))
#\-))
(for-each print-row data))))
(run (command-line-arguments))
)
|
914e98cfdf4167beff18bbc8badba52519ce91a0e1d35574e87d95275e02ae6c | ghc/testsuite | tcrun043.hs | # LANGUAGE GADTs , TypeFamilies , ConstraintKinds #
import GHC.Prim ( Constraint )
type Showish = Show
-- | Render a value twice in a row, exercising the 'Showish'
-- constraint-synonym alias for 'Show'.
f :: (Showish a) => a -> String
f x = concat [show x, show x]
-- Two unit tag types used as type-level indices for the GADT below.
data T = T
data F = F
-- A GADT whose constructors pin the index: 'Tish' proves @a ~ T@,
-- 'Fish' proves @a ~ F@.
data GADT a where
  Tish :: GADT T
  Fish :: GADT F
-- A constraint family dispatching on the index: the T instance demands
-- 'Show', the F instance demands 'Num'.
type family Indexed a b :: Constraint
type instance Indexed T b = Show b
type instance Indexed F b = Num b
-- | Pattern matching on the GADT refines @a@, which reduces the
-- 'Indexed' constraint family: the 'Tish' branch gains @Show b@ and
-- can 'show'; the 'Fish' branch gains @Num b@ and can add.
g :: (Indexed a b) => GADT a -> b -> Either String b
g Tish x = Left (show x)
g Fish x = Right (x + 1)
-- | A constraint tuple ('ConstraintKinds'): 'Show' and 'Num' at once.
type TwoConstraints a = (Show a, Num a)

-- We'll NOINLINE h so that we test the code generation for
-- constraint tuples.  (The pragma's {-# ... #-} delimiters had been
-- garbled to bare '#' during extraction, which is not valid Haskell;
-- restored here.)
{-# NOINLINE h #-}
h :: TwoConstraints a => a -> String
h x = show (x + 1)
-- Exercise each definition once; the testsuite driver compares the
-- printed lines against the expected stdout.
main :: IO ()
main = do
    print $ f 9
    print $ f True
    print $ g Tish 10
    print $ g Tish False
    print $ g Fish 11
    print $ g Fish 12.0
    print $ h 13
    print $ h 14.0
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_run/tcrun043.hs | haskell | We'll NOINLINE h so that we test the code generation for
constraint tuples | # LANGUAGE GADTs , TypeFamilies , ConstraintKinds #
import GHC.Prim ( Constraint )
type Showish = Show
f :: (Showish a) => a -> String
f x = show x ++ show x
data T = T
data F = F
data GADT a where
Tish :: GADT T
Fish :: GADT F
type family Indexed a b :: Constraint
type instance Indexed T b = Show b
type instance Indexed F b = Num b
g :: (Indexed a b) => GADT a -> b -> Either String b
g Tish x = Left (show x)
g Fish x = Right (x + 1)
type TwoConstraints a = (Show a, Num a)
# NOINLINE h #
h :: TwoConstraints a => a -> String
h x = show (x + 1)
main :: IO ()
main = do
print $ f 9
print $ f True
print $ g Tish 10
print $ g Tish False
print $ g Fish 11
print $ g Fish 12.0
print $ h 13
print $ h 14.0
|
5c9d1d6db1124c164e279834f5eaa107317fbf378c7bea20d0284132514e3e42 | odis-labs/onix | Resolutions.ml | type t = {
with_constraint : OpamFormula.version_constraint OpamPackage.Name.Map.t;
without_constraint : OpamPackage.Name.Set.t;
compiler_name : OpamPackage.Name.t;
}
(* Empty resolution set: no constrained or unconstrained requests; the
   compiler name falls back to the base compiler. *)
let default =
  {
    with_constraint = OpamPackage.Name.Map.empty;
    without_constraint = OpamPackage.Name.Set.empty;
    compiler_name = Opam_utils.ocaml_base_compiler_name;
  }
(* The name -> version-constraint map for requests that carried one. *)
let constraints t = t.with_constraint
(* Build a [t] from parsed (name, optional constraint) pairs.  A name
   recognized as an OCaml compiler overrides [compiler_name] (the last
   such name wins).  Constrained names go to [with_constraint] (later
   duplicates replace earlier bindings, Map.add semantics); bare names
   go to [without_constraint]. *)
let make t =
  List.fold_left
    (fun acc (name, constraint_opt) ->
      let acc =
        if Opam_utils.is_ocaml_compiler_name name then
          { acc with compiler_name = name }
        else acc
      in
      match constraint_opt with
      | Some constr ->
        {
          acc with
          with_constraint =
            OpamPackage.Name.Map.add name constr acc.with_constraint;
        }
      | None ->
        {
          acc with
          without_constraint =
            OpamPackage.Name.Set.add name acc.without_constraint;
        })
    default t
(* Name of the compiler package selected by [make] (or the default). *)
let compiler_name t = t.compiler_name
(* Every requested package name -- constrained, unconstrained, and the
   compiler -- deduplicated through the set and returned as a list in
   the set's element order. *)
let all t =
  OpamPackage.Name.Map.fold
    (fun name _ acc -> OpamPackage.Name.Set.add name acc)
    t.with_constraint t.without_constraint
  |> OpamPackage.Name.Set.add t.compiler_name
  |> OpamPackage.Name.Set.to_seq
  |> List.of_seq
(* Dump the resolutions to stderr for debugging.  The "Resolutions:"
   header is printed whenever there is anything to list; the previous
   check only looked at [with_constraint], so unconstrained-only
   resolution sets printed their items with no header. *)
let debug t =
  if
    not
      (OpamPackage.Name.Map.is_empty t.with_constraint
      && OpamPackage.Name.Set.is_empty t.without_constraint)
  then Fmt.epr "Resolutions:@.";
  OpamPackage.Name.Map.iter
    (fun n vc ->
      Fmt.epr "- %s@." (OpamFormula.short_string_of_atom (n, Some vc)))
    t.with_constraint;
  OpamPackage.Name.Set.iter
    (fun n -> Fmt.epr "- %a@." Opam_utils.pp_package_name n)
    t.without_constraint
(* Regex for "<name><op><version>": group 1 is the package name (no
   '>', '=', '<', '.', '!' characters), group 2 the operator
   (<, >, <=, >=, =, '.' or !=), group 3 the version text. *)
let resolution_re =
  Re.
    [
      bos;
      group (rep1 (diff any (set ">=<.!")));
      group (alt [seq [set "<>"; opt (char '=')]; set "=."; str "!="]);
      group (rep1 any);
      eos;
    ]
  |> Re.seq
  |> Re.compile
(* Parse one resolution string. "name<op>version" yields a constrained
   atom ('.' is accepted as shorthand for '='); a bare "name" yields an
   unconstrained atom; anything else is an error message. *)
let parse_resolution str =
  try
    let sub = Re.exec resolution_re str in
    let name = OpamPackage.Name.of_string (Re.Group.get sub 1) in
    let op = Re.Group.get sub 2 in
    (* Normalise the "name.version" shorthand to equality. *)
    let op = if op = "." then "=" else op in
    let op = OpamLexer.FullPos.relop op in
    let version = Re.Group.get sub 3 in
    let version = OpamPackage.Version.of_string version in
    `Ok (name, Some (op, version))
  with Not_found | Failure _ | OpamLexer.Error _ -> (
    (* No operator matched: try the whole string as a bare name. *)
    try `Ok (OpamPackage.Name.of_string str, None)
    with Failure msg -> `Error msg)
(* Render a resolution atom back to its short textual form. *)
let pp_resolution ppf x = Fmt.string ppf (OpamFormula.short_string_of_atom x)
| null | https://raw.githubusercontent.com/odis-labs/onix/81cc82953ef062425656ac9bf80474f2ce642f5b/src/onix_core/Resolutions.ml | ocaml | type t = {
with_constraint : OpamFormula.version_constraint OpamPackage.Name.Map.t;
without_constraint : OpamPackage.Name.Set.t;
compiler_name : OpamPackage.Name.t;
}
let default =
{
with_constraint = OpamPackage.Name.Map.empty;
without_constraint = OpamPackage.Name.Set.empty;
compiler_name = Opam_utils.ocaml_base_compiler_name;
}
let constraints t = t.with_constraint
let make t =
List.fold_left
(fun acc (name, constraint_opt) ->
let acc =
if Opam_utils.is_ocaml_compiler_name name then
{ acc with compiler_name = name }
else acc
in
match constraint_opt with
| Some constr ->
{
acc with
with_constraint =
OpamPackage.Name.Map.add name constr acc.with_constraint;
}
| None ->
{
acc with
without_constraint =
OpamPackage.Name.Set.add name acc.without_constraint;
})
default t
let compiler_name t = t.compiler_name
let all t =
OpamPackage.Name.Map.fold
(fun name _ acc -> OpamPackage.Name.Set.add name acc)
t.with_constraint t.without_constraint
|> OpamPackage.Name.Set.add t.compiler_name
|> OpamPackage.Name.Set.to_seq
|> List.of_seq
let debug t =
if OpamPackage.Name.Map.cardinal t.with_constraint > 0 then
Fmt.epr "Resolutions:@.";
OpamPackage.Name.Map.iter
(fun n vc ->
Fmt.epr "- %s@." (OpamFormula.short_string_of_atom (n, Some vc)))
t.with_constraint;
OpamPackage.Name.Set.iter
(fun n -> Fmt.epr "- %a@." Opam_utils.pp_package_name n)
t.without_constraint
let resolution_re =
Re.
[
bos;
group (rep1 (diff any (set ">=<.!")));
group (alt [seq [set "<>"; opt (char '=')]; set "=."; str "!="]);
group (rep1 any);
eos;
]
|> Re.seq
|> Re.compile
let parse_resolution str =
try
let sub = Re.exec resolution_re str in
let name = OpamPackage.Name.of_string (Re.Group.get sub 1) in
let op = Re.Group.get sub 2 in
let op = if op = "." then "=" else op in
let op = OpamLexer.FullPos.relop op in
let version = Re.Group.get sub 3 in
let version = OpamPackage.Version.of_string version in
`Ok (name, Some (op, version))
with Not_found | Failure _ | OpamLexer.Error _ -> (
try `Ok (OpamPackage.Name.of_string str, None)
with Failure msg -> `Error msg)
let pp_resolution ppf x = Fmt.string ppf (OpamFormula.short_string_of_atom x)
| |
94cd0ede1e45fff256abf3afaa1c91f8826cf482c357c6c95b2718bc940dc2d8 | maximedenes/native-coq | rtree.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
* Type of regular tree with nodes labelled by values of type ' a
The implementation uses de Bruijn indices , so binding capture
is avoided by the lift operator ( see example below )
The implementation uses de Bruijn indices, so binding capture
is avoided by the lift operator (see example below) *)
type 'a t
(** Building trees *)
(** build a node given a label and the vector of sons *)
val mk_node : 'a -> 'a t array -> 'a t
* Build mutually recursive trees :
X_1 = f_1(X_1, .. ,X_n ) ... X_n = f_n(X_1, .. ,X_n )
is obtained by the following pseudo - code
let vx = mk_rec_calls n in
let [ |x_1; .. ;x_n| ] =
mk_rec[|f_1(vx.(0), .. ,vx.(n-1); .. ;f_n(vx.(0), .. ,vx.(n-1))| ]
First example : build rec X = , Y ) and Y = b(X , Y , Y )
let [ |vx;vy| ] = mk_rec_calls 2 in
let [ |x;y| ] = mk_rec [ |mk_node a [ |vx;vy| ] ; mk_node b [ ]
Another example : nested recursive trees rec Y = = a(X , Y),Y , Y )
let [ |vy| ] = mk_rec_calls 1 in
let [ |vx| ] = mk_rec_calls 1 in
let [ |x| ] = mk_rec[|mk_node a vx;lift 1 vy| ]
let [ |y| ] = mk_rec[|mk_node b x;vy;vy| ]
( note the lift to avoid
X_1 = f_1(X_1,..,X_n) ... X_n = f_n(X_1,..,X_n)
is obtained by the following pseudo-code
let vx = mk_rec_calls n in
let [|x_1;..;x_n|] =
mk_rec[|f_1(vx.(0),..,vx.(n-1);..;f_n(vx.(0),..,vx.(n-1))|]
First example: build rec X = a(X,Y) and Y = b(X,Y,Y)
let [|vx;vy|] = mk_rec_calls 2 in
let [|x;y|] = mk_rec [|mk_node a [|vx;vy|]; mk_node b [|vx;vy;vy|]|]
Another example: nested recursive trees rec Y = b(rec X = a(X,Y),Y,Y)
let [|vy|] = mk_rec_calls 1 in
let [|vx|] = mk_rec_calls 1 in
let [|x|] = mk_rec[|mk_node a vx;lift 1 vy|]
let [|y|] = mk_rec[|mk_node b x;vy;vy|]
(note the lift to avoid
*)
val mk_rec_calls : int -> 'a t array
val mk_rec : 'a t array -> 'a t array
(** [lift k t] increases of [k] the free parameters of [t]. Needed
to avoid captures when a tree appears under [mk_rec] *)
val lift : int -> 'a t -> 'a t
val is_node : 'a t -> bool
(** Destructors (recursive calls are expanded) *)
val dest_node : 'a t -> 'a * 'a t array
(** dest_param is not needed for closed trees (i.e. with no free variable) *)
val dest_param : 'a t -> int * int
(** Tells if a tree has an infinite branch *)
val is_infinite : 'a t -> bool
(** [compare_rtree f t1 t2] compares t1 t2 (top-down).
f is called on each node: if the result is negative then the
traversal ends on false, it is is positive then deeper nodes are
not examined, and the traversal continues on respective siblings,
and if it is 0, then the traversal continues on sons, pairwise.
In this latter case, if the nodes do not have the same number of
sons, then the traversal ends on false.
In case of loop, the traversal is successful and it resumes on
siblings.
*)
val compare_rtree : ('a t -> 'b t -> int) -> 'a t -> 'b t -> bool
val eq_rtree : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
(** Iterators *)
val map : ('a -> 'b) -> 'a t -> 'b t
(** [(smartmap f t) == t] if [(f a) ==a ] for all nodes *)
val smartmap : ('a -> 'a) -> 'a t -> 'a t
val fold : (bool -> 'a t -> ('a t -> 'b) -> 'b) -> 'a t -> 'b
val fold2 :
(bool -> 'a t -> 'b -> ('a t -> 'b -> 'c) -> 'c) -> 'a t -> 'b -> 'c
(** A rather simple minded pretty-printer *)
val pp_tree : ('a -> Pp.std_ppcmds) -> 'a t -> Pp.std_ppcmds
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/lib/rtree.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Building trees
* build a node given a label and the vector of sons
* [lift k t] increases of [k] the free parameters of [t]. Needed
to avoid captures when a tree appears under [mk_rec]
* Destructors (recursive calls are expanded)
* dest_param is not needed for closed trees (i.e. with no free variable)
* Tells if a tree has an infinite branch
* [compare_rtree f t1 t2] compares t1 t2 (top-down).
f is called on each node: if the result is negative then the
traversal ends on false, it is is positive then deeper nodes are
not examined, and the traversal continues on respective siblings,
and if it is 0, then the traversal continues on sons, pairwise.
In this latter case, if the nodes do not have the same number of
sons, then the traversal ends on false.
In case of loop, the traversal is successful and it resumes on
siblings.
* Iterators
* [(smartmap f t) == t] if [(f a) ==a ] for all nodes
* A rather simple minded pretty-printer | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Type of regular tree with nodes labelled by values of type ' a
The implementation uses de Bruijn indices , so binding capture
is avoided by the lift operator ( see example below )
The implementation uses de Bruijn indices, so binding capture
is avoided by the lift operator (see example below) *)
type 'a t
val mk_node : 'a -> 'a t array -> 'a t
* Build mutually recursive trees :
X_1 = f_1(X_1, .. ,X_n ) ... X_n = f_n(X_1, .. ,X_n )
is obtained by the following pseudo - code
let vx = mk_rec_calls n in
let [ |x_1; .. ;x_n| ] =
mk_rec[|f_1(vx.(0), .. ,vx.(n-1); .. ;f_n(vx.(0), .. ,vx.(n-1))| ]
First example : build rec X = , Y ) and Y = b(X , Y , Y )
let [ |vx;vy| ] = mk_rec_calls 2 in
let [ |x;y| ] = mk_rec [ |mk_node a [ |vx;vy| ] ; mk_node b [ ]
Another example : nested recursive trees rec Y = = a(X , Y),Y , Y )
let [ |vy| ] = mk_rec_calls 1 in
let [ |vx| ] = mk_rec_calls 1 in
let [ |x| ] = mk_rec[|mk_node a vx;lift 1 vy| ]
let [ |y| ] = mk_rec[|mk_node b x;vy;vy| ]
( note the lift to avoid
X_1 = f_1(X_1,..,X_n) ... X_n = f_n(X_1,..,X_n)
is obtained by the following pseudo-code
let vx = mk_rec_calls n in
let [|x_1;..;x_n|] =
mk_rec[|f_1(vx.(0),..,vx.(n-1);..;f_n(vx.(0),..,vx.(n-1))|]
First example: build rec X = a(X,Y) and Y = b(X,Y,Y)
let [|vx;vy|] = mk_rec_calls 2 in
let [|x;y|] = mk_rec [|mk_node a [|vx;vy|]; mk_node b [|vx;vy;vy|]|]
Another example: nested recursive trees rec Y = b(rec X = a(X,Y),Y,Y)
let [|vy|] = mk_rec_calls 1 in
let [|vx|] = mk_rec_calls 1 in
let [|x|] = mk_rec[|mk_node a vx;lift 1 vy|]
let [|y|] = mk_rec[|mk_node b x;vy;vy|]
(note the lift to avoid
*)
val mk_rec_calls : int -> 'a t array
val mk_rec : 'a t array -> 'a t array
val lift : int -> 'a t -> 'a t
val is_node : 'a t -> bool
val dest_node : 'a t -> 'a * 'a t array
val dest_param : 'a t -> int * int
val is_infinite : 'a t -> bool
val compare_rtree : ('a t -> 'b t -> int) -> 'a t -> 'b t -> bool
val eq_rtree : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val map : ('a -> 'b) -> 'a t -> 'b t
val smartmap : ('a -> 'a) -> 'a t -> 'a t
val fold : (bool -> 'a t -> ('a t -> 'b) -> 'b) -> 'a t -> 'b
val fold2 :
(bool -> 'a t -> 'b -> ('a t -> 'b -> 'c) -> 'c) -> 'a t -> 'b -> 'c
val pp_tree : ('a -> Pp.std_ppcmds) -> 'a t -> Pp.std_ppcmds
|
a8c1b0a818f966b05ba1377995416024ba4d6298d5c89301d81030cb556009f9 | ConsumerDataStandardsAustralia/validation-prototype | Internal.hs | module Web.ConsumerData.Au.LambdaBank.Server.Internal where
import Control.Monad.Reader (ReaderT, asks)
import Servant.Links (Link)
import Web.ConsumerData.Au.Api.Types
(LinkQualifier, PaginatedResponse, Paginator, StandardResponse,
mkPaginatedResponse, mkStandardResponse)
import Web.ConsumerData.Au.LambdaBank.LambdaModel (LambdaModelM)
-- | Bank handler monad: a 'LambdaModelM' computation with read access
-- to the 'LinkQualifier' used when rendering response links.
type LambdaBankM = ReaderT LinkQualifier LambdaModelM

-- | Wrap a payload and its self link in a standard response envelope,
-- qualifying the link with the ambient 'LinkQualifier'.
bankStandardResponse :: a -> Link -> LambdaBankM (StandardResponse a)
bankStandardResponse payload link =
  asks (\qualifier -> mkStandardResponse payload qualifier link)

-- | Wrap a payload and its paginator in a paginated response envelope,
-- qualifying the paginator's links with the ambient 'LinkQualifier'.
bankPaginatedResponse :: a -> Paginator -> LambdaBankM (PaginatedResponse a)
bankPaginatedResponse payload paginator =
  asks (\qualifier -> mkPaginatedResponse payload qualifier paginator)
| null | https://raw.githubusercontent.com/ConsumerDataStandardsAustralia/validation-prototype/ff63338b77339ee49fa3e0be5bb9d7f74e50c28b/consumer-data-au-lambdabank/src/Web/ConsumerData/Au/LambdaBank/Server/Internal.hs | haskell | module Web.ConsumerData.Au.LambdaBank.Server.Internal where
import Control.Monad.Reader (ReaderT, asks)
import Servant.Links (Link)
import Web.ConsumerData.Au.Api.Types
(LinkQualifier, PaginatedResponse, Paginator, StandardResponse,
mkPaginatedResponse, mkStandardResponse)
import Web.ConsumerData.Au.LambdaBank.LambdaModel (LambdaModelM)
type LambdaBankM = ReaderT LinkQualifier LambdaModelM
bankStandardResponse :: a -> Link -> LambdaBankM (StandardResponse a)
bankStandardResponse a l = asks $ \lq -> mkStandardResponse a lq l
bankPaginatedResponse :: a -> Paginator -> LambdaBankM (PaginatedResponse a)
bankPaginatedResponse a p = asks $ \lq -> mkPaginatedResponse a lq p
| |
60efcc6d134e400eac32defeddc9dbeb23bbc5f2fba9aa53f743f86da70e14da | coq/coq | cWarnings.ml | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
(* Per-warning activation state. *)
type status =
  Disabled | Enabled | AsError
(* A registered warning: its default status, its category, and its
   current status. *)
type t = {
  default : status;
  category : string;
  status : status;
}
(* Open extensible tag type: each warning adds its own payload tag. *)
type _ tag = ..
(* An emitted warning paired with its tag (existentially packed). *)
type w = W : 'a tag * 'a -> w
exception WarnError of w
module DMap = PolyMap.Make (struct type nonrec 'a tag = 'a tag = .. end)
(* Heterogeneous map from warning tags to their printers. *)
module PrintMap = DMap.Map(struct type 'a t = 'a -> Pp.t end)
let printers = ref PrintMap.empty
(* Look up and run the printer registered for this warning's tag.
   [assert false] is unreachable: [create] always registers a printer
   before the tag can appear in a [W]. *)
let print (W (tag, w)) =
  let pp = try PrintMap.find tag !printers with Not_found -> assert false in
  pp w
(* Render [WarnError] through the warning's own printer. *)
let () = CErrors.register_handler (function
  | WarnError w -> Some (print w)
  | _ -> None)
(* Global registries: warning name -> state, category name -> members. *)
let warnings : (string, t) Hashtbl.t = Hashtbl.create 97
let categories : (string, string list) Hashtbl.t = Hashtbl.create 97
(* Last normalised flags string, as set by [set_flags]. *)
let flags = ref ""
let get_flags () = !flags
(* Append warning [name] to the member list of [category], creating the
   category bucket on first use. *)
let add_warning_in_category ~name ~category =
  let members = try Hashtbl.find categories category with Not_found -> [] in
  Hashtbl.replace categories category (name :: members)
(* Set one warning's status; silently ignores unknown names (the
   warning may be declared later by a plugin). *)
let set_warning_status ~name status =
  try
    let w = Hashtbl.find warnings name in
    Hashtbl.replace warnings name { w with status = status }
  with Not_found -> ()
(* Restore every warning to its declared default status. *)
let reset_default_warnings () =
  Hashtbl.iter (fun name w ->
      Hashtbl.replace warnings name { w with status = w.default })
    warnings
(* Force every warning to the given status (the "all" flag). *)
let set_all_warnings_status status =
  Hashtbl.iter (fun name w ->
      Hashtbl.replace warnings name { w with status })
    warnings
(* Set the status of every warning in a category; raises [Not_found]
   for unknown categories (callers catch it). *)
let set_category_status ~name status =
  let names = Hashtbl.find categories name in
  List.iter (fun name -> set_warning_status ~name status) names
(* Reserved flag keywords. *)
let is_all_keyword name = CString.equal name "all"
let is_none_keyword s = CString.equal s "none"
let parse_flag s =
if String.length s > 1 then
match String.get s 0 with
| '+' -> (AsError, String.sub s 1 (String.length s - 1))
| '-' -> (Disabled, String.sub s 1 (String.length s - 1))
| _ -> (Enabled, s)
else CErrors.user_err Pp.(str "Invalid warnings flag")
(* Render one flag back to its textual form ('+'/'-' prefix or bare). *)
let string_of_flag (status, name) =
  let prefix =
    match status with
    | AsError -> "+"
    | Disabled -> "-"
    | Enabled -> ""
  in
  prefix ^ name
(* Render a flag list as a comma-separated string. *)
let string_of_flags flags =
  flags |> List.map string_of_flag |> String.concat ","
(* Apply a status to a flag name: "all" hits every warning; otherwise
   try it as a category, then as an individual warning; unknown names
   are ignored (they may be declared later). *)
let set_status ~name status =
  if is_all_keyword name then
    set_all_warnings_status status
  else
    try
      set_category_status ~name status
    with Not_found ->
      try
        set_warning_status ~name status
      with Not_found -> ()
(* Split a flags string on spaces and/or commas. *)
let split_flags s =
  let reg = Str.regexp "[ ,]+" in Str.split reg s
(** [cut_before_all_rev] removes all flags subsumed by a later occurrence of the
    "all" flag, and reverses the list. *)
let rec cut_before_all_rev acc = function
  | [] -> acc
  | (status,name as w) :: warnings ->
    let acc =
      (* "all" resets everything seen so far; "none" becomes "-all". *)
      if is_all_keyword name then [w]
      else if is_none_keyword name then [(Disabled,"all")]
      else w :: acc in
    cut_before_all_rev acc warnings
let cut_before_all_rev warnings = cut_before_all_rev [] warnings
(** [uniquize_flags_rev] removes flags that are subsumed by later occurrences of
    themselves or their categories, and reverses the list. *)
let uniquize_flags_rev flags =
  let rec aux acc visited = function
    | (_,name as flag)::flags ->
      if CString.Set.mem name visited then aux acc visited flags else
      (* A category flag also marks all of its member warnings visited. *)
      let visited =
        try
          let warnings = Hashtbl.find categories name in
          List.fold_left (fun v w -> CString.Set.add w v) visited warnings
        with Not_found ->
          visited
      in
      aux (flag::acc) (CString.Set.add name visited) flags
    | [] -> acc
  in aux [] CString.Set.empty flags
(** [normalize_flags] removes redundant warnings. Unknown warnings are kept
    because they may be declared in a plugin that will be linked later. *)
let normalize_flags warnings =
  (* Both passes reverse, so the original order is preserved overall. *)
  let warnings = cut_before_all_rev warnings in
  uniquize_flags_rev warnings
(* Parse a flags string into a flag list. *)
let flags_of_string s = List.map parse_flag (split_flags s)

(* Normalise a flags string textually; "none" is kept verbatim. *)
let normalize_flags_string s =
  if is_none_keyword s then s
  else string_of_flags (normalize_flags (flags_of_string s))
(* Apply a list of parsed flags to the registries, in order. *)
let parse_warnings items =
  CList.iter (fun (status, name) -> set_status ~name status) items
(* For compatibility, we accept "none" *)
let parse_flags s =
  if is_none_keyword s then begin
    (* "none" disables everything and the global warn switch. *)
    Flags.make_warn false;
    set_all_warnings_status Disabled;
    "none"
  end
  else begin
    Flags.make_warn true;
    let flags = flags_of_string s in
    let flags = normalize_flags flags in
    parse_warnings flags;
    string_of_flags flags
  end
(* Reset to defaults, then apply [s]; remember the normalised string. *)
let set_flags s =
  reset_default_warnings (); let s = parse_flags s in flags := s
(* Adds a warning to the [warnings] and [category] tables. We then reparse the
   warning flags string, because the warning being created might have been set
   already. *)
let create (type a) ~name ~category ?(default=Enabled) (pp:a -> Pp.t) =
  (* Suffix each message with its "[name,category]" provenance. *)
  let pp x = let open Pp in
    pp x ++ spc () ++ str "[" ++ str name ++ str "," ++
    str category ++ str "]"
  in
  let tag = DMap.make () in
  printers := PrintMap.add tag pp !printers;
  Hashtbl.replace warnings name { default; category; status = default };
  add_warning_in_category ~name ~category;
  (* Warnings enabled by default also belong to the "default" category. *)
  if default <> Disabled then
    add_warning_in_category ~name ~category:"default";
  (* We re-parse and also re-normalize the flags, because the category of the
     new warning is now known. *)
  set_flags !flags;
  (* The returned emitter checks the warning's *current* status at each
     call: disabled warnings are dropped, AsError raises, Enabled prints. *)
  fun ?loc x ->
    let w = Hashtbl.find warnings name in
    match w.status with
    | Disabled -> ()
    | AsError -> Loc.raise ?loc (WarnError (W (DMap.tag_of_onetag tag, x)))
    | Enabled -> Feedback.msg_warning ?loc (pp x)
(* Current status of a warning; raises [Not_found] for unknown names. *)
let get_status ~name = (Hashtbl.find warnings name).status
(* Remark: [warn] does not need to start with a comma, but if present
   it won't hurt (",," is normalized into ","). *)
let with_warn warn (f:'b -> 'a) x =
  (* Temporarily extend the flags around [f x], restoring them after. *)
  let s = get_flags () in
  Util.try_finally (fun x -> set_flags (s^","^warn);f x) x set_flags s
| null | https://raw.githubusercontent.com/coq/coq/a290b310c86275123786137fd5d70963ae16e3c4/lib/cWarnings.ml | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
* [cut_before_all_rev] removes all flags subsumed by a later occurrence of the
"all" flag, and reverses the list.
* [uniquize_flags_rev] removes flags that are subsumed by later occurrences of
themselves or their categories, and reverses the list.
* [normalize_flags] removes redundant warnings. Unknown warnings are kept
because they may be declared in a plugin that will be linked later.
For compatibility, we accept "none"
Adds a warning to the [warnings] and [category] tables. We then reparse the
warning flags string, because the warning being created might have been set
already.
We re-parse and also re-normalize the flags, because the category of the
new warning is now known.
Remark: [warn] does not need to start with a comma, but if present
it won't hurt (",," is normalized into ","). | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
type status =
Disabled | Enabled | AsError
type t = {
default : status;
category : string;
status : status;
}
type _ tag = ..
type w = W : 'a tag * 'a -> w
exception WarnError of w
module DMap = PolyMap.Make (struct type nonrec 'a tag = 'a tag = .. end)
module PrintMap = DMap.Map(struct type 'a t = 'a -> Pp.t end)
let printers = ref PrintMap.empty
let print (W (tag, w)) =
let pp = try PrintMap.find tag !printers with Not_found -> assert false in
pp w
let () = CErrors.register_handler (function
| WarnError w -> Some (print w)
| _ -> None)
let warnings : (string, t) Hashtbl.t = Hashtbl.create 97
let categories : (string, string list) Hashtbl.t = Hashtbl.create 97
let flags = ref ""
let get_flags () = !flags
let add_warning_in_category ~name ~category =
let ws =
try
Hashtbl.find categories category
with Not_found -> []
in
Hashtbl.replace categories category (name::ws)
let set_warning_status ~name status =
try
let w = Hashtbl.find warnings name in
Hashtbl.replace warnings name { w with status = status }
with Not_found -> ()
let reset_default_warnings () =
Hashtbl.iter (fun name w ->
Hashtbl.replace warnings name { w with status = w.default })
warnings
let set_all_warnings_status status =
Hashtbl.iter (fun name w ->
Hashtbl.replace warnings name { w with status })
warnings
let set_category_status ~name status =
let names = Hashtbl.find categories name in
List.iter (fun name -> set_warning_status ~name status) names
let is_all_keyword name = CString.equal name "all"
let is_none_keyword s = CString.equal s "none"
let parse_flag s =
if String.length s > 1 then
match String.get s 0 with
| '+' -> (AsError, String.sub s 1 (String.length s - 1))
| '-' -> (Disabled, String.sub s 1 (String.length s - 1))
| _ -> (Enabled, s)
else CErrors.user_err Pp.(str "Invalid warnings flag")
let string_of_flag (status,name) =
match status with
| AsError -> "+" ^ name
| Disabled -> "-" ^ name
| Enabled -> name
let string_of_flags flags =
String.concat "," (List.map string_of_flag flags)
let set_status ~name status =
if is_all_keyword name then
set_all_warnings_status status
else
try
set_category_status ~name status
with Not_found ->
try
set_warning_status ~name status
with Not_found -> ()
let split_flags s =
let reg = Str.regexp "[ ,]+" in Str.split reg s
let rec cut_before_all_rev acc = function
| [] -> acc
| (status,name as w) :: warnings ->
let acc =
if is_all_keyword name then [w]
else if is_none_keyword name then [(Disabled,"all")]
else w :: acc in
cut_before_all_rev acc warnings
let cut_before_all_rev warnings = cut_before_all_rev [] warnings
let uniquize_flags_rev flags =
let rec aux acc visited = function
| (_,name as flag)::flags ->
if CString.Set.mem name visited then aux acc visited flags else
let visited =
try
let warnings = Hashtbl.find categories name in
List.fold_left (fun v w -> CString.Set.add w v) visited warnings
with Not_found ->
visited
in
aux (flag::acc) (CString.Set.add name visited) flags
| [] -> acc
in aux [] CString.Set.empty flags
let normalize_flags warnings =
let warnings = cut_before_all_rev warnings in
uniquize_flags_rev warnings
let flags_of_string s = List.map parse_flag (split_flags s)
let normalize_flags_string s =
if is_none_keyword s then s
else
let flags = flags_of_string s in
let flags = normalize_flags flags in
string_of_flags flags
let parse_warnings items =
CList.iter (fun (status, name) -> set_status ~name status) items
let parse_flags s =
if is_none_keyword s then begin
Flags.make_warn false;
set_all_warnings_status Disabled;
"none"
end
else begin
Flags.make_warn true;
let flags = flags_of_string s in
let flags = normalize_flags flags in
parse_warnings flags;
string_of_flags flags
end
let set_flags s =
reset_default_warnings (); let s = parse_flags s in flags := s
let create (type a) ~name ~category ?(default=Enabled) (pp:a -> Pp.t) =
let pp x = let open Pp in
pp x ++ spc () ++ str "[" ++ str name ++ str "," ++
str category ++ str "]"
in
let tag = DMap.make () in
printers := PrintMap.add tag pp !printers;
Hashtbl.replace warnings name { default; category; status = default };
add_warning_in_category ~name ~category;
if default <> Disabled then
add_warning_in_category ~name ~category:"default";
set_flags !flags;
fun ?loc x ->
let w = Hashtbl.find warnings name in
match w.status with
| Disabled -> ()
| AsError -> Loc.raise ?loc (WarnError (W (DMap.tag_of_onetag tag, x)))
| Enabled -> Feedback.msg_warning ?loc (pp x)
let get_status ~name = (Hashtbl.find warnings name).status
let with_warn warn (f:'b -> 'a) x =
let s = get_flags () in
Util.try_finally (fun x -> set_flags (s^","^warn);f x) x set_flags s
|
75d063ed4e18c98c608353b753263faeed3e30e7d1c5902213a55c24867cc3c4 | uhc/uhc | GraphTest.hs | module GraphTest where
--
-- Experimental approach to constraint graphs
-- Only for experimentation, not finished yet
--
-- In the conventional constraint graphs, each
-- constraint corresponds to an edge. In this
-- representation, each variable and constraint
-- correspond to vertices. Edges correspond to
-- plugs. We can then represent the constraint
-- graph as a plus/min scaling system for each
-- variable.
--
import qualified Data.Graph.Inductive as G
import Data.Map(Map)
import qualified Data.Map as Map
import Data.Set(Set)
import qualified Data.Set as Set
import Data.List
import Data.Maybe
-- | A constraint between annotation variables (identified by Int):
-- either a flow from one variable to another, or an upper bound on a
-- composite expression.
data Constr
  = Int :=>=: Int
  | Comp :<=: Int
  deriving (Eq, Ord, Show)
-- | Composite expressions over annotation variables.
data Comp
  = Atom Int
  | Comp :+: Comp
  | Comp :-: Comp
  deriving (Eq, Ord, Show)
-- | Constraint graph: vertices labelled with an 'Operation', unlabelled
-- edges (variables and constraints are both vertices; see the module
-- header comment).
type ConstrGraph = G.Gr Operation ()
-- | Vertex labels: a plus node, a max node, or a variable node.
data Operation = Plus | Max | Var !Int deriving (Eq, Ord, Show)
-- Ad-hoc experiment fixtures: a small constraint set, its graph, the
-- graph restricted to the "sacred" variables 1 and 5, and an inference
-- run seeded with variable 1 at value 1.
testc = [1 :=>=: 2, 2 :=>=: 3, 2 :=>=: 4, (Atom 2 :+: Atom 4) :<=: 5, 5 :=>=: 6, 6 :=>=: 1 ]
testg = constrsToConstraintGraph testc
testn = killNonSacred [1,5] testg
testi = inferSubst testn [(1,1)] 0 upSum upMax
-- Sum of a list, clamped to the experiment's ceiling of 2.
upSum xs = trunc (sum xs)
-- Maximum of a possibly-empty list, clamped; empty lists yield 0.
upMax [] = 0
upMax (x:xs) = trunc (foldr max x xs)
-- Clamp a value to at most 2.
trunc x = min x 2
-- | Translate a constraint list into a 'ConstrGraph'. Each variable
-- gets a pair of vertices (a 'Var' node fed by a 'Max' collector, wired
-- Max -> Var); each composite gets 'Plus'/'Max' operator vertices.
-- Fresh vertex ids are threaded through as the counter [u].
constrsToConstraintGraph :: [Constr] -> ConstrGraph
constrsToConstraintGraph cs
  = G.mkGraph ns' (map (\(x,y) -> (x,y,())) es')
  where
    keepOld = flip const
    -- First pass: allocate the (Var, Max) vertex pair for every
    -- variable mentioned; keepOld preserves the first allocation.
    (vsMap, u) = foldr buildVsMap (Map.empty, 1) cs
    find k = Map.findWithDefault (error "Not in vsMap") k vsMap
    -- Second pass: build operator vertices and edges for each constraint.
    (ns, es, u2) = foldr build ([], [], u) cs
    ns' = ns ++ concat [[(u1,Var n),(u2, Max)] | (n, (u1,u2)) <- Map.assocs vsMap ]
    es' = es ++ [(u2,u1) | (_, (u1,u2)) <- Map.assocs vsMap]
    -- A flow a :=>=: b becomes an edge from a's Var node to b's Max node.
    build (a :=>=: b) (ns, es, u)
      = let (s, _) = find a
            (_, t) = find b
        in (ns, (s, t) : es, u)
    build (c :<=: b) r
      = let (_, t) = find b
        in buildComp c t r
    buildComp (Atom i) t (ns, es, u)
      = let (s, _) = find i
        in (ns, (s,t):es, u)
    -- NOTE(review): u' is computed but unused in both branches below,
    -- and :-: builds a Max node like a subtraction-as-max encoding —
    -- confirm intent (the module header calls this experimental).
    buildComp (a :+: b) t (ns, es, u)
      = let u' = u+1
            (ns1,es1,u1) = buildComp a u (ns, es, u)
            (ns2,es2,u2) = buildComp b u (ns1, es1, u1)
        in ((u, Plus) : ns2, (u, t) : es2, u2)
    buildComp (a :-: b) t (ns, es, u)
      = let u' = u+1
            (ns1,es1,u1) = buildComp a u (ns, es, u)
            (ns2,es2,u2) = buildComp b u (ns1, es1, u1)
        in ((u, Max) : ns2, (u, t) : es2, u2)
    buildVsMap (a :=>=: b) (m, u)
      = let u1 = u+2
            u2 = u1+2
            m1 = Map.insertWith keepOld a (u,u+1) m
            m2 = Map.insertWith keepOld b (u1,u1+1) m1
        in (m2, u2)
    buildVsMap (c :<=: b) (m, u)
      = let u1 = u+2
            m1 = Map.insertWith keepOld b (u,u+1) m
            r = buildVsMapComp c m1 u1
        in r
    buildVsMapComp (Atom i) m u = (Map.insertWith keepOld i (u,u+1) m, u+2)
    buildVsMapComp (a :+: b) m u = let (m',u') = buildVsMapComp a m u in buildVsMapComp b m' u'
    buildVsMapComp (a :-: b) m u = let (m',u') = buildVsMapComp a m u in buildVsMapComp b m' u'
-- | Worklist fixpoint over the graph: starting from [initial] variable
-- values (others at [bot]), repeatedly recompute each node's value from
-- its predecessors using [fPlus] for 'Plus' nodes and [fMax] for 'Max'
-- nodes, re-queueing successors whenever a value grows. Returns the
-- final (variable, value) substitution. Termination relies on the
-- combinators being bounded (e.g. the clamped upSum/upMax above).
inferSubst :: ConstrGraph -> [(Int, Int)] -> Int -> ([Int] -> Int) -> ([Int] -> Int) -> [(Int, Int)]
inferSubst g initial bot fPlus fMax
  = let ns = G.labNodes g
        s = map (\(n,a) -> (n, repl a)) ns
    in trans . nubBy (\(a,_) (b,_) -> a == b) $ iter (G.nodes g) s
  where
    mp = Map.fromList initial
    -- Initial value of a node: seeded value for seeded variables, bot
    -- for everything else.
    repl (Var i) = Map.findWithDefault bot i mp
    repl _ = bot
    -- Keep only variable nodes, translating vertex ids back to
    -- variable numbers.
    trans s
      = [(i,a) | (v,a) <- s, let r = fromJust (G.lab g v), isVar r, let (Var i) = r ]
    isVar (Var _) = True
    isVar _ = False
    -- Worklist loop; subst is an assoc list where the most recent
    -- binding for a node shadows older ones (hence the nubBy above).
    iter [] subst = subst
    iter (n : ns) subst
      = let vOld = fromJust (n `lookup` subst)
            ss = G.pre g n
            vs = map (fromJust . (`lookup` subst)) ss
            o = fromJust (G.lab g n)
            -- vOld is folded in so values only ever grow (monotone).
            vNew = fMax [ vOld
                        , case o of
                            Var _ -> head vs
                            Plus -> fPlus vs
                            Max -> fMax vs
                        ]
            subst' = ((n,vNew) : subst)
        in if vNew == vOld
             then iter ns subst'
             else iter (G.suc g n ++ ns) subst'
-- | NOTE(review): stub — ignores [src] and [mp] and returns [dst]
-- unchanged. The module header says this code is experimental and
-- unfinished.
instantiate :: ConstrGraph -> ConstrGraph -> [(Int, Int)] -> ConstrGraph
instantiate src dst mp
  = dst
-- | Remove every variable node whose variable is not in [sac],
-- short-circuiting each removed node by connecting all of its
-- predecessors directly to all of its successors.
killNonSacred :: [Int] -> ConstrGraph -> ConstrGraph
killNonSacred sac g
  = let vs = G.labNodes g
        ns = [n | (n,a) <- vs, not (isSacVar a)]
        isSacVar (Var i) = i `elem` sac
        isSacVar _ = True
        es = G.labEdges g
    in G.mkGraph [v | v@(n,_) <- vs, not (n `elem` ns)] ([(p, s, ()) | n <- ns, p <- G.pre g n, s <- G.suc g n] ++ [e | e@(a,b,_) <- es, not (a `elem` ns || b `elem` ns) ])
-- | Unimplemented placeholder.
reduce :: ConstrGraph -> ConstrGraph
reduce
  = undefined
| null | https://raw.githubusercontent.com/uhc/uhc/8eb6914df3ba2ba43916a1a4956c6f25aa0e07c5/EHC/src/ehc-7_2/Annotations/GraphTest.hs | haskell |
Experimental approach to constraint graphs
Only for experimentation, not finished yet
In the conventional constraint graphs, each
constraint corresponds to an edge. In this
representation, each variable and constraint
correspond to vertices. Edges correspond to
plugs. We can then represent the constraint
graph as a plus/min scaling system for each
variable.
| module GraphTest where
import qualified Data.Graph.Inductive as G
import Data.Map(Map)
import qualified Data.Map as Map
import Data.Set(Set)
import qualified Data.Set as Set
import Data.List
import Data.Maybe
data Constr
= Int :=>=: Int
| Comp :<=: Int
deriving (Eq, Ord, Show)
data Comp
= Atom Int
| Comp :+: Comp
| Comp :-: Comp
deriving (Eq, Ord, Show)
type ConstrGraph = G.Gr Operation ()
data Operation = Plus | Max | Var !Int deriving (Eq, Ord, Show)
testc = [1 :=>=: 2, 2 :=>=: 3, 2 :=>=: 4, (Atom 2 :+: Atom 4) :<=: 5, 5 :=>=: 6, 6 :=>=: 1 ]
testg = constrsToConstraintGraph testc
testn = killNonSacred [1,5] testg
testi = inferSubst testn [(1,1)] 0 upSum upMax
upSum = trunc . sum
upMax [] = 0
upMax xs = trunc (maximum xs)
trunc x = x `min` 2
constrsToConstraintGraph :: [Constr] -> ConstrGraph
constrsToConstraintGraph cs
= G.mkGraph ns' (map (\(x,y) -> (x,y,())) es')
where
keepOld = flip const
(vsMap, u) = foldr buildVsMap (Map.empty, 1) cs
find k = Map.findWithDefault (error "Not in vsMap") k vsMap
(ns, es, u2) = foldr build ([], [], u) cs
ns' = ns ++ concat [[(u1,Var n),(u2, Max)] | (n, (u1,u2)) <- Map.assocs vsMap ]
es' = es ++ [(u2,u1) | (_, (u1,u2)) <- Map.assocs vsMap]
build (a :=>=: b) (ns, es, u)
= let (s, _) = find a
(_, t) = find b
in (ns, (s, t) : es, u)
build (c :<=: b) r
= let (_, t) = find b
in buildComp c t r
buildComp (Atom i) t (ns, es, u)
= let (s, _) = find i
in (ns, (s,t):es, u)
buildComp (a :+: b) t (ns, es, u)
= let u' = u+1
(ns1,es1,u1) = buildComp a u (ns, es, u)
(ns2,es2,u2) = buildComp b u (ns1, es1, u1)
in ((u, Plus) : ns2, (u, t) : es2, u2)
buildComp (a :-: b) t (ns, es, u)
= let u' = u+1
(ns1,es1,u1) = buildComp a u (ns, es, u)
(ns2,es2,u2) = buildComp b u (ns1, es1, u1)
in ((u, Max) : ns2, (u, t) : es2, u2)
buildVsMap (a :=>=: b) (m, u)
= let u1 = u+2
u2 = u1+2
m1 = Map.insertWith keepOld a (u,u+1) m
m2 = Map.insertWith keepOld b (u1,u1+1) m1
in (m2, u2)
buildVsMap (c :<=: b) (m, u)
= let u1 = u+2
m1 = Map.insertWith keepOld b (u,u+1) m
r = buildVsMapComp c m1 u1
in r
buildVsMapComp (Atom i) m u = (Map.insertWith keepOld i (u,u+1) m, u+2)
buildVsMapComp (a :+: b) m u = let (m',u') = buildVsMapComp a m u in buildVsMapComp b m' u'
buildVsMapComp (a :-: b) m u = let (m',u') = buildVsMapComp a m u in buildVsMapComp b m' u'
inferSubst :: ConstrGraph -> [(Int, Int)] -> Int -> ([Int] -> Int) -> ([Int] -> Int) -> [(Int, Int)]
inferSubst g initial bot fPlus fMax
= let ns = G.labNodes g
s = map (\(n,a) -> (n, repl a)) ns
in trans . nubBy (\(a,_) (b,_) -> a == b) $ iter (G.nodes g) s
where
mp = Map.fromList initial
repl (Var i) = Map.findWithDefault bot i mp
repl _ = bot
trans s
= [(i,a) | (v,a) <- s, let r = fromJust (G.lab g v), isVar r, let (Var i) = r ]
isVar (Var _) = True
isVar _ = False
iter [] subst = subst
iter (n : ns) subst
= let vOld = fromJust (n `lookup` subst)
ss = G.pre g n
vs = map (fromJust . (`lookup` subst)) ss
o = fromJust (G.lab g n)
vNew = fMax [ vOld
, case o of
Var _ -> head vs
Plus -> fPlus vs
Max -> fMax vs
]
subst' = ((n,vNew) : subst)
in if vNew == vOld
then iter ns subst'
else iter (G.suc g n ++ ns) subst'
instantiate :: ConstrGraph -> ConstrGraph -> [(Int, Int)] -> ConstrGraph
instantiate src dst mp
= dst
killNonSacred :: [Int] -> ConstrGraph -> ConstrGraph
killNonSacred sac g
= let vs = G.labNodes g
ns = [n | (n,a) <- vs, not (isSacVar a)]
isSacVar (Var i) = i `elem` sac
isSacVar _ = True
es = G.labEdges g
in G.mkGraph [v | v@(n,_) <- vs, not (n `elem` ns)] ([(p, s, ()) | n <- ns, p <- G.pre g n, s <- G.suc g n] ++ [e | e@(a,b,_) <- es, not (a `elem` ns || b `elem` ns) ])
reduce :: ConstrGraph -> ConstrGraph
reduce
= undefined
|
eed241df0295fe5e2b311222bc645dadb7ff608e26687a18ed8ac0eeb8956d77 | metabase/metabase | follow_up_emails.clj | (ns metabase.task.follow-up-emails
"Tasks which follow up with Metabase users."
(:require
[clojurewerkz.quartzite.jobs :as jobs]
[clojurewerkz.quartzite.schedule.cron :as cron]
[clojurewerkz.quartzite.triggers :as triggers]
[java-time :as t]
[metabase.email :as email]
[metabase.email.messages :as messages]
[metabase.models.setting :as setting]
[metabase.models.user :as user :refer [User]]
[metabase.public-settings :as public-settings]
[metabase.task :as task]
[metabase.util.date-2 :as u.date]
[metabase.util.log :as log]
[toucan.db :as db]))
(set! *warn-on-reflection* true)
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | send follow-up emails |
;;; +----------------------------------------------------------------------------------------------------------------+
(setting/defsetting ^:private follow-up-email-sent
;; No need to i18n this as it's not user facing
"Have we sent a follow up email to the instance admin?"
:type :boolean
:default false
:visibility :internal)
(defn- send-follow-up-email!
"Send an email to the instance admin following up on their experience with Metabase thus far."
[]
;; we need access to email AND the instance must be opted into anonymous tracking. Make sure email hasn't been sent yet
(when (and (email/email-configured?)
(public-settings/anon-tracking-enabled)
(not (follow-up-email-sent)))
;; grab the oldest admins email address (likely the user who created this MB instance), that's who we'll send to
;; TODO - Does it make to send to this user instead of `(public-settings/admin-email)`?
(when-let [admin (db/select-one User :is_superuser true, :is_active true, {:order-by [:date_joined]})]
(try
(messages/send-follow-up-email! (:email admin))
(catch Throwable e
(log/error "Problem sending follow-up email:" e))
(finally
(follow-up-email-sent! true))))))
(defn- instance-creation-timestamp
"The date this Metabase instance was created. We use the `:date_joined` of the first `User` to determine this."
^java.time.temporal.Temporal []
(db/select-one-field :date_joined User, {:order-by [[:date_joined :asc]]}))
(jobs/defjob ^{:doc "Sends out a general 2 week email follow up email"} FollowUpEmail [_]
;; if we've already sent the follow-up email then we are done
(when-not (follow-up-email-sent)
;; figure out when we consider the instance created
(when-let [instance-created (instance-creation-timestamp)]
we need to be 2 + weeks from creation to send the follow up
(when (u.date/older-than? instance-created (t/weeks 2))
(send-follow-up-email!)))))
(def ^:private follow-up-emails-job-key "metabase.task.follow-up-emails.job")
(def ^:private follow-up-emails-trigger-key "metabase.task.follow-up-emails.trigger")
(defmethod task/init! ::SendFollowUpEmails [_]
(let [job (jobs/build
(jobs/of-type FollowUpEmail)
(jobs/with-identity (jobs/key follow-up-emails-job-key)))
trigger (triggers/build
(triggers/with-identity (triggers/key follow-up-emails-trigger-key))
(triggers/start-now)
(triggers/with-schedule
;; run once a day
(cron/cron-schedule "0 0 12 * * ? *")))]
(task/schedule-task! job trigger)))
| null | https://raw.githubusercontent.com/metabase/metabase/7e3048bf73f6cb7527579446166d054292166163/src/metabase/task/follow_up_emails.clj | clojure | +----------------------------------------------------------------------------------------------------------------+
| send follow-up emails |
+----------------------------------------------------------------------------------------------------------------+
No need to i18n this as it's not user facing
we need access to email AND the instance must be opted into anonymous tracking. Make sure email hasn't been sent yet
grab the oldest admins email address (likely the user who created this MB instance), that's who we'll send to
TODO - Does it make to send to this user instead of `(public-settings/admin-email)`?
if we've already sent the follow-up email then we are done
figure out when we consider the instance created
run once a day | (ns metabase.task.follow-up-emails
"Tasks which follow up with Metabase users."
(:require
[clojurewerkz.quartzite.jobs :as jobs]
[clojurewerkz.quartzite.schedule.cron :as cron]
[clojurewerkz.quartzite.triggers :as triggers]
[java-time :as t]
[metabase.email :as email]
[metabase.email.messages :as messages]
[metabase.models.setting :as setting]
[metabase.models.user :as user :refer [User]]
[metabase.public-settings :as public-settings]
[metabase.task :as task]
[metabase.util.date-2 :as u.date]
[metabase.util.log :as log]
[toucan.db :as db]))
(set! *warn-on-reflection* true)
(setting/defsetting ^:private follow-up-email-sent
"Have we sent a follow up email to the instance admin?"
:type :boolean
:default false
:visibility :internal)
(defn- send-follow-up-email!
"Send an email to the instance admin following up on their experience with Metabase thus far."
[]
(when (and (email/email-configured?)
(public-settings/anon-tracking-enabled)
(not (follow-up-email-sent)))
(when-let [admin (db/select-one User :is_superuser true, :is_active true, {:order-by [:date_joined]})]
(try
(messages/send-follow-up-email! (:email admin))
(catch Throwable e
(log/error "Problem sending follow-up email:" e))
(finally
(follow-up-email-sent! true))))))
(defn- instance-creation-timestamp
"The date this Metabase instance was created. We use the `:date_joined` of the first `User` to determine this."
^java.time.temporal.Temporal []
(db/select-one-field :date_joined User, {:order-by [[:date_joined :asc]]}))
(jobs/defjob ^{:doc "Sends out a general 2 week email follow up email"} FollowUpEmail [_]
(when-not (follow-up-email-sent)
(when-let [instance-created (instance-creation-timestamp)]
we need to be 2 + weeks from creation to send the follow up
(when (u.date/older-than? instance-created (t/weeks 2))
(send-follow-up-email!)))))
(def ^:private follow-up-emails-job-key "metabase.task.follow-up-emails.job")
(def ^:private follow-up-emails-trigger-key "metabase.task.follow-up-emails.trigger")
(defmethod task/init! ::SendFollowUpEmails [_]
(let [job (jobs/build
(jobs/of-type FollowUpEmail)
(jobs/with-identity (jobs/key follow-up-emails-job-key)))
trigger (triggers/build
(triggers/with-identity (triggers/key follow-up-emails-trigger-key))
(triggers/start-now)
(triggers/with-schedule
(cron/cron-schedule "0 0 12 * * ? *")))]
(task/schedule-task! job trigger)))
|
177bbf0b8cce93e1a099b3a8327c6ff292222955355288b73096bc502df5ffa3 | coq/coq | term.mli | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Names
open Constr
* { 5 Derived constructors }
* non - dependent product [ t1 - > t2 ] , an alias for
[ forall ( _ : t1 ) , t2 ] . Beware [ t_2 ] is NOT lifted .
Eg : in context [ A : Prop ] , [ A->A ] is built by [ ( mkArrow ( mkRel 1 ) ( mkRel 2 ) ) ]
[forall (_:t1), t2]. Beware [t_2] is NOT lifted.
Eg: in context [A:Prop], [A->A] is built by [(mkArrow (mkRel 1) (mkRel 2))]
*)
val mkArrow : types -> Sorts.relevance -> types -> constr
val mkArrowR : types -> types -> constr
(** For an always-relevant domain *)
(** Named version of the functions from [Term]. *)
val mkNamedLambda : Id.t Context.binder_annot -> types -> constr -> constr
val mkNamedLetIn : Id.t Context.binder_annot -> constr -> types -> constr -> constr
val mkNamedProd : Id.t Context.binder_annot -> types -> types -> types
(** Constructs either [(x:t)c] or [[x=b:t]c] *)
val mkProd_or_LetIn : Constr.rel_declaration -> types -> types
val mkProd_wo_LetIn : Constr.rel_declaration -> types -> types
val mkNamedProd_or_LetIn : Constr.named_declaration -> types -> types
val mkNamedProd_wo_LetIn : Constr.named_declaration -> types -> types
(** Constructs either [[x:t]c] or [[x=b:t]c] *)
val mkLambda_or_LetIn : Constr.rel_declaration -> constr -> constr
val mkNamedLambda_or_LetIn : Constr.named_declaration -> constr -> constr
* { 5 Other term constructors . }
(** [applist (f,args)] and its variants work as [mkApp] *)
val applist : constr * constr list -> constr
val applistc : constr -> constr list -> constr
val appvect : constr * constr array -> constr
val appvectc : constr -> constr array -> constr
* [ prodn n l b ] = [ forall ( x_1 : T_1) ... (x_n : T_n ) , b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ... ] .
where [l] is [(x_n,T_n)...(x_1,T_1)...]. *)
val prodn : int -> (Name.t Context.binder_annot * constr) list -> constr -> constr
* [ compose_prod l b ]
@return [ forall ( x_1 : T_1) ... (x_n : T_n ) , b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ] .
Inverse of [ decompose_prod ] .
@return [forall (x_1:T_1)...(x_n:T_n), b]
where [l] is [(x_n,T_n)...(x_1,T_1)].
Inverse of [decompose_prod]. *)
val compose_prod : (Name.t Context.binder_annot * constr) list -> constr -> constr
* [ n l b ]
@return [ fun ( x_1 : T_1) ... (x_n : T_n ) = > b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ... ] .
@return [fun (x_1:T_1)...(x_n:T_n) => b]
where [l] is [(x_n,T_n)...(x_1,T_1)...]. *)
val lamn : int -> (Name.t Context.binder_annot * constr) list -> constr -> constr
* [ compose_lam l b ]
@return [ fun ( x_1 : T_1) ... (x_n : T_n ) = > b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ] .
Inverse of [ it_destLam ]
@return [fun (x_1:T_1)...(x_n:T_n) => b]
where [l] is [(x_n,T_n)...(x_1,T_1)].
Inverse of [it_destLam] *)
val compose_lam : (Name.t Context.binder_annot * constr) list -> constr -> constr
(** [to_lambda n l]
@return [fun (x_1:T_1)...(x_n:T_n) => T]
where [l] is [forall (x_1:T_1)...(x_n:T_n), T] *)
val to_lambda : int -> constr -> constr
(** [to_prod n l]
@return [forall (x_1:T_1)...(x_n:T_n), T]
where [l] is [fun (x_1:T_1)...(x_n:T_n) => T] *)
val to_prod : int -> constr -> constr
val it_mkLambda_or_LetIn : constr -> Constr.rel_context -> constr
val it_mkProd_wo_LetIn : types -> Constr.rel_context -> types
val it_mkProd_or_LetIn : types -> Constr.rel_context -> types
(** In [lambda_applist c args], [c] is supposed to have the form
[λΓ.c] with [Γ] without let-in; it returns [c] with the variables
of [Γ] instantiated by [args]. *)
val lambda_applist : constr -> constr list -> constr
val lambda_appvect : constr -> constr array -> constr
(** In [lambda_applist_decls n c args], [c] is supposed to have the
form [λΓ.c] with [Γ] of length [n] and possibly with let-ins; it
returns [c] with the assumptions of [Γ] instantiated by [args] and
the local definitions of [Γ] expanded. *)
val lambda_applist_decls : int -> constr -> constr list -> constr
val lambda_appvect_decls : int -> constr -> constr array -> constr
(** pseudo-reduction rule *)
* [ prod_appvect ] [ forall ( x1 : B1; ... ;xn : Bn ) , B ] [ a1 ... an ]
@return [ B[a1 ... an ] ]
@return [B[a1...an]] *)
val prod_appvect : types -> constr array -> types
val prod_applist : types -> constr list -> types
(** In [prod_appvect_decls n c args], [c] is supposed to have the
form [∀Γ.c] with [Γ] of length [n] and possibly with let-ins; it
returns [c] with the assumptions of [Γ] instantiated by [args] and
the local definitions of [Γ] expanded. *)
val prod_appvect_decls : int -> types -> constr array -> types
val prod_applist_decls : int -> types -> constr list -> types
* { 5 Other term destructors . }
(** Transforms a product term {% $ %}(x_1:T_1)..(x_n:T_n)T{% $ %} into the pair
{% $ %}([(x_n,T_n);...;(x_1,T_1)],T){% $ %}, where {% $ %}T{% $ %} is not a product. *)
val decompose_prod : constr -> (Name.t Context.binder_annot * constr) list * constr
* Transforms a lambda term { % $ % } [ x_1 : T_1] .. [x_n : T_n]T{% $ % } into the pair
{ % $ % } ( [ ( x_n , T_n); ... ;(x_1,T_1)],T){% $ % } , where { % $ % } T{% $ % } is not a lambda .
{% $ %}([(x_n,T_n);...;(x_1,T_1)],T){% $ %}, where {% $ %}T{% $ %} is not a lambda. *)
val decompose_lambda : constr -> (Name.t Context.binder_annot * constr) list * constr
(** Given a positive integer n, decompose a product term
{% $ %}(x_1:T_1)..(x_n:T_n)T{% $ %}
into the pair {% $ %}([(xn,Tn);...;(x1,T1)],T){% $ %}.
Raise a user error if not enough products. *)
val decompose_prod_n : int -> constr -> (Name.t Context.binder_annot * constr) list * constr
* Given a positive integer { % $ % } n{% $ % } , decompose a lambda term
{ % $ % } [ x_1 : T_1] .. [x_n : T_n]T{% $ % } into the pair { % $ % } ( [ ( x_n , T_n); ... ;(x_1,T_1)],T){% $ % } .
Raise a user error if not enough lambdas .
{% $ %}[x_1:T_1]..[x_n:T_n]T{% $ %} into the pair {% $ %}([(x_n,T_n);...;(x_1,T_1)],T){% $ %}.
Raise a user error if not enough lambdas. *)
val decompose_lambda_n : int -> constr -> (Name.t Context.binder_annot * constr) list * constr
(** Extract the premisses and the conclusion of a term of the form
"(xi:Ti) ... (xj:=cj:Tj) ..., T" where T is not a product nor a let *)
val decompose_prod_decls : types -> Constr.rel_context * types
(** Idem with lambda's and let's *)
val decompose_lambda_decls : constr -> Constr.rel_context * constr
* Idem but extract the first [ n ] premisses , counting let - ins .
val decompose_prod_n_decls : int -> types -> Constr.rel_context * types
(** Idem for lambdas, _not_ counting let-ins *)
val decompose_lambda_n_assum : int -> constr -> Constr.rel_context * constr
(** Idem, counting let-ins *)
val decompose_lambda_n_decls : int -> constr -> Constr.rel_context * constr
(** Return the premisses/parameters of a type/term (let-in included) *)
val prod_decls : types -> Constr.rel_context
val lambda_decls : constr -> Constr.rel_context
(** Return the first n-th premisses/parameters of a type (let included and counted) *)
val prod_n_decls : int -> types -> Constr.rel_context
(** Return the first n-th premisses/parameters of a term (let included but not counted) *)
val lam_n_assum : int -> constr -> Constr.rel_context
(** Remove the premisses/parameters of a type/term *)
val strip_prod : types -> types
val strip_lam : constr -> constr
(** Remove the first n-th premisses/parameters of a type/term *)
val strip_prod_n : int -> types -> types
val strip_lam_n : int -> constr -> constr
(** Remove the premisses/parameters of a type/term (including let-in) *)
val strip_prod_decls : types -> types
val strip_lambda_decls : constr -> constr
* { 5 ... }
(** An "arity" is a term of the form [[x1:T1]...[xn:Tn]s] with [s] a sort.
Such a term can canonically be seen as the pair of a context of types
and of a sort *)
type arity = Constr.rel_context * Sorts.t
(** Build an "arity" from its canonical form *)
val mkArity : arity -> types
(** Destruct an "arity" into its canonical form *)
val destArity : types -> arity
(** Tell if a term has the form of an arity *)
val isArity : types -> bool
(* Deprecated *)
type sorts_family = Sorts.family = InSProp | InProp | InSet | InType | InQSort
[@@ocaml.deprecated "Alias for Sorts.family"]
type sorts = Sorts.t = private
| SProp | Prop | Set
| Type of Univ.Universe.t (** Type *)
| QSort of Sorts.QVar.t * Univ.Universe.t
[@@ocaml.deprecated "Alias for Sorts.t"]
val decompose_prod_assum : types -> Constr.rel_context * types
[@@ocaml.deprecated "Use [decompose_prod_decls] instead."]
val decompose_lam_assum : constr -> Constr.rel_context * constr
[@@ocaml.deprecated "Use [decompose_lambda_decls] instead."]
val decompose_prod_n_assum : int -> types -> Constr.rel_context * types
[@@ocaml.deprecated "Use [decompose_prod_n_decls] instead."]
val prod_assum : types -> Constr.rel_context
[@@ocaml.deprecated "Use [prod_decls] instead."]
val lam_assum : constr -> Constr.rel_context
[@@ocaml.deprecated "Use [lambda_decls] instead."]
val prod_n_assum : int -> types -> Constr.rel_context
[@@ocaml.deprecated "Use [prod_n_decls] instead."]
val strip_prod_assum : types -> types
[@@ocaml.deprecated "Use [strip_prod_decls] instead."]
val strip_lam_assum : constr -> constr
[@@ocaml.deprecated "Use [strip_lambda_decls] instead."]
val decompose_lam : t -> (Name.t Context.binder_annot * t) list * t
[@@ocaml.deprecated "Use [decompose_lambda] instead."]
val decompose_lam_n : int -> t -> (Name.t Context.binder_annot * t) list * t
[@@ocaml.deprecated "Use [decompose_lambda_n] instead."]
val decompose_lam_n_assum : int -> t -> rel_context * t
[@@ocaml.deprecated "Use [decompose_lambda_n_assum] instead."]
val decompose_lam_n_decls : int -> t -> rel_context * t
[@@ocaml.deprecated "Use [decompose_lambda_n_decls] instead."]
| null | https://raw.githubusercontent.com/coq/coq/1bacca2d42c3bab2c6826bd8cb50dd1805b310ea/kernel/term.mli | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
* For an always-relevant domain
* Named version of the functions from [Term].
* Constructs either [(x:t)c] or [[x=b:t]c]
* Constructs either [[x:t]c] or [[x=b:t]c]
* [applist (f,args)] and its variants work as [mkApp]
* [to_lambda n l]
@return [fun (x_1:T_1)...(x_n:T_n) => T]
where [l] is [forall (x_1:T_1)...(x_n:T_n), T]
* [to_prod n l]
@return [forall (x_1:T_1)...(x_n:T_n), T]
where [l] is [fun (x_1:T_1)...(x_n:T_n) => T]
* In [lambda_applist c args], [c] is supposed to have the form
[λΓ.c] with [Γ] without let-in; it returns [c] with the variables
of [Γ] instantiated by [args].
* In [lambda_applist_decls n c args], [c] is supposed to have the
form [λΓ.c] with [Γ] of length [n] and possibly with let-ins; it
returns [c] with the assumptions of [Γ] instantiated by [args] and
the local definitions of [Γ] expanded.
* pseudo-reduction rule
* In [prod_appvect_decls n c args], [c] is supposed to have the
form [∀Γ.c] with [Γ] of length [n] and possibly with let-ins; it
returns [c] with the assumptions of [Γ] instantiated by [args] and
the local definitions of [Γ] expanded.
* Transforms a product term {% $ %}(x_1:T_1)..(x_n:T_n)T{% $ %} into the pair
{% $ %}([(x_n,T_n);...;(x_1,T_1)],T){% $ %}, where {% $ %}T{% $ %} is not a product.
* Given a positive integer n, decompose a product term
{% $ %}(x_1:T_1)..(x_n:T_n)T{% $ %}
into the pair {% $ %}([(xn,Tn);...;(x1,T1)],T){% $ %}.
Raise a user error if not enough products.
* Extract the premisses and the conclusion of a term of the form
"(xi:Ti) ... (xj:=cj:Tj) ..., T" where T is not a product nor a let
* Idem with lambda's and let's
* Idem for lambdas, _not_ counting let-ins
* Idem, counting let-ins
* Return the premisses/parameters of a type/term (let-in included)
* Return the first n-th premisses/parameters of a type (let included and counted)
* Return the first n-th premisses/parameters of a term (let included but not counted)
* Remove the premisses/parameters of a type/term
* Remove the first n-th premisses/parameters of a type/term
* Remove the premisses/parameters of a type/term (including let-in)
* An "arity" is a term of the form [[x1:T1]...[xn:Tn]s] with [s] a sort.
Such a term can canonically be seen as the pair of a context of types
and of a sort
* Build an "arity" from its canonical form
* Destruct an "arity" into its canonical form
* Tell if a term has the form of an arity
Deprecated
* Type | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
open Names
open Constr
* { 5 Derived constructors }
* non - dependent product [ t1 - > t2 ] , an alias for
[ forall ( _ : t1 ) , t2 ] . Beware [ t_2 ] is NOT lifted .
Eg : in context [ A : Prop ] , [ A->A ] is built by [ ( mkArrow ( mkRel 1 ) ( mkRel 2 ) ) ]
[forall (_:t1), t2]. Beware [t_2] is NOT lifted.
Eg: in context [A:Prop], [A->A] is built by [(mkArrow (mkRel 1) (mkRel 2))]
*)
val mkArrow : types -> Sorts.relevance -> types -> constr
val mkArrowR : types -> types -> constr
val mkNamedLambda : Id.t Context.binder_annot -> types -> constr -> constr
val mkNamedLetIn : Id.t Context.binder_annot -> constr -> types -> constr -> constr
val mkNamedProd : Id.t Context.binder_annot -> types -> types -> types
val mkProd_or_LetIn : Constr.rel_declaration -> types -> types
val mkProd_wo_LetIn : Constr.rel_declaration -> types -> types
val mkNamedProd_or_LetIn : Constr.named_declaration -> types -> types
val mkNamedProd_wo_LetIn : Constr.named_declaration -> types -> types
val mkLambda_or_LetIn : Constr.rel_declaration -> constr -> constr
val mkNamedLambda_or_LetIn : Constr.named_declaration -> constr -> constr
* { 5 Other term constructors . }
val applist : constr * constr list -> constr
val applistc : constr -> constr list -> constr
val appvect : constr * constr array -> constr
val appvectc : constr -> constr array -> constr
* [ prodn n l b ] = [ forall ( x_1 : T_1) ... (x_n : T_n ) , b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ... ] .
where [l] is [(x_n,T_n)...(x_1,T_1)...]. *)
val prodn : int -> (Name.t Context.binder_annot * constr) list -> constr -> constr
* [ compose_prod l b ]
@return [ forall ( x_1 : T_1) ... (x_n : T_n ) , b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ] .
Inverse of [ decompose_prod ] .
@return [forall (x_1:T_1)...(x_n:T_n), b]
where [l] is [(x_n,T_n)...(x_1,T_1)].
Inverse of [decompose_prod]. *)
val compose_prod : (Name.t Context.binder_annot * constr) list -> constr -> constr
* [ n l b ]
@return [ fun ( x_1 : T_1) ... (x_n : T_n ) = > b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ... ] .
@return [fun (x_1:T_1)...(x_n:T_n) => b]
where [l] is [(x_n,T_n)...(x_1,T_1)...]. *)
val lamn : int -> (Name.t Context.binder_annot * constr) list -> constr -> constr
* [ compose_lam l b ]
@return [ fun ( x_1 : T_1) ... (x_n : T_n ) = > b ]
where [ l ] is [ ( x_n , T_n) ... (x_1,T_1 ) ] .
Inverse of [ it_destLam ]
@return [fun (x_1:T_1)...(x_n:T_n) => b]
where [l] is [(x_n,T_n)...(x_1,T_1)].
Inverse of [it_destLam] *)
val compose_lam : (Name.t Context.binder_annot * constr) list -> constr -> constr
val to_lambda : int -> constr -> constr
val to_prod : int -> constr -> constr
val it_mkLambda_or_LetIn : constr -> Constr.rel_context -> constr
val it_mkProd_wo_LetIn : types -> Constr.rel_context -> types
val it_mkProd_or_LetIn : types -> Constr.rel_context -> types
val lambda_applist : constr -> constr list -> constr
val lambda_appvect : constr -> constr array -> constr
val lambda_applist_decls : int -> constr -> constr list -> constr
val lambda_appvect_decls : int -> constr -> constr array -> constr
* [ prod_appvect ] [ forall ( x1 : B1; ... ;xn : Bn ) , B ] [ a1 ... an ]
@return [ B[a1 ... an ] ]
@return [B[a1...an]] *)
val prod_appvect : types -> constr array -> types
val prod_applist : types -> constr list -> types
val prod_appvect_decls : int -> types -> constr array -> types
val prod_applist_decls : int -> types -> constr list -> types
* { 5 Other term destructors . }
val decompose_prod : constr -> (Name.t Context.binder_annot * constr) list * constr
* Transforms a lambda term { % $ % } [ x_1 : T_1] .. [x_n : T_n]T{% $ % } into the pair
{ % $ % } ( [ ( x_n , T_n); ... ;(x_1,T_1)],T){% $ % } , where { % $ % } T{% $ % } is not a lambda .
{% $ %}([(x_n,T_n);...;(x_1,T_1)],T){% $ %}, where {% $ %}T{% $ %} is not a lambda. *)
val decompose_lambda : constr -> (Name.t Context.binder_annot * constr) list * constr
val decompose_prod_n : int -> constr -> (Name.t Context.binder_annot * constr) list * constr
* Given a positive integer { % $ % } n{% $ % } , decompose a lambda term
{ % $ % } [ x_1 : T_1] .. [x_n : T_n]T{% $ % } into the pair { % $ % } ( [ ( x_n , T_n); ... ;(x_1,T_1)],T){% $ % } .
Raise a user error if not enough lambdas .
{% $ %}[x_1:T_1]..[x_n:T_n]T{% $ %} into the pair {% $ %}([(x_n,T_n);...;(x_1,T_1)],T){% $ %}.
Raise a user error if not enough lambdas. *)
val decompose_lambda_n : int -> constr -> (Name.t Context.binder_annot * constr) list * constr
val decompose_prod_decls : types -> Constr.rel_context * types
val decompose_lambda_decls : constr -> Constr.rel_context * constr
* Idem but extract the first [ n ] premisses , counting let - ins .
val decompose_prod_n_decls : int -> types -> Constr.rel_context * types
val decompose_lambda_n_assum : int -> constr -> Constr.rel_context * constr
val decompose_lambda_n_decls : int -> constr -> Constr.rel_context * constr
val prod_decls : types -> Constr.rel_context
val lambda_decls : constr -> Constr.rel_context
val prod_n_decls : int -> types -> Constr.rel_context
val lam_n_assum : int -> constr -> Constr.rel_context
val strip_prod : types -> types
val strip_lam : constr -> constr
val strip_prod_n : int -> types -> types
val strip_lam_n : int -> constr -> constr
val strip_prod_decls : types -> types
val strip_lambda_decls : constr -> constr
* { 5 ... }
type arity = Constr.rel_context * Sorts.t
val mkArity : arity -> types
val destArity : types -> arity
val isArity : types -> bool
type sorts_family = Sorts.family = InSProp | InProp | InSet | InType | InQSort
[@@ocaml.deprecated "Alias for Sorts.family"]
type sorts = Sorts.t = private
| SProp | Prop | Set
| QSort of Sorts.QVar.t * Univ.Universe.t
[@@ocaml.deprecated "Alias for Sorts.t"]
val decompose_prod_assum : types -> Constr.rel_context * types
[@@ocaml.deprecated "Use [decompose_prod_decls] instead."]
val decompose_lam_assum : constr -> Constr.rel_context * constr
[@@ocaml.deprecated "Use [decompose_lambda_decls] instead."]
val decompose_prod_n_assum : int -> types -> Constr.rel_context * types
[@@ocaml.deprecated "Use [decompose_prod_n_decls] instead."]
val prod_assum : types -> Constr.rel_context
[@@ocaml.deprecated "Use [prod_decls] instead."]
val lam_assum : constr -> Constr.rel_context
[@@ocaml.deprecated "Use [lambda_decls] instead."]
val prod_n_assum : int -> types -> Constr.rel_context
[@@ocaml.deprecated "Use [prod_n_decls] instead."]
val strip_prod_assum : types -> types
[@@ocaml.deprecated "Use [strip_prod_decls] instead."]
val strip_lam_assum : constr -> constr
[@@ocaml.deprecated "Use [strip_lambda_decls] instead."]
val decompose_lam : t -> (Name.t Context.binder_annot * t) list * t
[@@ocaml.deprecated "Use [decompose_lambda] instead."]
val decompose_lam_n : int -> t -> (Name.t Context.binder_annot * t) list * t
[@@ocaml.deprecated "Use [decompose_lambda_n] instead."]
val decompose_lam_n_assum : int -> t -> rel_context * t
[@@ocaml.deprecated "Use [decompose_lambda_n_assum] instead."]
val decompose_lam_n_decls : int -> t -> rel_context * t
[@@ocaml.deprecated "Use [decompose_lambda_n_decls] instead."]
|
67176295e810ec072db7a86940474645a772a65c28d48b764814d82848ed9546 | f-o-a-m/kepler | Transaction.hs | module Tendermint.SDK.Types.Transaction where
import Control.Error (note)
import Control.Lens (Wrapped (..), from, iso, view,
(&), (.~), (^.), _Unwrapped')
import Crypto.Hash (Digest, hashWith)
import Crypto.Hash.Algorithms (SHA256 (..))
import Data.Bifunctor (bimap)
import Data.ByteString (ByteString)
import Data.Int (Int64)
import qualified Data.ProtoLens as P
import Data.Proxy
import Data.String.Conversions (cs)
import Data.Text (Text)
import Data.Word (Word64)
import GHC.Generics (Generic)
import qualified Proto.Types.Transaction as T
import qualified Proto.Types.Transaction_Fields as T
import Tendermint.SDK.Codec (HasCodec (..))
import Tendermint.SDK.Crypto (MakeDigest (..),
RecoverableSignatureSchema (..),
SignatureSchema (..))
import Tendermint.SDK.Types.Message (Msg (..), TypedMessage (..))
-- Our standard transaction type parameterized by the signature schema 'alg'
-- and an underlying message type 'msg'.
data Tx alg msg = Tx
{ txMsg :: Msg msg
, txRoute :: Text
, txGas :: Int64
, txSignature :: RecoverableSignature alg
, txSignBytes :: Message alg
, txSigner :: PubKey alg
, txNonce :: Word64
}
instance Functor (Tx alg) where
fmap f tx@Tx{txMsg} = tx {txMsg = fmap f txMsg}
--------------------------------------------------------------------------------
-- TODO: figure out what the actual standards are for these things, if there
-- even are any.
-- | Raw transaction type coming in over the wire
data RawTransaction = RawTransaction
{ rawTransactionData :: TypedMessage
^ the encoded message via protobuf encoding
, rawTransactionGas :: Int64
, rawTransactionRoute :: Text
-- ^ module name
, rawTransactionSignature :: ByteString
, rawTransactionNonce :: Word64
} deriving Generic
instance Wrapped RawTransaction where
type Unwrapped RawTransaction = T.RawTransaction
_Wrapped' = iso t f
where
t RawTransaction {..} =
P.defMessage
& T.data' .~ (rawTransactionData ^. _Wrapped')
& T.gas .~ rawTransactionGas
& T.route .~ rawTransactionRoute
& T.signature .~ rawTransactionSignature
& T.nonce .~ rawTransactionNonce
f message = RawTransaction
{ rawTransactionData = message ^. T.data' . _Unwrapped'
, rawTransactionGas = message ^. T.gas
, rawTransactionRoute = message ^. T.route
, rawTransactionSignature = message ^. T.signature
, rawTransactionNonce = message ^. T.nonce
}
instance HasCodec RawTransaction where
encode = P.encodeMessage . view _Wrapped'
decode = bimap cs (view $ from _Wrapped') . P.decodeMessage
instance MakeDigest RawTransaction where
makeDigest tx = hashWith SHA256 . encode $ tx {rawTransactionSignature = ""}
signRawTransaction
:: forall alg.
RecoverableSignatureSchema alg
=> Message alg ~ Digest SHA256
=> Proxy alg
-> PrivateKey alg --
-> RawTransaction
-> RecoverableSignature alg
signRawTransaction p priv tx = signRecoverableMessage p priv (makeDigest tx)
| Attempt to parse a Bytestring into a ' RawTransaction ' then as a ' Tx ' without
-- | attempting to parse the underlying message. This is done as a preprocessing
-- | step to the router, allowing for failure before the router is ever
-- | reached.
parseTx
:: forall alg.
RecoverableSignatureSchema alg
=> Message alg ~ Digest SHA256
=> Proxy alg
-> ByteString
-> Either Text (Tx alg ByteString)
parseTx p bs = do
rawTx@RawTransaction{..} <- decode bs
recSig <- note "Unable to parse transaction signature as a recovery signature." $
makeRecoverableSignature p rawTransactionSignature
let txForSigning = rawTx {rawTransactionSignature = ""}
signBytes = makeDigest txForSigning
signerPubKey <- note "Signature recovery failed." $ recover p recSig signBytes
return $ Tx
{ txMsg = Msg
{ msgData = typedMsgData rawTransactionData
, msgAuthor = addressFromPubKey p signerPubKey
, msgType = typedMsgType rawTransactionData
}
, txRoute = cs rawTransactionRoute
, txGas = rawTransactionGas
, txSignature = recSig
, txSignBytes = signBytes
, txSigner = signerPubKey
, txNonce = rawTransactionNonce
}
| null | https://raw.githubusercontent.com/f-o-a-m/kepler/6c1ad7f37683f509c2f1660e3561062307d3056b/hs-abci-sdk/src/Tendermint/SDK/Types/Transaction.hs | haskell | Our standard transaction type parameterized by the signature schema 'alg'
and an underlying message type 'msg'.
------------------------------------------------------------------------------
TODO: figure out what the actual standards are for these things, if there
even are any.
| Raw transaction type coming in over the wire
^ module name
| attempting to parse the underlying message. This is done as a preprocessing
| step to the router, allowing for failure before the router is ever
| reached. | module Tendermint.SDK.Types.Transaction where
import Control.Error (note)
import Control.Lens (Wrapped (..), from, iso, view,
(&), (.~), (^.), _Unwrapped')
import Crypto.Hash (Digest, hashWith)
import Crypto.Hash.Algorithms (SHA256 (..))
import Data.Bifunctor (bimap)
import Data.ByteString (ByteString)
import Data.Int (Int64)
import qualified Data.ProtoLens as P
import Data.Proxy
import Data.String.Conversions (cs)
import Data.Text (Text)
import Data.Word (Word64)
import GHC.Generics (Generic)
import qualified Proto.Types.Transaction as T
import qualified Proto.Types.Transaction_Fields as T
import Tendermint.SDK.Codec (HasCodec (..))
import Tendermint.SDK.Crypto (MakeDigest (..),
RecoverableSignatureSchema (..),
SignatureSchema (..))
import Tendermint.SDK.Types.Message (Msg (..), TypedMessage (..))
data Tx alg msg = Tx
{ txMsg :: Msg msg
, txRoute :: Text
, txGas :: Int64
, txSignature :: RecoverableSignature alg
, txSignBytes :: Message alg
, txSigner :: PubKey alg
, txNonce :: Word64
}
instance Functor (Tx alg) where
fmap f tx@Tx{txMsg} = tx {txMsg = fmap f txMsg}
data RawTransaction = RawTransaction
{ rawTransactionData :: TypedMessage
^ the encoded message via protobuf encoding
, rawTransactionGas :: Int64
, rawTransactionRoute :: Text
, rawTransactionSignature :: ByteString
, rawTransactionNonce :: Word64
} deriving Generic
instance Wrapped RawTransaction where
type Unwrapped RawTransaction = T.RawTransaction
_Wrapped' = iso t f
where
t RawTransaction {..} =
P.defMessage
& T.data' .~ (rawTransactionData ^. _Wrapped')
& T.gas .~ rawTransactionGas
& T.route .~ rawTransactionRoute
& T.signature .~ rawTransactionSignature
& T.nonce .~ rawTransactionNonce
f message = RawTransaction
{ rawTransactionData = message ^. T.data' . _Unwrapped'
, rawTransactionGas = message ^. T.gas
, rawTransactionRoute = message ^. T.route
, rawTransactionSignature = message ^. T.signature
, rawTransactionNonce = message ^. T.nonce
}
instance HasCodec RawTransaction where
encode = P.encodeMessage . view _Wrapped'
decode = bimap cs (view $ from _Wrapped') . P.decodeMessage
instance MakeDigest RawTransaction where
makeDigest tx = hashWith SHA256 . encode $ tx {rawTransactionSignature = ""}
signRawTransaction
:: forall alg.
RecoverableSignatureSchema alg
=> Message alg ~ Digest SHA256
=> Proxy alg
-> RawTransaction
-> RecoverableSignature alg
signRawTransaction p priv tx = signRecoverableMessage p priv (makeDigest tx)
| Attempt to parse a Bytestring into a ' RawTransaction ' then as a ' Tx ' without
parseTx
:: forall alg.
RecoverableSignatureSchema alg
=> Message alg ~ Digest SHA256
=> Proxy alg
-> ByteString
-> Either Text (Tx alg ByteString)
parseTx p bs = do
rawTx@RawTransaction{..} <- decode bs
recSig <- note "Unable to parse transaction signature as a recovery signature." $
makeRecoverableSignature p rawTransactionSignature
let txForSigning = rawTx {rawTransactionSignature = ""}
signBytes = makeDigest txForSigning
signerPubKey <- note "Signature recovery failed." $ recover p recSig signBytes
return $ Tx
{ txMsg = Msg
{ msgData = typedMsgData rawTransactionData
, msgAuthor = addressFromPubKey p signerPubKey
, msgType = typedMsgType rawTransactionData
}
, txRoute = cs rawTransactionRoute
, txGas = rawTransactionGas
, txSignature = recSig
, txSignBytes = signBytes
, txSigner = signerPubKey
, txNonce = rawTransactionNonce
}
|
b3ffba2cd44c6b75ae9ced92c090f059d62ad6c1fe6a0055b6a05ff75d7f7e3d | jeffshrager/biobike | history-listener.lisp | ;;; -*- mode: Lisp; Syntax: Common-Lisp; Package: WEBLISTENER; -*-
(in-package :weblistener)
;;; +=========================================================================+
| Copyright ( c ) 2002 , 2003 , 2004 JP , , |
;;; | |
;;; | Permission is hereby granted, free of charge, to any person obtaining |
;;; | a copy of this software and associated documentation files (the |
| " Software " ) , to deal in the Software without restriction , including |
;;; | without limitation the rights to use, copy, modify, merge, publish, |
| distribute , sublicense , and/or sell copies of the Software , and to |
| permit persons to whom the Software is furnished to do so , subject to |
;;; | the following conditions: |
;;; | |
;;; | The above copyright notice and this permission notice shall be included |
| in all copies or substantial portions of the Software . |
;;; | |
| THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , |
;;; | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
;;; | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
;;; | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |
;;; | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
;;; | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
;;; +=========================================================================+
;;; Author: JP Massar.
This gets triggered when the user clicks on one of the history URL 's
;;; (the things of the form '<n>>' and '::' preceding the input and
;;; output forms).
;;; It causes the form associated with the prompt the user just
;;; clicked on (i.e., the history URL) to be put
;;; back into one of the form text box
;;; areas so that the user can edit and/or reevaluate the form.
(publish
:path *history-url*
:content-type cl-user::*html-publish-content-type*
:function
(lambda (req ent)
(let* ((input (request-query req))
(package-name (url-parameter-value :pkg input))
(package-symbol (keywordize package-name))
(which (url-parameter-value :which input))
(history (url-parameter-value :history input)))
(execute-with-standard-weblistener-environment
req ent package-symbol
(lambda () (weblistener-in-history-redisplay which history))
))))
(defun weblistener-in-history-redisplay (which history)
;; find the appropriate form string based on the WHICH and HISTORY
(let* ((index (1+ (read-from-string history)))
(formstring
(cond
((string= which "in") (history-input-string index))
(t (error "Internal error. Only handles IN HISTORY!"))
)))
;; Determine which box the form should be displayed in based
;; on its size and whether it has newlines in it, then set
;; things up so that it will be displayed there.
(if (and (< (length formstring) 80) (null (find #\Newline formstring)))
(setq *oneline-form-data* formstring)
(setq *multiline-form-data* formstring)
)
(html
(:princ
(indirect-to-redisplay (incf *user-display-id*) (user-session-id))))
))
(publish
:path *new-history-url*
:content-type cl-user::*html-publish-content-type*
:function
(lambda (req ent)
(let* ((input (request-query req))
(package-name (url-parameter-value :pkg input))
(package-symbol (keywordize package-name))
(which (url-parameter-value :which input))
(history-index-string (url-parameter-value :history input))
(value-index-string (url-parameter-value :value input))
)
(execute-with-standard-weblistener-environment
req ent package-symbol
(lambda ()
(weblistener-history-redisplay
which history-index-string value-index-string
))))))
(defun weblistener-history-redisplay
(which history-index-string value-index-string)
If VALUE - INDEX is NIL , then
;; find the appropriate form string based on the WHICH and HISTORY
;; If it is not NIL, it must be a number and WHICH must be "out", and
;; we snarf the string representing the nth value in the output.
(let* ((hindex (1+ (read-from-string history-index-string)))
(vindex (read-from-string value-index-string))
(formstring
(cond
((plusp vindex)
(unless (string= which "out") (error "Internal error."))
(new-history-output-value-string hindex vindex))
((string= which "out") (new-history-output-string hindex))
(t (error "Internal error. Only handles OUT history."))
)))
;; Determine which box the form should be displayed in based
;; on its size and whether it has newlines in it, then set
;; things up so that it will be displayed there.
(if (and (< (length formstring) 80) (null (find #\Newline formstring)))
(setq *oneline-form-data* formstring)
(setq *multiline-form-data* formstring))
(html
(:princ
(indirect-to-redisplay (incf *user-display-id*) (user-session-id))
))))
(publish
:path *clear-history-url*
:content-type cl-user::*html-publish-content-type*
:function
(lambda (req ent)
(let* ((input (request-query req))
(package-name (url-parameter-value :pkg input))
(package-symbol (keywordize package-name)))
(execute-with-standard-weblistener-environment
req ent package-symbol
(lambda ()
(clear-history)
(html
(:princ
(indirect-to-redisplay (incf *user-display-id*) (user-session-id))
)))))))
(defun history-url
(which history-number &optional (pkgname (string (user-session-id))))
(make-history-url :pkg pkgname :which which :history history-number))
(defun new-history-url
(which history-number value-number
&optional
(pkgname (string (user-session-id))))
(make-new-history-url
:pkg pkgname :which which :history history-number :value value-number))
;;; User-callable function CLEAR-HISTORY
(defun clear-history (&optional (how-much :all))
#.(one-string-nl
"Delete part or all of the recorded history of the user's input/output"
"(and hence what gets shown to the user):"
"-- :all (the default) deletes everything"
"-- +n deletes the oldest N input/output pairs (those displayed topmost)."
"-- -n deletes the newest N pairs (those shown nearest the input boxes).")
(let ((hlen (length *in-history*)))
(cond
((eq how-much :all)
(setq *in-history* nil)
(setq *out-history* nil))
((eq how-much :logout)
(setf *in-history*
(if *in-history*
(subseq *in-history* 0 1)
'(("(logout)" (logout)))))
(setf *out-history* (list (list "" '(":expunged") '(:expunged)))))
((not (integerp how-much))
(error "Invalid CLEAR-HISTORY argument: ~A" how-much))
((plusp how-much)
(if (>= how-much hlen)
(clear-history :all)
(progn
(setf *in-history* (subseq *in-history* 0 (- hlen how-much)))
(setf *out-history* (subseq *out-history* 0 (- hlen how-much))))))
((minusp how-much)
(let ((how-much (abs how-much)))
(if (>= how-much hlen)
(clear-history :all)
(progn
(setf *in-history* (nthcdr how-much *in-history*))
(setf *out-history* (nthcdr how-much *out-history*)))))))
nil))
(defun clear-all-histories ()
(unless (wb::weblistener-guru-p)
(error "You shouldn't be executing this command."))
(let ((clearings nil))
(loop for user in *logins* do
(loop for session in (gethash user *user->sessionids-ht*) do
(with-protected-globals-bound session
(push (list *username* *sessionid*) clearings)
(clear-history)
)))
(loop for (user session) in clearings do
(cformatt "History cleared for user ~A, session ~A"
user session))
(clear-history)))
| null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/Weblisten/history-listener.lisp | lisp | -*- mode: Lisp; Syntax: Common-Lisp; Package: WEBLISTENER; -*-
+=========================================================================+
| |
| Permission is hereby granted, free of charge, to any person obtaining |
| a copy of this software and associated documentation files (the |
| without limitation the rights to use, copy, modify, merge, publish, |
| the following conditions: |
| |
| The above copyright notice and this permission notice shall be included |
| |
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
| IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
| SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
+=========================================================================+
Author: JP Massar.
(the things of the form '<n>>' and '::' preceding the input and
output forms).
It causes the form associated with the prompt the user just
clicked on (i.e., the history URL) to be put
back into one of the form text box
areas so that the user can edit and/or reevaluate the form.
find the appropriate form string based on the WHICH and HISTORY
Determine which box the form should be displayed in based
on its size and whether it has newlines in it, then set
things up so that it will be displayed there.
find the appropriate form string based on the WHICH and HISTORY
If it is not NIL, it must be a number and WHICH must be "out", and
we snarf the string representing the nth value in the output.
Determine which box the form should be displayed in based
on its size and whether it has newlines in it, then set
things up so that it will be displayed there.
User-callable function CLEAR-HISTORY |
(in-package :weblistener)
| Copyright ( c ) 2002 , 2003 , 2004 JP , , |
| " Software " ) , to deal in the Software without restriction , including |
| distribute , sublicense , and/or sell copies of the Software , and to |
| permit persons to whom the Software is furnished to do so , subject to |
| in all copies or substantial portions of the Software . |
| THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , |
| CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |
This gets triggered when the user clicks on one of the history URL 's
(publish
:path *history-url*
:content-type cl-user::*html-publish-content-type*
:function
(lambda (req ent)
(let* ((input (request-query req))
(package-name (url-parameter-value :pkg input))
(package-symbol (keywordize package-name))
(which (url-parameter-value :which input))
(history (url-parameter-value :history input)))
(execute-with-standard-weblistener-environment
req ent package-symbol
(lambda () (weblistener-in-history-redisplay which history))
))))
(defun weblistener-in-history-redisplay (which history)
(let* ((index (1+ (read-from-string history)))
(formstring
(cond
((string= which "in") (history-input-string index))
(t (error "Internal error. Only handles IN HISTORY!"))
)))
(if (and (< (length formstring) 80) (null (find #\Newline formstring)))
(setq *oneline-form-data* formstring)
(setq *multiline-form-data* formstring)
)
(html
(:princ
(indirect-to-redisplay (incf *user-display-id*) (user-session-id))))
))
(publish
:path *new-history-url*
:content-type cl-user::*html-publish-content-type*
:function
(lambda (req ent)
(let* ((input (request-query req))
(package-name (url-parameter-value :pkg input))
(package-symbol (keywordize package-name))
(which (url-parameter-value :which input))
(history-index-string (url-parameter-value :history input))
(value-index-string (url-parameter-value :value input))
)
(execute-with-standard-weblistener-environment
req ent package-symbol
(lambda ()
(weblistener-history-redisplay
which history-index-string value-index-string
))))))
(defun weblistener-history-redisplay
(which history-index-string value-index-string)
If VALUE - INDEX is NIL , then
(let* ((hindex (1+ (read-from-string history-index-string)))
(vindex (read-from-string value-index-string))
(formstring
(cond
((plusp vindex)
(unless (string= which "out") (error "Internal error."))
(new-history-output-value-string hindex vindex))
((string= which "out") (new-history-output-string hindex))
(t (error "Internal error. Only handles OUT history."))
)))
(if (and (< (length formstring) 80) (null (find #\Newline formstring)))
(setq *oneline-form-data* formstring)
(setq *multiline-form-data* formstring))
(html
(:princ
(indirect-to-redisplay (incf *user-display-id*) (user-session-id))
))))
(publish
:path *clear-history-url*
:content-type cl-user::*html-publish-content-type*
:function
(lambda (req ent)
(let* ((input (request-query req))
(package-name (url-parameter-value :pkg input))
(package-symbol (keywordize package-name)))
(execute-with-standard-weblistener-environment
req ent package-symbol
(lambda ()
(clear-history)
(html
(:princ
(indirect-to-redisplay (incf *user-display-id*) (user-session-id))
)))))))
(defun history-url
(which history-number &optional (pkgname (string (user-session-id))))
(make-history-url :pkg pkgname :which which :history history-number))
(defun new-history-url
(which history-number value-number
&optional
(pkgname (string (user-session-id))))
(make-new-history-url
:pkg pkgname :which which :history history-number :value value-number))
(defun clear-history (&optional (how-much :all))
#.(one-string-nl
"Delete part or all of the recorded history of the user's input/output"
"(and hence what gets shown to the user):"
"-- :all (the default) deletes everything"
"-- +n deletes the oldest N input/output pairs (those displayed topmost)."
"-- -n deletes the newest N pairs (those shown nearest the input boxes).")
(let ((hlen (length *in-history*)))
(cond
((eq how-much :all)
(setq *in-history* nil)
(setq *out-history* nil))
((eq how-much :logout)
(setf *in-history*
(if *in-history*
(subseq *in-history* 0 1)
'(("(logout)" (logout)))))
(setf *out-history* (list (list "" '(":expunged") '(:expunged)))))
((not (integerp how-much))
(error "Invalid CLEAR-HISTORY argument: ~A" how-much))
((plusp how-much)
(if (>= how-much hlen)
(clear-history :all)
(progn
(setf *in-history* (subseq *in-history* 0 (- hlen how-much)))
(setf *out-history* (subseq *out-history* 0 (- hlen how-much))))))
((minusp how-much)
(let ((how-much (abs how-much)))
(if (>= how-much hlen)
(clear-history :all)
(progn
(setf *in-history* (nthcdr how-much *in-history*))
(setf *out-history* (nthcdr how-much *out-history*)))))))
nil))
(defun clear-all-histories ()
(unless (wb::weblistener-guru-p)
(error "You shouldn't be executing this command."))
(let ((clearings nil))
(loop for user in *logins* do
(loop for session in (gethash user *user->sessionids-ht*) do
(with-protected-globals-bound session
(push (list *username* *sessionid*) clearings)
(clear-history)
)))
(loop for (user session) in clearings do
(cformatt "History cleared for user ~A, session ~A"
user session))
(clear-history)))
|
0c9bb3afd68261eeb55684b5140191fe39672934f3aeea12a0154b68f1eb17ae | apatil/aplomb | viewCommon.ml | let asset_fnames = [ "babel-polyfill.js"; "vega.js"; "vega-lite.js"; "vega-embed.js"; ]
let remote_assets = [
"-polyfill/6.26.0/polyfill.js";
"";
"-lite/2.0.1/vega-lite.js";
"-embed/3.0.0-rc7/vega-embed.js";
]
type html = [
| `Doctype of string
| `Text of string
| `Script of [`Url of string | `Crunch of string]
| `Element of (string * (string * string) list * html list)
]
let node_of_tag : html -> html Markup.node = function
| `Doctype s -> `Doctype Markup.({doctype_name=Some s; public_identifier=None; system_identifier=None; raw_text=None; force_quirks=false})
| `Text s -> `Text s
| `Script (`Url u) -> `Element (("", "script"), [(("", "src"), u)], [])
| `Script (`Crunch s) -> `Element (("", "script"), [], [`Text s])
| `Element (name, attrs, children) ->
let attrs_ = List.map (fun (k, v) -> (("", k), v)) attrs in
`Element (("", name), attrs_, children)
let to_html ?(assets=`Remote) ?(figureName="Figure 1") spec =
let url_script url =
`Element ("script", [("src", url)], [])
in
let crunch_script fname =
match VegaLiteAssets.read fname with | Some s -> `Element ("script", [], [`Text s])
(* The failure case should be statically guaranteed to not happen *)
in
(* TODO: Rather than listing these just list files in the crunch *)
let scripts = match assets with
| `Inline -> List.map crunch_script asset_fnames
| `Local -> List.map url_script asset_fnames
| `Remote -> List.map url_script remote_assets
in
let head = [
`Element ("title", [], [`Text figureName]);
`Element ("meta", [("charset", "utf-8")], []);
`Element ("style", [("media", "screen")], [`Text ".vega-actions a {\n margin-right: 5px;}"])
] @ scripts
in
let idStr = "aplomb" in
let specStr = spec |> VegaLite.V2.TopLevelExtendedSpec.to_yojson |> Yojson.Safe.to_string in
let specOptStr = `Assoc [("actions", `Assoc [("export", `Bool true); ("source", `Bool true); ("editor", `Bool true)])] |> Yojson.Safe.to_string in
let vizCode = "\nvar v1Spec = " ^ specStr ^ ";\nvegaEmbed(\"#" ^ idStr ^ "\", v1Spec, " ^ specOptStr ^ ").then(function(result){}).catch(console.error);" in
let body = [
`Element ("div", [("id", idStr)], []);
`Element ("script", [], [`Text vizCode])
]
in
let nodes = [
`Doctype "html";
`Element ("head", [], head);
`Element ("body", [], body)
]
in
let open Markup in
let streams = List.map (from_tree node_of_tag) nodes in
List.map to_list streams |> List.concat |> of_list
let to_string ?assets ?figureName spec =
let open Markup in
let stream = to_html ?assets ?figureName spec in
stream |> write_html |> to_string
let (>>=) (x : ('a, 'e) result) (f : 'a -> ('b, 'e) result) : ('b, 'e) result =
match x with
| Ok x_ -> f x_
| e -> e
let ensureDir pth : (string, string) result =
let mkdir = fun () ->
try let _ = Unix.mkdir pth 0o750 in Ok "ok"
with Sys_error msg -> Error ("Failed to mkdir: " ^ msg)
in
match Sys.is_directory pth with
| exception _ -> mkdir ()
| false -> mkdir ()
| true -> Ok "ok"
let ensureAssets pth : (string, string) result =
let reducer sofar fname = match sofar with
| Error e -> Error e
| Ok _ -> (let filePath = Filename.concat pth fname in
match Sys.file_exists filePath with
| true -> Ok "ok"
| false -> (match VegaLiteAssets.read fname with
| None -> Error "Asset not found"
| Some content ->
try
let oc = open_out filePath in
let _ = Printf.fprintf oc "%s\n" content in
let _ = close_out oc in
Ok "ok"
with _ -> Error "Could not write file"))
in
List.fold_left reducer (Ok "ok") VegaLiteAssets.file_list
let writeFigureFiles ?figuresPath ?(assets=`Local) ?(figureName="Figure 1") spec =
let stream = to_html ~assets ~figureName spec |> Markup.write_html in
let figuresPath_ = match figuresPath with
| Some p -> p
| None -> Filename.(concat (get_temp_dir_name ()) "aplomb" )
in
let pth = Filename.concat figuresPath_ (figureName ^ ".html") in
let pthfwd = Str.(global_replace (regexp Filename.dir_sep) "/" pth) in
let url = Uri.make ~scheme:"file" ~host:"" ~path:pthfwd () |> Uri.to_string in
(match Filename.is_relative pth with
| true -> Error "figuresPath must be absolute"
| false -> Ok url) >>= fun _ ->
ensureDir figuresPath_ >>= fun _ ->
(match assets with
| `Remote | `Inline -> Ok url
| `Local -> ensureAssets figuresPath_) >>= fun _ ->
(match Markup.to_file pth stream with
| exception (Sys_error msg) -> Error ("Failed to write: " ^ msg)
| _ -> Ok url)
| null | https://raw.githubusercontent.com/apatil/aplomb/2b9a6150c22093217661cb927ebec1e609ecf973/viewCommon.ml | ocaml | The failure case should be statically guaranteed to not happen
TODO: Rather than listing these just list files in the crunch | let asset_fnames = [ "babel-polyfill.js"; "vega.js"; "vega-lite.js"; "vega-embed.js"; ]
let remote_assets = [
"-polyfill/6.26.0/polyfill.js";
"";
"-lite/2.0.1/vega-lite.js";
"-embed/3.0.0-rc7/vega-embed.js";
]
type html = [
| `Doctype of string
| `Text of string
| `Script of [`Url of string | `Crunch of string]
| `Element of (string * (string * string) list * html list)
]
let node_of_tag : html -> html Markup.node = function
| `Doctype s -> `Doctype Markup.({doctype_name=Some s; public_identifier=None; system_identifier=None; raw_text=None; force_quirks=false})
| `Text s -> `Text s
| `Script (`Url u) -> `Element (("", "script"), [(("", "src"), u)], [])
| `Script (`Crunch s) -> `Element (("", "script"), [], [`Text s])
| `Element (name, attrs, children) ->
let attrs_ = List.map (fun (k, v) -> (("", k), v)) attrs in
`Element (("", name), attrs_, children)
let to_html ?(assets=`Remote) ?(figureName="Figure 1") spec =
let url_script url =
`Element ("script", [("src", url)], [])
in
let crunch_script fname =
match VegaLiteAssets.read fname with | Some s -> `Element ("script", [], [`Text s])
in
let scripts = match assets with
| `Inline -> List.map crunch_script asset_fnames
| `Local -> List.map url_script asset_fnames
| `Remote -> List.map url_script remote_assets
in
let head = [
`Element ("title", [], [`Text figureName]);
`Element ("meta", [("charset", "utf-8")], []);
`Element ("style", [("media", "screen")], [`Text ".vega-actions a {\n margin-right: 5px;}"])
] @ scripts
in
let idStr = "aplomb" in
let specStr = spec |> VegaLite.V2.TopLevelExtendedSpec.to_yojson |> Yojson.Safe.to_string in
let specOptStr = `Assoc [("actions", `Assoc [("export", `Bool true); ("source", `Bool true); ("editor", `Bool true)])] |> Yojson.Safe.to_string in
let vizCode = "\nvar v1Spec = " ^ specStr ^ ";\nvegaEmbed(\"#" ^ idStr ^ "\", v1Spec, " ^ specOptStr ^ ").then(function(result){}).catch(console.error);" in
let body = [
`Element ("div", [("id", idStr)], []);
`Element ("script", [], [`Text vizCode])
]
in
let nodes = [
`Doctype "html";
`Element ("head", [], head);
`Element ("body", [], body)
]
in
let open Markup in
let streams = List.map (from_tree node_of_tag) nodes in
List.map to_list streams |> List.concat |> of_list
let to_string ?assets ?figureName spec =
let open Markup in
let stream = to_html ?assets ?figureName spec in
stream |> write_html |> to_string
let (>>=) (x : ('a, 'e) result) (f : 'a -> ('b, 'e) result) : ('b, 'e) result =
match x with
| Ok x_ -> f x_
| e -> e
let ensureDir pth : (string, string) result =
let mkdir = fun () ->
try let _ = Unix.mkdir pth 0o750 in Ok "ok"
with Sys_error msg -> Error ("Failed to mkdir: " ^ msg)
in
match Sys.is_directory pth with
| exception _ -> mkdir ()
| false -> mkdir ()
| true -> Ok "ok"
let ensureAssets pth : (string, string) result =
let reducer sofar fname = match sofar with
| Error e -> Error e
| Ok _ -> (let filePath = Filename.concat pth fname in
match Sys.file_exists filePath with
| true -> Ok "ok"
| false -> (match VegaLiteAssets.read fname with
| None -> Error "Asset not found"
| Some content ->
try
let oc = open_out filePath in
let _ = Printf.fprintf oc "%s\n" content in
let _ = close_out oc in
Ok "ok"
with _ -> Error "Could not write file"))
in
List.fold_left reducer (Ok "ok") VegaLiteAssets.file_list
let writeFigureFiles ?figuresPath ?(assets=`Local) ?(figureName="Figure 1") spec =
let stream = to_html ~assets ~figureName spec |> Markup.write_html in
let figuresPath_ = match figuresPath with
| Some p -> p
| None -> Filename.(concat (get_temp_dir_name ()) "aplomb" )
in
let pth = Filename.concat figuresPath_ (figureName ^ ".html") in
let pthfwd = Str.(global_replace (regexp Filename.dir_sep) "/" pth) in
let url = Uri.make ~scheme:"file" ~host:"" ~path:pthfwd () |> Uri.to_string in
(match Filename.is_relative pth with
| true -> Error "figuresPath must be absolute"
| false -> Ok url) >>= fun _ ->
ensureDir figuresPath_ >>= fun _ ->
(match assets with
| `Remote | `Inline -> Ok url
| `Local -> ensureAssets figuresPath_) >>= fun _ ->
(match Markup.to_file pth stream with
| exception (Sys_error msg) -> Error ("Failed to write: " ^ msg)
| _ -> Ok url)
|
fcc272319ec5a66b05bac9042f671332f8f457bd50797597114481ef6c5b919b | ghcjs/ghcjs-examples | JavaScriptFFI.hs | # LANGUAGE CPP , , TemplateHaskell , QuasiQuotes , ScopedTypeVariables , NoMonomorphismRestriction #
-----------------------------------------------------------------------------
--
-- Module : Demo.JavaScriptFFI
-- Copyright :
-- License : BSD3
--
-- | To find a nice way to wor
--
-----------------------------------------------------------------------------
module Demo.JavaScriptFFI (
canvasDemo
, callHaskell
) where
import GHCJS.DOM.Types
(WebView(..), Document(..), HTMLDivElement(..))
import GHCJS.DOM.HTMLCanvasElement
(htmlCanvasElementSetHeight, htmlCanvasElementSetWidth)
import GHCJS.DOM.Node (nodeAppendChild)
import Control.Lens ((^.))
import Language.Javascript.JSC
(eval, evalJM, valToNumber, fun, jsg, js, (#), (<#), runJSC_)
import WebKitUtils
import GHCJS.DOM (webViewGetDomDocument)
import Control.Monad.Reader (ReaderT(..))
import GHCJS.DOM.HTMLElement
(htmlElementSetInnerHTML)
import Data.Text.Lazy (unpack)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Text.Hamlet (shamlet)
import Control.Monad (void)
import Control.Monad.Trans ( liftIO )
import Demo.Threading (isPrime)
#ifdef MIN_VERSION_jmacro
import Language.Javascript.JMacro
#endif
canvasDemo :: WebView -> Document -> HTMLDivElement -> IO ()
canvasDemo webView doc example = do
htmlElementSetInnerHTML example . unpack $ renderHtml
[shamlet|$newline always
<canvas #"canvas" width="600" height="400">
|]
runJSC_ webView $ do
document <- jsg "document"
let getElementById = js "getElementById"
getContext = js "getContext"
fillStyle = js "fillStyle"
fillRect = js "fillRect"
-- var canvas = document.getElementById("canvas")
canvas <- document ^. getElementById # ["canvas"]
-- var ctx = canvas.getContext("2d")
ctx <- canvas ^. getContext # ["2d"]
ctx.fillStyle = " # 00FF00 "
ctx ^. fillStyle <# "#008000"
ctx.fillRect ( 0 , 0 , 150 , 75 )
ctx ^. fillRect # ([0, 0, 100, 100] :: [Double])
callHaskell :: WebView -> IO ()
callHaskell webView = do
runJSC_ webView $ do
jsg "checkPrime" <# fun $ \ f this [a] -> do
num <- valToNumber a
let i = round num
liftIO . putStrLn $ "The number " ++ show i ++
if isPrime i
then " is a prime"
else " is not a prime"
$([evalJM|for(n = 0; n != 10; ++n) checkPrime(n);|])
| null | https://raw.githubusercontent.com/ghcjs/ghcjs-examples/217b7fd3816f57634977beac711452704c3ea688/mloc-js/src/Demo/JavaScriptFFI.hs | haskell | ---------------------------------------------------------------------------
Module : Demo.JavaScriptFFI
Copyright :
License : BSD3
| To find a nice way to wor
---------------------------------------------------------------------------
var canvas = document.getElementById("canvas")
var ctx = canvas.getContext("2d") | # LANGUAGE CPP , , TemplateHaskell , QuasiQuotes , ScopedTypeVariables , NoMonomorphismRestriction #
module Demo.JavaScriptFFI (
canvasDemo
, callHaskell
) where
import GHCJS.DOM.Types
(WebView(..), Document(..), HTMLDivElement(..))
import GHCJS.DOM.HTMLCanvasElement
(htmlCanvasElementSetHeight, htmlCanvasElementSetWidth)
import GHCJS.DOM.Node (nodeAppendChild)
import Control.Lens ((^.))
import Language.Javascript.JSC
(eval, evalJM, valToNumber, fun, jsg, js, (#), (<#), runJSC_)
import WebKitUtils
import GHCJS.DOM (webViewGetDomDocument)
import Control.Monad.Reader (ReaderT(..))
import GHCJS.DOM.HTMLElement
(htmlElementSetInnerHTML)
import Data.Text.Lazy (unpack)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Text.Hamlet (shamlet)
import Control.Monad (void)
import Control.Monad.Trans ( liftIO )
import Demo.Threading (isPrime)
#ifdef MIN_VERSION_jmacro
import Language.Javascript.JMacro
#endif
canvasDemo :: WebView -> Document -> HTMLDivElement -> IO ()
canvasDemo webView doc example = do
htmlElementSetInnerHTML example . unpack $ renderHtml
[shamlet|$newline always
<canvas #"canvas" width="600" height="400">
|]
runJSC_ webView $ do
document <- jsg "document"
let getElementById = js "getElementById"
getContext = js "getContext"
fillStyle = js "fillStyle"
fillRect = js "fillRect"
canvas <- document ^. getElementById # ["canvas"]
ctx <- canvas ^. getContext # ["2d"]
ctx.fillStyle = " # 00FF00 "
ctx ^. fillStyle <# "#008000"
ctx.fillRect ( 0 , 0 , 150 , 75 )
ctx ^. fillRect # ([0, 0, 100, 100] :: [Double])
callHaskell :: WebView -> IO ()
callHaskell webView = do
runJSC_ webView $ do
jsg "checkPrime" <# fun $ \ f this [a] -> do
num <- valToNumber a
let i = round num
liftIO . putStrLn $ "The number " ++ show i ++
if isPrime i
then " is a prime"
else " is not a prime"
$([evalJM|for(n = 0; n != 10; ++n) checkPrime(n);|])
|
93ccafcc1e48b74e1f099f72a33fd8c2c6e651f0a0231ddbbe633b9d3ed3d619 | DSLsofMath/DSLsofMath | E1_05.hs | -- Exercise 1_FunTup
module Ex_1_FunTup where
-- | Encode a 'Bool' as an 'Integer': 'True' maps to 1, 'False' to 0.
-- Used below as a sample function when testing the isomorphism.
tf :: Bool -> Integer
tf b = if b then 1 else 0
-- | One direction of the isomorphism @(Bool -> t) ~ (t, t)@: tabulate a
-- function on 'Bool' as the pair (value at 'True', value at 'False').
isoR :: (Bool -> t) -> (t, t)
isoR g = (g True, g False)
-- | The other direction of the isomorphism: turn a pair into a function on
-- 'Bool', returning the first component for 'True' and the second for 'False'.
isoL :: (t, t) -> (Bool -> t)
isoL (onTrue, onFalse) = \b -> if b then onTrue else onFalse
-- Test the functionality of isoR and isoL
-- test0 round-trips both a function (tf) and a pair through the isomorphism
-- and checks the observable results agree.
test0,test1,test2 :: Bool
test0 = isoL(isoR tf) True == fst(isoR(isoL (1,0))) && isoL(isoR tf) False == snd(isoR(isoL (1,0)))
-- "isoL◦isoR = id and isoR◦isoL = id"
-- test1: isoR . isoL is the identity on pairs (checked componentwise).
test1 = fst (1,0) == fst (isoR (isoL (1,0)))
     && snd (1,0) == snd (isoR (isoL (1,0)))
-- test2: isoL . isoR is the identity on functions (checked pointwise at
-- both inputs, which is exhaustive for Bool).
test2 = tf True == isoL (isoR tf) True
     && tf False == isoL (isoR tf) False
| null | https://raw.githubusercontent.com/DSLsofMath/DSLsofMath/216464afda03c54709fae39e626ca19e8053444e/L/01/ExerciseSolutions/E1_05.hs | haskell | Exercise 1_FunTup
Function from Bool to integer, for testing
Test the functionality of isoR and isoL
"isoL◦isoR = id and isoR◦isoL = id" | module Ex_1_FunTup where
tf :: Bool -> Integer
tf True = 1
tf False = 0
isoR :: (Bool -> t) -> (t, t)
isoR f = (f True, f False)
isoL :: (t, t) -> (Bool -> t)
isoL (a,b) = func
where
func True = a
func False = b
test0,test1,test2 :: Bool
test0 = isoL(isoR tf) True == fst(isoR(isoL (1,0))) && isoL(isoR tf) False == snd(isoR(isoL (1,0)))
test1 = fst (1,0) == fst (isoR (isoL (1,0)))
&& snd (1,0) == snd (isoR (isoL (1,0)))
test2 = tf True == isoL (isoR tf) True
&& tf False == isoL (isoR tf) False
|
50ca06698b023c4d5ae3e7f95044817bf081b3090d2d7ba190c52ae56fc17f49 | vii/dysfunkycom | cl-vectors.lisp | ;;;;; Converted from the "CL-VECTORS" tutorial at:
;;;;; "-vectors/section-tutorial#tutorial"
;;;;; (C)2006 Luke J Crook
(in-package #:sdl-examples)
(defun cl-vectors-1 ()
  "CL-VECTORS tutorial #1: rasterize one anti-aliased triangle into the
SDL display and loop until the window is closed."
  (let ((width 300) (height 200))
    (sdl:with-init ()
      (sdl:window width height :title-caption "CL-VECTORS, Tutorial #1")
      (setf (sdl:frame-rate) 5)
      (sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
      (let ((state (aa:make-state)))     ; create the state
        (aa:line-f state 200 50 250 150) ; describe the 3 sides
        (aa:line-f state 250 150 50 100) ; of the triangle
        (aa:line-f state 50 100 200 50)
        (let ((put-pixel (sdl:image-put-pixel sdl:*default-display* #(0 0 0))))
          (aa:cells-sweep state put-pixel) ; render it
          (sdl:with-events ()
            (:quit-event () t)
            (:video-expose-event () (sdl:update-display))
            (:idle () (sdl:update-display))))))))
(defun cl-vectors-2 ()
  "CL-VECTORS tutorial #2: accumulate two triangles in a single rasterizer
state and render them together in black."
  (let ((width 300) (height 200))
    (sdl:with-init ()
      (sdl:window width height :title-caption "CL-VECTORS, Tutorial #2")
      (setf (sdl:frame-rate) 5)
      (sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
      (let ((state (aa:make-state)))     ; create the state
        ;; the 1st triangle
        (aa:line-f state 200 50 250 150) ; describe the 3 sides
        (aa:line-f state 250 150 50 100) ; of the first triangle
        (aa:line-f state 50 100 200 50)
        ;; the 2nd triangle
        (aa:line-f state 75 25 10 75)    ; describe the 3 sides
        (aa:line-f state 10 75 175 100)  ; of the second triangle
        (aa:line-f state 175 100 75 25)
        (let ((put-pixel (sdl:image-put-pixel sdl:*default-display* #(0 0 0))))
          (aa:cells-sweep state put-pixel) ; render it
          (sdl:with-events ()
            (:quit-event () t)
            (:video-expose-event () (sdl:update-display))
            (:idle () (sdl:update-display))))))))
(defun cl-vectors-3 ()
  "CL-VECTORS tutorial #3: keep each triangle in its own rasterizer state
so they can be swept separately, one in red and one in blue."
  (let ((width 300) (height 200))
    (sdl:with-init ()
      (sdl:window width height :title-caption "CL-VECTORS, Tutorial #3")
      (setf (sdl:frame-rate) 5)
      (sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
      (let ((state1 (aa:make-state))
            (state2 (aa:make-state)))
        ;; the 1st triangle
        (aa:line-f state1 200 50 250 150) ; describe the 3 sides
        (aa:line-f state1 250 150 50 100) ; of the first triangle
        (aa:line-f state1 50 100 200 50)
        ;; the 2nd triangle
        (aa:line-f state2 75 25 10 75)    ; describe the 3 sides
        (aa:line-f state2 10 75 175 100)  ; of the second triangle
        (aa:line-f state2 175 100 75 25)
        (aa:cells-sweep state1 (sdl:image-put-pixel sdl:*default-display* #(255 0 0)))
        (aa:cells-sweep state2 (sdl:image-put-pixel sdl:*default-display* #(0 0 255)))
        (sdl:with-events ()
          (:quit-event () t)
          (:video-expose-event () (sdl:update-display))
          (:idle () (sdl:update-display)))))))
;; Tutorial #4: rasterize the glyph #\A from the default TTF font by feeding
;; its ZPB-TTF outline paths through the CL-VECTORS rasterizer, drawn in red.
(defun cl-vectors-4 ()
  (let ((width 300) (height 200))
    (sdl:with-init ()
      (sdl:window width height :title-caption "CL-VECTORS, Rendering a Glyph")
      (setf (sdl:frame-rate) 5)
      (sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
      (zpb-ttf:with-font-loader (loader sdl:*default-ttf-font*)
        ;; The negative :scale-y flips font coordinates (y grows upward)
        ;; into screen coordinates (y grows downward); :offset positions
        ;; the glyph baseline inside the window.
        (aa:cells-sweep (vectors:update-state (aa:make-state)
                                              (paths-ttf:paths-from-glyph (zpb-ttf:find-glyph #\A loader)
                                                                          :offset (paths:make-point 50 160)
                                                                          :scale-x 0.1
                                                                          :scale-y -0.1))
                        (sdl:image-put-pixel sdl:*default-display* #(255 0 0)))
        (sdl:with-events ()
          (:quit-event () t)
          (:video-expose-event () (sdl:update-display))
          (:idle () (sdl:update-display)))))))
| null | https://raw.githubusercontent.com/vii/dysfunkycom/a493fa72662b79e7c4e70361ad0ea3c7235b6166/addons/lispbuilder-sdl/examples/cl-vectors.lisp | lisp | Converted from the "CL-VECTORS" tutorial at:
"-vectors/section-tutorial#tutorial"
(C)2006 Luke J Crook
create the state
of the triangle
render it
create the state
render it |
(in-package #:sdl-examples)
(defun cl-vectors-1 ()
(let ((width 300) (height 200))
(sdl:with-init ()
(sdl:window width height :title-caption "CL-VECTORS, Tutorial #1")
(setf (sdl:frame-rate) 5)
(sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
describe the 3 sides
(aa:line-f state 50 100 200 50)
(let ((put-pixel (sdl:image-put-pixel sdl:*default-display* #(0 0 0))))
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))
(:idle () (sdl:update-display))))))))
(defun cl-vectors-2 ()
(let ((width 300) (height 200))
(sdl:with-init ()
(sdl:window width height :title-caption "CL-VECTORS, Tutorial #2")
(setf (sdl:frame-rate) 5)
(sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
the 1st triangle
describe the 3 sides
of the first triangle
(aa:line-f state 50 100 200 50)
the 2nd triangle
describe the 3 sides
of the second triangle
(aa:line-f state 175 100 75 25)
(let ((put-pixel (sdl:image-put-pixel sdl:*default-display* #(0 0 0))))
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))
(:idle () (sdl:update-display))))))))
(defun cl-vectors-3 ()
(let ((width 300) (height 200))
(sdl:with-init ()
(sdl:window width height :title-caption "CL-VECTORS, Tutorial #3")
(setf (sdl:frame-rate) 5)
(sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
(let ((state1 (aa:make-state))
(state2 (aa:make-state)))
the 1st triangle
describe the 3 sides
of the first triangle
(aa:line-f state1 50 100 200 50)
the 2nd triangle
describe the 3 sides
of the second triangle
(aa:line-f state2 175 100 75 25)
(aa:cells-sweep state1 (sdl:image-put-pixel sdl:*default-display* #(255 0 0)))
(aa:cells-sweep state2 (sdl:image-put-pixel sdl:*default-display* #(0 0 255)))
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))
(:idle () (sdl:update-display)))))))
(defun cl-vectors-4 ()
(let ((width 300) (height 200))
(sdl:with-init ()
(sdl:window width height :title-caption "CL-VECTORS, Rendering a Glyph")
(setf (sdl:frame-rate) 5)
(sdl:clear-display (sdl:color :r 255 :g 255 :b 255))
(zpb-ttf:with-font-loader (loader sdl:*default-ttf-font*)
(aa:cells-sweep (vectors:update-state (aa:make-state)
(paths-ttf:paths-from-glyph (zpb-ttf:find-glyph #\A loader)
:offset (paths:make-point 50 160)
:scale-x 0.1
:scale-y -0.1))
(sdl:image-put-pixel sdl:*default-display* #(255 0 0)))
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))
(:idle () (sdl:update-display)))))))
|
c4c06b9076628f5a1363fe88370aaad2192fa15dc48766f206756c09211441c7 | seckcoder/iu_c311 | interp.rkt | ; interpreter of let-lang
#lang eopl
(require racket/file)
(require "../base/utils.rkt")
(require "store1.rkt")
(provide (all-defined-out))
; expval := Int | Bool | Proc
; during the implemention, I find this datatype actually
; useless...
(define-datatype
expval expval?
(numval
(int integer?))
(boolval
(bool boolean?))
(procval
(var symbol?)
(body anything?)))
(define-datatype
proc proc?
(closure
(vars (list-of symbol?))
(body anything?)
(env anything?)))
;; Apply a closure under call-by-value-result semantics:
;; each argument value is copied into a fresh inner reference for the body,
;; and after the body returns the inner values are copied back into the
;; caller-supplied outer references.
(define apply-proc
  (lambda (proc1 arg-vals arg-outer-refs)
    (cases
      proc proc1
      (closure
        (vars body env)
        ;; Fresh references so the body mutates copies, not the caller's cells.
        (let* ((arg-inner-refs (newrefs arg-vals))
               (new-env (extend-env vars
                                    arg-inner-refs
                                    env)))
          (let ((body-ret (interp-exp body new-env)))
            ; copy back
            (for-each (lambda (inner-ref outer-ref)
                        (setref! outer-ref
                                 (deref inner-ref)))
                      arg-inner-refs
                      arg-outer-refs)
            ;; The value of the call is the body's value, after copy-back.
            body-ret)))
      (else (eopl:error 'apply-proc "invalid procedure value:" proc1)))))
; environment
; env := '() | (var val env)
(define-datatype
environment environment?
(empty-env)
(extend-env
(vars (list-of symbol?))
(refs (list-of reference?))
(env environment?))
)
;; Build a mutually-recursive environment for letrec: allocate one reference
;; per procedure name first (initially '()), then fill each reference with a
;; closure that captures the new environment itself, so the bodies can refer
;; to every procedure in the group.
(define extend-env-recursively
  (lambda (pnames b-lst-of-vars b-bodies inherited-env)
    (let* ((refs (newrefs (map (lambda (_)
                                 '()) pnames)))
           (new-env (extend-env pnames refs inherited-env)))
      (for-each (lambda (ref b-vars b-body)
                  (setref! ref
                           (closure b-vars
                                    b-body
                                    new-env)))
                refs
                b-lst-of-vars
                b-bodies)
      new-env)))
;; Look up search-var in the environment, returning the reference bound to it.
;; Frames are searched innermost-first; an exhausted environment is an error.
(define apply-env
  (lambda (env search-var)
    (cases
      environment env
      (empty-env
        ()
        (eopl:error 'apply-env "var:~s not found" search-var))
      (extend-env
        (vars refs inherited-env)
        ;; index-of signals "absent" with a negative index, in which case
        ;; the search continues in the enclosing frame.
        (let ((idx (index-of vars search-var)))
          (if (< idx 0)
              (apply-env inherited-env search-var)
              (list-ref refs idx))))
      )))
; grammar
(define scanner-spec-a
'((white-sp (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier (letter (arbno (or letter digit))) symbol)
(number (digit (arbno digit)) number)
(number ("-" digit (arbno digit)) number)
))
(define grammar-al
'((program
(expression)
a-program)
(expression
(number)
const-exp)
(expression
("-(" expression "," expression ")")
diff-exp)
(expression
("zero?" "(" expression ")")
zero?-exp)
(expression
("if" expression "then" expression "else" expression)
if-exp)
(expression
(identifier)
var-exp)
(expression
("let" (arbno identifier "=" expression) "in" expression)
let-exp)
(expression
("proc" "(" (arbno identifier) ")" expression)
proc-exp)
(expression
("(" expression (arbno expression) ")")
call-exp)
(expression
("letrec" (arbno identifier "(" (arbno identifier) ")" "=" expression) "in" expression)
letrec-exp)
(expression
("{" (arbno expression ";") "}")
compound-exp)
(expression
("set" identifier "=" expression)
set-exp)
))
(define list-the-datatypes
(lambda ()
(sllgen:list-define-datatypes scanner-spec-a grammar-al)))
(sllgen:make-define-datatypes scanner-spec-a grammar-al)
(define scan&parse
(sllgen:make-string-parser scanner-spec-a grammar-al))
;; Evaluate every expression of a block in order for its effect; the value
;; of the final expression is the value of the block. An empty block is an
;; error.
(define interp-exps-ret-last
  (lambda (exps env)
    (if (null? exps)
        (eopl:error 'compund-exps "no expression in block")
        (let ((head-val (interp-exp (car exps) env)))
          (if (null? (cdr exps))
              head-val
              (interp-exps-ret-last (cdr exps) env))))))
;; Extract the variable name from a call operand. Call-by-value-result needs
;; a reference to copy results back into, so operands must be plain variable
;; expressions; anything else is rejected.
(define interp-rand-exp
  (lambda (rand-exp env)
    (cases expression rand-exp
      (var-exp
        (var)
        var)
      (else
        (eopl:error 'interp-rand-exp "procedure parameter must be variables")))))
(define interp-exp
(lambda (exp env)
(cases
expression exp
(const-exp
(num)
num)
(diff-exp
(exp1 exp2)
(- (interp-exp exp1 env)
(interp-exp exp2 env)))
(zero?-exp
(exp)
(zero? (interp-exp exp env)))
(if-exp
(predicate sbj-exp else-exp)
(if (interp-exp predicate env)
(interp-exp sbj-exp env)
(interp-exp else-exp env)))
(var-exp
(var)
(deref (apply-env env var)))
(let-exp
(vars val-exps exp2)
(let* ((vals (map (lambda (val-exp)
(interp-exp val-exp env))
val-exps))
(new-env (extend-env vars
(newrefs vals)
env)))
(interp-exp exp2 new-env)))
(proc-exp
(vars body)
(closure vars body env))
(call-exp
(proc-exp rand-exps)
(let* ((proc (interp-exp proc-exp env))
(rand-vars (map (lambda (rand-exp)
(interp-rand-exp rand-exp env))
rand-exps))
(rand-refs (map (lambda (var)
(apply-env env var))
rand-vars))
(rand-vals (map (lambda (ref)
(deref ref))
rand-refs)))
(apply-proc proc rand-vals rand-refs)))
(letrec-exp
(p-names b-lst-of-vars b-bodies letrec-body)
(let ((new-env (extend-env-recursively p-names
b-lst-of-vars
b-bodies
env)))
(interp-exp letrec-body new-env)))
(compound-exp
(exps)
(interp-exps-ret-last exps env))
(set-exp
(var exp)
(setref! (apply-env env var)
(interp-exp exp env)))
)))
(define initial-env (empty-env))
(define interp
(lambda (datum)
(cases
program datum
(a-program
(exp)
(initialize-store!)
(interp-exp exp initial-env)))))
(define test-prog
(lambda (prog)
(display (interp (scan&parse prog)))
(newline)))
(define test-prog-eqv
(lambda (prog v)
(let ((interp-v (interp (scan&parse prog))))
(if (not (eq? interp-v v))
(eopl:error 'test-prog-eqv "value-of:~s is ~s not eq ~s" prog interp-v v)
'ok))))
(define (test)
(test-prog-eqv "let f = proc (x) -(x,11)
in let var1 = 77
in let var2 = (f var1)
in (f var2)"
55)
; the following example is used to identify scoping of the proc-lang.
if it 's dynamic scoping , then the result will be 1 , else result will be
2 with lexical scoping
(test-prog-eqv "let f = let x = 3
in proc (y) -(y,x)
in let x = 4
y = 5
in (f y)"
2)
(test-prog-eqv "letrec
foo(a) = (bar a)
bar(b) = b
in let v = 3
in (foo v)"
3)
; for multi args test
(test-prog-eqv "let f = proc (x y) -(x,y)
x = 10
y = 2
in (f x y)"
8)
(test-prog-eqv "letrec
foo() = (bar)
bar() = -(10,2)
in (foo)"
8)
; test block
(test-prog-eqv "{
-(3,2);
-(4,2);
-(5,1);
}"
4)
; test implicit reference
(test-prog-eqv "let x = 0
in letrec even()
= if zero?(x)
then 1
else {
set x = -(x,1);
(odd);
}
odd()
= if zero?(x)
then 0
else {
set x = -(x,1);
(even);
}
in {
set x = 12; (odd);
}"
0)
(test-prog-eqv "let p = proc(x) set x = 4
in let a = 3
in {
(p a);
a;
}"
4) ; call by value result
eopl 4.37
for call - by - value - result , result is 4
for call - by - reference , result is 3
(test-prog-eqv "let p = proc(x y) {
set y = 4;
set x = 3;
}
x = 0
in {
(p x x);
x;
}"
4)
(display "finished test...")
)
| null | https://raw.githubusercontent.com/seckcoder/iu_c311/a1215983b6ab08df32058ef1e089cb294419e567/racket/call-by-value-result/interp.rkt | racket | interpreter of let-lang
expval := Int | Bool | Proc
during the implemention, I find this datatype actually
useless...
copy back
environment
env := '() | (var val env)
grammar
the following example is used to identify scoping of the proc-lang.
for multi args test
test block
test implicit reference
(odd);
call by value result
|
#lang eopl
(require racket/file)
(require "../base/utils.rkt")
(require "store1.rkt")
(provide (all-defined-out))
(define-datatype
expval expval?
(numval
(int integer?))
(boolval
(bool boolean?))
(procval
(var symbol?)
(body anything?)))
(define-datatype
proc proc?
(closure
(vars (list-of symbol?))
(body anything?)
(env anything?)))
(define apply-proc
(lambda (proc1 arg-vals arg-outer-refs)
(cases
proc proc1
(closure
(vars body env)
(let* ((arg-inner-refs (newrefs arg-vals))
(new-env (extend-env vars
arg-inner-refs
env)))
(let ((body-ret (interp-exp body new-env)))
(for-each (lambda (inner-ref outer-ref)
(setref! outer-ref
(deref inner-ref)))
arg-inner-refs
arg-outer-refs)
body-ret)))
(else (eopl:error 'apply-proc "invalid procedure value:" proc1)))))
(define-datatype
environment environment?
(empty-env)
(extend-env
(vars (list-of symbol?))
(refs (list-of reference?))
(env environment?))
)
(define extend-env-recursively
(lambda (pnames b-lst-of-vars b-bodies inherited-env)
(let* ((refs (newrefs (map (lambda (_)
'()) pnames)))
(new-env (extend-env pnames refs inherited-env)))
(for-each (lambda (ref b-vars b-body)
(setref! ref
(closure b-vars
b-body
new-env)))
refs
b-lst-of-vars
b-bodies)
new-env)))
(define apply-env
(lambda (env search-var)
(cases
environment env
(empty-env
()
(eopl:error 'apply-env "var:~s not found" search-var))
(extend-env
(vars refs inherited-env)
(let ((idx (index-of vars search-var)))
(if (< idx 0)
(apply-env inherited-env search-var)
(list-ref refs idx))))
)))
(define scanner-spec-a
'((white-sp (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier (letter (arbno (or letter digit))) symbol)
(number (digit (arbno digit)) number)
(number ("-" digit (arbno digit)) number)
))
(define grammar-al
'((program
(expression)
a-program)
(expression
(number)
const-exp)
(expression
("-(" expression "," expression ")")
diff-exp)
(expression
("zero?" "(" expression ")")
zero?-exp)
(expression
("if" expression "then" expression "else" expression)
if-exp)
(expression
(identifier)
var-exp)
(expression
("let" (arbno identifier "=" expression) "in" expression)
let-exp)
(expression
("proc" "(" (arbno identifier) ")" expression)
proc-exp)
(expression
("(" expression (arbno expression) ")")
call-exp)
(expression
("letrec" (arbno identifier "(" (arbno identifier) ")" "=" expression) "in" expression)
letrec-exp)
(expression
("{" (arbno expression ";") "}")
compound-exp)
(expression
("set" identifier "=" expression)
set-exp)
))
(define list-the-datatypes
(lambda ()
(sllgen:list-define-datatypes scanner-spec-a grammar-al)))
(sllgen:make-define-datatypes scanner-spec-a grammar-al)
(define scan&parse
(sllgen:make-string-parser scanner-spec-a grammar-al))
(define interp-exps-ret-last
(lambda (exps env)
(cond ((null? exps)
(eopl:error 'compund-exps "no expression in block"))
((null? (cdr exps))
(interp-exp (car exps) env))
(else
(interp-exp (car exps) env)
(interp-exps-ret-last (cdr exps) env)))))
(define interp-rand-exp
(lambda (rand-exp env)
(cases expression rand-exp
(var-exp
(var)
var)
(else
(eopl:error 'interp-rand-exp "procedure parameter must be variables")))))
(define interp-exp
(lambda (exp env)
(cases
expression exp
(const-exp
(num)
num)
(diff-exp
(exp1 exp2)
(- (interp-exp exp1 env)
(interp-exp exp2 env)))
(zero?-exp
(exp)
(zero? (interp-exp exp env)))
(if-exp
(predicate sbj-exp else-exp)
(if (interp-exp predicate env)
(interp-exp sbj-exp env)
(interp-exp else-exp env)))
(var-exp
(var)
(deref (apply-env env var)))
(let-exp
(vars val-exps exp2)
(let* ((vals (map (lambda (val-exp)
(interp-exp val-exp env))
val-exps))
(new-env (extend-env vars
(newrefs vals)
env)))
(interp-exp exp2 new-env)))
(proc-exp
(vars body)
(closure vars body env))
(call-exp
(proc-exp rand-exps)
(let* ((proc (interp-exp proc-exp env))
(rand-vars (map (lambda (rand-exp)
(interp-rand-exp rand-exp env))
rand-exps))
(rand-refs (map (lambda (var)
(apply-env env var))
rand-vars))
(rand-vals (map (lambda (ref)
(deref ref))
rand-refs)))
(apply-proc proc rand-vals rand-refs)))
(letrec-exp
(p-names b-lst-of-vars b-bodies letrec-body)
(let ((new-env (extend-env-recursively p-names
b-lst-of-vars
b-bodies
env)))
(interp-exp letrec-body new-env)))
(compound-exp
(exps)
(interp-exps-ret-last exps env))
(set-exp
(var exp)
(setref! (apply-env env var)
(interp-exp exp env)))
)))
(define initial-env (empty-env))
(define interp
(lambda (datum)
(cases
program datum
(a-program
(exp)
(initialize-store!)
(interp-exp exp initial-env)))))
(define test-prog
(lambda (prog)
(display (interp (scan&parse prog)))
(newline)))
(define test-prog-eqv
(lambda (prog v)
(let ((interp-v (interp (scan&parse prog))))
(if (not (eq? interp-v v))
(eopl:error 'test-prog-eqv "value-of:~s is ~s not eq ~s" prog interp-v v)
'ok))))
(define (test)
(test-prog-eqv "let f = proc (x) -(x,11)
in let var1 = 77
in let var2 = (f var1)
in (f var2)"
55)
if it 's dynamic scoping , then the result will be 1 , else result will be
2 with lexical scoping
(test-prog-eqv "let f = let x = 3
in proc (y) -(y,x)
in let x = 4
y = 5
in (f y)"
2)
(test-prog-eqv "letrec
foo(a) = (bar a)
bar(b) = b
in let v = 3
in (foo v)"
3)
(test-prog-eqv "let f = proc (x y) -(x,y)
x = 10
y = 2
in (f x y)"
8)
(test-prog-eqv "letrec
foo() = (bar)
bar() = -(10,2)
in (foo)"
8)
(test-prog-eqv "{
}"
4)
(test-prog-eqv "let x = 0
in letrec even()
= if zero?(x)
then 1
else {
}
odd()
= if zero?(x)
then 0
else {
}
in {
}"
0)
(test-prog-eqv "let p = proc(x) set x = 4
in let a = 3
in {
}"
eopl 4.37
for call - by - value - result , result is 4
for call - by - reference , result is 3
(test-prog-eqv "let p = proc(x y) {
}
x = 0
in {
}"
4)
(display "finished test...")
)
|
ee4290db1155b44234f50a6e0ffb7cdaaf30d7d99df702548e086b7cbf205dc1 | wargrey/w3s | simplification.rkt | #lang typed/racket/base
;;; -20011203.html#simplification
(provide (all-defined-out))
(require digimon/filesystem)
(require "../relaxng.rkt")
(require "compact.rkt")
(require "schema.rkt")
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define rng-current-inherit : (Parameterof (Pairof Symbol (Option String))) (make-parameter (cons 'inherit #false)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define rnc-grammar-simplify : (->* (RNC-Grammar) ((Listof (Pairof Symbol String)) (Listof RNG-Grammar-Content)) RNG-Grammar)
(lambda [rnc [annotated-attributes null] [siblings null]]
(define grammars : (U Pattern (Listof Grammar-Content)) (rnc-grammar-body rnc))
(define namespaces : RNC-Preamble-Namespaces (rnc-grammar-namespaces rnc))
(define default-uri : (Option String) (rng-namespace-uri namespaces (rnc-grammar-default-namespace rnc)))
(define xmlns:ns : (Listof (Pairof Symbol String))
(for/fold ([xmlns : (Listof (Pairof Symbol String)) null])
([(ns uri) (in-hash namespaces)])
(define xmlns:ns : Symbol (string->symbol (format "xmlns:~a" ns)))
(define xmlns-uri : (Option String) (rng-namespace-uri namespaces uri))
(cond [(not xmlns-uri) xmlns]
[else (cons (cons xmlns:ns xmlns-uri) xmlns)])))
(define attributes : (Listof (Pairof Symbol String))
(append (cond [(not default-uri) (reverse xmlns:ns)]
[else (cons (cons 'ns default-uri) (reverse xmlns:ns))])
annotated-attributes))
(if (pattern? grammars)
(rng-pattern-simplify rnc grammars)
(rnc-grammars-simplify rnc grammars namespaces attributes siblings))))
;; Build an RNG grammar skeleton for an RNC file whose body is a single
;; top-level pattern.
;; NOTE(review): the [pattern] argument is currently ignored — the result
;; carries no attributes, children, start element or definitions. Confirm
;; whether top-level-pattern simplification is simply not implemented yet.
(define rng-pattern-simplify : (-> RNC-Grammar Pattern RNG-Grammar)
  (lambda [rnc pattern]
    (rng-grammar (rnc-grammar-tagname rnc) (rnc-grammar-location rnc)
                 null null #false null rng-empty-definitions)))
(define rnc-grammars-simplify : (-> RNC-Grammar (Listof Grammar-Content) RNC-Preamble-Namespaces
(Listof (Pairof Symbol String)) (Listof RNG-Grammar-Content)
RNG-Grammar)
(lambda [rnc grammars namespaces attributes siblings]
(define-values (?start defines children) (rng-grammar-contents-simplify (rnc-grammar-location rnc) grammars namespaces))
(rng-grammar (rnc-grammar-tagname rnc) (rnc-grammar-location rnc)
attributes children ?start siblings defines)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define rng-grammar-contents-simplify : (-> (U String Symbol) (Listof Grammar-Content) RNC-Preamble-Namespaces
(Values (Option RNG-Element) RNG-Definitions (Listof RNG-Grammar-Content)))
(lambda [source grammars namespaces]
(let simplify ([body : (Listof Grammar-Content) grammars]
[ydob : (Listof RNG-Grammar-Content) null]
[a:initial : (Option Annotation) #false]
[?start : (Option RNG-Element) #false]
[defines : RNG-Definitions rng-empty-definitions])
(cond [(null? body) (values ?start defines (reverse ydob))]
[else (let-values ([(self rest) (values (car body) (cdr body))])
(cond [(grammar-annotation? self)
(simplify rest (cons (rng-annotation-element->foreign-element (grammar-annotation-element self)) ydob) #false ?start defines)]
[($include? self)
(simplify rest (cons (rng-include->nested-grammar-div source self a:initial namespaces) ydob) #false ?start defines)]
[(a:content? self)
(simplify (cons (a:content-component self) rest) ydob (a:content-initial self) ?start defines)]
[($div? self) ; yes, ignore the initial annotation
(simplify (append ($div-contents self) rest) ydob #false ?start defines)]
[else (simplify rest ydob #false ?start defines)]))]))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define rng-include->nested-grammar-div : (-> (U Symbol String) $include (Option Annotation) RNC-Preamble-Namespaces RNG-Grammar)
(lambda [pwd self a:initial namespaces]
(define-values (a:attrs a:children)
(cond [(not a:initial) (values null null)]
[else (values (annotation-attributes a:initial)
(annotation-elements a:initial))]))
(define inherit ($include-inherit self))
(define-values (?start definitions siblings) (rng-grammar-contents-simplify pwd ($include-contents self) namespaces))
(define subrnc : RNG-Grammar
(parameterize ([rnc-shadow-start? (and ?start #true)]
[rnc-shadow-definitions (hash-keys definitions)]
[rng-current-inherit (if (not inherit) (rng-current-inherit) (cons inherit (hash-ref namespaces inherit (λ [] #false))))])
(rnc-grammar-simplify (read-rnc-grammar (build-requiring-path pwd ($include-href self)) #:tagname 'div)
a:attrs (append (map rng-annotation-element->foreign-element a:children) siblings))))
subrnc))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; The empty table of named pattern definitions.
(define rng-empty-definitions : RNG-Definitions #hasheq())
;; Resolve a namespace reference to a URI string: a literal string is already
;; a URI; a symbol is looked up (possibly transitively) in the preamble
;; namespace table; #false falls back to the URI of the currently inherited
;; namespace parameter.
(define rng-namespace-uri : (-> RNC-Preamble-Namespaces (U Symbol String False) (Option String))
  (lambda [ns uri]
    (cond [(string? uri) uri]
          [(symbol? uri) (rng-namespace-uri ns (hash-ref ns uri (λ [] #false)))]
          [else (cdr (rng-current-inherit))])))
| null | https://raw.githubusercontent.com/wargrey/w3s/8283bc69bbcde2c935e632e420147f0f65e8c89e/sgml/digitama/relaxng/simplification.rkt | racket | -20011203.html#simplification
yes, ignore the initial annotation
| #lang typed/racket/base
(provide (all-defined-out))
(require digimon/filesystem)
(require "../relaxng.rkt")
(require "compact.rkt")
(require "schema.rkt")
(define rng-current-inherit : (Parameterof (Pairof Symbol (Option String))) (make-parameter (cons 'inherit #false)))
(define rnc-grammar-simplify : (->* (RNC-Grammar) ((Listof (Pairof Symbol String)) (Listof RNG-Grammar-Content)) RNG-Grammar)
(lambda [rnc [annotated-attributes null] [siblings null]]
(define grammars : (U Pattern (Listof Grammar-Content)) (rnc-grammar-body rnc))
(define namespaces : RNC-Preamble-Namespaces (rnc-grammar-namespaces rnc))
(define default-uri : (Option String) (rng-namespace-uri namespaces (rnc-grammar-default-namespace rnc)))
(define xmlns:ns : (Listof (Pairof Symbol String))
(for/fold ([xmlns : (Listof (Pairof Symbol String)) null])
([(ns uri) (in-hash namespaces)])
(define xmlns:ns : Symbol (string->symbol (format "xmlns:~a" ns)))
(define xmlns-uri : (Option String) (rng-namespace-uri namespaces uri))
(cond [(not xmlns-uri) xmlns]
[else (cons (cons xmlns:ns xmlns-uri) xmlns)])))
(define attributes : (Listof (Pairof Symbol String))
(append (cond [(not default-uri) (reverse xmlns:ns)]
[else (cons (cons 'ns default-uri) (reverse xmlns:ns))])
annotated-attributes))
(if (pattern? grammars)
(rng-pattern-simplify rnc grammars)
(rnc-grammars-simplify rnc grammars namespaces attributes siblings))))
(define rng-pattern-simplify : (-> RNC-Grammar Pattern RNG-Grammar)
(lambda [rnc pattern]
(rng-grammar (rnc-grammar-tagname rnc) (rnc-grammar-location rnc)
null null #false null rng-empty-definitions)))
(define rnc-grammars-simplify : (-> RNC-Grammar (Listof Grammar-Content) RNC-Preamble-Namespaces
(Listof (Pairof Symbol String)) (Listof RNG-Grammar-Content)
RNG-Grammar)
(lambda [rnc grammars namespaces attributes siblings]
(define-values (?start defines children) (rng-grammar-contents-simplify (rnc-grammar-location rnc) grammars namespaces))
(rng-grammar (rnc-grammar-tagname rnc) (rnc-grammar-location rnc)
attributes children ?start siblings defines)))
(define rng-grammar-contents-simplify : (-> (U String Symbol) (Listof Grammar-Content) RNC-Preamble-Namespaces
(Values (Option RNG-Element) RNG-Definitions (Listof RNG-Grammar-Content)))
(lambda [source grammars namespaces]
(let simplify ([body : (Listof Grammar-Content) grammars]
[ydob : (Listof RNG-Grammar-Content) null]
[a:initial : (Option Annotation) #false]
[?start : (Option RNG-Element) #false]
[defines : RNG-Definitions rng-empty-definitions])
(cond [(null? body) (values ?start defines (reverse ydob))]
[else (let-values ([(self rest) (values (car body) (cdr body))])
(cond [(grammar-annotation? self)
(simplify rest (cons (rng-annotation-element->foreign-element (grammar-annotation-element self)) ydob) #false ?start defines)]
[($include? self)
(simplify rest (cons (rng-include->nested-grammar-div source self a:initial namespaces) ydob) #false ?start defines)]
[(a:content? self)
(simplify (cons (a:content-component self) rest) ydob (a:content-initial self) ?start defines)]
(simplify (append ($div-contents self) rest) ydob #false ?start defines)]
[else (simplify rest ydob #false ?start defines)]))]))))
(define rng-include->nested-grammar-div : (-> (U Symbol String) $include (Option Annotation) RNC-Preamble-Namespaces RNG-Grammar)
(lambda [pwd self a:initial namespaces]
(define-values (a:attrs a:children)
(cond [(not a:initial) (values null null)]
[else (values (annotation-attributes a:initial)
(annotation-elements a:initial))]))
(define inherit ($include-inherit self))
(define-values (?start definitions siblings) (rng-grammar-contents-simplify pwd ($include-contents self) namespaces))
(define subrnc : RNG-Grammar
(parameterize ([rnc-shadow-start? (and ?start #true)]
[rnc-shadow-definitions (hash-keys definitions)]
[rng-current-inherit (if (not inherit) (rng-current-inherit) (cons inherit (hash-ref namespaces inherit (λ [] #false))))])
(rnc-grammar-simplify (read-rnc-grammar (build-requiring-path pwd ($include-href self)) #:tagname 'div)
a:attrs (append (map rng-annotation-element->foreign-element a:children) siblings))))
subrnc))
(define rng-empty-definitions : RNG-Definitions #hasheq())
(define rng-namespace-uri : (-> RNC-Preamble-Namespaces (U Symbol String False) (Option String))
(lambda [ns uri]
(cond [(string? uri) uri]
[(symbol? uri) (rng-namespace-uri ns (hash-ref ns uri (λ [] #false)))]
[else (cdr (rng-current-inherit))])))
|
fc926831c232be6f74e5909773897043be26a642b0dd0c271ad8832a84f21081 | amitrathore/remember | file_store.clj | (ns org.rathore.amit.remember.file-store
(:use org.rathore.amit.remember.core)
(:import (org.jets3t.service.model S3Object))
(:import (org.jets3t.service.acl AccessControlList)))
(defn store-file-in-bucket
  "Upload the local file FILENAME into the S3 bucket named BUCKET-NAME
  (via jets3t), marking the object with the public-read canned ACL.
  An optional :key sets the object key explicitly; when absent the key
  presumably comes from the file itself — confirm against jets3t docs.
  Returns whatever put-object returns."
  [bucket-name filename
   & {:keys [key]}]
  (let [bucket (get-bucket bucket-name)
        data (java.io.File. filename)
        s3-object (S3Object. bucket data)
        acl AccessControlList/REST_CANNED_PUBLIC_READ]
    (.setAcl s3-object acl)
    (if key (.setKey s3-object key))
    (put-object bucket s3-object)))
| null | https://raw.githubusercontent.com/amitrathore/remember/c16898954b1708224cf833b195735a2123fcd52a/src/org/rathore/amit/remember/file_store.clj | clojure | (ns org.rathore.amit.remember.file-store
(:use org.rathore.amit.remember.core)
(:import (org.jets3t.service.model S3Object))
(:import (org.jets3t.service.acl AccessControlList)))
(defn store-file-in-bucket
[bucket-name filename
& {:keys [key]}]
(let [bucket (get-bucket bucket-name)
data (java.io.File. filename)
s3-object (S3Object. bucket data)
acl AccessControlList/REST_CANNED_PUBLIC_READ]
(.setAcl s3-object acl)
(if key (.setKey s3-object key))
(put-object bucket s3-object)))
| |
245df9d96b4332eb29a030e1cfa8cd29a2b1fc262e48fad089561df9b92bc0a9 | goldfirere/th-desugar | T158Exp.hs | # LANGUAGE MagicHash #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -Wno - incomplete - patterns #
| A regression test for # 158 which ensures that lambda expressions
-- containing patterns with unlifted types desugar as expected. We define this
test in its own module , without UnboxedTuples enabled , to ensure that users
-- do not have to enable the extension themselves.
module T158Exp where
import Language.Haskell.TH.Desugar
t158 :: ()
t158 =
$([| (\27# 42# -> ()) 27# 42# |] >>= dsExp >>= return . expToTH)
| null | https://raw.githubusercontent.com/goldfirere/th-desugar/d0041788759da2f8e2448c6c87d8c6334dc12838/Test/T158Exp.hs | haskell | containing patterns with unlifted types desugar as expected. We define this
do not have to enable the extension themselves. | # LANGUAGE MagicHash #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -Wno - incomplete - patterns #
| A regression test for # 158 which ensures that lambda expressions
test in its own module , without UnboxedTuples enabled , to ensure that users
module T158Exp where
import Language.Haskell.TH.Desugar
t158 :: ()
t158 =
$([| (\27# 42# -> ()) 27# 42# |] >>= dsExp >>= return . expToTH)
|
c332da02f9b9723bc99e9dae27223f28223e6e1185b03e9f82cb80f732206011 | ocurrent/ocaml-multicore-ci | pipeline.ml | open Current.Syntax
open Ocaml_multicore_ci
open Pipeline_utils
module Git = Current_git
module Github = Current_github
module Docker = Current_docker.Default
module Map = Map.Make (String)
let tidy_label label =
Fmt.str "%a" Fmt.(list string) (String.split_on_char '@' label)
let tidy_label_opt = function
| None -> None
| Some label -> Some (tidy_label label)
let is_compiler_from_repo_url (conf : Conf.conf) repo_url =
let package_name = Repo_url_utils.package_name_from_url repo_url in
conf.is_compiler_package package_name
let is_compiler_blocklisted (conf : Conf.conf) ov repo_url =
let package_name = Repo_url_utils.package_name_from_url repo_url in
conf.is_compiler_blocklisted ov package_name
let gref_to_version gref =
let open Ocaml_version in
match of_string gref with
| Ok v -> v
| _ -> Ocaml_version.of_string_exn "4.12"
let platforms =
let schedule = monthly in
let v { Conf.label; builder; pool; distro; ocaml_version; arch; opam_version }
=
let base =
Platform.pull ~arch ~schedule ~builder ~distro ~ocaml_version
~opam_version
in
let host_base =
match arch with
| `X86_64 -> base
| _ ->
Platform.pull ~arch:`X86_64 ~schedule ~builder ~distro ~ocaml_version
~opam_version
in
Platform.get ~arch ~label ~builder ~pool ~distro ~ocaml_version ~host_base
~opam_version base
in
let v2_1 = Conf.platforms `V2_1 in
Current.list_seq (List.map v v2_1)
let get_job_id x =
let+ md = Current.Analysis.metadata x in
match md with Some { Current.Metadata.job_id; _ } -> job_id | None -> None
let remove_version_re = Str.regexp "\\..*$"
let build_mechanism_for_selection ~selection ~(conf : Conf.conf) =
let mechanisms =
selection.Selection.packages
|> List.map (fun package ->
let package_raw = Str.global_replace remove_version_re "" package in
(package, conf.build_mechanism_for_package package_raw))
in
let _, others =
mechanisms |> List.partition (fun (_, mechanism) -> mechanism = `Build)
in
match others with
| [] -> `Build
| [ (_, (`Make _ as mech)) ] -> mech
| [ (_, (`Script _ as mech)) ] -> mech
| _ -> `Build
let selection_to_opam_spec ~analysis ~conf selection =
let label = Variant.to_string selection.Selection.variant in
let build_mechanism = build_mechanism_for_selection ~selection ~conf in
Spec.opam ~label ~selection ~analysis build_mechanism
let package_and_selection_to_opam_spec ~analysis ~package ~(conf : Conf.conf)
selection =
let label = Variant.to_string selection.Selection.variant in
let build_mechanism = conf.build_mechanism_for_package package in
Spec.opam ~label ~selection ~analysis build_mechanism
let make_opam_specs ~conf analysis =
match Analyse.Analysis.selections analysis with
| `Not_opam (package, selections) ->
selections
|> List.map (package_and_selection_to_opam_spec ~analysis ~package ~conf)
| `Opam_monorepo config ->
let lint_selection = Opam_monorepo.selection_of_config config in
[
Spec.opam ~label:"(lint-fmt)" ~selection:lint_selection ~analysis
(`Lint `Fmt);
Spec.opam_monorepo ~config;
]
| `Opam_build selections ->
(* let lint_selection = List.hd selections in*)
let builds =
selections |> List.map (selection_to_opam_spec ~analysis ~conf)
and lint =
[ (* Spec.opam ~label:"(lint-fmt)" ~selection:lint_selection ~analysis (`Lint `Fmt);*)
(* Spec.opam ~label:"(lint-doc)" ~selection:lint_selection ~analysis (`Lint `Doc);*)
(* Spec.opam ~label:"(lint-opam)" ~selection:lint_selection ~analysis (`Lint `Opam);*) ]
in
lint @ builds
let place_build ~ocluster ~repo ?test_repo ?compiler_commit ?sandmark_package
~source spec =
let+ result =
match ocluster with
| None -> Build.v ~platforms ~repo ?test_repo ?compiler_commit ~spec source
| Some ocluster ->
let src = Current.map Git.Commit.id source in
let compiler_commit_id =
Option.map (fun c -> Current.map Git.Commit.id c) compiler_commit
in
Cluster_build.v ocluster ~platforms ~repo ?test_repo
?compiler_commit:compiler_commit_id ?sandmark_package ~spec src
and+ spec = spec in
(Spec.label spec, result)
let place_builds ?ocluster ~repo ?test_repo ?compiler_gref ?compiler_commit
?label ?sandmark_package ~analysis ~conf source =
Current.with_context analysis @@ fun () ->
let specs =
let+ analysis = Current.state ~hidden:true analysis in
match analysis with
| Error _ ->
(* If we don't have the analysis yet, just use the empty list. *)
[]
| Ok analysis -> make_opam_specs ~conf analysis
in
let label = tidy_label_opt label in
let+ builds =
specs
|> Current.list_map ?label
(module Spec)
(place_build ~ocluster ~repo ?test_repo ?compiler_commit
?sandmark_package ~source)
and+ analysis_result =
Current.state ~hidden:true (Current.map (fun _ -> `Checked) analysis)
and+ analysis_id = get_job_id analysis in
(builds |> List.map (fun (l, r) -> (l, compiler_gref, r)))
@ [ ("(analysis)", None, (analysis_result, analysis_id)) ]
let analysis_component ?label ?sandmark_package ~solver ~is_compiler
~(conf : Conf.conf) commit =
let opam_repository_commits = conf.opam_repository_commits in
Analyse.examine ?sandmark_package ?label ~solver ~platforms
~opam_repository_commits ~is_compiler commit
let analysis_with_compiler_component ?label ?sandmark_package ~solver
~compiler_commit ~(conf : Conf.conf) commit =
let opam_repository_commits = conf.opam_repository_commits in
Analyse.examine_with_compiler ?sandmark_package ?label ~solver ~platforms
~opam_repository_commits ~compiler_commit commit
let build_from_clone_component ?compiler_commit repo_clone =
let repo_url, commit = repo_clone in
let repo_url, _ = Repo_url_utils.url_gref_from_url repo_url in
Build_from_clone_component.v ~repo_url ?compiler_commit commit
let cascade_component ~build (commit : Git.Commit.t Current.t) =
Current.component "cascade"
|> let> commit = commit and> _ = build in
Current.Primitive.const commit
let local_test ?label ~solver repo () =
let src = Git.Local.head_commit repo in
let repo = Current.return { Github.Repo_id.owner = "local"; name = "test" } in
let repo_str = Current.map (Fmt.to_to_string Github.Repo_id.pp) repo in
let get_is_compiler_blocklisted _ _ = false in
let conf = Conf.default_conf in
let analysis =
analysis_component ?label ~solver ~is_compiler:false
~get_is_compiler_blocklisted ~repo:repo_str ~conf src
in
Current.component "summarise"
|> let> results =
place_builds ~repo:repo_str ?label ~analysis ~conf:Conf.default_conf src
in
let result = summarise_builds results in
Current_incr.const (result, None)
let local_test_multiple ~solver repos () =
repos
|> List.map (fun repo ->
let label = Git.Local.repo repo |> Fpath.basename in
local_test ~label ~solver repo ())
|> Current.all
let clone_fixed_repos fixed_repos : (string * Git.Commit.t Current.t) list =
let repos_by_owner = fixed_repos |> index_by_owner |> Owner_map.bindings in
repos_by_owner |> List.split |> fst |> set_active_owners;
repos_by_owner
|> List.map (fun (owner, repo_names_urls) ->
let repo_names, repo_urls = repo_names_urls |> List.split in
set_active_repo_names ~owner repo_names;
repo_urls
|> List.map (fun repo_url ->
let url, gref = Repo_url_utils.url_gref_from_url repo_url in
(repo_url, Git.clone ~schedule:daily ~gref url)))
|> List.flatten
let analyse_build_summarise ?ocluster ?sandmark_package ~solver ~repo
~is_compiler ?compiler_gref ?compiler_commit ?label ~conf commit =
let is_compiler_blocklisted = is_compiler_blocklisted conf in
let analysis =
analysis_component ~solver ?label ?sandmark_package ~is_compiler
~get_is_compiler_blocklisted:is_compiler_blocklisted ~repo ~conf commit
in
let builds =
place_builds ?ocluster ~repo ?compiler_gref ?compiler_commit ?label
?sandmark_package ~analysis ~conf commit
in
(builds, summarise_builds_current builds)
let build_from_clone_with_compiler ?ocluster ?sandmark_package ~solver
?compiler_commit ~conf repo_clone =
let repo_url, _ = repo_clone in
let commit = build_from_clone_component ?compiler_commit repo_clone in
let hash = Current.map Git.Commit.hash commit in
let label = Repo_url_utils.owner_name_gref_from_url repo_url in
let is_compiler = is_compiler_from_repo_url conf repo_url in
let builds, summary =
analyse_build_summarise ?ocluster ?sandmark_package ~solver ~is_compiler
?compiler_commit ~label ~repo:(Current.return repo_url) ~conf commit
in
let recorded_builds = record_builds ~repo_url ~hash ~builds ~summary in
(commit, recorded_builds)
let build_with_compiler ?ocluster ?sandmark_package ~solver ~compiler_gref
~compiler_commit ?label ~repo_url ~conf commit =
let hash = Current.map Git.Commit.hash commit in
let cache_hint =
Current.map (fun c -> Git.Commit_id.repo (Git.Commit.id c)) compiler_commit
in
let compiler_commit_id = Current.map Git.Commit.id compiler_commit in
let analysis =
analysis_with_compiler_component ~solver ?label ?sandmark_package
~compiler_commit:compiler_commit_id ~conf commit
in
let builds =
place_builds ?ocluster ~repo:cache_hint ~test_repo:repo_url ~compiler_gref
~compiler_commit ?label ?sandmark_package ~analysis ~conf commit
in
let summary = summarise_builds_current builds in
let recorded_builds = record_builds ~repo_url ~hash ~builds ~summary in
Current.ignore_value recorded_builds
let build_from_clone ?ocluster ?sandmark_package ~solver ~(conf : Conf.conf)
(repo_clone : string * Git.Commit.t Current.t) =
let repo_url, commit = repo_clone in
if is_compiler_from_repo_url conf repo_url then
let compiler_commit, compiler_build =
build_from_clone_with_compiler ?ocluster ~solver ~conf
~compiler_commit:commit repo_clone
in
let _, compiler_gref = Repo_url_utils.url_gref_from_url repo_url in
let compiler_version = gref_to_version compiler_gref in
let compiler_commit =
cascade_component ~build:compiler_build compiler_commit
in
let downstream_builds =
clone_fixed_repos conf.fixed_repos
|> List.filter_map (fun child_repo_clone ->
let child_repo_url, child_commit = child_repo_clone in
if is_compiler_from_repo_url conf child_repo_url then None
else if
is_compiler_blocklisted conf compiler_version child_repo_url
then None
else
let label =
Fmt.str "%s@ (%s)" (tidy_label child_repo_url) compiler_gref
in
Some
(build_with_compiler ?ocluster ~solver ~compiler_gref
~compiler_commit ~label ~repo_url:child_repo_url ~conf
child_commit))
in
Current.all downstream_builds
else if Conf.is_sandmark repo_url then
let packages = Sandmark_packages.v ~repo_url commit in
let compiler_commit =
Git.clone ~schedule:daily ~gref:"trunk"
""
in
Current.component "cascade"
|> let** packages = packages in
packages
|> List.filter (fun package ->
not (Conf.is_skipped_sandmark_package package))
|> List.map (fun package ->
let build =
build_with_compiler ?ocluster ~solver
?sandmark_package:(Some package) ~compiler_gref:"trunk"
~compiler_commit ~repo_url ~conf commit
in
Current.ignore_value build)
|> Current.all
else
let _, build =
build_from_clone_with_compiler ?ocluster ?sandmark_package ~solver ~conf
repo_clone
in
Current.ignore_value build
let v ?ocluster ~solver ~confs () =
let ocluster =
Option.map (Cluster_build.config ~timeout:(Duration.of_hour 7)) ocluster
in
Current.with_context platforms @@ fun () ->
confs
|> List.map (fun (conf : Conf.conf) ->
Current.with_context conf.opam_repository_commits @@ fun () ->
clone_fixed_repos conf.fixed_repos
|> List.map (build_from_clone ?ocluster ~solver ~conf)
|> Current.all)
|> Current.all
let local_test_fixed ~solver confs () : unit Current.t =
Current.with_context platforms @@ fun () ->
confs
|> List.map (fun (conf : Conf.conf) ->
Current.with_context conf.opam_repository_commits @@ fun () ->
clone_fixed_repos conf.fixed_repos
|> List.map (build_from_clone ~solver ~conf)
|> Current.all)
|> Current.all
| null | https://raw.githubusercontent.com/ocurrent/ocaml-multicore-ci/d46eecaf7269283a4b95ee40d2a9d6c7ec34a7bf/service/pipeline.ml | ocaml | let lint_selection = List.hd selections in
Spec.opam ~label:"(lint-fmt)" ~selection:lint_selection ~analysis (`Lint `Fmt);
Spec.opam ~label:"(lint-doc)" ~selection:lint_selection ~analysis (`Lint `Doc);
Spec.opam ~label:"(lint-opam)" ~selection:lint_selection ~analysis (`Lint `Opam);
If we don't have the analysis yet, just use the empty list. | open Current.Syntax
open Ocaml_multicore_ci
open Pipeline_utils
module Git = Current_git
module Github = Current_github
module Docker = Current_docker.Default
module Map = Map.Make (String)
let tidy_label label =
Fmt.str "%a" Fmt.(list string) (String.split_on_char '@' label)
let tidy_label_opt = function
| None -> None
| Some label -> Some (tidy_label label)
let is_compiler_from_repo_url (conf : Conf.conf) repo_url =
let package_name = Repo_url_utils.package_name_from_url repo_url in
conf.is_compiler_package package_name
let is_compiler_blocklisted (conf : Conf.conf) ov repo_url =
let package_name = Repo_url_utils.package_name_from_url repo_url in
conf.is_compiler_blocklisted ov package_name
let gref_to_version gref =
let open Ocaml_version in
match of_string gref with
| Ok v -> v
| _ -> Ocaml_version.of_string_exn "4.12"
let platforms =
let schedule = monthly in
let v { Conf.label; builder; pool; distro; ocaml_version; arch; opam_version }
=
let base =
Platform.pull ~arch ~schedule ~builder ~distro ~ocaml_version
~opam_version
in
let host_base =
match arch with
| `X86_64 -> base
| _ ->
Platform.pull ~arch:`X86_64 ~schedule ~builder ~distro ~ocaml_version
~opam_version
in
Platform.get ~arch ~label ~builder ~pool ~distro ~ocaml_version ~host_base
~opam_version base
in
let v2_1 = Conf.platforms `V2_1 in
Current.list_seq (List.map v v2_1)
let get_job_id x =
let+ md = Current.Analysis.metadata x in
match md with Some { Current.Metadata.job_id; _ } -> job_id | None -> None
let remove_version_re = Str.regexp "\\..*$"
let build_mechanism_for_selection ~selection ~(conf : Conf.conf) =
let mechanisms =
selection.Selection.packages
|> List.map (fun package ->
let package_raw = Str.global_replace remove_version_re "" package in
(package, conf.build_mechanism_for_package package_raw))
in
let _, others =
mechanisms |> List.partition (fun (_, mechanism) -> mechanism = `Build)
in
match others with
| [] -> `Build
| [ (_, (`Make _ as mech)) ] -> mech
| [ (_, (`Script _ as mech)) ] -> mech
| _ -> `Build
let selection_to_opam_spec ~analysis ~conf selection =
let label = Variant.to_string selection.Selection.variant in
let build_mechanism = build_mechanism_for_selection ~selection ~conf in
Spec.opam ~label ~selection ~analysis build_mechanism
let package_and_selection_to_opam_spec ~analysis ~package ~(conf : Conf.conf)
selection =
let label = Variant.to_string selection.Selection.variant in
let build_mechanism = conf.build_mechanism_for_package package in
Spec.opam ~label ~selection ~analysis build_mechanism
let make_opam_specs ~conf analysis =
match Analyse.Analysis.selections analysis with
| `Not_opam (package, selections) ->
selections
|> List.map (package_and_selection_to_opam_spec ~analysis ~package ~conf)
| `Opam_monorepo config ->
let lint_selection = Opam_monorepo.selection_of_config config in
[
Spec.opam ~label:"(lint-fmt)" ~selection:lint_selection ~analysis
(`Lint `Fmt);
Spec.opam_monorepo ~config;
]
| `Opam_build selections ->
let builds =
selections |> List.map (selection_to_opam_spec ~analysis ~conf)
and lint =
in
lint @ builds
let place_build ~ocluster ~repo ?test_repo ?compiler_commit ?sandmark_package
~source spec =
let+ result =
match ocluster with
| None -> Build.v ~platforms ~repo ?test_repo ?compiler_commit ~spec source
| Some ocluster ->
let src = Current.map Git.Commit.id source in
let compiler_commit_id =
Option.map (fun c -> Current.map Git.Commit.id c) compiler_commit
in
Cluster_build.v ocluster ~platforms ~repo ?test_repo
?compiler_commit:compiler_commit_id ?sandmark_package ~spec src
and+ spec = spec in
(Spec.label spec, result)
let place_builds ?ocluster ~repo ?test_repo ?compiler_gref ?compiler_commit
?label ?sandmark_package ~analysis ~conf source =
Current.with_context analysis @@ fun () ->
let specs =
let+ analysis = Current.state ~hidden:true analysis in
match analysis with
| Error _ ->
[]
| Ok analysis -> make_opam_specs ~conf analysis
in
let label = tidy_label_opt label in
let+ builds =
specs
|> Current.list_map ?label
(module Spec)
(place_build ~ocluster ~repo ?test_repo ?compiler_commit
?sandmark_package ~source)
and+ analysis_result =
Current.state ~hidden:true (Current.map (fun _ -> `Checked) analysis)
and+ analysis_id = get_job_id analysis in
(builds |> List.map (fun (l, r) -> (l, compiler_gref, r)))
@ [ ("(analysis)", None, (analysis_result, analysis_id)) ]
let analysis_component ?label ?sandmark_package ~solver ~is_compiler
~(conf : Conf.conf) commit =
let opam_repository_commits = conf.opam_repository_commits in
Analyse.examine ?sandmark_package ?label ~solver ~platforms
~opam_repository_commits ~is_compiler commit
let analysis_with_compiler_component ?label ?sandmark_package ~solver
~compiler_commit ~(conf : Conf.conf) commit =
let opam_repository_commits = conf.opam_repository_commits in
Analyse.examine_with_compiler ?sandmark_package ?label ~solver ~platforms
~opam_repository_commits ~compiler_commit commit
let build_from_clone_component ?compiler_commit repo_clone =
let repo_url, commit = repo_clone in
let repo_url, _ = Repo_url_utils.url_gref_from_url repo_url in
Build_from_clone_component.v ~repo_url ?compiler_commit commit
let cascade_component ~build (commit : Git.Commit.t Current.t) =
Current.component "cascade"
|> let> commit = commit and> _ = build in
Current.Primitive.const commit
let local_test ?label ~solver repo () =
let src = Git.Local.head_commit repo in
let repo = Current.return { Github.Repo_id.owner = "local"; name = "test" } in
let repo_str = Current.map (Fmt.to_to_string Github.Repo_id.pp) repo in
let get_is_compiler_blocklisted _ _ = false in
let conf = Conf.default_conf in
let analysis =
analysis_component ?label ~solver ~is_compiler:false
~get_is_compiler_blocklisted ~repo:repo_str ~conf src
in
Current.component "summarise"
|> let> results =
place_builds ~repo:repo_str ?label ~analysis ~conf:Conf.default_conf src
in
let result = summarise_builds results in
Current_incr.const (result, None)
let local_test_multiple ~solver repos () =
repos
|> List.map (fun repo ->
let label = Git.Local.repo repo |> Fpath.basename in
local_test ~label ~solver repo ())
|> Current.all
let clone_fixed_repos fixed_repos : (string * Git.Commit.t Current.t) list =
let repos_by_owner = fixed_repos |> index_by_owner |> Owner_map.bindings in
repos_by_owner |> List.split |> fst |> set_active_owners;
repos_by_owner
|> List.map (fun (owner, repo_names_urls) ->
let repo_names, repo_urls = repo_names_urls |> List.split in
set_active_repo_names ~owner repo_names;
repo_urls
|> List.map (fun repo_url ->
let url, gref = Repo_url_utils.url_gref_from_url repo_url in
(repo_url, Git.clone ~schedule:daily ~gref url)))
|> List.flatten
let analyse_build_summarise ?ocluster ?sandmark_package ~solver ~repo
~is_compiler ?compiler_gref ?compiler_commit ?label ~conf commit =
let is_compiler_blocklisted = is_compiler_blocklisted conf in
let analysis =
analysis_component ~solver ?label ?sandmark_package ~is_compiler
~get_is_compiler_blocklisted:is_compiler_blocklisted ~repo ~conf commit
in
let builds =
place_builds ?ocluster ~repo ?compiler_gref ?compiler_commit ?label
?sandmark_package ~analysis ~conf commit
in
(builds, summarise_builds_current builds)
let build_from_clone_with_compiler ?ocluster ?sandmark_package ~solver
?compiler_commit ~conf repo_clone =
let repo_url, _ = repo_clone in
let commit = build_from_clone_component ?compiler_commit repo_clone in
let hash = Current.map Git.Commit.hash commit in
let label = Repo_url_utils.owner_name_gref_from_url repo_url in
let is_compiler = is_compiler_from_repo_url conf repo_url in
let builds, summary =
analyse_build_summarise ?ocluster ?sandmark_package ~solver ~is_compiler
?compiler_commit ~label ~repo:(Current.return repo_url) ~conf commit
in
let recorded_builds = record_builds ~repo_url ~hash ~builds ~summary in
(commit, recorded_builds)
let build_with_compiler ?ocluster ?sandmark_package ~solver ~compiler_gref
~compiler_commit ?label ~repo_url ~conf commit =
let hash = Current.map Git.Commit.hash commit in
let cache_hint =
Current.map (fun c -> Git.Commit_id.repo (Git.Commit.id c)) compiler_commit
in
let compiler_commit_id = Current.map Git.Commit.id compiler_commit in
let analysis =
analysis_with_compiler_component ~solver ?label ?sandmark_package
~compiler_commit:compiler_commit_id ~conf commit
in
let builds =
place_builds ?ocluster ~repo:cache_hint ~test_repo:repo_url ~compiler_gref
~compiler_commit ?label ?sandmark_package ~analysis ~conf commit
in
let summary = summarise_builds_current builds in
let recorded_builds = record_builds ~repo_url ~hash ~builds ~summary in
Current.ignore_value recorded_builds
let build_from_clone ?ocluster ?sandmark_package ~solver ~(conf : Conf.conf)
(repo_clone : string * Git.Commit.t Current.t) =
let repo_url, commit = repo_clone in
if is_compiler_from_repo_url conf repo_url then
let compiler_commit, compiler_build =
build_from_clone_with_compiler ?ocluster ~solver ~conf
~compiler_commit:commit repo_clone
in
let _, compiler_gref = Repo_url_utils.url_gref_from_url repo_url in
let compiler_version = gref_to_version compiler_gref in
let compiler_commit =
cascade_component ~build:compiler_build compiler_commit
in
let downstream_builds =
clone_fixed_repos conf.fixed_repos
|> List.filter_map (fun child_repo_clone ->
let child_repo_url, child_commit = child_repo_clone in
if is_compiler_from_repo_url conf child_repo_url then None
else if
is_compiler_blocklisted conf compiler_version child_repo_url
then None
else
let label =
Fmt.str "%s@ (%s)" (tidy_label child_repo_url) compiler_gref
in
Some
(build_with_compiler ?ocluster ~solver ~compiler_gref
~compiler_commit ~label ~repo_url:child_repo_url ~conf
child_commit))
in
Current.all downstream_builds
else if Conf.is_sandmark repo_url then
let packages = Sandmark_packages.v ~repo_url commit in
let compiler_commit =
Git.clone ~schedule:daily ~gref:"trunk"
""
in
Current.component "cascade"
|> let** packages = packages in
packages
|> List.filter (fun package ->
not (Conf.is_skipped_sandmark_package package))
|> List.map (fun package ->
let build =
build_with_compiler ?ocluster ~solver
?sandmark_package:(Some package) ~compiler_gref:"trunk"
~compiler_commit ~repo_url ~conf commit
in
Current.ignore_value build)
|> Current.all
else
let _, build =
build_from_clone_with_compiler ?ocluster ?sandmark_package ~solver ~conf
repo_clone
in
Current.ignore_value build
let v ?ocluster ~solver ~confs () =
let ocluster =
Option.map (Cluster_build.config ~timeout:(Duration.of_hour 7)) ocluster
in
Current.with_context platforms @@ fun () ->
confs
|> List.map (fun (conf : Conf.conf) ->
Current.with_context conf.opam_repository_commits @@ fun () ->
clone_fixed_repos conf.fixed_repos
|> List.map (build_from_clone ?ocluster ~solver ~conf)
|> Current.all)
|> Current.all
let local_test_fixed ~solver confs () : unit Current.t =
Current.with_context platforms @@ fun () ->
confs
|> List.map (fun (conf : Conf.conf) ->
Current.with_context conf.opam_repository_commits @@ fun () ->
clone_fixed_repos conf.fixed_repos
|> List.map (build_from_clone ~solver ~conf)
|> Current.all)
|> Current.all
|
097cd983ee0cf60c974d49dd9cb996869cb67b17934ba85a3d7f8eaf7a205ddd | pupeno/ninjatools | tools.cljs | Copyright © 2015 Carousel Apps , Ltd. All rights reserved .
(ns ninjatools.tools
(:require [clojure.string :as s]
[clojure.walk :as walk]
[reagent.ratom :as ratom :include-macros true]
[re-frame.core :as re-frame]
[ajax.core :as ajax]
[ninjatools.layout :as layout]
[ninjatools.routing :as routing]
[ninjatools.ui :as ui]
[ninjatools.util :as util :refer [dissoc-in println-ret]]))
(defmethod routing/display-page :home [_current-route db]
(when (empty? (get-in db [:tools :by-id]))
(re-frame/dispatch [:get-tools])
(re-frame/dispatch [:get-features])
(re-frame/dispatch [:get-used-tools])
(re-frame/dispatch [:get-wanted-features]))
db)
(defmethod routing/display-page :tools [_current-route db]
(when (empty? (get-in db [:tools :by-id]))
(re-frame/dispatch [:get-tools]))
db)
(defn add-tool [db tool]
(-> db
(assoc-in [:tools :by-id (:id tool)] tool)
(assoc-in [:tools :by-slug (:slug tool)] tool)))
(re-frame/register-handler
:get-tools
(fn [db [_]]
(ajax/GET "/api/v1/tools"
{:handler #(re-frame/dispatch [:got-tools %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-tools
(fn [db [_ tools]]
(let [tools (map walk/keywordize-keys tools)]
(reduce add-tool db tools))))
(re-frame/register-sub
:tools
(fn [db _]
(ratom/reaction (:tools @db))))
(defn add-feature [db feature]
(-> db
(assoc-in [:features :by-id (:id feature)] feature)
(assoc-in [:features :by-slug (:id feature)] feature)))
(re-frame/register-handler
:get-features
(fn [db [_]]
(ajax/GET "/api/v1/features"
{:handler #(re-frame/dispatch [:got-features (walk/keywordize-keys %1)])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-features
(fn [db [_ features]]
(reduce add-feature db features)))
(re-frame/register-sub
:features
(fn [db _]
(ratom/reaction (:features @db))))
(re-frame/register-handler
:get-used-tools
(fn [db [_]]
(ajax/GET "/api/v1/used-tools"
{:handler #(re-frame/dispatch [:got-used-tools %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-used-tools
(fn [db [_ used-tools]]
(re-frame/dispatch [:get-suggested-tools])
(assoc db :used-tools (set used-tools))))
(re-frame/register-handler
:mark-tool-as-used
(fn [db [_ tool-id]]
(let [db (update-in db [:used-tools] #(conj (or %1 #{}) %2) tool-id)]
(ajax/PUT "/api/v1/used-tools"
{:params tool-id
:handler #(re-frame/dispatch [:got-used-tools %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-handler
:mark-tool-as-unused
(fn [db [_ tool-id]]
(let [db (update-in db [:used-tools] disj tool-id)]
(ajax/DELETE (str "/api/v1/used-tools/" tool-id)
{:handler #(re-frame/dispatch [:got-used-tools %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-sub
:current-available-tools
(fn [db _]
(ratom/reaction
(if (:tools @db)
(let [tools-not-in-use (filter #(not (contains? (:used-tools @db) (:id %)))
(vals (:by-id (:tools @db))))
tools-per-page 10
number-of-pages (Math.ceil (/ (count tools-not-in-use) tools-per-page))
page-number (if-let [raw-page-number ((:query (:url (:current-route @db))) "p")]
(js/parseInt raw-page-number)
1)]
{:tools (doall (take tools-per-page (drop (* tools-per-page (dec page-number))
tools-not-in-use)))
:page-number page-number
:number-of-pages number-of-pages})
nil))))
(re-frame/register-sub
:used-tools
(fn [db _]
(ratom/reaction (:used-tools @db))))
(re-frame/register-handler
:get-wanted-features
(fn [db [_]]
(ajax/GET "/api/v1/wanted-features"
{:handler #(re-frame/dispatch [:got-wanted-features %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-wanted-features
(fn [db [_ wanted-features]]
(re-frame/dispatch [:get-suggested-tools])
(assoc db :wanted-features (set wanted-features))))
(re-frame/register-handler
:mark-feature-as-wanted
(fn [db [_ feature-id]]
(let [db (update-in db [:wanted-features] #(conj (or %1 #{}) %2) feature-id)]
(ajax/PUT "/api/v1/wanted-features"
{:params feature-id
:handler #(re-frame/dispatch [:got-wanted-features %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-handler
:mark-feature-as-unwanted
(fn [db [_ feature-id]]
(let [db (update-in db [:wanted-features] disj feature-id)]
(ajax/DELETE (str "/api/v1/wanted-features/" feature-id)
{:handler #(re-frame/dispatch [:got-wanted-features %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-sub
:wanted-features
(fn [db _]
(ratom/reaction (:wanted-features @db))))
(re-frame/register-sub
:current-available-features
(fn [db _]
(ratom/reaction
(if (:features @db)
(let [features-not-wanted (filter #(not (contains? (:wanted-features @db) (:id %)))
(vals (:by-id (:features @db))))
features-per-page 10
number-of-pages (Math.ceil (/ (count features-not-wanted) features-per-page))
page-number (if-let [raw-page-number ((:query (:url (:current-route @db))) "p")]
(js/parseInt raw-page-number)
1)]
{:features (doall (take features-per-page (drop (* features-per-page (dec page-number))
features-not-wanted)))
:page-number page-number
:number-of-pages number-of-pages})
nil))))
(defmethod layout/pages :home [_]
(let [tools (re-frame/subscribe [:tools])
features (re-frame/subscribe [:features])
current-available-tools (re-frame/subscribe [:current-available-tools])
used-tools (re-frame/subscribe [:used-tools])
wanted-features (re-frame/subscribe [:wanted-features])
suggested-tools (re-frame/subscribe [:suggested-tools])]
(fn [_]
[:div
(if (nil? (:tools @current-available-tools))
[ui/loading]
[:div
[:h1 "What tools do you use?"]
[:ul (for [tool (:tools @current-available-tools)]
^{:key (:id tool)}
[:li [:a {:on-click #(ui/dispatch % [:mark-tool-as-used (:id tool)])} (:name tool)]])]
[:div [:a {:href (str (routing/url-for :home) "?p=" (if (= (:page-number @current-available-tools) (:number-of-pages @current-available-tools))
1
(inc (:page-number @current-available-tools))))}
"more tools"]]
(when (and (:tools @current-available-tools) (not (empty? @used-tools)))
[:div
[:div "Your tools"]
[:ul (for [tool (doall (filter identity (map #(get-in @tools [:by-id %]) @used-tools)))]
^{:key (:id tool)}
[:li (:name tool) " "
[:a {:on-click #(ui/dispatch % [:mark-tool-as-unused (:id tool)])} "x"]])]])
[:h1 "What do you need?"]
[:ul (for [feature (vals (:by-id @features))]
^{:key (:id feature)}
[:li [:a {:on-click #(ui/dispatch % [:mark-feature-as-wanted (:id feature)])} (:name feature)]])]
(when (and #_(:features @current-available-features) (not (empty? @wanted-features)))
[:div
[:div "Your desires"]
[:ul (for [feature (doall (filter identity (map #(get-in @features [:by-id %]) @wanted-features)))]
^{:key (:id feature)}
[:li (:name feature) " "
[:a {:on-click #(ui/dispatch % [:mark-feature-as-unwanted (:id feature)])} "x"]])]])
[:h1 "Suggested tools"]
(when (not (empty? @suggested-tools))
[:div
[:ul (for [[feature suggested-tools] (group-by :feature @suggested-tools)]
^{:key feature}
[:div
[:span feature]
[:ul (for [tool suggested-tools]
^{:key (:id tool)}
[:li (:name tool) ": " (s/join ", " (map :name (vals (select-keys (:by-id @tools) (:integration-ids tool)))))])]])]])])])))
(defmethod layout/pages :tools [_]
(let [tools (re-frame/subscribe [:tools])]
(fn [_]
(if (nil? (:by-id @tools))
[ui/loading]
[:div
[:ul (for [tool (vals (:by-id @tools))]
^{:key (:id tool)} [:li [:a {:href (routing/url-for :tool {:slug (:slug tool)})} (:name tool)]])]
[:div [:a {:on-click #(ui/dispatch % [:get-tools])}
"Refresh tools"]]]))))
(re-frame/register-sub
:current-tool
(fn [db _]
(ratom/reaction (get-in @db [:tools :by-slug (get @db :current-tool-slug)]))))
(defmethod routing/display-page :tool [current-route db]
(re-frame/dispatch [:get-tool-with-integrations (:slug current-route)])
(assoc db :current-tool-slug (:slug current-route)))
(re-frame/register-handler
:get-tool-with-integrations
(fn [db [_ tool-slug tool-requested]]
(if-let [tool (get-in db [:tools :by-slug tool-slug])]
(when (empty? (:integration-ids tool))
(ajax/GET (str "/api/v1/tools/" (:id tool) "/integrations")
{:handler #(re-frame/dispatch [:got-integrations (:id tool) %1])
:error-handler util/report-unexpected-error}))
(do (when (not tool-requested)
(re-frame/dispatch [:get-tools])) ; TODO: only get the tool we want, by slug.
(re-frame/dispatch [:get-tool-with-integrations tool-slug true])))
db))
(re-frame/register-handler
:got-integrations
(fn [db [_ tool-id integration-ids]]
(let [tool (assoc (get-in db [:tools :by-id tool-id]) :integration-ids integration-ids)] ; TODO: get the tools that we have integration ids for when we stop getting all the tools all the time.
(add-tool db tool))))
(defmethod layout/pages :tool [_]
(let [current-tool (re-frame/subscribe [:current-tool])
tools (re-frame/subscribe [:tools])]
(fn [_]
(if @current-tool
[:div
[:h1 (:name @current-tool)]
[:ul (for [integrated-tool (vals (select-keys (:by-id @tools) (:integration-ids @current-tool)))]
^{:key (:id integrated-tool)} [:li [:a {:href (routing/url-for :tool {:slug (:slug integrated-tool)})} (:name integrated-tool)]])]]
[ui/loading]))))
(re-frame/register-sub
:suggested-tools
(fn [db _]
(ratom/reaction (:suggested-tools @db))))
(re-frame/register-handler
:get-suggested-tools
(fn [db [_ feature_id tool_ids]]
(ajax/GET "/api/v1/suggested-tools"
{:handler #(re-frame/dispatch [:got-suggested-tools %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-suggested-tools
(fn [db [_ suggested-tools]]
(assoc db :suggested-tools (set (map walk/keywordize-keys suggested-tools)))))
| null | https://raw.githubusercontent.com/pupeno/ninjatools/1b73ff22174b5ec196d514062162b252899c1735/src/cljs/ninjatools/tools.cljs | clojure | TODO: only get the tool we want, by slug.
TODO: get the tools that we have integration ids for when we stop getting all the tools all the time. | Copyright © 2015 Carousel Apps , Ltd. All rights reserved .
(ns ninjatools.tools
(:require [clojure.string :as s]
[clojure.walk :as walk]
[reagent.ratom :as ratom :include-macros true]
[re-frame.core :as re-frame]
[ajax.core :as ajax]
[ninjatools.layout :as layout]
[ninjatools.routing :as routing]
[ninjatools.ui :as ui]
[ninjatools.util :as util :refer [dissoc-in println-ret]]))
(defmethod routing/display-page :home [_current-route db]
(when (empty? (get-in db [:tools :by-id]))
(re-frame/dispatch [:get-tools])
(re-frame/dispatch [:get-features])
(re-frame/dispatch [:get-used-tools])
(re-frame/dispatch [:get-wanted-features]))
db)
(defmethod routing/display-page :tools [_current-route db]
(when (empty? (get-in db [:tools :by-id]))
(re-frame/dispatch [:get-tools]))
db)
(defn add-tool [db tool]
(-> db
(assoc-in [:tools :by-id (:id tool)] tool)
(assoc-in [:tools :by-slug (:slug tool)] tool)))
(re-frame/register-handler
:get-tools
(fn [db [_]]
(ajax/GET "/api/v1/tools"
{:handler #(re-frame/dispatch [:got-tools %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-tools
(fn [db [_ tools]]
(let [tools (map walk/keywordize-keys tools)]
(reduce add-tool db tools))))
(re-frame/register-sub
:tools
(fn [db _]
(ratom/reaction (:tools @db))))
(defn add-feature [db feature]
(-> db
(assoc-in [:features :by-id (:id feature)] feature)
(assoc-in [:features :by-slug (:id feature)] feature)))
(re-frame/register-handler
:get-features
(fn [db [_]]
(ajax/GET "/api/v1/features"
{:handler #(re-frame/dispatch [:got-features (walk/keywordize-keys %1)])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-features
(fn [db [_ features]]
(reduce add-feature db features)))
(re-frame/register-sub
:features
(fn [db _]
(ratom/reaction (:features @db))))
(re-frame/register-handler
:get-used-tools
(fn [db [_]]
(ajax/GET "/api/v1/used-tools"
{:handler #(re-frame/dispatch [:got-used-tools %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-used-tools
(fn [db [_ used-tools]]
(re-frame/dispatch [:get-suggested-tools])
(assoc db :used-tools (set used-tools))))
(re-frame/register-handler
:mark-tool-as-used
(fn [db [_ tool-id]]
(let [db (update-in db [:used-tools] #(conj (or %1 #{}) %2) tool-id)]
(ajax/PUT "/api/v1/used-tools"
{:params tool-id
:handler #(re-frame/dispatch [:got-used-tools %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-handler
:mark-tool-as-unused
(fn [db [_ tool-id]]
(let [db (update-in db [:used-tools] disj tool-id)]
(ajax/DELETE (str "/api/v1/used-tools/" tool-id)
{:handler #(re-frame/dispatch [:got-used-tools %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-sub
:current-available-tools
(fn [db _]
(ratom/reaction
(if (:tools @db)
(let [tools-not-in-use (filter #(not (contains? (:used-tools @db) (:id %)))
(vals (:by-id (:tools @db))))
tools-per-page 10
number-of-pages (Math.ceil (/ (count tools-not-in-use) tools-per-page))
page-number (if-let [raw-page-number ((:query (:url (:current-route @db))) "p")]
(js/parseInt raw-page-number)
1)]
{:tools (doall (take tools-per-page (drop (* tools-per-page (dec page-number))
tools-not-in-use)))
:page-number page-number
:number-of-pages number-of-pages})
nil))))
(re-frame/register-sub
:used-tools
(fn [db _]
(ratom/reaction (:used-tools @db))))
(re-frame/register-handler
:get-wanted-features
(fn [db [_]]
(ajax/GET "/api/v1/wanted-features"
{:handler #(re-frame/dispatch [:got-wanted-features %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-wanted-features
(fn [db [_ wanted-features]]
(re-frame/dispatch [:get-suggested-tools])
(assoc db :wanted-features (set wanted-features))))
(re-frame/register-handler
:mark-feature-as-wanted
(fn [db [_ feature-id]]
(let [db (update-in db [:wanted-features] #(conj (or %1 #{}) %2) feature-id)]
(ajax/PUT "/api/v1/wanted-features"
{:params feature-id
:handler #(re-frame/dispatch [:got-wanted-features %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-handler
:mark-feature-as-unwanted
(fn [db [_ feature-id]]
(let [db (update-in db [:wanted-features] disj feature-id)]
(ajax/DELETE (str "/api/v1/wanted-features/" feature-id)
{:handler #(re-frame/dispatch [:got-wanted-features %1])
:error-handler util/report-unexpected-error})
db)))
(re-frame/register-sub
:wanted-features
(fn [db _]
(ratom/reaction (:wanted-features @db))))
(re-frame/register-sub
:current-available-features
(fn [db _]
(ratom/reaction
(if (:features @db)
(let [features-not-wanted (filter #(not (contains? (:wanted-features @db) (:id %)))
(vals (:by-id (:features @db))))
features-per-page 10
number-of-pages (Math.ceil (/ (count features-not-wanted) features-per-page))
page-number (if-let [raw-page-number ((:query (:url (:current-route @db))) "p")]
(js/parseInt raw-page-number)
1)]
{:features (doall (take features-per-page (drop (* features-per-page (dec page-number))
features-not-wanted)))
:page-number page-number
:number-of-pages number-of-pages})
nil))))
(defmethod layout/pages :home [_]
(let [tools (re-frame/subscribe [:tools])
features (re-frame/subscribe [:features])
current-available-tools (re-frame/subscribe [:current-available-tools])
used-tools (re-frame/subscribe [:used-tools])
wanted-features (re-frame/subscribe [:wanted-features])
suggested-tools (re-frame/subscribe [:suggested-tools])]
(fn [_]
[:div
(if (nil? (:tools @current-available-tools))
[ui/loading]
[:div
[:h1 "What tools do you use?"]
[:ul (for [tool (:tools @current-available-tools)]
^{:key (:id tool)}
[:li [:a {:on-click #(ui/dispatch % [:mark-tool-as-used (:id tool)])} (:name tool)]])]
[:div [:a {:href (str (routing/url-for :home) "?p=" (if (= (:page-number @current-available-tools) (:number-of-pages @current-available-tools))
1
(inc (:page-number @current-available-tools))))}
"more tools"]]
(when (and (:tools @current-available-tools) (not (empty? @used-tools)))
[:div
[:div "Your tools"]
[:ul (for [tool (doall (filter identity (map #(get-in @tools [:by-id %]) @used-tools)))]
^{:key (:id tool)}
[:li (:name tool) " "
[:a {:on-click #(ui/dispatch % [:mark-tool-as-unused (:id tool)])} "x"]])]])
[:h1 "What do you need?"]
[:ul (for [feature (vals (:by-id @features))]
^{:key (:id feature)}
[:li [:a {:on-click #(ui/dispatch % [:mark-feature-as-wanted (:id feature)])} (:name feature)]])]
(when (and #_(:features @current-available-features) (not (empty? @wanted-features)))
[:div
[:div "Your desires"]
[:ul (for [feature (doall (filter identity (map #(get-in @features [:by-id %]) @wanted-features)))]
^{:key (:id feature)}
[:li (:name feature) " "
[:a {:on-click #(ui/dispatch % [:mark-feature-as-unwanted (:id feature)])} "x"]])]])
[:h1 "Suggested tools"]
(when (not (empty? @suggested-tools))
[:div
[:ul (for [[feature suggested-tools] (group-by :feature @suggested-tools)]
^{:key feature}
[:div
[:span feature]
[:ul (for [tool suggested-tools]
^{:key (:id tool)}
[:li (:name tool) ": " (s/join ", " (map :name (vals (select-keys (:by-id @tools) (:integration-ids tool)))))])]])]])])])))
(defmethod layout/pages :tools [_]
(let [tools (re-frame/subscribe [:tools])]
(fn [_]
(if (nil? (:by-id @tools))
[ui/loading]
[:div
[:ul (for [tool (vals (:by-id @tools))]
^{:key (:id tool)} [:li [:a {:href (routing/url-for :tool {:slug (:slug tool)})} (:name tool)]])]
[:div [:a {:on-click #(ui/dispatch % [:get-tools])}
"Refresh tools"]]]))))
(re-frame/register-sub
:current-tool
(fn [db _]
(ratom/reaction (get-in @db [:tools :by-slug (get @db :current-tool-slug)]))))
(defmethod routing/display-page :tool [current-route db]
(re-frame/dispatch [:get-tool-with-integrations (:slug current-route)])
(assoc db :current-tool-slug (:slug current-route)))
(re-frame/register-handler
:get-tool-with-integrations
(fn [db [_ tool-slug tool-requested]]
(if-let [tool (get-in db [:tools :by-slug tool-slug])]
(when (empty? (:integration-ids tool))
(ajax/GET (str "/api/v1/tools/" (:id tool) "/integrations")
{:handler #(re-frame/dispatch [:got-integrations (:id tool) %1])
:error-handler util/report-unexpected-error}))
(do (when (not tool-requested)
(re-frame/dispatch [:get-tool-with-integrations tool-slug true])))
db))
(re-frame/register-handler
:got-integrations
(fn [db [_ tool-id integration-ids]]
(add-tool db tool))))
(defmethod layout/pages :tool [_]
(let [current-tool (re-frame/subscribe [:current-tool])
tools (re-frame/subscribe [:tools])]
(fn [_]
(if @current-tool
[:div
[:h1 (:name @current-tool)]
[:ul (for [integrated-tool (vals (select-keys (:by-id @tools) (:integration-ids @current-tool)))]
^{:key (:id integrated-tool)} [:li [:a {:href (routing/url-for :tool {:slug (:slug integrated-tool)})} (:name integrated-tool)]])]]
[ui/loading]))))
(re-frame/register-sub
:suggested-tools
(fn [db _]
(ratom/reaction (:suggested-tools @db))))
(re-frame/register-handler
:get-suggested-tools
(fn [db [_ feature_id tool_ids]]
(ajax/GET "/api/v1/suggested-tools"
{:handler #(re-frame/dispatch [:got-suggested-tools %1])
:error-handler util/report-unexpected-error})
db))
(re-frame/register-handler
:got-suggested-tools
(fn [db [_ suggested-tools]]
(assoc db :suggested-tools (set (map walk/keywordize-keys suggested-tools)))))
|
d19633a4842638194569bfea9120cd3649c2617686e91996aa180adab77f237c | rizo/snowflake-os | cmmgen.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
(* Translation from closed lambda to C-- *)
open Misc
open Arch
open Asttypes
open Primitive
open Types
open Lambda
open Clambda
open Cmm
(* Local binding of complex expressions *)
let bind name arg fn =
match arg with
Cvar _ | Cconst_int _ | Cconst_natint _ | Cconst_symbol _
| Cconst_pointer _ | Cconst_natpointer _ -> fn arg
| _ -> let id = Ident.create name in Clet(id, arg, fn (Cvar id))
let bind_nonvar name arg fn =
match arg with
Cconst_int _ | Cconst_natint _ | Cconst_symbol _
| Cconst_pointer _ | Cconst_natpointer _ -> fn arg
| _ -> let id = Ident.create name in Clet(id, arg, fn (Cvar id))
(* Block headers. Meaning of the tag field: see stdlib/obj.ml *)
let float_tag = Cconst_int Obj.double_tag
let floatarray_tag = Cconst_int Obj.double_array_tag
let block_header tag sz =
Nativeint.add (Nativeint.shift_left (Nativeint.of_int sz) 10)
(Nativeint.of_int tag)
let closure_header sz = block_header Obj.closure_tag sz
let infix_header ofs = block_header Obj.infix_tag ofs
let float_header = block_header Obj.double_tag (size_float / size_addr)
let floatarray_header len =
block_header Obj.double_array_tag (len * size_float / size_addr)
let string_header len =
block_header Obj.string_tag ((len + size_addr) / size_addr)
let boxedint32_header = block_header Obj.custom_tag 2
let boxedint64_header = block_header Obj.custom_tag (1 + 8 / size_addr)
let boxedintnat_header = block_header Obj.custom_tag 2
let alloc_block_header tag sz = Cconst_natint(block_header tag sz)
let alloc_float_header = Cconst_natint(float_header)
let alloc_floatarray_header len = Cconst_natint(floatarray_header len)
let alloc_closure_header sz = Cconst_natint(closure_header sz)
let alloc_infix_header ofs = Cconst_natint(infix_header ofs)
let alloc_boxedint32_header = Cconst_natint(boxedint32_header)
let alloc_boxedint64_header = Cconst_natint(boxedint64_header)
let alloc_boxedintnat_header = Cconst_natint(boxedintnat_header)
(* Integers *)
let max_repr_int = max_int asr 1
let min_repr_int = min_int asr 1
let int_const n =
if n <= max_repr_int && n >= min_repr_int
then Cconst_int((n lsl 1) + 1)
else Cconst_natint
(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n)
let add_const c n =
if n = 0 then c else Cop(Caddi, [c; Cconst_int n])
let incr_int = function
Cconst_int n when n < max_int -> Cconst_int(n+1)
| Cop(Caddi, [c; Cconst_int n]) when n < max_int -> add_const c (n + 1)
| c -> add_const c 1
let decr_int = function
Cconst_int n when n > min_int -> Cconst_int(n-1)
| Cop(Caddi, [c; Cconst_int n]) when n > min_int -> add_const c (n - 1)
| c -> add_const c (-1)
let add_int c1 c2 =
match (c1, c2) with
(Cop(Caddi, [c1; Cconst_int n1]),
Cop(Caddi, [c2; Cconst_int n2])) when no_overflow_add n1 n2 ->
add_const (Cop(Caddi, [c1; c2])) (n1 + n2)
| (Cop(Caddi, [c1; Cconst_int n1]), c2) ->
add_const (Cop(Caddi, [c1; c2])) n1
| (c1, Cop(Caddi, [c2; Cconst_int n2])) ->
add_const (Cop(Caddi, [c1; c2])) n2
| (Cconst_int _, _) ->
Cop(Caddi, [c2; c1])
| (_, _) ->
Cop(Caddi, [c1; c2])
let sub_int c1 c2 =
match (c1, c2) with
(Cop(Caddi, [c1; Cconst_int n1]),
Cop(Caddi, [c2; Cconst_int n2])) when no_overflow_sub n1 n2 ->
add_const (Cop(Csubi, [c1; c2])) (n1 - n2)
| (Cop(Caddi, [c1; Cconst_int n1]), c2) ->
add_const (Cop(Csubi, [c1; c2])) n1
| (c1, Cop(Caddi, [c2; Cconst_int n2])) when n2 <> min_int ->
add_const (Cop(Csubi, [c1; c2])) (-n2)
| (c1, Cconst_int n) when n <> min_int ->
add_const c1 (-n)
| (c1, c2) ->
Cop(Csubi, [c1; c2])
let mul_int c1 c2 =
match (c1, c2) with
(Cconst_int 0, _) -> c1
| (Cconst_int 1, _) -> c2
| (_, Cconst_int 0) -> c2
| (_, Cconst_int 1) -> c1
| (_, _) -> Cop(Cmuli, [c1; c2])
let tag_int = function
Cconst_int n -> int_const n
| c -> Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1])
let force_tag_int = function
Cconst_int n -> int_const n
| c -> Cop(Cor, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1])
let untag_int = function
Cconst_int n -> Cconst_int(n asr 1)
| Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1]) -> c
| Cop(Cor, [Cop(Casr, [c; Cconst_int n]); Cconst_int 1])
when n > 0 && n < size_int * 8 ->
Cop(Casr, [c; Cconst_int (n+1)])
| Cop(Cor, [Cop(Clsr, [c; Cconst_int n]); Cconst_int 1])
when n > 0 && n < size_int * 8 ->
Cop(Clsr, [c; Cconst_int (n+1)])
| Cop(Cor, [c; Cconst_int 1]) -> Cop(Casr, [c; Cconst_int 1])
| c -> Cop(Casr, [c; Cconst_int 1])
let lsl_int c1 c2 =
match (c1, c2) with
(Cop(Clsl, [c; Cconst_int n1]), Cconst_int n2)
when n1 > 0 && n2 > 0 && n1 + n2 < size_int * 8 ->
Cop(Clsl, [c; Cconst_int (n1 + n2)])
| (_, _) ->
Cop(Clsl, [c1; c2])
let ignore_low_bit_int = function
Cop(Caddi, [(Cop(Clsl, [_; Cconst_int 1]) as c); Cconst_int 1]) -> c
| Cop(Cor, [c; Cconst_int 1]) -> c
| c -> c
let is_nonzero_constant = function
Cconst_int n -> n <> 0
| Cconst_natint n -> n <> 0n
| _ -> false
let safe_divmod op c1 c2 dbg =
if !Clflags.fast || is_nonzero_constant c2 then
Cop(op, [c1; c2])
else
bind "divisor" c2 (fun c2 ->
Cifthenelse(c2,
Cop(op, [c1; c2]),
Cop(Craise dbg,
[Cconst_symbol "caml_bucket_Division_by_zero"])))
(* Bool *)
let test_bool = function
Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1]) -> c
| Cop(Clsl, [c; Cconst_int 1]) -> c
| c -> Cop(Ccmpi Cne, [c; Cconst_int 1])
(* Float *)
let box_float c = Cop(Calloc, [alloc_float_header; c])
let rec unbox_float = function
Cop(Calloc, [header; c]) -> c
| Clet(id, exp, body) -> Clet(id, exp, unbox_float body)
| Cifthenelse(cond, e1, e2) ->
Cifthenelse(cond, unbox_float e1, unbox_float e2)
| Csequence(e1, e2) -> Csequence(e1, unbox_float e2)
| Cswitch(e, tbl, el) -> Cswitch(e, tbl, Array.map unbox_float el)
| Ccatch(n, ids, e1, e2) -> Ccatch(n, ids, unbox_float e1, unbox_float e2)
| Ctrywith(e1, id, e2) -> Ctrywith(unbox_float e1, id, unbox_float e2)
| c -> Cop(Cload Double_u, [c])
Complex
let box_complex c_re c_im =
Cop(Calloc, [alloc_floatarray_header 2; c_re; c_im])
let complex_re c = Cop(Cload Double_u, [c])
let complex_im c = Cop(Cload Double_u,
[Cop(Cadda, [c; Cconst_int size_float])])
(* Unit *)
let return_unit c = Csequence(c, Cconst_pointer 1)
let rec remove_unit = function
Cconst_pointer 1 -> Ctuple []
| Csequence(c, Cconst_pointer 1) -> c
| Csequence(c1, c2) ->
Csequence(c1, remove_unit c2)
| Cifthenelse(cond, ifso, ifnot) ->
Cifthenelse(cond, remove_unit ifso, remove_unit ifnot)
| Cswitch(sel, index, cases) ->
Cswitch(sel, index, Array.map remove_unit cases)
| Ccatch(io, ids, body, handler) ->
Ccatch(io, ids, remove_unit body, remove_unit handler)
| Ctrywith(body, exn, handler) ->
Ctrywith(remove_unit body, exn, remove_unit handler)
| Clet(id, c1, c2) ->
Clet(id, c1, remove_unit c2)
| Cop(Capply (mty, dbg), args) ->
Cop(Capply (typ_void, dbg), args)
| Cop(Cextcall(proc, mty, alloc, dbg), args) ->
Cop(Cextcall(proc, typ_void, alloc, dbg), args)
| Cexit (_,_) as c -> c
| Ctuple [] as c -> c
| c -> Csequence(c, Ctuple [])
(* Access to block fields *)
let field_address ptr n =
if n = 0
then ptr
else Cop(Cadda, [ptr; Cconst_int(n * size_addr)])
let get_field ptr n =
Cop(Cload Word, [field_address ptr n])
let set_field ptr n newval =
Cop(Cstore Word, [field_address ptr n; newval])
let header ptr =
Cop(Cload Word, [Cop(Cadda, [ptr; Cconst_int(-size_int)])])
let tag_offset =
if big_endian then -1 else -size_int
let get_tag ptr =
if Proc.word_addressed then (* If byte loads are slow *)
Cop(Cand, [header ptr; Cconst_int 255])
else (* If byte loads are efficient *)
Cop(Cload Byte_unsigned,
[Cop(Cadda, [ptr; Cconst_int(tag_offset)])])
let get_size ptr =
Cop(Clsr, [header ptr; Cconst_int 10])
(* Array indexing *)
let log2_size_addr = Misc.log2 size_addr
let log2_size_float = Misc.log2 size_float
let wordsize_shift = 9
let numfloat_shift = 9 + log2_size_float - log2_size_addr
let is_addr_array_hdr hdr =
Cop(Ccmpi Cne, [Cop(Cand, [hdr; Cconst_int 255]); floatarray_tag])
let is_addr_array_ptr ptr =
Cop(Ccmpi Cne, [get_tag ptr; floatarray_tag])
let addr_array_length hdr = Cop(Clsr, [hdr; Cconst_int wordsize_shift])
let float_array_length hdr = Cop(Clsr, [hdr; Cconst_int numfloat_shift])
let lsl_const c n =
Cop(Clsl, [c; Cconst_int n])
let array_indexing log2size ptr ofs =
match ofs with
Cconst_int n ->
let i = n asr 1 in
if i = 0 then ptr else Cop(Cadda, [ptr; Cconst_int(i lsl log2size)])
| Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1]) ->
Cop(Cadda, [ptr; lsl_const c log2size])
| Cop(Caddi, [c; Cconst_int n]) ->
Cop(Cadda, [Cop(Cadda, [ptr; lsl_const c (log2size - 1)]);
Cconst_int((n-1) lsl (log2size - 1))])
| _ ->
Cop(Cadda, [Cop(Cadda, [ptr; lsl_const ofs (log2size - 1)]);
Cconst_int((-1) lsl (log2size - 1))])
let addr_array_ref arr ofs =
Cop(Cload Word, [array_indexing log2_size_addr arr ofs])
let unboxed_float_array_ref arr ofs =
Cop(Cload Double_u, [array_indexing log2_size_float arr ofs])
let float_array_ref arr ofs =
box_float(unboxed_float_array_ref arr ofs)
let addr_array_set arr ofs newval =
Cop(Cextcall("caml_modify", typ_void, false, Debuginfo.none),
[array_indexing log2_size_addr arr ofs; newval])
let int_array_set arr ofs newval =
Cop(Cstore Word, [array_indexing log2_size_addr arr ofs; newval])
let float_array_set arr ofs newval =
Cop(Cstore Double_u, [array_indexing log2_size_float arr ofs; newval])
(* String length *)
let string_length exp =
bind "str" exp (fun str ->
let tmp_var = Ident.create "tmp" in
Clet(tmp_var,
Cop(Csubi,
[Cop(Clsl,
[Cop(Clsr, [header str; Cconst_int 10]);
Cconst_int log2_size_addr]);
Cconst_int 1]),
Cop(Csubi,
[Cvar tmp_var;
Cop(Cload Byte_unsigned,
[Cop(Cadda, [str; Cvar tmp_var])])])))
(* Message sending *)
let lookup_tag obj tag =
bind "tag" tag (fun tag ->
Cop(Cextcall("caml_get_public_method", typ_addr, false, Debuginfo.none),
[obj; tag]))
let lookup_label obj lab =
bind "lab" lab (fun lab ->
let table = Cop (Cload Word, [obj]) in
addr_array_ref table lab)
let call_cached_method obj tag cache pos args dbg =
let arity = List.length args in
let cache = array_indexing log2_size_addr cache pos in
Compilenv.need_send_fun arity;
Cop(Capply (typ_addr, dbg),
Cconst_symbol("caml_send" ^ string_of_int arity) ::
obj :: tag :: cache :: args)
(* Allocation *)
let make_alloc_generic set_fn tag wordsize args =
if wordsize <= Config.max_young_wosize then
Cop(Calloc, Cconst_natint(block_header tag wordsize) :: args)
else begin
let id = Ident.create "alloc" in
let rec fill_fields idx = function
[] -> Cvar id
| e1::el -> Csequence(set_fn (Cvar id) (Cconst_int idx) e1,
fill_fields (idx + 2) el) in
Clet(id,
Cop(Cextcall("caml_alloc", typ_addr, true, Debuginfo.none),
[Cconst_int wordsize; Cconst_int tag]),
fill_fields 1 args)
end
let make_alloc tag args =
make_alloc_generic addr_array_set tag (List.length args) args
let make_float_alloc tag args =
make_alloc_generic float_array_set tag
(List.length args * size_float / size_addr) args
(* To compile "let rec" over values *)
let fundecls_size fundecls =
let sz = ref (-1) in
List.iter
(fun (label, arity, params, body) ->
sz := !sz + 1 + (if arity = 1 then 2 else 3))
fundecls;
!sz
type rhs_kind =
| RHS_block of int
| RHS_nonrec
;;
let rec expr_size = function
| Uclosure(fundecls, clos_vars) ->
RHS_block (fundecls_size fundecls + List.length clos_vars)
| Ulet(id, exp, body) ->
expr_size body
| Uletrec(bindings, body) ->
expr_size body
| Uprim(Pmakeblock(tag, mut), args, _) ->
RHS_block (List.length args)
| Uprim(Pmakearray(Paddrarray | Pintarray), args, _) ->
RHS_block (List.length args)
| Usequence(exp, exp') ->
expr_size exp'
| _ -> RHS_nonrec
(* Record application and currying functions *)
let apply_function n =
Compilenv.need_apply_fun n; "caml_apply" ^ string_of_int n
let curry_function n =
Compilenv.need_curry_fun n;
if n >= 0
then "caml_curry" ^ string_of_int n
else "caml_tuplify" ^ string_of_int (-n)
(* Comparisons *)
let transl_comparison = function
Lambda.Ceq -> Ceq
| Lambda.Cneq -> Cne
| Lambda.Cge -> Cge
| Lambda.Cgt -> Cgt
| Lambda.Cle -> Cle
| Lambda.Clt -> Clt
(* Translate structured constants *)
let const_label = ref 0
let new_const_label () =
incr const_label;
!const_label
let new_const_symbol () =
incr const_label;
Compilenv.make_symbol (Some (string_of_int !const_label))
let structured_constants = ref ([] : (string * structured_constant) list)
let transl_constant = function
Const_base(Const_int n) ->
int_const n
| Const_base(Const_char c) ->
Cconst_int(((Char.code c) lsl 1) + 1)
| Const_pointer n ->
if n <= max_repr_int && n >= min_repr_int
then Cconst_pointer((n lsl 1) + 1)
else Cconst_natpointer
(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n)
| cst ->
let lbl = new_const_symbol() in
structured_constants := (lbl, cst) :: !structured_constants;
Cconst_symbol lbl
(* Translate constant closures *)
let constant_closures =
ref ([] : (string * (string * int * Ident.t list * ulambda) list) list)
(* Boxed integers *)
let box_int_constant bi n =
match bi with
Pnativeint -> Const_base(Const_nativeint n)
| Pint32 -> Const_base(Const_int32 (Nativeint.to_int32 n))
| Pint64 -> Const_base(Const_int64 (Int64.of_nativeint n))
let operations_boxed_int bi =
match bi with
Pnativeint -> "caml_nativeint_ops"
| Pint32 -> "caml_int32_ops"
| Pint64 -> "caml_int64_ops"
let alloc_header_boxed_int bi =
match bi with
Pnativeint -> alloc_boxedintnat_header
| Pint32 -> alloc_boxedint32_header
| Pint64 -> alloc_boxedint64_header
let box_int bi arg =
match arg with
Cconst_int n ->
transl_constant (box_int_constant bi (Nativeint.of_int n))
| Cconst_natint n ->
transl_constant (box_int_constant bi n)
| _ ->
let arg' =
if bi = Pint32 && size_int = 8 && big_endian
then Cop(Clsl, [arg; Cconst_int 32])
else arg in
Cop(Calloc, [alloc_header_boxed_int bi;
Cconst_symbol(operations_boxed_int bi);
arg'])
let rec unbox_int bi arg =
match arg with
Cop(Calloc, [hdr; ops; Cop(Clsl, [contents; Cconst_int 32])])
when bi = Pint32 && size_int = 8 && big_endian ->
Force sign - extension of low 32 bits
Cop(Casr, [Cop(Clsl, [contents; Cconst_int 32]); Cconst_int 32])
| Cop(Calloc, [hdr; ops; contents])
when bi = Pint32 && size_int = 8 && not big_endian ->
Force sign - extension of low 32 bits
Cop(Casr, [Cop(Clsl, [contents; Cconst_int 32]); Cconst_int 32])
| Cop(Calloc, [hdr; ops; contents]) ->
contents
| Clet(id, exp, body) -> Clet(id, exp, unbox_int bi body)
| Cifthenelse(cond, e1, e2) ->
Cifthenelse(cond, unbox_int bi e1, unbox_int bi e2)
| Csequence(e1, e2) -> Csequence(e1, unbox_int bi e2)
| Cswitch(e, tbl, el) -> Cswitch(e, tbl, Array.map (unbox_int bi) el)
| Ccatch(n, ids, e1, e2) -> Ccatch(n, ids, unbox_int bi e1, unbox_int bi e2)
| Ctrywith(e1, id, e2) -> Ctrywith(unbox_int bi e1, id, unbox_int bi e2)
| _ ->
Cop(Cload(if bi = Pint32 then Thirtytwo_signed else Word),
[Cop(Cadda, [arg; Cconst_int size_addr])])
let make_unsigned_int bi arg =
if bi = Pint32 && size_int = 8
then Cop(Cand, [arg; Cconst_natint 0xFFFFFFFFn])
else arg
(* Big arrays *)
let bigarray_elt_size = function
Pbigarray_unknown -> assert false
| Pbigarray_float32 -> 4
| Pbigarray_float64 -> 8
| Pbigarray_sint8 -> 1
| Pbigarray_uint8 -> 1
| Pbigarray_sint16 -> 2
| Pbigarray_uint16 -> 2
| Pbigarray_int32 -> 4
| Pbigarray_int64 -> 8
| Pbigarray_caml_int -> size_int
| Pbigarray_native_int -> size_int
| Pbigarray_complex32 -> 8
| Pbigarray_complex64 -> 16
let bigarray_indexing unsafe elt_kind layout b args dbg =
let check_bound a1 a2 k =
if unsafe then k else Csequence(Cop(Ccheckbound dbg, [a1;a2]), k) in
let rec ba_indexing dim_ofs delta_ofs = function
[] -> assert false
| [arg] ->
bind "idx" (untag_int arg)
(fun idx ->
check_bound (Cop(Cload Word,[field_address b dim_ofs])) idx idx)
| arg1 :: argl ->
let rem = ba_indexing (dim_ofs + delta_ofs) delta_ofs argl in
bind "idx" (untag_int arg1)
(fun idx ->
bind "bound" (Cop(Cload Word, [field_address b dim_ofs]))
(fun bound ->
check_bound bound idx (add_int (mul_int rem bound) idx))) in
let offset =
match layout with
Pbigarray_unknown_layout ->
assert false
| Pbigarray_c_layout ->
ba_indexing (4 + List.length args) (-1) (List.rev args)
| Pbigarray_fortran_layout ->
ba_indexing 5 1 (List.map (fun idx -> sub_int idx (Cconst_int 2)) args)
and elt_size =
bigarray_elt_size elt_kind in
let byte_offset =
if elt_size = 1
then offset
else Cop(Clsl, [offset; Cconst_int(log2 elt_size)]) in
Cop(Cadda, [Cop(Cload Word, [field_address b 1]); byte_offset])
let bigarray_word_kind = function
Pbigarray_unknown -> assert false
| Pbigarray_float32 -> Single
| Pbigarray_float64 -> Double
| Pbigarray_sint8 -> Byte_signed
| Pbigarray_uint8 -> Byte_unsigned
| Pbigarray_sint16 -> Sixteen_signed
| Pbigarray_uint16 -> Sixteen_unsigned
| Pbigarray_int32 -> Thirtytwo_signed
| Pbigarray_int64 -> Word
| Pbigarray_caml_int -> Word
| Pbigarray_native_int -> Word
| Pbigarray_complex32 -> Single
| Pbigarray_complex64 -> Double
let bigarray_get unsafe elt_kind layout b args dbg =
match elt_kind with
Pbigarray_complex32 | Pbigarray_complex64 ->
let kind = bigarray_word_kind elt_kind in
let sz = bigarray_elt_size elt_kind / 2 in
bind "addr" (bigarray_indexing unsafe elt_kind layout b args dbg) (fun addr ->
box_complex
(Cop(Cload kind, [addr]))
(Cop(Cload kind, [Cop(Cadda, [addr; Cconst_int sz])])))
| _ ->
Cop(Cload (bigarray_word_kind elt_kind),
[bigarray_indexing unsafe elt_kind layout b args dbg])
let bigarray_set unsafe elt_kind layout b args newval dbg =
match elt_kind with
Pbigarray_complex32 | Pbigarray_complex64 ->
let kind = bigarray_word_kind elt_kind in
let sz = bigarray_elt_size elt_kind / 2 in
bind "newval" newval (fun newv ->
bind "addr" (bigarray_indexing unsafe elt_kind layout b args dbg) (fun addr ->
Csequence(
Cop(Cstore kind, [addr; complex_re newv]),
Cop(Cstore kind,
[Cop(Cadda, [addr; Cconst_int sz]); complex_im newv]))))
| _ ->
Cop(Cstore (bigarray_word_kind elt_kind),
[bigarray_indexing unsafe elt_kind layout b args dbg; newval])
(* Simplification of some primitives into C calls *)
let default_prim name =
{ prim_name = name; prim_arity = 0 (*ignored*);
prim_alloc = true; prim_native_name = ""; prim_native_float = false }
let simplif_primitive_32bits = function
Pbintofint Pint64 -> Pccall (default_prim "caml_int64_of_int")
| Pintofbint Pint64 -> Pccall (default_prim "caml_int64_to_int")
| Pcvtbint(Pint32, Pint64) -> Pccall (default_prim "caml_int64_of_int32")
| Pcvtbint(Pint64, Pint32) -> Pccall (default_prim "caml_int64_to_int32")
| Pcvtbint(Pnativeint, Pint64) ->
Pccall (default_prim "caml_int64_of_nativeint")
| Pcvtbint(Pint64, Pnativeint) ->
Pccall (default_prim "caml_int64_to_nativeint")
| Pnegbint Pint64 -> Pccall (default_prim "caml_int64_neg")
| Paddbint Pint64 -> Pccall (default_prim "caml_int64_add")
| Psubbint Pint64 -> Pccall (default_prim "caml_int64_sub")
| Pmulbint Pint64 -> Pccall (default_prim "caml_int64_mul")
| Pdivbint Pint64 -> Pccall (default_prim "caml_int64_div")
| Pmodbint Pint64 -> Pccall (default_prim "caml_int64_mod")
| Pandbint Pint64 -> Pccall (default_prim "caml_int64_and")
| Porbint Pint64 -> Pccall (default_prim "caml_int64_or")
| Pxorbint Pint64 -> Pccall (default_prim "caml_int64_xor")
| Plslbint Pint64 -> Pccall (default_prim "caml_int64_shift_left")
| Plsrbint Pint64 -> Pccall (default_prim "caml_int64_shift_right_unsigned")
| Pasrbint Pint64 -> Pccall (default_prim "caml_int64_shift_right")
| Pbintcomp(Pint64, Lambda.Ceq) -> Pccall (default_prim "caml_equal")
| Pbintcomp(Pint64, Lambda.Cneq) -> Pccall (default_prim "caml_notequal")
| Pbintcomp(Pint64, Lambda.Clt) -> Pccall (default_prim "caml_lessthan")
| Pbintcomp(Pint64, Lambda.Cgt) -> Pccall (default_prim "caml_greaterthan")
| Pbintcomp(Pint64, Lambda.Cle) -> Pccall (default_prim "caml_lessequal")
| Pbintcomp(Pint64, Lambda.Cge) -> Pccall (default_prim "caml_greaterequal")
| Pbigarrayref(unsafe, n, Pbigarray_int64, layout) ->
Pccall (default_prim ("caml_ba_get_" ^ string_of_int n))
| Pbigarrayset(unsafe, n, Pbigarray_int64, layout) ->
Pccall (default_prim ("caml_ba_set_" ^ string_of_int n))
| p -> p
let simplif_primitive p =
match p with
| Pduprecord _ ->
Pccall (default_prim "caml_obj_dup")
| Pbigarrayref(unsafe, n, Pbigarray_unknown, layout) ->
Pccall (default_prim ("caml_ba_get_" ^ string_of_int n))
| Pbigarrayset(unsafe, n, Pbigarray_unknown, layout) ->
Pccall (default_prim ("caml_ba_set_" ^ string_of_int n))
| Pbigarrayref(unsafe, n, kind, Pbigarray_unknown_layout) ->
Pccall (default_prim ("caml_ba_get_" ^ string_of_int n))
| Pbigarrayset(unsafe, n, kind, Pbigarray_unknown_layout) ->
Pccall (default_prim ("caml_ba_set_" ^ string_of_int n))
| p ->
if size_int = 8 then p else simplif_primitive_32bits p
(* Build switchers both for constants and blocks *)
constants first
(* Interval test: an unsigned [bound <u n] comparison holds exactly when
   [n] falls outside [0, bound), covering negatives in one compare. *)
let transl_isout bound n =
  tag_int (Cop (Ccmpa Clt, [bound; n]))
(* Escape token carrying an index out of a search loop.
   NOTE(review): no [raise (Found _)] or handler is visible in this chunk;
   confirm it is still referenced elsewhere before removing. *)
exception Found of int
(* Build a [Cswitch] over [arg], sharing identical actions: [cases.(i)] is
   an index into [acts]; a [Switch.mk_store] clause store dedupes equal
   action indices so each distinct action is compiled only once. *)
let make_switch_gen arg cases acts =
  let lcases = Array.length cases in
  let new_cases = Array.create lcases 0 in
  let store = Switch.mk_store (=) in
  for i = 0 to Array.length cases-1 do
    let act = cases.(i) in
    (* act_store returns the (possibly shared) slot for this action. *)
    let new_act = store.Switch.act_store act in
    new_cases.(i) <- new_act
  done ;
  Cswitch
    (arg, new_cases,
     Array.map
       (fun n -> acts.(n))
       (store.Switch.act_get ()))
(* Then for blocks *)
(* Instantiation of the generic interval-switch compiler (module Switch)
   over Cmm expressions, used to compile matches on block tags. *)
module SArgBlocks =
struct
  type primitive = operation

  let eqint = Ccmpi Ceq
  let neint = Ccmpi Cne
  let leint = Ccmpi Cle
  let ltint = Ccmpi Clt
  let geint = Ccmpi Cge
  let gtint = Ccmpi Cgt

  type act = expression

  (* Exit 0 is never used as a real handler: it only marks "no default". *)
  let default = Cexit (0,[])
  let make_prim p args = Cop (p,args)
  let make_offset arg n = add_const arg n
  (* Unsigned compares implement the [0 <= x < h] interval tests. *)
  let make_isout h arg = Cop (Ccmpa Clt, [h ; arg])
  let make_isin h arg = Cop (Ccmpa Cge, [h ; arg])
  let make_if cond ifso ifnot = Cifthenelse (cond, ifso, ifnot)
  let make_switch arg cases actions =
    make_switch_gen arg cases actions
  let bind arg body = bind "switcher" arg body
end

module SwitcherBlocks = Switch.Make(SArgBlocks)
(* Auxiliary functions for optimizing "let" of boxed numbers (floats and
   boxed integers) *)
(* Classification of an expression's result, used by [Ulet] translation to
   decide whether the bound value can be kept unboxed. *)
type unboxed_number_kind =
    No_unboxing
  | Boxed_float
  | Boxed_integer of boxed_integer
(* Syntactic classification: does this expression always produce a freshly
   boxed float or boxed integer?  Only literals and primitives whose result
   is known to be boxed qualify; anything else is [No_unboxing]. *)
let is_unboxed_number = function
    Uconst(Const_base(Const_float f)) ->
      Boxed_float
  | Uprim(p, _, _) ->
      begin match simplif_primitive p with
        (* C calls declared "float" pass/return unboxed doubles. *)
        Pccall p -> if p.prim_native_float then Boxed_float else No_unboxing
      | Pfloatfield _ -> Boxed_float
      | Pfloatofint -> Boxed_float
      | Pnegfloat -> Boxed_float
      | Pabsfloat -> Boxed_float
      | Paddfloat -> Boxed_float
      | Psubfloat -> Boxed_float
      | Pmulfloat -> Boxed_float
      | Pdivfloat -> Boxed_float
      | Parrayrefu Pfloatarray -> Boxed_float
      | Parrayrefs Pfloatarray -> Boxed_float
      | Pbintofint bi -> Boxed_integer bi
      | Pcvtbint(src, dst) -> Boxed_integer dst
      | Pnegbint bi -> Boxed_integer bi
      | Paddbint bi -> Boxed_integer bi
      | Psubbint bi -> Boxed_integer bi
      | Pmulbint bi -> Boxed_integer bi
      | Pdivbint bi -> Boxed_integer bi
      | Pmodbint bi -> Boxed_integer bi
      | Pandbint bi -> Boxed_integer bi
      | Porbint bi -> Boxed_integer bi
      | Pxorbint bi -> Boxed_integer bi
      | Plslbint bi -> Boxed_integer bi
      | Plsrbint bi -> Boxed_integer bi
      | Pasrbint bi -> Boxed_integer bi
      | Pbigarrayref(_, _, (Pbigarray_float32 | Pbigarray_float64), _) ->
          Boxed_float
      | Pbigarrayref(_, _, Pbigarray_int32, _) -> Boxed_integer Pint32
      | Pbigarrayref(_, _, Pbigarray_int64, _) -> Boxed_integer Pint64
      | Pbigarrayref(_, _, Pbigarray_native_int, _) -> Boxed_integer Pnativeint
      | _ -> No_unboxing
      end
  | _ -> No_unboxing
(* Rewrite [exp], replacing loads out of the box bound to [boxed_id] by
   direct uses of [unboxed_id].  Returns the rewritten expression plus two
   flags: whether the boxed variable is still used as a whole value
   ([need_boxed]) and whether it is ever assigned ([assigned]). *)
let subst_boxed_number unbox_fn boxed_id unboxed_id exp =
  let need_boxed = ref false in
  let assigned = ref false in
  let rec subst = function
      Cvar id as e ->
        (* Precedence note: the [if] is a unit statement; [e] is returned
           unconditionally.  A bare use of the variable means the boxed
           form is still required. *)
        if Ident.same id boxed_id then need_boxed := true; e
    | Clet(id, arg, body) -> Clet(id, subst arg, subst body)
    | Cassign(id, arg) ->
        if Ident.same id boxed_id then begin
          (* Assignment of a new box: redirect it to the unboxed slot. *)
          assigned := true;
          Cassign(unboxed_id, subst(unbox_fn arg))
        end else
          Cassign(id, subst arg)
    | Ctuple argv -> Ctuple(List.map subst argv)
    | Cop(Cload _, [Cvar id]) as e ->
        (* Load of the boxed contents becomes a direct unboxed use. *)
        if Ident.same id boxed_id then Cvar unboxed_id else e
    | Cop(Cload _, [Cop(Cadda, [Cvar id; _])]) as e ->
        if Ident.same id boxed_id then Cvar unboxed_id else e
    | Cop(op, argv) -> Cop(op, List.map subst argv)
    | Csequence(e1, e2) -> Csequence(subst e1, subst e2)
    | Cifthenelse(e1, e2, e3) -> Cifthenelse(subst e1, subst e2, subst e3)
    | Cswitch(arg, index, cases) ->
        Cswitch(subst arg, index, Array.map subst cases)
    | Cloop e -> Cloop(subst e)
    | Ccatch(nfail, ids, e1, e2) -> Ccatch(nfail, ids, subst e1, subst e2)
    | Cexit (nfail, el) -> Cexit (nfail, List.map subst el)
    | Ctrywith(e1, id, e2) -> Ctrywith(subst e1, id, subst e2)
    | e -> e in
  let res = subst exp in
  (res, !need_boxed, !assigned)
(* Translate an expression *)
(* Worklist of (label, params, body) still to be compiled; [transl]
   enqueues every function it meets inside a closure, and
   [transl_all_functions] drains the queue. *)
let functions = (Queue.create() : (string * Ident.t list * ulambda) Queue.t)
(* Translate a ulambda expression to Cmm.  Caml integers stay in tagged
   form (2n+1); primitives untag/retag explicitly where needed. *)
let rec transl = function
    Uvar id ->
      Cvar id
  | Uconst sc ->
      transl_constant sc
  | Uclosure(fundecls, []) ->
      (* No free variables: emit a statically allocated constant closure
         and reference its symbol; bodies are queued for later compilation. *)
      let lbl = new_const_symbol() in
      constant_closures := (lbl, fundecls) :: !constant_closures;
      List.iter
        (fun (label, arity, params, body) ->
          Queue.add (label, params, body) functions)
        fundecls;
      Cconst_symbol lbl
  | Uclosure(fundecls, clos_vars) ->
      (* Heap closure block: per function, 3 words for arity 1 (header or
         infix header, code pointer, tagged arity) or 4 words otherwise
         (curry stub, tagged arity, direct code pointer); the captured
         variables follow at the end. *)
      let block_size =
        fundecls_size fundecls + List.length clos_vars in
      let rec transl_fundecls pos = function
          [] ->
            List.map transl clos_vars
        | (label, arity, params, body) :: rem ->
            Queue.add (label, params, body) functions;
            let header =
              if pos = 0
              then alloc_closure_header block_size
              else alloc_infix_header pos in
            if arity = 1 then
              header ::
              Cconst_symbol label ::
              int_const 1 ::
              transl_fundecls (pos + 3) rem
            else
              header ::
              Cconst_symbol(curry_function arity) ::
              int_const arity ::
              Cconst_symbol label ::
              transl_fundecls (pos + 4) rem in
      Cop(Calloc, transl_fundecls 0 fundecls)
  | Uoffset(arg, offset) ->
      (* Infix pointer into the middle of a closure block. *)
      field_address (transl arg) offset
  | Udirect_apply(lbl, args, dbg) ->
      Cop(Capply(typ_addr, dbg), Cconst_symbol lbl :: List.map transl args)
  | Ugeneric_apply(clos, [arg], dbg) ->
      (* One argument: call straight through the closure's code pointer
         (field 0), passing the closure itself last. *)
      bind "fun" (transl clos) (fun clos ->
        Cop(Capply(typ_addr, dbg), [get_field clos 0; transl arg; clos]))
  | Ugeneric_apply(clos, args, dbg) ->
      (* N arguments: go through the caml_applyN helper. *)
      let arity = List.length args in
      let cargs = Cconst_symbol(apply_function arity) ::
        List.map transl (args @ [clos]) in
      Cop(Capply(typ_addr, dbg), cargs)
  | Usend(kind, met, obj, args, dbg) ->
      (* Method dispatch: resolve the method closure (by label, cache, or
         tag), then apply it to [obj] and the remaining arguments. *)
      let call_met obj args clos =
        if args = [] then
          Cop(Capply(typ_addr, dbg), [get_field clos 0;obj;clos])
        else
          let arity = List.length args + 1 in
          let cargs = Cconst_symbol(apply_function arity) :: obj ::
            (List.map transl args) @ [clos] in
          Cop(Capply(typ_addr, dbg), cargs)
      in
      bind "obj" (transl obj) (fun obj ->
        match kind, args with
          Self, _ ->
            bind "met" (lookup_label obj (transl met)) (call_met obj args)
        | Cached, cache :: pos :: args ->
            call_cached_method obj (transl met) (transl cache) (transl pos)
              (List.map transl args) dbg
        | _ ->
            bind "met" (lookup_tag obj (transl met)) (call_met obj args))
  | Ulet(id, exp, body) ->
      (* When the bound expression is known to produce a boxed number,
         try to keep the value unboxed inside [body]. *)
      begin match is_unboxed_number exp with
        No_unboxing ->
          Clet(id, transl exp, transl body)
      | Boxed_float ->
          transl_unbox_let box_float unbox_float transl_unbox_float
            id exp body
      | Boxed_integer bi ->
          transl_unbox_let (box_int bi) (unbox_int bi) (transl_unbox_int bi)
            id exp body
      end
  | Uletrec(bindings, body) ->
      transl_letrec bindings (transl body)
  (* Primitives *)
  | Uprim(prim, args, dbg) ->
      begin match (simplif_primitive prim, args) with
        (Pgetglobal id, []) ->
          Cconst_symbol (Ident.name id)
      | (Pmakeblock(tag, mut), []) ->
          transl_constant(Const_block(tag, []))
      | (Pmakeblock(tag, mut), args) ->
          make_alloc tag (List.map transl args)
      | (Pccall prim, args) ->
          (* External C call; "float" externals take and return unboxed
             doubles, so unbox arguments and rebox the result. *)
          if prim.prim_native_float then
            box_float
              (Cop(Cextcall(prim.prim_native_name, typ_float, false, dbg),
                   List.map transl_unbox_float args))
          else
            Cop(Cextcall(Primitive.native_name prim, typ_addr, prim.prim_alloc, dbg),
                List.map transl args)
      | (Pmakearray kind, []) ->
          transl_constant(Const_block(0, []))
      | (Pmakearray kind, args) ->
          begin match kind with
            Pgenarray ->
              (* Element kind unknown at compile time: let the runtime
                 choose between boxed and flat-float representation. *)
              Cop(Cextcall("caml_make_array", typ_addr, true, Debuginfo.none),
                  [make_alloc 0 (List.map transl args)])
          | Paddrarray | Pintarray ->
              make_alloc 0 (List.map transl args)
          | Pfloatarray ->
              make_float_alloc Obj.double_array_tag
                (List.map transl_unbox_float args)
          end
      | (Pbigarrayref(unsafe, num_dims, elt_kind, layout), arg1 :: argl) ->
          (* Read an element, then box/tag it according to its kind. *)
          let elt =
            bigarray_get unsafe elt_kind layout
              (transl arg1) (List.map transl argl) dbg in
          begin match elt_kind with
            Pbigarray_float32 | Pbigarray_float64 -> box_float elt
          | Pbigarray_complex32 | Pbigarray_complex64 -> elt
          | Pbigarray_int32 -> box_int Pint32 elt
          | Pbigarray_int64 -> box_int Pint64 elt
          | Pbigarray_native_int -> box_int Pnativeint elt
          | Pbigarray_caml_int -> force_tag_int elt
          | _ -> tag_int elt
          end
      | (Pbigarrayset(unsafe, num_dims, elt_kind, layout), arg1 :: argl) ->
          let (argidx, argnewval) = split_last argl in
          return_unit(bigarray_set unsafe elt_kind layout
            (transl arg1)
            (List.map transl argidx)
            (match elt_kind with
              Pbigarray_float32 | Pbigarray_float64 ->
                transl_unbox_float argnewval
            | Pbigarray_complex32 | Pbigarray_complex64 -> transl argnewval
            | Pbigarray_int32 -> transl_unbox_int Pint32 argnewval
            | Pbigarray_int64 -> transl_unbox_int Pint64 argnewval
            | Pbigarray_native_int -> transl_unbox_int Pnativeint argnewval
            | _ -> untag_int (transl argnewval))
            dbg)
      | (p, [arg]) ->
          transl_prim_1 p arg dbg
      | (p, [arg1; arg2]) ->
          transl_prim_2 p arg1 arg2 dbg
      | (p, [arg1; arg2; arg3]) ->
          transl_prim_3 p arg1 arg2 arg3 dbg
      | (_, _) ->
          fatal_error "Cmmgen.transl:prim"
      end
  (* Control structures *)
  | Uswitch(arg, s) ->
      (* As in the bytecode interpreter, only matching against constants
         can be checked *)
      if Array.length s.us_index_blocks = 0 then
        Cswitch
          (untag_int (transl arg),
           s.us_index_consts,
           Array.map transl s.us_actions_consts)
      else if Array.length s.us_index_consts = 0 then
        transl_switch (get_tag (transl arg))
          s.us_index_blocks s.us_actions_blocks
      else
        (* Mixed switch: the low bit is 1 for tagged immediates and 0 for
           pointers, so test it to pick the right sub-switch. *)
        bind "switch" (transl arg) (fun arg ->
          Cifthenelse(
            Cop(Cand, [arg; Cconst_int 1]),
            transl_switch
              (untag_int arg) s.us_index_consts s.us_actions_consts,
            transl_switch
              (get_tag arg) s.us_index_blocks s.us_actions_blocks))
  | Ustaticfail (nfail, args) ->
      Cexit (nfail, List.map transl args)
  | Ucatch(nfail, [], body, handler) ->
      make_catch nfail (transl body) (transl handler)
  | Ucatch(nfail, ids, body, handler) ->
      Ccatch(nfail, ids, transl body, transl handler)
  | Utrywith(body, exn, handler) ->
      Ctrywith(transl body, exn, transl handler)
  | Uifthenelse(Uprim(Pnot, [arg], _), ifso, ifnot) ->
      (* if (not c) then a else b  ==  if c then b else a *)
      transl (Uifthenelse(arg, ifnot, ifso))
  | Uifthenelse(cond, ifso, Ustaticfail (nfail, [])) ->
      exit_if_false cond (transl ifso) nfail
  | Uifthenelse(cond, Ustaticfail (nfail, []), ifnot) ->
      exit_if_true cond nfail (transl ifnot)
  | Uifthenelse(Uprim(Psequand, _, _) as cond, ifso, ifnot) ->
      (* Compile &&/|| conditions as control flow via static exits, so the
         boolean value is never materialized. *)
      let raise_num = next_raise_count () in
      make_catch
        raise_num
        (exit_if_false cond (transl ifso) raise_num)
        (transl ifnot)
  | Uifthenelse(Uprim(Psequor, _, _) as cond, ifso, ifnot) ->
      let raise_num = next_raise_count () in
      make_catch
        raise_num
        (exit_if_true cond raise_num (transl ifnot))
        (transl ifso)
  | Uifthenelse (Uifthenelse (cond, condso, condnot), ifso, ifnot) ->
      (* Conditional condition: share both continuations through catch
         handlers instead of duplicating their code. *)
      let num_true = next_raise_count () in
      make_catch
        num_true
        (make_catch2
           (fun shared_false ->
             Cifthenelse
               (test_bool (transl cond),
                exit_if_true condso num_true shared_false,
                exit_if_true condnot num_true shared_false))
           (transl ifnot))
        (transl ifso)
  | Uifthenelse(cond, ifso, ifnot) ->
      Cifthenelse(test_bool(transl cond), transl ifso, transl ifnot)
  | Usequence(exp1, exp2) ->
      Csequence(remove_unit(transl exp1), transl exp2)
  | Uwhile(cond, body) ->
      (* while: an infinite Cloop, left through a static exit when the
         condition becomes false. *)
      let raise_num = next_raise_count () in
      return_unit
        (Ccatch
           (raise_num, [],
            Cloop(exit_if_false cond (remove_unit(transl body)) raise_num),
            Ctuple []))
  | Ufor(id, low, high, dir, body) ->
      (* for: the tagged index steps by 2 (one unit).  [id_prev] keeps the
         pre-increment value so the equality test against [high] works even
         when [high] is the extreme value (no wrap-around miss). *)
      let tst = match dir with Upto -> Cgt | Downto -> Clt in
      let inc = match dir with Upto -> Caddi | Downto -> Csubi in
      let raise_num = next_raise_count () in
      let id_prev = Ident.rename id in
      return_unit
        (Clet
           (id, transl low,
            bind_nonvar "bound" (transl high) (fun high ->
              Ccatch
                (raise_num, [],
                 Cifthenelse
                   (Cop(Ccmpi tst, [Cvar id; high]), Cexit (raise_num, []),
                    Cloop
                      (Csequence
                         (remove_unit(transl body),
                          Clet(id_prev, Cvar id,
                            Csequence
                              (Cassign(id,
                                 Cop(inc, [Cvar id; Cconst_int 2])),
                               Cifthenelse
                                 (Cop(Ccmpi Ceq, [Cvar id_prev; high]),
                                  Cexit (raise_num,[]), Ctuple [])))))),
                 Ctuple []))))
  | Uassign(id, exp) ->
      return_unit(Cassign(id, transl exp))
(* Translate a unary primitive.  Fix: the [Pnot] arm had lost its
   expression (only the stripped comment "1 -> 3, 3 -> 1" remained);
   restored as 4 - x, which is the unique map swapping the tagged booleans
   false = Cconst_int 1 and true = Cconst_int 3 used throughout this file
   (see Psequand/Psequor in transl_prim_2). *)
and transl_prim_1 p arg dbg =
  match p with
  (* Generic operations *)
    Pidentity ->
      transl arg
  | Pignore ->
      return_unit(remove_unit (transl arg))
  (* Heap operations *)
  | Pfield n ->
      get_field (transl arg) n
  | Pfloatfield n ->
      (* Flat float record field: load an unboxed double at byte offset
         [n * size_float] and rebox it. *)
      let ptr = transl arg in
      box_float(
        Cop(Cload Double_u,
            [if n = 0 then ptr
             else Cop(Cadda, [ptr; Cconst_int(n * size_float)])]))
  (* Exceptions *)
  | Praise ->
      Cop(Craise dbg, [transl arg])
  (* Integer operations *)
  | Pnegint ->
      (* tagged(-n) = 2 - tagged(n), since tagged(n) = 2n+1. *)
      Cop(Csubi, [Cconst_int 2; transl arg])
  | Poffsetint n ->
      if no_overflow_lsl n then
        add_const (transl arg) (n lsl 1)
      else
        (* Shifted constant would overflow: fall back to a full addition. *)
        transl_prim_2 Paddint arg (Uconst (Const_base(Const_int n))) Debuginfo.none
  | Poffsetref n ->
      return_unit
        (bind "ref" (transl arg) (fun arg ->
          Cop(Cstore Word,
              [arg; add_const (Cop(Cload Word, [arg])) (n lsl 1)])))
  (* Floating-point operations *)
  | Pfloatofint ->
      box_float(Cop(Cfloatofint, [untag_int(transl arg)]))
  | Pintoffloat ->
      tag_int(Cop(Cintoffloat, [transl_unbox_float arg]))
  | Pnegfloat ->
      box_float(Cop(Cnegf, [transl_unbox_float arg]))
  | Pabsfloat ->
      box_float(Cop(Cabsf, [transl_unbox_float arg]))
  (* String operations *)
  | Pstringlength ->
      tag_int(string_length (transl arg))
  (* Array operations *)
  | Parraylength kind ->
      begin match kind with
        Pgenarray ->
          (* Length from the header; when word and float-array shifts
             differ, pick the shift according to the array's kind bit.
             The final [Cor 1] tags the length as a Caml int. *)
          let len =
            if wordsize_shift = numfloat_shift then
              Cop(Clsr, [header(transl arg); Cconst_int wordsize_shift])
            else
              bind "header" (header(transl arg)) (fun hdr ->
                Cifthenelse(is_addr_array_hdr hdr,
                            Cop(Clsr, [hdr; Cconst_int wordsize_shift]),
                            Cop(Clsr, [hdr; Cconst_int numfloat_shift]))) in
          Cop(Cor, [len; Cconst_int 1])
      | Paddrarray | Pintarray ->
          Cop(Cor, [addr_array_length(header(transl arg)); Cconst_int 1])
      | Pfloatarray ->
          Cop(Cor, [float_array_length(header(transl arg)); Cconst_int 1])
      end
  (* Boolean operations *)
  | Pnot ->
      Cop(Csubi, [Cconst_int 4; transl arg]) (* 1 -> 3, 3 -> 1 *)
  (* Test integer/block *)
  | Pisint ->
      (* Low bit is 1 for immediates, 0 for pointers. *)
      tag_int(Cop(Cand, [transl arg; Cconst_int 1]))
  (* Boxed integers *)
  | Pbintofint bi ->
      box_int bi (untag_int (transl arg))
  | Pintofbint bi ->
      force_tag_int (transl_unbox_int bi arg)
  | Pcvtbint(bi1, bi2) ->
      box_int bi2 (transl_unbox_int bi1 arg)
  | Pnegbint bi ->
      box_int bi (Cop(Csubi, [Cconst_int 0; transl_unbox_int bi arg]))
  | _ ->
      fatal_error "Cmmgen.transl_prim_1"
(* Translate a binary primitive.  Arithmetic on tagged ints follows the
   usual identities, e.g. (2a+1)+(2b+1)-1 = 2(a+b)+1 for addition. *)
and transl_prim_2 p arg1 arg2 dbg =
  match p with
  (* Heap operations *)
    Psetfield(n, ptr) ->
      (* Pointer stores go through caml_modify (generational write
         barrier); immediate stores can write directly. *)
      if ptr then
        return_unit(Cop(Cextcall("caml_modify", typ_void, false, Debuginfo.none),
                        [field_address (transl arg1) n; transl arg2]))
      else
        return_unit(set_field (transl arg1) n (transl arg2))
  | Psetfloatfield n ->
      (* Unboxed double store into a flat float record. *)
      let ptr = transl arg1 in
      return_unit(
        Cop(Cstore Double_u,
            [if n = 0 then ptr
             else Cop(Cadda, [ptr; Cconst_int(n * size_float)]);
             transl_unbox_float arg2]))
  (* Boolean operations; false = Cconst_int 1, true = Cconst_int 3 *)
  | Psequand ->
      Cifthenelse(test_bool(transl arg1), transl arg2, Cconst_int 1)
      (* Alternative via a let binding (kept for reference):
         let id = Ident.create "res1" in
         Clet(id, transl arg1,
              Cifthenelse(test_bool(Cvar id), transl arg2, Cvar id)) *)
  | Psequor ->
      Cifthenelse(test_bool(transl arg1), Cconst_int 3, transl arg2)
  (* Integer operations *)
  | Paddint ->
      decr_int(add_int (transl arg1) (transl arg2))
  | Psubint ->
      incr_int(sub_int (transl arg1) (transl arg2))
  | Pmulint ->
      (* (2a) * b + 1 = 2ab + 1 *)
      incr_int(Cop(Cmuli, [decr_int(transl arg1); untag_int(transl arg2)]))
  | Pdivint ->
      (* safe_divmod inserts the division-by-zero check. *)
      tag_int(safe_divmod Cdivi (untag_int(transl arg1)) (untag_int(transl arg2)) dbg)
  | Pmodint ->
      tag_int(safe_divmod Cmodi (untag_int(transl arg1)) (untag_int(transl arg2)) dbg)
  | Pandint ->
      Cop(Cand, [transl arg1; transl arg2])
  | Porint ->
      Cop(Cor, [transl arg1; transl arg2])
  | Pxorint ->
      (* xor cancels the two tag bits; re-set the low bit afterwards. *)
      Cop(Cor, [Cop(Cxor, [ignore_low_bit_int(transl arg1);
                           ignore_low_bit_int(transl arg2)]);
                Cconst_int 1])
  | Plslint ->
      incr_int(lsl_int (decr_int(transl arg1)) (untag_int(transl arg2)))
  | Plsrint ->
      Cop(Cor, [Cop(Clsr, [transl arg1; untag_int(transl arg2)]);
                Cconst_int 1])
  | Pasrint ->
      Cop(Cor, [Cop(Casr, [transl arg1; untag_int(transl arg2)]);
                Cconst_int 1])
  | Pintcomp cmp ->
      (* Tagged operands compare consistently, so no untagging needed. *)
      tag_int(Cop(Ccmpi(transl_comparison cmp), [transl arg1; transl arg2]))
  | Pisout ->
      transl_isout (transl arg1) (transl arg2)
  (* Float operations *)
  | Paddfloat ->
      box_float(Cop(Caddf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Psubfloat ->
      box_float(Cop(Csubf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Pmulfloat ->
      box_float(Cop(Cmulf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Pdivfloat ->
      box_float(Cop(Cdivf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Pfloatcomp cmp ->
      tag_int(Cop(Ccmpf(transl_comparison cmp),
                  [transl_unbox_float arg1; transl_unbox_float arg2]))
  (* String operations *)
  | Pstringrefu ->
      tag_int(Cop(Cload Byte_unsigned,
                  [add_int (transl arg1) (untag_int(transl arg2))]))
  | Pstringrefs ->
      (* Checked access: bound test before the byte load. *)
      tag_int
        (bind "str" (transl arg1) (fun str ->
          bind "index" (untag_int (transl arg2)) (fun idx ->
            Csequence(
              Cop(Ccheckbound dbg, [string_length str; idx]),
              Cop(Cload Byte_unsigned, [add_int str idx])))))
  (* Array operations *)
  | Parrayrefu kind ->
      begin match kind with
        Pgenarray ->
          (* Unknown kind: dispatch at run time on the array's tag. *)
          bind "arr" (transl arg1) (fun arr ->
            bind "index" (transl arg2) (fun idx ->
              Cifthenelse(is_addr_array_ptr arr,
                          addr_array_ref arr idx,
                          float_array_ref arr idx)))
      | Paddrarray | Pintarray ->
          addr_array_ref (transl arg1) (transl arg2)
      | Pfloatarray ->
          float_array_ref (transl arg1) (transl arg2)
      end
  | Parrayrefs kind ->
      begin match kind with
        Pgenarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              bind "header" (header arr) (fun hdr ->
                Cifthenelse(is_addr_array_hdr hdr,
                  Csequence(Cop(Ccheckbound dbg, [addr_array_length hdr; idx]),
                            addr_array_ref arr idx),
                  Csequence(Cop(Ccheckbound dbg, [float_array_length hdr; idx]),
                            float_array_ref arr idx)))))
      | Paddrarray | Pintarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [addr_array_length(header arr); idx]),
                        addr_array_ref arr idx)))
      | Pfloatarray ->
          box_float(
            bind "index" (transl arg2) (fun idx ->
              bind "arr" (transl arg1) (fun arr ->
                Csequence(Cop(Ccheckbound dbg,
                              [float_array_length(header arr); idx]),
                          unboxed_float_array_ref arr idx))))
      end
  (* Operations on bitvects *)
  | Pbittest ->
      (* Load byte idx/8, shift right by idx mod 8, keep bit 0. *)
      bind "index" (untag_int(transl arg2)) (fun idx ->
        tag_int(
          Cop(Cand, [Cop(Clsr, [Cop(Cload Byte_unsigned,
                                    [add_int (transl arg1)
                                       (Cop(Clsr, [idx; Cconst_int 3]))]);
                                Cop(Cand, [idx; Cconst_int 7])]);
                     Cconst_int 1])))
  (* Boxed integers *)
  | Paddbint bi ->
      box_int bi (Cop(Caddi,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Psubbint bi ->
      box_int bi (Cop(Csubi,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Pmulbint bi ->
      box_int bi (Cop(Cmuli,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Pdivbint bi ->
      box_int bi (safe_divmod Cdivi
                      (transl_unbox_int bi arg1) (transl_unbox_int bi arg2)
                      dbg)
  | Pmodbint bi ->
      box_int bi (safe_divmod Cmodi
                      (transl_unbox_int bi arg1) (transl_unbox_int bi arg2)
                      dbg)
  | Pandbint bi ->
      box_int bi (Cop(Cand,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Porbint bi ->
      box_int bi (Cop(Cor,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Pxorbint bi ->
      box_int bi (Cop(Cxor,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Plslbint bi ->
      box_int bi (Cop(Clsl,
                      [transl_unbox_int bi arg1; untag_int(transl arg2)]))
  | Plsrbint bi ->
      (* Logical shift: zero-extend narrow boxed ints first. *)
      box_int bi (Cop(Clsr,
                      [make_unsigned_int bi (transl_unbox_int bi arg1);
                       untag_int(transl arg2)]))
  | Pasrbint bi ->
      box_int bi (Cop(Casr,
                      [transl_unbox_int bi arg1; untag_int(transl arg2)]))
  | Pbintcomp(bi, cmp) ->
      tag_int (Cop(Ccmpi(transl_comparison cmp),
                   [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | _ ->
      fatal_error "Cmmgen.transl_prim_2"
(* Translate a ternary primitive: string and array stores, checked and
   unchecked, dispatching on the array kind like the read cases above. *)
and transl_prim_3 p arg1 arg2 arg3 dbg =
  match p with
  (* String operations *)
    Pstringsetu ->
      return_unit(Cop(Cstore Byte_unsigned,
                      [add_int (transl arg1) (untag_int(transl arg2));
                       untag_int(transl arg3)]))
  | Pstringsets ->
      return_unit
        (bind "str" (transl arg1) (fun str ->
          bind "index" (untag_int (transl arg2)) (fun idx ->
            Csequence(
              Cop(Ccheckbound dbg, [string_length str; idx]),
              Cop(Cstore Byte_unsigned,
                  [add_int str idx; untag_int(transl arg3)])))))
  (* Array operations *)
  | Parraysetu kind ->
      return_unit(begin match kind with
        Pgenarray ->
          (* Unknown kind: dispatch at run time on the array's tag and
             unbox the new value on the float branch. *)
          bind "newval" (transl arg3) (fun newval ->
            bind "index" (transl arg2) (fun index ->
              bind "arr" (transl arg1) (fun arr ->
                Cifthenelse(is_addr_array_ptr arr,
                            addr_array_set arr index newval,
                            float_array_set arr index (unbox_float newval)))))
      | Paddrarray ->
          addr_array_set (transl arg1) (transl arg2) (transl arg3)
      | Pintarray ->
          int_array_set (transl arg1) (transl arg2) (transl arg3)
      | Pfloatarray ->
          float_array_set (transl arg1) (transl arg2) (transl_unbox_float arg3)
      end)
  | Parraysets kind ->
      return_unit(begin match kind with
        Pgenarray ->
          bind "newval" (transl arg3) (fun newval ->
            bind "index" (transl arg2) (fun idx ->
              bind "arr" (transl arg1) (fun arr ->
                bind "header" (header arr) (fun hdr ->
                  Cifthenelse(is_addr_array_hdr hdr,
                    Csequence(Cop(Ccheckbound dbg, [addr_array_length hdr; idx]),
                              addr_array_set arr idx newval),
                    Csequence(Cop(Ccheckbound dbg, [float_array_length hdr; idx]),
                              float_array_set arr idx
                                              (unbox_float newval)))))))
      | Paddrarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [addr_array_length(header arr); idx]),
                        addr_array_set arr idx (transl arg3))))
      | Pintarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [addr_array_length(header arr); idx]),
                        int_array_set arr idx (transl arg3))))
      | Pfloatarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [float_array_length(header arr);idx]),
                        float_array_set arr idx (transl_unbox_float arg3))))
      end)
  | _ ->
    fatal_error "Cmmgen.transl_prim_3"
(* Translate an expression to an unboxed double; a float literal needs no
   box to begin with. *)
and transl_unbox_float exp =
  match exp with
  | Uconst (Const_base (Const_float f)) -> Cconst_float f
  | other -> unbox_float (transl other)
(* Translate an expression to an unboxed [bi] integer; constants of the
   right kind become native-word literals directly. *)
and transl_unbox_int bi = function
    Uconst(Const_base(Const_int32 n)) ->
      Cconst_natint (Nativeint.of_int32 n)
  | Uconst(Const_base(Const_nativeint n)) ->
      Cconst_natint n
  | Uconst(Const_base(Const_int64 n)) ->
      (* Only reachable on 64-bit hosts, where an int64 fits a word. *)
      assert (size_int = 8); Cconst_natint (Int64.to_nativeint n)
  | Uprim(Pbintofint bi', [Uconst(Const_base(Const_int i))], _) when bi = bi' ->
      (* Boxing of an int literal into the same kind: use it unboxed. *)
      Cconst_int i
  | exp -> unbox_int bi (transl exp)
(* Translate [let id = exp in body] when [exp] yields a boxed number:
   bind the unboxed value and rewrite [body] to use it directly.  Fall back
   to the plain boxed let when the variable is both needed in boxed form
   and assigned (re-boxing on every assignment would not pay off). *)
and transl_unbox_let box_fn unbox_fn transl_unbox_fn id exp body =
  let unboxed_id = Ident.create (Ident.name id) in
  let trbody1 = transl body in
  let (trbody2, need_boxed, is_assigned) =
    subst_boxed_number unbox_fn id unboxed_id trbody1 in
  if need_boxed && is_assigned then
    Clet(id, transl exp, trbody1)
  else
    Clet(unboxed_id, transl_unbox_fn exp,
         (* Re-box once up front only if some use still needs the box. *)
         if need_boxed
         then Clet(id, box_fn(Cvar unboxed_id), trbody2)
         else trbody2)
(* Wrap [body] in a catch for exit [ncatch]; when the body is nothing but
   a jump to that very handler, drop the catch frame entirely. *)
and make_catch ncatch body handler =
  match body with
  | Cexit (n, []) when n = ncatch -> handler
  | b -> Ccatch (ncatch, [], b, handler)
(* Share [handler] between the several uses [mk_body] makes of it.  Small
   handlers (an exit, a constant, unit) are cheap to duplicate inline;
   anything larger gets a fresh static exit and a catch frame. *)
and make_catch2 mk_body handler = match handler with
| Cexit (_,[])|Ctuple []|Cconst_int _|Cconst_pointer _ ->
    mk_body handler
| _ ->
    let nfail = next_raise_count () in
    make_catch
      nfail
      (mk_body (Cexit (nfail,[])))
      handler
(* Compile [cond] so that control jumps to static exit [nfail] when the
   condition is true and falls through to [otherwise] when it is false.
   Const_pointer 0/1 are lambda's false/true literals; &&, || and not are
   turned into pure control flow, never materializing a boolean. *)
and exit_if_true cond nfail otherwise =
  match cond with
  | Uconst (Const_pointer 0) -> otherwise
  | Uconst (Const_pointer 1) -> Cexit (nfail,[])
  | Uprim(Psequor, [arg1; arg2], _) ->
      (* a || b: either branch being true exits. *)
      exit_if_true arg1 nfail (exit_if_true arg2 nfail otherwise)
  | Uprim(Psequand, _, _) ->
      (* a && b: dualize through exit_if_false, reusing [otherwise]'s exit
         when it already is one. *)
      begin match otherwise with
      | Cexit (raise_num,[]) ->
          exit_if_false cond (Cexit (nfail,[])) raise_num
      | _ ->
          let raise_num = next_raise_count () in
          make_catch
            raise_num
            (exit_if_false cond (Cexit (nfail,[])) raise_num)
            otherwise
      end
  | Uprim(Pnot, [arg], _) ->
      exit_if_false arg otherwise nfail
  | Uifthenelse (cond, ifso, ifnot) ->
      (* Share [otherwise] between the two arms via make_catch2. *)
      make_catch2
        (fun shared ->
          Cifthenelse
            (test_bool (transl cond),
             exit_if_true ifso nfail shared,
             exit_if_true ifnot nfail shared))
        otherwise
  | _ ->
      Cifthenelse(test_bool(transl cond), Cexit (nfail, []), otherwise)
(* Dual of [exit_if_true]: jump to static exit [nfail] when [cond] is
   false, fall through to [otherwise] when it is true.  Note the flipped
   argument order (otherwise before nfail) relative to exit_if_true. *)
and exit_if_false cond otherwise nfail =
  match cond with
  | Uconst (Const_pointer 0) -> Cexit (nfail,[])
  | Uconst (Const_pointer 1) -> otherwise
  | Uprim(Psequand, [arg1; arg2], _) ->
      (* a && b: either branch being false exits. *)
      exit_if_false arg1 (exit_if_false arg2 otherwise nfail) nfail
  | Uprim(Psequor, _, _) ->
      begin match otherwise with
      | Cexit (raise_num,[]) ->
          exit_if_true cond raise_num (Cexit (nfail,[]))
      | _ ->
          let raise_num = next_raise_count () in
          make_catch
            raise_num
            (exit_if_true cond raise_num (Cexit (nfail,[])))
            otherwise
      end
  | Uprim(Pnot, [arg], _) ->
      exit_if_true arg nfail otherwise
  | Uifthenelse (cond, ifso, ifnot) ->
      make_catch2
        (fun shared ->
          Cifthenelse
            (test_bool (transl cond),
             exit_if_false ifso shared nfail,
             exit_if_false ifnot shared nfail))
        otherwise
  | _ ->
      Cifthenelse(test_bool(transl cond), otherwise, Cexit (nfail, []))
(* Compile a switch given [index] (case number -> action number) and the
   action bodies.  Runs of consecutive case numbers mapping to the same
   action are compressed into (low, high, action) intervals, scanned right
   to left, then handed to the interval-switch compiler. *)
and transl_switch arg index cases = match Array.length cases with
| 0 -> fatal_error "Cmmgen.transl_switch"
| 1 -> transl cases.(0)
| _ ->
    let n_index = Array.length index in
    let actions = Array.map transl cases in

    let inters = ref []
    and this_high = ref (n_index-1)
    and this_low = ref (n_index-1)
    and this_act = ref index.(n_index-1) in
    for i = n_index-2 downto 0 do
      let act = index.(i) in
      if act = !this_act then
        (* Same action as the interval under construction: extend it. *)
        decr this_low
      else begin
        inters := (!this_low, !this_high, !this_act) :: !inters ;
        this_high := i ;
        this_low := i ;
        this_act := act
      end
    done ;
    inters := (0, !this_high, !this_act) :: !inters ;
    bind "switcher" arg
      (fun a ->
        SwitcherBlocks.zyva
          (0,n_index-1)
          (fun i -> Cconst_int i)
          a
          (Array.of_list !inters) actions)
(* Compile [let rec] over possibly-cyclic blocks in three passes:
   1. bind each block-sized RHS to a dummy block (caml_alloc_dummy) and
      each non-block RHS to a placeholder 0;
   2. evaluate the non-recursive RHSs;
   3. evaluate the block RHSs and patch the dummies (caml_update_dummy),
      so back-references created in pass 1 become valid. *)
and transl_letrec bindings cont =
  let bsz = List.map (fun (id, exp) -> (id, exp, expr_size exp)) bindings in
  let rec init_blocks = function
    | [] -> fill_nonrec bsz
    | (id, exp, RHS_block sz) :: rem ->
        Clet(id, Cop(Cextcall("caml_alloc_dummy", typ_addr, true, Debuginfo.none),
                     [int_const sz]),
             init_blocks rem)
    | (id, exp, RHS_nonrec) :: rem ->
        Clet (id, Cconst_int 0, init_blocks rem)
  and fill_nonrec = function
    | [] -> fill_blocks bsz
    | (id, exp, RHS_block sz) :: rem -> fill_nonrec rem
    | (id, exp, RHS_nonrec) :: rem ->
        Clet (id, transl exp, fill_nonrec rem)
  and fill_blocks = function
    | [] -> cont
    | (id, exp, RHS_block _) :: rem ->
        Csequence(Cop(Cextcall("caml_update_dummy", typ_void, false, Debuginfo.none),
                      [Cvar id; transl exp]),
                  fill_blocks rem)
    | (id, exp, RHS_nonrec) :: rem ->
        fill_blocks rem
  in init_blocks bsz
(* Translate a function definition *)
(* Compile one queued function into a Cmm function definition; all
   parameters are passed as generic values (typ_addr). *)
let transl_function lbl params body =
  Cfunction {fun_name = lbl;
             fun_args = List.map (fun id -> (id, typ_addr)) params;
             fun_body = transl body;
             (* fun_fast controls register allocation effort downstream. *)
             fun_fast = !Clflags.optimize_for_speed}
(* Translate all function definitions *)
(* Set of function labels already compiled, used to dedupe the worklist
   (the same function may be enqueued from several closures). *)
module StringSet =
  Set.Make(struct
    type t = string
    let compare = compare
  end)
(* Drain the [functions] worklist, prepending each compiled function to
   [cont].  Translating a body may enqueue further functions (nested
   closures), which is why we loop until [Queue.take] raises Empty rather
   than iterating over a snapshot. *)
let rec transl_all_functions already_translated cont =
  try
    let (lbl, params, body) = Queue.take functions in
    if StringSet.mem lbl already_translated then
      transl_all_functions already_translated cont
    else begin
      transl_all_functions (StringSet.add lbl already_translated)
        (transl_function lbl params body :: cont)
    end
  with Queue.Empty ->
    cont
(* Emit structured constants *)
(* Label cache for immutable string constants, so identical
   [Const_immstring] payloads are emitted only once. *)
let immstrings = Hashtbl.create 17
(* Emit a top-level structured constant under symbol [symb] as static data
   items prepended to [cont]: runtime header word first, then the symbol
   definition, then the payload. *)
let rec emit_constant symb cst cont =
  match cst with
    Const_base(Const_float s) ->
      Cint(float_header) :: Cdefine_symbol symb :: Cdouble s :: cont
  | Const_base(Const_string s) | Const_immstring s ->
      Cint(string_header (String.length s)) ::
      Cdefine_symbol symb ::
      emit_string_constant s cont
  | Const_base(Const_int32 n) ->
      Cint(boxedint32_header) :: Cdefine_symbol symb ::
      emit_boxed_int32_constant n cont
  | Const_base(Const_int64 n) ->
      Cint(boxedint64_header) :: Cdefine_symbol symb ::
      emit_boxed_int64_constant n cont
  | Const_base(Const_nativeint n) ->
      Cint(boxedintnat_header) :: Cdefine_symbol symb ::
      emit_boxed_nativeint_constant n cont
  | Const_block(tag, fields) ->
      (* Out-of-line field data is accumulated in cont1, after the block. *)
      let (emit_fields, cont1) = emit_constant_fields fields cont in
      Cint(block_header tag (List.length fields)) ::
      Cdefine_symbol symb ::
      emit_fields @ cont1
  | Const_float_array(fields) ->
      Cint(floatarray_header (List.length fields)) ::
      Cdefine_symbol symb ::
      Misc.map_end (fun f -> Cdouble f) fields cont
  | _ -> fatal_error "gencmm.emit_constant"
(* Emit every field of a constant block, threading the continuation of
   out-of-line data left to right; returns the in-block words plus the
   extended continuation. *)
and emit_constant_fields fields cont =
  match fields with
  | [] -> ([], cont)
  | field :: rest ->
      let (word, cont_after_field) = emit_constant_field field cont in
      let (words, cont_final) = emit_constant_fields rest cont_after_field in
      (word :: words, cont_final)
(* Emit one field of a constant block.  Immediates (ints, chars, constant
   constructors) are stored tagged (2n+1) directly in the field; anything
   boxed is emitted out of line under a fresh label, and the field holds
   that label's address. *)
and emit_constant_field field cont =
  match field with
    Const_base(Const_int n) ->
      (Cint(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n),
       cont)
  | Const_base(Const_char c) ->
      (Cint(Nativeint.of_int(((Char.code c) lsl 1) + 1)), cont)
  | Const_base(Const_float s) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(float_header) :: Cdefine_label lbl :: Cdouble s :: cont)
  | Const_base(Const_string s) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(string_header (String.length s)) :: Cdefine_label lbl ::
       emit_string_constant s cont)
  | Const_immstring s ->
      (* Immutable strings are shared through the [immstrings] cache. *)
      begin try
        (Clabel_address (Hashtbl.find immstrings s), cont)
      with Not_found ->
        let lbl = new_const_label() in
        Hashtbl.add immstrings s lbl;
        (Clabel_address lbl,
         Cint(string_header (String.length s)) :: Cdefine_label lbl ::
         emit_string_constant s cont)
      end
  | Const_base(Const_int32 n) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(boxedint32_header) :: Cdefine_label lbl ::
       emit_boxed_int32_constant n cont)
  | Const_base(Const_int64 n) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(boxedint64_header) :: Cdefine_label lbl ::
       emit_boxed_int64_constant n cont)
  | Const_base(Const_nativeint n) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(boxedintnat_header) :: Cdefine_label lbl ::
       emit_boxed_nativeint_constant n cont)
  | Const_pointer n ->
      (Cint(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n),
       cont)
  | Const_block(tag, fields) ->
      let lbl = new_const_label() in
      let (emit_fields, cont1) = emit_constant_fields fields cont in
      (Clabel_address lbl,
       Cint(block_header tag (List.length fields)) :: Cdefine_label lbl ::
       emit_fields @ cont1)
  | Const_float_array(fields) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(floatarray_header (List.length fields)) :: Cdefine_label lbl ::
       Misc.map_end (fun f -> Cdouble f) fields cont)
(* Emit a string body padded to a whole number of words: [n] zero bytes
   followed by one byte holding [n], so len + n + 1 is a multiple of
   size_int and the true length is recoverable from the last byte. *)
and emit_string_constant s cont =
  let n = size_int - 1 - (String.length s) mod size_int in
  Cstring s :: Cskip n :: Cint8 n :: cont
(* Boxed int32: custom-operations pointer, then the 32-bit payload; on
   64-bit hosts a zero half-word pads the payload out to a full word. *)
and emit_boxed_int32_constant n cont =
  let n = Nativeint.of_int32 n in
  if size_int = 8 then
    Csymbol_address("caml_int32_ops") :: Cint32 n :: Cint32 0n :: cont
  else
    Csymbol_address("caml_int32_ops") :: Cint n :: cont
(* Boxed nativeint: custom-operations pointer plus one payload word. *)
and emit_boxed_nativeint_constant n cont =
  Csymbol_address("caml_nativeint_ops") :: Cint n :: cont
(* Boxed int64: one payload word on 64-bit hosts; on 32-bit hosts two
   words whose order follows the target's endianness. *)
and emit_boxed_int64_constant n cont =
  let lo = Int64.to_nativeint n in
  if size_int = 8 then
    Csymbol_address("caml_int64_ops") :: Cint lo :: cont
  else begin
    let hi = Int64.to_nativeint (Int64.shift_right n 32) in
    if big_endian then
      Csymbol_address("caml_int64_ops") :: Cint hi :: Cint lo :: cont
    else
      Csymbol_address("caml_int64_ops") :: Cint lo :: Cint hi :: cont
  end
(* Emit constant closures *)
(* Emit a statically allocated closure block, mirroring the run-time
   layout built by [transl] for Uclosure: per function, 3 words for
   arity 1 (code pointer, tagged arity Cint 3n = 2*1+1) or 4 words
   otherwise (curry stub, tagged arity, direct code pointer); functions
   after the first are preceded by an infix header. *)
let emit_constant_closure symb fundecls cont =
  match fundecls with
    [] -> assert false
  | (label, arity, params, body) :: remainder ->
      let rec emit_others pos = function
        [] -> cont
      | (label, arity, params, body) :: rem ->
          if arity = 1 then
            Cint(infix_header pos) ::
            Csymbol_address label ::
            Cint 3n ::
            emit_others (pos + 3) rem
          else
            Cint(infix_header pos) ::
            Csymbol_address(curry_function arity) ::
            Cint(Nativeint.of_int (arity lsl 1 + 1)) ::
            Csymbol_address label ::
            emit_others (pos + 4) rem in
      Cint(closure_header (fundecls_size fundecls)) ::
      Cdefine_symbol symb ::
      if arity = 1 then
        Csymbol_address label ::
        Cint 3n ::
        emit_others 3 remainder
      else
        Csymbol_address(curry_function arity) ::
        Cint(Nativeint.of_int (arity lsl 1 + 1)) ::
        Csymbol_address label ::
        emit_others 4 remainder
(* Emit all structured constants *)
(* Flush the accumulated structured constants and constant closures as
   Cdata items prepended to [cont], emptying both accumulators for the
   next compilation unit. *)
let emit_all_constants cont =
  let c = ref cont in
  List.iter
    (fun (lbl, cst) -> c := Cdata(emit_constant lbl cst []) :: !c)
    !structured_constants;
  structured_constants := [];
  (* PR#3979 *)
  List.iter
    (fun (symb, fundecls) ->
        c := Cdata(emit_constant_closure symb fundecls []) :: !c)
    !constant_closures;
  constant_closures := [];
  !c
(* Translate a compilation unit *)
(* Translate a whole compilation unit: a global block of [size] fields
   (zero-initialized here via Cskip, filled by the entry function at run
   time), the entry function compiled from [ulam], every queued function
   body, and all accumulated constants. *)
let compunit size ulam =
  let glob = Compilenv.make_symbol None in
  let init_code = transl ulam in
  let c1 = [Cfunction {fun_name = Compilenv.make_symbol (Some "entry");
                       fun_args = [];
                       fun_body = init_code; fun_fast = false}] in
  (* Must run after [transl ulam]: both drain state that call created. *)
  let c2 = transl_all_functions StringSet.empty c1 in
  let c3 = emit_all_constants c2 in
  Cdata [Cint(block_header 0 size);
         Cglobal_symbol glob;
         Cdefine_symbol glob;
         Cskip(size * size_addr)] :: c3
(* The Cmm below is the compiled-inline equivalent of this runtime routine:

CAMLprim value caml_cache_public_method (value meths, value tag, value *cache)
{
  int li = 3, hi = Field(meths,0), mi;
  while (li < hi) { // no need to check the 1st time
    mi = ((li+hi) >> 1) | 1;
    if (tag < Field(meths,mi)) hi = mi-2;
    else li = mi;
  }
  *cache = (li-3)*sizeof(value)+1;
  return Field (meths, li-1);
}
*)
(* Generate Cmm for the binary search over the sorted method table
   [meths] looking for [tag]; mirrors the C reference implementation in
   the comment above.  On exit, the byte offset of the found slot
   (tagged as an OCaml int) is stored into [cache] and returned, so the
   next send with the same tag can skip the search. *)
let cache_public_method meths tag cache =
  let raise_num = next_raise_count () in
  let li = Ident.create "li" and hi = Ident.create "hi"
  and mi = Ident.create "mi" and tagged = Ident.create "tagged" in
  Clet (
  li, Cconst_int 3,
  Clet (
  hi, Cop(Cload Word, [meths]),
  Csequence(
  Ccatch
    (raise_num, [],
     Cloop
       (Clet(
        mi,
        (* mi = ((li + hi) >> 1) | 1 : midpoint forced to an odd index *)
        Cop(Cor,
            [Cop(Clsr, [Cop(Caddi, [Cvar li; Cvar hi]); Cconst_int 1]);
             Cconst_int 1]),
        Csequence(
        Cifthenelse
          (Cop (Ccmpi Clt,
                [tag;
                 Cop(Cload Word,
                     [Cop(Cadda,
                          [meths; lsl_const (Cvar mi) log2_size_addr])])]),
           Cassign(hi, Cop(Csubi, [Cvar mi; Cconst_int 2])),
           Cassign(li, Cvar mi)),
        (* loop exits (via Cexit) once li >= hi *)
        Cifthenelse
          (Cop(Ccmpi Cge, [Cvar li; Cvar hi]), Cexit (raise_num, []),
           Ctuple [])))),
     Ctuple []),
  Clet (
  tagged,
  (* (li - 3) * size_addr + 1 : byte offset encoded as a tagged int *)
  Cop(Cadda, [lsl_const (Cvar li) log2_size_addr;
              Cconst_int(1 - 3 * size_addr)]),
  Csequence(Cop (Cstore Word, [cache; Cvar tagged]),
            Cvar tagged)))))
(* Generate an application function:
     (defun caml_applyN (a1 ... aN clos)
       (if (= clos.arity N)
         (app clos.direct a1 ... aN clos)
         (let (clos1 (app clos.code a1 clos)
               clos2 (app clos1.code a2 clos)
               ...
               closN-1 (app closN-2.code aN-1 closN-2))
           (app closN-1.code aN closN-1))))
*)
(* Build the shared body of caml_applyN / caml_sendN for [arity]
   arguments: if the closure's recorded arity matches, jump straight to
   its direct entry point (field 2); otherwise apply the one-argument
   entry point (field 0) once per argument, threading the intermediate
   closures.  Returns (argument idents, closure ident, body). *)
let apply_function_body arity =
  let arg = Array.create arity (Ident.create "arg") in
  (* slot 0 already holds a fresh ident from the Array.create call *)
  for i = 1 to arity - 1 do arg.(i) <- Ident.create "arg" done;
  let clos = Ident.create "clos" in
  (* one-at-a-time application chain for the mismatched-arity case *)
  let rec app_fun clos n =
    if n = arity-1 then
      Cop(Capply(typ_addr, Debuginfo.none),
          [get_field (Cvar clos) 0; Cvar arg.(n); Cvar clos])
    else begin
      let newclos = Ident.create "clos" in
      Clet(newclos,
           Cop(Capply(typ_addr, Debuginfo.none),
               [get_field (Cvar clos) 0; Cvar arg.(n); Cvar clos]),
           app_fun newclos (n+1))
    end in
  let args = Array.to_list arg in
  let all_args = args @ [clos] in
  (args, clos,
   if arity = 1 then app_fun clos 0 else
   Cifthenelse(
   (* fast path: closure arity (field 1) equals the call arity *)
   Cop(Ccmpi Ceq, [get_field (Cvar clos) 1; int_const arity]),
   Cop(Capply(typ_addr, Debuginfo.none),
       get_field (Cvar clos) 2 :: List.map (fun s -> Cvar s) all_args),
   app_fun clos 0))
(* Build caml_sendN: look the method up in the receiver's method table
   (consulting, and on miss refilling, the per-call-site [cache] slot),
   then apply it to the receiver and [arity] further arguments via the
   generic application body. *)
let send_function arity =
  let (args, clos', body) = apply_function_body (1+arity) in
  let cache = Ident.create "cache"
  and obj = List.hd args
  and tag = Ident.create "tag" in
  (* [clos] computes the method (a closure) from obj/tag/cache *)
  let clos =
    let cache = Cvar cache and obj = Cvar obj and tag = Cvar tag in
    let meths = Ident.create "meths" and cached = Ident.create "cached" in
    let real = Ident.create "real" in
    (* field 1 of the method table masks the cached offset *)
    let mask = get_field (Cvar meths) 1 in
    let cached_pos = Cvar cached in
    let tag_pos = Cop(Cadda, [Cop (Cadda, [cached_pos; Cvar meths]);
                              Cconst_int(3*size_addr-1)]) in
    let tag' = Cop(Cload Word, [tag_pos]) in
    Clet (
    meths, Cop(Cload Word, [obj]),
    Clet (
    cached, Cop(Cand, [Cop(Cload Word, [cache]); mask]),
    Clet (
    real,
    (* cache hit iff the tag stored next to the cached slot matches *)
    Cifthenelse(Cop(Ccmpa Cne, [tag'; tag]),
                cache_public_method (Cvar meths) tag cache,
                cached_pos),
    Cop(Cload Word, [Cop(Cadda, [Cop (Cadda, [Cvar real; Cvar meths]);
                                 Cconst_int(2*size_addr-1)])]))))
  in
  let body = Clet(clos', clos, body) in
  let fun_args =
    [obj, typ_addr; tag, typ_int; cache, typ_addr]
    @ List.map (fun id -> (id, typ_addr)) (List.tl args) in
  Cfunction
   {fun_name = "caml_send" ^ string_of_int arity;
    fun_args = fun_args;
    fun_body = body;
    fun_fast = true}
(* Build the generic application function [caml_applyN] for the given
   [arity]; the body is shared with caml_sendN via apply_function_body. *)
let apply_function arity =
  let (params, closure, code) = apply_function_body arity in
  let typed_args =
    List.map (fun id -> (id, typ_addr)) (params @ [closure]) in
  Cfunction
    {fun_name = "caml_apply" ^ string_of_int arity;
     fun_args = typed_args;
     fun_body = code;
     fun_fast = true}
(* Generate tuplifying functions:
     (defun caml_tuplifyN (arg clos)
       (app clos.direct #0(arg) ... #N-1(arg) clos))
*)
(* Build [caml_tuplifyN]: fetch the [arity] components of the tuple
   [arg] and pass them, followed by the closure itself, to the direct
   entry point of the closure (field 2). *)
let tuplify_function arity =
  let arg = Ident.create "arg" in
  let clos = Ident.create "clos" in
  (* components i, i+1, ..., arity-1 of the tuple, then the closure *)
  let rec components i =
    if i >= arity then [Cvar clos]
    else get_field (Cvar arg) i :: components (i + 1) in
  Cfunction
    {fun_name = "caml_tuplify" ^ string_of_int arity;
     fun_args = [arg, typ_addr; clos, typ_addr];
     fun_body =
       Cop(Capply(typ_addr, Debuginfo.none),
           get_field (Cvar clos) 2 :: components 0);
     fun_fast = true}
(* Generate currying functions:
      (defun caml_curryN (arg clos)
         (alloc HDR caml_curryN_1 arg clos))
      (defun caml_curryN_1 (arg clos)
         (alloc HDR caml_curryN_2 arg clos))
      ...
      (defun caml_curryN_N-1 (arg clos)
         (let (closN-2 clos.cdr
               closN-3 closN-2.cdr
               ...
               clos1 clos2.cdr
               clos clos1.cdr)
           (app clos.direct
                clos1.car clos2.car ... closN-2.car clos.car arg clos)))
*)
(* Build the last currying step caml_curryN_{N-1}: walk the chain of
   partial-application closures (each stores its argument in field 2
   and its predecessor in field 3), collect all previously supplied
   arguments, and finally call the original closure's direct entry
   point with every argument plus the last one. *)
let final_curry_function arity =
  let last_arg = Ident.create "arg" in
  let last_clos = Ident.create "clos" in
  let rec curry_fun args clos n =
    if n = 0 then
      Cop(Capply(typ_addr, Debuginfo.none),
          get_field (Cvar clos) 2 ::
          args @ [Cvar last_arg; Cvar clos])
    else begin
      let newclos = Ident.create "clos" in
      Clet(newclos,
           get_field (Cvar clos) 3,
           curry_fun (get_field (Cvar clos) 2 :: args) newclos (n-1))
    end in
  Cfunction
   {fun_name = "caml_curry" ^ string_of_int arity ^
               "_" ^ string_of_int (arity-1);
    fun_args = [last_arg, typ_addr; last_clos, typ_addr];
    fun_body = curry_fun [] last_clos (arity-1);
    fun_fast = true}
(* Build the chain of intermediate currying steps caml_curryN,
   caml_curryN_1, ..., each allocating a 4-word closure (header, next
   step's code pointer, arity word 1, the captured argument, and the
   previous closure), ending with the final step that performs the
   actual call. *)
let rec intermediate_curry_functions arity num =
  if num = arity - 1 then
    [final_curry_function arity]
  else begin
    let name1 = "caml_curry" ^ string_of_int arity in
    (* the first step is named caml_curryN, later ones caml_curryN_k *)
    let name2 = if num = 0 then name1 else name1 ^ "_" ^ string_of_int num in
    let arg = Ident.create "arg" and clos = Ident.create "clos" in
    Cfunction
     {fun_name = name2;
      fun_args = [arg, typ_addr; clos, typ_addr];
      fun_body = Cop(Calloc,
                     [alloc_closure_header 4;
                      Cconst_symbol(name1 ^ "_" ^ string_of_int (num+1));
                      int_const 1; Cvar arg; Cvar clos]);
      fun_fast = true}
    :: intermediate_curry_functions arity (num+1)
  end
(* Generate the support functions for one arity: a negative [arity]
   encodes a tuplified function of [-arity] components, a non-negative
   one the chain of currying steps. *)
let curry_function arity =
  if arity < 0
  then [tuplify_function (-arity)]
  else intermediate_curry_functions arity 0
(* Sets of arities, used to deduplicate the generic functions needed
   by a set of compilation units. *)
module IntSet = Set.Make(
  struct
    type t = int
    let compare = compare
  end)
(* caml_apply2 and caml_apply3 are emitted unconditionally in
   non-shared mode (see the comment below). *)
let default_apply = IntSet.add 2 (IntSet.add 3 IntSet.empty)
(* These apply funs are always present in the main program because
   the run-time system needs them (cf. asmrun/<arch>.S). *)
(* Emit every generic apply/send/curry function required by [units],
   each exactly once.  In non-shared (main program) mode the default
   apply functions are forced in because the runtime references them. *)
let generic_functions shared units =
  let (apply,send,curry) =
    List.fold_left
      (fun (apply,send,curry) ui ->
         List.fold_right IntSet.add ui.Compilenv.ui_apply_fun apply,
         List.fold_right IntSet.add ui.Compilenv.ui_send_fun send,
         List.fold_right IntSet.add ui.Compilenv.ui_curry_fun curry)
      (IntSet.empty,IntSet.empty,IntSet.empty)
      units in
  let apply = if shared then apply else IntSet.union apply default_apply in
  let accu = IntSet.fold (fun n accu -> apply_function n :: accu) apply [] in
  let accu = IntSet.fold (fun n accu -> send_function n :: accu) send accu in
  (* curry_function returns a list (one function per currying step) *)
  IntSet.fold (fun n accu -> curry_function n @ accu) curry accu
(* Generate the entry point *)
(* Build caml_program: call each unit's entry function in link order,
   bumping the runtime counter caml_globals_inited after each one so
   the GC knows how many module globals are initialized. *)
let entry_point namelist =
  let incr_global_inited =
    Cop(Cstore Word,
        [Cconst_symbol "caml_globals_inited";
         Cop(Caddi, [Cop(Cload Word, [Cconst_symbol "caml_globals_inited"]);
                     Cconst_int 1])]) in
  let body =
    List.fold_right
      (fun name next ->
        let entry_sym = Compilenv.make_symbol ~unitname:name (Some "entry") in
        Csequence(Cop(Capply(typ_void, Debuginfo.none),
                      [Cconst_symbol entry_sym]),
                  Csequence(incr_global_inited, next)))
      namelist (Cconst_int 1) in
  Cfunction {fun_name = "caml_program";
             fun_args = [];
             fun_body = body;
             fun_fast = false}
(* Generate the table of globals *)
let cint_zero = Cint 0n
let global_table namelist =
let mksym name =
Csymbol_address (Compilenv.make_symbol ~unitname:name None)
in
Cdata(Cglobal_symbol "caml_globals" ::
Cdefine_symbol "caml_globals" ::
List.map mksym namelist @
[cint_zero])
(* Emit a data item referencing each symbol of [namelist], so the
   linker keeps them alive. *)
let reference_symbols namelist =
  Cdata (List.map (fun name -> Csymbol_address name) namelist)
let global_data name v =
Cdata(Cglobal_symbol name ::
emit_constant name
(Const_base (Const_string (Marshal.to_string v []))) [])
let globals_map v = global_data "caml_globals_map" v
(* Generate the master table of frame descriptors *)
let frame_table namelist =
let mksym name =
Csymbol_address (Compilenv.make_symbol ~unitname:name (Some "frametable"))
in
Cdata(Cglobal_symbol "caml_frametable" ::
Cdefine_symbol "caml_frametable" ::
List.map mksym namelist
@ [cint_zero])
(* Generate the table of module data and code segments *)
let segment_table namelist symbol begname endname =
let addsyms name lst =
Csymbol_address (Compilenv.make_symbol ~unitname:name (Some begname)) ::
Csymbol_address (Compilenv.make_symbol ~unitname:name (Some endname)) ::
lst
in
Cdata(Cglobal_symbol symbol ::
Cdefine_symbol symbol ::
List.fold_right addsyms namelist [cint_zero])
let data_segment_table namelist =
segment_table namelist "caml_data_segments" "data_begin" "data_end"
let code_segment_table namelist =
segment_table namelist "caml_code_segments" "code_begin" "code_end"
(* Initialize a predefined exception *)
(* Emit the statically-allocated data for a predefined exception: the
   exception identity block caml_exn_<name> (a block holding the name
   string) and the one-field bucket caml_bucket_<name> pointing at it,
   both exported as global symbols for the runtime. *)
let predef_exception name =
  let bucketname = "caml_bucket_" ^ name in
  let symname = "caml_exn_" ^ name in
  Cdata(Cglobal_symbol symname ::
        emit_constant symname (Const_block(0,[Const_base(Const_string name)]))
        [ Cglobal_symbol bucketname;
          Cint(block_header 0 1);
          Cdefine_symbol bucketname;
          Csymbol_address symname ])
(* Header for a plugin *)
let mapflat f l = List.flatten (List.map f l)
(* Description of one compilation unit inside a dynamically loaded
   plugin.  NOTE(review): presumably this record layout is mirrored by
   the dynlink loader that unmarshals the header — keep them in sync. *)
type dynunit = {
  name: string;
  crc: Digest.t;
  imports_cmi: (string * Digest.t) list;
  imports_cmx: (string * Digest.t) list;
  defines: string list;
}
(* Header marshalled at the front of a plugin: magic number plus the
   list of contained units. *)
type dynheader = {
  magic: string;
  units: dynunit list;
}
let dyn_magic_number = "Caml2007D001"
(* Emit the caml_plugin_header data block describing [units]
   (a list of unit-infos paired with their CRCs). *)
let plugin_header units =
  let mk (ui,crc) =
    { name = ui.Compilenv.ui_name;
      crc = crc;
      imports_cmi = ui.Compilenv.ui_imports_cmi;
      imports_cmx = ui.Compilenv.ui_imports_cmx;
      defines = ui.Compilenv.ui_defines
    } in
  global_data "caml_plugin_header"
    { magic = dyn_magic_number; units = List.map mk units }
| null | https://raw.githubusercontent.com/rizo/snowflake-os/51df43d9ba715532d325e8880d3b8b2c589cd075/plugins/ocamlopt.opt/asmcomp/cmmgen.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
Translation from closed lambda to C--
Local binding of complex expressions
Block headers. Meaning of the tag field: see stdlib/obj.ml
Integers
Bool
Float
Unit
Access to block fields
If byte loads are slow
If byte loads are efficient
Array indexing
String length
Message sending
Allocation
To compile "let rec" over values
Record application and currying functions
Comparisons
Translate structured constants
Translate constant closures
Boxed integers
Big arrays
Simplification of some primitives into C calls
ignored
Build switchers both for constants and blocks
Then for blocks
Auxiliary functions for optimizing "let" of boxed numbers (floats and
boxed integers
Translate an expression
Primitives
Control structures
As in the bytecode interpreter, only matching against constants
can be checked
Exceptions
Floating-point operations
String operations
Array operations
Test integer/block
Boxed integers
Float operations
String operations
Array operations
Boxed integers
String operations
Array operations
Translate a function definition
Translate all function definitions
Emit structured constants
Emit constant closures
Emit all structured constants
Translate a compilation unit
Generate the entry point
Generate the table of globals
Generate the master table of frame descriptors
Generate the table of module data and code segments
Header for a plugin | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Misc
open Arch
open Asttypes
open Primitive
open Types
open Lambda
open Clambda
open Cmm
(* [bind name arg fn] passes [arg] to [fn], let-binding it first unless
   it is cheap to duplicate (a variable or a constant), so [fn] may use
   its argument several times without re-evaluating [arg]. *)
let bind name arg fn =
  match arg with
    Cvar _ | Cconst_int _ | Cconst_natint _ | Cconst_symbol _
  | Cconst_pointer _ | Cconst_natpointer _ -> fn arg
  | _ -> let id = Ident.create name in Clet(id, arg, fn (Cvar id))
(* Same as [bind], but also let-binds variables — used when [fn] needs
   an expression it can safely assign around (e.g. mutable contexts). *)
let bind_nonvar name arg fn =
  match arg with
    Cconst_int _ | Cconst_natint _ | Cconst_symbol _
  | Cconst_pointer _ | Cconst_natpointer _ -> fn arg
  | _ -> let id = Ident.create name in Clet(id, arg, fn (Cvar id))
let float_tag = Cconst_int Obj.double_tag
let floatarray_tag = Cconst_int Obj.double_array_tag
(* Build a block header word: the size in words [sz] above bit 10 and
   the [tag] in the low bits; colour bits are left at zero. *)
let block_header tag sz =
  let size_part = Nativeint.shift_left (Nativeint.of_int sz) 10 in
  Nativeint.add size_part (Nativeint.of_int tag)
let closure_header sz = block_header Obj.closure_tag sz
let infix_header ofs = block_header Obj.infix_tag ofs
let float_header = block_header Obj.double_tag (size_float / size_addr)
let floatarray_header len =
block_header Obj.double_array_tag (len * size_float / size_addr)
let string_header len =
block_header Obj.string_tag ((len + size_addr) / size_addr)
let boxedint32_header = block_header Obj.custom_tag 2
let boxedint64_header = block_header Obj.custom_tag (1 + 8 / size_addr)
let boxedintnat_header = block_header Obj.custom_tag 2
let alloc_block_header tag sz = Cconst_natint(block_header tag sz)
let alloc_float_header = Cconst_natint(float_header)
let alloc_floatarray_header len = Cconst_natint(floatarray_header len)
let alloc_closure_header sz = Cconst_natint(closure_header sz)
let alloc_infix_header ofs = Cconst_natint(infix_header ofs)
let alloc_boxedint32_header = Cconst_natint(boxedint32_header)
let alloc_boxedint64_header = Cconst_natint(boxedint64_header)
let alloc_boxedintnat_header = Cconst_natint(boxedintnat_header)
let max_repr_int = max_int asr 1
let min_repr_int = min_int asr 1
let int_const n =
if n <= max_repr_int && n >= min_repr_int
then Cconst_int((n lsl 1) + 1)
else Cconst_natint
(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n)
let add_const c n =
if n = 0 then c else Cop(Caddi, [c; Cconst_int n])
let incr_int = function
Cconst_int n when n < max_int -> Cconst_int(n+1)
| Cop(Caddi, [c; Cconst_int n]) when n < max_int -> add_const c (n + 1)
| c -> add_const c 1
let decr_int = function
Cconst_int n when n > min_int -> Cconst_int(n-1)
| Cop(Caddi, [c; Cconst_int n]) when n > min_int -> add_const c (n - 1)
| c -> add_const c (-1)
let add_int c1 c2 =
match (c1, c2) with
(Cop(Caddi, [c1; Cconst_int n1]),
Cop(Caddi, [c2; Cconst_int n2])) when no_overflow_add n1 n2 ->
add_const (Cop(Caddi, [c1; c2])) (n1 + n2)
| (Cop(Caddi, [c1; Cconst_int n1]), c2) ->
add_const (Cop(Caddi, [c1; c2])) n1
| (c1, Cop(Caddi, [c2; Cconst_int n2])) ->
add_const (Cop(Caddi, [c1; c2])) n2
| (Cconst_int _, _) ->
Cop(Caddi, [c2; c1])
| (_, _) ->
Cop(Caddi, [c1; c2])
let sub_int c1 c2 =
match (c1, c2) with
(Cop(Caddi, [c1; Cconst_int n1]),
Cop(Caddi, [c2; Cconst_int n2])) when no_overflow_sub n1 n2 ->
add_const (Cop(Csubi, [c1; c2])) (n1 - n2)
| (Cop(Caddi, [c1; Cconst_int n1]), c2) ->
add_const (Cop(Csubi, [c1; c2])) n1
| (c1, Cop(Caddi, [c2; Cconst_int n2])) when n2 <> min_int ->
add_const (Cop(Csubi, [c1; c2])) (-n2)
| (c1, Cconst_int n) when n <> min_int ->
add_const c1 (-n)
| (c1, c2) ->
Cop(Csubi, [c1; c2])
let mul_int c1 c2 =
match (c1, c2) with
(Cconst_int 0, _) -> c1
| (Cconst_int 1, _) -> c2
| (_, Cconst_int 0) -> c2
| (_, Cconst_int 1) -> c1
| (_, _) -> Cop(Cmuli, [c1; c2])
let tag_int = function
Cconst_int n -> int_const n
| c -> Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1])
let force_tag_int = function
Cconst_int n -> int_const n
| c -> Cop(Cor, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1])
let untag_int = function
Cconst_int n -> Cconst_int(n asr 1)
| Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1]) -> c
| Cop(Cor, [Cop(Casr, [c; Cconst_int n]); Cconst_int 1])
when n > 0 && n < size_int * 8 ->
Cop(Casr, [c; Cconst_int (n+1)])
| Cop(Cor, [Cop(Clsr, [c; Cconst_int n]); Cconst_int 1])
when n > 0 && n < size_int * 8 ->
Cop(Clsr, [c; Cconst_int (n+1)])
| Cop(Cor, [c; Cconst_int 1]) -> Cop(Casr, [c; Cconst_int 1])
| c -> Cop(Casr, [c; Cconst_int 1])
let lsl_int c1 c2 =
match (c1, c2) with
(Cop(Clsl, [c; Cconst_int n1]), Cconst_int n2)
when n1 > 0 && n2 > 0 && n1 + n2 < size_int * 8 ->
Cop(Clsl, [c; Cconst_int (n1 + n2)])
| (_, _) ->
Cop(Clsl, [c1; c2])
let ignore_low_bit_int = function
Cop(Caddi, [(Cop(Clsl, [_; Cconst_int 1]) as c); Cconst_int 1]) -> c
| Cop(Cor, [c; Cconst_int 1]) -> c
| c -> c
(* True when the expression is a compile-time constant known to be
   non-zero, so the division-by-zero check can be omitted. *)
let is_nonzero_constant = function
    Cconst_int n -> n <> 0
  | Cconst_natint n -> n <> 0n
  | _ -> false
(* Wrap an integer division/modulus [op] with a zero-divisor check that
   raises Division_by_zero, unless -unsafe is on or the divisor is a
   known non-zero constant. *)
let safe_divmod op c1 c2 dbg =
  if !Clflags.fast || is_nonzero_constant c2 then
    Cop(op, [c1; c2])
  else
    bind "divisor" c2 (fun c2 ->
      Cifthenelse(c2,
                  Cop(op, [c1; c2]),
                  Cop(Craise dbg,
                      [Cconst_symbol "caml_bucket_Division_by_zero"])))
let test_bool = function
Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1]) -> c
| Cop(Clsl, [c; Cconst_int 1]) -> c
| c -> Cop(Ccmpi Cne, [c; Cconst_int 1])
let box_float c = Cop(Calloc, [alloc_float_header; c])
let rec unbox_float = function
Cop(Calloc, [header; c]) -> c
| Clet(id, exp, body) -> Clet(id, exp, unbox_float body)
| Cifthenelse(cond, e1, e2) ->
Cifthenelse(cond, unbox_float e1, unbox_float e2)
| Csequence(e1, e2) -> Csequence(e1, unbox_float e2)
| Cswitch(e, tbl, el) -> Cswitch(e, tbl, Array.map unbox_float el)
| Ccatch(n, ids, e1, e2) -> Ccatch(n, ids, unbox_float e1, unbox_float e2)
| Ctrywith(e1, id, e2) -> Ctrywith(unbox_float e1, id, unbox_float e2)
| c -> Cop(Cload Double_u, [c])
(* Complex numbers *)
let box_complex c_re c_im =
Cop(Calloc, [alloc_floatarray_header 2; c_re; c_im])
let complex_re c = Cop(Cload Double_u, [c])
let complex_im c = Cop(Cload Double_u,
[Cop(Cadda, [c; Cconst_int size_float])])
let return_unit c = Csequence(c, Cconst_pointer 1)
let rec remove_unit = function
Cconst_pointer 1 -> Ctuple []
| Csequence(c, Cconst_pointer 1) -> c
| Csequence(c1, c2) ->
Csequence(c1, remove_unit c2)
| Cifthenelse(cond, ifso, ifnot) ->
Cifthenelse(cond, remove_unit ifso, remove_unit ifnot)
| Cswitch(sel, index, cases) ->
Cswitch(sel, index, Array.map remove_unit cases)
| Ccatch(io, ids, body, handler) ->
Ccatch(io, ids, remove_unit body, remove_unit handler)
| Ctrywith(body, exn, handler) ->
Ctrywith(remove_unit body, exn, remove_unit handler)
| Clet(id, c1, c2) ->
Clet(id, c1, remove_unit c2)
| Cop(Capply (mty, dbg), args) ->
Cop(Capply (typ_void, dbg), args)
| Cop(Cextcall(proc, mty, alloc, dbg), args) ->
Cop(Cextcall(proc, typ_void, alloc, dbg), args)
| Cexit (_,_) as c -> c
| Ctuple [] as c -> c
| c -> Csequence(c, Ctuple [])
let field_address ptr n =
if n = 0
then ptr
else Cop(Cadda, [ptr; Cconst_int(n * size_addr)])
let get_field ptr n =
Cop(Cload Word, [field_address ptr n])
let set_field ptr n newval =
Cop(Cstore Word, [field_address ptr n; newval])
let header ptr =
Cop(Cload Word, [Cop(Cadda, [ptr; Cconst_int(-size_int)])])
let tag_offset =
if big_endian then -1 else -size_int
(* Fetch the tag of the block at [ptr].  The source here had lost the
   conditional between its two alternatives (two expressions were left
   stacked, which does not parse); restore it: on word-addressed
   targets load the whole header and mask the tag byte, otherwise do a
   single byte load at [tag_offset]. *)
let get_tag ptr =
  if Proc.word_addressed then           (* If byte loads are slow *)
    Cop(Cand, [header ptr; Cconst_int 255])
  else                                  (* If byte loads are efficient *)
    Cop(Cload Byte_unsigned,
        [Cop(Cadda, [ptr; Cconst_int(tag_offset)])])
let get_size ptr =
Cop(Clsr, [header ptr; Cconst_int 10])
let log2_size_addr = Misc.log2 size_addr
let log2_size_float = Misc.log2 size_float
let wordsize_shift = 9
let numfloat_shift = 9 + log2_size_float - log2_size_addr
let is_addr_array_hdr hdr =
Cop(Ccmpi Cne, [Cop(Cand, [hdr; Cconst_int 255]); floatarray_tag])
let is_addr_array_ptr ptr =
Cop(Ccmpi Cne, [get_tag ptr; floatarray_tag])
let addr_array_length hdr = Cop(Clsr, [hdr; Cconst_int wordsize_shift])
let float_array_length hdr = Cop(Clsr, [hdr; Cconst_int numfloat_shift])
let lsl_const c n =
Cop(Clsl, [c; Cconst_int n])
let array_indexing log2size ptr ofs =
match ofs with
Cconst_int n ->
let i = n asr 1 in
if i = 0 then ptr else Cop(Cadda, [ptr; Cconst_int(i lsl log2size)])
| Cop(Caddi, [Cop(Clsl, [c; Cconst_int 1]); Cconst_int 1]) ->
Cop(Cadda, [ptr; lsl_const c log2size])
| Cop(Caddi, [c; Cconst_int n]) ->
Cop(Cadda, [Cop(Cadda, [ptr; lsl_const c (log2size - 1)]);
Cconst_int((n-1) lsl (log2size - 1))])
| _ ->
Cop(Cadda, [Cop(Cadda, [ptr; lsl_const ofs (log2size - 1)]);
Cconst_int((-1) lsl (log2size - 1))])
let addr_array_ref arr ofs =
Cop(Cload Word, [array_indexing log2_size_addr arr ofs])
let unboxed_float_array_ref arr ofs =
Cop(Cload Double_u, [array_indexing log2_size_float arr ofs])
let float_array_ref arr ofs =
box_float(unboxed_float_array_ref arr ofs)
let addr_array_set arr ofs newval =
Cop(Cextcall("caml_modify", typ_void, false, Debuginfo.none),
[array_indexing log2_size_addr arr ofs; newval])
let int_array_set arr ofs newval =
Cop(Cstore Word, [array_indexing log2_size_addr arr ofs; newval])
let float_array_set arr ofs newval =
Cop(Cstore Double_u, [array_indexing log2_size_float arr ofs; newval])
let string_length exp =
bind "str" exp (fun str ->
let tmp_var = Ident.create "tmp" in
Clet(tmp_var,
Cop(Csubi,
[Cop(Clsl,
[Cop(Clsr, [header str; Cconst_int 10]);
Cconst_int log2_size_addr]);
Cconst_int 1]),
Cop(Csubi,
[Cvar tmp_var;
Cop(Cload Byte_unsigned,
[Cop(Cadda, [str; Cvar tmp_var])])])))
let lookup_tag obj tag =
bind "tag" tag (fun tag ->
Cop(Cextcall("caml_get_public_method", typ_addr, false, Debuginfo.none),
[obj; tag]))
let lookup_label obj lab =
bind "lab" lab (fun lab ->
let table = Cop (Cload Word, [obj]) in
addr_array_ref table lab)
let call_cached_method obj tag cache pos args dbg =
let arity = List.length args in
let cache = array_indexing log2_size_addr cache pos in
Compilenv.need_send_fun arity;
Cop(Capply (typ_addr, dbg),
Cconst_symbol("caml_send" ^ string_of_int arity) ::
obj :: tag :: cache :: args)
let make_alloc_generic set_fn tag wordsize args =
if wordsize <= Config.max_young_wosize then
Cop(Calloc, Cconst_natint(block_header tag wordsize) :: args)
else begin
let id = Ident.create "alloc" in
let rec fill_fields idx = function
[] -> Cvar id
| e1::el -> Csequence(set_fn (Cvar id) (Cconst_int idx) e1,
fill_fields (idx + 2) el) in
Clet(id,
Cop(Cextcall("caml_alloc", typ_addr, true, Debuginfo.none),
[Cconst_int wordsize; Cconst_int tag]),
fill_fields 1 args)
end
let make_alloc tag args =
make_alloc_generic addr_array_set tag (List.length args) args
let make_float_alloc tag args =
make_alloc_generic float_array_set tag
(List.length args * size_float / size_addr) args
let fundecls_size fundecls =
let sz = ref (-1) in
List.iter
(fun (label, arity, params, body) ->
sz := !sz + 1 + (if arity = 1 then 2 else 3))
fundecls;
!sz
type rhs_kind =
| RHS_block of int
| RHS_nonrec
;;
let rec expr_size = function
| Uclosure(fundecls, clos_vars) ->
RHS_block (fundecls_size fundecls + List.length clos_vars)
| Ulet(id, exp, body) ->
expr_size body
| Uletrec(bindings, body) ->
expr_size body
| Uprim(Pmakeblock(tag, mut), args, _) ->
RHS_block (List.length args)
| Uprim(Pmakearray(Paddrarray | Pintarray), args, _) ->
RHS_block (List.length args)
| Usequence(exp, exp') ->
expr_size exp'
| _ -> RHS_nonrec
let apply_function n =
Compilenv.need_apply_fun n; "caml_apply" ^ string_of_int n
let curry_function n =
Compilenv.need_curry_fun n;
if n >= 0
then "caml_curry" ^ string_of_int n
else "caml_tuplify" ^ string_of_int (-n)
let transl_comparison = function
Lambda.Ceq -> Ceq
| Lambda.Cneq -> Cne
| Lambda.Cge -> Cge
| Lambda.Cgt -> Cgt
| Lambda.Cle -> Cle
| Lambda.Clt -> Clt
let const_label = ref 0
let new_const_label () =
incr const_label;
!const_label
let new_const_symbol () =
incr const_label;
Compilenv.make_symbol (Some (string_of_int !const_label))
let structured_constants = ref ([] : (string * structured_constant) list)
let transl_constant = function
Const_base(Const_int n) ->
int_const n
| Const_base(Const_char c) ->
Cconst_int(((Char.code c) lsl 1) + 1)
| Const_pointer n ->
if n <= max_repr_int && n >= min_repr_int
then Cconst_pointer((n lsl 1) + 1)
else Cconst_natpointer
(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n)
| cst ->
let lbl = new_const_symbol() in
structured_constants := (lbl, cst) :: !structured_constants;
Cconst_symbol lbl
let constant_closures =
ref ([] : (string * (string * int * Ident.t list * ulambda) list) list)
let box_int_constant bi n =
match bi with
Pnativeint -> Const_base(Const_nativeint n)
| Pint32 -> Const_base(Const_int32 (Nativeint.to_int32 n))
| Pint64 -> Const_base(Const_int64 (Int64.of_nativeint n))
let operations_boxed_int bi =
match bi with
Pnativeint -> "caml_nativeint_ops"
| Pint32 -> "caml_int32_ops"
| Pint64 -> "caml_int64_ops"
let alloc_header_boxed_int bi =
match bi with
Pnativeint -> alloc_boxedintnat_header
| Pint32 -> alloc_boxedint32_header
| Pint64 -> alloc_boxedint64_header
let box_int bi arg =
match arg with
Cconst_int n ->
transl_constant (box_int_constant bi (Nativeint.of_int n))
| Cconst_natint n ->
transl_constant (box_int_constant bi n)
| _ ->
let arg' =
if bi = Pint32 && size_int = 8 && big_endian
then Cop(Clsl, [arg; Cconst_int 32])
else arg in
Cop(Calloc, [alloc_header_boxed_int bi;
Cconst_symbol(operations_boxed_int bi);
arg'])
(* Remove the box around a boxed-integer expression when the box is
   syntactically visible (pushing through let/if/switch/etc. so the
   allocation can be eliminated); otherwise fall back to loading the
   payload word that follows the custom-operations pointer. *)
let rec unbox_int bi arg =
  match arg with
    Cop(Calloc, [hdr; ops; Cop(Clsl, [contents; Cconst_int 32])])
    when bi = Pint32 && size_int = 8 && big_endian ->
      (* Force sign-extension of low 32 bits *)
      Cop(Casr, [Cop(Clsl, [contents; Cconst_int 32]); Cconst_int 32])
  | Cop(Calloc, [hdr; ops; contents])
    when bi = Pint32 && size_int = 8 && not big_endian ->
      (* Force sign-extension of low 32 bits *)
      Cop(Casr, [Cop(Clsl, [contents; Cconst_int 32]); Cconst_int 32])
  | Cop(Calloc, [hdr; ops; contents]) ->
      contents
  | Clet(id, exp, body) -> Clet(id, exp, unbox_int bi body)
  | Cifthenelse(cond, e1, e2) ->
      Cifthenelse(cond, unbox_int bi e1, unbox_int bi e2)
  | Csequence(e1, e2) -> Csequence(e1, unbox_int bi e2)
  | Cswitch(e, tbl, el) -> Cswitch(e, tbl, Array.map (unbox_int bi) el)
  | Ccatch(n, ids, e1, e2) -> Ccatch(n, ids, unbox_int bi e1, unbox_int bi e2)
  | Ctrywith(e1, id, e2) -> Ctrywith(unbox_int bi e1, id, unbox_int bi e2)
  | _ ->
      Cop(Cload(if bi = Pint32 then Thirtytwo_signed else Word),
          [Cop(Cadda, [arg; Cconst_int size_addr])])
let make_unsigned_int bi arg =
if bi = Pint32 && size_int = 8
then Cop(Cand, [arg; Cconst_natint 0xFFFFFFFFn])
else arg
let bigarray_elt_size = function
Pbigarray_unknown -> assert false
| Pbigarray_float32 -> 4
| Pbigarray_float64 -> 8
| Pbigarray_sint8 -> 1
| Pbigarray_uint8 -> 1
| Pbigarray_sint16 -> 2
| Pbigarray_uint16 -> 2
| Pbigarray_int32 -> 4
| Pbigarray_int64 -> 8
| Pbigarray_caml_int -> size_int
| Pbigarray_native_int -> size_int
| Pbigarray_complex32 -> 8
| Pbigarray_complex64 -> 16
let bigarray_indexing unsafe elt_kind layout b args dbg =
let check_bound a1 a2 k =
if unsafe then k else Csequence(Cop(Ccheckbound dbg, [a1;a2]), k) in
let rec ba_indexing dim_ofs delta_ofs = function
[] -> assert false
| [arg] ->
bind "idx" (untag_int arg)
(fun idx ->
check_bound (Cop(Cload Word,[field_address b dim_ofs])) idx idx)
| arg1 :: argl ->
let rem = ba_indexing (dim_ofs + delta_ofs) delta_ofs argl in
bind "idx" (untag_int arg1)
(fun idx ->
bind "bound" (Cop(Cload Word, [field_address b dim_ofs]))
(fun bound ->
check_bound bound idx (add_int (mul_int rem bound) idx))) in
let offset =
match layout with
Pbigarray_unknown_layout ->
assert false
| Pbigarray_c_layout ->
ba_indexing (4 + List.length args) (-1) (List.rev args)
| Pbigarray_fortran_layout ->
ba_indexing 5 1 (List.map (fun idx -> sub_int idx (Cconst_int 2)) args)
and elt_size =
bigarray_elt_size elt_kind in
let byte_offset =
if elt_size = 1
then offset
else Cop(Clsl, [offset; Cconst_int(log2 elt_size)]) in
Cop(Cadda, [Cop(Cload Word, [field_address b 1]); byte_offset])
let bigarray_word_kind = function
Pbigarray_unknown -> assert false
| Pbigarray_float32 -> Single
| Pbigarray_float64 -> Double
| Pbigarray_sint8 -> Byte_signed
| Pbigarray_uint8 -> Byte_unsigned
| Pbigarray_sint16 -> Sixteen_signed
| Pbigarray_uint16 -> Sixteen_unsigned
| Pbigarray_int32 -> Thirtytwo_signed
| Pbigarray_int64 -> Word
| Pbigarray_caml_int -> Word
| Pbigarray_native_int -> Word
| Pbigarray_complex32 -> Single
| Pbigarray_complex64 -> Double
let bigarray_get unsafe elt_kind layout b args dbg =
match elt_kind with
Pbigarray_complex32 | Pbigarray_complex64 ->
let kind = bigarray_word_kind elt_kind in
let sz = bigarray_elt_size elt_kind / 2 in
bind "addr" (bigarray_indexing unsafe elt_kind layout b args dbg) (fun addr ->
box_complex
(Cop(Cload kind, [addr]))
(Cop(Cload kind, [Cop(Cadda, [addr; Cconst_int sz])])))
| _ ->
Cop(Cload (bigarray_word_kind elt_kind),
[bigarray_indexing unsafe elt_kind layout b args dbg])
let bigarray_set unsafe elt_kind layout b args newval dbg =
match elt_kind with
Pbigarray_complex32 | Pbigarray_complex64 ->
let kind = bigarray_word_kind elt_kind in
let sz = bigarray_elt_size elt_kind / 2 in
bind "newval" newval (fun newv ->
bind "addr" (bigarray_indexing unsafe elt_kind layout b args dbg) (fun addr ->
Csequence(
Cop(Cstore kind, [addr; complex_re newv]),
Cop(Cstore kind,
[Cop(Cadda, [addr; Cconst_int sz]); complex_im newv]))))
| _ ->
Cop(Cstore (bigarray_word_kind elt_kind),
[bigarray_indexing unsafe elt_kind layout b args dbg; newval])
(* Primitive descriptor for a C call named [name].  The source here had
   lost the opening line of the record literal (it started at
   [prim_alloc], which does not parse); restore the missing fields.
   Arity 0 is a placeholder: these descriptors are only used to name
   the C function for Pccall. *)
let default_prim name =
  { prim_name = name; prim_arity = 0; prim_alloc = true;
    prim_native_name = ""; prim_native_float = false }
let simplif_primitive_32bits = function
Pbintofint Pint64 -> Pccall (default_prim "caml_int64_of_int")
| Pintofbint Pint64 -> Pccall (default_prim "caml_int64_to_int")
| Pcvtbint(Pint32, Pint64) -> Pccall (default_prim "caml_int64_of_int32")
| Pcvtbint(Pint64, Pint32) -> Pccall (default_prim "caml_int64_to_int32")
| Pcvtbint(Pnativeint, Pint64) ->
Pccall (default_prim "caml_int64_of_nativeint")
| Pcvtbint(Pint64, Pnativeint) ->
Pccall (default_prim "caml_int64_to_nativeint")
| Pnegbint Pint64 -> Pccall (default_prim "caml_int64_neg")
| Paddbint Pint64 -> Pccall (default_prim "caml_int64_add")
| Psubbint Pint64 -> Pccall (default_prim "caml_int64_sub")
| Pmulbint Pint64 -> Pccall (default_prim "caml_int64_mul")
| Pdivbint Pint64 -> Pccall (default_prim "caml_int64_div")
| Pmodbint Pint64 -> Pccall (default_prim "caml_int64_mod")
| Pandbint Pint64 -> Pccall (default_prim "caml_int64_and")
| Porbint Pint64 -> Pccall (default_prim "caml_int64_or")
| Pxorbint Pint64 -> Pccall (default_prim "caml_int64_xor")
| Plslbint Pint64 -> Pccall (default_prim "caml_int64_shift_left")
| Plsrbint Pint64 -> Pccall (default_prim "caml_int64_shift_right_unsigned")
| Pasrbint Pint64 -> Pccall (default_prim "caml_int64_shift_right")
| Pbintcomp(Pint64, Lambda.Ceq) -> Pccall (default_prim "caml_equal")
| Pbintcomp(Pint64, Lambda.Cneq) -> Pccall (default_prim "caml_notequal")
| Pbintcomp(Pint64, Lambda.Clt) -> Pccall (default_prim "caml_lessthan")
| Pbintcomp(Pint64, Lambda.Cgt) -> Pccall (default_prim "caml_greaterthan")
| Pbintcomp(Pint64, Lambda.Cle) -> Pccall (default_prim "caml_lessequal")
| Pbintcomp(Pint64, Lambda.Cge) -> Pccall (default_prim "caml_greaterequal")
| Pbigarrayref(unsafe, n, Pbigarray_int64, layout) ->
Pccall (default_prim ("caml_ba_get_" ^ string_of_int n))
| Pbigarrayset(unsafe, n, Pbigarray_int64, layout) ->
Pccall (default_prim ("caml_ba_set_" ^ string_of_int n))
| p -> p
let simplif_primitive p =
match p with
| Pduprecord _ ->
Pccall (default_prim "caml_obj_dup")
| Pbigarrayref(unsafe, n, Pbigarray_unknown, layout) ->
Pccall (default_prim ("caml_ba_get_" ^ string_of_int n))
| Pbigarrayset(unsafe, n, Pbigarray_unknown, layout) ->
Pccall (default_prim ("caml_ba_set_" ^ string_of_int n))
| Pbigarrayref(unsafe, n, kind, Pbigarray_unknown_layout) ->
Pccall (default_prim ("caml_ba_get_" ^ string_of_int n))
| Pbigarrayset(unsafe, n, kind, Pbigarray_unknown_layout) ->
Pccall (default_prim ("caml_ba_set_" ^ string_of_int n))
| p ->
if size_int = 8 then p else simplif_primitive_32bits p
(* constants first *)
(* Interval test: tagged 1 iff [arg] lies outside [0, h], using a single
   unsigned comparison (Ccmpa) so negative values also compare "above" h. *)
let transl_isout h arg = tag_int (Cop(Ccmpa Clt, [h ; arg]))
exception Found of int

(* Build a [Cswitch] over [arg].  [cases.(i)] is the action requested for
   matched value [i]; structurally equal actions are shared through a
   [Switch.mk_store] so each distinct action from [acts] is emitted once,
   and [new_cases] maps each value to the index of its shared action. *)
let make_switch_gen arg cases acts =
  let lcases = Array.length cases in
  (* [Array.make] is the non-deprecated spelling of [Array.create]. *)
  let new_cases = Array.make lcases 0 in
  let store = Switch.mk_store (=) in
  for i = 0 to Array.length cases-1 do
    let act = cases.(i) in
    let new_act = store.Switch.act_store act in
    new_cases.(i) <- new_act
  done ;
  Cswitch
    (arg, new_cases,
     Array.map
       (fun n -> acts.(n))
       (store.Switch.act_get ()))
(* Adapter module feeding the generic switch compiler [Switch.Make]:
   actions are Cmm expressions, tests are Cmm integer comparisons, and
   out-of-range/in-range checks use the unsigned-comparison trick. *)
module SArgBlocks =
struct
  type primitive = operation
  let eqint = Ccmpi Ceq
  let neint = Ccmpi Cne
  let leint = Ccmpi Cle
  let ltint = Ccmpi Clt
  let geint = Ccmpi Cge
  let gtint = Ccmpi Cgt
  type act = expression
  (* Exit 0 is used as the "no action" placeholder. *)
  let default = Cexit (0,[])
  let make_prim p args = Cop (p,args)
  let make_offset arg n = add_const arg n
  let make_isout h arg = Cop (Ccmpa Clt, [h ; arg])
  let make_isin h arg = Cop (Ccmpa Cge, [h ; arg])
  let make_if cond ifso ifnot = Cifthenelse (cond, ifso, ifnot)
  let make_switch arg cases actions =
    make_switch_gen arg cases actions
  let bind arg body = bind "switcher" arg body
end
(* Switch compiler specialized to Cmm, used by [transl_switch]. *)
module SwitcherBlocks = Switch.Make(SArgBlocks)
(* Classification of expressions whose result can live unboxed inside a
   [let]: as a raw float, as a raw boxed-integer payload, or not at all. *)
type unboxed_number_kind =
    No_unboxing
  | Boxed_float
  | Boxed_integer of boxed_integer

(* Decide whether [exp] always evaluates to a freshly boxed number, so
   that [Ulet] can bind the unboxed value instead (see [transl_unbox_let]).
   Only float literals and primitives whose result is known to be a boxed
   float / boxed integer qualify; everything else is [No_unboxing]. *)
let is_unboxed_number = function
    Uconst(Const_base(Const_float f)) ->
      Boxed_float
  | Uprim(p, _, _) ->
      begin match simplif_primitive p with
          (* C calls declared "float" return an unboxed double. *)
          Pccall p -> if p.prim_native_float then Boxed_float else No_unboxing
        | Pfloatfield _ -> Boxed_float
        | Pfloatofint -> Boxed_float
        | Pnegfloat -> Boxed_float
        | Pabsfloat -> Boxed_float
        | Paddfloat -> Boxed_float
        | Psubfloat -> Boxed_float
        | Pmulfloat -> Boxed_float
        | Pdivfloat -> Boxed_float
        | Parrayrefu Pfloatarray -> Boxed_float
        | Parrayrefs Pfloatarray -> Boxed_float
        | Pbintofint bi -> Boxed_integer bi
        | Pcvtbint(src, dst) -> Boxed_integer dst
        | Pnegbint bi -> Boxed_integer bi
        | Paddbint bi -> Boxed_integer bi
        | Psubbint bi -> Boxed_integer bi
        | Pmulbint bi -> Boxed_integer bi
        | Pdivbint bi -> Boxed_integer bi
        | Pmodbint bi -> Boxed_integer bi
        | Pandbint bi -> Boxed_integer bi
        | Porbint bi -> Boxed_integer bi
        | Pxorbint bi -> Boxed_integer bi
        | Plslbint bi -> Boxed_integer bi
        | Plsrbint bi -> Boxed_integer bi
        | Pasrbint bi -> Boxed_integer bi
        | Pbigarrayref(_, _, (Pbigarray_float32 | Pbigarray_float64), _) ->
            Boxed_float
        | Pbigarrayref(_, _, Pbigarray_int32, _) -> Boxed_integer Pint32
        | Pbigarrayref(_, _, Pbigarray_int64, _) -> Boxed_integer Pint64
        | Pbigarrayref(_, _, Pbigarray_native_int, _) -> Boxed_integer Pnativeint
        | _ -> No_unboxing
      end
  | _ -> No_unboxing
(* Rewrite [exp], replacing loads from the boxed variable [boxed_id] by
   direct uses of [unboxed_id], and redirecting assignments to [boxed_id]
   into assignments of the unboxed value.  Returns the rewritten
   expression together with two flags: whether the boxed variable is
   still referenced as-is (so the box must be kept), and whether it is
   ever assigned. *)
let subst_boxed_number unbox_fn boxed_id unboxed_id exp =
  let need_boxed = ref false in
  let assigned = ref false in
  let rec subst = function
      Cvar id as e ->
        (* Note: sequence, not an if/else — the flag is set as a side
           effect and [e] is always returned unchanged. *)
        if Ident.same id boxed_id then need_boxed := true; e
    | Clet(id, arg, body) -> Clet(id, subst arg, subst body)
    | Cassign(id, arg) ->
        if Ident.same id boxed_id then begin
          assigned := true;
          Cassign(unboxed_id, subst(unbox_fn arg))
        end else
          Cassign(id, subst arg)
    | Ctuple argv -> Ctuple(List.map subst argv)
    (* A load straight from the box (or from an offset into it, as for
       boxed int64 on 32 bits) becomes a use of the unboxed variable. *)
    | Cop(Cload _, [Cvar id]) as e ->
        if Ident.same id boxed_id then Cvar unboxed_id else e
    | Cop(Cload _, [Cop(Cadda, [Cvar id; _])]) as e ->
        if Ident.same id boxed_id then Cvar unboxed_id else e
    | Cop(op, argv) -> Cop(op, List.map subst argv)
    | Csequence(e1, e2) -> Csequence(subst e1, subst e2)
    | Cifthenelse(e1, e2, e3) -> Cifthenelse(subst e1, subst e2, subst e3)
    | Cswitch(arg, index, cases) ->
        Cswitch(subst arg, index, Array.map subst cases)
    | Cloop e -> Cloop(subst e)
    | Ccatch(nfail, ids, e1, e2) -> Ccatch(nfail, ids, subst e1, subst e2)
    | Cexit (nfail, el) -> Cexit (nfail, List.map subst el)
    | Ctrywith(e1, id, e2) -> Ctrywith(subst e1, id, subst e2)
    | e -> e in
  let res = subst exp in
  (res, !need_boxed, !assigned)
(* Worklist of function bodies still to be compiled:
   (label, parameters, body).  Filled by [transl] when it meets closures,
   drained by [transl_all_functions]. *)
let functions = (Queue.create() : (string * Ident.t list * ulambda) Queue.t)
(* Translate a ulambda expression to Cmm.  Function bodies met inside
   closures are not translated inline: they are pushed onto the
   [functions] queue and compiled later by [transl_all_functions]. *)
let rec transl = function
    Uvar id ->
      Cvar id
  | Uconst sc ->
      transl_constant sc
  (* Closure with no environment: allocate it statically, once. *)
  | Uclosure(fundecls, []) ->
      let lbl = new_const_symbol() in
      constant_closures := (lbl, fundecls) :: !constant_closures;
      List.iter
        (fun (label, arity, params, body) ->
          Queue.add (label, params, body) functions)
        fundecls;
      Cconst_symbol lbl
  (* Closure with captured variables: allocate a heap block holding,
     per function, either [code; arity] (arity 1) or
     [curry; arity; code] (arity <> 1), followed by the environment. *)
  | Uclosure(fundecls, clos_vars) ->
      let block_size =
        fundecls_size fundecls + List.length clos_vars in
      let rec transl_fundecls pos = function
          [] ->
            List.map transl clos_vars
        | (label, arity, params, body) :: rem ->
            Queue.add (label, params, body) functions;
            let header =
              if pos = 0
              then alloc_closure_header block_size
              else alloc_infix_header pos in
            if arity = 1 then
              header ::
              Cconst_symbol label ::
              int_const 1 ::
              transl_fundecls (pos + 3) rem
            else
              header ::
              Cconst_symbol(curry_function arity) ::
              int_const arity ::
              Cconst_symbol label ::
              transl_fundecls (pos + 4) rem in
      Cop(Calloc, transl_fundecls 0 fundecls)
  | Uoffset(arg, offset) ->
      field_address (transl arg) offset
  | Udirect_apply(lbl, args, dbg) ->
      Cop(Capply(typ_addr, dbg), Cconst_symbol lbl :: List.map transl args)
  (* One-argument generic application: call through the closure's code
     pointer directly, no caml_apply stub needed. *)
  | Ugeneric_apply(clos, [arg], dbg) ->
      bind "fun" (transl clos) (fun clos ->
        Cop(Capply(typ_addr, dbg), [get_field clos 0; transl arg; clos]))
  | Ugeneric_apply(clos, args, dbg) ->
      let arity = List.length args in
      let cargs = Cconst_symbol(apply_function arity) ::
        List.map transl (args @ [clos]) in
      Cop(Capply(typ_addr, dbg), cargs)
  (* Method call: look the method up (by label, tag, or cache) and apply
     it like a generic function, with [obj] as first argument. *)
  | Usend(kind, met, obj, args, dbg) ->
      let call_met obj args clos =
        if args = [] then
          Cop(Capply(typ_addr, dbg), [get_field clos 0;obj;clos])
        else
          let arity = List.length args + 1 in
          let cargs = Cconst_symbol(apply_function arity) :: obj ::
            (List.map transl args) @ [clos] in
          Cop(Capply(typ_addr, dbg), cargs)
      in
      bind "obj" (transl obj) (fun obj ->
        match kind, args with
          Self, _ ->
            bind "met" (lookup_label obj (transl met)) (call_met obj args)
        | Cached, cache :: pos :: args ->
            call_cached_method obj (transl met) (transl cache) (transl pos)
              (List.map transl args) dbg
        | _ ->
            bind "met" (lookup_tag obj (transl met)) (call_met obj args))
  (* Let: bind the unboxed value instead, when the bound expression is
     known to produce a fresh box (see [is_unboxed_number]). *)
  | Ulet(id, exp, body) ->
      begin match is_unboxed_number exp with
        No_unboxing ->
          Clet(id, transl exp, transl body)
      | Boxed_float ->
          transl_unbox_let box_float unbox_float transl_unbox_float
            id exp body
      | Boxed_integer bi ->
          transl_unbox_let (box_int bi) (unbox_int bi) (transl_unbox_int bi)
            id exp body
      end
  | Uletrec(bindings, body) ->
      transl_letrec bindings (transl body)
  (* Primitives with a statically known argument count are dispatched to
     transl_prim_{1,2,3}; the variadic ones are handled here. *)
  | Uprim(prim, args, dbg) ->
      begin match (simplif_primitive prim, args) with
        (Pgetglobal id, []) ->
          Cconst_symbol (Ident.name id)
      | (Pmakeblock(tag, mut), []) ->
          transl_constant(Const_block(tag, []))
      | (Pmakeblock(tag, mut), args) ->
          make_alloc tag (List.map transl args)
      | (Pccall prim, args) ->
          if prim.prim_native_float then
            box_float
              (Cop(Cextcall(prim.prim_native_name, typ_float, false, dbg),
                   List.map transl_unbox_float args))
          else
            Cop(Cextcall(Primitive.native_name prim, typ_addr, prim.prim_alloc, dbg),
                List.map transl args)
      | (Pmakearray kind, []) ->
          transl_constant(Const_block(0, []))
      | (Pmakearray kind, args) ->
          begin match kind with
            Pgenarray ->
              Cop(Cextcall("caml_make_array", typ_addr, true, Debuginfo.none),
                  [make_alloc 0 (List.map transl args)])
          | Paddrarray | Pintarray ->
              make_alloc 0 (List.map transl args)
          | Pfloatarray ->
              make_float_alloc Obj.double_array_tag
                (List.map transl_unbox_float args)
          end
      | (Pbigarrayref(unsafe, num_dims, elt_kind, layout), arg1 :: argl) ->
          let elt =
            bigarray_get unsafe elt_kind layout
              (transl arg1) (List.map transl argl) dbg in
          begin match elt_kind with
            Pbigarray_float32 | Pbigarray_float64 -> box_float elt
          | Pbigarray_complex32 | Pbigarray_complex64 -> elt
          | Pbigarray_int32 -> box_int Pint32 elt
          | Pbigarray_int64 -> box_int Pint64 elt
          | Pbigarray_native_int -> box_int Pnativeint elt
          | Pbigarray_caml_int -> force_tag_int elt
          | _ -> tag_int elt
          end
      | (Pbigarrayset(unsafe, num_dims, elt_kind, layout), arg1 :: argl) ->
          let (argidx, argnewval) = split_last argl in
          return_unit(bigarray_set unsafe elt_kind layout
            (transl arg1)
            (List.map transl argidx)
            (match elt_kind with
              Pbigarray_float32 | Pbigarray_float64 ->
                transl_unbox_float argnewval
            | Pbigarray_complex32 | Pbigarray_complex64 -> transl argnewval
            | Pbigarray_int32 -> transl_unbox_int Pint32 argnewval
            | Pbigarray_int64 -> transl_unbox_int Pint64 argnewval
            | Pbigarray_native_int -> transl_unbox_int Pnativeint argnewval
            | _ -> untag_int (transl argnewval))
            dbg)
      | (p, [arg]) ->
          transl_prim_1 p arg dbg
      | (p, [arg1; arg2]) ->
          transl_prim_2 p arg1 arg2 dbg
      | (p, [arg1; arg2; arg3]) ->
          transl_prim_3 p arg1 arg2 arg3 dbg
      | (_, _) ->
          fatal_error "Cmmgen.transl:prim"
      end
  (* Switch on constants and/or block tags; when both kinds of cases are
     present, first test the low bit to tell immediates from pointers. *)
  | Uswitch(arg, s) ->
      if Array.length s.us_index_blocks = 0 then
        Cswitch
          (untag_int (transl arg),
           s.us_index_consts,
           Array.map transl s.us_actions_consts)
      else if Array.length s.us_index_consts = 0 then
        transl_switch (get_tag (transl arg))
          s.us_index_blocks s.us_actions_blocks
      else
        bind "switch" (transl arg) (fun arg ->
          Cifthenelse(
          Cop(Cand, [arg; Cconst_int 1]),
          transl_switch
            (untag_int arg) s.us_index_consts s.us_actions_consts,
          transl_switch
            (get_tag arg) s.us_index_blocks s.us_actions_blocks))
  | Ustaticfail (nfail, args) ->
      Cexit (nfail, List.map transl args)
  | Ucatch(nfail, [], body, handler) ->
      make_catch nfail (transl body) (transl handler)
  | Ucatch(nfail, ids, body, handler) ->
      Ccatch(nfail, ids, transl body, transl handler)
  | Utrywith(body, exn, handler) ->
      Ctrywith(transl body, exn, transl handler)
  (* if-then-else: several special cases avoid materializing booleans by
     branching straight to a shared exit (see exit_if_true/false). *)
  | Uifthenelse(Uprim(Pnot, [arg], _), ifso, ifnot) ->
      transl (Uifthenelse(arg, ifnot, ifso))
  | Uifthenelse(cond, ifso, Ustaticfail (nfail, [])) ->
      exit_if_false cond (transl ifso) nfail
  | Uifthenelse(cond, Ustaticfail (nfail, []), ifnot) ->
      exit_if_true cond nfail (transl ifnot)
  | Uifthenelse(Uprim(Psequand, _, _) as cond, ifso, ifnot) ->
      let raise_num = next_raise_count () in
      make_catch
        raise_num
        (exit_if_false cond (transl ifso) raise_num)
        (transl ifnot)
  | Uifthenelse(Uprim(Psequor, _, _) as cond, ifso, ifnot) ->
      let raise_num = next_raise_count () in
      make_catch
        raise_num
        (exit_if_true cond raise_num (transl ifnot))
        (transl ifso)
  | Uifthenelse (Uifthenelse (cond, condso, condnot), ifso, ifnot) ->
      let num_true = next_raise_count () in
      make_catch
        num_true
        (make_catch2
           (fun shared_false ->
             Cifthenelse
               (test_bool (transl cond),
                exit_if_true condso num_true shared_false,
                exit_if_true condnot num_true shared_false))
           (transl ifnot))
        (transl ifso)
  | Uifthenelse(cond, ifso, ifnot) ->
      Cifthenelse(test_bool(transl cond), transl ifso, transl ifnot)
  | Usequence(exp1, exp2) ->
      Csequence(remove_unit(transl exp1), transl exp2)
  | Uwhile(cond, body) ->
      let raise_num = next_raise_count () in
      return_unit
        (Ccatch
           (raise_num, [],
            Cloop(exit_if_false cond (remove_unit(transl body)) raise_num),
            Ctuple []))
  (* for-loop: counter is a tagged int stepped by 2; [id_prev] remembers
     the pre-increment value so the loop also terminates correctly when
     the bound is max_int (the increment would wrap). *)
  | Ufor(id, low, high, dir, body) ->
      let tst = match dir with Upto -> Cgt | Downto -> Clt in
      let inc = match dir with Upto -> Caddi | Downto -> Csubi in
      let raise_num = next_raise_count () in
      let id_prev = Ident.rename id in
      return_unit
        (Clet
           (id, transl low,
            bind_nonvar "bound" (transl high) (fun high ->
              Ccatch
                (raise_num, [],
                 Cifthenelse
                   (Cop(Ccmpi tst, [Cvar id; high]), Cexit (raise_num, []),
                    Cloop
                      (Csequence
                         (remove_unit(transl body),
                          Clet(id_prev, Cvar id,
                           Csequence
                             (Cassign(id,
                                Cop(inc, [Cvar id; Cconst_int 2])),
                              Cifthenelse
                                (Cop(Ccmpi Ceq, [Cvar id_prev; high]),
                                 Cexit (raise_num,[]), Ctuple [])))))),
                 Ctuple []))))
  | Uassign(id, exp) ->
      return_unit(Cassign(id, transl exp))
(* Translate the application of a one-argument primitive [p] to [arg];
   [dbg] is the debug info of the application site.  Tagged-integer
   arithmetic works on 2n+1 representations throughout. *)
and transl_prim_1 p arg dbg =
  match p with
  (* Generic operations *)
    Pidentity ->
      transl arg
  | Pignore ->
      return_unit(remove_unit (transl arg))
  (* Heap operations *)
  | Pfield n ->
      get_field (transl arg) n
  | Pfloatfield n ->
      let ptr = transl arg in
      box_float(
        Cop(Cload Double_u,
            [if n = 0 then ptr
             else Cop(Cadda, [ptr; Cconst_int(n * size_float)])]))
  | Praise ->
      Cop(Craise dbg, [transl arg])
  (* Integer operations *)
  | Pnegint ->
      (* 2 - (2n+1) = 2(-n)+1 *)
      Cop(Csubi, [Cconst_int 2; transl arg])
  | Poffsetint n ->
      if no_overflow_lsl n then
        add_const (transl arg) (n lsl 1)
      else
        transl_prim_2 Paddint arg (Uconst (Const_base(Const_int n))) Debuginfo.none
  | Poffsetref n ->
      return_unit
        (bind "ref" (transl arg) (fun arg ->
          Cop(Cstore Word,
              [arg; add_const (Cop(Cload Word, [arg])) (n lsl 1)])))
  (* Float operations *)
  | Pfloatofint ->
      box_float(Cop(Cfloatofint, [untag_int(transl arg)]))
  | Pintoffloat ->
      tag_int(Cop(Cintoffloat, [transl_unbox_float arg]))
  | Pnegfloat ->
      box_float(Cop(Cnegf, [transl_unbox_float arg]))
  | Pabsfloat ->
      box_float(Cop(Cabsf, [transl_unbox_float arg]))
  (* String operations *)
  | Pstringlength ->
      tag_int(string_length (transl arg))
  (* Array operations *)
  | Parraylength kind ->
      begin match kind with
        Pgenarray ->
          (* Generic arrays: the length is read from the header, using
             the float-specific shift when the header says float array. *)
          let len =
            if wordsize_shift = numfloat_shift then
              Cop(Clsr, [header(transl arg); Cconst_int wordsize_shift])
            else
              bind "header" (header(transl arg)) (fun hdr ->
                Cifthenelse(is_addr_array_hdr hdr,
                            Cop(Clsr, [hdr; Cconst_int wordsize_shift]),
                            Cop(Clsr, [hdr; Cconst_int numfloat_shift]))) in
          Cop(Cor, [len; Cconst_int 1])
      | Paddrarray | Pintarray ->
          Cop(Cor, [addr_array_length(header(transl arg)); Cconst_int 1])
      | Pfloatarray ->
          Cop(Cor, [float_array_length(header(transl arg)); Cconst_int 1])
      end
  (* Boolean operations *)
  | Pnot ->
      Cop(Csubi, [Cconst_int 4; transl arg]) (* 1 -> 3, 3 -> 1 *)
  (* Test integer/block *)
  | Pisint ->
      tag_int(Cop(Cand, [transl arg; Cconst_int 1]))
  (* Boxed integers *)
  | Pbintofint bi ->
      box_int bi (untag_int (transl arg))
  | Pintofbint bi ->
      force_tag_int (transl_unbox_int bi arg)
  | Pcvtbint(bi1, bi2) ->
      box_int bi2 (transl_unbox_int bi1 arg)
  | Pnegbint bi ->
      box_int bi (Cop(Csubi, [Cconst_int 0; transl_unbox_int bi arg]))
  | _ ->
      fatal_error "Cmmgen.transl_prim_1"
(* Translate the application of a two-argument primitive. *)
and transl_prim_2 p arg1 arg2 dbg =
  match p with
  (* Heap operations *)
    Psetfield(n, ptr) ->
      (* Pointer stores must go through caml_modify for the write barrier. *)
      if ptr then
        return_unit(Cop(Cextcall("caml_modify", typ_void, false, Debuginfo.none),
                        [field_address (transl arg1) n; transl arg2]))
      else
        return_unit(set_field (transl arg1) n (transl arg2))
  | Psetfloatfield n ->
      let ptr = transl arg1 in
      return_unit(
        Cop(Cstore Double_u,
            [if n = 0 then ptr
             else Cop(Cadda, [ptr; Cconst_int(n * size_float)]);
             transl_unbox_float arg2]))
  (* Boolean operations *)
  | Psequand ->
      Cifthenelse(test_bool(transl arg1), transl arg2, Cconst_int 1)
      (* let id = Ident.create "res1" in
         Clet(id, transl arg1,
              Cifthenelse(test_bool(Cvar id), transl arg2, Cvar id)) *)
  | Psequor ->
      Cifthenelse(test_bool(transl arg1), Cconst_int 3, transl arg2)
  (* Integer operations *)
  | Paddint ->
      decr_int(add_int (transl arg1) (transl arg2))
  | Psubint ->
      incr_int(sub_int (transl arg1) (transl arg2))
  | Pmulint ->
      incr_int(Cop(Cmuli, [decr_int(transl arg1); untag_int(transl arg2)]))
  | Pdivint ->
      tag_int(safe_divmod Cdivi (untag_int(transl arg1)) (untag_int(transl arg2)) dbg)
  | Pmodint ->
      tag_int(safe_divmod Cmodi (untag_int(transl arg1)) (untag_int(transl arg2)) dbg)
  | Pandint ->
      Cop(Cand, [transl arg1; transl arg2])
  | Porint ->
      Cop(Cor, [transl arg1; transl arg2])
  | Pxorint ->
      (* xor clears the tag bits; or it back in. *)
      Cop(Cor, [Cop(Cxor, [ignore_low_bit_int(transl arg1);
                           ignore_low_bit_int(transl arg2)]);
                Cconst_int 1])
  | Plslint ->
      incr_int(lsl_int (decr_int(transl arg1)) (untag_int(transl arg2)))
  | Plsrint ->
      Cop(Cor, [Cop(Clsr, [transl arg1; untag_int(transl arg2)]);
                Cconst_int 1])
  | Pasrint ->
      Cop(Cor, [Cop(Casr, [transl arg1; untag_int(transl arg2)]);
                Cconst_int 1])
  | Pintcomp cmp ->
      tag_int(Cop(Ccmpi(transl_comparison cmp), [transl arg1; transl arg2]))
  | Pisout ->
      transl_isout (transl arg1) (transl arg2)
  (* Float operations *)
  | Paddfloat ->
      box_float(Cop(Caddf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Psubfloat ->
      box_float(Cop(Csubf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Pmulfloat ->
      box_float(Cop(Cmulf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Pdivfloat ->
      box_float(Cop(Cdivf,
                    [transl_unbox_float arg1; transl_unbox_float arg2]))
  | Pfloatcomp cmp ->
      tag_int(Cop(Ccmpf(transl_comparison cmp),
                  [transl_unbox_float arg1; transl_unbox_float arg2]))
  (* String operations *)
  | Pstringrefu ->
      tag_int(Cop(Cload Byte_unsigned,
                  [add_int (transl arg1) (untag_int(transl arg2))]))
  | Pstringrefs ->
      tag_int
        (bind "str" (transl arg1) (fun str ->
          bind "index" (untag_int (transl arg2)) (fun idx ->
            Csequence(
              Cop(Ccheckbound dbg, [string_length str; idx]),
              Cop(Cload Byte_unsigned, [add_int str idx])))))
  (* Array operations *)
  | Parrayrefu kind ->
      begin match kind with
        Pgenarray ->
          bind "arr" (transl arg1) (fun arr ->
            bind "index" (transl arg2) (fun idx ->
              Cifthenelse(is_addr_array_ptr arr,
                          addr_array_ref arr idx,
                          float_array_ref arr idx)))
      | Paddrarray | Pintarray ->
          addr_array_ref (transl arg1) (transl arg2)
      | Pfloatarray ->
          float_array_ref (transl arg1) (transl arg2)
      end
  | Parrayrefs kind ->
      begin match kind with
        Pgenarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              bind "header" (header arr) (fun hdr ->
                Cifthenelse(is_addr_array_hdr hdr,
                  Csequence(Cop(Ccheckbound dbg, [addr_array_length hdr; idx]),
                            addr_array_ref arr idx),
                  Csequence(Cop(Ccheckbound dbg, [float_array_length hdr; idx]),
                            float_array_ref arr idx)))))
      | Paddrarray | Pintarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [addr_array_length(header arr); idx]),
                        addr_array_ref arr idx)))
      | Pfloatarray ->
          box_float(
            bind "index" (transl arg2) (fun idx ->
              bind "arr" (transl arg1) (fun arr ->
                Csequence(Cop(Ccheckbound dbg,
                              [float_array_length(header arr); idx]),
                          unboxed_float_array_ref arr idx))))
      end
  (* Operations on bitvects *)
  | Pbittest ->
      bind "index" (untag_int(transl arg2)) (fun idx ->
        tag_int(
          Cop(Cand, [Cop(Clsr, [Cop(Cload Byte_unsigned,
                                    [add_int (transl arg1)
                                      (Cop(Clsr, [idx; Cconst_int 3]))]);
                                Cop(Cand, [idx; Cconst_int 7])]);
                     Cconst_int 1])))
  (* Boxed integers *)
  | Paddbint bi ->
      box_int bi (Cop(Caddi,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Psubbint bi ->
      box_int bi (Cop(Csubi,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Pmulbint bi ->
      box_int bi (Cop(Cmuli,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Pdivbint bi ->
      box_int bi (safe_divmod Cdivi
                      (transl_unbox_int bi arg1) (transl_unbox_int bi arg2)
                      dbg)
  | Pmodbint bi ->
      box_int bi (safe_divmod Cmodi
                      (transl_unbox_int bi arg1) (transl_unbox_int bi arg2)
                      dbg)
  | Pandbint bi ->
      box_int bi (Cop(Cand,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Porbint bi ->
      box_int bi (Cop(Cor,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Pxorbint bi ->
      box_int bi (Cop(Cxor,
                      [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | Plslbint bi ->
      box_int bi (Cop(Clsl,
                      [transl_unbox_int bi arg1; untag_int(transl arg2)]))
  | Plsrbint bi ->
      box_int bi (Cop(Clsr,
                      [make_unsigned_int bi (transl_unbox_int bi arg1);
                       untag_int(transl arg2)]))
  | Pasrbint bi ->
      box_int bi (Cop(Casr,
                      [transl_unbox_int bi arg1; untag_int(transl arg2)]))
  | Pbintcomp(bi, cmp) ->
      tag_int (Cop(Ccmpi(transl_comparison cmp),
                   [transl_unbox_int bi arg1; transl_unbox_int bi arg2]))
  | _ ->
      fatal_error "Cmmgen.transl_prim_2"
(* Translate the application of a three-argument primitive
   (string and array stores, safe and unsafe variants). *)
and transl_prim_3 p arg1 arg2 arg3 dbg =
  match p with
  (* String operations *)
    Pstringsetu ->
      return_unit(Cop(Cstore Byte_unsigned,
                      [add_int (transl arg1) (untag_int(transl arg2));
                       untag_int(transl arg3)]))
  | Pstringsets ->
      return_unit
        (bind "str" (transl arg1) (fun str ->
          bind "index" (untag_int (transl arg2)) (fun idx ->
            Csequence(
              Cop(Ccheckbound dbg, [string_length str; idx]),
              Cop(Cstore Byte_unsigned,
                  [add_int str idx; untag_int(transl arg3)])))))
  (* Array operations: generic arrays dispatch at run time on the header
     between pointer-array and float-array representations. *)
  | Parraysetu kind ->
      return_unit(begin match kind with
        Pgenarray ->
          bind "newval" (transl arg3) (fun newval ->
            bind "index" (transl arg2) (fun index ->
              bind "arr" (transl arg1) (fun arr ->
                Cifthenelse(is_addr_array_ptr arr,
                            addr_array_set arr index newval,
                            float_array_set arr index (unbox_float newval)))))
      | Paddrarray ->
          addr_array_set (transl arg1) (transl arg2) (transl arg3)
      | Pintarray ->
          int_array_set (transl arg1) (transl arg2) (transl arg3)
      | Pfloatarray ->
          float_array_set (transl arg1) (transl arg2) (transl_unbox_float arg3)
      end)
  | Parraysets kind ->
      return_unit(begin match kind with
        Pgenarray ->
          bind "newval" (transl arg3) (fun newval ->
            bind "index" (transl arg2) (fun idx ->
              bind "arr" (transl arg1) (fun arr ->
                bind "header" (header arr) (fun hdr ->
                  Cifthenelse(is_addr_array_hdr hdr,
                    Csequence(Cop(Ccheckbound dbg, [addr_array_length hdr; idx]),
                              addr_array_set arr idx newval),
                    Csequence(Cop(Ccheckbound dbg, [float_array_length hdr; idx]),
                              float_array_set arr idx
                                              (unbox_float newval)))))))
      | Paddrarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [addr_array_length(header arr); idx]),
                        addr_array_set arr idx (transl arg3))))
      | Pintarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [addr_array_length(header arr); idx]),
                        int_array_set arr idx (transl arg3))))
      | Pfloatarray ->
          bind "index" (transl arg2) (fun idx ->
            bind "arr" (transl arg1) (fun arr ->
              Csequence(Cop(Ccheckbound dbg, [float_array_length(header arr);idx]),
                        float_array_set arr idx (transl_unbox_float arg3))))
      end)
  | _ ->
    fatal_error "Cmmgen.transl_prim_3"
(* Translate [exp] to an unboxed float: float literals are emitted as
   raw Cmm float constants, anything else is translated and unboxed. *)
and transl_unbox_float exp =
  match exp with
  | Uconst (Const_base (Const_float f)) -> Cconst_float f
  | _ -> unbox_float (transl exp)
(* Translate [exp] to an unboxed integer of kind [bi]: literals (and
   literals being boxed into the same kind) become raw native-int
   constants; anything else is translated and unboxed. *)
and transl_unbox_int bi = function
    Uconst(Const_base(Const_int32 n)) ->
      Cconst_natint (Nativeint.of_int32 n)
  | Uconst(Const_base(Const_nativeint n)) ->
      Cconst_natint n
  | Uconst(Const_base(Const_int64 n)) ->
      (* Only valid when an int64 fits a native word. *)
      assert (size_int = 8); Cconst_natint (Int64.to_nativeint n)
  | Uprim(Pbintofint bi', [Uconst(Const_base(Const_int i))], _) when bi = bi' ->
      Cconst_int i
  | exp -> unbox_int bi (transl exp)
(* Compile [let id = exp in body] where [exp] yields a boxed number:
   bind the unboxed value under a fresh [unboxed_id] and rewrite [body]
   with [subst_boxed_number].  If the boxed variable is both still
   needed and assigned, give up and keep the plain boxed binding;
   otherwise re-box on demand only where the box is still referenced. *)
and transl_unbox_let box_fn unbox_fn transl_unbox_fn id exp body =
  let unboxed_id = Ident.create (Ident.name id) in
  let trbody1 = transl body in
  let (trbody2, need_boxed, is_assigned) =
    subst_boxed_number unbox_fn id unboxed_id trbody1 in
  if need_boxed && is_assigned then
    Clet(id, transl exp, trbody1)
  else
    Clet(unboxed_id, transl_unbox_fn exp,
         if need_boxed
         then Clet(id, box_fn(Cvar unboxed_id), trbody2)
         else trbody2)
(* Wrap [body] in a catch for exit [ncatch]; when the body is just that
   exit, the catch is useless and the handler is returned directly. *)
and make_catch ncatch body handler = match body with
| Cexit (nexit,[]) when nexit=ncatch -> handler
| _ ->  Ccatch (ncatch, [], body, handler)

(* Share [handler] between the branches produced by [mk_body].  Cheap
   handlers (exits, constants, unit) are duplicated inline; otherwise a
   fresh exit number is allocated and the handler emitted once. *)
and make_catch2 mk_body handler = match handler with
| Cexit (_,[])|Ctuple []|Cconst_int _|Cconst_pointer _ ->
    mk_body handler
| _ ->
    let nfail = next_raise_count () in
    make_catch
      nfail
      (mk_body (Cexit (nfail,[])))
      handler
(* Compile condition [cond] so that control jumps to exit [nfail] when it
   is true and falls through to [otherwise] when it is false, decomposing
   &&, ||, not and nested ifs without materializing a boolean. *)
and exit_if_true cond nfail otherwise =
  match cond with
  | Uconst (Const_pointer 0) -> otherwise        (* statically false *)
  | Uconst (Const_pointer 1) -> Cexit (nfail,[]) (* statically true *)
  | Uprim(Psequor, [arg1; arg2], _) ->
      exit_if_true arg1 nfail (exit_if_true arg2 nfail otherwise)
  | Uprim(Psequand, _, _) ->
      (* Dualize through exit_if_false, reusing [otherwise]'s exit when
         it already is a bare exit. *)
      begin match otherwise with
      | Cexit (raise_num,[]) ->
          exit_if_false cond (Cexit (nfail,[])) raise_num
      | _ ->
          let raise_num = next_raise_count () in
          make_catch
            raise_num
            (exit_if_false cond (Cexit (nfail,[])) raise_num)
            otherwise
      end
  | Uprim(Pnot, [arg], _) ->
      exit_if_false arg otherwise nfail
  | Uifthenelse (cond, ifso, ifnot) ->
      make_catch2
        (fun shared ->
          Cifthenelse
            (test_bool (transl cond),
             exit_if_true ifso nfail shared,
             exit_if_true ifnot nfail shared))
        otherwise
  | _ ->
      Cifthenelse(test_bool(transl cond), Cexit (nfail, []), otherwise)
(* Dual of [exit_if_true]: jump to exit [nfail] when [cond] is false,
   fall through to [otherwise] when it is true. *)
and exit_if_false cond otherwise nfail =
  match cond with
  | Uconst (Const_pointer 0) -> Cexit (nfail,[]) (* statically false *)
  | Uconst (Const_pointer 1) -> otherwise        (* statically true *)
  | Uprim(Psequand, [arg1; arg2], _) ->
      exit_if_false arg1 (exit_if_false arg2 otherwise nfail) nfail
  | Uprim(Psequor, _, _) ->
      begin match otherwise with
      | Cexit (raise_num,[]) ->
          exit_if_true cond raise_num (Cexit (nfail,[]))
      | _ ->
          let raise_num = next_raise_count () in
          make_catch
            raise_num
            (exit_if_true cond raise_num (Cexit (nfail,[])))
            otherwise
      end
  | Uprim(Pnot, [arg], _) ->
      exit_if_true arg nfail otherwise
  | Uifthenelse (cond, ifso, ifnot) ->
      make_catch2
        (fun shared ->
          Cifthenelse
            (test_bool (transl cond),
             exit_if_false ifso shared nfail,
             exit_if_false ifnot shared nfail))
        otherwise
  | _ ->
      Cifthenelse(test_bool(transl cond), otherwise, Cexit (nfail, []))
(* Compile a switch on [arg].  [index.(i)] selects a case in [cases] for
   matched value [i].  Consecutive values mapping to the same case are
   first compressed into (low, high, action) intervals — built scanning
   from the top index downwards — then handed to [SwitcherBlocks] which
   chooses between comparison trees and jump tables. *)
and transl_switch arg index cases = match Array.length cases with
| 0 -> fatal_error "Cmmgen.transl_switch"
| 1 -> transl cases.(0)
| _ ->
    let n_index = Array.length index in
    let actions = Array.map transl cases in

    let inters = ref []
    and this_high = ref (n_index-1)
    and this_low = ref (n_index-1)
    and this_act = ref index.(n_index-1) in
    for i = n_index-2 downto 0 do
      let act = index.(i) in
      if act = !this_act then
        decr this_low
      else begin
        inters := (!this_low, !this_high, !this_act) :: !inters ;
        this_high := i ;
        this_low := i ;
        this_act := act
      end
    done ;
    inters := (0, !this_high, !this_act) :: !inters ;
    bind "switcher" arg
      (fun a ->
        SwitcherBlocks.zyva
          (0,n_index-1)
          (fun i -> Cconst_int i)
          a
          (Array.of_list !inters) actions)
(* Compile [let rec] over values in three passes: (1) pre-allocate a
   dummy block of the right size for each binding whose size is known
   ([RHS_block]); (2) evaluate the non-recursive bindings; (3) compute
   each recursive right-hand side and patch the dummy in place with
   caml_update_dummy. *)
and transl_letrec bindings cont =
  let bsz = List.map (fun (id, exp) -> (id, exp, expr_size exp)) bindings in
  let rec init_blocks = function
    | [] -> fill_nonrec bsz
    | (id, exp, RHS_block sz) :: rem ->
        Clet(id, Cop(Cextcall("caml_alloc_dummy", typ_addr, true, Debuginfo.none),
                     [int_const sz]),
             init_blocks rem)
    | (id, exp, RHS_nonrec) :: rem ->
        (* Placeholder binding; the real value is installed in pass 2. *)
        Clet (id, Cconst_int 0, init_blocks rem)
  and fill_nonrec = function
    | [] -> fill_blocks bsz
    | (id, exp, RHS_block sz) :: rem -> fill_nonrec rem
    | (id, exp, RHS_nonrec) :: rem ->
        Clet (id, transl exp, fill_nonrec rem)
  and fill_blocks = function
    | [] -> cont
    | (id, exp, RHS_block _) :: rem ->
        Csequence(Cop(Cextcall("caml_update_dummy", typ_void, false, Debuginfo.none),
                      [Cvar id; transl exp]),
                  fill_blocks rem)
    | (id, exp, RHS_nonrec) :: rem ->
        fill_blocks rem
  in init_blocks bsz
(* Turn one queued function (label, parameters, ulambda body) into a Cmm
   function definition.  Every parameter gets the generic boxed type
   [typ_addr]; [fun_fast] follows the global -O setting. *)
let transl_function lbl params body =
  let typed_params = List.map (fun id -> (id, typ_addr)) params in
  Cfunction { fun_name = lbl;
              fun_args = typed_params;
              fun_body = transl body;
              fun_fast = !Clflags.optimize_for_speed }
(* Sets of function labels, used to avoid emitting a function twice. *)
module StringSet =
  Set.Make(struct
    type t = string
    let compare = compare
  end)
(* Drain the [functions] worklist, translating each function at most once
   (the queue may contain duplicates) and prepending the resulting Cmm
   definitions to [cont].  Translating a body can enqueue further
   functions, which are picked up by the recursion; the queue raising
   [Queue.Empty] terminates it. *)
let rec transl_all_functions already_translated cont =
  try
    let (lbl, params, body) = Queue.take functions in
    if not (StringSet.mem lbl already_translated) then
      transl_all_functions
        (StringSet.add lbl already_translated)
        (transl_function lbl params body :: cont)
    else
      transl_all_functions already_translated cont
  with Queue.Empty ->
    cont
(* Memo table sharing the labels of immutable string constants
   (Const_immstring) within the current compilation unit. *)
let immstrings = Hashtbl.create 17
(* Emit the data items for structured constant [cst] under top-level
   symbol [symb], prepending them to [cont].  Each constant is a header
   word followed by its payload, mirroring the runtime heap layout. *)
let rec emit_constant symb cst cont =
  match cst with
    Const_base(Const_float s) ->
      Cint(float_header) :: Cdefine_symbol symb :: Cdouble s :: cont
  | Const_base(Const_string s) | Const_immstring s ->
      Cint(string_header (String.length s)) ::
      Cdefine_symbol symb ::
      emit_string_constant s cont
  | Const_base(Const_int32 n) ->
      Cint(boxedint32_header) :: Cdefine_symbol symb ::
      emit_boxed_int32_constant n cont
  | Const_base(Const_int64 n) ->
      Cint(boxedint64_header) :: Cdefine_symbol symb ::
      emit_boxed_int64_constant n cont
  | Const_base(Const_nativeint n) ->
      Cint(boxedintnat_header) :: Cdefine_symbol symb ::
      emit_boxed_nativeint_constant n cont
  | Const_block(tag, fields) ->
      (* Sub-constants needing their own storage are emitted after the
         block, under fresh local labels (see emit_constant_field). *)
      let (emit_fields, cont1) = emit_constant_fields fields cont in
      Cint(block_header tag (List.length fields)) ::
      Cdefine_symbol symb ::
      emit_fields @ cont1
  | Const_float_array(fields) ->
      Cint(floatarray_header (List.length fields)) ::
      Cdefine_symbol symb ::
      Misc.map_end (fun f -> Cdouble f) fields cont
  | _ -> fatal_error "gencmm.emit_constant"
(* Emit the fields of a constant block, left to right.  Returns the
   in-block words (immediates or label references) plus the accumulated
   out-of-line data.  The left-to-right order matters: label allocation
   in [emit_constant_field] is a side effect. *)
and emit_constant_fields fields cont =
  match fields with
    [] -> ([], cont)
  | f1 :: fl ->
      let (data1, cont1) = emit_constant_field f1 cont in
      let (datal, contl) = emit_constant_fields fl cont1 in
      (data1 :: datal, contl)
(* Emit one field of a constant block.  Returns the word to store in the
   block — a tagged immediate (2n+1) for ints/chars/pointers, or a
   reference to a fresh local label — together with the out-of-line data
   items for that label prepended to [cont]. *)
and emit_constant_field field cont =
  match field with
    Const_base(Const_int n) ->
      (Cint(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n),
       cont)
  | Const_base(Const_char c) ->
      (Cint(Nativeint.of_int(((Char.code c) lsl 1) + 1)), cont)
  | Const_base(Const_float s) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(float_header) :: Cdefine_label lbl :: Cdouble s :: cont)
  | Const_base(Const_string s) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(string_header (String.length s)) :: Cdefine_label lbl ::
       emit_string_constant s cont)
  | Const_immstring s ->
      (* Immutable strings are shared through the [immstrings] table. *)
      begin try
        (Clabel_address (Hashtbl.find immstrings s), cont)
      with Not_found ->
        let lbl = new_const_label() in
        Hashtbl.add immstrings s lbl;
        (Clabel_address lbl,
         Cint(string_header (String.length s)) :: Cdefine_label lbl ::
         emit_string_constant s cont)
      end
  | Const_base(Const_int32 n) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(boxedint32_header) :: Cdefine_label lbl ::
       emit_boxed_int32_constant n cont)
  | Const_base(Const_int64 n) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(boxedint64_header) :: Cdefine_label lbl ::
       emit_boxed_int64_constant n cont)
  | Const_base(Const_nativeint n) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(boxedintnat_header) :: Cdefine_label lbl ::
       emit_boxed_nativeint_constant n cont)
  | Const_pointer n ->
      (Cint(Nativeint.add (Nativeint.shift_left (Nativeint.of_int n) 1) 1n),
       cont)
  | Const_block(tag, fields) ->
      let lbl = new_const_label() in
      let (emit_fields, cont1) = emit_constant_fields fields cont in
      (Clabel_address lbl,
       Cint(block_header tag (List.length fields)) :: Cdefine_label lbl ::
       emit_fields @ cont1)
  | Const_float_array(fields) ->
      let lbl = new_const_label() in
      (Clabel_address lbl,
       Cint(floatarray_header (List.length fields)) :: Cdefine_label lbl ::
       Misc.map_end (fun f -> Cdouble f) fields cont)
(* Emit the bytes of a string constant: the contents, zero padding to the
   next word boundary, and a final byte holding the padding length
   (the runtime's string-length encoding). *)
and emit_string_constant s cont =
  let n = size_int - 1 - (String.length s) mod size_int in
  Cstring s :: Cskip n :: Cint8 n :: cont

(* Boxed int32 payload: custom-ops pointer, then the value.  On 64-bit
   targets a 32-bit pad keeps the block one full word. *)
and emit_boxed_int32_constant n cont =
  let n = Nativeint.of_int32 n in
  if size_int = 8 then
    Csymbol_address("caml_int32_ops") :: Cint32 n :: Cint32 0n :: cont
  else
    Csymbol_address("caml_int32_ops") :: Cint n :: cont

(* Boxed nativeint payload: custom-ops pointer, then one word. *)
and emit_boxed_nativeint_constant n cont =
  Csymbol_address("caml_nativeint_ops") :: Cint n :: cont

(* Boxed int64 payload: one word on 64-bit targets, two words in
   target endianness on 32-bit targets. *)
and emit_boxed_int64_constant n cont =
  let lo = Int64.to_nativeint n in
  if size_int = 8 then
    Csymbol_address("caml_int64_ops") :: Cint lo :: cont
  else begin
    let hi = Int64.to_nativeint (Int64.shift_right n 32) in
    if big_endian then
      Csymbol_address("caml_int64_ops") :: Cint hi :: Cint lo :: cont
    else
      Csymbol_address("caml_int64_ops") :: Cint lo :: Cint hi :: cont
  end
(* Emit a statically allocated closure block for [fundecls] under symbol
   [symb].  The first function sits at the start of the block; each
   following function is preceded by an infix header.  Per function the
   layout is [code; tagged arity 1] (arity 1, [Cint 3n] = tagged 1) or
   [curry stub; tagged arity; code] otherwise — the same layout
   [transl] uses for heap-allocated closures. *)
let emit_constant_closure symb fundecls cont =
  match fundecls with
    [] -> assert false
  | (label, arity, params, body) :: remainder ->
      let rec emit_others pos = function
        [] -> cont
      | (label, arity, params, body) :: rem ->
          if arity = 1 then
            Cint(infix_header pos) ::
            Csymbol_address label ::
            Cint 3n ::
            emit_others (pos + 3) rem
          else
            Cint(infix_header pos) ::
            Csymbol_address(curry_function arity) ::
            Cint(Nativeint.of_int (arity lsl 1 + 1)) ::
            Csymbol_address label ::
            emit_others (pos + 4) rem in
      Cint(closure_header (fundecls_size fundecls)) ::
      Cdefine_symbol symb ::
      if arity = 1 then
        Csymbol_address label ::
        Cint 3n ::
        emit_others 3 remainder
      else
        Csymbol_address(curry_function arity) ::
        Cint(Nativeint.of_int (arity lsl 1 + 1)) ::
        Csymbol_address label ::
        emit_others 4 remainder
(* Flush the structured constants and constant closures accumulated
   during translation, prepending their data sections to [cont], and
   reset the per-unit tables so the next compilation unit starts clean.
   [immstrings] must be cleared too, otherwise a later unit would reuse
   labels that were only emitted in this one. *)
let emit_all_constants cont =
  let c = ref cont in
  List.iter
    (fun (lbl, cst) -> c := Cdata(emit_constant lbl cst []) :: !c)
    !structured_constants;
  structured_constants := [];
  Hashtbl.clear immstrings;   (* PR#3979 *)
  List.iter
    (fun (symb, fundecls) ->
        c := Cdata(emit_constant_closure symb fundecls []) :: !c)
    !constant_closures;
  constant_closures := [];
  !c
(* Translate a whole compilation unit: compile [ulam] as the module's
   entry function, then all queued functions, then the pending constants,
   and finally reserve the (initially uninitialized) global block of
   [size] fields that the entry code will fill. *)
let compunit size ulam =
  let glob = Compilenv.make_symbol None in
  let init_code = transl ulam in
  let c1 = [Cfunction {fun_name = Compilenv.make_symbol (Some "entry");
                       fun_args = [];
                       fun_body = init_code; fun_fast = false}] in
  let c2 = transl_all_functions StringSet.empty c1 in
  let c3 = emit_all_constants c2 in
  Cdata [Cint(block_header 0 size);
         Cglobal_symbol glob;
         Cdefine_symbol glob;
         Cskip(size * size_addr)] :: c3
(* The C equivalent of the code generated by [cache_public_method]:

CAMLprim value caml_cache_public_method (value meths, value tag, value *cache)
{
  int li = 3, hi = Field(meths,0), mi;
  while (li < hi) { // no need to check the 1st time
    mi = ((li+hi) >> 1) | 1;
    if (tag < Field(meths,mi)) hi = mi-2;
    else li = mi;
  }
  *cache = (li-3)*sizeof(value)+1;
  return Field (meths, li-1);
}
*)
(* Generate code performing a binary search of method [tag] in the sorted
   method table [meths], storing the found method's (tagged) offset into
   [cache] and returning that offset.  Word 0 of [meths] is the upper
   search bound; entries start at word 3 and tags sit at odd indices. *)
let cache_public_method meths tag cache =
  let raise_num = next_raise_count () in
  let li = Ident.create "li" and hi = Ident.create "hi"
  and mi = Ident.create "mi" and tagged = Ident.create "tagged" in
  Clet (
  li, Cconst_int 3,
  Clet (
  hi, Cop(Cload Word, [meths]),
  Csequence(
  Ccatch
    (raise_num, [],
     Cloop
       (Clet(
        mi,
        (* mi = ((li+hi) >> 1) | 1 : midpoint, forced odd. *)
        Cop(Cor,
            [Cop(Clsr, [Cop(Caddi, [Cvar li; Cvar hi]); Cconst_int 1]);
             Cconst_int 1]),
        Csequence(
        Cifthenelse
          (Cop (Ccmpi Clt,
                [tag;
                 Cop(Cload Word,
                     [Cop(Cadda,
                          [meths; lsl_const (Cvar mi) log2_size_addr])])]),
           Cassign(hi, Cop(Csubi, [Cvar mi; Cconst_int 2])),
           Cassign(li, Cvar mi)),
        Cifthenelse
          (Cop(Ccmpi Cge, [Cvar li; Cvar hi]), Cexit (raise_num, []),
           Ctuple [])))),
     Ctuple []),
  Clet (
  (* tagged = (li-3)*size_addr + 1, i.e. the offset as a tagged int. *)
  tagged, Cop(Cadda, [lsl_const (Cvar li) log2_size_addr;
                      Cconst_int(1 - 3 * size_addr)]),
  Csequence(Cop (Cstore Word, [cache; Cvar tagged]),
            Cvar tagged)))))
(* Generate an application function:
( defun caml_applyN ( a1 ... aN clos )
( if (= clos.arity N )
( app clos.direct a1 ... aN clos )
( let ( clos1 ( app clos.code a1 clos )
clos2 ( app clos1.code a2 clos )
...
closN-1 ( app closN-2.code ) )
( app closN-1.code aN closN-1 ) ) ) )
(defun caml_applyN (a1 ... aN clos)
(if (= clos.arity N)
(app clos.direct a1 ... aN clos)
(let (clos1 (app clos.code a1 clos)
clos2 (app clos1.code a2 clos)
...
closN-1 (app closN-2.code aN-1 closN-2))
(app closN-1.code aN closN-1))))
*)
(* Body shared by the caml_applyN functions: if the closure's recorded
   arity (field 1) matches [arity], jump to its direct entry point
   (field 2) with all arguments at once; otherwise apply the arguments
   one at a time via the one-argument entry point (field 0), threading
   the intermediate closures.  Returns (argument idents, closure ident,
   body expression).
   NOTE(review): [Array.create] is the legacy spelling of [Array.make]. *)
let apply_function_body arity =
let arg = Array.create arity (Ident.create "arg") in
for i = 1 to arity - 1 do arg.(i) <- Ident.create "arg" done;
let clos = Ident.create "clos" in
let rec app_fun clos n =
if n = arity-1 then
Cop(Capply(typ_addr, Debuginfo.none),
[get_field (Cvar clos) 0; Cvar arg.(n); Cvar clos])
else begin
let newclos = Ident.create "clos" in
Clet(newclos,
Cop(Capply(typ_addr, Debuginfo.none),
[get_field (Cvar clos) 0; Cvar arg.(n); Cvar clos]),
app_fun newclos (n+1))
end in
let args = Array.to_list arg in
let all_args = args @ [clos] in
(args, clos,
(* unary case: no arity test needed, always use the slow path *)
if arity = 1 then app_fun clos 0 else
Cifthenelse(
Cop(Ccmpi Ceq, [get_field (Cvar clos) 1; int_const arity]),
Cop(Capply(typ_addr, Debuginfo.none),
get_field (Cvar clos) 2 :: List.map (fun s -> Cvar s) all_args),
app_fun clos 0))
(* Generate caml_sendN: method dispatch with an inline cache.  Loads the
   object's method table, masks the cached offset, validates the cached
   tag, and on a miss falls back to [cache_public_method]; the resolved
   method closure is then applied via the generic apply body. *)
let send_function arity =
let (args, clos', body) = apply_function_body (1+arity) in
let cache = Ident.create "cache"
and obj = List.hd args
and tag = Ident.create "tag" in
let clos =
let cache = Cvar cache and obj = Cvar obj and tag = Cvar tag in
let meths = Ident.create "meths" and cached = Ident.create "cached" in
let real = Ident.create "real" in
let mask = get_field (Cvar meths) 1 in
let cached_pos = Cvar cached in
let tag_pos = Cop(Cadda, [Cop (Cadda, [cached_pos; Cvar meths]);
Cconst_int(3*size_addr-1)]) in
let tag' = Cop(Cload Word, [tag_pos]) in
Clet (
meths, Cop(Cload Word, [obj]),
Clet (
cached, Cop(Cand, [Cop(Cload Word, [cache]); mask]),
Clet (
real,
(* cache miss: recompute the offset via binary search *)
Cifthenelse(Cop(Ccmpa Cne, [tag'; tag]),
cache_public_method (Cvar meths) tag cache,
cached_pos),
Cop(Cload Word, [Cop(Cadda, [Cop (Cadda, [Cvar real; Cvar meths]);
Cconst_int(2*size_addr-1)])]))))
in
let body = Clet(clos', clos, body) in
let fun_args =
[obj, typ_addr; tag, typ_int; cache, typ_addr]
@ List.map (fun id -> (id, typ_addr)) (List.tl args) in
Cfunction
{fun_name = "caml_send" ^ string_of_int arity;
fun_args = fun_args;
fun_body = body;
fun_fast = true}
(* Wrap the generic n-ary application body in a [Cfunction] named
   caml_applyN; every parameter (arguments plus closure) is an address. *)
let apply_function arity =
let params, closure, code = apply_function_body arity in
Cfunction
{fun_name = "caml_apply" ^ string_of_int arity;
fun_args = List.map (fun p -> (p, typ_addr)) (params @ [closure]);
fun_body = code;
fun_fast = true}
(* Generate tuplifying functions:
( defun caml_tuplifyN ( arg clos )
( app clos.direct # 0(arg ) ... # N-1(arg ) clos ) )
(defun caml_tuplifyN (arg clos)
(app clos.direct #0(arg) ... #N-1(arg) clos)) *)
(* Generate caml_tuplifyN: unpack the components of the tuple argument
   and call the closure's direct entry point (field 2) with them. *)
let tuplify_function arity =
let tuple_arg = Ident.create "arg" in
let closure = Ident.create "clos" in
(* project components #i for i = 0 .. arity-1 out of the tuple *)
let rec fields_from i =
if i < arity
then get_field (Cvar tuple_arg) i :: fields_from (i + 1)
else [] in
Cfunction
{fun_name = "caml_tuplify" ^ string_of_int arity;
fun_args = [tuple_arg, typ_addr; closure, typ_addr];
fun_body =
Cop(Capply(typ_addr, Debuginfo.none),
get_field (Cvar closure) 2 :: fields_from 0 @ [Cvar closure]);
fun_fast = true}
(* Generate currying functions:
( defun caml_curryN ( arg clos )
( alloc HDR caml_curryN_1 arg clos ) )
( defun caml_curryN_1 ( arg clos )
( alloc HDR caml_curryN_2 arg clos ) )
...
( defun caml_curryN_N-1 ( arg clos )
( let ( closN-2 clos.cdr
closN-3 closN-2.cdr
...
clos1 clos2.cdr
clos clos1.cdr )
( app clos.direct
clos1.car clos2.car ... closN-2.car clos.car arg clos ) ) )
(defun caml_curryN (arg clos)
(alloc HDR caml_curryN_1 arg clos))
(defun caml_curryN_1 (arg clos)
(alloc HDR caml_curryN_2 arg clos))
...
(defun caml_curryN_N-1 (arg clos)
(let (closN-2 clos.cdr
closN-3 closN-2.cdr
...
clos1 clos2.cdr
clos clos1.cdr)
(app clos.direct
clos1.car clos2.car ... closN-2.car clos.car arg clos))) *)
(* Generate the last currying step caml_curryN_N-1: walk the chain of
   partial-application closures (field 3 links, field 2 holds the stored
   argument), collecting all previously supplied arguments, then call
   the original closure's direct entry point with everything. *)
let final_curry_function arity =
let last_arg = Ident.create "arg" in
let last_clos = Ident.create "clos" in
let rec curry_fun args clos n =
if n = 0 then
Cop(Capply(typ_addr, Debuginfo.none),
get_field (Cvar clos) 2 ::
args @ [Cvar last_arg; Cvar clos])
else begin
let newclos = Ident.create "clos" in
Clet(newclos,
get_field (Cvar clos) 3,
curry_fun (get_field (Cvar clos) 2 :: args) newclos (n-1))
end in
Cfunction
{fun_name = "caml_curry" ^ string_of_int arity ^
"_" ^ string_of_int (arity-1);
fun_args = [last_arg, typ_addr; last_clos, typ_addr];
fun_body = curry_fun [] last_clos (arity-1);
fun_fast = true}
(* Generate currying steps [num] .. N-1 for caml_curryN.  Each
   intermediate step allocates a 4-word closure holding the next step's
   code pointer, arity 1, the argument just received, and a link to the
   previous closure; the final step is built by [final_curry_function]. *)
let rec intermediate_curry_functions arity num =
if num = arity - 1 then
[final_curry_function arity]
else begin
let name1 = "caml_curry" ^ string_of_int arity in
(* step 0 is the unsuffixed entry point caml_curryN *)
let name2 = if num = 0 then name1 else name1 ^ "_" ^ string_of_int num in
let arg = Ident.create "arg" and clos = Ident.create "clos" in
Cfunction
{fun_name = name2;
fun_args = [arg, typ_addr; clos, typ_addr];
fun_body = Cop(Calloc,
[alloc_closure_header 4;
Cconst_symbol(name1 ^ "_" ^ string_of_int (num+1));
int_const 1; Cvar arg; Cvar clos]);
fun_fast = true}
:: intermediate_curry_functions arity (num+1)
end
(* Negative arities encode tuplified functions; non-negative arities
   produce the full chain of currying steps. *)
let curry_function arity =
if arity < 0
then [tuplify_function (-arity)]
else intermediate_curry_functions arity 0
(* Ordered integer sets, used to collect the arities of the generic
   apply/send/curry functions a program needs. *)
module IntSet = Set.Make(
struct
type t = int
let compare = compare
end)
(* caml_apply2 and caml_apply3 are always emitted in the main program. *)
let default_apply = IntSet.add 2 (IntSet.add 3 IntSet.empty)
(* These apply funs are always present in the main program because
the run - time system needs them ( cf . .
the run-time system needs them (cf. asmrun/<arch>.S) . *)
(* Emit the generic apply/send/curry functions required by [units].
   The needed arities are unioned across all units; unless building a
   shared object, [default_apply] is always included. *)
let generic_functions shared units =
let (apply,send,curry) =
List.fold_left
(fun (apply,send,curry) ui ->
List.fold_right IntSet.add ui.Compilenv.ui_apply_fun apply,
List.fold_right IntSet.add ui.Compilenv.ui_send_fun send,
List.fold_right IntSet.add ui.Compilenv.ui_curry_fun curry)
(IntSet.empty,IntSet.empty,IntSet.empty)
units in
let apply = if shared then apply else IntSet.union apply default_apply in
let accu = IntSet.fold (fun n accu -> apply_function n :: accu) apply [] in
let accu = IntSet.fold (fun n accu -> send_function n :: accu) send accu in
(* curry_function returns a list of steps, hence @ rather than :: *)
IntSet.fold (fun n accu -> curry_function n @ accu) curry accu
(* Generate caml_program: call each unit's entry function in order,
   incrementing the global counter caml_globals_inited after each one
   (so the runtime knows how many globals are initialized on backtrace). *)
let entry_point namelist =
let incr_global_inited =
Cop(Cstore Word,
[Cconst_symbol "caml_globals_inited";
Cop(Caddi, [Cop(Cload Word, [Cconst_symbol "caml_globals_inited"]);
Cconst_int 1])]) in
let body =
List.fold_right
(fun name next ->
let entry_sym = Compilenv.make_symbol ~unitname:name (Some "entry") in
Csequence(Cop(Capply(typ_void, Debuginfo.none),
[Cconst_symbol entry_sym]),
Csequence(incr_global_inited, next)))
namelist (Cconst_int 1) in
Cfunction {fun_name = "caml_program";
fun_args = [];
fun_body = body;
fun_fast = false}
(* Shared null terminator for the symbol tables emitted below. *)
let cint_zero = Cint 0n
(* Emit the null-terminated caml_globals table: one symbol address per
   compilation unit in [namelist]. *)
let global_table namelist =
let mksym name =
Csymbol_address (Compilenv.make_symbol ~unitname:name None)
in
Cdata(Cglobal_symbol "caml_globals" ::
Cdefine_symbol "caml_globals" ::
List.map mksym namelist @
[cint_zero])
(* Emit a data item referencing each symbol in [namelist], forcing the
   linker to keep them. *)
let reference_symbols namelist =
Cdata (List.map (fun sym -> Csymbol_address sym) namelist)
(* Emit a global data symbol [name] holding the marshalled form of [v]
   as an OCaml string constant. *)
let global_data name v =
Cdata(Cglobal_symbol name ::
emit_constant name
(Const_base (Const_string (Marshal.to_string v []))) [])
(* The caml_globals_map blob consumed by the runtime/toplevel. *)
let globals_map v = global_data "caml_globals_map" v
(* Emit the null-terminated caml_frametable: one frametable symbol per
   compilation unit, used by the GC to scan stack frames. *)
let frame_table namelist =
let mksym name =
Csymbol_address (Compilenv.make_symbol ~unitname:name (Some "frametable"))
in
Cdata(Cglobal_symbol "caml_frametable" ::
Cdefine_symbol "caml_frametable" ::
List.map mksym namelist
@ [cint_zero])
(* Emit a null-terminated table [symbol] of (begin, end) symbol pairs,
   one pair per compilation unit, delimiting a named segment. *)
let segment_table namelist symbol begname endname =
let addsyms name lst =
Csymbol_address (Compilenv.make_symbol ~unitname:name (Some begname)) ::
Csymbol_address (Compilenv.make_symbol ~unitname:name (Some endname)) ::
lst
in
Cdata(Cglobal_symbol symbol ::
Cdefine_symbol symbol ::
List.fold_right addsyms namelist [cint_zero])
(* Data-segment bounds for each unit (used by the GC / marshaller). *)
let data_segment_table namelist =
segment_table namelist "caml_data_segments" "data_begin" "data_end"
(* Code-segment bounds for each unit. *)
let code_segment_table namelist =
segment_table namelist "caml_code_segments" "code_begin" "code_end"
(* Initialize a predefined exception *)
(* Emit the data for a predefined exception: the exception identifier
   block caml_exn_<name> plus the one-word bucket caml_bucket_<name>
   pointing at it. *)
let predef_exception name =
let bucketname = "caml_bucket_" ^ name in
let symname = "caml_exn_" ^ name in
Cdata(Cglobal_symbol symname ::
emit_constant symname (Const_block(0,[Const_base(Const_string name)]))
[ Cglobal_symbol bucketname;
Cint(block_header 0 1);
Cdefine_symbol bucketname;
Csymbol_address symname ])
let mapflat f l = List.flatten (List.map f l)
(* Description of one dynamically-linked unit, as embedded in a plugin. *)
type dynunit = {
name: string;
crc: Digest.t;
imports_cmi: (string * Digest.t) list;
imports_cmx: (string * Digest.t) list;
defines: string list;
}
(* Header marshalled into every plugin: a magic number plus the units. *)
type dynheader = {
magic: string;
units: dynunit list;
}
(* Format tag checked by the dynamic loader; bump when the layout changes. *)
let dyn_magic_number = "Caml2007D001"
(* Emit caml_plugin_header: the marshalled dynheader describing [units]
   (each paired with its implementation CRC). *)
let plugin_header units =
let mk (ui,crc) =
{ name = ui.Compilenv.ui_name;
crc = crc;
imports_cmi = ui.Compilenv.ui_imports_cmi;
imports_cmx = ui.Compilenv.ui_imports_cmx;
defines = ui.Compilenv.ui_defines
} in
global_data "caml_plugin_header"
{ magic = dyn_magic_number; units = List.map mk units }
|
a53b3e3e497bdfc9e8254723e57f41b38f923fbb4b966ddc71aea44a428b3456 | GNOME/gimp-tiny-fu | ripply-anim.scm | " Rippling Image " animation generator ( ripply-anim.scm )
( )
97/05/18
;
; Designed to be used in conjunction with a plugin capable
; of saving animations (i.e. the GIF plugin).
;
; Build a multi-layer "rippling" animation of the current drawable:
; generate a tileable noise displacement map, then for each frame crop a
; shifted window of the map and displace a copy of the source image by it.
(define (script-fu-ripply-anim img drawable displacement num-frames edge-type)
; Replace dest-drawable's contents with a pasted copy of source-drawable.
(define (copy-layer-ripple dest-image dest-drawable source-image source-drawable)
(gimp-selection-all dest-image)
(gimp-edit-clear dest-drawable)
(gimp-selection-none dest-image)
(gimp-selection-all source-image)
(gimp-edit-copy source-drawable)
(gimp-selection-none source-image)
; NOTE(review): this pastes twice and anchors the second paste while the
; bound floating-sel goes unused -- presumably it should anchor
; floating-sel; confirm against the upstream ripply-anim.scm.
(let ((floating-sel (car (gimp-edit-paste dest-drawable FALSE))))
(gimp-floating-sel-anchor (car (gimp-edit-paste dest-drawable FALSE)))
)
)
(let* (
(width (car (gimp-drawable-width drawable)))
(height (car (gimp-drawable-height drawable)))
(ripple-image (car (gimp-image-new width height GRAY)))
(ripple-layer (car (gimp-layer-new ripple-image width height GRAY-IMAGE "Ripple Texture" 100 NORMAL-MODE)))
(rippletiled-ret 0)
(rippletiled-image 0)
(rippletiled-layer 0)
(remaining-frames 0)
(xpos 0)
(ypos 0)
(xoffset 0)
(yoffset 0)
(dup-image 0)
(layer-name 0)
(this-image 0)
(this-layer 0)
(dup-layer 0)
)
(gimp-context-push)
; this script generates its own displacement map
(gimp-image-undo-disable ripple-image)
(gimp-context-set-background '(127 127 127))
(gimp-image-insert-layer ripple-image ripple-layer 0 0)
(gimp-edit-fill ripple-layer BACKGROUND-FILL)
(plug-in-noisify RUN-NONINTERACTIVE ripple-image ripple-layer FALSE 1.0 1.0 1.0 0.0)
; tile noise
(set! rippletiled-ret (plug-in-tile RUN-NONINTERACTIVE ripple-image ripple-layer (* width 3) (* height 3) TRUE))
(gimp-image-undo-enable ripple-image)
(gimp-image-delete ripple-image)
(set! rippletiled-image (car rippletiled-ret))
(set! rippletiled-layer (cadr rippletiled-ret))
(gimp-image-undo-disable rippletiled-image)
; process tiled noise into usable displacement map
(plug-in-gauss-iir RUN-NONINTERACTIVE rippletiled-image rippletiled-layer 35 TRUE TRUE)
(gimp-equalize rippletiled-layer TRUE)
(plug-in-gauss-rle RUN-NONINTERACTIVE rippletiled-image rippletiled-layer 5 TRUE TRUE)
(gimp-equalize rippletiled-layer TRUE)
; displacement map is now in rippletiled-layer of rippletiled-image
; loop through the desired frames
(set! remaining-frames num-frames)
(set! xpos (/ width 2))
(set! ypos (/ height 2))
(set! xoffset (/ width num-frames))
(set! yoffset (/ height num-frames))
(let* ((out-imagestack (car (gimp-image-new width height RGB))))
(gimp-image-undo-disable out-imagestack)
(while (> remaining-frames 0)
; each frame uses a window of the tiled map shifted by (xoffset, yoffset)
(set! dup-image (car (gimp-image-duplicate rippletiled-image)))
(gimp-image-undo-disable dup-image)
(gimp-image-crop dup-image width height xpos ypos)
(set! layer-name (string-append "Frame "
(number->string (- num-frames remaining-frames) 10)
" (replace)"))
(set! this-layer (car (gimp-layer-new out-imagestack
width height RGB
layer-name 100 NORMAL-MODE)))
(gimp-image-insert-layer out-imagestack this-layer 0 0)
(copy-layer-ripple out-imagestack this-layer img drawable)
(set! dup-layer (car (gimp-image-get-active-layer dup-image)))
(plug-in-displace RUN-NONINTERACTIVE out-imagestack this-layer
displacement displacement
TRUE TRUE dup-layer dup-layer edge-type)
(gimp-image-undo-enable dup-image)
(gimp-image-delete dup-image)
(set! remaining-frames (- remaining-frames 1))
(set! xpos (+ xoffset xpos))
(set! ypos (+ yoffset ypos))
)
(gimp-image-undo-enable rippletiled-image)
(gimp-image-delete rippletiled-image)
(gimp-image-undo-enable out-imagestack)
(gimp-display-new out-imagestack))
(gimp-context-pop)
)
)
; Register the script with GIMP's procedure database and its dialog
; parameters. Fixed the user-visible typo "animage" -> "animate".
(script-fu-register "script-fu-ripply-anim"
_"_Rippling..."
_"Create a multi-layer image by adding a ripple effect to the current image"
"Adam D. Moss ()"
"Adam D. Moss"
"1997"
"RGB* GRAY*"
SF-IMAGE "Image to animate" 0
SF-DRAWABLE "Drawable to animate" 0
SF-ADJUSTMENT _"Rippling strength" '(3 0 256 1 10 1 0)
SF-ADJUSTMENT _"Number of frames" '(15 0 256 1 10 0 1)
SF-OPTION _"Edge behavior" '(_"Wrap" _"Smear" _"Black")
)
; Place the script under Filters > Animation > Animators.
(script-fu-menu-register "script-fu-ripply-anim"
"<Image>/Filters/Animation/Animators")
| null | https://raw.githubusercontent.com/GNOME/gimp-tiny-fu/a64d85eec23b997e535488d67f55b44395ba3f2e/scripts/ripply-anim.scm | scheme |
Designed to be used in conjunction with a plugin capable
this script generates its own displacement map
tile noise
process tiled noise into usable displacement map
displacement map is now in rippletiled-layer of rippletiled-image
loop through the desired frames | " Rippling Image " animation generator ( ripply-anim.scm )
( )
97/05/18
of saving animations ( i.e. the plugin ) .
(define (script-fu-ripply-anim img drawable displacement num-frames edge-type)
(define (copy-layer-ripple dest-image dest-drawable source-image source-drawable)
(gimp-selection-all dest-image)
(gimp-edit-clear dest-drawable)
(gimp-selection-none dest-image)
(gimp-selection-all source-image)
(gimp-edit-copy source-drawable)
(gimp-selection-none source-image)
(let ((floating-sel (car (gimp-edit-paste dest-drawable FALSE))))
(gimp-floating-sel-anchor (car (gimp-edit-paste dest-drawable FALSE)))
)
)
(let* (
(width (car (gimp-drawable-width drawable)))
(height (car (gimp-drawable-height drawable)))
(ripple-image (car (gimp-image-new width height GRAY)))
(ripple-layer (car (gimp-layer-new ripple-image width height GRAY-IMAGE "Ripple Texture" 100 NORMAL-MODE)))
(rippletiled-ret 0)
(rippletiled-image 0)
(rippletiled-layer 0)
(remaining-frames 0)
(xpos 0)
(ypos 0)
(xoffset 0)
(yoffset 0)
(dup-image 0)
(layer-name 0)
(this-image 0)
(this-layer 0)
(dup-layer 0)
)
(gimp-context-push)
(gimp-image-undo-disable ripple-image)
(gimp-context-set-background '(127 127 127))
(gimp-image-insert-layer ripple-image ripple-layer 0 0)
(gimp-edit-fill ripple-layer BACKGROUND-FILL)
(plug-in-noisify RUN-NONINTERACTIVE ripple-image ripple-layer FALSE 1.0 1.0 1.0 0.0)
(set! rippletiled-ret (plug-in-tile RUN-NONINTERACTIVE ripple-image ripple-layer (* width 3) (* height 3) TRUE))
(gimp-image-undo-enable ripple-image)
(gimp-image-delete ripple-image)
(set! rippletiled-image (car rippletiled-ret))
(set! rippletiled-layer (cadr rippletiled-ret))
(gimp-image-undo-disable rippletiled-image)
(plug-in-gauss-iir RUN-NONINTERACTIVE rippletiled-image rippletiled-layer 35 TRUE TRUE)
(gimp-equalize rippletiled-layer TRUE)
(plug-in-gauss-rle RUN-NONINTERACTIVE rippletiled-image rippletiled-layer 5 TRUE TRUE)
(gimp-equalize rippletiled-layer TRUE)
(set! remaining-frames num-frames)
(set! xpos (/ width 2))
(set! ypos (/ height 2))
(set! xoffset (/ width num-frames))
(set! yoffset (/ height num-frames))
(let* ((out-imagestack (car (gimp-image-new width height RGB))))
(gimp-image-undo-disable out-imagestack)
(while (> remaining-frames 0)
(set! dup-image (car (gimp-image-duplicate rippletiled-image)))
(gimp-image-undo-disable dup-image)
(gimp-image-crop dup-image width height xpos ypos)
(set! layer-name (string-append "Frame "
(number->string (- num-frames remaining-frames) 10)
" (replace)"))
(set! this-layer (car (gimp-layer-new out-imagestack
width height RGB
layer-name 100 NORMAL-MODE)))
(gimp-image-insert-layer out-imagestack this-layer 0 0)
(copy-layer-ripple out-imagestack this-layer img drawable)
(set! dup-layer (car (gimp-image-get-active-layer dup-image)))
(plug-in-displace RUN-NONINTERACTIVE out-imagestack this-layer
displacement displacement
TRUE TRUE dup-layer dup-layer edge-type)
(gimp-image-undo-enable dup-image)
(gimp-image-delete dup-image)
(set! remaining-frames (- remaining-frames 1))
(set! xpos (+ xoffset xpos))
(set! ypos (+ yoffset ypos))
)
(gimp-image-undo-enable rippletiled-image)
(gimp-image-delete rippletiled-image)
(gimp-image-undo-enable out-imagestack)
(gimp-display-new out-imagestack))
(gimp-context-pop)
)
)
(script-fu-register "script-fu-ripply-anim"
_"_Rippling..."
_"Create a multi-layer image by adding a ripple effect to the current image"
"Adam D. Moss ()"
"Adam D. Moss"
"1997"
"RGB* GRAY*"
SF-IMAGE "Image to animage" 0
SF-DRAWABLE "Drawable to animate" 0
SF-ADJUSTMENT _"Rippling strength" '(3 0 256 1 10 1 0)
SF-ADJUSTMENT _"Number of frames" '(15 0 256 1 10 0 1)
SF-OPTION _"Edge behavior" '(_"Wrap" _"Smear" _"Black")
)
(script-fu-menu-register "script-fu-ripply-anim"
"<Image>/Filters/Animation/Animators")
|
672eb1dc653733c1b76fb1f075b6dec9c68108998b81b5ceaea8edeeb5fa8063 | jakemcc/sicp-study | exercise1.7.clj | ( good - enough ? 0.01 0.001 )
;; -> true
;; .01 shouldn't be a good guess.
;;
;; Large failures:
;; Potentially because squaring the guess, even when it is very close to the proper guess, will still cause it to be further than .001 away from goal number x. This would cause infinite loop
;; user=> (good-enough? 31622.7766 1000000000)
;; false
;;
;; Different implementation:
;; Absolute difference between two numbers.
;; Fixed: the original parameters were named `first`/`second`, shadowing
;; clojure.core/first and clojure.core/second inside the body.
(defn change [a b]
  (Math/abs (- a b)))
;; True when two successive guesses differ by less than 0.1% of the
;; newer guess (relative tolerance, per SICP exercise 1.7).
;; Fixed: the original parameters were named `first`/`second`, shadowing
;; clojure.core/first and clojure.core/second inside the body.
(defn good-enough?? [prev-guess curr-guess]
  (< (change prev-guess curr-guess) (* 0.001 curr-guess)))
;; Arithmetic mean of x and y.
(defn average [x y]
  (-> (+ x y) (/ 2)))
;; Newton's-method update: average the current guess with x/guess.
(defn improve [guess x]
  (let [quotient (/ x guess)]
    (average guess quotient)))
;; Iterate Newton's method until successive guesses converge
;; (relative tolerance via good-enough??). Uses recur, so stack-safe.
(defn sqrt-iter [prev-guess curr-guess x]
  (if-not (good-enough?? prev-guess curr-guess)
    (recur curr-guess (improve curr-guess x) x)
    curr-guess))
;; Square root via Newton's method, starting from guess 1.0 (the 0
;; seed for prev-guess guarantees at least one improvement step).
(defn sqrt [x]
  (sqrt-iter 0 1.0 x))
;; REPL checks: the large and small inputs that defeat the
;; absolute-tolerance version of good-enough?.
(sqrt 1000000000)
(sqrt 0.001)
;; Using it:
;; user=> (sqrt 1000000000)
;; 31622.780588899368
;;
;; user=> (sqrt 0.001)
;; 0.03162278245070105
;; user=> (* 0.03162278245070105 0.03162278245070105)
;; 0.0010000003699243661
| null | https://raw.githubusercontent.com/jakemcc/sicp-study/3b9e3d6c8cc30ad92b0d9bbcbbbfe36a8413f89d/clojure/section1.1/exercise1.7.clj | clojure | -> true
.01 shouldn't be a good guess.
Large failures:
Potentially because squaring the guess, even when it is very close to the proper guess, will still cause it to be further than .001 away from goal number x. This would cause infinite loop
false
Different implementation:
Using it:
| ( good - enough ? 0.01 0.001 )
user= > ( good - enough ? 31622.7766 1000000000 )
(defn change [first second]
(Math/abs (- first second)))
(defn good-enough?? [first second]
(< (change first second) (* 0.001 second)))
(defn average [x y]
(/ (+ x y) 2))
(defn improve [guess x]
(average guess (/ x guess)))
(defn sqrt-iter [prev-guess curr-guess x]
(if (good-enough?? prev-guess curr-guess)
curr-guess
(recur curr-guess (improve curr-guess x) x)))
(defn sqrt [x]
(sqrt-iter 0 1.0 x))
(sqrt 1000000000)
(sqrt 0.001)
user= > ( sqrt 1000000000 )
31622.780588899368
user= > ( sqrt 0.001 )
0.03162278245070105
user= > ( * 0.03162278245070105 0.03162278245070105 )
0.0010000003699243661
|
76aae7cb8fa8a862ed42b6f3ca6dbb4e6ff780a2f52a70e5b07e6578b4834ca7 | bhauman/devcards | edn_renderer.cljs | (ns devcards.util.edn-renderer
(:require
[sablono.core :as sab]
[devcards.util.utils :as utils]))
;; Per-render counter used to generate unique React keys; rebound with
;; `binding` to a fresh atom on each call to `html-edn`.
(defonce ^:dynamic *key-counter* nil)
;; Return the next unique key string ("k-1", "k-2", ...), mutating
;; *key-counter*.
(defn get-key []
(swap! *key-counter* inc)
(str "k-" @*key-counter*))
;; Forward declaration: the renderers below are mutually recursive with html.
(declare html)
;; True for scalar values: anything that is neither a map entry, a seq,
;; nor any other collection.
(defn literal? [x]
  (not (or (map-entry? x)
           (seq? x)
           (coll? x))))
;; Render a separator string as a keyed span (class "seperator" [sic],
;; matching the stylesheet).
(defn separator* [s]
(sab/html [:span.seperator {:key (get-key)} s]))
;; Separator followed by a clearfix span; used between non-literal items
;; so nested collections break onto their own lines.
(defn clearfix-separator* [s]
(sab/html [:span {:key (get-key)} (separator* s) [:span.clearfix]]))
;; Choose a separator renderer for coll: plain when every element is a
;; literal, clearfix (block-breaking) otherwise. The try/catch falls back
;; to the clearfix renderer if the predicates throw on an element.
(defn separate-fn [coll]
(try
(if (not (every? literal? coll)) clearfix-separator* separator*)
(catch js/Error e
clearfix-separator*)))
;; Interpose freshly rendered separators (sep-fn applied to s) between
;; the elements of rct-coll, returning the result as a JS/Java array.
(defn interpose-separator [rct-coll s sep-fn]
  (let [head (first rct-coll)
        with-seps (mapcat (fn [item] [(sep-fn s) item]) (rest rct-coll))]
    (to-array (cons head with-seps))))
;; Render a literal (non-collection) value as a keyed span with the
;; given CSS class, pretty-printed.
(defn literal [class x]
(sab/html [:span { :className class :key (get-key)} (utils/pprint-str x)]))
;; Render one collection element, keyed by its positional index.
(defn html-val [index v]
(sab/html [:span {:key index} (html v)]))
;; Render coll's elements and interpose the separator chosen by
;; separate-fn (plain vs clearfix).
(defn join-html [separator coll]
(interpose-separator (into [] (map-indexed html-val coll))
separator
(separate-fn coll)))
;; Render one map entry as a key/value span pair, keyed by the printed key.
(defn html-keyval [[k v]]
(sab/html
[:span.keyval { :key (prn-str k)} (html k) (html v)]))
;; Render all entries of a map; the separator style is chosen from the
;; map's values.
(defn html-keyvals [coll]
(interpose-separator (mapv html-keyval coll)
" "
(separate-fn (vals coll))))
;; Wrap rendered contents with opener/closer delimiter spans
;; (e.g. "[" ... "]"), keyed by the contents' hash.
(defn open-close [class-str opener closer rct-coll]
(sab/html
[:span {:className class-str :key (str (hash rct-coll))}
[:span.opener {:key 1} opener]
[:span.contents {:key 2} rct-coll]
[:span.closer {:key 3} closer]]))
;; Render a sequential collection with the given delimiters.
;; NOTE(review): the bare words on the line before the #_ form are the
;; remnant of a stripped comment in the original source.
(defn html-collection [class opener closer coll]
(open-close (str "collection " class ) opener closer (join-html " " coll))
this speeds things up but fails in om
#_(rct/pure coll ...)
)
;; Render a map as "{" entries "}".
;; NOTE(review): the bare words before the #_ form are the remnant of a
;; stripped comment in the original source.
(defn html-map [coll]
(open-close "collection map" "{" "}" (html-keyvals coll))
this speeds things up but fails in om
#_(rct/pure coll ...))
;; Render a string wrapped in literal double quotes.
(defn html-string [s]
(open-close "string" "\"" "\"" s))
;; Dispatch on the EDN value's type to the matching renderer. Order
;; matters: map? is tested before the generic coll?/seq? branches, and
;; anything unmatched falls through to a plain literal.
(defn html [x]
(cond
(number? x) (literal "number" x)
(keyword? x) (literal "keyword" x)
(symbol? x) (literal "symbol" x)
(string? x) (html-string x)
(map? x) (html-map x)
(set? x) (html-collection "set" "#{" "}" x)
(vector? x) (html-collection "vector" "[" "]" x)
(seq? x) (html-collection "seq" "(" ")" x)
:else (literal "literal" x)))
;; Entry point: render an EDN value to a hiccup tree inside the devcards
;; wrapper div, with a fresh key counter bound for this render pass.
(defn html-edn [e]
(binding [*key-counter* (atom 0)]
(sab/html [:div.com-rigsomelight-rendered-edn.com-rigsomelight-devcards-typog
{:key "devcards-edn-block"} (html e)])))
| null | https://raw.githubusercontent.com/bhauman/devcards/65caa84c9438dd8bdf8cd21964e6033b1a22c178/src/devcards/util/edn_renderer.cljs | clojure | (ns devcards.util.edn-renderer
(:require
[sablono.core :as sab]
[devcards.util.utils :as utils]))
(defonce ^:dynamic *key-counter* nil)
(defn get-key []
(swap! *key-counter* inc)
(str "k-" @*key-counter*))
(declare html)
(defn literal? [x]
(and
(not (map-entry? x))
(not (seq? x))
(not (coll? x))))
(defn separator* [s]
(sab/html [:span.seperator {:key (get-key)} s]))
(defn clearfix-separator* [s]
(sab/html [:span {:key (get-key)} (separator* s) [:span.clearfix]]))
(defn separate-fn [coll]
(try
(if (not (every? literal? coll)) clearfix-separator* separator*)
(catch js/Error e
clearfix-separator*)))
(defn interpose-separator [rct-coll s sep-fn]
(->> (rest rct-coll)
(interleave (repeatedly #(sep-fn s)))
(cons (first rct-coll))
to-array))
(defn literal [class x]
(sab/html [:span { :className class :key (get-key)} (utils/pprint-str x)]))
(defn html-val [index v]
(sab/html [:span {:key index} (html v)]))
(defn join-html [separator coll]
(interpose-separator (into [] (map-indexed html-val coll))
separator
(separate-fn coll)))
(defn html-keyval [[k v]]
(sab/html
[:span.keyval { :key (prn-str k)} (html k) (html v)]))
(defn html-keyvals [coll]
(interpose-separator (mapv html-keyval coll)
" "
(separate-fn (vals coll))))
(defn open-close [class-str opener closer rct-coll]
(sab/html
[:span {:className class-str :key (str (hash rct-coll))}
[:span.opener {:key 1} opener]
[:span.contents {:key 2} rct-coll]
[:span.closer {:key 3} closer]]))
(defn html-collection [class opener closer coll]
(open-close (str "collection " class ) opener closer (join-html " " coll))
this speeds things up but fails in om
#_(rct/pure coll ...)
)
(defn html-map [coll]
(open-close "collection map" "{" "}" (html-keyvals coll))
this speeds things up but fails in om
#_(rct/pure coll ...))
(defn html-string [s]
(open-close "string" "\"" "\"" s))
(defn html [x]
(cond
(number? x) (literal "number" x)
(keyword? x) (literal "keyword" x)
(symbol? x) (literal "symbol" x)
(string? x) (html-string x)
(map? x) (html-map x)
(set? x) (html-collection "set" "#{" "}" x)
(vector? x) (html-collection "vector" "[" "]" x)
(seq? x) (html-collection "seq" "(" ")" x)
:else (literal "literal" x)))
(defn html-edn [e]
(binding [*key-counter* (atom 0)]
(sab/html [:div.com-rigsomelight-rendered-edn.com-rigsomelight-devcards-typog
{:key "devcards-edn-block"} (html e)])))
| |
a90b6294263407bfb7f58ee1670d4bc5feb30e7deec47c9a085cee481795e3ce | GillianPlatform/Gillian | Pred.ml | module SSubst = Gillian.Symbolic.Subst
module L = Logging
module Type = Gillian.Gil_syntax.Type
module Expr = Gillian.Gil_syntax.Expr
module Formula = Gillian.Gil_syntax.Formula
(** {b JSIL logic predicate}. *)
(* Record describing a JSIL logic predicate; [ins] lists the indices of
   the "in" parameters within [params]. *)
type t = {
name : string; (** Name of the predicate *)
num_params : int; (** Number of parameters *)
params : (string * Type.t option) list; (** Actual parameters *)
ins : int list; (** Ins *)
definitions : ((string * string list) option * Asrt.t) list;
(** Predicate definitions *)
facts : Formula.t list; (** Facts about the predicate *)
pure : bool; (** Is the predicate pure *)
abstract : bool; (** Is the predicate abstract *)
nounfold : bool; (** Should the predicate be unfolded? *)
normalised : bool; (** If the predicate has been previously normalised *)
}
(** Creates/populates a Hashtbl from the predicate list pred_defs *)
(* Build a hash table mapping each predicate's name to its definition. *)
let init (preds : t list) : (string, t) Hashtbl.t =
let table = Hashtbl.create Config.small_tbl_size in
List.iter (fun pred -> Hashtbl.add table pred.name pred) preds;
table
(* Pretty-print a predicate declaration.  "In" parameters are prefixed
   with '+' (only when not every parameter is an in); the abstract /
   pure / nounfold qualifiers, definition labels with existentials, and
   the trailing facts clause are printed when present. *)
let pp fmt pred =
let { name; params; ins; definitions; _ } = pred in
let exist_ins = List.length pred.ins <> List.length pred.params in
let params_with_info =
if exist_ins then
List.mapi
(fun i (v, t) -> ((if List.mem i ins then "+" else "") ^ v, t))
params
else params
in
let pp_param fmt (v, t) =
match t with
| None -> Fmt.pf fmt "%s" v
| Some typ -> Fmt.pf fmt "%s : %s" v (Type.str typ)
in
(* definition label, optionally with its existential variables *)
let pp_id_ex fmt id_ex =
match id_ex with
| None -> ()
| Some (id, exs) ->
if List.length exs > 0 then
Fmt.pf fmt "[%s: %a]" id Fmt.(list ~sep:(any ", ") string) exs
else Fmt.pf fmt "[%s]" id
in
let pp_abstract fmt = function
| true -> Fmt.pf fmt "abstract "
| false -> ()
in
let pp_pure fmt = function
| true -> Fmt.pf fmt "pure "
| false -> ()
in
let pp_nounfold fmt = function
| true -> Fmt.pf fmt "nounfold "
| false -> ()
in
let pp_def fmt (id_ex, asrt) =
Fmt.pf fmt "%a%a" pp_id_ex id_ex Asrt.pp asrt
in
let pp_facts fmt = function
| [] -> ()
| facts ->
Fmt.pf fmt "@\nfacts: %a;"
Fmt.(list ~sep:(any " and ") Formula.pp)
facts
in
Fmt.pf fmt "@[<v 2>%a%a%apred %s(%a):@\n%a;%a@]" pp_abstract pred.abstract
pp_pure pred.pure pp_nounfold pred.nounfold name
Fmt.(list ~sep:(any ", ") pp_param)
params_with_info
Fmt.(list ~sep:(any ",@\n") pp_def)
definitions pp_facts pred.facts
let empty_pred_tbl () = Hashtbl.create Config.small_tbl_size
| null | https://raw.githubusercontent.com/GillianPlatform/Gillian/1c8d65120c04ef87cda689a9d41268e25b5ffa7e/Gillian-JS/lib/JSIL/Pred.ml | ocaml | * Name of the predicate
* Number of parameters
* Actual parameters
* Ins
* Predicate definitions
* Facts about the predicate
* Is the predicate pure
* Is the predicate abstract
* Should the predicate be unfolded?
* If the predicate has been previously normalised | module SSubst = Gillian.Symbolic.Subst
module L = Logging
module Type = Gillian.Gil_syntax.Type
module Expr = Gillian.Gil_syntax.Expr
module Formula = Gillian.Gil_syntax.Formula
* { b JSIL logic predicate } .
type t = {
definitions : ((string * string list) option * Asrt.t) list;
}
* Creates / populates a from the predicate list pred_defs
let init (preds : t list) : (string, t) Hashtbl.t =
let pred_def_tbl = Hashtbl.create Config.small_tbl_size in
List.iter
(fun pred_def -> Hashtbl.add pred_def_tbl pred_def.name pred_def)
preds;
pred_def_tbl
let pp fmt pred =
let { name; params; ins; definitions; _ } = pred in
let exist_ins = List.length pred.ins <> List.length pred.params in
let params_with_info =
if exist_ins then
List.mapi
(fun i (v, t) -> ((if List.mem i ins then "+" else "") ^ v, t))
params
else params
in
let pp_param fmt (v, t) =
match t with
| None -> Fmt.pf fmt "%s" v
| Some typ -> Fmt.pf fmt "%s : %s" v (Type.str typ)
in
let pp_id_ex fmt id_ex =
match id_ex with
| None -> ()
| Some (id, exs) ->
if List.length exs > 0 then
Fmt.pf fmt "[%s: %a]" id Fmt.(list ~sep:(any ", ") string) exs
else Fmt.pf fmt "[%s]" id
in
let pp_abstract fmt = function
| true -> Fmt.pf fmt "abstract "
| false -> ()
in
let pp_pure fmt = function
| true -> Fmt.pf fmt "pure "
| false -> ()
in
let pp_nounfold fmt = function
| true -> Fmt.pf fmt "nounfold "
| false -> ()
in
let pp_def fmt (id_ex, asrt) =
Fmt.pf fmt "%a%a" pp_id_ex id_ex Asrt.pp asrt
in
let pp_facts fmt = function
| [] -> ()
| facts ->
Fmt.pf fmt "@\nfacts: %a;"
Fmt.(list ~sep:(any " and ") Formula.pp)
facts
in
Fmt.pf fmt "@[<v 2>%a%a%apred %s(%a):@\n%a;%a@]" pp_abstract pred.abstract
pp_pure pred.pure pp_nounfold pred.nounfold name
Fmt.(list ~sep:(any ", ") pp_param)
params_with_info
Fmt.(list ~sep:(any ",@\n") pp_def)
definitions pp_facts pred.facts
let empty_pred_tbl () = Hashtbl.create Config.small_tbl_size
|
1ebbf9bb9de4f9f0bb959408660652691285c1aef109649bf243721f7cf77ead | yetanalytics/datasim | zip.clj | (ns com.yetanalytics.datasim.json.zip
(:require [clojure.zip :as z]
[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as sgen]
[com.yetanalytics.datasim.json :as json]))
(s/def :com.yetanalytics.datasim.json.zip.loc.ppath/l
(s/nilable
(s/every ::json/any)))
(s/def :com.yetanalytics.datasim.json.zip.loc.ppath/r
(s/nilable
(s/every ::json/any)))
(s/def :com.yetanalytics.datasim.json.zip.loc.ppath/pnodes
(s/nilable
(s/every ::json/any)))
(s/def :com.yetanalytics.datasim.json.zip.loc/ppath
(s/nilable
(s/keys :req-un
[:com.yetanalytics.datasim.json.zip.loc.ppath/l
:com.yetanalytics.datasim.json.zip.loc.ppath/r
:com.yetanalytics.datasim.json.zip.loc.ppath/pnodes
:com.yetanalytics.datasim.json.zip.loc/ppath])))
(declare json-zip)
(s/def ::loc
(s/with-gen (s/tuple ::json/any
:com.yetanalytics.datasim.json.zip.loc/ppath)
(fn []
(sgen/bind
(s/gen ::json/any)
(fn [any-json]
(sgen/elements
(take-while (complement z/end?)
(iterate z/next
(json-zip any-json)))))))))
(s/fdef json-zip
:args (s/cat :root ::json/any)
:ret ::loc
:fn (fn [{{root :root} :args
[node _] :ret}]
(= root node)))
(defn json-zip
"Produce a zipper for the JSON"
[root]
(z/zipper
coll?
seq
(fn make-node
[node kids]
(if-let [empty-coll (empty node)]
(into empty-coll
kids)
;; if clojure.core/empty doesn't work, check for map entry
(if (map-entry? node)
(if (= 2 (count kids))
(let [[k v] kids]
(clojure.lang.MapEntry. k v))
(throw (ex-info "Can only have two children in a MapEntry"
{:type ::map-entry-constraint
:node node
:children kids})))
(throw (ex-info (format "Don't know how to make %s node" (type node))
{:type ::unknown-collection
:node node
:node-type (type node)
:children kids})))))
root))
(s/fdef internal?
:args (s/cat :loc ::loc)
:ret boolean?)
(defn internal?
"Is a location internal, ie a map entry or key"
[loc]
(let [node (z/node loc)]
(or (map-entry? node)
;; key position
(and (string? node)
(zero? (count (z/lefts loc)))
(some-> loc z/up z/node map-entry?))
false)))
(s/fdef el-key
:args (s/cat :loc ::loc)
:ret (s/nilable
::json/key))
(defn el-key
[loc]
(when-not (internal? loc)
(when-let [p (peek (z/path loc))]
(cond
(map-entry? p)
(key p)
(vector? p)
(count (z/lefts loc))))))
(s/fdef k-path
:args (s/cat :loc ::loc)
:ret (s/nilable
::json/key-path))
(defn k-path
[loc]
(into []
(reverse
(keep el-key
(take-while some?
(iterate z/up loc))))))
(s/fdef prune
:args (s/cat :loc ::loc)
:ret ::loc)
(defn prune
"Remove the current node, if it is a value in a map entry also remove the parent.
Shouldn't get called on root"
[loc]
(let [ploc (z/up loc)
pnode (z/node ploc)]
(z/remove
(if (map-entry? pnode)
ploc
loc))))
;; given a root and a key-path, can we return a loc at that path?
;; this would make up some for the inefficiency of having to walk everything
;; when there is a known path?
(s/fdef get-child
:args (s/cat :loc ::loc
:k ::json/key)
:ret (s/nilable ::loc))
(defn get-child
"Returns the child of loc at k or nil if key not present.
Will skip map-entries entirely, like clojure.core/get"
[loc k]
(when (and loc
(z/branch? loc)
(not (internal? loc)))
(let [node (z/node loc)]
(when-let [[fk fv :as found] (find node k)]
(let [child-locs (iterate z/right
(z/down loc))]
(if (map? node)
;; if the node is a map, we want to skip the map entries
(-> (some
(fn [cl]
(when (= found (z/node cl))
cl))
child-locs)
z/down
z/right)
(nth child-locs fk)))))))
(s/fdef get-child-in
:args (s/cat :loc (s/nilable ::loc)
:key-path ::json/key-path)
:ret (s/nilable ::loc))
(defn get-child-in
"Like clojure.core/get-in, but for zipper structures."
[loc key-path]
(reduce get-child loc key-path))
(s/fdef loc-in
:args (s/cat :root ::json/any
:key-path ::json/key-path)
:ret (s/nilable ::loc))
(defn loc-in
"Convenience, like get-child-in, but it takes root and returns a loc or nil."
[root key-path]
(-> root json-zip (get-child-in key-path)))
(s/fdef stub-in
:args (s/cat :loc ::loc
:key-path ::json/key-path)
:ret ::loc)
(defn stub-in
"Given a loc an key path, stub out the path if it does not exist, returning
a loc for the destination. If the loc does not exist, it will have the value
::stub. If incorrect keys are given for the data, will throw.
If stub-in encounters an intermediate node of ::stub, it will replce it with
the proper datastructure for the key path."
[loc key-path]
(let [node (z/node loc)]
(if (map-entry? node)
(recur (-> loc z/down z/right) key-path)
(if-let [k (first key-path)]
(if (or (coll? node) (= ::stub node))
(do (assert (cond
(map? node) (string? k)
(coll? node) (number? k)
:else true) "Incorrect key type for node")
(recur
(if (= ::stub node)
(cond
(string? k)
(-> loc
(z/replace
(z/make-node loc
{}
[(clojure.lang.MapEntry. k
::stub)]))
z/down)
(number? k)
(-> loc
(z/replace
(z/make-node loc
[]
(repeat (inc k) ::stub)))
z/down
(->> (iterate z/right))
(nth k)))
(let [child-locs (take-while
(complement nil?)
(iterate z/right
(z/down loc)))]
(if-let [[fk fv :as found] (find node k)]
(if (map? node)
(some
(fn [cl]
(when (= found (z/node cl))
cl))
child-locs)
(nth child-locs fk))
(if (map? node)
(-> loc
(z/append-child
(clojure.lang.MapEntry. k
::stub))
z/down
z/rightmost)
(let [[lc rc] (split-at k child-locs)]
(-> loc
(z/replace
(z/make-node loc
node
(concat
(map z/node lc)
(repeat (- (inc k)
(count lc))
::stub)
(map z/node rc))))
z/down
(->> (iterate z/right))
(nth k)))))))
(rest key-path)))
(throw (ex-info "Can't path into a leaf node"
{:type ::cant-path-leaf-node
:loc loc
:key-path key-path})))
loc))))
(s/def ::path-map
(s/map-of
::json/key-path
::json/any))
(s/fdef json-locs
:args (s/cat :json ::json/any)
:ret (s/every ::loc)
:fn (fn [{locs :ret}]
(every? (complement internal?) locs)))
(defn json-locs
[json]
(->> json
json-zip
(iterate z/next)
(take-while (complement z/end?))
;; don't look at map entries/keys
(remove internal?)))
(s/fdef json->path-map
:args (s/cat :json ::json/any)
:ret ::path-map)
(defn json->path-map
"given some json, return a map of full paths to values"
[json]
(into {}
(map (fn [loc]
[(k-path loc) (z/node loc)])
(json-locs json))))
(s/fdef path-map->json
:args (s/cat :path-map ::path-map)
:ret ::json/any)
(defn path-map->json
[path-map]
(get path-map []))
| null | https://raw.githubusercontent.com/yetanalytics/datasim/0047cb3123e32b72380ea31ec98a26ef85543b63/src/main/com/yetanalytics/datasim/json/zip.clj | clojure | if clojure.core/empty doesn't work, check for map entry
key position
given a root and a key-path, can we return a loc at that path?
this would make up some for the inefficiency of having to walk everything
when there is a known path?
if the node is a map, we want to skip the map entries
don't look at map entries/keys | (ns com.yetanalytics.datasim.json.zip
(:require [clojure.zip :as z]
[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as sgen]
[com.yetanalytics.datasim.json :as json]))
(s/def :com.yetanalytics.datasim.json.zip.loc.ppath/l
(s/nilable
(s/every ::json/any)))
(s/def :com.yetanalytics.datasim.json.zip.loc.ppath/r
(s/nilable
(s/every ::json/any)))
(s/def :com.yetanalytics.datasim.json.zip.loc.ppath/pnodes
(s/nilable
(s/every ::json/any)))
(s/def :com.yetanalytics.datasim.json.zip.loc/ppath
(s/nilable
(s/keys :req-un
[:com.yetanalytics.datasim.json.zip.loc.ppath/l
:com.yetanalytics.datasim.json.zip.loc.ppath/r
:com.yetanalytics.datasim.json.zip.loc.ppath/pnodes
:com.yetanalytics.datasim.json.zip.loc/ppath])))
(declare json-zip)
(s/def ::loc
(s/with-gen (s/tuple ::json/any
:com.yetanalytics.datasim.json.zip.loc/ppath)
(fn []
(sgen/bind
(s/gen ::json/any)
(fn [any-json]
(sgen/elements
(take-while (complement z/end?)
(iterate z/next
(json-zip any-json)))))))))
(s/fdef json-zip
:args (s/cat :root ::json/any)
:ret ::loc
:fn (fn [{{root :root} :args
[node _] :ret}]
(= root node)))
(defn json-zip
"Produce a zipper for the JSON"
[root]
(z/zipper
coll?
seq
(fn make-node
[node kids]
(if-let [empty-coll (empty node)]
(into empty-coll
kids)
(if (map-entry? node)
(if (= 2 (count kids))
(let [[k v] kids]
(clojure.lang.MapEntry. k v))
(throw (ex-info "Can only have two children in a MapEntry"
{:type ::map-entry-constraint
:node node
:children kids})))
(throw (ex-info (format "Don't know how to make %s node" (type node))
{:type ::unknown-collection
:node node
:node-type (type node)
:children kids})))))
root))
(s/fdef internal?
:args (s/cat :loc ::loc)
:ret boolean?)
(defn internal?
"Is a location internal, ie a map entry or key"
[loc]
(let [node (z/node loc)]
(or (map-entry? node)
(and (string? node)
(zero? (count (z/lefts loc)))
(some-> loc z/up z/node map-entry?))
false)))
(s/fdef el-key
:args (s/cat :loc ::loc)
:ret (s/nilable
::json/key))
(defn el-key
[loc]
(when-not (internal? loc)
(when-let [p (peek (z/path loc))]
(cond
(map-entry? p)
(key p)
(vector? p)
(count (z/lefts loc))))))
(s/fdef k-path
:args (s/cat :loc ::loc)
:ret (s/nilable
::json/key-path))
(defn k-path
[loc]
(into []
(reverse
(keep el-key
(take-while some?
(iterate z/up loc))))))
(s/fdef prune
:args (s/cat :loc ::loc)
:ret ::loc)
(defn prune
"Remove the current node, if it is a value in a map entry also remove the parent.
Shouldn't get called on root"
[loc]
(let [ploc (z/up loc)
pnode (z/node ploc)]
(z/remove
(if (map-entry? pnode)
ploc
loc))))
(s/fdef get-child
:args (s/cat :loc ::loc
:k ::json/key)
:ret (s/nilable ::loc))
(defn get-child
"Returns the child of loc at k or nil if key not present.
Will skip map-entries entirely, like clojure.core/get"
[loc k]
(when (and loc
(z/branch? loc)
(not (internal? loc)))
(let [node (z/node loc)]
(when-let [[fk fv :as found] (find node k)]
(let [child-locs (iterate z/right
(z/down loc))]
(if (map? node)
(-> (some
(fn [cl]
(when (= found (z/node cl))
cl))
child-locs)
z/down
z/right)
(nth child-locs fk)))))))
(s/fdef get-child-in
:args (s/cat :loc (s/nilable ::loc)
:key-path ::json/key-path)
:ret (s/nilable ::loc))
(defn get-child-in
"Like clojure.core/get-in, but for zipper structures."
[loc key-path]
(reduce get-child loc key-path))
(s/fdef loc-in
:args (s/cat :root ::json/any
:key-path ::json/key-path)
:ret (s/nilable ::loc))
(defn loc-in
"Convenience, like get-child-in, but it takes root and returns a loc or nil."
[root key-path]
(-> root json-zip (get-child-in key-path)))
(s/fdef stub-in
:args (s/cat :loc ::loc
:key-path ::json/key-path)
:ret ::loc)
(defn stub-in
"Given a loc an key path, stub out the path if it does not exist, returning
a loc for the destination. If the loc does not exist, it will have the value
::stub. If incorrect keys are given for the data, will throw.
If stub-in encounters an intermediate node of ::stub, it will replce it with
the proper datastructure for the key path."
[loc key-path]
(let [node (z/node loc)]
(if (map-entry? node)
(recur (-> loc z/down z/right) key-path)
(if-let [k (first key-path)]
(if (or (coll? node) (= ::stub node))
(do (assert (cond
(map? node) (string? k)
(coll? node) (number? k)
:else true) "Incorrect key type for node")
(recur
(if (= ::stub node)
(cond
(string? k)
(-> loc
(z/replace
(z/make-node loc
{}
[(clojure.lang.MapEntry. k
::stub)]))
z/down)
(number? k)
(-> loc
(z/replace
(z/make-node loc
[]
(repeat (inc k) ::stub)))
z/down
(->> (iterate z/right))
(nth k)))
(let [child-locs (take-while
(complement nil?)
(iterate z/right
(z/down loc)))]
(if-let [[fk fv :as found] (find node k)]
(if (map? node)
(some
(fn [cl]
(when (= found (z/node cl))
cl))
child-locs)
(nth child-locs fk))
(if (map? node)
(-> loc
(z/append-child
(clojure.lang.MapEntry. k
::stub))
z/down
z/rightmost)
(let [[lc rc] (split-at k child-locs)]
(-> loc
(z/replace
(z/make-node loc
node
(concat
(map z/node lc)
(repeat (- (inc k)
(count lc))
::stub)
(map z/node rc))))
z/down
(->> (iterate z/right))
(nth k)))))))
(rest key-path)))
(throw (ex-info "Can't path into a leaf node"
{:type ::cant-path-leaf-node
:loc loc
:key-path key-path})))
loc))))
(s/def ::path-map
(s/map-of
::json/key-path
::json/any))
(s/fdef json-locs
:args (s/cat :json ::json/any)
:ret (s/every ::loc)
:fn (fn [{locs :ret}]
(every? (complement internal?) locs)))
(defn json-locs
[json]
(->> json
json-zip
(iterate z/next)
(take-while (complement z/end?))
(remove internal?)))
(s/fdef json->path-map
:args (s/cat :json ::json/any)
:ret ::path-map)
(defn json->path-map
"given some json, return a map of full paths to values"
[json]
(into {}
(map (fn [loc]
[(k-path loc) (z/node loc)])
(json-locs json))))
(s/fdef path-map->json
:args (s/cat :path-map ::path-map)
:ret ::json/any)
(defn path-map->json
[path-map]
(get path-map []))
|
64fbe5f1cdbbafccbb5bcc6c1e63ca355f57db782e7542db45c05e4c590d8497 | manuel-serrano/hop | method.scm | ;*=====================================================================*/
* serrano / prgm / project / hop / hop / js2scheme / method.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : We d Apr 26 08:28:06 2017 * /
* Last change : Sat Jan 1 07:21:31 2022 ( serrano ) * /
* Copyright : 2017 - 22 * /
;* ------------------------------------------------------------- */
* transformation * /
;* ------------------------------------------------------------- */
;* This optimization duplicates functions as methods, where THIS */
;* is statically known to be an object. This transformation applies */
;* only when the occurrence number of THIS inside a function is */
;* above THIS-OCCURRENCE-THRESHOLD */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module __js2scheme_method
(include "ast.sch"
"usage.sch")
(import __js2scheme_ast
__js2scheme_dump
__js2scheme_compile
__js2scheme_stage
__js2scheme_utils
__js2scheme_alpha
__js2scheme_use
__js2scheme_node-size
__js2scheme_scheme-constant)
(export j2s-method-stage))
;*---------------------------------------------------------------------*/
;* j2s-method-stage ... */
;*---------------------------------------------------------------------*/
(define j2s-method-stage
(instantiate::J2SStageProc
(name "method")
(comment "Function->method transformation")
(proc (lambda (n args) (j2s-method! n args)))
(optional :optim-method)))
;*---------------------------------------------------------------------*/
;* this-occurrence-threshold ... */
;*---------------------------------------------------------------------*/
(define this-occurrence-threshold 2)
(define body-size-threshold 150)
;*---------------------------------------------------------------------*/
;* j2s-method! ::J2SProgram ... */
;*---------------------------------------------------------------------*/
(define (j2s-method! this::J2SProgram conf)
(when (isa? this J2SProgram)
(with-access::J2SProgram this (nodes decls)
(let ((log (make-cell '())))
(for-each (lambda (d) (method! d this conf log)) decls)
(for-each (lambda (n) (method! n this conf log)) nodes)
(when (>=fx (config-get conf :verbose 0) 3)
(display " " (current-error-port))
(fprintf (current-error-port) "(~(, ))"
(map (lambda (l)
(with-access::J2SNode (cdr l) (loc)
(format "~a(~a)" (car l) (caddr loc))))
(cell-ref log)))))))
this)
;*---------------------------------------------------------------------*/
;* method! ::J2SNode ... */
;*---------------------------------------------------------------------*/
(define-walk-method (method! this::J2SNode prog conf log)
(call-default-walker))
;*---------------------------------------------------------------------*/
;* method! ::J2SAssig ... */
;*---------------------------------------------------------------------*/
(define-walk-method (method! this::J2SAssig prog conf log)
(with-access::J2SAssig this (lhs rhs)
(when (isa? rhs J2SFun)
(with-access::J2SFun rhs (thisp loc body generator name)
(when (and thisp (not generator))
(with-access::J2SDecl thisp (usecnt)
(when (and (>=fx usecnt this-occurrence-threshold)
(<fx (node-size body) body-size-threshold))
(cell-set! log (cons (cons name this) (cell-ref log)))
(let ((met (function->method rhs conf)))
(set! rhs
(instantiate::J2SMethod
(loc loc)
(function (prof-fun rhs prog conf))
(method met)))))))))
this))
;*---------------------------------------------------------------------*/
;* method! ::J2SDeclFun ... */
;*---------------------------------------------------------------------*/
(define-walk-method (method! this::J2SDeclFun prog conf log)
(with-access::J2SDeclFun this (val id)
(set! val (method! val prog conf log))
(with-access::J2SFun val (thisp loc body generator)
(with-access::J2SDecl thisp (usecnt)
(cond
((and (decl-usage-has? this '(ref get))
(not (decl-usage-has? this '(new)))
(>=fx usecnt this-occurrence-threshold)
(<fx (node-size body) body-size-threshold)
(not generator))
(cell-set! log (cons (cons id this) (cell-ref log)))
(let ((met (function->method val conf)))
(set! val
(instantiate::J2SMethod
(loc loc)
(function (prof-fun val prog conf))
(method met))))))))
this))
;*---------------------------------------------------------------------*/
;* prof-fun ... */
;*---------------------------------------------------------------------*/
(define (prof-fun val::J2SFun prog conf)
(with-access::J2SFun val (body name)
(when (config-get conf :profile-method #f)
(with-access::J2SBlock body (loc endloc)
(set! body
(J2SBlock
(J2SStmtExpr
(J2SPragma
`(js-profile-log-method-function
,(& name prog) ',loc)))
body))))
val))
;*---------------------------------------------------------------------*/
;* function->method ... */
;*---------------------------------------------------------------------*/
(define (function->method this::J2SFun conf)
(define (j2sdecl-duplicate p::J2SDecl)
(duplicate::J2SDecl p
(key (ast-decl-key))))
(define (prof name body::J2SBlock)
(if (config-get conf :profile-method #f)
(with-access::J2SBlock body (loc endloc)
(J2SBlock
(J2SStmtExpr
(J2SPragma
`(js-profile-log-method-method
(& ,(symbol->string name)) ',loc)))
body))
body))
(with-access::J2SFun this (params thisp name body method optimize)
(let* ((nparams (map j2sdecl-duplicate params))
(nthisp (j2sdecl-duplicate thisp))
(nbody (j2s-alpha body (cons thisp params) (cons nthisp nparams))))
(set! optimize #f)
(use-count nbody +1 0)
(with-access::J2SDecl nthisp (vtype)
MS CARE UTYPE
;; (set! utype 'object)
(set! vtype 'object)
(let ((m (duplicate::J2SFun this
(optimize #t)
(name (when (symbol? name) (symbol-append name '&)))
(params nparams)
(thisp nthisp)
(body (prof name nbody)))))
(set! method m)
m)))))
| null | https://raw.githubusercontent.com/manuel-serrano/hop/a62327e2423acdafba486a4b8e1902cc978a1a2c/js2scheme/method.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* This optimization duplicates functions as methods, where THIS */
* is statically known to be an object. This transformation applies */
* only when the occurrence number of THIS inside a function is */
* above THIS-OCCURRENCE-THRESHOLD */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* j2s-method-stage ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* this-occurrence-threshold ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* j2s-method! ::J2SProgram ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* method! ::J2SNode ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* method! ::J2SAssig ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* method! ::J2SDeclFun ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* prof-fun ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* function->method ... */
*---------------------------------------------------------------------*/
(set! utype 'object) | * serrano / prgm / project / hop / hop / js2scheme / method.scm * /
* Author : * /
* Creation : We d Apr 26 08:28:06 2017 * /
* Last change : Sat Jan 1 07:21:31 2022 ( serrano ) * /
* Copyright : 2017 - 22 * /
* transformation * /
(module __js2scheme_method
(include "ast.sch"
"usage.sch")
(import __js2scheme_ast
__js2scheme_dump
__js2scheme_compile
__js2scheme_stage
__js2scheme_utils
__js2scheme_alpha
__js2scheme_use
__js2scheme_node-size
__js2scheme_scheme-constant)
(export j2s-method-stage))
(define j2s-method-stage
(instantiate::J2SStageProc
(name "method")
(comment "Function->method transformation")
(proc (lambda (n args) (j2s-method! n args)))
(optional :optim-method)))
(define this-occurrence-threshold 2)
(define body-size-threshold 150)
(define (j2s-method! this::J2SProgram conf)
(when (isa? this J2SProgram)
(with-access::J2SProgram this (nodes decls)
(let ((log (make-cell '())))
(for-each (lambda (d) (method! d this conf log)) decls)
(for-each (lambda (n) (method! n this conf log)) nodes)
(when (>=fx (config-get conf :verbose 0) 3)
(display " " (current-error-port))
(fprintf (current-error-port) "(~(, ))"
(map (lambda (l)
(with-access::J2SNode (cdr l) (loc)
(format "~a(~a)" (car l) (caddr loc))))
(cell-ref log)))))))
this)
(define-walk-method (method! this::J2SNode prog conf log)
(call-default-walker))
(define-walk-method (method! this::J2SAssig prog conf log)
(with-access::J2SAssig this (lhs rhs)
(when (isa? rhs J2SFun)
(with-access::J2SFun rhs (thisp loc body generator name)
(when (and thisp (not generator))
(with-access::J2SDecl thisp (usecnt)
(when (and (>=fx usecnt this-occurrence-threshold)
(<fx (node-size body) body-size-threshold))
(cell-set! log (cons (cons name this) (cell-ref log)))
(let ((met (function->method rhs conf)))
(set! rhs
(instantiate::J2SMethod
(loc loc)
(function (prof-fun rhs prog conf))
(method met)))))))))
this))
(define-walk-method (method! this::J2SDeclFun prog conf log)
(with-access::J2SDeclFun this (val id)
(set! val (method! val prog conf log))
(with-access::J2SFun val (thisp loc body generator)
(with-access::J2SDecl thisp (usecnt)
(cond
((and (decl-usage-has? this '(ref get))
(not (decl-usage-has? this '(new)))
(>=fx usecnt this-occurrence-threshold)
(<fx (node-size body) body-size-threshold)
(not generator))
(cell-set! log (cons (cons id this) (cell-ref log)))
(let ((met (function->method val conf)))
(set! val
(instantiate::J2SMethod
(loc loc)
(function (prof-fun val prog conf))
(method met))))))))
this))
(define (prof-fun val::J2SFun prog conf)
(with-access::J2SFun val (body name)
(when (config-get conf :profile-method #f)
(with-access::J2SBlock body (loc endloc)
(set! body
(J2SBlock
(J2SStmtExpr
(J2SPragma
`(js-profile-log-method-function
,(& name prog) ',loc)))
body))))
val))
(define (function->method this::J2SFun conf)
(define (j2sdecl-duplicate p::J2SDecl)
(duplicate::J2SDecl p
(key (ast-decl-key))))
(define (prof name body::J2SBlock)
(if (config-get conf :profile-method #f)
(with-access::J2SBlock body (loc endloc)
(J2SBlock
(J2SStmtExpr
(J2SPragma
`(js-profile-log-method-method
(& ,(symbol->string name)) ',loc)))
body))
body))
(with-access::J2SFun this (params thisp name body method optimize)
(let* ((nparams (map j2sdecl-duplicate params))
(nthisp (j2sdecl-duplicate thisp))
(nbody (j2s-alpha body (cons thisp params) (cons nthisp nparams))))
(set! optimize #f)
(use-count nbody +1 0)
(with-access::J2SDecl nthisp (vtype)
MS CARE UTYPE
(set! vtype 'object)
(let ((m (duplicate::J2SFun this
(optimize #t)
(name (when (symbol? name) (symbol-append name '&)))
(params nparams)
(thisp nthisp)
(body (prof name nbody)))))
(set! method m)
m)))))
|
6378dd233b4bbdb0a0858cd6d8ae0364441e8a0696e5c1b03b76479f8f8dad9b | clojure-interop/aws-api | AWSServiceDiscoveryClientBuilder.clj | (ns com.amazonaws.services.servicediscovery.AWSServiceDiscoveryClientBuilder
"Fluent builder for AWSServiceDiscovery. Use of the builder is
preferred over using constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.servicediscovery AWSServiceDiscoveryClientBuilder]))
(defn *standard
"returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.servicediscovery.AWSServiceDiscoveryClientBuilder`"
(^com.amazonaws.services.servicediscovery.AWSServiceDiscoveryClientBuilder []
(AWSServiceDiscoveryClientBuilder/standard )))
(defn *default-client
"returns: Default client using the DefaultAWSCredentialsProviderChain and
DefaultAwsRegionProviderChain chain - `com.amazonaws.services.servicediscovery.AWSServiceDiscovery`"
(^com.amazonaws.services.servicediscovery.AWSServiceDiscovery []
(AWSServiceDiscoveryClientBuilder/defaultClient )))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.servicediscovery/src/com/amazonaws/services/servicediscovery/AWSServiceDiscoveryClientBuilder.clj | clojure | (ns com.amazonaws.services.servicediscovery.AWSServiceDiscoveryClientBuilder
"Fluent builder for AWSServiceDiscovery. Use of the builder is
preferred over using constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.servicediscovery AWSServiceDiscoveryClientBuilder]))
(defn *standard
"returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.servicediscovery.AWSServiceDiscoveryClientBuilder`"
(^com.amazonaws.services.servicediscovery.AWSServiceDiscoveryClientBuilder []
(AWSServiceDiscoveryClientBuilder/standard )))
(defn *default-client
"returns: Default client using the DefaultAWSCredentialsProviderChain and
DefaultAwsRegionProviderChain chain - `com.amazonaws.services.servicediscovery.AWSServiceDiscovery`"
(^com.amazonaws.services.servicediscovery.AWSServiceDiscovery []
(AWSServiceDiscoveryClientBuilder/defaultClient )))
| |
722e2729fef1a73520c3d34020fbcce03661511324cb78dd3c1425ae627eb697 | twosigma/satellite | core.clj | Copyright 2015 TWO SIGMA OPEN SOURCE , LLC
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns satellite.core
(:require [cemerick.url :as url]
[cheshire.core :as cheshire]
[clj-http.client :as client]
[clj-logging-config.log4j :as log4j-conf]
[clojurewerkz.welle.core :as wc]
[clojure.tools.logging :as log]
[liberator.core :refer (resource)]
[plumbing.core :refer (fnk)]
[plumbing.core]
[plumbing.graph :as graph]
[satellite.recipes]
[satellite.riemann :as riemann]
[satellite.riemann.monitor :as monitor]
[satellite.riemann.services.curator]
[satellite.riemann.services.http]
[satellite.riemann.services.leader]
[satellite.riemann.services.whitelist]
[satellite.time :as time]
[satellite.util :as util]
[satellite.whitelist :as whitelist]
[schema.core :as s])
(:gen-class))
(defn file-exists?
[f]
(.exists (clojure.java.io/as-file f)))
(def riemann-tcp-server-schema
{(s/optional-key :host) s/Str
(s/optional-key :port) s/Int})
(def settings-schema
the Riemann config file that will process events
:blackhole-fails-to-starts-threshold s/Num
:blackhole-fails-to-finishes-threshold s/Num
:blackhole-check-seconds s/Int
:riemann-config (s/pred file-exists? 'file-exists?)
:riemann-tcp-server-options riemann-tcp-server-schema
:sleep-time s/Int
:mesos-master-url cemerick.url.URL
:riak (s/maybe {:endpoint (s/pred url/url)
:bucket s/Str})
:service-host s/Str
:service-port s/Int
:zookeeper s/Str
:zookeeper-root (s/both s/Str (s/pred #(not (#{"","/"} %)) 'valid-zookeeper-root?))
:curator-retry-policy {:base-sleep-time-ms s/Int
:max-sleep-time-ms s/Int
:max-retries s/Int}
:local-whitelist-path s/Str
:whitelist-hostname-pred (s/pred clojure.test/function? 'clojure.test/function)})
(def settings
;; Settings for the Satellite monitor/service
;;
;; Current values are used as the defaults; to override them, assoc values
;; into this var
the Riemann config file that will process events
:riemann-config "config/riemann-config.clj"
;; if you do not explicitly bind you will just get localhost
:riemann-tcp-server-options {:host "127.0.0.1"}
the sleep time between loops of polling Mesos Master endpoints to create
;; the event stream
:sleep-time 60000
;; a cemerick.url.URL record type
:mesos-master-url (url/url ":5050")
endpoint serving cached task metadata , nil if not using
{ : endpoint " / to / riak "
;; :bucket "bucket-name"}
:riak nil
;; service
:service-host "127.0.0.1"
;; Port on which the service is publicly accessible
:service-port 5001
;; Zookeeper string used for whitelist co-ordination
:zookeeper "zk1:port,zk2:port,zk3:port"
;; Root dir in zookeeper to store satellite state
:zookeeper-root "/satellite"
;; Curator retry policy
:curator-retry-policy {:base-sleep-time-ms 100
:max-sleep-time-ms 120000
:max-retries 10}
;; thresholds for automatic black hole host disabling
:blackhole-fails-to-starts-threshold 0.75
:blackhole-fails-to-finishes-threshold 3.0
:blackhole-check-seconds 300
the path on disk to the Mesos whitelist
:local-whitelist-path "whitelist"
;; predicate used to validate hosts that are added to the whitelist
:whitelist-hostname-pred (fn [hostname]
(identity hostname))})
(defn map->graph
[m]
(plumbing.core/map-vals (fn [x] (fnk [] x)) m))
(defn app
[settings]
{:settings settings
:zk-whitelist-path (fnk [[:settings zookeeper-root]]
(str zookeeper-root "/whitelist"))
:riak-conn (fnk [[:settings riak]]
(when riak
(wc/connect (:endpoint riak))))
:curator (fnk [[:settings zookeeper zookeeper-root curator-retry-policy]]
(riemann.config/service!
(satellite.riemann.services.curator/curator-service
zookeeper zookeeper-root curator-retry-policy)))
:leader (fnk [[:settings mesos-master-url]]
(riemann.config/service!
(satellite.riemann.services.leader/leader-service
mesos-master-url)))
the path on to the whitelist coordination node
:whitelist-sync (fnk [curator leader [:settings local-whitelist-path] zk-whitelist-path]
(let [whitelist-sync (satellite.riemann.services.whitelist/whitelist-sync-service
curator zk-whitelist-path
local-whitelist-path local-whitelist-path
(:leader? leader))]
(future
(intern 'satellite.recipes
'on-host
(fn [host]
(whitelist/on-host
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
host)))
(intern 'satellite.recipes
'off-host
(fn [host]
(whitelist/off-host
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
host)))
(intern 'satellite.recipes
'persist-event
(fn [event]
(whitelist/persist-event
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
(:host event)
(:service event)
event)))
(intern 'satellite.recipes
'delete-event
(fn [event]
(whitelist/delete-event
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
(:host event)
(:service event)))))
(riemann.config/service! whitelist-sync)))
if you want a riak - conn , do not start until you have it
:http-service (fnk [[:settings
service-host service-port riak
whitelist-hostname-pred]
zk-whitelist-path
curator riak-conn whitelist-sync]
(riemann.config/service!
(satellite.riemann.services.http/http-service
{:syncer (:syncer whitelist-sync)
:curator (:curator curator)
:riak riak
:riak-conn riak-conn
:whitelist-hostname-pred whitelist-hostname-pred
:zk-whitelist-path zk-whitelist-path}
service-host
service-port)))
:riemann-core (fnk [curator http-service leader whitelist-sync]
riemann.config/core)
:riemann (fnk [[:settings riemann-config] riemann-core]
(try
(intern 'riemann.config
'leader?
(fn [] nil))
(riemann/start-riemann riemann-config)
(catch Throwable t
(log/error t "Riemann failed")
(throw t))))
:monitor (fnk [[:settings
sleep-time riemann-tcp-server-options]
leader riemann riemann-core]
(future
(try
(monitor/do-monitor {:leader leader
:core riemann-core
:opts riemann-tcp-server-options
:sleep-time sleep-time})
(catch Throwable t
(log/error t "Monitor failed")))))})
(defn init-logging
[]
(log4j-conf/set-loggers! (org.apache.log4j.Logger/getRootLogger)
{:out (org.apache.log4j.DailyRollingFileAppender.
(org.apache.log4j.PatternLayout.
"%d{ISO8601} %-5p %c [%t] - %m%n")
"log/satellite.log"
"'.'yyyy-MM-dd")
:level :info}))
(defn enrich-settings
"enrich the data types of settings values"
[raw-settings]
(update-in raw-settings [:mesos-master-url] url/url))
(defn -main
[& [config args]]
(init-logging)
(log/info "Starting Satellite")
(if (and config
(.exists (java.io.File. config)))
(do (log/info (str "Reading config from file: " config))
(load-file config))
(log/info (str "Using default settings" settings)))
(s/validate settings-schema settings)
((graph/eager-compile (app (map->graph settings))) {}))
(comment
(init-logging)
(def inst (-main "config/dev/satellite-config.clj"))
(@(:cli-server inst))
(require 'riemann.core)
(require 'riemann.transport)
(require 'riemann.config)
(satellite.riemann/reload!))
| null | https://raw.githubusercontent.com/twosigma/satellite/d29d982f054d4f87ff47b4f94d23faa26a9b3fe4/satellite-master/src/satellite/core.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Settings for the Satellite monitor/service
Current values are used as the defaults; to override them, assoc values
into this var
if you do not explicitly bind you will just get localhost
the event stream
a cemerick.url.URL record type
:bucket "bucket-name"}
service
Port on which the service is publicly accessible
Zookeeper string used for whitelist co-ordination
Root dir in zookeeper to store satellite state
Curator retry policy
thresholds for automatic black hole host disabling
predicate used to validate hosts that are added to the whitelist | Copyright 2015 TWO SIGMA OPEN SOURCE , LLC
distributed under the License is distributed on an " AS IS " BASIS ,
(ns satellite.core
(:require [cemerick.url :as url]
[cheshire.core :as cheshire]
[clj-http.client :as client]
[clj-logging-config.log4j :as log4j-conf]
[clojurewerkz.welle.core :as wc]
[clojure.tools.logging :as log]
[liberator.core :refer (resource)]
[plumbing.core :refer (fnk)]
[plumbing.core]
[plumbing.graph :as graph]
[satellite.recipes]
[satellite.riemann :as riemann]
[satellite.riemann.monitor :as monitor]
[satellite.riemann.services.curator]
[satellite.riemann.services.http]
[satellite.riemann.services.leader]
[satellite.riemann.services.whitelist]
[satellite.time :as time]
[satellite.util :as util]
[satellite.whitelist :as whitelist]
[schema.core :as s])
(:gen-class))
(defn file-exists?
[f]
(.exists (clojure.java.io/as-file f)))
(def riemann-tcp-server-schema
{(s/optional-key :host) s/Str
(s/optional-key :port) s/Int})
(def settings-schema
the Riemann config file that will process events
:blackhole-fails-to-starts-threshold s/Num
:blackhole-fails-to-finishes-threshold s/Num
:blackhole-check-seconds s/Int
:riemann-config (s/pred file-exists? 'file-exists?)
:riemann-tcp-server-options riemann-tcp-server-schema
:sleep-time s/Int
:mesos-master-url cemerick.url.URL
:riak (s/maybe {:endpoint (s/pred url/url)
:bucket s/Str})
:service-host s/Str
:service-port s/Int
:zookeeper s/Str
:zookeeper-root (s/both s/Str (s/pred #(not (#{"","/"} %)) 'valid-zookeeper-root?))
:curator-retry-policy {:base-sleep-time-ms s/Int
:max-sleep-time-ms s/Int
:max-retries s/Int}
:local-whitelist-path s/Str
:whitelist-hostname-pred (s/pred clojure.test/function? 'clojure.test/function)})
(def settings
the Riemann config file that will process events
:riemann-config "config/riemann-config.clj"
:riemann-tcp-server-options {:host "127.0.0.1"}
the sleep time between loops of polling Mesos Master endpoints to create
:sleep-time 60000
:mesos-master-url (url/url ":5050")
endpoint serving cached task metadata , nil if not using
{ : endpoint " / to / riak "
:riak nil
:service-host "127.0.0.1"
:service-port 5001
:zookeeper "zk1:port,zk2:port,zk3:port"
:zookeeper-root "/satellite"
:curator-retry-policy {:base-sleep-time-ms 100
:max-sleep-time-ms 120000
:max-retries 10}
:blackhole-fails-to-starts-threshold 0.75
:blackhole-fails-to-finishes-threshold 3.0
:blackhole-check-seconds 300
the path on disk to the Mesos whitelist
:local-whitelist-path "whitelist"
:whitelist-hostname-pred (fn [hostname]
(identity hostname))})
(defn map->graph
[m]
(plumbing.core/map-vals (fn [x] (fnk [] x)) m))
(defn app
[settings]
{:settings settings
:zk-whitelist-path (fnk [[:settings zookeeper-root]]
(str zookeeper-root "/whitelist"))
:riak-conn (fnk [[:settings riak]]
(when riak
(wc/connect (:endpoint riak))))
:curator (fnk [[:settings zookeeper zookeeper-root curator-retry-policy]]
(riemann.config/service!
(satellite.riemann.services.curator/curator-service
zookeeper zookeeper-root curator-retry-policy)))
:leader (fnk [[:settings mesos-master-url]]
(riemann.config/service!
(satellite.riemann.services.leader/leader-service
mesos-master-url)))
the path on to the whitelist coordination node
:whitelist-sync (fnk [curator leader [:settings local-whitelist-path] zk-whitelist-path]
(let [whitelist-sync (satellite.riemann.services.whitelist/whitelist-sync-service
curator zk-whitelist-path
local-whitelist-path local-whitelist-path
(:leader? leader))]
(future
(intern 'satellite.recipes
'on-host
(fn [host]
(whitelist/on-host
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
host)))
(intern 'satellite.recipes
'off-host
(fn [host]
(whitelist/off-host
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
host)))
(intern 'satellite.recipes
'persist-event
(fn [event]
(whitelist/persist-event
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
(:host event)
(:service event)
event)))
(intern 'satellite.recipes
'delete-event
(fn [event]
(whitelist/delete-event
@(:curator curator)
(:cache @(:syncer whitelist-sync))
zk-whitelist-path
(:host event)
(:service event)))))
(riemann.config/service! whitelist-sync)))
if you want a riak - conn , do not start until you have it
:http-service (fnk [[:settings
service-host service-port riak
whitelist-hostname-pred]
zk-whitelist-path
curator riak-conn whitelist-sync]
(riemann.config/service!
(satellite.riemann.services.http/http-service
{:syncer (:syncer whitelist-sync)
:curator (:curator curator)
:riak riak
:riak-conn riak-conn
:whitelist-hostname-pred whitelist-hostname-pred
:zk-whitelist-path zk-whitelist-path}
service-host
service-port)))
:riemann-core (fnk [curator http-service leader whitelist-sync]
riemann.config/core)
:riemann (fnk [[:settings riemann-config] riemann-core]
(try
(intern 'riemann.config
'leader?
(fn [] nil))
(riemann/start-riemann riemann-config)
(catch Throwable t
(log/error t "Riemann failed")
(throw t))))
:monitor (fnk [[:settings
sleep-time riemann-tcp-server-options]
leader riemann riemann-core]
(future
(try
(monitor/do-monitor {:leader leader
:core riemann-core
:opts riemann-tcp-server-options
:sleep-time sleep-time})
(catch Throwable t
(log/error t "Monitor failed")))))})
(defn init-logging
[]
(log4j-conf/set-loggers! (org.apache.log4j.Logger/getRootLogger)
{:out (org.apache.log4j.DailyRollingFileAppender.
(org.apache.log4j.PatternLayout.
"%d{ISO8601} %-5p %c [%t] - %m%n")
"log/satellite.log"
"'.'yyyy-MM-dd")
:level :info}))
(defn enrich-settings
"enrich the data types of settings values"
[raw-settings]
(update-in raw-settings [:mesos-master-url] url/url))
(defn -main
[& [config args]]
(init-logging)
(log/info "Starting Satellite")
(if (and config
(.exists (java.io.File. config)))
(do (log/info (str "Reading config from file: " config))
(load-file config))
(log/info (str "Using default settings" settings)))
(s/validate settings-schema settings)
((graph/eager-compile (app (map->graph settings))) {}))
(comment
(init-logging)
(def inst (-main "config/dev/satellite-config.clj"))
(@(:cli-server inst))
(require 'riemann.core)
(require 'riemann.transport)
(require 'riemann.config)
(satellite.riemann/reload!))
|
e8b983cbdde3b161bb749a50167f607aa6325d2fa3101d849ce63f176844b543 | apache/couchdb-rebar | rebar_neotoma_compiler.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% ex: ts=4 sw=4 et
%% -------------------------------------------------------------------
%%
rebar : Erlang Build Tools
%%
Copyright ( c ) 2010 ( )
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% -------------------------------------------------------------------
%% The rebar_neotoma module is a plugin for rebar that compiles
neotoma peg files . By default , it compiles all src/*.peg to src/*.erl
%%
%% Configuration options should be placed in rebar.config under
%% neotoma_opts. Available options include:
%%
%% doc_root: where to find the peg files to compile.
%% "src" by default
%% out_dir: where to put the generated erl files.
" src " by
%% module_ext: characters to append to the module's name.
%% "" by default
%% source_ext: extension of peg source files
-module(rebar_neotoma_compiler).
-export([compile/2, clean/2]).
%% for internal use only
-export([info/2]).
-include("rebar.hrl").
%% ============================================================================
%% Public API
%% ============================================================================
compile(Config, _AppFile) ->
NeoOpts = neotoma_opts(Config),
rebar_base_compiler:run(Config, [],
option(doc_root, NeoOpts), ".peg",
option(out_dir, NeoOpts),
option(module_ext, NeoOpts) ++ ".erl",
fun compile_neo/3, [{check_last_mod, true}]).
-spec clean(rebar_config:config(), file:filename()) -> 'ok'.
clean(Config, _AppFile) ->
NeoOpts = neotoma_opts(Config),
GeneratedFiles = neotoma_generated_files(
option(out_dir, NeoOpts),
option(module_ext, NeoOpts),
neotoma_source_files(
option(doc_root, NeoOpts),
option(source_ext, NeoOpts)
)
),
ok = rebar_file_utils:delete_each(GeneratedFiles),
ok.
%% ============================================================================
Internal functions
%% ============================================================================
info(help, compile) ->
info_help("Build Neotoma (*.peg) sources.~n");
info(help, clean) ->
info_help("Delete *.peg build results").
info_help(Description) ->
?CONSOLE(
"~s.~n"
"~n"
"Valid rebar.config options:~n"
" ~p~n",
[
Description,
{neotoma_opts, [{doc_root, "src"},
{out_dir, "src"},
{source_ext, ".peg"},
{module_ext, ""}]}
]).
neotoma_opts(Config) ->
rebar_config:get(Config, neotoma_opts, []).
option(Opt, Options) ->
proplists:get_value(Opt, Options, default(Opt)).
default(doc_root) -> "src";
default(out_dir) -> "src";
default(module_ext) -> "";
default(source_ext) -> ".peg".
compile_neo(Source, Target, Config) ->
case code:which(neotoma) of
non_existing ->
?ERROR("~n===============================================~n"
" You need to install neotoma to compile PEG grammars~n"
" Download the latest tarball release from github~n"
" ~n"
" and install it into your erlang library dir~n"
"===============================================~n~n", []),
?FAIL;
_ ->
case needs_compile(Source, Target, Config) of
true ->
do_compile(Source, Target, Config);
false ->
skipped
end
end.
do_compile(Source, _Target, Config) ->
%% TODO: Check last mod on target and referenced DTLs here..
NeoOpts = neotoma_opts(Config),
ensure that and out_dir are defined ,
%% using defaults if necessary
Opts = [{output, option(out_dir, NeoOpts)},
{module, list_to_atom(filename:basename(Source, ".peg")
++ option(module_ext, NeoOpts))}],
case neotoma:file(Source, Opts ++ NeoOpts) of
ok ->
ok;
Reason ->
?ERROR("Compiling peg ~s failed:~n ~p~n",
[Source, Reason]),
?FAIL
end.
needs_compile(Source, Target, Config) ->
LM = filelib:last_modified(Target),
LM < filelib:last_modified(Source) orelse
lists:any(fun(D) -> LM < filelib:last_modified(D) end,
referenced_pegs(Source, Config)).
referenced_pegs(Source, Config) ->
Set = referenced_pegs1([Source], Config,
sets:add_element(Source, sets:new())),
sets:to_list(sets:del_element(Source, Set)).
referenced_pegs1(Step, Config, Seen) ->
NeoOpts = neotoma_opts(Config),
ExtMatch = re:replace(option(source_ext, NeoOpts), "\.", "\\\\\\\\.",
[{return, list}]),
ShOpts = [{use_stdout, false}, return_on_error],
AllRefs =
lists:append(
[begin
Cmd = lists:flatten(["grep -o [^\\\"]*",
ExtMatch, " ", F]),
case rebar_utils:sh(Cmd, ShOpts) of
{ok, Res} ->
string:tokens(Res, "\n");
{error, _} ->
""
end
end || F <- Step]),
DocRoot = option(doc_root, NeoOpts),
WithPaths = [ filename:join([DocRoot, F]) || F <- AllRefs ],
Existing = [F || F <- WithPaths, filelib:is_regular(F)],
New = sets:subtract(sets:from_list(Existing), Seen),
case sets:size(New) of
0 -> Seen;
_ -> referenced_pegs1(sets:to_list(New), Config,
sets:union(New, Seen))
end.
neotoma_source_files(SrcDir, Ext) ->
lists:map(
fun filename:basename/1,
filelib:wildcard(filename:join([SrcDir, "*" ++ Ext]))
).
neotoma_generated_files(OutDir, ModExt, SourceFiles) ->
lists:map(
fun(PegFile) ->
Base = filename:rootname(PegFile),
NewName = Base ++ ModExt ++ ".erl",
filename:join(OutDir, NewName)
end,
SourceFiles
).
| null | https://raw.githubusercontent.com/apache/couchdb-rebar/8578221c20d0caa3deb724e5622a924045ffa8bf/src/rebar_neotoma_compiler.erl | erlang | ex: ts=4 sw=4 et
-------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-------------------------------------------------------------------
The rebar_neotoma module is a plugin for rebar that compiles
Configuration options should be placed in rebar.config under
neotoma_opts. Available options include:
doc_root: where to find the peg files to compile.
"src" by default
out_dir: where to put the generated erl files.
module_ext: characters to append to the module's name.
"" by default
source_ext: extension of peg source files
for internal use only
============================================================================
Public API
============================================================================
============================================================================
============================================================================
TODO: Check last mod on target and referenced DTLs here..
using defaults if necessary | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
rebar : Erlang Build Tools
Copyright ( c ) 2010 ( )
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
neotoma peg files . By default , it compiles all src/*.peg to src/*.erl
" src " by
-module(rebar_neotoma_compiler).
-export([compile/2, clean/2]).
-export([info/2]).
-include("rebar.hrl").
compile(Config, _AppFile) ->
NeoOpts = neotoma_opts(Config),
rebar_base_compiler:run(Config, [],
option(doc_root, NeoOpts), ".peg",
option(out_dir, NeoOpts),
option(module_ext, NeoOpts) ++ ".erl",
fun compile_neo/3, [{check_last_mod, true}]).
-spec clean(rebar_config:config(), file:filename()) -> 'ok'.
clean(Config, _AppFile) ->
NeoOpts = neotoma_opts(Config),
GeneratedFiles = neotoma_generated_files(
option(out_dir, NeoOpts),
option(module_ext, NeoOpts),
neotoma_source_files(
option(doc_root, NeoOpts),
option(source_ext, NeoOpts)
)
),
ok = rebar_file_utils:delete_each(GeneratedFiles),
ok.
Internal functions
info(help, compile) ->
info_help("Build Neotoma (*.peg) sources.~n");
info(help, clean) ->
info_help("Delete *.peg build results").
info_help(Description) ->
?CONSOLE(
"~s.~n"
"~n"
"Valid rebar.config options:~n"
" ~p~n",
[
Description,
{neotoma_opts, [{doc_root, "src"},
{out_dir, "src"},
{source_ext, ".peg"},
{module_ext, ""}]}
]).
neotoma_opts(Config) ->
rebar_config:get(Config, neotoma_opts, []).
option(Opt, Options) ->
proplists:get_value(Opt, Options, default(Opt)).
default(doc_root) -> "src";
default(out_dir) -> "src";
default(module_ext) -> "";
default(source_ext) -> ".peg".
compile_neo(Source, Target, Config) ->
case code:which(neotoma) of
non_existing ->
?ERROR("~n===============================================~n"
" You need to install neotoma to compile PEG grammars~n"
" Download the latest tarball release from github~n"
" ~n"
" and install it into your erlang library dir~n"
"===============================================~n~n", []),
?FAIL;
_ ->
case needs_compile(Source, Target, Config) of
true ->
do_compile(Source, Target, Config);
false ->
skipped
end
end.
do_compile(Source, _Target, Config) ->
NeoOpts = neotoma_opts(Config),
ensure that and out_dir are defined ,
Opts = [{output, option(out_dir, NeoOpts)},
{module, list_to_atom(filename:basename(Source, ".peg")
++ option(module_ext, NeoOpts))}],
case neotoma:file(Source, Opts ++ NeoOpts) of
ok ->
ok;
Reason ->
?ERROR("Compiling peg ~s failed:~n ~p~n",
[Source, Reason]),
?FAIL
end.
needs_compile(Source, Target, Config) ->
LM = filelib:last_modified(Target),
LM < filelib:last_modified(Source) orelse
lists:any(fun(D) -> LM < filelib:last_modified(D) end,
referenced_pegs(Source, Config)).
referenced_pegs(Source, Config) ->
Set = referenced_pegs1([Source], Config,
sets:add_element(Source, sets:new())),
sets:to_list(sets:del_element(Source, Set)).
referenced_pegs1(Step, Config, Seen) ->
NeoOpts = neotoma_opts(Config),
ExtMatch = re:replace(option(source_ext, NeoOpts), "\.", "\\\\\\\\.",
[{return, list}]),
ShOpts = [{use_stdout, false}, return_on_error],
AllRefs =
lists:append(
[begin
Cmd = lists:flatten(["grep -o [^\\\"]*",
ExtMatch, " ", F]),
case rebar_utils:sh(Cmd, ShOpts) of
{ok, Res} ->
string:tokens(Res, "\n");
{error, _} ->
""
end
end || F <- Step]),
DocRoot = option(doc_root, NeoOpts),
WithPaths = [ filename:join([DocRoot, F]) || F <- AllRefs ],
Existing = [F || F <- WithPaths, filelib:is_regular(F)],
New = sets:subtract(sets:from_list(Existing), Seen),
case sets:size(New) of
0 -> Seen;
_ -> referenced_pegs1(sets:to_list(New), Config,
sets:union(New, Seen))
end.
neotoma_source_files(SrcDir, Ext) ->
lists:map(
fun filename:basename/1,
filelib:wildcard(filename:join([SrcDir, "*" ++ Ext]))
).
neotoma_generated_files(OutDir, ModExt, SourceFiles) ->
lists:map(
fun(PegFile) ->
Base = filename:rootname(PegFile),
NewName = Base ++ ModExt ++ ".erl",
filename:join(OutDir, NewName)
end,
SourceFiles
).
|
1d19dfabd776eaaddbb0ef8c84dab3eb89a61cfde2fccd8d0a88c75cd38ea4f2 | CodyReichert/qi | conditions.lisp | (in-package :alexandria)
(defun required-argument (&optional name)
"Signals an error for a missing argument of NAME. Intended for
use as an initialization form for structure and class-slots, and
a default value for required keyword arguments."
(error "Required argument ~@[~S ~]missing." name))
(define-condition simple-style-warning (simple-warning style-warning)
())
(defun simple-style-warning (message &rest args)
(warn 'simple-style-warning :format-control message :format-arguments args))
;; We don't specify a :report for simple-reader-error to let the
;; underlying implementation report the line and column position for
;; us. Unfortunately this way the message from simple-error is not
;; displayed, unless there's special support for that in the
implementation . But even then it 's still inspectable from the
;; debugger...
(define-condition simple-reader-error
#-sbcl(simple-error reader-error)
#+sbcl(sb-int:simple-reader-error)
())
(defun simple-reader-error (stream message &rest args)
(error 'simple-reader-error
:stream stream
:format-control message
:format-arguments args))
(define-condition simple-parse-error (simple-error parse-error)
())
(defun simple-parse-error (message &rest args)
(error 'simple-parse-error
:format-control message
:format-arguments args))
(define-condition simple-program-error (simple-error program-error)
())
(defun simple-program-error (message &rest args)
(error 'simple-program-error
:format-control message
:format-arguments args))
(defmacro ignore-some-conditions ((&rest conditions) &body body)
"Similar to CL:IGNORE-ERRORS but the (unevaluated) CONDITIONS
list determines which specific conditions are to be ignored."
`(handler-case
(progn ,@body)
,@(loop for condition in conditions collect
`(,condition (c) (values nil c)))))
(defmacro unwind-protect-case ((&optional abort-flag) protected-form &body clauses)
"Like CL:UNWIND-PROTECT, but you can specify the circumstances that
the cleanup CLAUSES are run.
clauses ::= (:NORMAL form*)* | (:ABORT form*)* | (:ALWAYS form*)*
Clauses can be given in any order, and more than one clause can be
given for each circumstance. The clauses whose denoted circumstance
occured, are executed in the order the clauses appear.
ABORT-FLAG is the name of a variable that will be bound to T in
CLAUSES if the PROTECTED-FORM aborted preemptively, and to NIL
otherwise.
Examples:
(unwind-protect-case ()
(protected-form)
(:normal (format t \"This is only evaluated if PROTECTED-FORM executed normally.~%\"))
(:abort (format t \"This is only evaluated if PROTECTED-FORM aborted preemptively.~%\"))
(:always (format t \"This is evaluated in either case.~%\")))
(unwind-protect-case (aborted-p)
(protected-form)
(:always (perform-cleanup-if aborted-p)))
"
(check-type abort-flag (or null symbol))
(let ((gflag (gensym "FLAG+")))
`(let ((,gflag t))
(unwind-protect (multiple-value-prog1 ,protected-form (setf ,gflag nil))
(let ,(and abort-flag `((,abort-flag ,gflag)))
,@(loop for (cleanup-kind . forms) in clauses
collect (ecase cleanup-kind
(:normal `(when (not ,gflag) ,@forms))
(:abort `(when ,gflag ,@forms))
(:always `(progn ,@forms))))))))) | null | https://raw.githubusercontent.com/CodyReichert/qi/9cf6d31f40e19f4a7f60891ef7c8c0381ccac66f/dependencies/alexandria-latest/conditions.lisp | lisp | We don't specify a :report for simple-reader-error to let the
underlying implementation report the line and column position for
us. Unfortunately this way the message from simple-error is not
displayed, unless there's special support for that in the
debugger... | (in-package :alexandria)
(defun required-argument (&optional name)
"Signals an error for a missing argument of NAME. Intended for
use as an initialization form for structure and class-slots, and
a default value for required keyword arguments."
(error "Required argument ~@[~S ~]missing." name))
(define-condition simple-style-warning (simple-warning style-warning)
())
(defun simple-style-warning (message &rest args)
(warn 'simple-style-warning :format-control message :format-arguments args))
implementation . But even then it 's still inspectable from the
(define-condition simple-reader-error
#-sbcl(simple-error reader-error)
#+sbcl(sb-int:simple-reader-error)
())
(defun simple-reader-error (stream message &rest args)
(error 'simple-reader-error
:stream stream
:format-control message
:format-arguments args))
(define-condition simple-parse-error (simple-error parse-error)
())
(defun simple-parse-error (message &rest args)
(error 'simple-parse-error
:format-control message
:format-arguments args))
(define-condition simple-program-error (simple-error program-error)
())
(defun simple-program-error (message &rest args)
(error 'simple-program-error
:format-control message
:format-arguments args))
(defmacro ignore-some-conditions ((&rest conditions) &body body)
"Similar to CL:IGNORE-ERRORS but the (unevaluated) CONDITIONS
list determines which specific conditions are to be ignored."
`(handler-case
(progn ,@body)
,@(loop for condition in conditions collect
`(,condition (c) (values nil c)))))
(defmacro unwind-protect-case ((&optional abort-flag) protected-form &body clauses)
"Like CL:UNWIND-PROTECT, but you can specify the circumstances that
the cleanup CLAUSES are run.
clauses ::= (:NORMAL form*)* | (:ABORT form*)* | (:ALWAYS form*)*
Clauses can be given in any order, and more than one clause can be
given for each circumstance. The clauses whose denoted circumstance
occured, are executed in the order the clauses appear.
ABORT-FLAG is the name of a variable that will be bound to T in
CLAUSES if the PROTECTED-FORM aborted preemptively, and to NIL
otherwise.
Examples:
(unwind-protect-case ()
(protected-form)
(:normal (format t \"This is only evaluated if PROTECTED-FORM executed normally.~%\"))
(:abort (format t \"This is only evaluated if PROTECTED-FORM aborted preemptively.~%\"))
(:always (format t \"This is evaluated in either case.~%\")))
(unwind-protect-case (aborted-p)
(protected-form)
(:always (perform-cleanup-if aborted-p)))
"
(check-type abort-flag (or null symbol))
(let ((gflag (gensym "FLAG+")))
`(let ((,gflag t))
(unwind-protect (multiple-value-prog1 ,protected-form (setf ,gflag nil))
(let ,(and abort-flag `((,abort-flag ,gflag)))
,@(loop for (cleanup-kind . forms) in clauses
collect (ecase cleanup-kind
(:normal `(when (not ,gflag) ,@forms))
(:abort `(when ,gflag ,@forms))
(:always `(progn ,@forms))))))))) |
65cf7fdee713d4e0078114e65023846d414a27b046410a680d3b7cf1bb0464bc | dorchard/computational-semantics | FSemF.hs | module FSemF where
import Data.List
import Data.Char (toUpper)
import FSynF
type Grid = [(Column,Row)]
exampleGrid :: Grid
exampleGrid = [(A',9),
(B',4),(B',5),(B',6),(B',7),(B',9),
(C',9),(D',4),(E',4),(F',4),
(G',4),(G',7),(G',8),(G',9),
(H',1),(H',4),(I',1)]
attacks :: Grid
attacks = [(F',9),(E',8),(D',7),(C',6)]
battleship, frigate, sub1, sub2, destroyer :: Grid
battleship = [(D',4),(E',4),(F',4),(G',4),(H',4)]
frigate = [(B',4),(B',5),(B',6),(B',7)]
sub1 = [(A',9),(B',9),(C',9)]
sub2 = [(G',7),(G',8),(G',9)]
destroyer = [(H',1),(I',1)]
type State = ([Grid],Grid)
shipsDistrib :: [Grid]
shipsDistrib = [battleship,frigate,sub1,sub2,destroyer]
exampleState = (shipsDistrib,attacks)
noClashes :: State -> Bool
noClashes (distrib,_) = nodups (concat distrib)
where nodups [] = True
nodups (x:xs) = notElem x xs && nodups xs
hit :: Attack -> State -> Bool
hit pos (gs,_) = elem pos (concat gs)
missed :: Attack -> State -> Bool
missed pos = not . (hit pos)
defeated :: State -> Bool
defeated (gs,g) = all (`elem` g) (concat gs)
updateBattle :: Attack -> State -> State
updateBattle p (gs,g) = (gs, insert p g)
propNames :: Form -> [String]
propNames (P name) = [name]
propNames (Ng f) = propNames f
propNames (Cnj fs) = (sort.nub.concat) (map propNames fs)
propNames (Dsj fs) = (sort.nub.concat) (map propNames fs)
genVals :: [String] -> [[(String,Bool)]]
genVals [] = [[]]
genVals (name:names) = map ((name,True) :) (genVals names)
++ map ((name,False):) (genVals names)
allVals :: Form -> [[(String,Bool)]]
allVals = genVals . propNames
eval :: [(String,Bool)] -> Form -> Bool
eval [] (P c) = error ("no info about " ++ show c)
eval ((i,b):xs) (P c)
| c == i = b
| otherwise = eval xs (P c)
eval xs (Ng f) = not (eval xs f)
eval xs (Cnj fs) = all (eval xs) fs
eval xs (Dsj fs) = any (eval xs) fs
tautology :: Form -> Bool
tautology f = all (\ v -> eval v f) (allVals f)
satisfiable :: Form -> Bool
satisfiable f = any (\ v -> eval v f) (allVals f)
contradiction :: Form -> Bool
contradiction = not . satisfiable
implies :: Form -> Form -> Bool
implies f1 f2 = contradiction (Cnj [f1,Ng f2])
update :: [[(String,Bool)]] -> Form -> [[(String,Bool)]]
update vals f = [ v | v <- vals, eval v f ]
samepos :: Pattern -> Pattern -> Int
samepos _ [] = 0
samepos [] _ = 0
samepos (x:xs) (y:ys) | x == y = samepos xs ys + 1
| otherwise = samepos xs ys
occurscount :: Pattern -> Pattern -> Int
occurscount xs [] = 0
occurscount xs (y:ys)
| y `elem` xs = occurscount (delete y xs) ys + 1
| otherwise = occurscount xs ys
reaction :: Pattern -> Pattern -> [Answer]
reaction secret guess = take n (repeat Black)
++ take m (repeat White)
where n = samepos secret guess
m = occurscount secret guess - n
secret = [Red,Blue,Green,Yellow]
updateMM :: [Pattern] -> Pattern -> Feedback -> [Pattern]
updateMM state guess answer =
[ xs | xs <- state, reaction xs guess == answer ]
string2pattern :: String -> Pattern
string2pattern = convertP . (map toUpper)
convertP :: String -> Pattern
convertP [] = []
convertP (' ':xs) = convertP xs
convertP ('R':xs) = Red : convertP xs
convertP ('Y':xs) = Yellow : convertP xs
convertP ('B':xs) = Blue : convertP xs
convertP ('G':xs) = Green : convertP xs
convertP ('O':xs) = Orange : convertP xs
playMM :: IO ()
playMM =
do
putStrLn "Give a sequence of four colours from RGBYO"
s <- getLine
if (string2pattern s) /= secret
then
let answer = reaction secret (string2pattern s) in
do
putStrLn (show answer)
putStrLn "Please make another guess"
playMM
else putStrLn "correct"
| null | https://raw.githubusercontent.com/dorchard/computational-semantics/e11a80f86140451c9ce7b80a7c263d1482d8565f/FSemF.hs | haskell | module FSemF where
import Data.List
import Data.Char (toUpper)
import FSynF
type Grid = [(Column,Row)]
exampleGrid :: Grid
exampleGrid = [(A',9),
(B',4),(B',5),(B',6),(B',7),(B',9),
(C',9),(D',4),(E',4),(F',4),
(G',4),(G',7),(G',8),(G',9),
(H',1),(H',4),(I',1)]
attacks :: Grid
attacks = [(F',9),(E',8),(D',7),(C',6)]
battleship, frigate, sub1, sub2, destroyer :: Grid
battleship = [(D',4),(E',4),(F',4),(G',4),(H',4)]
frigate = [(B',4),(B',5),(B',6),(B',7)]
sub1 = [(A',9),(B',9),(C',9)]
sub2 = [(G',7),(G',8),(G',9)]
destroyer = [(H',1),(I',1)]
type State = ([Grid],Grid)
shipsDistrib :: [Grid]
shipsDistrib = [battleship,frigate,sub1,sub2,destroyer]
exampleState = (shipsDistrib,attacks)
noClashes :: State -> Bool
noClashes (distrib,_) = nodups (concat distrib)
where nodups [] = True
nodups (x:xs) = notElem x xs && nodups xs
hit :: Attack -> State -> Bool
hit pos (gs,_) = elem pos (concat gs)
missed :: Attack -> State -> Bool
missed pos = not . (hit pos)
defeated :: State -> Bool
defeated (gs,g) = all (`elem` g) (concat gs)
updateBattle :: Attack -> State -> State
updateBattle p (gs,g) = (gs, insert p g)
propNames :: Form -> [String]
propNames (P name) = [name]
propNames (Ng f) = propNames f
propNames (Cnj fs) = (sort.nub.concat) (map propNames fs)
propNames (Dsj fs) = (sort.nub.concat) (map propNames fs)
genVals :: [String] -> [[(String,Bool)]]
genVals [] = [[]]
genVals (name:names) = map ((name,True) :) (genVals names)
++ map ((name,False):) (genVals names)
allVals :: Form -> [[(String,Bool)]]
allVals = genVals . propNames
eval :: [(String,Bool)] -> Form -> Bool
eval [] (P c) = error ("no info about " ++ show c)
eval ((i,b):xs) (P c)
| c == i = b
| otherwise = eval xs (P c)
eval xs (Ng f) = not (eval xs f)
eval xs (Cnj fs) = all (eval xs) fs
eval xs (Dsj fs) = any (eval xs) fs
tautology :: Form -> Bool
tautology f = all (\ v -> eval v f) (allVals f)
satisfiable :: Form -> Bool
satisfiable f = any (\ v -> eval v f) (allVals f)
contradiction :: Form -> Bool
contradiction = not . satisfiable
implies :: Form -> Form -> Bool
implies f1 f2 = contradiction (Cnj [f1,Ng f2])
update :: [[(String,Bool)]] -> Form -> [[(String,Bool)]]
update vals f = [ v | v <- vals, eval v f ]
samepos :: Pattern -> Pattern -> Int
samepos _ [] = 0
samepos [] _ = 0
samepos (x:xs) (y:ys) | x == y = samepos xs ys + 1
| otherwise = samepos xs ys
occurscount :: Pattern -> Pattern -> Int
occurscount xs [] = 0
occurscount xs (y:ys)
| y `elem` xs = occurscount (delete y xs) ys + 1
| otherwise = occurscount xs ys
reaction :: Pattern -> Pattern -> [Answer]
reaction secret guess = take n (repeat Black)
++ take m (repeat White)
where n = samepos secret guess
m = occurscount secret guess - n
secret = [Red,Blue,Green,Yellow]
updateMM :: [Pattern] -> Pattern -> Feedback -> [Pattern]
updateMM state guess answer =
[ xs | xs <- state, reaction xs guess == answer ]
string2pattern :: String -> Pattern
string2pattern = convertP . (map toUpper)
convertP :: String -> Pattern
convertP [] = []
convertP (' ':xs) = convertP xs
convertP ('R':xs) = Red : convertP xs
convertP ('Y':xs) = Yellow : convertP xs
convertP ('B':xs) = Blue : convertP xs
convertP ('G':xs) = Green : convertP xs
convertP ('O':xs) = Orange : convertP xs
playMM :: IO ()
playMM =
do
putStrLn "Give a sequence of four colours from RGBYO"
s <- getLine
if (string2pattern s) /= secret
then
let answer = reaction secret (string2pattern s) in
do
putStrLn (show answer)
putStrLn "Please make another guess"
playMM
else putStrLn "correct"
| |
ee5c02f678ee63ee24361ecc8d174b444b70cdc9b6965fe526f47bd2edb65e6b | dhleong/wish | proficiency_test.cljs | (ns wish.sheets.dnd5e.subs.proficiency-test
(:require [cljs.test :refer-macros [deftest testing is]]
[wish.sheets.dnd5e.subs.proficiency
:refer [level->proficiency-bonus]]))
(deftest level->proficiency-bonus-test
(testing "Low levels"
(is (= 2 (level->proficiency-bonus 1)))
(is (= 2 (level->proficiency-bonus 2)))
(is (= 2 (level->proficiency-bonus 4))))
(testing "Higher levels"
(is (= 3 (level->proficiency-bonus 5)))
(is (= 3 (level->proficiency-bonus 8)))
(is (= 4 (level->proficiency-bonus 9)))))
| null | https://raw.githubusercontent.com/dhleong/wish/9036f9da3706bfcc1e4b4736558b6f7309f53b7b/test/cljs/wish/sheets/dnd5e/subs/proficiency_test.cljs | clojure | (ns wish.sheets.dnd5e.subs.proficiency-test
(:require [cljs.test :refer-macros [deftest testing is]]
[wish.sheets.dnd5e.subs.proficiency
:refer [level->proficiency-bonus]]))
(deftest level->proficiency-bonus-test
(testing "Low levels"
(is (= 2 (level->proficiency-bonus 1)))
(is (= 2 (level->proficiency-bonus 2)))
(is (= 2 (level->proficiency-bonus 4))))
(testing "Higher levels"
(is (= 3 (level->proficiency-bonus 5)))
(is (= 3 (level->proficiency-bonus 8)))
(is (= 4 (level->proficiency-bonus 9)))))
| |
cafd8575b4702859fd355a4e20a303f362d5b5efa4268f8a218002d09f89a6cd | nvim-treesitter/nvim-treesitter | highlights.scm | [
"replace"
"go"
"use"
] @keyword
"=>" @operator
(comment) @comment
[
(version)
(go_version)
] @string
| null | https://raw.githubusercontent.com/nvim-treesitter/nvim-treesitter/3b05ef44927cc245d2c68873d0a526592d3f93cd/queries/gowork/highlights.scm | scheme | [
"replace"
"go"
"use"
] @keyword
"=>" @operator
(comment) @comment
[
(version)
(go_version)
] @string
| |
6ccf0a20ef62dbeddd79f081056e85a7f40567edf85099de982c6ab2a574aa91 | futurice/haskell-mega-repo | Envelope.hs | {-# LANGUAGE OverloadedStrings #-}
module Personio.Types.Envelope where
import Data.Aeson.Compat
import Data.Aeson.Types (FromJSON1 (..), explicitParseField, parseJSON1)
import Futurice.Aeson
import Futurice.Prelude
import Prelude ()
-- | API returns data in the envelope
newtype Envelope a = Envelope { getEnvelope :: a }
instance FromJSON a => FromJSON (Envelope a) where
parseJSON = parseJSON1
instance FromJSON1 Envelope where
liftParseJSON p _ = withObjectDump "Envelope" $ \obj -> do
b <- obj .: "success"
case b of
False -> do
err <- obj .: "error"
fail (errMessage err ^. unpacked)
True -> Envelope <$> explicitParseField p obj "data"
-- | API error.
data Err = Err
{ errCode :: !Int
, errMessage :: !Text
}
instance FromJSON Err where
parseJSON = withObjectDump "Error" $ \obj -> Err
<$> obj .: "code"
<*> obj .: "message"
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/personio-client/src/Personio/Types/Envelope.hs | haskell | # LANGUAGE OverloadedStrings #
| API returns data in the envelope
| API error. | module Personio.Types.Envelope where
import Data.Aeson.Compat
import Data.Aeson.Types (FromJSON1 (..), explicitParseField, parseJSON1)
import Futurice.Aeson
import Futurice.Prelude
import Prelude ()
newtype Envelope a = Envelope { getEnvelope :: a }
instance FromJSON a => FromJSON (Envelope a) where
parseJSON = parseJSON1
instance FromJSON1 Envelope where
liftParseJSON p _ = withObjectDump "Envelope" $ \obj -> do
b <- obj .: "success"
case b of
False -> do
err <- obj .: "error"
fail (errMessage err ^. unpacked)
True -> Envelope <$> explicitParseField p obj "data"
data Err = Err
{ errCode :: !Int
, errMessage :: !Text
}
instance FromJSON Err where
parseJSON = withObjectDump "Error" $ \obj -> Err
<$> obj .: "code"
<*> obj .: "message"
|
c29a63ae71bd2208625bd72c1c1072344015e957e04c31098f85784e3d5ffaad | hugoduncan/oldmj | help.clj | (ns makejack.impl.help
"Help messages"
(:require [clojure.string :as str]
[makejack.api.core :as makejack]
[makejack.impl.invokers :as invokers]
[makejack.impl.resolve :as resolve]))
(defn invoker-doc-string [f]
(first (str/split-lines (:doc (meta f) ""))))
(defn- target-doc-string [target-map]
(first
(str/split-lines
(:doc target-map
(invoker-doc-string
(get invokers/invokers
(:invoker target-map)))))))
(defn target-doc
"Construct doc for available tools."
[]
(let [mj-config (makejack/load-mj)]
(str/join
"\n"
(map
(fn [[kw m]]
(format "%25s %s" (name kw) (target-doc-string m)))
(sort-by
key
(:targets mj-config))))))
(defn invoker-doc
"Construct doc for available invokers."
[]
(str/join
"\n"
(map
(fn [[kw f]]
(format
"%25s %s"
kw
(invoker-doc-string f)))
invokers/invokers)))
(defn usage [summary]
(println
(str/join
"\n"
["mj [options ...] target"
""
summary
""
"Use mj help <target> for more detailed help on target."
""
"Project targets:"
(target-doc)
""
"Available invokers:"
(invoker-doc)])))
(defn help-on-invoker
"Return help on the specified command."
[invoker-kw]
(let [invoker (resolve/resolve-invoker invoker-kw)
doc (some-> invoker meta :doc)]
(if invoker
(do (println "makejack invoker" invoker-kw "\n")
(println doc))
(binding [*out* *err*]
(makejack/error (str "Unknown invoker: " invoker-kw))))))
(defn help-on-target
"Return help on the specified command."
[target-kw]
(let [config (try
(makejack/load-config)
(catch Exception _))
target (resolve/resolve-target target-kw config)
doc (:doc target)]
(if target
(do (println "mj" (name target-kw) "\n")
(if doc
(println doc)
(println "Undocumented")))
(binding [*out* *err*]
(makejack/error (str "Unknown target: " (name target-kw)))))))
(defn help-on
"Return help on the specified command."
[cmd]
(if (str/starts-with? cmd ":")
(help-on-invoker (read-string cmd))
(help-on-target (keyword cmd))))
| null | https://raw.githubusercontent.com/hugoduncan/oldmj/0a97488be7457baed01d2d9dd0ea6df4383832ab/cli/src/makejack/impl/help.clj | clojure | (ns makejack.impl.help
"Help messages"
(:require [clojure.string :as str]
[makejack.api.core :as makejack]
[makejack.impl.invokers :as invokers]
[makejack.impl.resolve :as resolve]))
(defn invoker-doc-string [f]
(first (str/split-lines (:doc (meta f) ""))))
(defn- target-doc-string [target-map]
(first
(str/split-lines
(:doc target-map
(invoker-doc-string
(get invokers/invokers
(:invoker target-map)))))))
(defn target-doc
"Construct doc for available tools."
[]
(let [mj-config (makejack/load-mj)]
(str/join
"\n"
(map
(fn [[kw m]]
(format "%25s %s" (name kw) (target-doc-string m)))
(sort-by
key
(:targets mj-config))))))
(defn invoker-doc
"Construct doc for available invokers."
[]
(str/join
"\n"
(map
(fn [[kw f]]
(format
"%25s %s"
kw
(invoker-doc-string f)))
invokers/invokers)))
(defn usage [summary]
(println
(str/join
"\n"
["mj [options ...] target"
""
summary
""
"Use mj help <target> for more detailed help on target."
""
"Project targets:"
(target-doc)
""
"Available invokers:"
(invoker-doc)])))
(defn help-on-invoker
"Return help on the specified command."
[invoker-kw]
(let [invoker (resolve/resolve-invoker invoker-kw)
doc (some-> invoker meta :doc)]
(if invoker
(do (println "makejack invoker" invoker-kw "\n")
(println doc))
(binding [*out* *err*]
(makejack/error (str "Unknown invoker: " invoker-kw))))))
(defn help-on-target
"Return help on the specified command."
[target-kw]
(let [config (try
(makejack/load-config)
(catch Exception _))
target (resolve/resolve-target target-kw config)
doc (:doc target)]
(if target
(do (println "mj" (name target-kw) "\n")
(if doc
(println doc)
(println "Undocumented")))
(binding [*out* *err*]
(makejack/error (str "Unknown target: " (name target-kw)))))))
(defn help-on
"Return help on the specified command."
[cmd]
(if (str/starts-with? cmd ":")
(help-on-invoker (read-string cmd))
(help-on-target (keyword cmd))))
| |
7babb8c2ec49e350309b0cc94b2c041e71924268c4e61f30666012e06c029b01 | TyOverby/mono | profunctor.ml | open Base
include Profunctor_intf.Interfaces
module Record_builder_internal
(F : S) (T : sig
type 'a profunctor_term
val prj : ('a, 'a) F.t -> 'a profunctor_term
val inj : 'a profunctor_term -> ('a, 'a) F.t
end) =
struct
include T
type ('b, 'a) profunctor = ('b, 'a) F.t
module Bare = Record_builder.Make_2 (F)
let field term field = Bare.field (F.contra_map (inj term) ~f:(Field.get field)) field
let build_for_record f = prj (Bare.build_for_record f)
end
module Record_builder (F : S) =
Record_builder_internal
(F)
(struct
type 'a profunctor_term = ('a, 'a) F.t
let prj = Fn.id
let inj = Fn.id
end)
module Fn_with_id = struct
module T = struct
type ('b, 'a) t =
| Id : ('a, 'a) t
| Apply : ('a -> 'b) -> ('b, 'a) t
let map (type a b c) (x : (b, a) t) ~(f : b -> c) : (c, a) t =
match x with
| Id -> Apply f
| Apply g -> Apply (Fn.compose f g)
;;
let contra_map (type a b c) (x : (c, b) t) ~(f : a -> b) : (c, a) t =
match x with
| Id -> Apply f
| Apply g -> Apply (Fn.compose g f)
;;
let as_fn' (type a b) (x : (b, a) t) : a -> b =
match x with
| Id -> Fn.id
| Apply f -> f
;;
let both l r =
let l = as_fn' l
and r = as_fn' r in
Apply (fun x -> l x, r x)
;;
end
include T
let split (type a b c d) (l : (b, a) t) (r : (d, c) t) : (b * d, a * c) t =
match l, r with
| Id, Id -> Id
| _, _ ->
let l = as_fn' l
and r = as_fn' r in
Apply (fun (x, y) -> l x, r y)
;;
let id = Id
let of_fn x = Apply x
let as_fn t = Staged.stage (as_fn' t)
let compose (type a b c) (g : (c, b) t) (f : (b, a) t) : (c, a) t =
match g, f with
| Id, Id -> Id
| Id, f -> f
| g, Id -> g
| Apply g, Apply f -> Apply (Fn.compose g f)
;;
module Of_record = Record_builder (T)
end
module Of_applicative (F : Applicative.S) = struct
module T = struct
type ('b, 'a) t = 'b F.t
let contra_map x ~f:_ = x
let map = F.map
let both = F.both
end
include T
module Of_record =
Record_builder_internal
(T)
(struct
type 'a profunctor_term = 'a F.t
let inj = Fn.id
let prj = Fn.id
end)
end
module Of_conv_based (F : Conv_based) = struct
module T = struct
type ('c, 'a) t =
| Embed :
{ pre_map : ('b, 'a) Fn_with_id.t
; inner : 'b F.t
; post_map : ('c, 'b) Fn_with_id.t
}
-> ('c, 'a) t
let contra_map (Embed x) ~f =
Embed { x with pre_map = Fn_with_id.contra_map x.pre_map ~f }
;;
let map (Embed x) ~f = Embed { x with post_map = Fn_with_id.map x.post_map ~f }
let both (Embed l) (Embed r) =
Embed
{ pre_map = Fn_with_id.both l.pre_map r.pre_map
; inner = F.both l.inner r.inner
; post_map = Fn_with_id.split l.post_map r.post_map
}
;;
end
include T
let inj inner = Embed { pre_map = Fn_with_id.Id; inner; post_map = Fn_with_id.Id }
let prj (Embed x) =
F.conv x.inner (Fn_with_id.as_fn' x.post_map) (Fn_with_id.as_fn' x.pre_map)
;;
module Of_record =
Record_builder_internal
(T)
(struct
type 'a profunctor_term = 'a F.t
let inj = inj
let prj = prj
end)
end
| null | https://raw.githubusercontent.com/TyOverby/mono/8d6b3484d5db63f2f5472c7367986ea30290764d/vendor/janestreet-profunctor/src/profunctor.ml | ocaml | open Base
include Profunctor_intf.Interfaces
module Record_builder_internal
(F : S) (T : sig
type 'a profunctor_term
val prj : ('a, 'a) F.t -> 'a profunctor_term
val inj : 'a profunctor_term -> ('a, 'a) F.t
end) =
struct
include T
type ('b, 'a) profunctor = ('b, 'a) F.t
module Bare = Record_builder.Make_2 (F)
let field term field = Bare.field (F.contra_map (inj term) ~f:(Field.get field)) field
let build_for_record f = prj (Bare.build_for_record f)
end
module Record_builder (F : S) =
Record_builder_internal
(F)
(struct
type 'a profunctor_term = ('a, 'a) F.t
let prj = Fn.id
let inj = Fn.id
end)
module Fn_with_id = struct
module T = struct
type ('b, 'a) t =
| Id : ('a, 'a) t
| Apply : ('a -> 'b) -> ('b, 'a) t
let map (type a b c) (x : (b, a) t) ~(f : b -> c) : (c, a) t =
match x with
| Id -> Apply f
| Apply g -> Apply (Fn.compose f g)
;;
let contra_map (type a b c) (x : (c, b) t) ~(f : a -> b) : (c, a) t =
match x with
| Id -> Apply f
| Apply g -> Apply (Fn.compose g f)
;;
let as_fn' (type a b) (x : (b, a) t) : a -> b =
match x with
| Id -> Fn.id
| Apply f -> f
;;
let both l r =
let l = as_fn' l
and r = as_fn' r in
Apply (fun x -> l x, r x)
;;
end
include T
let split (type a b c d) (l : (b, a) t) (r : (d, c) t) : (b * d, a * c) t =
match l, r with
| Id, Id -> Id
| _, _ ->
let l = as_fn' l
and r = as_fn' r in
Apply (fun (x, y) -> l x, r y)
;;
let id = Id
let of_fn x = Apply x
let as_fn t = Staged.stage (as_fn' t)
let compose (type a b c) (g : (c, b) t) (f : (b, a) t) : (c, a) t =
match g, f with
| Id, Id -> Id
| Id, f -> f
| g, Id -> g
| Apply g, Apply f -> Apply (Fn.compose g f)
;;
module Of_record = Record_builder (T)
end
module Of_applicative (F : Applicative.S) = struct
module T = struct
type ('b, 'a) t = 'b F.t
let contra_map x ~f:_ = x
let map = F.map
let both = F.both
end
include T
module Of_record =
Record_builder_internal
(T)
(struct
type 'a profunctor_term = 'a F.t
let inj = Fn.id
let prj = Fn.id
end)
end
module Of_conv_based (F : Conv_based) = struct
module T = struct
type ('c, 'a) t =
| Embed :
{ pre_map : ('b, 'a) Fn_with_id.t
; inner : 'b F.t
; post_map : ('c, 'b) Fn_with_id.t
}
-> ('c, 'a) t
let contra_map (Embed x) ~f =
Embed { x with pre_map = Fn_with_id.contra_map x.pre_map ~f }
;;
let map (Embed x) ~f = Embed { x with post_map = Fn_with_id.map x.post_map ~f }
let both (Embed l) (Embed r) =
Embed
{ pre_map = Fn_with_id.both l.pre_map r.pre_map
; inner = F.both l.inner r.inner
; post_map = Fn_with_id.split l.post_map r.post_map
}
;;
end
include T
let inj inner = Embed { pre_map = Fn_with_id.Id; inner; post_map = Fn_with_id.Id }
let prj (Embed x) =
F.conv x.inner (Fn_with_id.as_fn' x.post_map) (Fn_with_id.as_fn' x.pre_map)
;;
module Of_record =
Record_builder_internal
(T)
(struct
type 'a profunctor_term = 'a F.t
let inj = inj
let prj = prj
end)
end
| |
4877ed9e2f0f79e3b1b72e30cfd964ec838abecd51184d0f650677425f38f3f8 | michaelklishin/quartzite | daily_interval_test.clj | (ns clojurewerkz.quartzite.test.schedule.daily-interval-test
(:refer-clojure :exclude [key])
(:require [clojure.test :refer :all]
[clojurewerkz.quartzite.schedule.daily-interval :refer :all])
(:import [org.quartz DateBuilder DateBuilder$IntervalUnit SimpleTrigger]
[org.quartz.impl.triggers DailyTimeIntervalTriggerImpl]
[java.util TreeSet]))
(deftest test-daily-interval-schedule-dsl-example1
(let [i 2
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-seconds i)
(with-repeat-count n)
(on-days-of-the-week (TreeSet. [(Integer/valueOf 1) (Integer/valueOf 2) (Integer/valueOf 3) (Integer/valueOf 4)]))
(with-misfire-handling-instruction-ignore-misfires)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/SECOND (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example2
(let [i 5
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-seconds i)
(with-repeat-count n)
(monday-through-friday)
(starting-daily-at (time-of-day 15 00 00))
(ending-daily-at (time-of-day 15 00 00))
(ignore-misfires)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/SECOND (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example3
(let [i 3
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-minutes i)
(with-repeat-count n)
(saturday-and-sunday)
(with-misfire-handling-instruction-fire-and-proceed)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/MINUTE (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example4
(let [i 333
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-hours i)
(with-repeat-count n)
(every-day)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/HOUR (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example5
(let [i 4
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-hours i)
(on-saturday-and-sunday)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= (SimpleTrigger/REPEAT_INDEFINITELY) (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example6
(let [i 3
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-days i)
(on-monday-through-friday)
(finalize))]
(is (= (* 24 i) (.getRepeatInterval sched)))))
| null | https://raw.githubusercontent.com/michaelklishin/quartzite/de96a2ae95ec232df3e24cf10f4e5ff33af3553b/test/clojurewerkz/quartzite/test/schedule/daily_interval_test.clj | clojure | (ns clojurewerkz.quartzite.test.schedule.daily-interval-test
(:refer-clojure :exclude [key])
(:require [clojure.test :refer :all]
[clojurewerkz.quartzite.schedule.daily-interval :refer :all])
(:import [org.quartz DateBuilder DateBuilder$IntervalUnit SimpleTrigger]
[org.quartz.impl.triggers DailyTimeIntervalTriggerImpl]
[java.util TreeSet]))
(deftest test-daily-interval-schedule-dsl-example1
(let [i 2
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-seconds i)
(with-repeat-count n)
(on-days-of-the-week (TreeSet. [(Integer/valueOf 1) (Integer/valueOf 2) (Integer/valueOf 3) (Integer/valueOf 4)]))
(with-misfire-handling-instruction-ignore-misfires)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/SECOND (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example2
(let [i 5
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-seconds i)
(with-repeat-count n)
(monday-through-friday)
(starting-daily-at (time-of-day 15 00 00))
(ending-daily-at (time-of-day 15 00 00))
(ignore-misfires)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/SECOND (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example3
(let [i 3
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-minutes i)
(with-repeat-count n)
(saturday-and-sunday)
(with-misfire-handling-instruction-fire-and-proceed)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/MINUTE (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example4
(let [i 333
n 10
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-hours i)
(with-repeat-count n)
(every-day)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= DateBuilder$IntervalUnit/HOUR (.getRepeatIntervalUnit sched)))
(is (= n (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example5
(let [i 4
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-hours i)
(on-saturday-and-sunday)
(finalize))]
(is (= i (.getRepeatInterval sched)))
(is (= (SimpleTrigger/REPEAT_INDEFINITELY) (.getRepeatCount sched)))))
(deftest test-daily-interval-schedule-dsl-example6
(let [i 3
^DailyTimeIntervalTriggerImpl sched (schedule
(with-interval-in-days i)
(on-monday-through-friday)
(finalize))]
(is (= (* 24 i) (.getRepeatInterval sched)))))
| |
8ceb570bb151296b0d946d276a33159537dbc644d6cf35402adae4397139c411 | league/metaserv | chunked.ml |
type t =
{ fd: Unix.file_descr;
buf: string;
size: int;
mutable pos: int
}
let descr fd size =
{ fd = fd;
buf = String.create size;
so there is always room for CR / LF
pos = 0 }
let channel ch size =
flush ch;
descr (Unix.descr_of_out_channel ch) size
let write fd s n =
let k = Unix.write fd s 0 n in
if n <> k then failwith "Chunked.write: incomplete"
let write_string fd s =
write fd s (String.length s)
let write_chunk_header fd n =
write_string fd (Printf.sprintf "%x\r\n" n)
let add_crlf s k =
String.set s k '\r';
String.set s (k+1) '\n'
let flush stream =
let n = stream.pos in
if n > 0 then
(write_chunk_header stream.fd n;
add_crlf stream.buf n;
write stream.fd stream.buf (n+2);
stream.pos <- 0)
let puts stream text =
let n = String.length text in
if stream.pos + n > stream.size then
flush stream;
if n > stream.size then
(write_chunk_header stream.fd n;
write_string stream.fd text;
write_string stream.fd "\r\n")
else
(String.blit text 0 stream.buf stream.pos n;
stream.pos <- stream.pos + n)
let finish stream =
flush stream;
write_string stream.fd "0\r\n"
| null | https://raw.githubusercontent.com/league/metaserv/35e85832b3d6dfe4e15b8036653d4429a90644f5/server/chunked.ml | ocaml |
type t =
{ fd: Unix.file_descr;
buf: string;
size: int;
mutable pos: int
}
let descr fd size =
{ fd = fd;
buf = String.create size;
so there is always room for CR / LF
pos = 0 }
let channel ch size =
flush ch;
descr (Unix.descr_of_out_channel ch) size
let write fd s n =
let k = Unix.write fd s 0 n in
if n <> k then failwith "Chunked.write: incomplete"
let write_string fd s =
write fd s (String.length s)
let write_chunk_header fd n =
write_string fd (Printf.sprintf "%x\r\n" n)
let add_crlf s k =
String.set s k '\r';
String.set s (k+1) '\n'
let flush stream =
let n = stream.pos in
if n > 0 then
(write_chunk_header stream.fd n;
add_crlf stream.buf n;
write stream.fd stream.buf (n+2);
stream.pos <- 0)
let puts stream text =
let n = String.length text in
if stream.pos + n > stream.size then
flush stream;
if n > stream.size then
(write_chunk_header stream.fd n;
write_string stream.fd text;
write_string stream.fd "\r\n")
else
(String.blit text 0 stream.buf stream.pos n;
stream.pos <- stream.pos + n)
let finish stream =
flush stream;
write_string stream.fd "0\r\n"
| |
79adf612ef253305e16bc6d1f440000dcf74c0c0c22399c69f90f2302b54231d | sbcl/specializable | protocol.lisp | ;;;; package.lisp --- Package definition for the specializable-graph system.
;;;;
Copyright ( C ) 2014 , 2015 , 2016 Jan Moringen
;;;;
Author : < >
(cl:in-package #:specializable.graph)
;;; Specializer labels protocol
(defgeneric specializer-html-label (graph specializer)
(:documentation
"Return a label for the node representing SPECIALIZER in GRAPH."))
;;; Graph
(defgeneric specializer-graph-generic-function (graph)
(:documentation
"Return the generic function for which GRAPH is computed."))
(defgeneric specializer-graph-argument-position (graph)
(:documentation
"Return the index of the mandatory parameter for which GRAPH is computed."))
(defgeneric specializer-graph-argument (graph)
(:documentation
"Return the parameter value for which GRAPH is computed."))
(defgeneric specializer-graph-generalizer (graph)
(:documentation
"Return the generalizer object for which GRAPH is computed.
The generalizer object is derived from the argument for which
GRAPH is computed."))
(defgeneric specializer-graph-specializers (graph)
(:documentation
"Return the list of specializers corresponding to the nodes of GRAPH."))
(defgeneric make-specializer-graph (generic-function argument-position argument)
(:documentation
"Return a specializer graph for GENERIC-FUNCTION, ARGUMENT-POSITION and ARGUMENT.
GENERIC-FUNCTION from the methods of which specializers are
collected. Specializers appearing at ARGUMENT-POSITION in the
respective method lambda list are taken into account.
ARGUMENT is a value for the parameter at ARGUMENT-POSITION in the
generic function lambda-list. The value controls which
specializers are applicable, as well potentially, the order of the
applicable specializers."))
| null | https://raw.githubusercontent.com/sbcl/specializable/a08048ce874a2a8c58e4735d88de3bf3da0de052/src/graph/protocol.lisp | lisp | package.lisp --- Package definition for the specializable-graph system.
Specializer labels protocol
Graph | Copyright ( C ) 2014 , 2015 , 2016 Jan Moringen
Author : < >
(cl:in-package #:specializable.graph)
(defgeneric specializer-html-label (graph specializer)
(:documentation
"Return a label for the node representing SPECIALIZER in GRAPH."))
(defgeneric specializer-graph-generic-function (graph)
(:documentation
"Return the generic function for which GRAPH is computed."))
(defgeneric specializer-graph-argument-position (graph)
(:documentation
"Return the index of the mandatory parameter for which GRAPH is computed."))
(defgeneric specializer-graph-argument (graph)
(:documentation
"Return the parameter value for which GRAPH is computed."))
(defgeneric specializer-graph-generalizer (graph)
(:documentation
"Return the generalizer object for which GRAPH is computed.
The generalizer object is derived from the argument for which
GRAPH is computed."))
(defgeneric specializer-graph-specializers (graph)
(:documentation
"Return the list of specializers corresponding to the nodes of GRAPH."))
(defgeneric make-specializer-graph (generic-function argument-position argument)
(:documentation
"Return a specializer graph for GENERIC-FUNCTION, ARGUMENT-POSITION and ARGUMENT.
GENERIC-FUNCTION from the methods of which specializers are
collected. Specializers appearing at ARGUMENT-POSITION in the
respective method lambda list are taken into account.
ARGUMENT is a value for the parameter at ARGUMENT-POSITION in the
generic function lambda-list. The value controls which
specializers are applicable, as well potentially, the order of the
applicable specializers."))
|
22dc9dae6efdca4b8c34dfbefc7c4fb56257bca80d9b42c47b124c0b91a0c06b | c-cube/trustee | congruence_closure.mli |
* { 1 Congruence closure }
module K = Kernel
val prove_cc_eqn : K.ctx -> K.thm list -> K.expr -> K.expr -> K.thm option
* [ prove_cc_eqn ctx hyps t u ] tries to prove [ hyps |- t = u ] by congruence closure .
If it succeeds it returns [ Some ( \Gamma |- t = u ) ] where [ \Gamma ]
is a subset of [ hyps ] .
If it succeeds it returns [Some (\Gamma |- t=u)] where [\Gamma]
is a subset of [hyps]. *)
val prove_cc_bool : K.ctx -> K.thm list -> K.expr -> K.thm option
* [ prove_cc_bool ctx hyps res ] tries to prove the boolean [ res ]
from the hypotheses [ hyps ] , that is , [ hyps |- res ] .
If [ res ] is an equation , [ prove_cc_bool ] behaves like
[ prove_cc_eqn ] ; otherwise it needs an hypothesis to be [ p ]
and the conclusion to be [ p ' ] , where [ hyps \ { p } |- p = p ' ] .
from the hypotheses [hyps], that is, [hyps |- res].
If [res] is an equation, [prove_cc_bool] behaves like
[prove_cc_eqn]; otherwise it needs an hypothesis to be [p]
and the conclusion to be [p'], where [hyps \ {p} |- p=p'].
*)
val prove_cc_false :
K.ctx ->
prove_false:(K.ctx -> K.thm -> K.thm -> K.thm) ->
not_e:K.expr ->
K.thm list -> K.thm option
* [ prove_cc_false ctx ~prove_false ~not_e hyps ]
tries to prove [ false ] from the theorems in [ hyps ] .
@param prove_false a function such that [ prove_false ctx ( |- a ) ( |- ¬ a ) ]
returns [ |- false ]
@param not_e the constant [ ¬ ]
tries to prove [false] from the theorems in [hyps].
@param prove_false a function such that [prove_false ctx (|- a) (|- ¬ a)]
returns [|- false]
@param not_e the constant [¬]
*)
| null | https://raw.githubusercontent.com/c-cube/trustee/07f18db7f4337f07bb0c5526b02bdd86ec9cfa2c/src/core/congruence_closure.mli | ocaml |
* { 1 Congruence closure }
module K = Kernel
val prove_cc_eqn : K.ctx -> K.thm list -> K.expr -> K.expr -> K.thm option
* [ prove_cc_eqn ctx hyps t u ] tries to prove [ hyps |- t = u ] by congruence closure .
If it succeeds it returns [ Some ( \Gamma |- t = u ) ] where [ \Gamma ]
is a subset of [ hyps ] .
If it succeeds it returns [Some (\Gamma |- t=u)] where [\Gamma]
is a subset of [hyps]. *)
val prove_cc_bool : K.ctx -> K.thm list -> K.expr -> K.thm option
* [ prove_cc_bool ctx hyps res ] tries to prove the boolean [ res ]
from the hypotheses [ hyps ] , that is , [ hyps |- res ] .
If [ res ] is an equation , [ prove_cc_bool ] behaves like
[ prove_cc_eqn ] ; otherwise it needs an hypothesis to be [ p ]
and the conclusion to be [ p ' ] , where [ hyps \ { p } |- p = p ' ] .
from the hypotheses [hyps], that is, [hyps |- res].
If [res] is an equation, [prove_cc_bool] behaves like
[prove_cc_eqn]; otherwise it needs an hypothesis to be [p]
and the conclusion to be [p'], where [hyps \ {p} |- p=p'].
*)
val prove_cc_false :
K.ctx ->
prove_false:(K.ctx -> K.thm -> K.thm -> K.thm) ->
not_e:K.expr ->
K.thm list -> K.thm option
(** [prove_cc_false ctx ~prove_false ~not_e hyps]
    tries to prove [false] from the theorems in [hyps].
    @param prove_false a function such that [prove_false ctx (|- a) (|- ¬ a)]
    returns [|- false]
    @param not_e the constant [¬]
*)
| |
7c4b071951cf89eb7aa6588fbfe7b65d3162a222a69d9cd7855b38b1fe693664 | juxt/jig | git.clj | Copyright © 2013 , JUXT LTD . All Rights Reserved .
;;
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;;
;; By using this software in any fashion, you are agreeing to be bound by the
;; terms of this license.
;;
;; You must not remove this notice, or any other, from this software.
(ns jig.git
(:require
jig
[clojure.tools.logging :refer :all]
[clojure.java.shell :as sh])
(:import (jig Lifecycle)))
;; Jig component that runs `git pull` when the system stops, so that the
;; next start picks up the latest sources.  `config` may contain
;; :directory, the working tree in which to pull (defaults to the
;; process working directory when absent).
(deftype GitPull [config]
  Lifecycle
  ;; Nothing to set up or start: this component only acts on shutdown.
  (init [_ system]
    system)
  (start [_ system]
    system)
  (stop [_ system]
    ;; We pull the latest system on stop.
    (infof "Pulling latest git version")
    (let [{:keys [exit out err]}
          (if-let [dir (:directory config)]
            (sh/with-sh-dir dir
              (sh/sh "git" "pull"))
            (sh/sh "git" "pull"))]
      ;; A non-zero exit code is a hard failure; otherwise anything on
      ;; stderr is still logged as an error.  NOTE(review): git writes
      ;; progress to stderr too, so the second branch may be noisy --
      ;; confirm intent.
      (cond
       (pos? exit) (errorf "Git failed with the following (error %d): %s\n%s" exit out err)
       (not (empty? err)) (errorf "Git error: %s" err)))
    system))
| null | https://raw.githubusercontent.com/juxt/jig/3997887e5a56faadb1b48eccecbc7034b3d31e41/extensions/git/git.clj | clojure |
The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by the
terms of this license.
You must not remove this notice, or any other, from this software.
We pull the latest system on stop. | Copyright © 2013 , JUXT LTD . All Rights Reserved .
Eclipse Public License 1.0 ( -1.0.php )
(ns jig.git
(:require
jig
[clojure.tools.logging :refer :all]
[clojure.java.shell :as sh])
(:import (jig Lifecycle)))
(deftype GitPull [config]
Lifecycle
(init [_ system]
system)
(start [_ system]
system)
(stop [_ system]
(infof "Pulling latest git version")
(let [{:keys [exit out err]}
(if-let [dir (:directory config)]
(sh/with-sh-dir dir
(sh/sh "git" "pull"))
(sh/sh "git" "pull"))]
(cond
(pos? exit) (errorf "Git failed with the following (error %d): %s\n%s" exit out err)
(not (empty? err)) (errorf "Git error: %s" err)))
system))
|
1703c362bb068820b6c41aa7f0e2b2dcdd975fe7683b0a2bede1b1f4717a039b | HaskellZhangSong/Introduction_to_Haskell_2ed_source | GEq.hs | # LANGUAGE MultiParamTypeClasses , FlexibleInstances #
-- | Heterogeneous equality: compare values of two (possibly different)
-- numeric types.
class GEq a b where
    geq :: a -> b -> Bool
-- Overlappable catch-all: a Double on the left against any Real on the
-- right, compared exactly after conversion to Rational.
instance {-# OVERLAPPABLE #-}Real b => GEq Double b where
    geq a b = toRational a == toRational b
-- Symmetric case: any Real on the left against a Double on the right.
instance {-# OVERLAPPABLE #-} Real a => GEq a Double where
    geq a b = toRational a == toRational b
-- Double/Double matches both instances above, so this one is marked
-- OVERLAPPING to win instance selection.
instance {-# OVERLAPPING #-} GEq Double Double where
    geq a b = toRational a == toRational b
| null | https://raw.githubusercontent.com/HaskellZhangSong/Introduction_to_Haskell_2ed_source/140c50fdccfe608fe499ecf2d8a3732f531173f5/C16/GEq.hs | haskell | # OVERLAPPABLE #
# OVERLAPPABLE #
# OVERLAPPING # | # LANGUAGE MultiParamTypeClasses , FlexibleInstances #
class GEq a b where
geq :: a -> b -> Bool
geq a b = toRational a == toRational b
geq a b = toRational a == toRational b
geq a b = toRational a == toRational b
|
6d1cb6c87455b50a2d182b2f42a785b3355aa0994ab2382a04840da1d79570d6 | kagamilove0707/F2 | Main.hs | # Language TemplateHaskell , QuasiQuotes , FlexibleContexts #
module Main where
import Language.F2
import Language.F2.Util
import System.IO (hFlush, stdout)
import Control.Arrow (first, second, (>>>))
import Control.Monad.State
import Text.Peggy (defaultDelimiter, peggy, parseString)
-- Peggy PEG grammar for REPL input.  `top` parses one whole line into a
-- (binding-name, expression-source) pair: a "def name = ..." or
-- "def (op) = ..." form binds the given name/operator, and any other
-- expression is bound to the default name "it".  Whitespace and
-- Haskell-style {- ... -} comments are skipped between tokens.
[peggy|
space :: ()
  = [ \r\n\t] { () } / comment { () }
delimiter :: ()
  = [()\[\]<>;:,.+*/<>=:^~#$-'|&] { () }
comment :: ()
  = '{-' (space / (!"-}" . { () }))* '-}' { () }
top :: (String, String)
  = expr !.
expr :: (String, String)
  = defExpr
  / otherExpr { ("it", $1) }
defExpr :: (String, String)
  = "def" name "=" .+ { ($1, $2) }
  / "def" "(" op ")" "=" .+ { ($1, $2) }
otherExpr :: String
  = .+
name ::: String
  = !"fun" !"in" !"let" !"rec" !"if" !"then" !"else" [a-z_] [a-zA-Z0-9~']* { $1 : $2 }
  / [~]+ { $1 }
op ::: String
  = [.+\-*/<>^~#$|&] [.+\-*/<>^~#$&|=:]* { $1 : $2 }
  / [=:] [.+\-*/<>^~#$&|=:]+ { $1 : $2 }
|]
-- | ASCII-art banner, including the interpreter version, printed once
-- at startup.
helloStr :: String
helloStr =
  concat
    [ " ____ _____\n"
    , " // //\n"
    , " //-- ---- \n"
    , "// //___ version " ++ version ++ "\n"
    ]
-- | Entry point: print the banner, then run the interactive loop
-- starting at prompt number 1 with the prelude environment.  The
-- loop's final (result, state) pair is discarded when the process
-- exits.
main = putStrLn helloStr >> runStateT mainloop (1, preludeEnv)
-- | One REPL iteration over state (prompt number, environment):
-- print the numbered prompt, read a line and dispatch on it.
-- ":q" quits (no recursion), ":v" prints the version; anything else is
-- parsed and evaluated, and the result is bound into the environment
-- (under the name given by "def", or "it" otherwise) before looping.
mainloop :: StateT (Int, Env) IO ()
mainloop = do
  (n, env) <- get
  -- The prompt has no trailing newline, so flush stdout explicitly.
  lift $ putStr $ "(" ++ show n ++ ")# "
  lift $ hFlush stdout
  line <- lift $ getLine
  if line == ":q" then do
    lift $ putStrLn "\n See you!!"
  else if line == ":v" then do
    lift $ putStrLn $ " version " ++ version
    modify (first (+ 1))
    mainloop
  else do
    -- Parse into (binding name, expression text), then evaluate with
    -- the project's [exec]; errors are reported but never stop the
    -- loop.
    case parseString top "<source>" line of
      Left e -> lift $ putStrLn $ " parse error : " ++ showParseError e
      Right (name, x) -> case exec env x of
        Left e -> lift $ putStrLn $ " " ++ e
        Right (t, v) -> do
          case getValue v of
            Left e -> lift $ putStrLn $ " " ++ e
            Right v' -> do
              -- Extend the environment with the new binding.
              modify (second ((name, (t, v')):))
              lift $ putStrLn $ " " ++ name ++ " = " ++ x ++ " = " ++ show v' ++ " : " ++ show t
    -- Bump the prompt counter and continue.
    modify (first (+ 1))
    mainloop
| null | https://raw.githubusercontent.com/kagamilove0707/F2/3e3a9384337c2d511db59b86114d48507eb22255/src/Main.hs | haskell | ' (space / (!" | # Language TemplateHaskell , QuasiQuotes , FlexibleContexts #
module Main where
import Language.F2
import Language.F2.Util
import System.IO (hFlush, stdout)
import Control.Arrow (first, second, (>>>))
import Control.Monad.State
import Text.Peggy (defaultDelimiter, peggy, parseString)
[peggy|
space :: ()
= [ \r\n\t] { () } / comment { () }
delimiter :: ()
= [()\[\]<>;:,.+*/<>=:^~#$-'|&] { () }
comment :: ()
top :: (String, String)
= expr !.
expr :: (String, String)
= defExpr
/ otherExpr { ("it", $1) }
defExpr :: (String, String)
= "def" name "=" .+ { ($1, $2) }
/ "def" "(" op ")" "=" .+ { ($1, $2) }
otherExpr :: String
= .+
name ::: String
= !"fun" !"in" !"let" !"rec" !"if" !"then" !"else" [a-z_] [a-zA-Z0-9~']* { $1 : $2 }
/ [~]+ { $1 }
op ::: String
= [.+\-*/<>^~#$|&] [.+\-*/<>^~#$&|=:]* { $1 : $2 }
/ [=:] [.+\-*/<>^~#$&|=:]+ { $1 : $2 }
|]
helloStr
= " ____ _____\n" ++
" // //\n" ++
" //-- ---- \n" ++
"// //___ version " ++ version ++ "\n"
main = do
putStrLn $ helloStr
runStateT mainloop (1, preludeEnv)
mainloop :: StateT (Int, Env) IO ()
mainloop = do
(n, env) <- get
lift $ putStr $ "(" ++ show n ++ ")# "
lift $ hFlush stdout
line <- lift $ getLine
if line == ":q" then do
lift $ putStrLn "\n See you!!"
else if line == ":v" then do
lift $ putStrLn $ " version " ++ version
modify (first (+ 1))
mainloop
else do
case parseString top "<source>" line of
Left e -> lift $ putStrLn $ " parse error : " ++ showParseError e
Right (name, x) -> case exec env x of
Left e -> lift $ putStrLn $ " " ++ e
Right (t, v) -> do
case getValue v of
Left e -> lift $ putStrLn $ " " ++ e
Right v' -> do
modify (second ((name, (t, v')):))
lift $ putStrLn $ " " ++ name ++ " = " ++ x ++ " = " ++ show v' ++ " : " ++ show t
modify (first (+ 1))
mainloop
|
bff726bade15ebe9b2a640773e35ad74b5377e666fe0ca2e3d4df13a7b5ab2e5 | monadbobo/ocaml-core | option.mli | (** [Option] wraps the output [x] of successful functions in [Some x]. Failed
functions return [None]. *)
(** Options are preferred over exceptions.  For example, use
    {[
      let data = [(2, "two"); (5, "five"); (8, "eight")];;
      let f x = match List.Assoc.find_opt x data with
        | Some y -> y
        | None -> "zero" (* where "zero" is some default value *);; ]}
    rather than
    {[
      let f x = try List.Assoc.find x data with Not_found -> "zero";; ]}
    In this case using an exception is shorter, but in nontrivial code options
    are easier to understand. *)
type 'a t = 'a option with sexp
include Container.S1 with type 'a t := 'a t
(** Options form a monad, where [return x = Some x],
[(None >>= f) = None], and [(Some x >>= f) = f x]. *)
include Monad.S with type 'a t := 'a t
(** [is_none t] returns true iff t = None. *)
val is_none : 'a t -> bool
(** [is_some t] returns true iff t = Some x. *)
val is_some : 'a t -> bool
(** [value_map t ~default ~f] is equivalent to [value (map t ~f) ~default], except that
    it is slightly faster since it avoids creating the intermediate option. I.e.
    [value_map None ~default ~f] = [default]
    [value_map (Some x) ~default ~f] = [f x] *)
val value_map : 'a t -> default:'b -> f:('a -> 'b) -> 'b
(** [map2 o f] map 'a option and 'b option to a 'c option using ~f *)
val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t
(** [call x f] run optional function on argument *)
val call : 'a -> f:('a -> unit) t -> unit
(** [apply x f] run optional function on argument and return an option *)
val apply : 'a -> f:('a -> 'b) t -> 'b t
(** [value None ~default] = [default]
[value (Some x) ~default] = [x]
*)
val value : 'a t -> default:'a -> 'a
(** [value_exn (Some x)] = [x].
[value_exn None] raises an exception. *)
val value_exn : 'a t -> 'a
(** [value_exn_message message (Some x)] = [x].
[value_exn_message message None] raises exception Failure with
string [message]. *)
val value_exn_message : string -> 'a t -> 'a
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val some : 'a -> 'a t
val both : 'a t -> 'b t -> ('a * 'b) t
val first_some : 'a t -> 'a t -> 'a t
val some_if : bool -> 'a -> 'a t
val filter : f:('a -> bool) -> 'a t -> 'a t
(** [try_with f] returns [Some x] if [f] returns [x] and [None] if [f] raises an
exception. See [Result.try_with] if you'd like to know which exception. *)
val try_with : (unit -> 'a) -> 'a t
val compare : cmp:('a -> 'a -> int) -> 'a t -> 'a t -> int
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/core/lib/option.mli | ocaml | * [Option] wraps the output [x] of successful functions in [Some x]. Failed
functions return [None].
where "zero" is some default value
* Options form a monad, where [return x = Some x],
[(None >>= f) = None], and [(Some x >>= f) = f x].
* [is_none t] returns true iff t = None.
* [is_some t] returns true iff t = Some x.
* [map2 o f] map 'a option and 'b option to a 'c option using ~f
* [call x f] run optional function on argument
* [apply x f] run optional function on argument and return an option
* [value None ~default] = [default]
[value (Some x) ~default] = [x]
* [value_exn (Some x)] = [x].
[value_exn None] raises an exception.
* [value_exn_message message (Some x)] = [x].
[value_exn_message message None] raises exception Failure with
string [message].
* [try_with f] returns [Some x] if [f] returns [x] and [None] if [f] raises an
exception. See [Result.try_with] if you'd like to know which exception. |
* Options are preferred over exceptions . For example , use
{ [
let data = [ ( 2 , " two " ) ; ( 5 , " five " ) ; ( 8 , " eight " ) ] ; ;
let f x = match List . data with
| Some y - > y
| None - > " zero " ( * where " zero " is some default value
{[
let data = [(2, "two"); (5, "five"); (8, "eight")];;
let f x = match List.Assoc.find_opt x data with
| Some y -> y
rather than
{[
let f x = try List.Assoc.find x data with Not_found -> "zero";; ]}
In this case using an exception is shorter, but in nontrivial code options
are easier to understand. *)
type 'a t = 'a option with sexp
include Container.S1 with type 'a t := 'a t
include Monad.S with type 'a t := 'a t
val is_none : 'a t -> bool
val is_some : 'a t -> bool
* [ value_map t ~f ~default ] is equivalent to [ value ( map t ~f ) ~default ] , except that
it is slightly faster since it avoids creating the intermediate option . I.e.
[ value_map None ] = [ default ]
[ value_map ( Some x ) ~default ~f ] = [ f x ]
it is slightly faster since it avoids creating the intermediate option. I.e.
[value_map None ~default ~f] = [default]
[value_map (Some x) ~default ~f] = [f x] *)
val value_map : 'a t -> default:'b -> f:('a -> 'b) -> 'b
val map2 : 'a t -> 'b t -> f:('a -> 'b -> 'c) -> 'c t
val call : 'a -> f:('a -> unit) t -> unit
val apply : 'a -> f:('a -> 'b) t -> 'b t
val value : 'a t -> default:'a -> 'a
val value_exn : 'a t -> 'a
val value_exn_message : string -> 'a t -> 'a
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val some : 'a -> 'a t
val both : 'a t -> 'b t -> ('a * 'b) t
val first_some : 'a t -> 'a t -> 'a t
val some_if : bool -> 'a -> 'a t
val filter : f:('a -> bool) -> 'a t -> 'a t
val try_with : (unit -> 'a) -> 'a t
val compare : cmp:('a -> 'a -> int) -> 'a t -> 'a t -> int
|
d7c2f2913a01d299e718756cdaaabe678e7837927adf4bb34b2760b963a5389b | derui/okeyfum | okeyfum_log.ml | type level =
| DEBUG
| INFO
| ERROR
(** [level_to_string lvl] renders a level as the lowercase tag used in
    the log output. *)
let level_to_string lvl =
  match lvl with
  | DEBUG -> "debug"
  | INFO -> "info"
  | ERROR -> "error"

(** [level_to_int lvl] maps a level to its severity rank; a higher
    number is more severe. *)
let level_to_int lvl =
  match lvl with
  | DEBUG -> 0
  | INFO -> 1
  | ERROR -> 2
(* Global mutable threshold: messages below this level are discarded.
   NOTE(review): a plain unsynchronised [ref]; concurrent mutation is
   not coordinated here. *)
(** Current minimum level that will be emitted; defaults to [INFO]. *)
let lower_log_level = ref INFO
(** [set_log_level level] sets the global minimum level. *)
let set_log_level level = lower_log_level := level
(** [get_log_level ()] returns the current minimum level. *)
let get_log_level () = !lower_log_level
(** [output_log ~log ~level] prints [log] to stdout, tagged with the
    level name, iff [level] is at least as severe as the configured
    minimum.  The [%!] directive flushes after every message. *)
let output_log ~log ~level =
  let threshold = level_to_int !lower_log_level in
  if level_to_int level >= threshold then
    Printf.printf "[%5s] %s\n%!" (level_to_string level) log
(** Convenience wrappers that log at a fixed level. *)
let debug log = output_log ~level:DEBUG ~log
let info log = output_log ~level:INFO ~log
let error log = output_log ~level:ERROR ~log
| null | https://raw.githubusercontent.com/derui/okeyfum/bc47c0bea35c00d720dcde0200256a3f2e1312bd/src/okeyfum_log.ml | ocaml | type level =
| DEBUG
| INFO
| ERROR
let level_to_string = function DEBUG -> "debug" | INFO -> "info" | ERROR -> "error"
let level_to_int = function DEBUG -> 0 | INFO -> 1 | ERROR -> 2
let lower_log_level = ref INFO
let set_log_level level = lower_log_level := level
let get_log_level () = !lower_log_level
let output_log ~log ~level =
let current = level_to_int !lower_log_level and level_order = level_to_int level in
if level_order >= current then Printf.printf "[%5s] %s\n%!" (level_to_string level) log else ()
let debug log = output_log ~level:DEBUG ~log
let info log = output_log ~level:INFO ~log
let error log = output_log ~level:ERROR ~log
| |
0e0210083c470a8f67aa964d6bbcad78b40f53979ecbb159edaf200c0199b371 | voxoz/emqttd | emqttd.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2013 - 2017 EMQ Enterprise , Inc. ( )
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc EMQ Main Module.
-module(emqttd).
-author("Feng Lee <>").
-include("emqttd.hrl").
-include("emqttd_protocol.hrl").
-export([start/0, env/1, env/2, is_running/1, stop/0]).
PubSub API
-export([subscribe/1, subscribe/2, subscribe/3, publish/1,
unsubscribe/1, unsubscribe/2]).
%% PubSub Management API
-export([setqos/3, topics/0, subscriptions/1, subscribers/1,
is_subscribed/2, subscriber_down/1]).
%% Hooks API
-export([hook/4, hook/3, unhook/2, run_hooks/2, run_hooks/3]).
%% Debug API
-export([dump/0]).
-type(subscriber() :: pid() | binary()).
-type(suboption() :: local | {qos, non_neg_integer()} | {share, {'$queue' | binary()}}).
-type(pubsub_error() :: {error, {already_subscribed, binary()}
| {subscription_not_found, binary()}}).
-export_type([subscriber/0, suboption/0, pubsub_error/0]).
-define(APP, ?MODULE).
%%--------------------------------------------------------------------
Bootstrap , environment , configuration , is_running ...
%%--------------------------------------------------------------------
%% @doc Start the emqttd OTP application.
-spec(start() -> ok | {error, any()}).
start() -> application:start(?APP).
%% @doc Stop the emqttd OTP application.
-spec(stop() -> ok | {error, any()}).
stop() -> application:stop(?APP).
%% @doc Look up an application environment value; `undefined' if unset.
-spec(env(Key:: atom()) -> {ok, any()} | undefined).
env(Key) -> application:get_env(?APP, Key).
%% @doc Look up an application environment value, falling back to Default.
-spec(env(Key:: atom(), Default:: any()) -> undefined | any()).
env(Key, Default) -> application:get_env(?APP, Key, Default).
%% @doc Check whether the emqttd application process is registered on
%% Node.  Returns false both when the process is not running
%% (whereis -> undefined) and when the node is unreachable
%% (rpc:call -> {badrpc, Reason}).
-spec(is_running(node()) -> boolean()).
is_running(Node) ->
    Result = rpc:call(Node, erlang, whereis, [?APP]),
    is_pid(Result).
%%--------------------------------------------------------------------
PubSub APIs
%%--------------------------------------------------------------------
%% @doc Subscribe the calling process to Topic with default options.
-spec(subscribe(iodata()) -> ok | {error, any()}).
subscribe(Topic) ->
    subscribe(Topic, self()).
%% @doc Subscribe Subscriber (pid or binary id) to Topic.
-spec(subscribe(iodata(), subscriber()) -> ok | {error, any()}).
subscribe(Topic, Subscriber) ->
    subscribe(Topic, Subscriber, []).
%% @doc Subscribe with explicit options; Topic is normalised to a binary.
-spec(subscribe(iodata(), subscriber(), [suboption()]) -> ok | pubsub_error()).
subscribe(Topic, Subscriber, Options) ->
    emqttd_server:subscribe(iolist_to_binary(Topic), Subscriber, Options).
%% @doc Publish an MQTT Message via the pubsub server.
-spec(publish(mqtt_message()) -> {ok, mqtt_delivery()} | ignore).
publish(Msg) ->
    emqttd_server:publish(Msg).
%% @doc Unsubscribe the calling process from Topic.
-spec(unsubscribe(iodata()) -> ok | pubsub_error()).
unsubscribe(Topic) ->
    unsubscribe(Topic, self()).
%% @doc Unsubscribe Subscriber from Topic.
-spec(unsubscribe(iodata(), subscriber()) -> ok | pubsub_error()).
unsubscribe(Topic, Subscriber) ->
    emqttd_server:unsubscribe(iolist_to_binary(Topic), Subscriber).
%% @doc Change the QoS of an existing subscription.
-spec(setqos(binary(), subscriber(), mqtt_qos()) -> ok).
setqos(Topic, Subscriber, Qos) ->
    emqttd_server:setqos(iolist_to_binary(Topic), Subscriber, Qos).
%% @doc List all routed topics.
-spec(topics() -> [binary()]).
topics() -> emqttd_router:topics().
%% @doc List the subscribers of Topic.
-spec(subscribers(iodata()) -> list(subscriber())).
subscribers(Topic) ->
    emqttd_server:subscribers(iolist_to_binary(Topic)).
%% @doc List Subscriber's subscriptions as {Topic, Options} pairs.
-spec(subscriptions(subscriber()) -> [{binary(), suboption()}]).
subscriptions(Subscriber) ->
    emqttd_server:subscriptions(Subscriber).
%% @doc Whether Subscriber currently holds a subscription to Topic.
-spec(is_subscribed(iodata(), subscriber()) -> boolean()).
is_subscribed(Topic, Subscriber) ->
    emqttd_server:is_subscribed(iolist_to_binary(Topic), Subscriber).
%% @doc Clean up all state belonging to a subscriber that went down.
-spec(subscriber_down(subscriber()) -> ok).
subscriber_down(Subscriber) ->
    emqttd_server:subscriber_down(Subscriber).
%%--------------------------------------------------------------------
%% Hooks API
%%--------------------------------------------------------------------
%% @doc Register a hook callback (optionally tagged) with default priority.
-spec(hook(atom(), function() | {emqttd_hooks:hooktag(), function()}, list(any()))
      -> ok | {error, any()}).
hook(Hook, TagFunction, InitArgs) ->
    emqttd_hooks:add(Hook, TagFunction, InitArgs).
%% @doc Register a hook callback with an explicit run priority.
-spec(hook(atom(), function() | {emqttd_hooks:hooktag(), function()}, list(any()), integer())
      -> ok | {error, any()}).
hook(Hook, TagFunction, InitArgs, Priority) ->
    emqttd_hooks:add(Hook, TagFunction, InitArgs, Priority).
%% @doc Remove a previously registered hook callback.
-spec(unhook(atom(), function() | {emqttd_hooks:hooktag(), function()})
      -> ok | {error, any()}).
unhook(Hook, TagFunction) ->
    emqttd_hooks:delete(Hook, TagFunction).
%% @doc Run all callbacks of Hook; a callback may stop the chain.
-spec(run_hooks(atom(), list(any())) -> ok | stop).
run_hooks(Hook, Args) ->
    emqttd_hooks:run(Hook, Args).
%% @doc Run all callbacks of Hook, threading an accumulator through.
-spec(run_hooks(atom(), list(any()), any()) -> {ok | stop, any()}).
run_hooks(Hook, Args, Acc) ->
    emqttd_hooks:run(Hook, Args, Acc).
%%--------------------------------------------------------------------
%% Debug
%%--------------------------------------------------------------------
%% @doc Gather debug dumps from the pubsub server and the router.
dump() ->
    emqttd_server:dump() ++ emqttd_router:dump().
| null | https://raw.githubusercontent.com/voxoz/emqttd/2be612e0e7a00a866cd9af350a030966d73fbc09/src/emqttd.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
@doc EMQ Main Module.
PubSub Management API
Hooks API
Debug API
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc Start emqttd application.
@doc Stop emqttd application.
@doc Environment
@doc Get environment
@doc Is running?
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc Subscribe
@doc Publish MQTT Message
@doc Unsubscribe
--------------------------------------------------------------------
Hooks API
--------------------------------------------------------------------
--------------------------------------------------------------------
Debug
-------------------------------------------------------------------- | Copyright ( c ) 2013 - 2017 EMQ Enterprise , Inc. ( )
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqttd).
-author("Feng Lee <>").
-include("emqttd.hrl").
-include("emqttd_protocol.hrl").
-export([start/0, env/1, env/2, is_running/1, stop/0]).
PubSub API
-export([subscribe/1, subscribe/2, subscribe/3, publish/1,
unsubscribe/1, unsubscribe/2]).
-export([setqos/3, topics/0, subscriptions/1, subscribers/1,
is_subscribed/2, subscriber_down/1]).
-export([hook/4, hook/3, unhook/2, run_hooks/2, run_hooks/3]).
-export([dump/0]).
-type(subscriber() :: pid() | binary()).
-type(suboption() :: local | {qos, non_neg_integer()} | {share, {'$queue' | binary()}}).
-type(pubsub_error() :: {error, {already_subscribed, binary()}
| {subscription_not_found, binary()}}).
-export_type([subscriber/0, suboption/0, pubsub_error/0]).
-define(APP, ?MODULE).
Bootstrap , environment , configuration , is_running ...
-spec(start() -> ok | {error, any()}).
start() -> application:start(?APP).
-spec(stop() -> ok | {error, any()}).
stop() -> application:stop(?APP).
-spec(env(Key:: atom()) -> {ok, any()} | undefined).
env(Key) -> application:get_env(?APP, Key).
-spec(env(Key:: atom(), Default:: any()) -> undefined | any()).
env(Key, Default) -> application:get_env(?APP, Key, Default).
-spec(is_running(node()) -> boolean()).
is_running(Node) ->
case rpc:call(Node, erlang, whereis, [?APP]) of
{badrpc, _} -> false;
undefined -> false;
Pid when is_pid(Pid) -> true
end.
PubSub APIs
-spec(subscribe(iodata()) -> ok | {error, any()}).
subscribe(Topic) ->
subscribe(Topic, self()).
-spec(subscribe(iodata(), subscriber()) -> ok | {error, any()}).
subscribe(Topic, Subscriber) ->
subscribe(Topic, Subscriber, []).
-spec(subscribe(iodata(), subscriber(), [suboption()]) -> ok | pubsub_error()).
subscribe(Topic, Subscriber, Options) ->
emqttd_server:subscribe(iolist_to_binary(Topic), Subscriber, Options).
-spec(publish(mqtt_message()) -> {ok, mqtt_delivery()} | ignore).
publish(Msg) ->
emqttd_server:publish(Msg).
-spec(unsubscribe(iodata()) -> ok | pubsub_error()).
unsubscribe(Topic) ->
unsubscribe(Topic, self()).
-spec(unsubscribe(iodata(), subscriber()) -> ok | pubsub_error()).
unsubscribe(Topic, Subscriber) ->
emqttd_server:unsubscribe(iolist_to_binary(Topic), Subscriber).
-spec(setqos(binary(), subscriber(), mqtt_qos()) -> ok).
setqos(Topic, Subscriber, Qos) ->
emqttd_server:setqos(iolist_to_binary(Topic), Subscriber, Qos).
-spec(topics() -> [binary()]).
topics() -> emqttd_router:topics().
-spec(subscribers(iodata()) -> list(subscriber())).
subscribers(Topic) ->
emqttd_server:subscribers(iolist_to_binary(Topic)).
-spec(subscriptions(subscriber()) -> [{binary(), suboption()}]).
subscriptions(Subscriber) ->
emqttd_server:subscriptions(Subscriber).
-spec(is_subscribed(iodata(), subscriber()) -> boolean()).
is_subscribed(Topic, Subscriber) ->
emqttd_server:is_subscribed(iolist_to_binary(Topic), Subscriber).
-spec(subscriber_down(subscriber()) -> ok).
subscriber_down(Subscriber) ->
emqttd_server:subscriber_down(Subscriber).
-spec(hook(atom(), function() | {emqttd_hooks:hooktag(), function()}, list(any()))
-> ok | {error, any()}).
hook(Hook, TagFunction, InitArgs) ->
emqttd_hooks:add(Hook, TagFunction, InitArgs).
-spec(hook(atom(), function() | {emqttd_hooks:hooktag(), function()}, list(any()), integer())
-> ok | {error, any()}).
hook(Hook, TagFunction, InitArgs, Priority) ->
emqttd_hooks:add(Hook, TagFunction, InitArgs, Priority).
-spec(unhook(atom(), function() | {emqttd_hooks:hooktag(), function()})
-> ok | {error, any()}).
unhook(Hook, TagFunction) ->
emqttd_hooks:delete(Hook, TagFunction).
-spec(run_hooks(atom(), list(any())) -> ok | stop).
run_hooks(Hook, Args) ->
emqttd_hooks:run(Hook, Args).
-spec(run_hooks(atom(), list(any()), any()) -> {ok | stop, any()}).
run_hooks(Hook, Args, Acc) ->
emqttd_hooks:run(Hook, Args, Acc).
dump() -> lists:append([emqttd_server:dump(), emqttd_router:dump()]).
|
76656bd8f5d32d305faa325857b5dfe702dad83c298e173e31906f5b955543b5 | OCADml/OCADml | bezier2.ml | include Bezier.Make (V2)
(* [line_intersection ~line ps] returns the parameter values u in
   [0, 1] at which the Bezier curve with control points [ps] meets the
   infinite line through [line.a] and [line.b].
   Method: project the curve's polynomial coefficients onto a normal of
   the line; the roots of the resulting scalar polynomial are the
   intersection parameters.  NOTE(review): assumes [coefs'] returns
   coefficients with index 0 as the constant term -- confirm against
   its definition. *)
let line_intersection ~(line : V2.line) ps =
  let ps = Array.of_list ps in
  (* Curve degree: one less than the number of control points. *)
  let n = Array.length ps - 1 in
  let bez_coefs = coefs' ps
  (* Vector perpendicular to the line direction a -> b. *)
  and normal = V2.(v (y line.a -. y line.b) (x line.b -. x line.a)) in
  (* Coefficient i of the projected polynomial; the constant term is
     shifted by [line.a] so the line passes through the projection
     origin. *)
  let f i =
    if i = n
    then V2.(dot (sub bez_coefs.(0) line.a) normal)
    else V2.dot bez_coefs.(n - i) normal
  in
  let roots = Math.real_roots @@ Array.init (n + 1) f in
  (* Keep only roots inside the curve's parameter range [0, 1]. *)
  let f i acc = if roots.(i) >= 0. && roots.(i) <= 1. then roots.(i) :: acc else acc in
  Util.fold_init (Array.length roots) f []
(* Transformation combinators: each lifts a point transformation to act
   pointwise on a Bezier function [t : float -> V2.t], yielding a new
   function of the parameter [u]. *)
let translate p t u = V2.translate p (t u)
let xtrans x t u = V2.xtrans x (t u)
let ytrans y t u = V2.ytrans y (t u)
let rotate ?about r t u = V2.rotate ?about r (t u)
(* In 2d, rotation about z is the only rotation: alias for [rotate]. *)
let[@inline] zrot ?about r t u = rotate ?about r t u
let affine m t u = Affine2.transform m (t u)
(* 3d variants lift the 2d point into 3d before transforming. *)
let affine3 m t u = Affine3.transform m (V3.of_v2 (t u))
let quaternion ?about q t u = Quaternion.transform ?about q (V3.of_v2 (t u))
let axis_rotate ?about ax a = quaternion ?about (Quaternion.make ax a)
let scale s t u = V2.scale s (t u)
let mirror ax t u = V2.mirror ax (t u)
| null | https://raw.githubusercontent.com/OCADml/OCADml/f8cd87da86cd5ea4c31ec169c796640ffdc6bdee/lib/bezier2.ml | ocaml | include Bezier.Make (V2)
let line_intersection ~(line : V2.line) ps =
let ps = Array.of_list ps in
let n = Array.length ps - 1 in
let bez_coefs = coefs' ps
and normal = V2.(v (y line.a -. y line.b) (x line.b -. x line.a)) in
let f i =
if i = n
then V2.(dot (sub bez_coefs.(0) line.a) normal)
else V2.dot bez_coefs.(n - i) normal
in
let roots = Math.real_roots @@ Array.init (n + 1) f in
let f i acc = if roots.(i) >= 0. && roots.(i) <= 1. then roots.(i) :: acc else acc in
Util.fold_init (Array.length roots) f []
let translate p t u = V2.translate p (t u)
let xtrans x t u = V2.xtrans x (t u)
let ytrans y t u = V2.ytrans y (t u)
let rotate ?about r t u = V2.rotate ?about r (t u)
let[@inline] zrot ?about r t u = rotate ?about r t u
let affine m t u = Affine2.transform m (t u)
let affine3 m t u = Affine3.transform m (V3.of_v2 (t u))
let quaternion ?about q t u = Quaternion.transform ?about q (V3.of_v2 (t u))
let axis_rotate ?about ax a = quaternion ?about (Quaternion.make ax a)
let scale s t u = V2.scale s (t u)
let mirror ax t u = V2.mirror ax (t u)
| |
a9df5537861a69518646d9e4381bbaa2bb68d754c5d8ea4bda8aec8b5cee37d8 | JeanHuguesdeRaigniac/effects-landscape | Types.hs | module Types where
import qualified Data.Map as Map
-- | Variable names.
type Name = String
-- | Expression syntax: integer literals, variables, addition,
-- lambda abstraction and application.
data Exp
  = Lit Integer
  | Var Name
  | Plus Exp Exp
  | Abs Name Exp
  | App Exp Exp
  deriving (Show)
-- | Run-time values: integers, or closures that capture their
-- defining environment.
data Value
  = IntVal Integer
  | FunVal Env Name Exp
  deriving (Show)
-- | Mapping from names to values.
type Env = Map.Map Name Value
-- aliases for readability
-- | Count of evaluation steps.
type Steps = Integer
-- | A list of variable names.
type Variables = [Name]
| null | https://raw.githubusercontent.com/JeanHuguesdeRaigniac/effects-landscape/e44aea11053ac4db85b862fab027d3777d35e232/app/Types.hs | haskell | variable names
expressions
values
mapping from names to values
aliases for readability | module Types where
import qualified Data.Map as Map
data Exp
| Var Name
| Plus Exp Exp
| Abs Name Exp
| App Exp Exp
deriving (Show)
data Value
| FunVal Env Name Exp
deriving (Show)
type Steps = Integer
type Variables = [Name]
|
ec85c0b65bf2ac27face07d5d7b091772e8ab7e34ef9b2e5b408c957fa67c1f8 | rbkmoney/erlang_capi_v2 | capi_client_payments.erl | -module(capi_client_payments).
-export([get_payment_by_id/3]).
-export([get_payment_by_external_id/2]).
-export([get_payments/2]).
-export([create_payment/3]).
-export([cancel_payment/4]).
-export([capture_payment/4]).
-export([get_refunds/3]).
-export([get_refund_by_id/4]).
-export([get_refund_by_external_id/2]).
-export([get_chargebacks/3]).
-export([get_chargeback_by_id/4]).
-export([create_refund/4]).
-type context() :: capi_client_lib:context().
%% @doc Lists all payments of the given invoice.
-spec get_payments(context(), binary()) -> {ok, term()} | {error, term()}.
get_payments(Context, InvoiceID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID
        }
    },
    %% make_request/2 derives the endpoint URL and request options from Context
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_payments(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Fetches a single payment of an invoice by its payment id.
-spec get_payment_by_id(context(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_payment_by_id(Context, InvoiceID, PaymentID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID
        }
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_payment_by_id(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Looks a payment up by the caller-supplied external id
%% (passed as a query-string parameter, not a path binding).
-spec get_payment_by_external_id(context(), binary()) -> {ok, term()} | {error, term()}.
get_payment_by_external_id(Context, ExternalID) ->
    Params = #{
        qs_val => #{
            <<"externalID">> => ExternalID
        }
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_payment_by_external_id(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Creates a payment for an invoice; Request is the payment spec map
%% sent as the request body.
-spec create_payment(context(), map(), binary()) -> {ok, term()} | {error, term()}.
create_payment(Context, Request, InvoiceID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID
        },
        body => Request
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:create_payment(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Cancels the given payment with a textual reason. Returns `ok' on
%% success, {error, Reason} otherwise.
%% NOTE(review): make_request/2 is now destructured as
%% {Url, PreparedParams, Opts} and the swag call takes (Url, PreparedParams,
%% Opts), matching every other function in this module. The previous
%% {Host, Port, PreparedParams} shape appeared to be a leftover from an older
%% client-lib API and would fail with a badmatch if make_request/2 has a
%% single return shape -- confirm against capi_client_lib:make_request/2.
-spec cancel_payment(context(), binary(), binary(), binary()) -> ok | {error, term()}.
cancel_payment(Context, InvoiceID, PaymentID, Reason) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID
        },
        %% reason is normalised to a binary before being sent in the body
        body => #{<<"reason">> => genlib:to_binary(Reason)}
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:cancel_payment(Url, PreparedParams, Opts),
    case capi_client_lib:handle_response(Response) of
        {ok, _Body} -> ok;
        {error, Error} -> {error, Error}
    end.
%% @doc Captures a previously authorized payment; Request is the capture
%% parameters map sent as the request body. Returns `ok' on success.
%% NOTE(review): rewritten to destructure make_request/2 as
%% {Url, PreparedParams, Opts} and call the swag API with
%% (Url, PreparedParams, Opts), consistent with every other function in this
%% module; the previous {Host, Port, PreparedParams} pattern looked like
%% stale code from an older client-lib API -- confirm against
%% capi_client_lib:make_request/2.
-spec capture_payment(context(), map(), binary(), binary()) -> ok | {error, term()}.
capture_payment(Context, Request, InvoiceID, PaymentID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID
        },
        body => Request
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:capture_payment(Url, PreparedParams, Opts),
    case capi_client_lib:handle_response(Response) of
        {ok, _Body} -> ok;
        {error, Error} -> {error, Error}
    end.
%% @doc Lists all refunds of a payment.
-spec get_refunds(context(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_refunds(Context, InvoiceID, PaymentID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID
        }
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_refunds(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Fetches a single refund of a payment by its refund id.
-spec get_refund_by_id(context(), binary(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_refund_by_id(Context, InvoiceID, PaymentID, RefundID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID,
            <<"refundID">> => RefundID
        }
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_refund_by_id(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Looks a refund up by the caller-supplied external id
%% (query-string parameter, not a path binding).
-spec get_refund_by_external_id(context(), binary()) -> {ok, term()} | {error, term()}.
get_refund_by_external_id(Context, ExternalID) ->
    Params = #{
        qs_val => #{
            <<"externalID">> => ExternalID
        }
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_refund_by_external_id(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Creates a refund for a payment; Request is the refund spec map sent
%% as the request body.
-spec create_refund(context(), map(), binary(), binary()) -> {ok, term()} | {error, term()}.
create_refund(Context, Request, InvoiceID, PaymentID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID
        },
        body => Request
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:create_refund(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Lists all chargebacks of a payment.
-spec get_chargebacks(context(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_chargebacks(Context, InvoiceID, PaymentID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID
        }
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_chargebacks(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
%% @doc Fetches a single chargeback of a payment by its chargeback id.
-spec get_chargeback_by_id(context(), binary(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_chargeback_by_id(Context, InvoiceID, PaymentID, ChargebackID) ->
    Params = #{
        binding => #{
            <<"invoiceID">> => InvoiceID,
            <<"paymentID">> => PaymentID,
            <<"chargebackID">> => ChargebackID
        }
    },
    {Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
    Response = swag_client_payments_api:get_chargeback_by_id(Url, PreparedParams, Opts),
    capi_client_lib:handle_response(Response).
| null | https://raw.githubusercontent.com/rbkmoney/erlang_capi_v2/438d0a603475c57dddade8c419f0d70fdf86438d/apps/capi_client/src/capi_client_payments.erl | erlang | -module(capi_client_payments).
-export([get_payment_by_id/3]).
-export([get_payment_by_external_id/2]).
-export([get_payments/2]).
-export([create_payment/3]).
-export([cancel_payment/4]).
-export([capture_payment/4]).
-export([get_refunds/3]).
-export([get_refund_by_id/4]).
-export([get_refund_by_external_id/2]).
-export([get_chargebacks/3]).
-export([get_chargeback_by_id/4]).
-export([create_refund/4]).
-type context() :: capi_client_lib:context().
-spec get_payments(context(), binary()) -> {ok, term()} | {error, term()}.
get_payments(Context, InvoiceID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_payments(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec get_payment_by_id(context(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_payment_by_id(Context, InvoiceID, PaymentID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_payment_by_id(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec get_payment_by_external_id(context(), binary()) -> {ok, term()} | {error, term()}.
get_payment_by_external_id(Context, ExternalID) ->
Params = #{
qs_val => #{
<<"externalID">> => ExternalID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_payment_by_external_id(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec create_payment(context(), map(), binary()) -> {ok, term()} | {error, term()}.
create_payment(Context, Request, InvoiceID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID
},
body => Request
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:create_payment(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec cancel_payment(context(), binary(), binary(), binary()) -> ok | {error, term()}.
cancel_payment(Context, InvoiceID, PaymentID, Reason) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID
},
body => #{<<"reason">> => genlib:to_binary(Reason)}
},
{Host, Port, PreparedParams} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:cancel_payment(Host, Port, PreparedParams),
case capi_client_lib:handle_response(Response) of
{ok, _Body} -> ok;
{error, Error} -> {error, Error}
end.
-spec capture_payment(context(), map(), binary(), binary()) -> ok | {error, term()}.
capture_payment(Context, Request, InvoiceID, PaymentID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID
},
body => Request
},
{Host, Port, PreparedParams} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:capture_payment(Host, Port, PreparedParams),
case capi_client_lib:handle_response(Response) of
{ok, _Body} -> ok;
{error, Error} -> {error, Error}
end.
-spec get_refunds(context(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_refunds(Context, InvoiceID, PaymentID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_refunds(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec get_refund_by_id(context(), binary(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_refund_by_id(Context, InvoiceID, PaymentID, RefundID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID,
<<"refundID">> => RefundID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_refund_by_id(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec get_refund_by_external_id(context(), binary()) -> {ok, term()} | {error, term()}.
get_refund_by_external_id(Context, ExternalID) ->
Params = #{
qs_val => #{
<<"externalID">> => ExternalID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_refund_by_external_id(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec create_refund(context(), map(), binary(), binary()) -> {ok, term()} | {error, term()}.
create_refund(Context, Request, InvoiceID, PaymentID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID
},
body => Request
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:create_refund(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec get_chargebacks(context(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_chargebacks(Context, InvoiceID, PaymentID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_chargebacks(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
-spec get_chargeback_by_id(context(), binary(), binary(), binary()) -> {ok, term()} | {error, term()}.
get_chargeback_by_id(Context, InvoiceID, PaymentID, ChargebackID) ->
Params = #{
binding => #{
<<"invoiceID">> => InvoiceID,
<<"paymentID">> => PaymentID,
<<"chargebackID">> => ChargebackID
}
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_payments_api:get_chargeback_by_id(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
| |
e1226b3c4c0c3c04a45710f922701ebe4dae06affc0212c36798de4371353517 | REPROSEC/dolev-yao-star | Vale_Lib_Basic.ml | open Prims
| null | https://raw.githubusercontent.com/REPROSEC/dolev-yao-star/d97a8dd4d07f2322437f186e4db6a1f4d5ee9230/concrete/hacl-star-snapshot/ml/Vale_Lib_Basic.ml | ocaml | open Prims
| |
d129449e18ecc29ffd2074a078c92e7cedbd6684fa8460ecf66d9bd6adcd6c36 | instedd/planwise | subs.cljs | (ns planwise.client.providers-set.subs
(:require [re-frame.core :as rf]
[planwise.client.asdf :as asdf]
[planwise.client.utils :as utils]))
;; Raw list (asdf-wrapped) of provider sets.
(rf/reg-sub
 :providers-set/list
 (fn [db _]
   (get-in db [:providers-set :list])))
;; Provider sets shaped as {:value id :label name} options for a dropdown,
;; sorted by label, with a leading "None" option (value nil).
(rf/reg-sub
 :providers-set/dropdown-options
 (fn [db _]
   (let [providers (get-in db [:providers-set :list :value])]
     (->> providers
          (map (fn [provider-set]
                 (let [{:keys [id name]} provider-set]
                   {:value id :label name})))
          (sort-by :label)
          (into [{:value nil :label "None"}])))))
;; Current view state for the providers-set section.
(rf/reg-sub
 :providers-set/view-state
 (fn [db _]
   (get-in db [:providers-set :view-state])))
;; Last error recorded for the providers-set section.
(rf/reg-sub
 :providers-set/last-error
 (fn [db _]
   (get-in db [:providers-set :last-error])))
;; State of the "new provider set" form.
(rf/reg-sub
 :providers-set/new-provider-set-state
 (fn [db _]
   (get-in db [:providers-set :new-provider-set :state])))
;; Name entered in the "new provider set" form.
(rf/reg-sub
 :providers-set/new-provider-set-name
 (fn [db _]
   (get-in db [:providers-set :new-provider-set :name])))
;; JS File object selected in the "new provider set" form.
(rf/reg-sub
 :providers-set/new-provider-set-js-file
 (fn [db _]
   (get-in db [:providers-set :new-provider-set :js-file])))
;; Provider set currently selected for deletion.
;; NOTE(review): reads [:providers-set :selected-provider] although the sub
;; name says "delete-selected-provider-set" -- confirm the db key against the
;; event handlers that write it.
(rf/reg-sub
 :providers-set/delete-selected-provider-set
 (fn [db _]
   (get-in db [:providers-set :selected-provider])))
(:require [re-frame.core :as rf]
[planwise.client.asdf :as asdf]
[planwise.client.utils :as utils]))
(rf/reg-sub
:providers-set/list
(fn [db _]
(get-in db [:providers-set :list])))
(rf/reg-sub
:providers-set/dropdown-options
(fn [db _]
(let [providers (get-in db [:providers-set :list :value])]
(->> providers
(map (fn [provider-set]
(let [{:keys [id name]} provider-set]
{:value id :label name})))
(sort-by :label)
(into [{:value nil :label "None"}])))))
(rf/reg-sub
:providers-set/view-state
(fn [db _]
(get-in db [:providers-set :view-state])))
(rf/reg-sub
:providers-set/last-error
(fn [db _]
(get-in db [:providers-set :last-error])))
(rf/reg-sub
:providers-set/new-provider-set-state
(fn [db _]
(get-in db [:providers-set :new-provider-set :state])))
(rf/reg-sub
:providers-set/new-provider-set-name
(fn [db _]
(get-in db [:providers-set :new-provider-set :name])))
(rf/reg-sub
:providers-set/new-provider-set-js-file
(fn [db _]
(get-in db [:providers-set :new-provider-set :js-file])))
(rf/reg-sub
:providers-set/delete-selected-provider-set
(fn [db _]
(get-in db [:providers-set :selected-provider])))
| |
f037fabb1d650ddc34edcf529209527ff6f98b65a920e780e490cc990fd4e3ae | Eventuria/demonstration-gsd | Definitions.hs | module Eventuria.Libraries.CQRS.Write.CommandConsumption.Definitions where
import Control.Exception
import Streamly (SerialT)
import Eventuria.Commons.Logger.Core
import Eventuria.Libraries.PersistedStreamEngine.Interface.PersistedItem
import Eventuria.Libraries.PersistedStreamEngine.Interface.Read.Reading
import Eventuria.Libraries.PersistedStreamEngine.Interface.Write.Writing
import Eventuria.Libraries.CQRS.Write.Aggregate.Commands.Command
import Eventuria.Libraries.CQRS.Write.Aggregate.Ids.AggregateId
import Eventuria.Libraries.CQRS.Write.StreamRepository
import Eventuria.Libraries.CQRS.Write.CommandConsumption.CommandHandlingResult
-- | Folds one command-handling result into the (possibly absent) write model.
type ProjectWriteModel writeModel = Maybe writeModel -> CommandHandlingResult -> Maybe writeModel
-- | Handles one persisted command against the current write model.
type HandleCommand writeModel = Maybe writeModel -> (Persisted Command) -> IO (CommandHandlingResult)
-- | Streams write-model snapshots produced by consuming commands for one
-- aggregate (identified by a persisted AggregateId); each element is either
-- an exception or the model state.
-- NOTE(review): "Orchestratre" is a typo for "Orchestrate"; the name is part
-- of the exported interface, so it is kept as-is.
type OrchestratreCommandConsumptionForAggregate writeModel = Persisted AggregateId -> SerialT IO (Either SomeException (Maybe writeModel))
-- | Wires together the stream accessors, read/write capabilities, projection
-- and command handler into a per-aggregate consumption orchestration.
type GetAggregateCommandConsumptionOrchestration persistedStream writeModel =
    Logger ->
    GetCommandStream persistedStream ->
    GetCommandTransactionStream persistedStream ->
    Reading persistedStream ->
    Writing persistedStream ->
    ProjectWriteModel writeModel ->
    HandleCommand writeModel ->
    OrchestratreCommandConsumptionForAggregate writeModel
| null | https://raw.githubusercontent.com/Eventuria/demonstration-gsd/5c7692b310086bc172d3fd4e1eaf09ae51ea468f/src/Eventuria/Libraries/CQRS/Write/CommandConsumption/Definitions.hs | haskell | module Eventuria.Libraries.CQRS.Write.CommandConsumption.Definitions where
import Control.Exception
import Streamly (SerialT)
import Eventuria.Commons.Logger.Core
import Eventuria.Libraries.PersistedStreamEngine.Interface.PersistedItem
import Eventuria.Libraries.PersistedStreamEngine.Interface.Read.Reading
import Eventuria.Libraries.PersistedStreamEngine.Interface.Write.Writing
import Eventuria.Libraries.CQRS.Write.Aggregate.Commands.Command
import Eventuria.Libraries.CQRS.Write.Aggregate.Ids.AggregateId
import Eventuria.Libraries.CQRS.Write.StreamRepository
import Eventuria.Libraries.CQRS.Write.CommandConsumption.CommandHandlingResult
type ProjectWriteModel writeModel = Maybe writeModel -> CommandHandlingResult -> Maybe writeModel
type HandleCommand writeModel = Maybe writeModel -> (Persisted Command) -> IO (CommandHandlingResult)
type OrchestratreCommandConsumptionForAggregate writeModel = Persisted AggregateId -> SerialT IO (Either SomeException (Maybe writeModel))
type GetAggregateCommandConsumptionOrchestration persistedStream writeModel =
Logger ->
GetCommandStream persistedStream ->
GetCommandTransactionStream persistedStream ->
Reading persistedStream ->
Writing persistedStream ->
ProjectWriteModel writeModel ->
HandleCommand writeModel ->
OrchestratreCommandConsumptionForAggregate writeModel
| |
722131b7b5c8c1308d4431cdb10d045edfa4a869779144ca6bb16e85d78d19c0 | nspin/hs-arm | Types.hs | module Harm.Types
( module Harm.Types.W
, module Harm.Types.I
, viewI
, viewW
, module Harm.Types.Operands
) where
import Harm.Types.W
import Harm.Types.I
import Harm.Types.Operands
import GHC.TypeLits
-- TODO(nspin)
-- | View an unsigned word of width n as its signed counterpart.
-- NOTE(review): goes through 'fromIntegral', so the result depends on the
-- Num instances of 'W' and 'I' -- presumably a bit-width-preserving
-- reinterpretation; confirm against their definitions.
viewI :: KnownNat n => W n -> I n
viewI = fromIntegral
-- | Inverse direction: view a signed value as an unsigned word of width n.
viewW :: KnownNat n => I n -> W n
viewW = fromIntegral
| null | https://raw.githubusercontent.com/nspin/hs-arm/8f10870a4afbbba010e78bd98e452ba67adc34e0/harm/harm-types/src/Harm/Types.hs | haskell | TODO(nspin) | module Harm.Types
( module Harm.Types.W
, module Harm.Types.I
, viewI
, viewW
, module Harm.Types.Operands
) where
import Harm.Types.W
import Harm.Types.I
import Harm.Types.Operands
import GHC.TypeLits
viewI :: KnownNat n => W n -> I n
viewI = fromIntegral
viewW :: KnownNat n => I n -> W n
viewW = fromIntegral
|
0bc8dbec4851e1ac8ab6e0bef31cead48bcd464bbdf623d58eff0abeaeeab525 | borodust/alien-works | packages.lisp | (alien-works.utils:define-umbrella-package :alien-works
#:alien-works.memory
#:alien-works.math
#:alien-works.host
#:alien-works.audio
#:alien-works.graphics
#:alien-works.framework)
| null | https://raw.githubusercontent.com/borodust/alien-works/2c5e2368edb453480446640d0df828ad2be8e80e/src/packages.lisp | lisp | (alien-works.utils:define-umbrella-package :alien-works
#:alien-works.memory
#:alien-works.math
#:alien-works.host
#:alien-works.audio
#:alien-works.graphics
#:alien-works.framework)
| |
ee60d29b1a7657e2d0538bb8c8b62250e96dc9714dddd3bd7c0ad8c6eab6e09b | google/mlir-hs | PatternUtil.hs | Copyright 2021 Google LLC
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module MLIR.AST.PatternUtil
( pattern I32ArrayAttr
, pattern I64ArrayAttr
, pattern AffineMapArrayAttr
, DummyIx
) where
import Data.Traversable
import Data.Array
import MLIR.AST
import qualified MLIR.AST.Dialect.Affine as Affine
-- | Match an 'ArrayAttr' whose elements are all signed 32-bit integer
-- attributes, extracting their values; 'Nothing' otherwise.
unwrapI32ArrayAttr :: Attribute -> Maybe [Int]
unwrapI32ArrayAttr (ArrayAttr vals) = for vals \case
  IntegerAttr (IntegerType Signed 32) v -> Just v
  _ -> Nothing
unwrapI32ArrayAttr _ = Nothing
-- | Bidirectional pattern: matches via 'unwrapI32ArrayAttr', and constructs
-- an 'ArrayAttr' of signed 32-bit 'IntegerAttr's.
pattern I32ArrayAttr :: [Int] -> Attribute
pattern I32ArrayAttr vals <- (unwrapI32ArrayAttr -> Just vals)
  where I32ArrayAttr vals = ArrayAttr $ fmap (IntegerAttr (IntegerType Signed 32)) vals
-- | Same as 'unwrapI32ArrayAttr', but for signed 64-bit integer attributes.
unwrapI64ArrayAttr :: Attribute -> Maybe [Int]
unwrapI64ArrayAttr (ArrayAttr vals) = for vals \case
  IntegerAttr (IntegerType Signed 64) v -> Just v
  _ -> Nothing
unwrapI64ArrayAttr _ = Nothing
-- | Bidirectional pattern for arrays of signed 64-bit 'IntegerAttr's.
pattern I64ArrayAttr :: [Int] -> Attribute
pattern I64ArrayAttr vals <- (unwrapI64ArrayAttr -> Just vals)
  where I64ArrayAttr vals = ArrayAttr $ fmap (IntegerAttr (IntegerType Signed 64)) vals
-- | Match an 'ArrayAttr' whose elements are all 'AffineMapAttr's.
unwrapAffineMapArrayAttr :: Attribute -> Maybe [Affine.Map]
unwrapAffineMapArrayAttr (ArrayAttr vals) = for vals \case
  AffineMapAttr m -> Just m
  _ -> Nothing
unwrapAffineMapArrayAttr _ = Nothing
-- | Bidirectional pattern for arrays of affine-map attributes.
pattern AffineMapArrayAttr :: [Affine.Map] -> Attribute
pattern AffineMapArrayAttr vals <- (unwrapAffineMapArrayAttr -> Just vals)
  where AffineMapArrayAttr vals = ArrayAttr $ fmap AffineMapAttr vals
-- | An uninhabited index type (no constructors). Because no value of
-- 'DummyIx' can exist, the 'Ix' methods below are unreachable; they raise
-- errors only to satisfy the class.
data DummyIx
deriving instance Eq DummyIx
deriving instance Ord DummyIx
deriving instance Show DummyIx
instance Ix DummyIx where
  range _ = error "Invalid index"
  index _ _ = error "Invalid index"
  inRange _ _ = error "Invalid index"
| null | https://raw.githubusercontent.com/google/mlir-hs/7fa3a050b17ee94508a07fba8b32a312a967b8bd/src/MLIR/AST/PatternUtil.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2021 Google LLC
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
module MLIR.AST.PatternUtil
( pattern I32ArrayAttr
, pattern I64ArrayAttr
, pattern AffineMapArrayAttr
, DummyIx
) where
import Data.Traversable
import Data.Array
import MLIR.AST
import qualified MLIR.AST.Dialect.Affine as Affine
unwrapI32ArrayAttr :: Attribute -> Maybe [Int]
unwrapI32ArrayAttr (ArrayAttr vals) = for vals \case
IntegerAttr (IntegerType Signed 32) v -> Just v
_ -> Nothing
unwrapI32ArrayAttr _ = Nothing
pattern I32ArrayAttr :: [Int] -> Attribute
pattern I32ArrayAttr vals <- (unwrapI32ArrayAttr -> Just vals)
where I32ArrayAttr vals = ArrayAttr $ fmap (IntegerAttr (IntegerType Signed 32)) vals
unwrapI64ArrayAttr :: Attribute -> Maybe [Int]
unwrapI64ArrayAttr (ArrayAttr vals) = for vals \case
IntegerAttr (IntegerType Signed 64) v -> Just v
_ -> Nothing
unwrapI64ArrayAttr _ = Nothing
pattern I64ArrayAttr :: [Int] -> Attribute
pattern I64ArrayAttr vals <- (unwrapI64ArrayAttr -> Just vals)
where I64ArrayAttr vals = ArrayAttr $ fmap (IntegerAttr (IntegerType Signed 64)) vals
unwrapAffineMapArrayAttr :: Attribute -> Maybe [Affine.Map]
unwrapAffineMapArrayAttr (ArrayAttr vals) = for vals \case
AffineMapAttr m -> Just m
_ -> Nothing
unwrapAffineMapArrayAttr _ = Nothing
pattern AffineMapArrayAttr :: [Affine.Map] -> Attribute
pattern AffineMapArrayAttr vals <- (unwrapAffineMapArrayAttr -> Just vals)
where AffineMapArrayAttr vals = ArrayAttr $ fmap AffineMapAttr vals
data DummyIx
deriving instance Eq DummyIx
deriving instance Ord DummyIx
deriving instance Show DummyIx
instance Ix DummyIx where
range _ = error "Invalid index"
index _ _ = error "Invalid index"
inRange _ _ = error "Invalid index"
|
800bef0c98713ca1aed8194236199664d91d971f0677631ecfdbc84026ea095c | chr15m/c64core | 0-download-latest.cljs | (ns dl
(:require
["fs" :as fs]
["mkdirp$default" :as mkdirp]
["node-fetch$default" :as fetch]
[common :refer [log env bail]]
[nbb.core :refer [*file*]]))
(log *file* "Hello.")
;; Board identifier comes from the environment; abort with a message if unset.
(def board (or (env "PINTEREST_BOARD") (bail "PINTEREST_BOARD is not set.")))
;; NOTE(review): this URL looks like it lost its host prefix -- confirm the
;; full pins endpoint against the original source.
(def fresh-pins-url (str "/" board "/pins/"))
;; Ensure the output directory exists before writing.
(mkdirp "data")
(log *file* "Starting download.")
;; Fetch the board's pins as JSON and persist them pretty-printed to disk.
(-> (fetch fresh-pins-url)
    (.then #(.json %))
    (.then (fn [json]
             (fs/writeFileSync "data/latest.json" (js/JSON.stringify json nil 2))
             (log *file* "Done."))))
| null | https://raw.githubusercontent.com/chr15m/c64core/1a00c0de71172275d178d6d2465c45b16e9fd612/0-download-latest.cljs | clojure | (ns dl
(:require
["fs" :as fs]
["mkdirp$default" :as mkdirp]
["node-fetch$default" :as fetch]
[common :refer [log env bail]]
[nbb.core :refer [*file*]]))
(log *file* "Hello.")
(def board (or (env "PINTEREST_BOARD") (bail "PINTEREST_BOARD is not set.")))
(def fresh-pins-url (str "/" board "/pins/"))
(mkdirp "data")
(log *file* "Starting download.")
(-> (fetch fresh-pins-url)
(.then #(.json %))
(.then (fn [json]
(fs/writeFileSync "data/latest.json" (js/JSON.stringify json nil 2))
(log *file* "Done."))))
| |
9b9d240fb686259465360afee27ef6cffbb9d6b7c253e0bfea4ae2bff3c77761 | mikelevins/folio2 | functions-functions.lisp | ;;;; ***********************************************************************
;;;;
;;;; Name: functions.lisp
;;;; Project: folio2 - Functional idioms for Common Lisp
;;;; Purpose: functional tools
;;;; Author: mikel evins
Copyright : 2015 by mikel evins
;;;;
;;;; ***********************************************************************
(in-package :net.bardcode.folio2.functions)
;;; function flip
;;;
;;; (flip f1) => f2
;;; ---------------------------------------------------------------------
;;; given an argument f1 of the form (lambda (a b)...), returns a
;;; function of the form (lambda (b a) ...). Except for the order of the
;;; arguments a and b, the new function is identical to the old.
(defun flip (f)
  ;; Return a binary function that applies F with its arguments exchanged.
  (lambda (y x)
    (funcall f x y)))
;;; function function?
;;;
;;; (function? thing) => Boolean
;;; ---------------------------------------------------------------------
;;; returns a true value if THING is a function (that is, it's a function,
;;; but not s generic function or method)
;; default: arbitrary objects are not plain functions
(defmethod function? (thing)
  (declare (ignore thing))
  nil)
;; ordinary functions qualify
(defmethod function? ((thing function))
  (declare (ignore thing))
  t)
;; generic functions are deliberately excluded from FUNCTION?
(defmethod function? ((thing generic-function))
  (declare (ignore thing))
  nil)
;; methods are deliberately excluded from FUNCTION?
(defmethod function? ((thing method))
  (declare (ignore thing))
  nil)
;;; function functional?
;;;
;;; (functional? thing) => Boolean
;;; ---------------------------------------------------------------------
;;; returns a true value if THING is a function, a generic function,
;;; or a method
;; default: arbitrary objects are not functional
(defmethod functional? (thing)
  (declare (ignore thing))
  nil)
(defmethod functional? ((thing function))
  (declare (ignore thing))
  t)
(defmethod functional? ((thing generic-function))
  (declare (ignore thing))
  t)
(defmethod functional? ((thing method))
  (declare (ignore thing))
  t)
;;; function generic-function?
;;;
;;; (generic-function? thing) => Boolean
;;; ---------------------------------------------------------------------
;;; returns a true value if THING is a generic function
;; default: only instances of GENERIC-FUNCTION qualify
(defmethod generic-function? (thing)
  (declare (ignore thing))
  nil)
(defmethod generic-function? ((thing generic-function))
  (declare (ignore thing))
  t)
;;; function method?
;;;
;;; (method? thing) => Boolean
;;; ---------------------------------------------------------------------
;;; returns a true value if THING is a method
(defmethod method? (thing)
  (declare (ignore thing))
  nil)
;; plain functions are not methods
(defmethod method? ((thing function))
  (declare (ignore thing))
  nil)
(defmethod method? ((thing method))
  (declare (ignore thing))
  t)
;;; function partial
;;;
;;; (partial f1 arg1..argk) => f2
;;; ---------------------------------------------------------------------
;;; partially applies the function f1 to the arguments arg1..argk,
;;; returning a left section of f1. In other words, if f1 accepts
;;; arguments a, b, c, and d, then (partial f1 0 1) returns an f2 in
;;; which a is bound to 0 and b is bound to 1. f2 then requires
;;; two arguments. Evaluating (f2 2 3) binds c and d to 2 and 3,
;;; respectively, then computes the same result as if we had
;;; originally called (f1 0 1 2 3).
;; delegates to alexandria:curry
(defun partial (function &rest arguments)
  (apply #'alexandria:curry function arguments))
;;; function rpartial
;;;
;;; (rpartial f1 arg1..argk) => f2
;;; ---------------------------------------------------------------------
;;; partially applies the function f1 to the arguments arg1..argk,
;;; returning a right section of f1. In other words, if f1 accepts
;;; arguments a, b, c, and d, then (rpartial f1 2 3) returns an f2 in
;;; which c is bound to 2 and d is bound to 3. f2 then requires
;;; two arguments. Evaluating (f2 0 1) binds a and b to 0 and 1,
;;; respectively, then computes the same result as if we had
;;; originally called (f1 0 1 2 3).
;; delegates to alexandria:rcurry
(defun rpartial (function &rest arguments)
  (apply #'alexandria:rcurry function arguments))
| null | https://raw.githubusercontent.com/mikelevins/folio2/a96052f78f0e0358376a498c6351342ece6a9b7b/src/functions-functions.lisp | lisp | ***********************************************************************
Name: functions.lisp
Project: folio2 - Functional idioms for Common Lisp
Purpose: functional tools
Author: mikel evins
***********************************************************************
function flip
(flip f1) => f2
---------------------------------------------------------------------
given an argument f1 of the form (lambda (a b)...), returns a
function of the form (lambda (b a) ...). Except for the order of the
arguments a and b, the new function is identical to the old.
function function?
(function? thing) => Boolean
---------------------------------------------------------------------
returns a true value if THING is a function (that is, it's a function,
but not s generic function or method)
function functional?
(functional? thing) => Boolean
---------------------------------------------------------------------
returns a true value if THING is a function, a generic function,
or a method
function generic-function?
(generic-function? thing) => Boolean
---------------------------------------------------------------------
returns a true value if THING is a generic function
function method?
(method? thing) => Boolean
---------------------------------------------------------------------
returns a true value if THING is a method
function partial
(partial f1 arg1..argk) => f2
---------------------------------------------------------------------
partially applies the function f1 to the arguments arg1..argk,
arguments a, b, c, and d, then (partial f1 0 1) returns an f2 in
respectively, then computes the same result as if we had
originally called (f1 0 1 2 3).
function rpartial
(rpartial f1 arg1..argk) => f2
---------------------------------------------------------------------
partially applies the function f1 to the arguments arg1..argk,
arguments a, b, c, and d, then (rpartial f1 2 3) returns an f2 in
respectively, then computes the same result as if we had
originally called (f1 0 1 2 3). | Copyright : 2015 by mikel evins
(in-package :net.bardcode.folio2.functions)
(defun flip (f) (lambda (x y) (funcall f y x)))
(defmethod function? (thing)
(declare (ignore thing))
nil)
(defmethod function? ((thing function))
(declare (ignore thing))
t)
(defmethod function? ((thing generic-function))
(declare (ignore thing))
nil)
(defmethod function? ((thing method))
(declare (ignore thing))
nil)
(defmethod functional? (thing)
(declare (ignore thing))
nil)
(defmethod functional? ((thing function))
(declare (ignore thing))
t)
(defmethod functional? ((thing generic-function))
(declare (ignore thing))
t)
(defmethod functional? ((thing method))
(declare (ignore thing))
t)
(defmethod generic-function? (thing)
(declare (ignore thing))
nil)
(defmethod generic-function? ((thing generic-function))
(declare (ignore thing))
t)
(defmethod method? (thing)
(declare (ignore thing))
nil)
(defmethod method? ((thing function))
(declare (ignore thing))
nil)
(defmethod method? ((thing method))
(declare (ignore thing))
t)
returning a left section of f1 . In other words , if accepts
which a is bound to 0 and b is bound to 1 . f2 then requires
two arguments . Evaluating ( f2 2 3 ) binds c and d to 2 and 3 ,
(defun partial (function &rest arguments)
(apply #'alexandria:curry function arguments))
returning a right section of f1 . In other words , if accepts
which c is bound to 2 and d is bound to 3 . f2 then requires
two arguments . Evaluating ( f2 0 1 ) binds a and b to 0 and 1 ,
(defun rpartial (function &rest arguments)
(apply #'alexandria:rcurry function arguments))
|
cdddb6c4e5546367e58e8450348545031eaea871b592c10413268242e84edf30 | bitnomial/prometheus | Push.hs | {-# LANGUAGE OverloadedStrings #-}
module System.Metrics.Prometheus.Http.Push (
pushMetrics,
parseURI,
) where
import Control.Concurrent (threadDelay)
import Control.Monad (forever)
import Data.ByteString.Builder (toLazyByteString)
import Data.Map (foldMapWithKey)
import Data.Text (Text, unpack)
import Network.HTTP.Client (
Request (..),
RequestBody (..),
getUri,
httpNoBody,
parseRequest,
requestBody,
requestFromURI,
requestHeaders,
)
import Network.HTTP.Client.TLS (newTlsManager)
import Network.HTTP.Types (hContentType, methodPut)
import Network.URI (
URI (..),
URIAuth,
nullURI,
)
import System.Metrics.Prometheus.Encode.Text (encodeMetrics)
import System.Metrics.Prometheus.MetricId (Labels (..))
import System.Metrics.Prometheus.Registry (RegistrySample)
-- | Parse a URI string into a 'URI', via http-client's request parser.
--
-- Returns 'Nothing' when the string is not a valid request URI, e.g.
--
-- @
-- parseURI "http://example.com:8080" == Just \<URI for http://example.com:8080\>
-- @
parseURI :: String -> Maybe URI
parseURI s = getUri <$> parseRequest s
-- | Periodically PUT the latest metrics sample to a Prometheus
-- PushGateway. This action never returns: it loops forever, pushing a
-- fresh sample and then sleeping for the given interval.
--
-- Calls 'error' if the gateway URI has no authority component.
pushMetrics ::
    -- | PushGateway base 'URI', including port (e.g. @parseURI "http://localhost:8080"@)
    URI ->
    -- | Job name
    Text ->
    -- | Label set to use as a grouping key for metrics
    Labels ->
    -- | Push frequency, in microseconds
    Int ->
    -- | Action to get latest metrics
    IO RegistrySample ->
    IO ()
pushMetrics gatewayURI jobName labels frequencyMicros getSample = do
    manager <- newTlsManager
    -- The authority (host[:port]) is mandatory; fail fast otherwise.
    authority <- maybe (error "Invalid URI Authority") pure (uriAuthority gatewayURI)
    baseRequest <- requestFromURI (buildUri (uriScheme gatewayURI) authority jobName labels)
    forever $ do
        sample <- getSample
        _ <- httpNoBody (toPutRequest baseRequest sample) manager
        threadDelay frequencyMicros
  where
    -- Turn the base request into a PUT carrying the encoded sample, using
    -- the Prometheus text exposition format content type.
    toPutRequest req sample =
        req
            { method = methodPut
            , requestBody = RequestBodyLBS (toLazyByteString (encodeMetrics sample))
            , requestHeaders = [(hContentType, "text/plain; version=0.0.4")]
            }
-- | Construct the PushGateway target URI:
-- @\<scheme\>//\<authority\>/metrics/job/\<job\>[/\<label-key\>/\<label-value\>...]@
buildUri :: String -> URIAuth -> Text -> Labels -> URI
buildUri scheme gatewayName jobName (Labels ls) =
    nullURI
        { uriScheme = scheme
        , uriAuthority = Just gatewayName
        , uriPath = path
        }
  where
    path = "/metrics/job/" ++ unpack jobName ++ foldMapWithKey segment ls
    -- Each grouping-key label becomes a "/key/value" path segment.
    segment k v = "/" ++ unpack k ++ "/" ++ unpack v
| null | https://raw.githubusercontent.com/bitnomial/prometheus/b7d3160e2a4d971fe03bef6b43fe8bf15a9c9349/src/System/Metrics/Prometheus/Http/Push.hs | haskell | # LANGUAGE OverloadedStrings #
| Parses a uri such that
@
parseURI ""
===
Just (URI "https:" "//example.com"
@
| Job name
| Label set to use as a grouping key for metrics
| Action to get latest metrics |
module System.Metrics.Prometheus.Http.Push (
pushMetrics,
parseURI,
) where
import Control.Concurrent (threadDelay)
import Control.Monad (forever)
import Data.ByteString.Builder (toLazyByteString)
import Data.Map (foldMapWithKey)
import Data.Text (Text, unpack)
import Network.HTTP.Client (
Request (..),
RequestBody (..),
getUri,
httpNoBody,
parseRequest,
requestBody,
requestFromURI,
requestHeaders,
)
import Network.HTTP.Client.TLS (newTlsManager)
import Network.HTTP.Types (hContentType, methodPut)
import Network.URI (
URI (..),
URIAuth,
nullURI,
)
import System.Metrics.Prometheus.Encode.Text (encodeMetrics)
import System.Metrics.Prometheus.MetricId (Labels (..))
import System.Metrics.Prometheus.Registry (RegistrySample)
parseURI :: String -> Maybe URI
parseURI = fmap getUri . parseRequest
pushMetrics ::
| PushGateway URI name , including port number ( ex : @parseUri :8080@ )
URI ->
Text ->
Labels ->
| push frequency
Int ->
IO RegistrySample ->
IO ()
pushMetrics gatewayURI jobName labels frequencyMicros getSample = do
manager <- newTlsManager
gn <- maybe (error "Invalid URI Authority") pure gatewayName
requestUri <- requestFromURI $ buildUri scheme gn jobName labels
forever $ getSample >>= flip httpNoBody manager . request requestUri >> threadDelay frequencyMicros
where
URI scheme gatewayName _ _ _ = gatewayURI
request req sample =
req
{ method = methodPut
, requestBody = RequestBodyLBS . toLazyByteString $ encodeMetrics sample
, requestHeaders = [(hContentType, "text/plain; version=0.0.4")]
}
buildUri :: String -> URIAuth -> Text -> Labels -> URI
buildUri scheme gatewayName jobName (Labels ls) =
nullURI
{ uriScheme = scheme
, uriAuthority = Just gatewayName
, uriPath = "/metrics/job/" ++ unpack jobName ++ foldMapWithKey labelPath ls
}
where
labelPath k v = "/" ++ unpack k ++ "/" ++ unpack v
|
35e74cb67182ae26201bcd79628042b825e566fae507dc2119613a1fceaa74f7 | lambdamikel/DLMAPS | galen-1a.lisp | (define-primitive-role R1)
(define-primitive-role R2)
(define-primitive-role R3)
(define-primitive-role R4)
(define-primitive-role R5)
(define-primitive-role R6)
(define-primitive-role R7)
(define-primitive-role R8)
(define-primitive-role R9)
(define-primitive-role R10)
(define-primitive-role R11)
(define-primitive-role R12)
(define-primitive-role R13)
(define-primitive-role R14)
(define-primitive-role R15)
(define-primitive-role R16)
(define-primitive-role R17)
(define-primitive-role R18)
(define-primitive-role R19)
(define-primitive-role R20)
(define-primitive-role R21)
(define-primitive-role R22)
(define-primitive-role R23)
(define-primitive-role R24)
(define-primitive-role R25)
(define-primitive-role R26)
(define-primitive-role R27)
(define-primitive-role R28)
(define-primitive-role R29)
(define-primitive-role R30)
(define-primitive-role R31)
(define-primitive-role R32)
(define-primitive-role R33)
(define-primitive-role R34)
(define-primitive-role R35)
(define-primitive-role R36)
(define-primitive-role R37)
(define-primitive-role R38)
(define-primitive-role R39)
(define-primitive-role R40)
(define-primitive-role R41)
(define-primitive-role R42)
(define-primitive-role R43)
(define-primitive-role R44)
(define-primitive-role R45)
(define-primitive-role R46)
(define-primitive-role R47)
(define-primitive-role R48)
(define-primitive-role R49)
(define-primitive-role R50)
(define-primitive-role R51)
(define-primitive-role R52)
(define-primitive-role R53)
(define-primitive-role R54)
(define-primitive-role R55)
(define-primitive-role R56)
(define-primitive-role R57)
(define-primitive-role R58)
(define-primitive-role R59)
(define-primitive-role R60)
(define-primitive-role R61)
(define-primitive-role R62)
(define-primitive-role R63)
(define-primitive-role R64)
(define-primitive-role R65)
(define-primitive-role R66)
(define-primitive-role R67)
(define-primitive-role R68)
(define-primitive-role R69)
(define-primitive-role R70)
(define-primitive-role R71)
(define-primitive-role R72)
(define-primitive-role R73)
(define-primitive-role R74)
(define-primitive-role R75)
(define-primitive-role R76)
(define-primitive-role R77)
(define-primitive-role R78)
(define-primitive-role R79)
(define-primitive-role R80)
(define-primitive-role R81)
(define-primitive-role R82)
(define-primitive-role R83)
(define-primitive-role R84)
(define-primitive-role R85)
(define-primitive-role R86)
(define-primitive-role R87)
(define-primitive-role R88)
(define-primitive-role R89)
(define-primitive-role R90)
(define-primitive-role R91)
(define-primitive-role R92)
(define-primitive-role R93)
(define-primitive-role R94)
(define-primitive-role R95)
(define-primitive-role R96)
(define-primitive-role R97)
(define-primitive-role R98)
(define-primitive-role R99)
(define-primitive-role R100)
(define-primitive-role R101)
(define-primitive-role R102)
(define-primitive-role R103)
(define-primitive-role R104)
(define-primitive-role R105)
(define-primitive-role R106)
(define-primitive-role R107)
(define-primitive-role R108)
(define-primitive-role R109)
(define-primitive-role R110)
(define-primitive-role R111)
(define-primitive-role R112)
(define-primitive-role R113)
(define-primitive-role R114)
(define-primitive-role R115)
(define-primitive-role R116)
(define-primitive-role R117)
(define-primitive-role R118)
(define-primitive-role R119)
(define-primitive-role R120)
(define-primitive-role R121)
(define-primitive-role R122)
(define-primitive-role R123)
(define-primitive-role R124)
(define-primitive-role R125)
(define-primitive-role R126)
(define-primitive-role R127)
(define-primitive-role R128)
(define-primitive-role R129)
(define-primitive-role R130)
(define-primitive-role R131)
(define-primitive-role R132)
(define-primitive-role R133)
(define-primitive-role R134)
(define-primitive-role R135)
(define-primitive-role R136)
(define-primitive-role R137)
(define-primitive-role R138)
(define-primitive-role R139)
(define-primitive-role R140)
(define-primitive-role R141)
(define-primitive-role R142)
(define-primitive-role R143)
(define-primitive-role R144)
(define-primitive-role R145)
(define-primitive-role R146)
(define-primitive-role R147)
(define-primitive-role R148)
(define-primitive-role R149)
(define-primitive-role R150)
(define-primitive-role R151)
(define-primitive-role R152)
(define-primitive-role R153)
(define-primitive-role R154)
(define-primitive-role R155)
(define-primitive-role R156)
(define-primitive-role R157)
(define-primitive-role R158)
(define-primitive-role R159)
(define-primitive-role R160)
(define-primitive-role R161)
(define-primitive-role R162)
(define-primitive-role R163)
(define-primitive-role R164)
(define-primitive-role R165)
(define-primitive-role R166)
(define-primitive-role R167)
(define-primitive-role R168)
(define-primitive-role R169)
(define-primitive-role R170)
(define-primitive-role R171)
(define-primitive-role R172)
(define-primitive-role R173)
(define-primitive-role R174)
(define-primitive-role R175)
(define-primitive-role R176)
(define-primitive-role R177)
(define-primitive-role R178)
(define-primitive-role R179)
(define-primitive-role R180)
(define-primitive-role R181)
(define-primitive-role R182)
(define-primitive-role R183)
(define-primitive-role R184)
(define-primitive-role R185)
(define-primitive-role R186)
(define-primitive-role R187)
(define-primitive-role R188)
(define-primitive-role R189)
(define-primitive-role R190)
(define-primitive-role R191)
(define-primitive-role R192)
(define-primitive-role R193)
(define-primitive-role R194)
(define-primitive-role R195)
(define-primitive-role R196)
(define-primitive-role R197)
(define-primitive-role R198)
(define-primitive-role R199)
(define-primitive-role R200)
(define-primitive-role R201)
(define-primitive-role R202)
(define-primitive-role R203)
(define-primitive-role R204)
(define-primitive-role R205)
(define-primitive-role R206)
(define-primitive-role R207)
(define-primitive-role R208)
(define-primitive-role R209)
(define-primitive-role R210)
(define-primitive-role R211)
(define-primitive-role R212)
(define-primitive-role R213)
(define-primitive-role R214)
(define-primitive-role R215)
(define-primitive-role R216)
(define-primitive-role R217)
(define-primitive-role R218)
(define-primitive-role R219)
(define-primitive-role R220)
(define-primitive-role R221)
(define-primitive-role R222)
(define-primitive-role R223)
(define-primitive-role R224)
(define-primitive-role R225)
(define-primitive-role R226)
(define-primitive-role R227)
(define-primitive-role R228)
(define-primitive-role R229)
(define-primitive-role R230)
(define-primitive-role R231)
(define-primitive-role R232)
(define-primitive-role R233)
(define-primitive-role R234)
(define-primitive-role R235)
(define-primitive-role R236)
(define-primitive-role R237)
(define-primitive-role R238)
(define-primitive-role R239)
(define-primitive-role R240)
(define-primitive-role R241)
(define-primitive-role R242)
(define-primitive-role R243)
(define-primitive-role R244)
(define-primitive-role R245)
(define-primitive-role R246)
(define-primitive-role R247)
(define-primitive-role R248)
(define-primitive-role R249)
(define-primitive-role R250)
(define-primitive-role R251)
(define-primitive-role R252)
(define-primitive-role R253)
(define-primitive-role R254)
(define-primitive-role R255)
(define-primitive-role R256)
(define-primitive-role R257)
(define-primitive-role R258)
(define-primitive-role R259)
(define-primitive-role R260)
(define-primitive-role R261)
(define-primitive-role R262)
(define-primitive-role R263)
(define-primitive-role R264)
(define-primitive-role R265)
(define-primitive-role R266)
(define-primitive-role R267)
(define-primitive-role R268)
(define-primitive-role R269)
(define-primitive-role R270)
(define-primitive-role R271)
(define-primitive-role R272)
(define-primitive-role R273)
(define-primitive-role R274)
(define-primitive-role R275)
(define-primitive-role R276)
(define-primitive-role R277)
(define-primitive-role R278)
(define-primitive-role R279)
(define-primitive-role R280)
(define-primitive-role R281)
(define-primitive-role R282)
(define-primitive-role R283)
(define-primitive-role R284)
(define-primitive-role R285)
(define-primitive-role R286)
(define-primitive-role R287)
(define-primitive-role R288)
(define-primitive-role R289)
(define-primitive-role R290)
(define-primitive-role R291)
(define-primitive-role R292)
(define-primitive-role R293)
(define-primitive-role R294)
(define-primitive-role R295)
(define-primitive-role R296)
(define-primitive-role R297)
(define-primitive-role R298)
(define-primitive-role R299)
(define-primitive-role R300)
(define-primitive-role R301)
(define-primitive-role R302)
(define-primitive-role R303)
(define-primitive-role R304)
(define-primitive-role R305)
(define-primitive-role R306)
(define-primitive-role R307)
(define-primitive-role R308)
(define-primitive-role R309)
(define-primitive-role R310)
(define-primitive-role R311)
(define-primitive-role R312)
(define-primitive-role R313)
(define-primitive-role R314)
(define-primitive-role R315)
(define-primitive-role R316)
(define-primitive-role R317)
(define-primitive-role R318)
(define-primitive-role R319)
(define-primitive-role R320)
(define-primitive-role R321)
(define-primitive-role R322)
(define-primitive-role R323)
(define-primitive-role R324)
(define-primitive-role R325)
(define-primitive-role R326)
(define-primitive-role R327)
(define-primitive-role R328)
(define-primitive-role R329)
(define-primitive-role R330)
(define-primitive-role R331)
(define-primitive-role R332)
(define-primitive-role R333)
(define-primitive-role R334)
(define-primitive-role R335)
(define-primitive-role R336)
(define-primitive-role R337)
(define-primitive-role R338)
(define-primitive-role R339)
(define-primitive-role R340)
(define-primitive-role R341)
(define-primitive-role R342)
(define-primitive-role R343)
(define-primitive-role R344)
(define-primitive-role R345)
(define-primitive-role R346)
(define-primitive-role R347)
(define-primitive-role R348)
(define-primitive-role R349)
(define-primitive-role R350)
(define-primitive-role R351)
(define-primitive-role R352)
(define-primitive-role R353)
(define-primitive-role R354)
(define-primitive-role R355)
(define-primitive-role R356)
(define-primitive-role R357)
(define-primitive-role R358)
(define-primitive-role R359)
(define-primitive-role R360)
(define-primitive-role R361)
(define-primitive-role R362)
(define-primitive-role R363)
(define-primitive-role R364)
(define-primitive-role R365)
(define-primitive-role R366)
(define-primitive-role R367)
(define-primitive-role R368)
(define-primitive-role R369)
(define-primitive-role R370)
(define-primitive-role R371)
(define-primitive-role R372)
(define-primitive-role R373)
(define-primitive-role R374)
(define-primitive-role R375)
(define-primitive-role R376)
(define-primitive-role R377)
(define-primitive-role R378)
(define-primitive-role R379)
(define-primitive-role R380)
(define-primitive-role R381)
(define-primitive-role R382)
(define-primitive-role R383)
(define-primitive-role R384)
(define-primitive-role R385)
(define-primitive-role R386)
(define-primitive-role R387)
(define-primitive-role R388)
(define-primitive-role R389)
(define-primitive-role R390)
(define-primitive-role R391)
(define-primitive-role R392)
(define-primitive-role R393)
(define-primitive-role R394)
(define-primitive-role R395)
(define-primitive-role R396)
(define-primitive-role R397)
(define-primitive-role R398)
(define-primitive-role R399)
(define-primitive-role R400)
(define-primitive-role R401)
(define-primitive-role R402)
(define-primitive-role R403)
(define-primitive-role R404)
(define-primitive-role R405)
(define-primitive-role R406)
(define-primitive-role R407)
(define-primitive-role R408)
(define-primitive-role R409)
(define-primitive-role R410)
(define-primitive-role R411)
(define-primitive-role R412)
(define-primitive-role R413)
(define-primitive-concept C1)
(define-primitive-concept C2)
(define-primitive-concept C3)
(define-primitive-concept C4 C1)
(define-primitive-concept C5 C4)
(define-primitive-concept C6 C5)
(define-primitive-concept C7 C5)
(define-primitive-concept C8 C7)
(define-primitive-concept C9 C5)
(define-primitive-concept C10 C9)
(define-primitive-concept C11 C10)
(define-primitive-concept C12 C11)
(define-primitive-concept C13 C12)
(define-primitive-concept C14 C12)
(define-primitive-concept C15 C9)
(define-primitive-concept C16 C15)
(define-primitive-concept C17 C16)
(define-primitive-concept C18 C17)
(define-primitive-concept C19 C18)
(define-primitive-concept C20 (AND C4 (OR (ALL R390 (NOT C13)) (SOME R398 (AND C14 (SOME R202 C19))))))
(define-primitive-concept C21 C9)
(define-primitive-concept C22 C21)
(define-primitive-concept C23 C22)
(define-primitive-concept C24 C23)
(define-primitive-concept C25 C24)
(define-primitive-concept C26 C23)
(define-primitive-concept C27 C26)
(define-primitive-concept C28 C24)
(define-primitive-concept C29 C11)
(define-primitive-concept C30 C16)
(define-primitive-concept C31 C30)
(define-primitive-concept C32 (AND C20 (OR (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R184 (NOT C27)) (SOME R182 C27))
(OR (ALL R180 (NOT C28))
(ALL R366 (OR (NOT C29) (ALL R202 (NOT C31)))) (SOME R178 C25))))
(define-primitive-concept C33 (AND C4 (OR (ALL R404 (NOT C8)) (SOME R406 C8))
(OR (ALL R129 (NOT C32)) (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R129 (NOT C32)) (ALL R184 (NOT C27)) (SOME R182 C27))))
(define-primitive-concept C34 C33)
(define-primitive-concept C35 C34)
(define-primitive-concept C36 C10)
(define-primitive-concept C37 C36)
(define-primitive-concept C38 C37)
(define-primitive-concept C39 C33)
(define-primitive-concept C40 C36)
(define-primitive-concept C41 C40)
(define-primitive-concept C42 C41)
(define-primitive-concept C43 C15)
(define-primitive-concept C44 C43)
(define-primitive-concept C45 C44)
(define-primitive-concept C46 C45)
(define-primitive-concept C47 C46)
(define-primitive-concept C48 C21)
(define-primitive-concept C49 C48)
(define-primitive-concept C50 C49)
(define-primitive-concept C51 (AND C39
(OR (ALL R268 (OR (NOT C42) (ALL R202 (NOT C47)))) (SOME R280 C50))))
(define-primitive-concept C52 (AND C51 (SOME R280 C50)))
(define-primitive-concept C53 (AND C52 (SOME R268 (AND C42 (SOME R202 C47)))))
(define-primitive-concept C54 C53)
(define-primitive-concept C55 C54)
(define-primitive-concept C56 (AND C4 (OR (ALL R404 (NOT C8)) (SOME R406 C8))
(OR (ALL R129 (NOT C32)) (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R129 (NOT C32)) (ALL R184 (NOT C27)) (SOME R182 C27))))
(define-primitive-concept C57 C49)
(define-primitive-concept C58 C57)
(define-primitive-concept C59 (AND C56 (SOME R280 C58)))
(define-primitive-concept C60 (AND C59
(OR (ALL R129 (OR (NOT C32) (ALL R184 (NOT C27)))) (SOME R184 C27))
(OR (ALL R129 (OR (NOT C32) (ALL R180 (NOT C25)))) (SOME R180 C25))
(OR (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R184 (NOT C27)) (SOME R182 C27))))
(define-primitive-concept C61 C60)
(define-primitive-concept C62 C61)
(define-primitive-concept C63 C10)
(define-primitive-concept C64 C63)
(define-primitive-concept C65 C15)
(define-primitive-concept C66 C65)
(define-primitive-concept C67 C66)
(define-concept C68 (AND C62 (SOME R348 (AND C64 (SOME R202 C67)))))
| null | https://raw.githubusercontent.com/lambdamikel/DLMAPS/7f8dbb9432069d41e6a7d9c13dc5b25602ad35dc/src/prover/KBs/galen-1a.lisp | lisp | (define-primitive-role R1)
(define-primitive-role R2)
(define-primitive-role R3)
(define-primitive-role R4)
(define-primitive-role R5)
(define-primitive-role R6)
(define-primitive-role R7)
(define-primitive-role R8)
(define-primitive-role R9)
(define-primitive-role R10)
(define-primitive-role R11)
(define-primitive-role R12)
(define-primitive-role R13)
(define-primitive-role R14)
(define-primitive-role R15)
(define-primitive-role R16)
(define-primitive-role R17)
(define-primitive-role R18)
(define-primitive-role R19)
(define-primitive-role R20)
(define-primitive-role R21)
(define-primitive-role R22)
(define-primitive-role R23)
(define-primitive-role R24)
(define-primitive-role R25)
(define-primitive-role R26)
(define-primitive-role R27)
(define-primitive-role R28)
(define-primitive-role R29)
(define-primitive-role R30)
(define-primitive-role R31)
(define-primitive-role R32)
(define-primitive-role R33)
(define-primitive-role R34)
(define-primitive-role R35)
(define-primitive-role R36)
(define-primitive-role R37)
(define-primitive-role R38)
(define-primitive-role R39)
(define-primitive-role R40)
(define-primitive-role R41)
(define-primitive-role R42)
(define-primitive-role R43)
(define-primitive-role R44)
(define-primitive-role R45)
(define-primitive-role R46)
(define-primitive-role R47)
(define-primitive-role R48)
(define-primitive-role R49)
(define-primitive-role R50)
(define-primitive-role R51)
(define-primitive-role R52)
(define-primitive-role R53)
(define-primitive-role R54)
(define-primitive-role R55)
(define-primitive-role R56)
(define-primitive-role R57)
(define-primitive-role R58)
(define-primitive-role R59)
(define-primitive-role R60)
(define-primitive-role R61)
(define-primitive-role R62)
(define-primitive-role R63)
(define-primitive-role R64)
(define-primitive-role R65)
(define-primitive-role R66)
(define-primitive-role R67)
(define-primitive-role R68)
(define-primitive-role R69)
(define-primitive-role R70)
(define-primitive-role R71)
(define-primitive-role R72)
(define-primitive-role R73)
(define-primitive-role R74)
(define-primitive-role R75)
(define-primitive-role R76)
(define-primitive-role R77)
(define-primitive-role R78)
(define-primitive-role R79)
(define-primitive-role R80)
(define-primitive-role R81)
(define-primitive-role R82)
(define-primitive-role R83)
(define-primitive-role R84)
(define-primitive-role R85)
(define-primitive-role R86)
(define-primitive-role R87)
(define-primitive-role R88)
(define-primitive-role R89)
(define-primitive-role R90)
(define-primitive-role R91)
(define-primitive-role R92)
(define-primitive-role R93)
(define-primitive-role R94)
(define-primitive-role R95)
(define-primitive-role R96)
(define-primitive-role R97)
(define-primitive-role R98)
(define-primitive-role R99)
(define-primitive-role R100)
(define-primitive-role R101)
(define-primitive-role R102)
(define-primitive-role R103)
(define-primitive-role R104)
(define-primitive-role R105)
(define-primitive-role R106)
(define-primitive-role R107)
(define-primitive-role R108)
(define-primitive-role R109)
(define-primitive-role R110)
(define-primitive-role R111)
(define-primitive-role R112)
(define-primitive-role R113)
(define-primitive-role R114)
(define-primitive-role R115)
(define-primitive-role R116)
(define-primitive-role R117)
(define-primitive-role R118)
(define-primitive-role R119)
(define-primitive-role R120)
(define-primitive-role R121)
(define-primitive-role R122)
(define-primitive-role R123)
(define-primitive-role R124)
(define-primitive-role R125)
(define-primitive-role R126)
(define-primitive-role R127)
(define-primitive-role R128)
(define-primitive-role R129)
(define-primitive-role R130)
(define-primitive-role R131)
(define-primitive-role R132)
(define-primitive-role R133)
(define-primitive-role R134)
(define-primitive-role R135)
(define-primitive-role R136)
(define-primitive-role R137)
(define-primitive-role R138)
(define-primitive-role R139)
(define-primitive-role R140)
(define-primitive-role R141)
(define-primitive-role R142)
(define-primitive-role R143)
(define-primitive-role R144)
(define-primitive-role R145)
(define-primitive-role R146)
(define-primitive-role R147)
(define-primitive-role R148)
(define-primitive-role R149)
(define-primitive-role R150)
(define-primitive-role R151)
(define-primitive-role R152)
(define-primitive-role R153)
(define-primitive-role R154)
(define-primitive-role R155)
(define-primitive-role R156)
(define-primitive-role R157)
(define-primitive-role R158)
(define-primitive-role R159)
(define-primitive-role R160)
(define-primitive-role R161)
(define-primitive-role R162)
(define-primitive-role R163)
(define-primitive-role R164)
(define-primitive-role R165)
(define-primitive-role R166)
(define-primitive-role R167)
(define-primitive-role R168)
(define-primitive-role R169)
(define-primitive-role R170)
(define-primitive-role R171)
(define-primitive-role R172)
(define-primitive-role R173)
(define-primitive-role R174)
(define-primitive-role R175)
(define-primitive-role R176)
(define-primitive-role R177)
(define-primitive-role R178)
(define-primitive-role R179)
(define-primitive-role R180)
(define-primitive-role R181)
(define-primitive-role R182)
(define-primitive-role R183)
(define-primitive-role R184)
(define-primitive-role R185)
(define-primitive-role R186)
(define-primitive-role R187)
(define-primitive-role R188)
(define-primitive-role R189)
(define-primitive-role R190)
(define-primitive-role R191)
(define-primitive-role R192)
(define-primitive-role R193)
(define-primitive-role R194)
(define-primitive-role R195)
(define-primitive-role R196)
(define-primitive-role R197)
(define-primitive-role R198)
(define-primitive-role R199)
(define-primitive-role R200)
(define-primitive-role R201)
(define-primitive-role R202)
(define-primitive-role R203)
(define-primitive-role R204)
(define-primitive-role R205)
(define-primitive-role R206)
(define-primitive-role R207)
(define-primitive-role R208)
(define-primitive-role R209)
(define-primitive-role R210)
(define-primitive-role R211)
(define-primitive-role R212)
(define-primitive-role R213)
(define-primitive-role R214)
(define-primitive-role R215)
(define-primitive-role R216)
(define-primitive-role R217)
(define-primitive-role R218)
(define-primitive-role R219)
(define-primitive-role R220)
(define-primitive-role R221)
(define-primitive-role R222)
(define-primitive-role R223)
(define-primitive-role R224)
(define-primitive-role R225)
(define-primitive-role R226)
(define-primitive-role R227)
(define-primitive-role R228)
(define-primitive-role R229)
(define-primitive-role R230)
(define-primitive-role R231)
(define-primitive-role R232)
(define-primitive-role R233)
(define-primitive-role R234)
(define-primitive-role R235)
(define-primitive-role R236)
(define-primitive-role R237)
(define-primitive-role R238)
(define-primitive-role R239)
(define-primitive-role R240)
(define-primitive-role R241)
(define-primitive-role R242)
(define-primitive-role R243)
(define-primitive-role R244)
(define-primitive-role R245)
(define-primitive-role R246)
(define-primitive-role R247)
(define-primitive-role R248)
(define-primitive-role R249)
(define-primitive-role R250)
(define-primitive-role R251)
(define-primitive-role R252)
(define-primitive-role R253)
(define-primitive-role R254)
(define-primitive-role R255)
(define-primitive-role R256)
(define-primitive-role R257)
(define-primitive-role R258)
(define-primitive-role R259)
(define-primitive-role R260)
(define-primitive-role R261)
(define-primitive-role R262)
(define-primitive-role R263)
(define-primitive-role R264)
(define-primitive-role R265)
(define-primitive-role R266)
(define-primitive-role R267)
(define-primitive-role R268)
(define-primitive-role R269)
(define-primitive-role R270)
(define-primitive-role R271)
(define-primitive-role R272)
(define-primitive-role R273)
(define-primitive-role R274)
(define-primitive-role R275)
(define-primitive-role R276)
(define-primitive-role R277)
(define-primitive-role R278)
(define-primitive-role R279)
(define-primitive-role R280)
(define-primitive-role R281)
(define-primitive-role R282)
(define-primitive-role R283)
(define-primitive-role R284)
(define-primitive-role R285)
(define-primitive-role R286)
(define-primitive-role R287)
(define-primitive-role R288)
(define-primitive-role R289)
(define-primitive-role R290)
(define-primitive-role R291)
(define-primitive-role R292)
(define-primitive-role R293)
(define-primitive-role R294)
(define-primitive-role R295)
(define-primitive-role R296)
(define-primitive-role R297)
(define-primitive-role R298)
(define-primitive-role R299)
(define-primitive-role R300)
(define-primitive-role R301)
(define-primitive-role R302)
(define-primitive-role R303)
(define-primitive-role R304)
(define-primitive-role R305)
(define-primitive-role R306)
(define-primitive-role R307)
(define-primitive-role R308)
(define-primitive-role R309)
(define-primitive-role R310)
(define-primitive-role R311)
(define-primitive-role R312)
(define-primitive-role R313)
(define-primitive-role R314)
(define-primitive-role R315)
(define-primitive-role R316)
(define-primitive-role R317)
(define-primitive-role R318)
(define-primitive-role R319)
(define-primitive-role R320)
(define-primitive-role R321)
(define-primitive-role R322)
(define-primitive-role R323)
(define-primitive-role R324)
(define-primitive-role R325)
(define-primitive-role R326)
(define-primitive-role R327)
(define-primitive-role R328)
(define-primitive-role R329)
(define-primitive-role R330)
(define-primitive-role R331)
(define-primitive-role R332)
(define-primitive-role R333)
(define-primitive-role R334)
(define-primitive-role R335)
(define-primitive-role R336)
(define-primitive-role R337)
(define-primitive-role R338)
(define-primitive-role R339)
(define-primitive-role R340)
(define-primitive-role R341)
(define-primitive-role R342)
(define-primitive-role R343)
(define-primitive-role R344)
(define-primitive-role R345)
(define-primitive-role R346)
(define-primitive-role R347)
(define-primitive-role R348)
(define-primitive-role R349)
(define-primitive-role R350)
(define-primitive-role R351)
(define-primitive-role R352)
(define-primitive-role R353)
(define-primitive-role R354)
(define-primitive-role R355)
(define-primitive-role R356)
(define-primitive-role R357)
(define-primitive-role R358)
(define-primitive-role R359)
(define-primitive-role R360)
(define-primitive-role R361)
(define-primitive-role R362)
(define-primitive-role R363)
(define-primitive-role R364)
(define-primitive-role R365)
(define-primitive-role R366)
(define-primitive-role R367)
(define-primitive-role R368)
(define-primitive-role R369)
(define-primitive-role R370)
(define-primitive-role R371)
(define-primitive-role R372)
(define-primitive-role R373)
(define-primitive-role R374)
(define-primitive-role R375)
(define-primitive-role R376)
(define-primitive-role R377)
(define-primitive-role R378)
(define-primitive-role R379)
(define-primitive-role R380)
(define-primitive-role R381)
(define-primitive-role R382)
(define-primitive-role R383)
(define-primitive-role R384)
(define-primitive-role R385)
(define-primitive-role R386)
(define-primitive-role R387)
(define-primitive-role R388)
(define-primitive-role R389)
(define-primitive-role R390)
(define-primitive-role R391)
(define-primitive-role R392)
(define-primitive-role R393)
(define-primitive-role R394)
(define-primitive-role R395)
(define-primitive-role R396)
(define-primitive-role R397)
(define-primitive-role R398)
(define-primitive-role R399)
(define-primitive-role R400)
(define-primitive-role R401)
(define-primitive-role R402)
(define-primitive-role R403)
(define-primitive-role R404)
(define-primitive-role R405)
(define-primitive-role R406)
(define-primitive-role R407)
(define-primitive-role R408)
(define-primitive-role R409)
(define-primitive-role R410)
(define-primitive-role R411)
(define-primitive-role R412)
(define-primitive-role R413)
(define-primitive-concept C1)
(define-primitive-concept C2)
(define-primitive-concept C3)
(define-primitive-concept C4 C1)
(define-primitive-concept C5 C4)
(define-primitive-concept C6 C5)
(define-primitive-concept C7 C5)
(define-primitive-concept C8 C7)
(define-primitive-concept C9 C5)
(define-primitive-concept C10 C9)
(define-primitive-concept C11 C10)
(define-primitive-concept C12 C11)
(define-primitive-concept C13 C12)
(define-primitive-concept C14 C12)
(define-primitive-concept C15 C9)
(define-primitive-concept C16 C15)
(define-primitive-concept C17 C16)
(define-primitive-concept C18 C17)
(define-primitive-concept C19 C18)
(define-primitive-concept C20 (AND C4 (OR (ALL R390 (NOT C13)) (SOME R398 (AND C14 (SOME R202 C19))))))
(define-primitive-concept C21 C9)
(define-primitive-concept C22 C21)
(define-primitive-concept C23 C22)
(define-primitive-concept C24 C23)
(define-primitive-concept C25 C24)
(define-primitive-concept C26 C23)
(define-primitive-concept C27 C26)
(define-primitive-concept C28 C24)
(define-primitive-concept C29 C11)
(define-primitive-concept C30 C16)
(define-primitive-concept C31 C30)
(define-primitive-concept C32 (AND C20 (OR (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R184 (NOT C27)) (SOME R182 C27))
(OR (ALL R180 (NOT C28))
(ALL R366 (OR (NOT C29) (ALL R202 (NOT C31)))) (SOME R178 C25))))
(define-primitive-concept C33 (AND C4 (OR (ALL R404 (NOT C8)) (SOME R406 C8))
(OR (ALL R129 (NOT C32)) (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R129 (NOT C32)) (ALL R184 (NOT C27)) (SOME R182 C27))))
(define-primitive-concept C34 C33)
(define-primitive-concept C35 C34)
(define-primitive-concept C36 C10)
(define-primitive-concept C37 C36)
(define-primitive-concept C38 C37)
(define-primitive-concept C39 C33)
(define-primitive-concept C40 C36)
(define-primitive-concept C41 C40)
(define-primitive-concept C42 C41)
(define-primitive-concept C43 C15)
(define-primitive-concept C44 C43)
(define-primitive-concept C45 C44)
(define-primitive-concept C46 C45)
(define-primitive-concept C47 C46)
(define-primitive-concept C48 C21)
(define-primitive-concept C49 C48)
(define-primitive-concept C50 C49)
(define-primitive-concept C51 (AND C39
(OR (ALL R268 (OR (NOT C42) (ALL R202 (NOT C47)))) (SOME R280 C50))))
(define-primitive-concept C52 (AND C51 (SOME R280 C50)))
(define-primitive-concept C53 (AND C52 (SOME R268 (AND C42 (SOME R202 C47)))))
(define-primitive-concept C54 C53)
(define-primitive-concept C55 C54)
(define-primitive-concept C56 (AND C4 (OR (ALL R404 (NOT C8)) (SOME R406 C8))
(OR (ALL R129 (NOT C32)) (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R129 (NOT C32)) (ALL R184 (NOT C27)) (SOME R182 C27))))
(define-primitive-concept C57 C49)
(define-primitive-concept C58 C57)
(define-primitive-concept C59 (AND C56 (SOME R280 C58)))
(define-primitive-concept C60 (AND C59
(OR (ALL R129 (OR (NOT C32) (ALL R184 (NOT C27)))) (SOME R184 C27))
(OR (ALL R129 (OR (NOT C32) (ALL R180 (NOT C25)))) (SOME R180 C25))
(OR (ALL R180 (NOT C25)) (SOME R178 C25))
(OR (ALL R184 (NOT C27)) (SOME R182 C27))))
(define-primitive-concept C61 C60)
(define-primitive-concept C62 C61)
(define-primitive-concept C63 C10)
(define-primitive-concept C64 C63)
(define-primitive-concept C65 C15)
(define-primitive-concept C66 C65)
(define-primitive-concept C67 C66)
(define-concept C68 (AND C62 (SOME R348 (AND C64 (SOME R202 C67)))))
| |
27ff47b206123bc1202eafbddd21562da0bcad2b2804e88be2cfdfaaee1275ec | lisp-mirror/clpm | install.lisp | ;;;; clpm install
;;;;
This software is part of CLPM . See README.org for more information . See
;;;; LICENSE for license information.
(uiop:define-package #:clpm-cli/commands/install
(:use #:cl
#:clpm-cli/common-args
#:clpm-cli/interface-defs)
(:import-from #:adopt)
(:import-from #:clpm))
(in-package #:clpm-cli/commands/install)
(adopt:define-string *help-text*
"Install systems or projects into a context.")
(adopt:define-string *manual-text*
"Install systems or projects into a context. This command first computes a
list of releases that need to be installed to satisfy the user's constraints and
then prompts the user to approve before taking action. When computing the
releases, preference is given to reusing already installed releases instead of
upgrading them.
Constraints can be placed on the version to install, the ref to install (from a
VCS), or the source to install from. These constraints can be specified
once (using -v, --source, or -r) in which case they are used as the defaults, or
they can be provided as part of the dependency specifier (which takes precedence
over the flags).
Version constraints consist of a symbol (one of =, !=, >=, >, <=, <) followed by
a version string. If no symbol is provided, = is assumed. Multiple version
constraints can be conjoined by separating them with a comma.
A project or system specifier consists of the following pieces, in this order:
+ The project or system name (required).
+ A colon (:) followed by a version constraint (optional, mutually exclusive
with ref specifier).
+ An at sign (@) followed by a VCS ref specifier (optional, mutually exclusive
with version constraint).
+ Two colons (::) followed by a source name (optional).")
;; Command-line options for `clpm install`, built with the ADOPT library.
;; Each option's :reduce function controls how repeated occurrences on the
;; command line combine: #'ADOPT:LAST keeps only the final value, while
;; (ADOPT:FLIP #'CONS) accumulates every occurrence into a list.
;; -v VERSION -- default version constraint applied to every requested
;; object unless the specifier itself carries one (see *manual-text*).
(defparameter *option-install-version*
(adopt:make-option
:install-version
:short #\v
:help "A version constraint applied to all objects (unless overridden)"
:parameter "VERSION"
:reduce #'adopt:last))
;; --source SOURCE-NAME -- default source to install from.
(defparameter *option-install-source*
(adopt:make-option
:install-source
:long "source"
:parameter "SOURCE-NAME"
:help "The name of the source to install from"
:reduce #'adopt:last))
;; --asd ASD-PATH -- may be repeated; all paths are collected into a list.
(defparameter *option-install-asd*
(adopt:make-option
:install-asds
:long "asd"
:parameter "ASD-PATH"
:help "Install an .asd file into the context"
:reduce (adopt:flip #'cons)))
;; -p/--project PROJECT-SPECIFIER -- may be repeated; collected into a list.
(defparameter *option-install-project*
(adopt:make-option
:install-projects
:short #\p
:long "project"
:parameter "PROJECT-SPECIFIER"
:help "Install a project instead of a system"
:reduce (adopt:flip #'cons)))
;; -n/--no-deps -- boolean flag; its presence sets the value to T.
(defparameter *option-install-no-deps*
(adopt:make-option
:install-no-deps
:short #\n
:long "no-deps"
:help "Do not install dependencies"
:reduce (constantly t)))
;; -r/--ref REF -- install from source control at the given VCS ref.
(defparameter *option-install-ref*
(adopt:make-option
:install-ref
:short #\r
:long "ref"
:parameter "REF"
:help "Install the project from source control, at the given ref"
:reduce #'adopt:last))
;; The assembled ADOPT interface for `clpm install`: usage banner, help and
;; manual text, plus the full option set -- *GROUP-COMMON* (presumably the
;; option group shared by all CLPM commands; defined in
;; clpm-cli/common-args) followed by the install-specific options.
(defparameter *install-ui*
(adopt:make-interface
:name "clpm install"
:summary "Common Lisp Project Manager Install"
:usage "install [options] SYSTEM-SPECIFIER*"
:help *help-text*
:manual *manual-text*
:contents (list *group-common*
*option-install-version*
*option-install-source*
*option-install-project*
*option-install-asd*
*option-install-no-deps*
*option-yes*
*option-local*
*option-install-ref*
*option-context*)))
;; Entry point for `clpm install`.  ARGS holds the positional system
;; specifiers; OPTIONS is the hash table of parsed option values produced
;; via the interface above.  All real work is delegated to CLPM:INSTALL.
;; NOTE(review): the trailing T appears to signal successful command
;; completion -- confirm against DEFINE-CLI-COMMAND's contract.
(define-cli-command (("install") *install-ui*) (args options)
;; Pull each parsed option out of the OPTIONS hash table.
(let* ((version-string (gethash :install-version options))
(system-specifiers args)
(project-specifiers (gethash :install-projects options))
(asd-files (gethash :install-asds options))
(source-name (gethash :install-source options))
(no-deps-p (gethash :install-no-deps options))
(yes-p (gethash :yes options))
(ref (gethash :install-ref options)))
;; MERGE-PATHNAMES resolves relative .asd paths against
;; *DEFAULT-PATHNAME-DEFAULTS* before handing them to CLPM.
(clpm:install :projects project-specifiers
:systems system-specifiers
:asds (mapcar #'merge-pathnames asd-files)
:version version-string
:source source-name
:no-deps-p no-deps-p
:ref ref
:validate (make-diff-validate-fun :yesp yes-p)
:save-context-p t)
t))
| null | https://raw.githubusercontent.com/lisp-mirror/clpm/ad9a704fcdd0df5ce30ead106706ab6cc5fb3e5b/cli/commands/install.lisp | lisp | clpm install
LICENSE for license information. | This software is part of CLPM . See README.org for more information . See
(uiop:define-package #:clpm-cli/commands/install
(:use #:cl
#:clpm-cli/common-args
#:clpm-cli/interface-defs)
(:import-from #:adopt)
(:import-from #:clpm))
(in-package #:clpm-cli/commands/install)
(adopt:define-string *help-text*
"Install systems or projects into a context.")
(adopt:define-string *manual-text*
"Install systems or projects into a context. This command first computes a
list of releases that need to be installed to satisfy the user's constraints and
then prompts the user to approve before taking action. When computing the
releases, preference is given to reusing already installed releases instead of
upgrading them.
Constraints can be placed on the version to install, the ref to install (from a
VCS), or the source to install from. These constraints can be specified
once (using -v, --source, or -r) in which case they are used as the defaults, or
they can be provided as part of the dependency specifier (which takes precedence
over the flags).
Version constraints consist of a symbol (one of =, !=, >=, >, <=, <) followed by
a version string. If no symbol is provided, = is assumed. Multiple version
constraints can be conjoined by separating them with a comma.
A project or system specifier consists of the following pieces, in this order:
+ The project or system name (required).
+ A colon (:) followed by a version constraint (optional, mutually exclusive
with ref specifier).
+ An at sign (@) followed by a VCS ref specifier (optional, mutually exclusive
with version constraint).
+ Two colons (::) followed by a source name (optional).")
(defparameter *option-install-version*
(adopt:make-option
:install-version
:short #\v
:help "A version constraint applied to all objects (unless overridden)"
:parameter "VERSION"
:reduce #'adopt:last))
(defparameter *option-install-source*
(adopt:make-option
:install-source
:long "source"
:parameter "SOURCE-NAME"
:help "The name of the source to install from"
:reduce #'adopt:last))
(defparameter *option-install-asd*
(adopt:make-option
:install-asds
:long "asd"
:parameter "ASD-PATH"
:help "Install an .asd file into the context"
:reduce (adopt:flip #'cons)))
(defparameter *option-install-project*
(adopt:make-option
:install-projects
:short #\p
:long "project"
:parameter "PROJECT-SPECIFIER"
:help "Install a project instead of a system"
:reduce (adopt:flip #'cons)))
(defparameter *option-install-no-deps*
(adopt:make-option
:install-no-deps
:short #\n
:long "no-deps"
:help "Do not install dependencies"
:reduce (constantly t)))
(defparameter *option-install-ref*
(adopt:make-option
:install-ref
:short #\r
:long "ref"
:parameter "REF"
:help "Install the project from source control, at the given ref"
:reduce #'adopt:last))
(defparameter *install-ui*
(adopt:make-interface
:name "clpm install"
:summary "Common Lisp Project Manager Install"
:usage "install [options] SYSTEM-SPECIFIER*"
:help *help-text*
:manual *manual-text*
:contents (list *group-common*
*option-install-version*
*option-install-source*
*option-install-project*
*option-install-asd*
*option-install-no-deps*
*option-yes*
*option-local*
*option-install-ref*
*option-context*)))
(define-cli-command (("install") *install-ui*) (args options)
(let* ((version-string (gethash :install-version options))
(system-specifiers args)
(project-specifiers (gethash :install-projects options))
(asd-files (gethash :install-asds options))
(source-name (gethash :install-source options))
(no-deps-p (gethash :install-no-deps options))
(yes-p (gethash :yes options))
(ref (gethash :install-ref options)))
(clpm:install :projects project-specifiers
:systems system-specifiers
:asds (mapcar #'merge-pathnames asd-files)
:version version-string
:source source-name
:no-deps-p no-deps-p
:ref ref
:validate (make-diff-validate-fun :yesp yes-p)
:save-context-p t)
t))
|
a64c0d6ab4b27b2eb75e35cbfd8c925b564c5b4104e041a084934f675a0f6f1d | theam/haskell-do | State.hs |
- Copyright ( c ) 2017 The Agile Monkeys S.L. < >
-
- Licensed under the Apache License , Version 2.0 ( the " License " ) ;
- you may not use this file except in compliance with the License .
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing , software
- distributed under the License is distributed on an " AS IS " BASIS ,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
- See the License for the specific language governing permissions and
- limitations under the License .
- Copyright (c) 2017 The Agile Monkeys S.L. <>
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module HaskellDo.CodeMirror.State where
import Transient.Move
import HaskellDo.CodeMirror.Types
import Data.DateTime
-- | The state the editor component starts in: an empty buffer whose
-- last-save time is 'startOfTime' (presumably used as a "never saved
-- yet" sentinel -- confirm against how 'lastSave' is consumed).
initialState :: State
initialState =
    State { content  = ""
          , lastSave = startOfTime
          }
-- | Advance the editor state in response to an action.
--
-- 'NewContent' replaces the buffer text; 'lastSave' is left untouched.
-- NOTE(review): this match is exhaustive only if 'NewContent' is the
-- sole 'Action' constructor -- confirm in HaskellDo.CodeMirror.Types.
--
-- (Fix: the original wrapped the single 'return' in a redundant 'do'
-- block; the 'do' adds nothing and is removed.)
update :: Action -> State -> Cloud State
update (NewContent newContent) state =
    return (state { content = newContent })
| null | https://raw.githubusercontent.com/theam/haskell-do/f339e57859d308437a72800bda08f96d0de12982/src/common/HaskellDo/CodeMirror/State.hs | haskell |
- Copyright ( c ) 2017 The Agile Monkeys S.L. < >
-
- Licensed under the Apache License , Version 2.0 ( the " License " ) ;
- you may not use this file except in compliance with the License .
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing , software
- distributed under the License is distributed on an " AS IS " BASIS ,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
- See the License for the specific language governing permissions and
- limitations under the License .
- Copyright (c) 2017 The Agile Monkeys S.L. <>
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module HaskellDo.CodeMirror.State where
import Transient.Move
import HaskellDo.CodeMirror.Types
import Data.DateTime
initialState :: State
initialState = State
{ content = ""
, lastSave = startOfTime
}
update :: Action -> State -> Cloud State
update (NewContent newContent) state = do
return (state { content = newContent } )
| |
78245e1f1bf6bbc7062048fbe7c5298012ac955b787a7be5134a50ea63cb99d3 | ygmpkk/house | Draw.hs | module Graphics.HGL.Win32.Draw
( DrawFun, drawGraphic, drawBufferedGraphic, drawBufferedGraphicBC
, drawBufferedGraphic
, saveGraphic
, withBitmap
, setDefaults
, createCompatibleBitmap, withCompatibleBitmap, withCompatibleDC, withDC
, createBitmapFile
) where
import Graphics.HGL.Win32.Types
import qualified Graphics.HGL.Win32.Utilities as Utils
import qualified System.Win32 as Win32
-- | A window redraw callback: given the window handle and a device
-- context for it, render the window's contents.
type DrawFun = Win32.HWND -> Win32.HDC -> IO ()
-- Signatures for the public drawing entry points (bodies follow below).
drawGraphic :: Draw () -> DrawFun
drawBufferedGraphic :: Draw () -> DrawFun
drawBufferedGraphicBC :: Win32.COLORREF -> Draw () -> DrawFun
saveGraphic :: String -> Point -> Draw () -> IO ()
createBitmapFile :: Win32.HDC -> String -> Bitmap -> IO ()
-- Signatures for the resource-management helpers around the Win32 GDI
-- API ('with*' variants guarantee release via bracketing).
createCompatibleDC :: Win32.HDC -> IO Win32.HDC
deleteDC :: Win32.HDC -> IO ()
createCompatibleBitmap :: Win32.HDC -> Win32.Int32 -> Win32.Int32 -> IO Bitmap
withCompatibleDC :: Win32.HDC -> (Win32.HDC -> IO a) -> IO a
withBitmap :: Win32.HDC -> Bitmap -> IO a -> IO a
withDC :: Maybe Win32.HWND -> (Win32.HDC -> IO a) -> IO a
withCompatibleBitmap :: Win32.HDC -> Win32.Int32 -> Win32.Int32 -> (Bitmap -> IO a) -> IO a
----------------------------------------------------------------
-- | Draw straight onto the window's device context (no buffering).
-- The client area is first cleared: 'backgroundColor' is a raster
-- operation ('Win32.bLACKNESS'), so the bitBlt paints it black
-- regardless of the source pixels.
drawGraphic p hwnd hdc = do
  (width, height) <- windowSize hwnd
  Win32.bitBlt hdc 0 0 width height hdc 0 0 backgroundColor
  setDefaults hdc
  unDraw p hdc
-- | Double-buffered drawing with the default (black) background.
drawBufferedGraphic = drawBufferedGraphicBC backgroundColor

-- | Double-buffered drawing with an explicit background raster-op:
-- render into an off-screen buffer first, then copy the finished image
-- to the window with a single 'Win32.bitBlt', so partially drawn
-- frames never appear on screen.
drawBufferedGraphicBC bgColor p hwnd hdc = do
  (width, height) <- windowSize hwnd
  withBuffer (Just hwnd) width height bgColor $ \ memDC _bmp -> do
    setDefaults memDC
    unDraw p memDC
    Win32.bitBlt hdc 0 0 width height memDC 0 0 Win32.sRCCOPY
-- | Render a picture of the given size into an off-screen buffer and
-- write the result to disk as a Windows .BMP file.
saveGraphic fileName size p =
  withBuffer Nothing w h backgroundColor $ \ memDC bmp -> do
    setDefaults memDC
    unDraw p memDC
    createBitmapFile memDC fileName bmp
  where
    (w, h) = fromPoint size

-- | Raster operation used to clear drawing surfaces: fills with black.
backgroundColor = Win32.bLACKNESS
-- | Put a device context into the library's default drawing state:
-- stock white pen, stock white brush, and white transparent text.
setDefaults :: Win32.HDC -> IO ()
setDefaults hdc = do
  setDefaultPen hdc
  setDefaultBrush hdc
  setDefaultText hdc
-- | Select the stock white pen into the device context.
setDefaultPen :: Win32.HDC -> IO ()
setDefaultPen hdc = do
  pen <- Win32.getStockPen Win32.wHITE_PEN
  _ <- Win32.selectPen hdc pen
  return ()

-- | Select the stock white brush into the device context.
setDefaultBrush :: Win32.HDC -> IO ()
setDefaultBrush hdc = do
  brush <- Win32.getStockBrush Win32.wHITE_BRUSH
  _ <- Win32.selectBrush hdc brush
  return ()
-- | Default text attributes: white text, drawn in transparent
-- background mode.
setDefaultText :: Win32.HDC -> IO ()
setDefaultText hdc = do
  Win32.setTextColor hdc white
  -- Setting the background colour is deliberately omitted: with the
  -- mode set to transparent it should be redundant, and the original
  -- author found it crashed some examples (possibly setting a colour
  -- is not allowed while the mode is transparent).  The stray tokens
  -- "hdc black" left behind by that removal did not type-check, so the
  -- omitted call is preserved here only as a comment:
  --   Win32.setBkColor hdc black
  Win32.setBkMode hdc Win32.tRANSPARENT
  return ()
-- | Pure white (RGB 255,255,255); the default text colour.
white :: Win32.COLORREF
white = Win32.rgb 255 255 255
-- | Pure black (RGB 0,0,0).
black :: Win32.COLORREF
black = Win32.rgb 0 0 0
----------------------------------------------------------------
-- Allocate an off-screen drawing surface -- a memory DC with a bitmap
-- of the requested size selected into it, cleared using 'bgColor' as a
-- raster operation (e.g. 'Win32.bLACKNESS' paints it black) -- and
-- hand it to the continuation.
--
-- The bitmap must be created compatible with the DC we will finally
-- blit to, not with the memory DC: a freshly created memory DC starts
-- out as a 0x0 surface that is 1 bit deep (it looks monochrome) and
-- only gains real depth once a sufficiently deep bitmap is selected
-- into it.  That is why the bitmap is created from the screen/window
-- DC before the memory DC is entered, rather than the other way round.
withBuffer :: Maybe Win32.HWND -> Win32.Int32 -> Win32.Int32 -> Win32.COLORREF -> (Win32.HDC -> Bitmap -> IO a) -> IO a
withBuffer mbhwnd w h bgColor act =
  withDC mbhwnd $ \ screenDC ->
    withCompatibleBitmap screenDC w h $ \ bmp ->
      withCompatibleDC screenDC $ \ memDC ->
        withBitmap memDC bmp $ do
          Win32.bitBlt memDC 0 0 w h memDC 0 0 bgColor
          act memDC bmp
----------------------------------------------------------------
-- | Width and height of a window's client area, in pixels.
windowSize :: Win32.HWND -> IO (Win32.LONG, Win32.LONG)
windowSize hwnd = do
  (left, top, right, bottom) <- Win32.getClientRect hwnd
  return (right - left, bottom - top)
Note that this DC is only " 1 bit " in size - you have to call
-- "createCompatibleBitmap" before it is big enough to hold the bitmap
-- you want.
createCompatibleDC hdc = Win32.createCompatibleDC (Just hdc)
deleteDC = Win32.deleteDC
createCompatibleBitmap hdc w h = do
bmp <- Win32.createCompatibleBitmap hdc w h
return (MkBitmap bmp)
withBitmap hdc bmp = Utils.bracket_ (selectBitmap hdc bmp) (selectBitmap hdc)
withDC mhwnd = Utils.bracket (Win32.getDC mhwnd) (Win32.releaseDC mhwnd)
Note that this DC is only " 1 bit " in size - you have to call
-- "createCompatibleBitmap" before it is big enough to hold the bitmap
-- you want.
withCompatibleDC hdc = Utils.bracket (createCompatibleDC hdc) deleteDC
withCompatibleBitmap hdc w h =
Utils.bracket (createCompatibleBitmap hdc w h) deleteBitmap
deleteBitmap (MkBitmap bmp) = Win32.deleteBitmap bmp
-- | Select a bitmap into a DC and return the bitmap that was selected
-- before, so callers such as 'withBitmap' can restore it on exit.
--
-- BUG FIX: the original returned the *new* bitmap ('bmp') instead of
-- the previous one ('bmp'', which was bound but never used), so the
-- bracket in 'withBitmap' re-selected the new bitmap during cleanup
-- instead of restoring the old one.
selectBitmap hdc (MkBitmap new) = do
  old <- Win32.selectBitmap hdc new
  return (MkBitmap old)
-- | Write the given bitmap to disk as a Windows .BMP file.
createBitmapFile hdc fileName (MkBitmap raw) =
  Win32.createBMPFile fileName raw hdc
----------------------------------------------------------------
-- End
---------------------------------------------------------------- | null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/ghc-6.2/libraries/HGL/Graphics/HGL/Win32/Draw.hs | haskell | --------------------------------------------------------------
We omit this because it should be redundant (since mode is transparent)
And because it causes some examples to crash.
Maybe you're not allowed to set a color if the mode is transparent?
--------------------------------------------------------------
Note that we create a bitmap which is compatible with the hdc
onto which we are going to zap the Graphic. It might seem that
it would be enough for it to be compatible with the buffer -
but, sadly, this isn't the case. The problem is that the buffer
(ie it looks monochrome); it only becomes n-bits deep when you
select in a bitmap which is n-bits deep.
withCompatibleBitmap w h $ \ bitmap ->
withCompatibleDC $ \ hdc ->
--------------------------------------------------------------
Get the width and height of a window's client area, in pixels.
"createCompatibleBitmap" before it is big enough to hold the bitmap
you want.
"createCompatibleBitmap" before it is big enough to hold the bitmap
you want.
--------------------------------------------------------------
End
-------------------------------------------------------------- | module Graphics.HGL.Win32.Draw
( DrawFun, drawGraphic, drawBufferedGraphic, drawBufferedGraphicBC
, drawBufferedGraphic
, saveGraphic
, withBitmap
, setDefaults
, createCompatibleBitmap, withCompatibleBitmap, withCompatibleDC, withDC
, createBitmapFile
) where
import Graphics.HGL.Win32.Types
import qualified Graphics.HGL.Win32.Utilities as Utils
import qualified System.Win32 as Win32
type DrawFun = Win32.HWND -> Win32.HDC -> IO ()
drawGraphic :: Draw () -> DrawFun
drawBufferedGraphic :: Draw () -> DrawFun
drawBufferedGraphicBC :: Win32.COLORREF -> Draw () -> DrawFun
saveGraphic :: String -> Point -> Draw () -> IO ()
createBitmapFile :: Win32.HDC -> String -> Bitmap -> IO ()
createCompatibleDC :: Win32.HDC -> IO Win32.HDC
deleteDC :: Win32.HDC -> IO ()
createCompatibleBitmap :: Win32.HDC -> Win32.Int32 -> Win32.Int32 -> IO Bitmap
withCompatibleDC :: Win32.HDC -> (Win32.HDC -> IO a) -> IO a
withBitmap :: Win32.HDC -> Bitmap -> IO a -> IO a
withDC :: Maybe Win32.HWND -> (Win32.HDC -> IO a) -> IO a
withCompatibleBitmap :: Win32.HDC -> Win32.Int32 -> Win32.Int32 -> (Bitmap -> IO a) -> IO a
drawGraphic p = \ hwnd hdc -> do
(w,h) <- windowSize hwnd
Win32.bitBlt hdc 0 0 w h hdc 0 0 backgroundColor
setDefaults hdc
unDraw p hdc
drawBufferedGraphic = drawBufferedGraphicBC backgroundColor
drawBufferedGraphicBC bgColor p = \ hwnd hdc -> do
(w,h) <- windowSize hwnd
withBuffer (Just hwnd) w h bgColor $ \ buffer _ -> do
setDefaults buffer
unDraw p buffer
Win32.bitBlt hdc 0 0 w h buffer 0 0 Win32.sRCCOPY
saveGraphic fileName size p =
withBuffer Nothing w h backgroundColor $ \ buffer bmp -> do
setDefaults buffer
unDraw p buffer
createBitmapFile buffer fileName bmp
where
(w,h) = fromPoint size
backgroundColor = Win32.bLACKNESS
setDefaults :: Win32.HDC -> IO ()
setDefaults hdc = do
setDefaultPen hdc
setDefaultBrush hdc
setDefaultText hdc
return ()
setDefaultPen :: Win32.HDC -> IO ()
setDefaultPen = \ hdc -> do
whitePen <- Win32.getStockPen Win32.wHITE_PEN
Win32.selectPen hdc whitePen
return ()
setDefaultBrush :: Win32.HDC -> IO ()
setDefaultBrush = \ hdc -> do
whiteBrush <- Win32.getStockBrush Win32.wHITE_BRUSH
Win32.selectBrush hdc whiteBrush
return ()
setDefaultText :: Win32.HDC -> IO ()
setDefaultText = \ hdc -> do
Win32.setTextColor hdc white
hdc black
Win32.setBkMode hdc Win32.tRANSPARENT
return ()
white :: Win32.COLORREF
white = Win32.rgb 255 255 255
black :: Win32.COLORREF
black = Win32.rgb 0 0 0
is initially 0 pixels wide , 0 pixels high and 1 bit deep
If it was n't for that , we 'd have swapped these two lines :
withBuffer :: Maybe Win32.HWND -> Win32.Int32 -> Win32.Int32 -> Win32.COLORREF -> (Win32.HDC -> Bitmap -> IO a) -> IO a
withBuffer mbhwnd w h bgColor p =
withDC mbhwnd $ \ hdc ->
withCompatibleBitmap hdc w h $ \ bitmap ->
withCompatibleDC hdc $ \ buffer ->
withBitmap buffer bitmap $ do
Win32.bitBlt buffer 0 0 w h buffer 0 0 bgColor
p buffer bitmap
windowSize :: Win32.HWND -> IO (Win32.LONG,Win32.LONG)
windowSize hwnd =
Win32.getClientRect hwnd >>= \ (l',t',r',b') ->
return (r' - l', b' - t')
Note that this DC is only " 1 bit " in size - you have to call
createCompatibleDC hdc = Win32.createCompatibleDC (Just hdc)
deleteDC = Win32.deleteDC
createCompatibleBitmap hdc w h = do
bmp <- Win32.createCompatibleBitmap hdc w h
return (MkBitmap bmp)
withBitmap hdc bmp = Utils.bracket_ (selectBitmap hdc bmp) (selectBitmap hdc)
withDC mhwnd = Utils.bracket (Win32.getDC mhwnd) (Win32.releaseDC mhwnd)
Note that this DC is only " 1 bit " in size - you have to call
withCompatibleDC hdc = Utils.bracket (createCompatibleDC hdc) deleteDC
withCompatibleBitmap hdc w h =
Utils.bracket (createCompatibleBitmap hdc w h) deleteBitmap
deleteBitmap (MkBitmap bmp) = Win32.deleteBitmap bmp
selectBitmap hdc (MkBitmap bmp) = do
bmp' <- Win32.selectBitmap hdc bmp
return (MkBitmap bmp)
createBitmapFile hdc fileName (MkBitmap bmp) =
Win32.createBMPFile fileName bmp hdc
|
5034541b5cb141a5c3f70a2b18badf5130dc604b5833ce99a87efc720fa001f1 | janegca/htdp2e | Exercise-323-eval-function.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname Exercise-323-eval-function) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
Exercise 323 .
;
Design eval - function * . The function consumes the BSL - fun - expr representation
of some expression e and the BSL - fun - def * representation of a definitions
area da . It produces the result that shows if you evaluate e in
; the interactions area assuming the definitions area contains da.
;
The function works like from exercise 321 . For an application
; of some function f, it
;
1 . evaluates the argument ;
2 . looks up the definition of f in the BSL - fun - def representation of da ;
3 . substitutes the value of the argument for the function parameter in the
; function’s body; and
4 . evaluates the new expression via recursion .
;
; Remember that the representation of a function definition for f comes with
; a parameter and a body.
;
Like , eval - function * signals an error when it encounters a variable
; or an application whose function is not defined in the definitions area.
; NOTE: needed to modify 'subst' to include a fun? condition
(define-struct def [name param body])
BSL - fun - def is a structure : ( make - def Symbol Symbol BSL - fun - expr )
interpretation : ( make - def n p b ) combines the three elements of
; a function definition: name (n), parameter (p) and body (b)
(define-struct fun [name arg])
; Fun is a structure: (make-fun Symbol BSL-fun-expr)
(define-struct add [left right])
Add is a structure : ( make - add BSL - fun - expr BSL - fun - expr )
(define-struct mul [left right])
; Mul is a structure: (make-mul BSL-fun-expr BSL-fun-expr)
;-- Example function definitions
; (define (f x) (+ 3 x))
(define f (make-def 'f 'x (make-add 3 'x)))
( define ( g y ) ( f ( * 2 y ) ) )
(define g (make-def 'g 'y (make-fun 'f (make-mul 2 'y))))
; (define (h v) (+ (f v) (g v)))
(define h (make-def 'h 'v (make-add (make-fun 'f 'v) (make-fun 'g 'v))))
; bad function definition
(define k (make-def 'k 'z (make-add 2 'x)))
(define da-fghk (list f g h k))
; -- Example functions
(define e1 (make-fun 'f 3))
(define e2 (make-fun 'g 3))
(define e3 (make-fun 'h 3))
(define e4 (make-fun 'k 3))
; -- Error messages
(define ERR_EXPR "invalid expression")
(define ERR_FN_UNDEF "undefined function")
BSL - fun - expr BSL - fun - def * - > Number
; the value of e assuming all functions defined within e are available
; in da; otherwise, an error
(check-expect (eval-function e1 da-fghk) 6)
(check-expect (eval-function e2 da-fghk) 9)
(check-expect (eval-function e3 da-fghk) 15)
(check-error (eval-function (make-fun 'm 3) da-fghk) ERR_FN_UNDEF)
(check-error (eval-function e4 da-fghk) ERR_EXPR)
(define (eval-function e da)
(cond [(number? e) e]
[(fun? e)
(local (; the function definition, if found, otherwise an error
(define fd (lookup-def da (fun-name e)))
(define arg (fun-arg e))
(define b (def-body fd))
(define x (def-param fd)))
(eval-function (subst b x (eval-function arg da)) da))]
[(add? e)
(+ (eval-function (add-left e) da)
(eval-function (add-right e) da))]
[(mul? e)
(* (eval-function (mul-left e) da)
(eval-function (mul-right e) da))]
[else (error ERR_EXPR)]))
BSL - fun - def * Symbol - > BSL - fun - def
retrieves the definition of f in da
or signal " undefined function " if da does not contain one
(define (lookup-def da f)
(cond [(empty? da) (error ERR_FN_UNDEF)]
[(eq? f (def-name (first da))) (first da)]
[else (lookup-def (rest da) f)]))
BSL - fun - expr Symbol Number - > BSL - fun - expr
; replaces all symbol values in the expression with the number value
(define (subst e x v)
(cond [(number? e) e]
[(symbol? e) (if (eq? e x) v e)]
[(fun? e) (make-fun (fun-name e) (subst (fun-arg e) x v))]
[(add? e) (make-add (subst (add-left e) x v)
(subst (add-right e) x v))]
[(mul? e) (make-mul (subst (mul-left e) x v)
(subst (mul-right e) x v))]))
| null | https://raw.githubusercontent.com/janegca/htdp2e/2d50378135edc2b8b1816204021f8763f8b2707b/04-Intertwined%20Data/Exercise-323-eval-function.rkt | racket | about the language level of this file in a form that our tools can easily process.
the interactions area assuming the definitions area contains da.
of some function f, it
function’s body; and
Remember that the representation of a function definition for f comes with
a parameter and a body.
or an application whose function is not defined in the definitions area.
NOTE: needed to modify 'subst' to include a fun? condition
a function definition: name (n), parameter (p) and body (b)
Fun is a structure: (make-fun Symbol BSL-fun-expr)
Mul is a structure: (make-mul BSL-fun-expr BSL-fun-expr)
-- Example function definitions
(define (f x) (+ 3 x))
(define (h v) (+ (f v) (g v)))
bad function definition
-- Example functions
-- Error messages
the value of e assuming all functions defined within e are available
in da; otherwise, an error
the function definition, if found, otherwise an error
replaces all symbol values in the expression with the number value | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname Exercise-323-eval-function) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
Exercise 323 .
Design eval - function * . The function consumes the BSL - fun - expr representation
of some expression e and the BSL - fun - def * representation of a definitions
area da . It produces the result that shows if you evaluate e in
The function works like from exercise 321 . For an application
3 . substitutes the value of the argument for the function parameter in the
4 . evaluates the new expression via recursion .
Like , eval - function * signals an error when it encounters a variable
(define-struct def [name param body])
BSL - fun - def is a structure : ( make - def Symbol Symbol BSL - fun - expr )
interpretation : ( make - def n p b ) combines the three elements of
(define-struct fun [name arg])
(define-struct add [left right])
Add is a structure : ( make - add BSL - fun - expr BSL - fun - expr )
(define-struct mul [left right])
(define f (make-def 'f 'x (make-add 3 'x)))
( define ( g y ) ( f ( * 2 y ) ) )
(define g (make-def 'g 'y (make-fun 'f (make-mul 2 'y))))
(define h (make-def 'h 'v (make-add (make-fun 'f 'v) (make-fun 'g 'v))))
(define k (make-def 'k 'z (make-add 2 'x)))
(define da-fghk (list f g h k))
(define e1 (make-fun 'f 3))
(define e2 (make-fun 'g 3))
(define e3 (make-fun 'h 3))
(define e4 (make-fun 'k 3))
(define ERR_EXPR "invalid expression")
(define ERR_FN_UNDEF "undefined function")
BSL - fun - expr BSL - fun - def * - > Number
(check-expect (eval-function e1 da-fghk) 6)
(check-expect (eval-function e2 da-fghk) 9)
(check-expect (eval-function e3 da-fghk) 15)
(check-error (eval-function (make-fun 'm 3) da-fghk) ERR_FN_UNDEF)
(check-error (eval-function e4 da-fghk) ERR_EXPR)
(define (eval-function e da)
(cond [(number? e) e]
[(fun? e)
(define fd (lookup-def da (fun-name e)))
(define arg (fun-arg e))
(define b (def-body fd))
(define x (def-param fd)))
(eval-function (subst b x (eval-function arg da)) da))]
[(add? e)
(+ (eval-function (add-left e) da)
(eval-function (add-right e) da))]
[(mul? e)
(* (eval-function (mul-left e) da)
(eval-function (mul-right e) da))]
[else (error ERR_EXPR)]))
BSL - fun - def * Symbol - > BSL - fun - def
retrieves the definition of f in da
or signal " undefined function " if da does not contain one
(define (lookup-def da f)
(cond [(empty? da) (error ERR_FN_UNDEF)]
[(eq? f (def-name (first da))) (first da)]
[else (lookup-def (rest da) f)]))
BSL - fun - expr Symbol Number - > BSL - fun - expr
(define (subst e x v)
(cond [(number? e) e]
[(symbol? e) (if (eq? e x) v e)]
[(fun? e) (make-fun (fun-name e) (subst (fun-arg e) x v))]
[(add? e) (make-add (subst (add-left e) x v)
(subst (add-right e) x v))]
[(mul? e) (make-mul (subst (mul-left e) x v)
(subst (mul-right e) x v))]))
|
ed745a94bcb4ad9cd2b04c8f86eaf4b5c226be3655a89970b3d03d5137a74d6d | eudoxia0/lcm | cli.lisp | (in-package :lcm)
(defparameter +version+ "0.0.1")
;;;; Classes to represent CLI commands.
(defclass command ()
()
(:documentation "The base class of CLI commands."))
(defclass help-command (command)
()
(:documentation "Command to print usage information."))
(defclass version-command (command)
()
(:documentation "Command to print version information."))
(defclass get-command (command)
()
(:documentation "Command to get the name of the current configuration."))
(defclass apply-command (command)
((name :reader command-name
:initarg :name
:type string
:documentation "The name of the configuration to apply.")
(files :reader command-files
:initarg :files
:type list
:documentation "The list of files to load in order.")
(secrets :reader command-secrets
:initarg :secrets
:type (or null string)
:documentation "The path to the secrets file, if any."))
(:documentation "Command to apply a configuration."))
(defclass unapply-command (command)
((files :reader command-files
:initarg :files
:type list
:documentation "The list of files to load in order.")
(secrets :reader command-secrets
:initarg :secrets
:type (or null string)
:documentation "The path to the secrets file, if any."))
(:documentation "Command to unapply the current configuration."))
;;;; Command parsing.
(defun starts-with-p (string prefix)
"Test whether STRING starts with the substring PREFIX. If PREFIX is longer than STRING, returns NIL."
(if (> (length prefix) (length string))
nil
(string= (subseq string 0 (length prefix)) prefix)))
(defun secretsp (string)
"Test whether STRING starts with the substring '--secrets='."
(starts-with-p string "--secrets="))
(defun secrets-value (string)
(subseq string (length "--secrets=")))
(defun find-last-secrets (strings)
"Find the value of the `--secrets=` flag in the argument list. NIL otherwise. If there are multiple flags, return the last one."
(let ((result nil))
(dolist (string strings result)
(when (secretsp string)
(setq result (secrets-value string))))
result))
(defun file-args (strings)
"Return a list of all strings in the list of strings STRINGS that don't satisfy the 'secretsp' predicate."
(loop for string in strings
unless (secretsp string)
collect string))
(defun parse-cli (args)
"Parse a list of command line arguments."
(if (null args)
;; Empty list: print help
(make-instance 'help-command)
At least one argument
(let ((first (first args)))
(cond ((string= first "get")
(make-instance 'get-command))
((string= first "apply")
(handler-case
(parse-apply-args args)
(error ()
(format t "Bad `apply` command line arguments.~%")
(uiop:quit -1))))
((string= first "unapply")
(handler-case
(parse-unapply-args args)
(error ()
(format t "Bad `unapply` command line arguments.~%")
(uiop:quit -1))))
((string= first "help")
(make-instance 'help-command))
((string= first "version")
(make-instance 'version-command))
(t
(make-instance 'help-command))))))
(defun parse-apply-args (args)
(destructuring-bind (name &rest args)
(rest args)
(let ((files (file-args args))
(secrets (find-last-secrets args)))
(make-instance 'apply-command
:name name
:files files
:secrets secrets))))
(defun parse-unapply-args (args)
(let ((files (file-args (rest args)))
(secrets (find-last-secrets (rest args))))
(make-instance 'unapply-command
:files files
:secrets secrets)))
| null | https://raw.githubusercontent.com/eudoxia0/lcm/a831bc949da8c9dcc59499fca1a392cd7ce83668/src/cli.lisp | lisp | Classes to represent CLI commands.
Command parsing.
Empty list: print help | (in-package :lcm)
(defparameter +version+ "0.0.1")
(defclass command ()
()
(:documentation "The base class of CLI commands."))
(defclass help-command (command)
()
(:documentation "Command to print usage information."))
(defclass version-command (command)
()
(:documentation "Command to print version information."))
(defclass get-command (command)
()
(:documentation "Command to get the name of the current configuration."))
(defclass apply-command (command)
((name :reader command-name
:initarg :name
:type string
:documentation "The name of the configuration to apply.")
(files :reader command-files
:initarg :files
:type list
:documentation "The list of files to load in order.")
(secrets :reader command-secrets
:initarg :secrets
:type (or null string)
:documentation "The path to the secrets file, if any."))
(:documentation "Command to apply a configuration."))
(defclass unapply-command (command)
((files :reader command-files
:initarg :files
:type list
:documentation "The list of files to load in order.")
(secrets :reader command-secrets
:initarg :secrets
:type (or null string)
:documentation "The path to the secrets file, if any."))
(:documentation "Command to unapply the current configuration."))
(defun starts-with-p (string prefix)
"Test whether STRING starts with the substring PREFIX. If PREFIX is longer than STRING, returns NIL."
(if (> (length prefix) (length string))
nil
(string= (subseq string 0 (length prefix)) prefix)))
(defun secretsp (string)
"Test whether STRING starts with the substring '--secrets='."
(starts-with-p string "--secrets="))
(defun secrets-value (string)
(subseq string (length "--secrets=")))
(defun find-last-secrets (strings)
"Find the value of the `--secrets=` flag in the argument list. NIL otherwise. If there are multiple flags, return the last one."
(let ((result nil))
(dolist (string strings result)
(when (secretsp string)
(setq result (secrets-value string))))
result))
(defun file-args (strings)
"Return a list of all strings in the list of strings STRINGS that don't satisfy the 'secretsp' predicate."
(loop for string in strings
unless (secretsp string)
collect string))
(defun parse-cli (args)
"Parse a list of command line arguments."
(if (null args)
(make-instance 'help-command)
At least one argument
(let ((first (first args)))
(cond ((string= first "get")
(make-instance 'get-command))
((string= first "apply")
(handler-case
(parse-apply-args args)
(error ()
(format t "Bad `apply` command line arguments.~%")
(uiop:quit -1))))
((string= first "unapply")
(handler-case
(parse-unapply-args args)
(error ()
(format t "Bad `unapply` command line arguments.~%")
(uiop:quit -1))))
((string= first "help")
(make-instance 'help-command))
((string= first "version")
(make-instance 'version-command))
(t
(make-instance 'help-command))))))
(defun parse-apply-args (args)
(destructuring-bind (name &rest args)
(rest args)
(let ((files (file-args args))
(secrets (find-last-secrets args)))
(make-instance 'apply-command
:name name
:files files
:secrets secrets))))
(defun parse-unapply-args (args)
(let ((files (file-args (rest args)))
(secrets (find-last-secrets (rest args))))
(make-instance 'unapply-command
:files files
:secrets secrets)))
|
f7a20e4eff89784dc88bffe9f955dd147a1eb0c2fa0049ed105ff5a2bfb62890 | mdedwards/slippery-chicken | pitches-examples.lsp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; File: pitches-examples.lsp
;;;
Class Hierarchy : None
;;;
Version : 1.0
;;;
;;; Project: slippery chicken (algorithmic composition)
;;;
;;; Purpose: Lisp-code examples to accompany pitches.html
;;;
Author :
;;;
;;; Creation date: 17th July 2012
;;;
$ $ Last modified : 22:45:33 Fri May 17 2013 BST
;;;
SVN ID : $ I d : pitches-examples.lsp 3538 2013 - 05 - 18 08:29:15Z medward2 $
;;;
;;; ****
Licence : Copyright ( c ) 2012
;;;
;;; This file is part of slippery-chicken
;;;
;;; slippery-chicken is free software; you can redistribute it
;;; and/or modify it under the terms of the GNU General
Public License as published by the Free Software
Foundation ; either version 3 of the License , or ( at your
;;; option) any later version.
;;;
;;; slippery-chicken is distributed in the hope that it will
;;; be useful, but WITHOUT ANY WARRANTY; without even the
;;; implied warranty of MERCHANTABILITY or FITNESS FOR A
;;; PARTICULAR PURPOSE. See the GNU General Public License
;;; for more details.
;;;
You should have received a copy of the GNU General Public
;;; License along with slippery-chicken; if not, write to the
Free Software Foundation , Inc. , 59 Temple Place , Suite
330 , Boston , MA 02111 - 1307 USA
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ============================================================================
;;; pitches.html examples
;;; ============================================================================
;;; constructing a set-palette and using cmn-play to print it
;;; ----------------------------------------------------------------------------
(let ((sp (make-set-palette
'test
'((set1 ((c3 g3 cs4 e4 fs4 a4 bf4 c5 d5 f5 gf5 af5 ef6)))
(set2 ((c3 fs3 cs4 e4 g4 a4 b4 c5 df5 f5 g5 af5 ef6)))
(set3 ((d3 f3 cs4 e4 fs4 a4 b4 c5 d5 e5 fs5 af5 ef6)))
(set4 ((d3 e3 cs4 ef4 fs4 a4 b4 c5 d5 e5 fs5 af5 d6)))))))
(cmn-display sp :break-line-each-set nil :size 16))
using the recurse - simple - data keyword for 2 - pitch sets
;;; ----------------------------------------------------------------------------
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '(((1 ((gs4 bf4)))) :recurse-simple-data nil)
:set-map '((1 (1)))
:rthm-seq-palette `((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7
8)))))
:rthm-seq-map '((1 ((vn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
;;; creating a set-map and using the :display-sets keyword of cmn-display to
;;; print it below the score
;;; ----------------------------------------------------------------------------
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((set1 ((c3 g3 cs4 e4 fs4 a4 bf4 c5 d5 f5 gf5 af5
ef6)))
(set2 ((c3 fs3 cs4 e4 g4 a4 b4 c5 df5 f5 g5 af5
ef6)))
(set3 ((d3 f3 cs4 e4 fs4 a4 b4 c5 d5 e5 fs5 af5
ef6)))
(set4 ((d3 e3 cs4 ef4 fs4 a4 b4 c5 d5 e5 fs5 af5
d6))))
:set-map '((1 (set1 set1 set1 set1 set1))
(2 (set2 set3 set2 set3 set2 set3 set3))
(3 (set3 set3 set4 set3 set4 set3 set4 set4 set3 set4
set4))
(4 (set4 set4 set1 set4 set1 set4 set1 set1 set1)))
:rthm-seq-palette `((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map `((1 ((vn ,(loop repeat 5 collect 1))))
(2 ((vn ,(loop repeat 7 collect 1))))
(3 ((vn ,(loop repeat 11 collect 1))))
(4 ((vn ,(loop repeat 9 collect 1))))))))
(midi-play mini)
(cmn-display mini :display-sets t)
(write-lp-data-for-all mini))
;;; simple pitch-seq/pitch-seq-palette example
;;; ----------------------------------------------------------------------------
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 4 3 2 5 7 8 6)))))
:rthm-seq-map '((1 ((vn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
;;; same pitch-seq produces different pitches for different instruments
;;; ----------------------------------------------------------------------------
(let* ((scsip-clone (clone +slippery-chicken-standard-instrument-palette+)))
(set-slot 'starting-clef 'tenor 'bassoon scsip-clone)
(let* ((mini
(make-slippery-chicken
'+mini+
:instrument-palette scsip-clone
:ensemble '(((fl (flute :midi-channel 1))
(bn (bassoon :midi-channel 2))))
:set-palette '((1 ((b3 d4 g4 b4 e5 a5 d6 a6 b6))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map '((1 ((fl (1))
(bn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini)))
;;; indicating chords in a pitch-seq
;;; ----------------------------------------------------------------------------
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((pn (piano :midi-channel 1))))
:set-palette '((1 ((c5 d5 e5 f5 g5 a5 b5 c6))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 (4) (3) 2 5 (7) 8
6)))))
:rthm-seq-map '((1 ((pn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
;;; avoiding/allowing melodic octaves
;;; ----------------------------------------------------------------------------
(let* ((mini-1
(make-slippery-chicken
'+mini-1+
:title "mini 1"
:ensemble '(((fl (flute :midi-channel 1))))
:set-palette '((1 ((c5 d5 e5 f5 g5 a5 b5 c6))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map '((1 ((fl (1 1 1)))))))
(mini-2
(make-slippery-chicken
'+mini-2+
:title "mini 2"
:ensemble '(((fl (flute :midi-channel 1))))
:set-palette '((1 ((c5 d5 e5 f5 g5 a5 b5 c6))))
:set-map '((1 (1 1 1)))
:avoid-melodic-octaves nil
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map '((1 ((fl (1 1 1))))))))
(midi-play mini-1 :midi-file "/tmp/mini-1.mid")
(cmn-display mini-1 :file "/tmp/mini-1.eps")
(write-lp-data-for-all mini-1)
(midi-play mini-2 :midi-file "/tmp/mini-2.mid")
(cmn-display mini-2 :file "/tmp/mini-2.eps")
(write-lp-data-for-all mini-2))
;;; multiple pitch-seq curves in the same pitch-seq-palette
;;; ----------------------------------------------------------------------------
(let* ((multi-ps
(make-slippery-chicken
'+multi-ps+
:title "Multiple pitch-seqs"
:ensemble '(((fl (flute :midi-channel 1))
(ob (oboe :midi-channel 2))
(cl (b-flat-clarinet :midi-channel 3))))
:tempo-map '((1 (q 60)))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((8 7 8 7 8 7 8 7)
(5 4 3 4 5 4 3 4)
(1 2 1 2 1 2 1 2)))))
:rthm-seq-map '((1 ((fl (1))
(ob (1))
(cl (1))))))))
(midi-play multi-ps)
(cmn-display multi-ps)
(write-lp-data-for-all multi-ps))
;;; using set-limits-high and set-limits-low to constrain pitches
;;; ----------------------------------------------------------------------------
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((cl (b-flat-clarinet :midi-channel 1))
(vc (cello :midi-channel 2))
(cb (double-bass :midi-channel 3))))
:set-palette '((1 ((e1 f1 g1 a1 b1
c2 d2 e2 f2 g2 a2 b2
c3 d3 e3 f3 g3 a3 b3
c4 d4 e4 f4 g4 a4 b4
c5 d5 e5 f5 g5 a5 b5 c6))))
:set-limits-high '((cl (0 c6 50 c5 100 c6))
(vc (0 g4 50 g3 100 g4))
(cb (0 f3 50 f2 100 f3)))
:set-limits-low '((cl (0 c5 50 c4 100 c5))
(vc (0 g3 50 g2 100 g3))
(cb (0 f2 50 e1 100 f2)))
:set-map `((1 ,(loop repeat 10 collect 1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map (list (list 1
(loop for p in '(cl vc cb)
collect (list p (loop repeat 10
collect 1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
;;; specifying simple subsets in a set-palette and using cmn-display to print
;;; the result
;;; ----------------------------------------------------------------------------
(let* ((sp (make-set-palette
'test
'((1 ((f3 g3 a3 bf3 c4 d4 e4 f4 g4 a4 b4 c5 d5 e5 f5 g5)
:subsets ((pno1 (c4 e4 g4))
(pno2 (d4 f4 a4))
(mba1 (a3 c4 e4)))
:related-sets ((pno3 (gs4 bf4 df5))
(mba2 (fs3 af3 cs4)))))))))
(cmn-display sp :break-line-each-set nil :size 16))
;;; specifying nested subsets in a set-palette and using cmn-display to print
;;; the result
;;; ----------------------------------------------------------------------------
(let ((sp (make-set-palette
'test
'((1 ((c3 g3 cs4 e4 fs4 a4 bf4 c5 d5 f5 gf5 af5 ef6)
:subsets ((piano ((pno1 (cs4 e4 fs4))
(pno2 (e4 fs4 a4))))
(marimba ((mba1 (c3 g3 cs4))
(mba2 (g3 cs4 e4)))))
:related-sets ((piano ((pno3 (d3 a3 d5))
(pno4 (c3 g3 d5)))))))))))
(cmn-display sp :break-line-each-set nil :size 16))
;;; limiting a player's pitches using subset-id
;;; ----------------------------------------------------------------------------
(progn
(set-slot 'subset-id 'flute-notes 'flute
+slippery-chicken-standard-instrument-palette+)
(set-slot 'subset-id 'oboe-notes 'oboe
+slippery-chicken-standard-instrument-palette+)
(set-slot 'subset-id 'clarinet-notes 'b-flat-clarinet
+slippery-chicken-standard-instrument-palette+)
(let* ((subset-id-piece
(make-slippery-chicken
'+subset-id-piece+
:title "subset id piece"
:ensemble '(((fl (flute :midi-channel 1))
(ob (oboe :midi-channel 2))
(cl (b-flat-clarinet :midi-channel 3))))
:set-palette
'((1 ((b3 c4 d4 e4 f4 g4 a4 b4 c5 d5 e5 f5 g5 a5 b5
c6 d6 e6 f6 g6 a6 b6)
:subsets ((flute-notes (b5 c6 d6 e6 f6 g6 a6 b6))
(oboe-notes (a4 b4 c5 d5 e5 f5 g5 a5))
(clarinet-notes (b3 c4 d4 e4 f4 g4))))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette
'((1 ((((4 4) - e. s - - e e -
- +s s s s - (s) - s s s - ))
:pitch-seq-palette ((2 2 3 1 1 2 1 2 2 2)
(6 8 5 5 7 7 9 6 8 10)
(5 4 3 1 1 2 3 3 4 4)
(1 3 3 2 1 2 3 1 1 1)))))
:rthm-seq-map '((1 ((fl (1 1 1))
(ob (1 1 1))
(cl (1 1 1))))))))
(midi-play subset-id-piece)
(cmn-display subset-id-piece)
(write-lp-data-for-all subset-id-piece)
(loop for p in '(flute oboe b-flat-clarinet)
collect
(set-slot 'subset-id nil p
+slippery-chicken-standard-instrument-palette+)))) | null | https://raw.githubusercontent.com/mdedwards/slippery-chicken/c1c11fadcdb40cd869d5b29091ba5e53c5270e04/doc/examples/pitches-examples.lsp | lisp |
File: pitches-examples.lsp
Project: slippery chicken (algorithmic composition)
Purpose: Lisp-code examples to accompany pitches.html
Creation date: 17th July 2012
****
This file is part of slippery-chicken
slippery-chicken is free software; you can redistribute it
and/or modify it under the terms of the GNU General
either version 3 of the License , or ( at your
option) any later version.
slippery-chicken is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License
for more details.
License along with slippery-chicken; if not, write to the
============================================================================
pitches.html examples
============================================================================
constructing a set-palette and using cmn-play to print it
----------------------------------------------------------------------------
----------------------------------------------------------------------------
creating a set-map and using the :display-sets keyword of cmn-display to
print it below the score
----------------------------------------------------------------------------
simple pitch-seq/pitch-seq-palette example
----------------------------------------------------------------------------
same pitch-seq produces different pitches for different instruments
----------------------------------------------------------------------------
indicating chords in a pitch-seq
----------------------------------------------------------------------------
avoiding/allowing melodic octaves
----------------------------------------------------------------------------
multiple pitch-seq curves in the same pitch-seq-palette
----------------------------------------------------------------------------
using set-limits-high and set-limits-low to constrain pitches
----------------------------------------------------------------------------
specifying simple subsets in a set-palette and using cmn-display to print
the result
----------------------------------------------------------------------------
specifying nested subsets in a set-palette and using cmn-display to print
the result
----------------------------------------------------------------------------
limiting a player's pitches using subset-id
---------------------------------------------------------------------------- | Class Hierarchy : None
Version : 1.0
Author :
$ $ Last modified : 22:45:33 Fri May 17 2013 BST
SVN ID : $ I d : pitches-examples.lsp 3538 2013 - 05 - 18 08:29:15Z medward2 $
Licence : Copyright ( c ) 2012
Public License as published by the Free Software
You should have received a copy of the GNU General Public
Free Software Foundation , Inc. , 59 Temple Place , Suite
330 , Boston , MA 02111 - 1307 USA
(let ((sp (make-set-palette
'test
'((set1 ((c3 g3 cs4 e4 fs4 a4 bf4 c5 d5 f5 gf5 af5 ef6)))
(set2 ((c3 fs3 cs4 e4 g4 a4 b4 c5 df5 f5 g5 af5 ef6)))
(set3 ((d3 f3 cs4 e4 fs4 a4 b4 c5 d5 e5 fs5 af5 ef6)))
(set4 ((d3 e3 cs4 ef4 fs4 a4 b4 c5 d5 e5 fs5 af5 d6)))))))
(cmn-display sp :break-line-each-set nil :size 16))
using the recurse - simple - data keyword for 2 - pitch sets
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '(((1 ((gs4 bf4)))) :recurse-simple-data nil)
:set-map '((1 (1)))
:rthm-seq-palette `((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7
8)))))
:rthm-seq-map '((1 ((vn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((set1 ((c3 g3 cs4 e4 fs4 a4 bf4 c5 d5 f5 gf5 af5
ef6)))
(set2 ((c3 fs3 cs4 e4 g4 a4 b4 c5 df5 f5 g5 af5
ef6)))
(set3 ((d3 f3 cs4 e4 fs4 a4 b4 c5 d5 e5 fs5 af5
ef6)))
(set4 ((d3 e3 cs4 ef4 fs4 a4 b4 c5 d5 e5 fs5 af5
d6))))
:set-map '((1 (set1 set1 set1 set1 set1))
(2 (set2 set3 set2 set3 set2 set3 set3))
(3 (set3 set3 set4 set3 set4 set3 set4 set4 set3 set4
set4))
(4 (set4 set4 set1 set4 set1 set4 set1 set1 set1)))
:rthm-seq-palette `((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map `((1 ((vn ,(loop repeat 5 collect 1))))
(2 ((vn ,(loop repeat 7 collect 1))))
(3 ((vn ,(loop repeat 11 collect 1))))
(4 ((vn ,(loop repeat 9 collect 1))))))))
(midi-play mini)
(cmn-display mini :display-sets t)
(write-lp-data-for-all mini))
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 4 3 2 5 7 8 6)))))
:rthm-seq-map '((1 ((vn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
(let* ((scsip-clone (clone +slippery-chicken-standard-instrument-palette+)))
(set-slot 'starting-clef 'tenor 'bassoon scsip-clone)
(let* ((mini
(make-slippery-chicken
'+mini+
:instrument-palette scsip-clone
:ensemble '(((fl (flute :midi-channel 1))
(bn (bassoon :midi-channel 2))))
:set-palette '((1 ((b3 d4 g4 b4 e5 a5 d6 a6 b6))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map '((1 ((fl (1))
(bn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini)))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((pn (piano :midi-channel 1))))
:set-palette '((1 ((c5 d5 e5 f5 g5 a5 b5 c6))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 (4) (3) 2 5 (7) 8
6)))))
:rthm-seq-map '((1 ((pn (1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
(let* ((mini-1
(make-slippery-chicken
'+mini-1+
:title "mini 1"
:ensemble '(((fl (flute :midi-channel 1))))
:set-palette '((1 ((c5 d5 e5 f5 g5 a5 b5 c6))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map '((1 ((fl (1 1 1)))))))
(mini-2
(make-slippery-chicken
'+mini-2+
:title "mini 2"
:ensemble '(((fl (flute :midi-channel 1))))
:set-palette '((1 ((c5 d5 e5 f5 g5 a5 b5 c6))))
:set-map '((1 (1 1 1)))
:avoid-melodic-octaves nil
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map '((1 ((fl (1 1 1))))))))
(midi-play mini-1 :midi-file "/tmp/mini-1.mid")
(cmn-display mini-1 :file "/tmp/mini-1.eps")
(write-lp-data-for-all mini-1)
(midi-play mini-2 :midi-file "/tmp/mini-2.mid")
(cmn-display mini-2 :file "/tmp/mini-2.eps")
(write-lp-data-for-all mini-2))
(let* ((multi-ps
(make-slippery-chicken
'+multi-ps+
:title "Multiple pitch-seqs"
:ensemble '(((fl (flute :midi-channel 1))
(ob (oboe :midi-channel 2))
(cl (b-flat-clarinet :midi-channel 3))))
:tempo-map '((1 (q 60)))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((8 7 8 7 8 7 8 7)
(5 4 3 4 5 4 3 4)
(1 2 1 2 1 2 1 2)))))
:rthm-seq-map '((1 ((fl (1))
(ob (1))
(cl (1))))))))
(midi-play multi-ps)
(cmn-display multi-ps)
(write-lp-data-for-all multi-ps))
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((cl (b-flat-clarinet :midi-channel 1))
(vc (cello :midi-channel 2))
(cb (double-bass :midi-channel 3))))
:set-palette '((1 ((e1 f1 g1 a1 b1
c2 d2 e2 f2 g2 a2 b2
c3 d3 e3 f3 g3 a3 b3
c4 d4 e4 f4 g4 a4 b4
c5 d5 e5 f5 g5 a5 b5 c6))))
:set-limits-high '((cl (0 c6 50 c5 100 c6))
(vc (0 g4 50 g3 100 g4))
(cb (0 f3 50 f2 100 f3)))
:set-limits-low '((cl (0 c5 50 c4 100 c5))
(vc (0 g3 50 g2 100 g3))
(cb (0 f2 50 e1 100 f2)))
:set-map `((1 ,(loop repeat 10 collect 1)))
:rthm-seq-palette '((1 ((((4 4) - e e e e - - e e e e -))
:pitch-seq-palette ((1 2 3 4 5 6 7 8)))))
:rthm-seq-map (list (list 1
(loop for p in '(cl vc cb)
collect (list p (loop repeat 10
collect 1))))))))
(midi-play mini)
(cmn-display mini)
(write-lp-data-for-all mini))
(let* ((sp (make-set-palette
'test
'((1 ((f3 g3 a3 bf3 c4 d4 e4 f4 g4 a4 b4 c5 d5 e5 f5 g5)
:subsets ((pno1 (c4 e4 g4))
(pno2 (d4 f4 a4))
(mba1 (a3 c4 e4)))
:related-sets ((pno3 (gs4 bf4 df5))
(mba2 (fs3 af3 cs4)))))))))
(cmn-display sp :break-line-each-set nil :size 16))
(let ((sp (make-set-palette
'test
'((1 ((c3 g3 cs4 e4 fs4 a4 bf4 c5 d5 f5 gf5 af5 ef6)
:subsets ((piano ((pno1 (cs4 e4 fs4))
(pno2 (e4 fs4 a4))))
(marimba ((mba1 (c3 g3 cs4))
(mba2 (g3 cs4 e4)))))
:related-sets ((piano ((pno3 (d3 a3 d5))
(pno4 (c3 g3 d5)))))))))))
(cmn-display sp :break-line-each-set nil :size 16))
(progn
(set-slot 'subset-id 'flute-notes 'flute
+slippery-chicken-standard-instrument-palette+)
(set-slot 'subset-id 'oboe-notes 'oboe
+slippery-chicken-standard-instrument-palette+)
(set-slot 'subset-id 'clarinet-notes 'b-flat-clarinet
+slippery-chicken-standard-instrument-palette+)
(let* ((subset-id-piece
(make-slippery-chicken
'+subset-id-piece+
:title "subset id piece"
:ensemble '(((fl (flute :midi-channel 1))
(ob (oboe :midi-channel 2))
(cl (b-flat-clarinet :midi-channel 3))))
:set-palette
'((1 ((b3 c4 d4 e4 f4 g4 a4 b4 c5 d5 e5 f5 g5 a5 b5
c6 d6 e6 f6 g6 a6 b6)
:subsets ((flute-notes (b5 c6 d6 e6 f6 g6 a6 b6))
(oboe-notes (a4 b4 c5 d5 e5 f5 g5 a5))
(clarinet-notes (b3 c4 d4 e4 f4 g4))))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette
'((1 ((((4 4) - e. s - - e e -
- +s s s s - (s) - s s s - ))
:pitch-seq-palette ((2 2 3 1 1 2 1 2 2 2)
(6 8 5 5 7 7 9 6 8 10)
(5 4 3 1 1 2 3 3 4 4)
(1 3 3 2 1 2 3 1 1 1)))))
:rthm-seq-map '((1 ((fl (1 1 1))
(ob (1 1 1))
(cl (1 1 1))))))))
(midi-play subset-id-piece)
(cmn-display subset-id-piece)
(write-lp-data-for-all subset-id-piece)
(loop for p in '(flute oboe b-flat-clarinet)
collect
(set-slot 'subset-id nil p
+slippery-chicken-standard-instrument-palette+)))) |
f7e3e1c97a8bec8ca97574ce81d7f5e85a2df331e7984c1347ea388c8e59c05d | gebi/jungerl | ssh_rsa.erl | %%% File : ssh_rsa.erl
Author : < >
Description : RSA encrypt / decrypt / verify / sign ( rfc 3447 )
Created : 30 Aug 2004 by < >
-module(ssh_rsa).
-vsn("$Revision$ ").
-rcsid("$Id$\n").
-compile(export_all).
-export([verify/3]).
-include("../include/ssh.hrl").
-include("PKCS-1.hrl").
-define(MGF(Seed,Len), mgf1((Seed),(Len))).
-define(HASH(X), crypto:sha((X))).
-define(HLen, 20).
start() ->
crypto:start().
sign_file(File) ->
start(),
{ok,Bin} = file:read_file(File),
{ok,Key} = ssh_file:private_host_rsa_key(user),
sign(Key, Bin).
verify_file(File, Sig) ->
start(),
{ok,Bin} = file:read_file(File),
{ok,Key} = ssh_file:public_host_rsa_key(user),
verify(Key, Bin, Sig).
sign(Private,Mb) ->
rsassa_pkcs1_v1_5_sign(Private,Mb).
verify(Public,Mb,Sb) ->
io : format("verify : Key=~p\n Message=~p\n Signature=~p\n " ,
%% [Public, Mb, Sb]),
rsassa_pkcs1_v1_5_verify(Public,Mb,Sb).
Integer to octet string
i2osp(X, XLen) ->
ssh_bits:i2bin(X, XLen).
string to Integer
os2ip(X) ->
ssh_bits:bin2i(X).
%% decrypt1, M = message representative
rsaep(#ssh_key { public={N,E}}, M) ->
?ssh_assert(M >= 0 andalso M =< N-1, out_of_range),
ssh_math:ipow(M, E, N).
encrypt1 , C = cipher representative
rsadp(#ssh_key { public={N,_}, private={_,D}}, C) ->
?ssh_assert(C >= 0 andalso C =< N-1, out_of_range),
ssh_math:ipow(C, D, N).
, M = message representative
rsasp1(#ssh_key { public={N,_}, private={_,D}}, M) ->
?ssh_assert((M >= 0 andalso M =< N-1), out_of_range),
ssh_math:ipow(M, D, N).
verify1 , S = signature representative
rsavp1(#ssh_key { public={N,E}}, S) ->
?ssh_assert(S >= 0 andalso S =< N-1, out_of_range),
ssh_math:ipow(S, E, N).
%% M messaage
rsaes_oaep_encrypt(Public, M) ->
rsaes_oaep_encrypt(Public, M, <<>>).
rsaes_oaep_encrypt(Public=#ssh_key { public={N,_E}}, M, L) ->
?ssh_assert(size(L) =< 16#ffffffffffffffff, label_to_long),
K = (ssh_bits:isize(N)+7) div 8,
MLen = size(M),
%% LLen = size(L),
?ssh_assert(MLen =< K - 2*?HLen - 2, message_to_long),
LHash = ?HASH(L),
PS = ssh_bits:fill_bits(K - MLen - 2*?HLen - 2, 0),
DB = <<LHash/binary, PS/binary, 16#01, M/binary>>,
Seed = ssh_bits:random(?HLen),
DbMask = ?MGF(Seed, K - ?HLen - 1),
MaskedDB = ssh_bits:xor_bits(DB, DbMask),
SeedMask = ?MGF(MaskedDB, ?HLen),
MaskedSeed = ssh_bits:xor_bits(Seed, SeedMask),
EM = <<16#00, MaskedSeed/binary, MaskedDB/binary>>,
Mc = os2ip(EM),
C = rsaep(Public, Mc),
i2osp(C, K).
rsaes_oaep_decrypt(Key, C) ->
rsaes_oaep_decrypt(Key, C, <<>>).
rsaes_oaep_decrypt(Private=#ssh_key { public={N,_},private={_,_D}},Cb,L) ->
?ssh_assert(size(L) =< 16#ffffffffffffffff, label_to_long),
K = (ssh_bits:isize(N)+7) div 8,
?ssh_assert(K == 2*?HLen + 2, decryption_error),
C = os2ip(Cb),
M = rsadp(Private, C),
EM = i2osp(M, K),
LHash = ?HASH(L),
MLen = K - ?HLen -1,
case EM of
<<16#00, MaskedSeed:?HLen/binary, MaskedDB:MLen>> ->
SeedMask = ?MGF(MaskedDB, ?HLen),
Seed = ssh_bits:xor_bits(MaskedSeed, SeedMask),
DbMask = ?MGF(Seed, K - ?HLen - 1),
DB = ssh_bits:xor_bits(MaskedDB, DbMask),
PSLen = K - MLen - 2*?HLen - 2,
case DB of
<<LHash:?HLen, _PS:PSLen/binary, 16#01, M/binary>> ->
M;
_ ->
exit(decryption_error)
end;
_ ->
exit(decryption_error)
end.
rsaes_pkcs1_v1_5_encrypt(Public=#ssh_key { public={N,_E}}, M) ->
K = (ssh_bits:isize(N)+7) div 8,
MLen = size(M),
?ssh_assert(MLen =< K - 11, message_to_long),
PS = ssh_bits:random(K - MLen - 3),
EM = <<16#00,16#02,PS/binary,16#00,M/binary>>,
Mc = os2ip(EM),
C = rsaep(Public, Mc),
i2osp(C, K).
rsaes_pkcs1_v1_5_decrypt(Private=#ssh_key { public={N,_},private={_,_D}},
Cb) ->
K = (ssh_bits:isize(N)+7) div 8,
CLen = size(Cb),
?ssh_assert(CLen == K andalso K >= 11, decryption_error),
C = os2ip(Cb),
M = rsadp(Private, C),
EM = i2osp(M, K),
PSLen = K - CLen - 3,
case EM of
<<16#00, 16#02, _PS:PSLen/binary, 16#00, M>> ->
M;
_ ->
exit(decryption_error)
end.
rsassa_pss_sign(Private=#ssh_key { public={N,_},private={_,_D}},Mb) ->
ModBits = ssh_bits:isize(N),
K = (ModBits+7) div 8,
EM = emsa_pss_encode(Mb, ModBits - 1),
M = os2ip(EM),
S = rsasp1(Private, M),
i2osp(S, K).
rsassa_pss_verify(Public=#ssh_key { public={N,_E}},Mb,Sb) ->
ModBits = ssh_bits:isize(N),
K = (ModBits+7) div 8,
?ssh_assert(size(Sb) == K, invalid_signature),
S = os2ip(Sb),
M = rsavp1(Public,S),
EMLen = (ModBits-1+7) div 8,
EM = i2osp(M, EMLen),
emsa_pss_verify(Mb, EM, ModBits-1).
rsassa_pkcs1_v1_5_sign(Private=#ssh_key { public={N,_},private={_,_D}},Mb) ->
K = (ssh_bits:isize(N)+7) div 8,
EM = emsa_pkcs1_v1_5_encode(Mb, K),
M = os2ip(EM),
S = rsasp1(Private, M),
i2osp(S, K).
rsassa_pkcs1_v1_5_verify(Public=#ssh_key { public={N,_E}}, Mb, Sb) ->
K = (ssh_bits:isize(N)+7) div 8,
?ssh_assert(size(Sb) == K, invalid_signature),
S = os2ip(Sb),
M = rsavp1(Public, S),
EM = i2osp(M, K),
case emsa_pkcs1_v1_5_encode(Mb, K) of
EM -> ok;
_ -> exit(invalid_signature)
end.
emsa_pkcs1_v1_5_encode(M, EMLen) ->
H = ?HASH(M),
%% Must use speical xxNull types here!
Alg = #'AlgorithmNull' { algorithm = ?'id-sha1',
parameters = <<>> },
{ok,TCode} = asn1rt:encode('PKCS-1', 'DigestInfoNull',
#'DigestInfoNull' { digestAlgorithm = Alg,
digest = H }),
T = list_to_binary(TCode),
TLen = size(T),
io : format("T[~w ] = ~w\n " , [ TLen , T ] ) ,
?ssh_assert(EMLen >= TLen + 11, message_to_short),
PS = ssh_bits:fill_bits(EMLen - TLen - 3, 16#ff),
<<16#00, 16#01, PS/binary, 16#00, T/binary>>.
emsa_pss_encode(M, EMBits) ->
emsa_pss_encode(M, EMBits, 0).
emsa_pss_encode(M, EMBits, SLen) ->
?ssh_assert(size(M) =< 16#ffffffffffffffff, message_to_long),
EMLen = (EMBits + 7) div 8,
MHash = ?HASH(M),
?ssh_assert(EMLen >= ?HLen + SLen + 2, encoding_error),
Salt = ssh_bits:random(SLen),
M1 = [16#00,16#00,16#00,16#00,16#00,16#00,16#00,16#00,
MHash, Salt],
H = ?HASH(M1),
PS = ssh_bits:fill_bits(EMLen-SLen-?HLen-2, 0),
DB = <<PS/binary, 16#01, Salt/binary>>,
DbMask = ?MGF(H, EMLen - ?HLen -1),
io : format("DB=~p , DbMask=~p\n " , [ DB , DbMask ] ) ,
MaskedDB = ssh_bits:xor_bits(DB, DbMask),
io : format("MaskedDB=~p\n " , [ ] ) ,
ZLen = 8*EMLen - EMBits,
NZLen = (8 - (ZLen rem 8)) rem 8,
io : format("ZLen=~p , NZLen=~p\n " , [ ZLen , NZLen ] ) ,
<<_:ZLen, NZ:NZLen, MaskedDB1/binary>> = MaskedDB,
MaskedDB2 = <<0:ZLen, NZ:NZLen, MaskedDB1/binary>>,
<<MaskedDB2/binary, H/binary, 16#BC>>.
emsa_pss_verify(M, EM, EMBits) ->
emsa_pss_verify(M, EM, EMBits, 0).
emsa_pss_verify(M, EM, EMBits, SLen) ->
?ssh_assert(size(M) =< 16#ffffffffffffffff, message_to_long),
EMLen = (EMBits + 7) div 8,
MHash = ?HASH(M),
?ssh_assert(EMLen >= ?HLen + SLen + 2, inconsistent),
MaskLen = (EMLen - ?HLen - 1)-1,
ZLen = 8*EMLen - EMBits,
NZLen = (8 - (ZLen rem 8)) rem 8,
case EM of
<<0:ZLen,Nz:NZLen,MaskedDB1:MaskLen/binary, H:?HLen/binary, 16#BC>> ->
MaskedDB = <<0:ZLen,Nz:NZLen,MaskedDB1/binary>>,
DbMask = ?MGF(H, EMLen - ?HLen - 1),
DB = ssh_bits:xor_bits(MaskedDB, DbMask),
PSLen1 = (EMLen - SLen - ?HLen - 2) - 1,
PS = ssh_bits:fill_bits(PSLen1, 0),
case DB of
<<_:ZLen,0:NZLen,PS:PSLen1/binary,16#01,Salt:SLen/binary>> ->
M1 = [16#00,16#00,16#00,16#00,16#00,16#00,16#00,16#00,
MHash, Salt],
case ?HASH(M1) of
H -> ok;
_ -> exit(inconsistent)
end;
_ ->
exit(inconsistent)
end;
_ ->
exit(inconsistent)
end.
%% Mask generating function MGF1
mgf1(MGFSeed, MaskLen) ->
T = mgf1_loop(0, ((MaskLen + ?HLen -1) div ?HLen) - 1, MGFSeed, ""),
<<R:MaskLen/binary, _/binary>> = T,
R.
mgf1_loop(Counter, N, _, T) when Counter > N ->
list_to_binary(T);
mgf1_loop(Counter, N, MGFSeed, T) ->
C = i2osp(Counter, 4),
mgf1_loop(Counter+1, N, MGFSeed, [T, ?HASH([MGFSeed, C])]).
| null | https://raw.githubusercontent.com/gebi/jungerl/8f5c102295dbe903f47d79fd64714b7de17026ec/lib/ssh/src/ssh_rsa.erl | erlang | File : ssh_rsa.erl
[Public, Mb, Sb]),
decrypt1, M = message representative
M messaage
LLen = size(L),
Must use speical xxNull types here!
Mask generating function MGF1 | Author : < >
Description : RSA encrypt / decrypt / verify / sign ( rfc 3447 )
Created : 30 Aug 2004 by < >
-module(ssh_rsa).
-vsn("$Revision$ ").
-rcsid("$Id$\n").
-compile(export_all).
-export([verify/3]).
-include("../include/ssh.hrl").
-include("PKCS-1.hrl").
-define(MGF(Seed,Len), mgf1((Seed),(Len))).
-define(HASH(X), crypto:sha((X))).
-define(HLen, 20).
start() ->
crypto:start().
sign_file(File) ->
start(),
{ok,Bin} = file:read_file(File),
{ok,Key} = ssh_file:private_host_rsa_key(user),
sign(Key, Bin).
verify_file(File, Sig) ->
start(),
{ok,Bin} = file:read_file(File),
{ok,Key} = ssh_file:public_host_rsa_key(user),
verify(Key, Bin, Sig).
sign(Private,Mb) ->
rsassa_pkcs1_v1_5_sign(Private,Mb).
verify(Public,Mb,Sb) ->
io : format("verify : Key=~p\n Message=~p\n Signature=~p\n " ,
rsassa_pkcs1_v1_5_verify(Public,Mb,Sb).
Integer to octet string
i2osp(X, XLen) ->
ssh_bits:i2bin(X, XLen).
string to Integer
os2ip(X) ->
ssh_bits:bin2i(X).
rsaep(#ssh_key { public={N,E}}, M) ->
?ssh_assert(M >= 0 andalso M =< N-1, out_of_range),
ssh_math:ipow(M, E, N).
encrypt1 , C = cipher representative
rsadp(#ssh_key { public={N,_}, private={_,D}}, C) ->
?ssh_assert(C >= 0 andalso C =< N-1, out_of_range),
ssh_math:ipow(C, D, N).
, M = message representative
rsasp1(#ssh_key { public={N,_}, private={_,D}}, M) ->
?ssh_assert((M >= 0 andalso M =< N-1), out_of_range),
ssh_math:ipow(M, D, N).
verify1 , S = signature representative
rsavp1(#ssh_key { public={N,E}}, S) ->
?ssh_assert(S >= 0 andalso S =< N-1, out_of_range),
ssh_math:ipow(S, E, N).
rsaes_oaep_encrypt(Public, M) ->
rsaes_oaep_encrypt(Public, M, <<>>).
rsaes_oaep_encrypt(Public=#ssh_key { public={N,_E}}, M, L) ->
?ssh_assert(size(L) =< 16#ffffffffffffffff, label_to_long),
K = (ssh_bits:isize(N)+7) div 8,
MLen = size(M),
?ssh_assert(MLen =< K - 2*?HLen - 2, message_to_long),
LHash = ?HASH(L),
PS = ssh_bits:fill_bits(K - MLen - 2*?HLen - 2, 0),
DB = <<LHash/binary, PS/binary, 16#01, M/binary>>,
Seed = ssh_bits:random(?HLen),
DbMask = ?MGF(Seed, K - ?HLen - 1),
MaskedDB = ssh_bits:xor_bits(DB, DbMask),
SeedMask = ?MGF(MaskedDB, ?HLen),
MaskedSeed = ssh_bits:xor_bits(Seed, SeedMask),
EM = <<16#00, MaskedSeed/binary, MaskedDB/binary>>,
Mc = os2ip(EM),
C = rsaep(Public, Mc),
i2osp(C, K).
rsaes_oaep_decrypt(Key, C) ->
rsaes_oaep_decrypt(Key, C, <<>>).
rsaes_oaep_decrypt(Private=#ssh_key { public={N,_},private={_,_D}},Cb,L) ->
?ssh_assert(size(L) =< 16#ffffffffffffffff, label_to_long),
K = (ssh_bits:isize(N)+7) div 8,
?ssh_assert(K == 2*?HLen + 2, decryption_error),
C = os2ip(Cb),
M = rsadp(Private, C),
EM = i2osp(M, K),
LHash = ?HASH(L),
MLen = K - ?HLen -1,
case EM of
<<16#00, MaskedSeed:?HLen/binary, MaskedDB:MLen>> ->
SeedMask = ?MGF(MaskedDB, ?HLen),
Seed = ssh_bits:xor_bits(MaskedSeed, SeedMask),
DbMask = ?MGF(Seed, K - ?HLen - 1),
DB = ssh_bits:xor_bits(MaskedDB, DbMask),
PSLen = K - MLen - 2*?HLen - 2,
case DB of
<<LHash:?HLen, _PS:PSLen/binary, 16#01, M/binary>> ->
M;
_ ->
exit(decryption_error)
end;
_ ->
exit(decryption_error)
end.
rsaes_pkcs1_v1_5_encrypt(Public=#ssh_key { public={N,_E}}, M) ->
K = (ssh_bits:isize(N)+7) div 8,
MLen = size(M),
?ssh_assert(MLen =< K - 11, message_to_long),
PS = ssh_bits:random(K - MLen - 3),
EM = <<16#00,16#02,PS/binary,16#00,M/binary>>,
Mc = os2ip(EM),
C = rsaep(Public, Mc),
i2osp(C, K).
rsaes_pkcs1_v1_5_decrypt(Private=#ssh_key { public={N,_},private={_,_D}},
Cb) ->
K = (ssh_bits:isize(N)+7) div 8,
CLen = size(Cb),
?ssh_assert(CLen == K andalso K >= 11, decryption_error),
C = os2ip(Cb),
M = rsadp(Private, C),
EM = i2osp(M, K),
PSLen = K - CLen - 3,
case EM of
<<16#00, 16#02, _PS:PSLen/binary, 16#00, M>> ->
M;
_ ->
exit(decryption_error)
end.
rsassa_pss_sign(Private=#ssh_key { public={N,_},private={_,_D}},Mb) ->
ModBits = ssh_bits:isize(N),
K = (ModBits+7) div 8,
EM = emsa_pss_encode(Mb, ModBits - 1),
M = os2ip(EM),
S = rsasp1(Private, M),
i2osp(S, K).
rsassa_pss_verify(Public=#ssh_key { public={N,_E}},Mb,Sb) ->
ModBits = ssh_bits:isize(N),
K = (ModBits+7) div 8,
?ssh_assert(size(Sb) == K, invalid_signature),
S = os2ip(Sb),
M = rsavp1(Public,S),
EMLen = (ModBits-1+7) div 8,
EM = i2osp(M, EMLen),
emsa_pss_verify(Mb, EM, ModBits-1).
rsassa_pkcs1_v1_5_sign(Private=#ssh_key { public={N,_},private={_,_D}},Mb) ->
K = (ssh_bits:isize(N)+7) div 8,
EM = emsa_pkcs1_v1_5_encode(Mb, K),
M = os2ip(EM),
S = rsasp1(Private, M),
i2osp(S, K).
rsassa_pkcs1_v1_5_verify(Public=#ssh_key { public={N,_E}}, Mb, Sb) ->
K = (ssh_bits:isize(N)+7) div 8,
?ssh_assert(size(Sb) == K, invalid_signature),
S = os2ip(Sb),
M = rsavp1(Public, S),
EM = i2osp(M, K),
case emsa_pkcs1_v1_5_encode(Mb, K) of
EM -> ok;
_ -> exit(invalid_signature)
end.
emsa_pkcs1_v1_5_encode(M, EMLen) ->
H = ?HASH(M),
Alg = #'AlgorithmNull' { algorithm = ?'id-sha1',
parameters = <<>> },
{ok,TCode} = asn1rt:encode('PKCS-1', 'DigestInfoNull',
#'DigestInfoNull' { digestAlgorithm = Alg,
digest = H }),
T = list_to_binary(TCode),
TLen = size(T),
io : format("T[~w ] = ~w\n " , [ TLen , T ] ) ,
?ssh_assert(EMLen >= TLen + 11, message_to_short),
PS = ssh_bits:fill_bits(EMLen - TLen - 3, 16#ff),
<<16#00, 16#01, PS/binary, 16#00, T/binary>>.
emsa_pss_encode(M, EMBits) ->
emsa_pss_encode(M, EMBits, 0).
emsa_pss_encode(M, EMBits, SLen) ->
?ssh_assert(size(M) =< 16#ffffffffffffffff, message_to_long),
EMLen = (EMBits + 7) div 8,
MHash = ?HASH(M),
?ssh_assert(EMLen >= ?HLen + SLen + 2, encoding_error),
Salt = ssh_bits:random(SLen),
M1 = [16#00,16#00,16#00,16#00,16#00,16#00,16#00,16#00,
MHash, Salt],
H = ?HASH(M1),
PS = ssh_bits:fill_bits(EMLen-SLen-?HLen-2, 0),
DB = <<PS/binary, 16#01, Salt/binary>>,
DbMask = ?MGF(H, EMLen - ?HLen -1),
io : format("DB=~p , DbMask=~p\n " , [ DB , DbMask ] ) ,
MaskedDB = ssh_bits:xor_bits(DB, DbMask),
io : format("MaskedDB=~p\n " , [ ] ) ,
ZLen = 8*EMLen - EMBits,
NZLen = (8 - (ZLen rem 8)) rem 8,
io : format("ZLen=~p , NZLen=~p\n " , [ ZLen , NZLen ] ) ,
<<_:ZLen, NZ:NZLen, MaskedDB1/binary>> = MaskedDB,
MaskedDB2 = <<0:ZLen, NZ:NZLen, MaskedDB1/binary>>,
<<MaskedDB2/binary, H/binary, 16#BC>>.
emsa_pss_verify(M, EM, EMBits) ->
emsa_pss_verify(M, EM, EMBits, 0).
emsa_pss_verify(M, EM, EMBits, SLen) ->
?ssh_assert(size(M) =< 16#ffffffffffffffff, message_to_long),
EMLen = (EMBits + 7) div 8,
MHash = ?HASH(M),
?ssh_assert(EMLen >= ?HLen + SLen + 2, inconsistent),
MaskLen = (EMLen - ?HLen - 1)-1,
ZLen = 8*EMLen - EMBits,
NZLen = (8 - (ZLen rem 8)) rem 8,
case EM of
<<0:ZLen,Nz:NZLen,MaskedDB1:MaskLen/binary, H:?HLen/binary, 16#BC>> ->
MaskedDB = <<0:ZLen,Nz:NZLen,MaskedDB1/binary>>,
DbMask = ?MGF(H, EMLen - ?HLen - 1),
DB = ssh_bits:xor_bits(MaskedDB, DbMask),
PSLen1 = (EMLen - SLen - ?HLen - 2) - 1,
PS = ssh_bits:fill_bits(PSLen1, 0),
case DB of
<<_:ZLen,0:NZLen,PS:PSLen1/binary,16#01,Salt:SLen/binary>> ->
M1 = [16#00,16#00,16#00,16#00,16#00,16#00,16#00,16#00,
MHash, Salt],
case ?HASH(M1) of
H -> ok;
_ -> exit(inconsistent)
end;
_ ->
exit(inconsistent)
end;
_ ->
exit(inconsistent)
end.
mgf1(MGFSeed, MaskLen) ->
T = mgf1_loop(0, ((MaskLen + ?HLen -1) div ?HLen) - 1, MGFSeed, ""),
<<R:MaskLen/binary, _/binary>> = T,
R.
mgf1_loop(Counter, N, _, T) when Counter > N ->
list_to_binary(T);
mgf1_loop(Counter, N, MGFSeed, T) ->
C = i2osp(Counter, 4),
mgf1_loop(Counter+1, N, MGFSeed, [T, ?HASH([MGFSeed, C])]).
|
28e574b9383501f8e93fee11818e07b0e44e3d8505619826806addce64e77062 | phantomics/seed | stage-controls.graph.lisp | ;;;; stage-controls.graph.lisp
(in-package #:seed.ui-spec.stage.base)
(defmacro stage-controls-graph-base (meta-symbol spec-symbol params-symbol output-symbol)
(declare (ignorable spec-symbol))
`(((eq :add-graph-node (first ,params-symbol))
(cons `(,',meta-symbol :add-node :mode (:interaction :add-graph-node))
,output-symbol))
((eq :add-graph-link (first ,params-symbol))
(cons `(,',meta-symbol :add-link :mode (:interaction :add-graph-link))
,output-symbol))
((eq :remove-graph-object (first ,params-symbol))
(cons `(,',meta-symbol :delete-object :mode (:interaction :remove-graph-object))
,output-symbol))))
| null | https://raw.githubusercontent.com/phantomics/seed/f128969c671c078543574395d6b23a1a5f2723f8/seed.ui-spec.stage-controls.graph/stage-controls.graph.lisp | lisp | stage-controls.graph.lisp |
(in-package #:seed.ui-spec.stage.base)
(defmacro stage-controls-graph-base (meta-symbol spec-symbol params-symbol output-symbol)
(declare (ignorable spec-symbol))
`(((eq :add-graph-node (first ,params-symbol))
(cons `(,',meta-symbol :add-node :mode (:interaction :add-graph-node))
,output-symbol))
((eq :add-graph-link (first ,params-symbol))
(cons `(,',meta-symbol :add-link :mode (:interaction :add-graph-link))
,output-symbol))
((eq :remove-graph-object (first ,params-symbol))
(cons `(,',meta-symbol :delete-object :mode (:interaction :remove-graph-object))
,output-symbol))))
|
024373f17c180438869d0c803781a1e036500e5f729ece8951bb2ee809e4aa4a | CompSciCabal/SMRTYPRTY | greg-6.rkt | #lang racket
(print-as-expression #f)
;(pretty-print-abbreviate-read-macros #f)
(define-syntax example
(syntax-rules ()
((_ e) (begin (newline)
(pretty-print 'e)
(displayln "==>")
(pretty-print e)))))
(define (sub* env d)
(define (loop d) (sub* env d))
(cond ((assoc d env) => (lambda (old-new) (cdr old-new)))
((pair? d) (cons (loop (car d)) (loop (cdr d))))
((vector? d) (vector-map loop d))
(else d)))
(define (var? x)
(and (symbol? x) (string-prefix? (symbol->string x) "?")))
(define (var-match env pattern input)
(define existing (assoc pattern env))
(if existing
(and (equal? (cdr existing) input) env)
(cons (cons pattern input) env)))
(define (segment-pattern? pattern)
(and (pair? pattern) (pair? (car pattern))
(assoc (car (car pattern)) segment-match-table)))
(define (segment-match env pattern input)
((cdr (assoc (car (car pattern)) segment-match-table))
env pattern input))
(define (single-pattern? pattern)
(and (pair? pattern)
(assoc (car pattern) single-match-table)))
(define (single-match env pattern input)
((cdr (assoc (car pattern) single-match-table))
env (cdr pattern) input))
(define (pat-match/env env pattern input)
(and env
(cond ((var? pattern) (var-match env pattern input))
((segment-pattern? pattern) (segment-match env pattern input))
((single-pattern? pattern) (single-match env pattern input))
((and (pair? pattern) (pair? input))
(pat-match/env (pat-match/env env (car pattern) (car input))
(cdr pattern) (cdr input)))
(else (and (equal? pattern input) env)))))
(define (pat-match pattern input) (pat-match/env '() pattern input))
(define (match-literal env quoted input)
(and (equal? (car quoted) input) env))
(define (match-is env var-and-pred input)
(define vr (car var-and-pred))
(define is? (cadr var-and-pred))
(define e2 (pat-match/env env vr input))
(and e2 ((eval is? (make-base-namespace)) input) e2))
(define (match-and env patterns input)
(and env (if (null? patterns) env
(match-and (pat-match/env env (car patterns) input)
(cdr patterns) input))))
(define (match-or env patterns input)
(and (pair? patterns)
(or (pat-match/env env (car patterns) input)
(match-or env (cdr patterns) input))))
(define (match-not env patterns input)
(and (not (match-or env patterns input)) env))
(define (segment-match-*+ env pattern input min-count)
(define seg-var (cadar pattern))
(define pat (cdr pattern))
(define binding (assoc seg-var env))
(if binding
For efficiency as mentioned in exercise 5.13 .
(and (list? binding) (<= min-count (length binding))
(let loop ((input input) (binding binding))
(cond ((null? binding) (pat-match/env env pat input))
((pair? binding) (and (pair? input)
(equal? (car binding) (car input))
(loop (cdr input) (cdr binding))))
(else #f))))
(let loop ((count min-count) (input input) (seg '()))
(cond ((= 0 count)
(let loop ((input input) (seg seg))
(define e2 (pat-match/env env pat input))
(cond ((and e2 (var-match e2 seg-var (reverse seg))))
((pair? input) (loop (cdr input) (cons (car input) seg)))
(else #f))))
((pair? input)
(loop (- count 1) (cdr input) (cons (car input) seg)))
(else #f)))))
(define (segment-match-* env pattern input)
(segment-match-*+ env pattern input 0))
(define (segment-match-+ env pattern input)
(segment-match-*+ env pattern input 1))
(define (segment-match-? env pattern input)
(define seg-var (cadar pattern))
(define pat (cdr pattern))
;; Swap these to minimize greed.
(or (pat-match/env env (cons seg-var pat) input)
(pat-match/env env pat input)))
(define (match-if env pattern input)
(and (eval `(let ,(map (lambda (kv) `(,(car kv) ,(cdr kv))) env)
,(cadar pattern))
(make-base-namespace))
(pat-match/env env (cdr pattern) input)))
(define single-match-table
`((?quote . ,match-literal)
(?is . ,match-is)
(?or . ,match-or)
(?and . ,match-and)
(?not . ,match-not)))
(define segment-match-table
`((?* . ,segment-match-*)
(?+ . ,segment-match-+)
(?? . ,segment-match-?)
(?if . ,match-if)))
(example (pat-match '(x = (?is ?n number?)) '(x = 34)))
(example (pat-match '(x = (?is ?n number?)) '(x = x)))
(example (pat-match '(?x (?or < = >) ?y) '(3 < 4)))
(example (pat-match '(x = (?and (?is ?n number?) (?is ?n odd?)))
'(x = 3)))
(example (pat-match '(x = (?and (?is ?n number?) (?is ?n odd?)))
'(x = 2)))
(example (pat-match '(x = (?and (?is ?n number?) (?is ?n odd?)))
'(x = x)))
(example (pat-match '(?x /= (?not ?x)) '(3 /= 4)))
(example (pat-match '(?x > ?y (?if (> ?x ?y))) '(4 > 3)))
(example (pat-match '(a (?* ?x) d) '(a b c d)))
(example (pat-match '(a (?* ?x) (?* ?y) d) '(a b c d)))
(example (pat-match '(a (?* ?x) (?* ?y) ?x ?y) '(a b c d (b c) (d))))
(example (pat-match '(?x ?op ?y is ?z (?if (equal? (?op ?x ?y) ?z)))
'(3 + 4 is 7)))
(example (pat-match '(?x ?op ?y (?if (?op ?x ?y)))
'(3 > 4)))
(example (pat-match '(?x ?op ?y (?if (?op ?x ?y)))
'(3 < 4)))
(define (rule-system rule-match rule-if rule-then action)
(list rule-match rule-if rule-then action))
(define (rule-based-translator system input rules)
(define rule-match (car system))
(define rule-if (cadr system))
(define rule-then (caddr system))
(define action (cadddr system))
(ormap (lambda (rule)
(define result (rule-match (rule-if rule) input))
(and result (action result (rule-then rule))))
rules))
Exercise 6.6
(example (pat-match '(?one ?two ?three) '(?one ?two ?three)))
(example (pat-match '(?one (?quote ?two) ?three) '(?one ?two ?three)))
(example (pat-match '(?one (?quote ?two) ?three) '(?one two ?three)))
TODO : use this in chapter 5 .
;(define eliza-rule-system
;(rule-system pat-match car cdr
;(lambda (env responses)
;(sub* (switch-viewpoint env)
;(random-elt responses)))))
;(define (use-eliza-rules input)
( rule - based - translator - system input eliza - rules ) )
(define debug-search? #f)
;; (set! debug-search? #t) wherever you'd like to step through search examples.
(define debug-proj #f)
(define (debug-search states)
(when debug-search? (printf ";; States: ~a\n"
(if debug-proj (map debug-proj states) states))
(read-line)))
(define (tree-search states goal? successors combine)
(debug-search states)
(cond ((null? states) #f)
((goal? (car states)) (car states))
(else (tree-search (combine (successors (car states)) (cdr states))
goal? successors combine))))
(define (prepend xs ys) (append ys xs))
(define (depth-first-search states goal? successors)
(tree-search states goal? successors append))
(define (breadth-first-search states goal? successors)
(tree-search states goal? successors prepend))
(define (binary-tree x) (list (* 2 x) (+ 1 (* 2 x))))
(define (is value) (lambda (x) (equal? x value)))
(define (is/proj value proj ?) (lambda (x) (? (proj x) value)))
( example ( depth - first - search ' ( 1 ) ( is 12 ) binary - tree ) )
(example (breadth-first-search '(1) (is 12) binary-tree))
(define (finite-binary-tree n)
(lambda (x)
(filter-not (lambda (child) (> child n)) (binary-tree x))))
(example (depth-first-search '(1) (is 12) (finite-binary-tree 15)))
(define (diff n) (lambda (x) (abs (- x n))))
Exercise 6.9
(define (merge cost ps qs)
(cond
((null? ps) qs)
((null? qs) ps)
((< (cost (car qs))
(cost (car ps))) (cons (car qs) (merge cost ps (cdr qs))))
(else (cons (car ps) (merge cost (cdr ps) qs)))))
(define (sorter cost)
(lambda (new old)
;(sort (append new old) (lambda (a b) (< (cost a) (cost b))))
(merge cost (sort new (lambda (a b) (< (cost a) (cost b)))) old)))
(define (best-first-search states goal? successors cost)
(tree-search states goal? successors (sorter cost)))
(example (best-first-search '(1) (is 12) binary-tree (diff 12)))
(define max-fixnum (- (expt 2 62) 1))
(define (price-is-right price)
(lambda (x) (if (> x price) max-fixnum (- price x))))
(example (best-first-search '(1) (is 12) binary-tree (price-is-right 12)))
(define (beam-search states goal? successors cost beam-width)
(tree-search states goal? successors
(lambda (new old)
(define sorted ((sorter cost) new old))
(if (> beam-width (length sorted))
sorted
(take sorted beam-width)))))
(example (beam-search '(1) (is 12) binary-tree (price-is-right 12) 2))
;(set! debug-search? #t)
( example ( beam - search ' ( 1 ) ( is 12 ) binary - tree ( diff 12 ) 2 ) )
(example (beam-search '(1) (is 12) binary-tree (diff 12) 3))
(define (city-name c) (car c))
(define (city-long c) (cadr c))
(define (city-lat c) (caddr c))
(define cities
;; (name longitude latitude)
'((Atlanta 84.23 33.45) (Los-Angeles 118.15 34.03)
(Boston 71.05 42.21) (Memphis 90.03 35.09)
(Chicago 87.37 41.50) (New-York 73.58 40.47)
(Denver 105.00 39.45) (Oklahoma-City 97.28 35.26)
(Eugene 123.05 44.03) (Pittsburgh 79.57 40.27)
(Flagstaff 111.41 35.13) (Quebec 71.11 46.49)
(Grand-Jct 108.37 39.05) (Reno 119.49 39.30)
(Houston 105.00 34.00) (San-Francisco 122.26 37.47)
(Indianapolis 86.10 39.46) (Tampa 82.27 27.57)
(Jacksonville 81.40 30.22) (Victoria 123.21 48.25)
(Kansas-City 94.35 39.06) (Wilmington 77.57 34.14)))
(define (neighbors city)
(filter (lambda (c) (and (not (equal? c city))
(< (air-distance c city) 1000.0)))
cities))
(define (city name) (assoc name cities))
(define (trip1 start dest)
(beam-search (list start) (is dest) neighbors
(lambda (c) (air-distance c dest)) 1))
(define earth-diameter 12765.0)
(define (air-distance c1 c2)
(define d (distance (xyz-coords c1) (xyz-coords c2)))
(* earth-diameter (asin (/ d 2))))
(define (xyz-coords city)
(define psi (deg->radians (city-lat city)))
(define phi (deg->radians (city-long city)))
(list (* (cos psi) (cos phi))
(* (cos psi) (sin phi))
(sin psi)))
(define (distance p1 p2)
(sqrt (foldl + 0 (map (lambda (a b) (expt (- a b) 2)) p1 p2))))
(define (deg->radians deg)
(define trunc (truncate deg))
(* (+ trunc (* (- deg trunc) 100/60)) pi 1/180))
(example (trip1 (city 'San-Francisco) (city 'Boston)))
(example (trip1 (city 'Boston) (city 'San-Francisco)))
(define (path state previous cost-so-far total-cost)
(vector state previous cost-so-far total-cost))
(define (path-state p) (vector-ref p 0))
(define (path-previous p) (vector-ref p 1))
(define (path-cost-so-far p) (vector-ref p 2))
(define (path-total-cost p) (vector-ref p 3))
(define (path/state state) (vector state #f 0 0))
(define (trip2 start dest beam-width)
(beam-search (list (path/state start))
(is/proj dest path-state equal?)
(path-saver neighbors air-distance
(lambda (c) (air-distance c dest)))
path-total-cost
beam-width))
(define (path-saver successors cost cost-remaining)
(lambda (old-path)
(define old-state (path-state old-path))
(map (lambda (new-state)
(define old-cost (+ (path-cost-so-far old-path)
(cost old-state new-state)))
(path new-state old-path old-cost
(+ old-cost (cost-remaining new-state))))
(successors old-state))))
(define (show-path path)
(format "#<Path to ~a cost ~a>" (path-state path)
(~r (path-total-cost path) #:precision 1)))
(define (show-city-path path)
(define names (reverse (map-path city-name path)))
(format "#<Path ~a km: ~a~a>" (path-total-cost path)
(car names)
(string-append*
(map (lambda (n) (string-append " - " (symbol->string n)))
(cdr names)))))
(define (map-path f path)
(if path (cons (f (path-state path))
(map-path f (path-previous path)))
'()))
;(set! debug-search? #t)
(set! debug-proj show-path)
(example (show-city-path (trip2 (city 'San-Francisco) (city 'Boston) 1)))
(example (show-city-path (trip2 (city 'Boston) (city 'San-Francisco) 1)))
(example (show-city-path (trip2 (city 'Boston) (city 'San-Francisco) 3)))
(set! debug-proj #f)
(define (iter-wide-search states goal? successors cost width max-width)
(when debug-search? (printf "; Width: ~a\n" width))
(unless (> width max-width)
(or (beam-search states goal? successors cost width)
(iter-wide-search states goal? successors cost
(+ width 1) max-width))))
(example (iter-wide-search '(1) (is 12) (finite-binary-tree 15) (diff 12)
1 100))
(define (adjoin x xs =?)
(if (memf (lambda (y) (=? x y)) xs)
xs
(cons x xs)))
(define (graph-search states goal? successors combine state=? old-states)
(debug-search states)
(cond ((null? states) #f)
((goal? (car states)) (car states))
(else (graph-search
(combine (new-states states successors state=? old-states)
(cdr states))
goal? successors combine state=?
(adjoin (car states) old-states state=?)))))
(define (new-states states successors state=? old-states)
(define (mem state states) (memf (lambda (s) (state=? s state)) states))
(filter-not (lambda (state) (or (mem state states) (mem state old-states)))
(successors (car states))))
(define (next2 x) (list (+ x 1) (+ x 2)))
(define (a*-search paths goal? successors cost cost-left state=? old-paths)
(debug-search (map show-path paths))
(cond ((null? paths) #f)
((goal? (path-state (car paths))) (car paths))
(else
(let* ((p (car paths)) (state (path-state p)))
(let loop0 ((states (successors state))
(paths (cdr paths))
(old-paths (insert-path p old-paths)))
(define (loop paths old-paths)
(loop0 (cdr states) paths old-paths))
(if (null? states) (a*-search paths goal? successors
cost cost-left state=? old-paths)
(let* ((state2 (car states))
(cost (+ (path-cost-so-far p) (cost state state2)))
(cost2 (cost-left state2))
(p2 (path state2 p cost (+ cost cost2))))
(cond
((find-path state2 paths state=?)
=> (lambda (old)
(if (path<? p2 old)
(loop (insert-path p2 (remove old paths))
old-paths)
(loop paths old-paths))))
((find-path state2 old-paths state=?)
=> (lambda (old)
(if (path<? p2 old)
(loop (insert-path p2 paths)
(remove old old-paths))
(loop paths old-paths))))
(else (loop (insert-path p2 paths) old-paths))))))))))
(define (find-path state paths state=?)
(findf (lambda (p) (state=? (path-state p) state)) paths))
(define (path<? p1 p2) (< (path-total-cost p1) (path-total-cost p2)))
(define (merge-paths ps qs)
(cond
((null? ps) qs)
((null? qs) ps)
((path<? (car qs) (car ps)) (cons (car qs) (merge-paths ps (cdr qs))))
(else (cons (car ps) (merge-paths (cdr ps) qs)))))
(define (insert-path p paths) (merge-paths (list p) paths))
(define (path-states p) (map-path (lambda (x) x) p))
(example (tree-search '(1) (is 6) next2 prepend))
(example (graph-search '(1) (is 6) next2 prepend equal? '()))
(example (path-states (a*-search (list (path/state 1)) (is 6) next2
(lambda (x y) 1) (diff 6) equal? '())))
;; Collect every goal state the beam search visits: the wrapped goal
;; test records each state satisfying goal? as a side effect but always
;; answers #f, so the search only terminates when the beam is
;; exhausted.  Returns the recorded solutions, most recent first.
(define (search-all start goal? successors cost beam-width)
  (define solutions '())
  (beam-search (list start)
               (lambda (x)
                 ;; side effect only: remember x, then report "not a goal"
                 (when (goal? x) (set! solutions (cons x solutions)))
                 #f)
               successors cost beam-width)
  solutions)
;; True iff every element of xs also occurs (under equal?) in ys.
;; Inherits member's truthy-but-not-#t return value for non-empty xs.
(define (subset? xs ys)
  (define (present? x) (member x ys))
  (andmap present? xs))
;; Recognize GPS action markers: non-empty lists headed by the symbol
;; 'executing.  Anything that is not a pair is rejected outright.
(define (executing? x)
  (if (pair? x)
      (eq? 'executing (car x))
      #f))
;; A STRIPS-style operator: an action description, the preconditions
;; it requires, and the facts it adds/deletes when applied.
(struct op (action preconds add-list del-list) #:prefab)
;; Global operator set consulted by applicable-ops; temporarily rebound
;; by search-gps / search-gps-backwards for the duration of a search.
(define *ops* #f)
;; Solve a GPS planning problem by beam search over world states.
;; The cost of a state is the number of actions executed so far plus
;; the number of goal conjuncts it does not yet satisfy.  *ops* is
;; rebound to ops for the search and restored afterwards (NOTE(review):
;; it is not restored if beam-search escapes with an error).  Returns
;; the list of (executing ...) markers of the plan found, or #f.
(define (search-gps ops start goal beam-width)
  (define (cost state)
    (+ (length (filter executing? state))
       (length (filter (lambda (con) (not (member con state)))
                       goal))))
  (define old-ops *ops*)
  (set! *ops* ops)
  (define result
    (beam-search (list start) (lambda (state) (subset? goal state))
                 gps-successors cost beam-width))
  (set! *ops* old-ops)
  (and result (filter executing? result)))
;; States reachable from state in one step: for each applicable op,
;; drop the op's delete-list facts from the state, then append an
;; (executing <action>) marker followed by the op's add-list facts.
(define (gps-successors state)
  (map (lambda (op)
         (append (filter-not (lambda (x) (member x (op-del-list op))) state)
                 (cons (list 'executing (op-action op)) (op-add-list op))))
       (applicable-ops state)))
;; Ops from the current *ops* set whose preconditions all hold in state.
(define (applicable-ops state)
  (filter (lambda (op) (subset? (op-preconds op) state)) *ops*))
;; Build the STRIPS operator set for a blocks world over `blocks`:
;; for every ordered pair of distinct blocks a,b we generate the moves
;; a:b->table and a:table->b, plus a:b->c for every third block c
;; distinct from both a and b.
(define (make-block-ops blocks)
  ;; The operator that moves block a from b onto c.
  (define (move-op a b c)
    (op `(move ,a from ,b to ,c)
        `((space on ,a) (space on ,c) (,a on ,b))
        (move-ons a b c)
        (move-ons a c b)))
  ;; Facts asserted when a moves from b onto c; moving off the table
  ;; frees no space, since the table always has space.
  (define (move-ons a b c)
    (if (eq? b 'table)
        `((,a on ,c))
        `((,a on ,c) (space on ,b))))
  (append*
   (map (lambda (a)
          (append*
           (map (lambda (b)
                  (if (equal? a b) '()
                      (append*
                       (cons (list (move-op a b 'table)
                                   (move-op a 'table b))
                             ;; Fix: the original wrapped this result in a
                             ;; one-argument (append ...), which is a no-op.
                             (map (lambda (c)
                                    (if (or (equal? c a)
                                            (equal? c b))
                                        '()
                                        (list (move-op a b c))))
                                  blocks)))))
                blocks)))
        blocks)))
(define start '((c on a) (a on table) (b on table) (space on c)
(space on b) (space on table)))
(example (search-gps (make-block-ops '(a b c)) start '((a on b) (b on c)) 10))
(example (search-gps (make-block-ops '(a b c)) start '((b on c) (a on b)) 10))
Exercise 6.11
;; Exercise 6.11: beam search collecting up to n goal states.  The
;; wrapped goal test decrements n and accumulates each goal found, only
;; reporting success once n solutions have been seen.  Note that set!
;; returns a (truthy) void value, so the and-chain falls through the
;; bookkeeping steps to the (= 0 n) test.  Returns the collected
;; solutions, most recent first (empty when n <= 0).
(define (beam-search/n n start goal? successors cost beam-width)
  (define solutions '())
  (when (< 0 n)
    (beam-search (list start)
                 (lambda (x)
                   (and (goal? x)
                        (set! n (- n 1))
                        (set! solutions (cons x solutions))
                        (= 0 n)
                        x))
                 successors cost beam-width))
  solutions)
(example (beam-search/n 1 1 (is 12) binary-tree (price-is-right 12) 2))
( example ( beam - search / n 2 1 ( is 12 ) binary - tree ( price - is - right 12 ) 2 ) )
(example (beam-search/n 5 1 (is 6) next2 (price-is-right 6) 20))
Exercise 6.15
;; Exercise 6.15: plan by searching backwards from the goal description
;; towards the start state.  The cost estimates distance from start:
;; start conjuncts missing from the state, plus the count of state
;; elements (including action markers) not shared with start.  Success
;; is declared when the non-action facts of the state and the start
;; state contain each other.  *ops* is rebound for the duration of the
;; search and restored afterwards.  Returns the plan's (executing ...)
;; markers, or #f on failure.
(define (search-gps-backwards ops start goal beam-width)
  (define (cost state)
    (+ (length (filter (lambda (con) (not (member con state))) start))
       (- (length state)
          (length (filter (lambda (con) (member con state)) start)))))
  (define old-ops *ops*)
  (set! *ops* ops)
  (define result
    (beam-search (list goal)
                 (lambda (state)
                   (define non-actions (filter-not executing? state))
                   (and (subset? start non-actions)
                        (subset? non-actions start)))
                 gps-backwards-successors cost beam-width))
  (set! *ops* old-ops)
  (and result (filter executing? result)))
;; Regress a state through each backwards-applicable op: record the
;; (executing <action>) marker, remove the op's add-list facts (they
;; are now explained by the op), and append the op's delete-list facts
;; together with any preconditions not already among them (the foldl
;; deduplicates preconditions against the delete-list accumulator).
(define (gps-backwards-successors state)
  (map (lambda (op)
         (append (cons (list 'executing (op-action op))
                       (filter-not (lambda (x) (member x (op-add-list op)))
                                   state))
                 (foldl (lambda (p acc) (if (member p acc) acc (cons p acc)))
                        (op-del-list op) (op-preconds op))))
       (applicable-ops-backwards state)))
;; Ops that could have produced this state: none of their delete-list
;; facts may still appear in the state.
(define (applicable-ops-backwards state)
  (filter (lambda (op)
            (null? (filter (lambda (x) (member x state)) (op-del-list op))))
          *ops*))
(example (search-gps-backwards
(make-block-ops '(a b c)) start '((a on b) (b on c)) 10))
(example (search-gps-backwards
(make-block-ops '(a b c)) start '((b on c) (a on b)) 10))
| null | https://raw.githubusercontent.com/CompSciCabal/SMRTYPRTY/4a5550789c997c20fb7256b81469de1f1fce3514/paip/gregr/greg-6.rkt | racket | (pretty-print-abbreviate-read-macros #f)
Swap these to minimize greed.
(define eliza-rule-system
(rule-system pat-match car cdr
(lambda (env responses)
(sub* (switch-viewpoint env)
(random-elt responses)))))
(define (use-eliza-rules input)
(set! debug-search? #t) wherever you'd like to step through search examples.
(sort (append new old) (lambda (a b) (< (cost a) (cost b))))
(set! debug-search? #t)
(name longitude latitude)
(set! debug-search? #t) | #lang racket
(print-as-expression #f)
(define-syntax example
(syntax-rules ()
((_ e) (begin (newline)
(pretty-print 'e)
(displayln "==>")
(pretty-print e)))))
(define (sub* env d)
(define (loop d) (sub* env d))
(cond ((assoc d env) => (lambda (old-new) (cdr old-new)))
((pair? d) (cons (loop (car d)) (loop (cdr d))))
((vector? d) (vector-map loop d))
(else d)))
(define (var? x)
(and (symbol? x) (string-prefix? (symbol->string x) "?")))
(define (var-match env pattern input)
(define existing (assoc pattern env))
(if existing
(and (equal? (cdr existing) input) env)
(cons (cons pattern input) env)))
(define (segment-pattern? pattern)
(and (pair? pattern) (pair? (car pattern))
(assoc (car (car pattern)) segment-match-table)))
(define (segment-match env pattern input)
((cdr (assoc (car (car pattern)) segment-match-table))
env pattern input))
(define (single-pattern? pattern)
(and (pair? pattern)
(assoc (car pattern) single-match-table)))
(define (single-match env pattern input)
((cdr (assoc (car pattern) single-match-table))
env (cdr pattern) input))
(define (pat-match/env env pattern input)
(and env
(cond ((var? pattern) (var-match env pattern input))
((segment-pattern? pattern) (segment-match env pattern input))
((single-pattern? pattern) (single-match env pattern input))
((and (pair? pattern) (pair? input))
(pat-match/env (pat-match/env env (car pattern) (car input))
(cdr pattern) (cdr input)))
(else (and (equal? pattern input) env)))))
(define (pat-match pattern input) (pat-match/env '() pattern input))
(define (match-literal env quoted input)
(and (equal? (car quoted) input) env))
(define (match-is env var-and-pred input)
(define vr (car var-and-pred))
(define is? (cadr var-and-pred))
(define e2 (pat-match/env env vr input))
(and e2 ((eval is? (make-base-namespace)) input) e2))
(define (match-and env patterns input)
(and env (if (null? patterns) env
(match-and (pat-match/env env (car patterns) input)
(cdr patterns) input))))
(define (match-or env patterns input)
(and (pair? patterns)
(or (pat-match/env env (car patterns) input)
(match-or env (cdr patterns) input))))
(define (match-not env patterns input)
(and (not (match-or env patterns input)) env))
(define (segment-match-*+ env pattern input min-count)
(define seg-var (cadar pattern))
(define pat (cdr pattern))
(define binding (assoc seg-var env))
(if binding
For efficiency as mentioned in exercise 5.13 .
(and (list? binding) (<= min-count (length binding))
(let loop ((input input) (binding binding))
(cond ((null? binding) (pat-match/env env pat input))
((pair? binding) (and (pair? input)
(equal? (car binding) (car input))
(loop (cdr input) (cdr binding))))
(else #f))))
(let loop ((count min-count) (input input) (seg '()))
(cond ((= 0 count)
(let loop ((input input) (seg seg))
(define e2 (pat-match/env env pat input))
(cond ((and e2 (var-match e2 seg-var (reverse seg))))
((pair? input) (loop (cdr input) (cons (car input) seg)))
(else #f))))
((pair? input)
(loop (- count 1) (cdr input) (cons (car input) seg)))
(else #f)))))
(define (segment-match-* env pattern input)
(segment-match-*+ env pattern input 0))
(define (segment-match-+ env pattern input)
(segment-match-*+ env pattern input 1))
(define (segment-match-? env pattern input)
(define seg-var (cadar pattern))
(define pat (cdr pattern))
(or (pat-match/env env (cons seg-var pat) input)
(pat-match/env env pat input)))
(define (match-if env pattern input)
(and (eval `(let ,(map (lambda (kv) `(,(car kv) ,(cdr kv))) env)
,(cadar pattern))
(make-base-namespace))
(pat-match/env env (cdr pattern) input)))
(define single-match-table
`((?quote . ,match-literal)
(?is . ,match-is)
(?or . ,match-or)
(?and . ,match-and)
(?not . ,match-not)))
(define segment-match-table
`((?* . ,segment-match-*)
(?+ . ,segment-match-+)
(?? . ,segment-match-?)
(?if . ,match-if)))
(example (pat-match '(x = (?is ?n number?)) '(x = 34)))
(example (pat-match '(x = (?is ?n number?)) '(x = x)))
(example (pat-match '(?x (?or < = >) ?y) '(3 < 4)))
(example (pat-match '(x = (?and (?is ?n number?) (?is ?n odd?)))
'(x = 3)))
(example (pat-match '(x = (?and (?is ?n number?) (?is ?n odd?)))
'(x = 2)))
(example (pat-match '(x = (?and (?is ?n number?) (?is ?n odd?)))
'(x = x)))
(example (pat-match '(?x /= (?not ?x)) '(3 /= 4)))
(example (pat-match '(?x > ?y (?if (> ?x ?y))) '(4 > 3)))
(example (pat-match '(a (?* ?x) d) '(a b c d)))
(example (pat-match '(a (?* ?x) (?* ?y) d) '(a b c d)))
(example (pat-match '(a (?* ?x) (?* ?y) ?x ?y) '(a b c d (b c) (d))))
(example (pat-match '(?x ?op ?y is ?z (?if (equal? (?op ?x ?y) ?z)))
'(3 + 4 is 7)))
(example (pat-match '(?x ?op ?y (?if (?op ?x ?y)))
'(3 > 4)))
(example (pat-match '(?x ?op ?y (?if (?op ?x ?y)))
'(3 < 4)))
(define (rule-system rule-match rule-if rule-then action)
(list rule-match rule-if rule-then action))
(define (rule-based-translator system input rules)
(define rule-match (car system))
(define rule-if (cadr system))
(define rule-then (caddr system))
(define action (cadddr system))
(ormap (lambda (rule)
(define result (rule-match (rule-if rule) input))
(and result (action result (rule-then rule))))
rules))
Exercise 6.6
(example (pat-match '(?one ?two ?three) '(?one ?two ?three)))
(example (pat-match '(?one (?quote ?two) ?three) '(?one ?two ?three)))
(example (pat-match '(?one (?quote ?two) ?three) '(?one two ?three)))
TODO : use this in chapter 5 .
( rule - based - translator - system input eliza - rules ) )
(define debug-search? #f)
(define debug-proj #f)
(define (debug-search states)
(when debug-search? (printf ";; States: ~a\n"
(if debug-proj (map debug-proj states) states))
(read-line)))
(define (tree-search states goal? successors combine)
(debug-search states)
(cond ((null? states) #f)
((goal? (car states)) (car states))
(else (tree-search (combine (successors (car states)) (cdr states))
goal? successors combine))))
(define (prepend xs ys) (append ys xs))
(define (depth-first-search states goal? successors)
(tree-search states goal? successors append))
(define (breadth-first-search states goal? successors)
(tree-search states goal? successors prepend))
(define (binary-tree x) (list (* 2 x) (+ 1 (* 2 x))))
(define (is value) (lambda (x) (equal? x value)))
(define (is/proj value proj ?) (lambda (x) (? (proj x) value)))
( example ( depth - first - search ' ( 1 ) ( is 12 ) binary - tree ) )
(example (breadth-first-search '(1) (is 12) binary-tree))
(define (finite-binary-tree n)
(lambda (x)
(filter-not (lambda (child) (> child n)) (binary-tree x))))
(example (depth-first-search '(1) (is 12) (finite-binary-tree 15)))
(define (diff n) (lambda (x) (abs (- x n))))
Exercise 6.9
(define (merge cost ps qs)
(cond
((null? ps) qs)
((null? qs) ps)
((< (cost (car qs))
(cost (car ps))) (cons (car qs) (merge cost ps (cdr qs))))
(else (cons (car ps) (merge cost (cdr ps) qs)))))
(define (sorter cost)
(lambda (new old)
(merge cost (sort new (lambda (a b) (< (cost a) (cost b)))) old)))
(define (best-first-search states goal? successors cost)
(tree-search states goal? successors (sorter cost)))
(example (best-first-search '(1) (is 12) binary-tree (diff 12)))
(define max-fixnum (- (expt 2 62) 1))
(define (price-is-right price)
(lambda (x) (if (> x price) max-fixnum (- price x))))
(example (best-first-search '(1) (is 12) binary-tree (price-is-right 12)))
(define (beam-search states goal? successors cost beam-width)
(tree-search states goal? successors
(lambda (new old)
(define sorted ((sorter cost) new old))
(if (> beam-width (length sorted))
sorted
(take sorted beam-width)))))
(example (beam-search '(1) (is 12) binary-tree (price-is-right 12) 2))
( example ( beam - search ' ( 1 ) ( is 12 ) binary - tree ( diff 12 ) 2 ) )
(example (beam-search '(1) (is 12) binary-tree (diff 12) 3))
(define (city-name c) (car c))
(define (city-long c) (cadr c))
(define (city-lat c) (caddr c))
(define cities
'((Atlanta 84.23 33.45) (Los-Angeles 118.15 34.03)
(Boston 71.05 42.21) (Memphis 90.03 35.09)
(Chicago 87.37 41.50) (New-York 73.58 40.47)
(Denver 105.00 39.45) (Oklahoma-City 97.28 35.26)
(Eugene 123.05 44.03) (Pittsburgh 79.57 40.27)
(Flagstaff 111.41 35.13) (Quebec 71.11 46.49)
(Grand-Jct 108.37 39.05) (Reno 119.49 39.30)
(Houston 105.00 34.00) (San-Francisco 122.26 37.47)
(Indianapolis 86.10 39.46) (Tampa 82.27 27.57)
(Jacksonville 81.40 30.22) (Victoria 123.21 48.25)
(Kansas-City 94.35 39.06) (Wilmington 77.57 34.14)))
(define (neighbors city)
(filter (lambda (c) (and (not (equal? c city))
(< (air-distance c city) 1000.0)))
cities))
(define (city name) (assoc name cities))
(define (trip1 start dest)
(beam-search (list start) (is dest) neighbors
(lambda (c) (air-distance c dest)) 1))
(define earth-diameter 12765.0)
(define (air-distance c1 c2)
(define d (distance (xyz-coords c1) (xyz-coords c2)))
(* earth-diameter (asin (/ d 2))))
(define (xyz-coords city)
(define psi (deg->radians (city-lat city)))
(define phi (deg->radians (city-long city)))
(list (* (cos psi) (cos phi))
(* (cos psi) (sin phi))
(sin psi)))
(define (distance p1 p2)
(sqrt (foldl + 0 (map (lambda (a b) (expt (- a b) 2)) p1 p2))))
(define (deg->radians deg)
(define trunc (truncate deg))
(* (+ trunc (* (- deg trunc) 100/60)) pi 1/180))
(example (trip1 (city 'San-Francisco) (city 'Boston)))
(example (trip1 (city 'Boston) (city 'San-Francisco)))
(define (path state previous cost-so-far total-cost)
(vector state previous cost-so-far total-cost))
(define (path-state p) (vector-ref p 0))
(define (path-previous p) (vector-ref p 1))
(define (path-cost-so-far p) (vector-ref p 2))
(define (path-total-cost p) (vector-ref p 3))
(define (path/state state) (vector state #f 0 0))
(define (trip2 start dest beam-width)
(beam-search (list (path/state start))
(is/proj dest path-state equal?)
(path-saver neighbors air-distance
(lambda (c) (air-distance c dest)))
path-total-cost
beam-width))
(define (path-saver successors cost cost-remaining)
(lambda (old-path)
(define old-state (path-state old-path))
(map (lambda (new-state)
(define old-cost (+ (path-cost-so-far old-path)
(cost old-state new-state)))
(path new-state old-path old-cost
(+ old-cost (cost-remaining new-state))))
(successors old-state))))
(define (show-path path)
(format "#<Path to ~a cost ~a>" (path-state path)
(~r (path-total-cost path) #:precision 1)))
(define (show-city-path path)
(define names (reverse (map-path city-name path)))
(format "#<Path ~a km: ~a~a>" (path-total-cost path)
(car names)
(string-append*
(map (lambda (n) (string-append " - " (symbol->string n)))
(cdr names)))))
(define (map-path f path)
(if path (cons (f (path-state path))
(map-path f (path-previous path)))
'()))
(set! debug-proj show-path)
(example (show-city-path (trip2 (city 'San-Francisco) (city 'Boston) 1)))
(example (show-city-path (trip2 (city 'Boston) (city 'San-Francisco) 1)))
(example (show-city-path (trip2 (city 'Boston) (city 'San-Francisco) 3)))
(set! debug-proj #f)
(define (iter-wide-search states goal? successors cost width max-width)
(when debug-search? (printf "; Width: ~a\n" width))
(unless (> width max-width)
(or (beam-search states goal? successors cost width)
(iter-wide-search states goal? successors cost
(+ width 1) max-width))))
(example (iter-wide-search '(1) (is 12) (finite-binary-tree 15) (diff 12)
1 100))
(define (adjoin x xs =?)
(if (memf (lambda (y) (=? x y)) xs)
xs
(cons x xs)))
(define (graph-search states goal? successors combine state=? old-states)
(debug-search states)
(cond ((null? states) #f)
((goal? (car states)) (car states))
(else (graph-search
(combine (new-states states successors state=? old-states)
(cdr states))
goal? successors combine state=?
(adjoin (car states) old-states state=?)))))
(define (new-states states successors state=? old-states)
(define (mem state states) (memf (lambda (s) (state=? s state)) states))
(filter-not (lambda (state) (or (mem state states) (mem state old-states)))
(successors (car states))))
(define (next2 x) (list (+ x 1) (+ x 2)))
(define (a*-search paths goal? successors cost cost-left state=? old-paths)
(debug-search (map show-path paths))
(cond ((null? paths) #f)
((goal? (path-state (car paths))) (car paths))
(else
(let* ((p (car paths)) (state (path-state p)))
(let loop0 ((states (successors state))
(paths (cdr paths))
(old-paths (insert-path p old-paths)))
(define (loop paths old-paths)
(loop0 (cdr states) paths old-paths))
(if (null? states) (a*-search paths goal? successors
cost cost-left state=? old-paths)
(let* ((state2 (car states))
(cost (+ (path-cost-so-far p) (cost state state2)))
(cost2 (cost-left state2))
(p2 (path state2 p cost (+ cost cost2))))
(cond
((find-path state2 paths state=?)
=> (lambda (old)
(if (path<? p2 old)
(loop (insert-path p2 (remove old paths))
old-paths)
(loop paths old-paths))))
((find-path state2 old-paths state=?)
=> (lambda (old)
(if (path<? p2 old)
(loop (insert-path p2 paths)
(remove old old-paths))
(loop paths old-paths))))
(else (loop (insert-path p2 paths) old-paths))))))))))
(define (find-path state paths state=?)
(findf (lambda (p) (state=? (path-state p) state)) paths))
(define (path<? p1 p2) (< (path-total-cost p1) (path-total-cost p2)))
(define (merge-paths ps qs)
(cond
((null? ps) qs)
((null? qs) ps)
((path<? (car qs) (car ps)) (cons (car qs) (merge-paths ps (cdr qs))))
(else (cons (car ps) (merge-paths (cdr ps) qs)))))
(define (insert-path p paths) (merge-paths (list p) paths))
(define (path-states p) (map-path (lambda (x) x) p))
(example (tree-search '(1) (is 6) next2 prepend))
(example (graph-search '(1) (is 6) next2 prepend equal? '()))
(example (path-states (a*-search (list (path/state 1)) (is 6) next2
(lambda (x y) 1) (diff 6) equal? '())))
(define (search-all start goal? successors cost beam-width)
(define solutions '())
(beam-search (list start)
(lambda (x)
(when (goal? x) (set! solutions (cons x solutions)))
#f)
successors cost beam-width)
solutions)
(define (subset? xs ys) (andmap (lambda (x) (member x ys)) xs))
(define (executing? x) (and (pair? x) (eq? 'executing (car x))))
(struct op (action preconds add-list del-list) #:prefab)
(define *ops* #f)
(define (search-gps ops start goal beam-width)
(define (cost state)
(+ (length (filter executing? state))
(length (filter (lambda (con) (not (member con state)))
goal))))
(define old-ops *ops*)
(set! *ops* ops)
(define result
(beam-search (list start) (lambda (state) (subset? goal state))
gps-successors cost beam-width))
(set! *ops* old-ops)
(and result (filter executing? result)))
(define (gps-successors state)
(map (lambda (op)
(append (filter-not (lambda (x) (member x (op-del-list op))) state)
(cons (list 'executing (op-action op)) (op-add-list op))))
(applicable-ops state)))
(define (applicable-ops state)
(filter (lambda (op) (subset? (op-preconds op) state)) *ops*))
(define (make-block-ops blocks)
(define (move-op a b c)
(op `(move ,a from ,b to ,c)
`((space on ,a) (space on ,c) (,a on ,b))
(move-ons a b c)
(move-ons a c b)))
(define (move-ons a b c)
(if (eq? b 'table)
`((,a on ,c))
`((,a on ,c) (space on ,b))))
(append*
(map (lambda (a)
(append*
(map (lambda (b)
(if (equal? a b) '()
(append*
(cons (list (move-op a b 'table)
(move-op a 'table b))
(map (lambda (c)
(append (if (or (equal? c a)
(equal? c b))
'()
(list (move-op a b c)))))
blocks))))) blocks))) blocks)))
(define start '((c on a) (a on table) (b on table) (space on c)
(space on b) (space on table)))
(example (search-gps (make-block-ops '(a b c)) start '((a on b) (b on c)) 10))
(example (search-gps (make-block-ops '(a b c)) start '((b on c) (a on b)) 10))
Exercise 6.11
(define (beam-search/n n start goal? successors cost beam-width)
(define solutions '())
(when (< 0 n)
(beam-search (list start)
(lambda (x)
(and (goal? x)
(set! n (- n 1))
(set! solutions (cons x solutions))
(= 0 n)
x))
successors cost beam-width))
solutions)
(example (beam-search/n 1 1 (is 12) binary-tree (price-is-right 12) 2))
( example ( beam - search / n 2 1 ( is 12 ) binary - tree ( price - is - right 12 ) 2 ) )
(example (beam-search/n 5 1 (is 6) next2 (price-is-right 6) 20))
Exercise 6.15
(define (search-gps-backwards ops start goal beam-width)
(define (cost state)
(+ (length (filter (lambda (con) (not (member con state))) start))
(- (length state)
(length (filter (lambda (con) (member con state)) start)))))
(define old-ops *ops*)
(set! *ops* ops)
(define result
(beam-search (list goal)
(lambda (state)
(define non-actions (filter-not executing? state))
(and (subset? start non-actions)
(subset? non-actions start)))
gps-backwards-successors cost beam-width))
(set! *ops* old-ops)
(and result (filter executing? result)))
(define (gps-backwards-successors state)
(map (lambda (op)
(append (cons (list 'executing (op-action op))
(filter-not (lambda (x) (member x (op-add-list op)))
state))
(foldl (lambda (p acc) (if (member p acc) acc (cons p acc)))
(op-del-list op) (op-preconds op))))
(applicable-ops-backwards state)))
(define (applicable-ops-backwards state)
(filter (lambda (op)
(null? (filter (lambda (x) (member x state)) (op-del-list op))))
*ops*))
(example (search-gps-backwards
(make-block-ops '(a b c)) start '((a on b) (b on c)) 10))
(example (search-gps-backwards
(make-block-ops '(a b c)) start '((b on c) (a on b)) 10))
|
5638bc42e822e8e5b80a98a2e6c9951b89eb17c5c9f0e40fd85e447a6766b4ae | ejgallego/dfuzz | whyCore.ml | Copyright ( c ) 2013 , The Trustees of the University of Pennsylvania
Copyright ( c ) 2013 , The IMDEA Software Institute
All rights reserved .
LICENSE : 3 - clause BSD style .
See the LICENSE file for details on licensing .
Copyright (c) 2013, The IMDEA Software Institute
All rights reserved.
LICENSE: 3-clause BSD style.
See the LICENSE file for details on licensing.
*)
open Why3
Native @@ is already in ocaml 4.0
(* Right-associative application operator; shadows the native @@ that
   exists in OCaml >= 4.00 with an identical definition, so older
   compilers work too. *)
let (@@) x y = x y
(* Core file: set up the Why3 environment and configuration. *)
(* Read the Why3 config file; [None] means the default location. *)
let config : Whyconf.config = Whyconf.read_config None
(* The [main] section of the config file. *)
let main : Whyconf.main = Whyconf.get_main config
(* All the provers detected, keyed by prover, from the config file. *)
let provers : Whyconf.config_prover Whyconf.Mprover.t =
  Whyconf.get_provers config
(* Builds the proof environment from the configured [loadpath] plus the
   current directory.  On failure, report the underlying exception on
   stderr instead of the original placeholder message "foo", then raise
   Invalid_argument (kept for compatibility with existing handlers). *)
let env : Env.env =
  try Env.create_env @@ (Whyconf.loadpath main) @ ["./"]
  with e ->
    Format.eprintf "whyCore: cannot create the Why3 environment: %s@\n%!"
      (Printexc.to_string e);
    raise (Invalid_argument "WhyCore.env: environment creation failed")
| null | https://raw.githubusercontent.com/ejgallego/dfuzz/09b408e4f917cdd6945bdc20b7dd325ff80e5843/src/whyCore.ml | ocaml | Read the config file
the [main] section of the config file
all the provers detected, from the config file
builds the environment from the [loadpath] | Copyright ( c ) 2013 , The Trustees of the University of Pennsylvania
Copyright ( c ) 2013 , The IMDEA Software Institute
All rights reserved .
LICENSE : 3 - clause BSD style .
See the LICENSE file for details on licensing .
Copyright (c) 2013, The IMDEA Software Institute
All rights reserved.
LICENSE: 3-clause BSD style.
See the LICENSE file for details on licensing.
*)
open Why3
Native @@ is already in ocaml 4.0
let (@@) x y = x y
Core file , setup env and config
let config : Whyconf.config = Whyconf.read_config None
let main : Whyconf.main = Whyconf.get_main config
let provers : Whyconf.config_prover Whyconf.Mprover.t =
Whyconf.get_provers config
let env : Env.env =
try Env.create_env @@ (Whyconf.loadpath main) @ ["./"]
with _ -> Format.eprintf "foo@\n%!"; raise (Invalid_argument "foo")
|
b7212e93b3393a780fd92cb5a2c79a39768bcf3c42247ffa4b5f9eb5c83e6379 | deadpendency/deadpendency | GetCheckRunOutputRequest.hs | module Common.GitHub.GetCheckRunOutput.Model.GetCheckRunOutputRequest
( GetCheckRunOutputRequest (..),
)
where
import Common.Model.Git.GitSha
import Common.Model.Git.QualifiedRepo
-- | Parameters for looking up the check-run output that this GitHub App
-- previously published for a specific commit of a repository.
data GetCheckRunOutputRequest = GetCheckRunOutputRequest
  { -- | Identifier of the GitHub App whose check run is queried.
    _appId :: Int,
    -- | Repository (owner + name) the commit belongs to.
    _qualifiedRepo :: QualifiedRepo,
    -- | Commit the check run is attached to — presumably its head SHA;
    -- confirm against the GraphQL query that consumes this request.
    _commitSha :: GitSha
  }
  deriving stock (Eq, Show, Generic)
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/common/src/Common/GitHub/GetCheckRunOutput/Model/GetCheckRunOutputRequest.hs | haskell | module Common.GitHub.GetCheckRunOutput.Model.GetCheckRunOutputRequest
( GetCheckRunOutputRequest (..),
)
where
import Common.Model.Git.GitSha
import Common.Model.Git.QualifiedRepo
data GetCheckRunOutputRequest = GetCheckRunOutputRequest
{ _appId :: Int,
_qualifiedRepo :: QualifiedRepo,
_commitSha :: GitSha
}
deriving stock (Eq, Show, Generic)
| |
1e85b30f3d6a85ad9e779427466bde69dccb7f78b0051e6739ade5a54b3ba4e7 | OCamlPro/OCamlPro-OCaml-Branch | odoc_merge.ml | (***********************************************************************)
(* OCamldoc *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
* Merge of information from [ .ml ] and [ .mli ] for a module .
open Odoc_types
module Name = Odoc_name
open Odoc_parameter
open Odoc_value
open Odoc_type
open Odoc_exception
open Odoc_class
open Odoc_module
(** Merge the [@before] tags of an info structure: tags sharing the
    same version are fused into one tag whose text is the concatenation
    (space-separated) of every text given for that version.  The order
    of first occurrence of each version is preserved. *)
let merge_before_tags l =
  let rec iter acc = function
      [] -> List.rev acc
    | (v, text) :: q ->
        (* split the rest into tags for the same version v / the others *)
        let (l1, l2) = List.partition
            (fun (v2,_) -> v = v2) q
        in
        let acc =
          let text =
            (* append every duplicate's text, separated by a space *)
            List.fold_left
              (fun acc t -> acc @ [Raw " "] @ t)
              text (List.map snd l1)
          in
          (v, text) :: acc
        in
        iter acc l2
  in
  iter [] l
;;
* Merge two Odoctypes.info struture , completing the information of
the first one with the information in the second one .
The merge treatment depends on a given merge_option list .
@return the new info structure .
the first one with the information in the second one.
The merge treatment depends on a given merge_option list.
@return the new info structure.*)
(** Merge two [Odoc_types.info] structures, completing the information
    of [m1] (from the .mli) with the information of [m2] (from the
    .ml).  Each field is merged only when the corresponding option is
    present in [merge_options]; otherwise the field of [m1] wins.
    @return the new info structure. *)
let merge_info merge_options (m1 : info) (m2 : info) =
  (* descriptions: concatenated, separated by a newline *)
  let new_desc_opt =
    match m1.i_desc, m2.i_desc with
      None, None -> None
    | None, Some d
    | Some d, None -> Some d
    | Some d1, Some d2 ->
        if List.mem Merge_description merge_options then
          Some (d1 @ (Newline :: d2))
        else
          Some d1
  in
  (* author lists: concatenated *)
  let new_authors =
    match m1.i_authors, m2.i_authors with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_author merge_options then
          l1 @ l2
        else
          l1
  in
  (* version strings: concatenated with a space *)
  let new_version =
    match m1.i_version , m2.i_version with
      None, None -> None
    | Some v, None
    | None, Some v -> Some v
    | Some v1, Some v2 ->
        if List.mem Merge_version merge_options then
          Some (v1^" "^v2)
        else
          Some v1
  in
  (* @see references: concatenated *)
  let new_sees =
    match m1.i_sees, m2.i_sees with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_see merge_options then
          l1 @ l2
        else
          l1
  in
  (* @since: concatenated with a space *)
  let new_since =
    match m1.i_since, m2.i_since with
      None, None -> None
    | Some v, None
    | None, Some v -> Some v
    | Some v1, Some v2 ->
        if List.mem Merge_since merge_options then
          Some (v1^" "^v2)
        else
          Some v1
  in
  (* @before: always merged, per version (see merge_before_tags) *)
  let new_before = merge_before_tags (m1.i_before @ m2.i_before) in
  (* @deprecated: texts concatenated, separated by a newline *)
  let new_dep =
    match m1.i_deprecated, m2.i_deprecated with
      None, None -> None
    | None, Some t
    | Some t, None -> Some t
    | Some t1, Some t2 ->
        if List.mem Merge_deprecated merge_options then
          Some (t1 @ (Newline :: t2))
        else
          Some t1
  in
  (* @param: descriptions of parameters present in both are
     concatenated; parameters only in m2 are appended.
     NOTE(review): when Merge_param is set, parameters documented only
     in m1 are dropped here — verify this is intended. *)
  let new_params =
    match m1.i_params, m2.i_params with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_param merge_options then
          (
           let l_in_m1_and_m2, l_in_m2_only = List.partition
               (fun (param2, _) -> List.mem_assoc param2 l1)
               l2
           in
           let rec iter = function
               [] -> []
             | (param2, desc2) :: q ->
                 let desc1 = List.assoc param2 l1 in
                 (param2, desc1 @ (Newline :: desc2)) :: (iter q)
           in
           let l1_completed = iter l_in_m1_and_m2 in
           l1_completed @ l_in_m2_only
          )
        else
          l1
  in
  (* @raise: same scheme as @param, keyed by exception name *)
  let new_raised_exceptions =
    match m1.i_raised_exceptions, m2.i_raised_exceptions with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_raised_exception merge_options then
          (
           let l_in_m1_and_m2, l_in_m2_only = List.partition
               (fun (exc2, _) -> List.mem_assoc exc2 l1)
               l2
           in
           let rec iter = function
               [] -> []
             | (exc2, desc2) :: q ->
                 let desc1 = List.assoc exc2 l1 in
                 (exc2, desc1 @ (Newline :: desc2)) :: (iter q)
           in
           let l1_completed = iter l_in_m1_and_m2 in
           l1_completed @ l_in_m2_only
          )
        else
          l1
  in
  (* @return: texts concatenated, separated by a newline *)
  let new_rv =
    match m1.i_return_value, m2.i_return_value with
      None, None -> None
    | None, Some t
    | Some t, None -> Some t
    | Some t1, Some t2 ->
        if List.mem Merge_return_value merge_options then
          Some (t1 @ (Newline :: t2))
        else
          Some t1
  in
  (* custom tags: concatenated *)
  let new_custom =
    match m1.i_custom, m2.i_custom with
      [], [] -> []
    | [], l
    | l, [] -> l
    | l1, l2 ->
        if List.mem Merge_custom merge_options then
          l1 @ l2
        else
          l1
  in
  {
    Odoc_types.i_desc = new_desc_opt ;
    Odoc_types.i_authors = new_authors ;
    Odoc_types.i_version = new_version ;
    Odoc_types.i_sees = new_sees ;
    Odoc_types.i_since = new_since ;
    Odoc_types.i_before = new_before ;
    Odoc_types.i_deprecated = new_dep ;
    Odoc_types.i_params = new_params ;
    Odoc_types.i_raised_exceptions = new_raised_exceptions ;
    Odoc_types.i_return_value = new_rv ;
    Odoc_types.i_custom = new_custom ;
  }
* Merge of two optional info structures .
(** Merge two optional info structures.  [None] acts as a neutral
    element; two present values are combined with [merge_info]. *)
let merge_info_opt merge_options mli_opt ml_opt =
  match mli_opt, ml_opt with
  | None, None -> None
  | (Some _ as some), None
  | None, (Some _ as some) -> some
  | Some i1, Some i2 -> Some (merge_info merge_options i1 i2)
* merge of two t_type , one for a .mli , another for the .ml .
The .mli type is completed with the information in the .ml type .
The .mli type is completed with the information in the .ml type. *)
(** Merge two [t_type], one from a .mli and one from the .ml.  The .mli
    type is completed *in place* with the information of the .ml type.
    @raise Failure when the two declarations have incompatible kinds or
    mismatched constructors/fields, unless inverse ml->mli merging was
    requested via [Odoc_global.inverse_merge_ml_mli]. *)
let merge_types merge_options mli ml =
  mli.ty_info <- merge_info_opt merge_options mli.ty_info ml.ty_info;
  (* keep the interface location, take the implementation one from ml *)
  mli.ty_loc <- { mli.ty_loc with loc_impl = ml.ty_loc.loc_impl } ;
  (* prefer the .mli code text; fall back to the .ml one *)
  mli.ty_code <- (match mli.ty_code with None -> ml.ty_code | _ -> mli.ty_code) ;
  match mli.ty_kind, ml.ty_kind with
    Type_abstract, _ ->
      ()
  | Type_variant l1, Type_variant l2 ->
      (* complete each constructor's comment with the comment of the
         same-named constructor in the implementation *)
      let f cons =
        try
          let cons2 = List.find
              (fun c2 -> c2.vc_name = cons.vc_name)
              l2
          in
          let new_desc =
            match cons.vc_text, cons2.vc_text with
              None, None -> None
            | Some d, None
            | None, Some d -> Some d
            | Some d1, Some d2 ->
                if List.mem Merge_description merge_options then
                  Some (d1 @ d2)
                else
                  Some d1
          in
          cons.vc_text <- new_desc
        with
          Not_found ->
            (* constructor missing from the implementation *)
            if !Odoc_global.inverse_merge_ml_mli then
              ()
            else
              raise (Failure (Odoc_messages.different_types mli.ty_name))
      in
      List.iter f l1
  | Type_record l1, Type_record l2 ->
      (* same completion as above, for record fields *)
      let f record =
        try
          let record2= List.find
              (fun r -> r.rf_name = record.rf_name)
              l2
          in
          let new_desc =
            match record.rf_text, record2.rf_text with
              None, None -> None
            | Some d, None
            | None, Some d -> Some d
            | Some d1, Some d2 ->
                if List.mem Merge_description merge_options then
                  Some (d1 @ d2)
                else
                  Some d1
          in
          record.rf_text <- new_desc
        with
          Not_found ->
            (* field missing from the implementation *)
            if !Odoc_global.inverse_merge_ml_mli then
              ()
            else
              raise (Failure (Odoc_messages.different_types mli.ty_name))
      in
      List.iter f l1
  | _ ->
      (* kinds differ (e.g. variant vs record) *)
      if !Odoc_global.inverse_merge_ml_mli then
        ()
      else
        raise (Failure (Odoc_messages.different_types mli.ty_name))
* Merge of two param_info , one from a .mli , one from a .ml .
The text fields are not handled but will be recreated from the
i_params field of the info structure .
Here , if a parameter in the .mli has no name , we take the one
from the .ml . When two parameters have two different forms ,
we take the one from the .mli .
The text fields are not handled but will be recreated from the
i_params field of the info structure.
Here, if a parameter in the .mli has no name, we take the one
from the .ml. When two parameters have two different forms,
we take the one from the .mli. *)
(* Complete a .mli parameter description with its .ml counterpart: a
   missing name on the .mli side is taken from the .ml; when the two
   shapes disagree, the .mli side wins.  Text fields are not handled
   here: they are rebuilt later from the i_params field of the info
   structure. *)
let rec merge_param_info pi_mli pi_ml =
  match (pi_mli, pi_ml) with
    (Simple_name sn_mli, Simple_name sn_ml) ->
      (* an unnamed interface parameter gets the implementation name *)
      if sn_mli.sn_name = "" then
        Simple_name { sn_mli with sn_name = sn_ml.sn_name }
      else
        pi_mli
  | (Simple_name _, Tuple _) ->
      pi_mli
  | (Tuple (_, t_mli), Simple_name sn_ml) ->
      (* if we're here, then the tuple in the .mli has no parameter names;
         then we take the name of the parameter of the .ml
         and the type of the .mli. *)
      Simple_name { sn_ml with sn_type = t_mli }
  | (Tuple (l_mli, t_mli), Tuple (l_ml, _)) ->
      (* if the two tuples have different lengths
         (which should not occur), we return the pi_mli,
         without further investigation. *)
      if (List.length l_mli) <> (List.length l_ml) then
        pi_mli
      else
        let new_l = List.map2 merge_param_info l_mli l_ml in
        Tuple (new_l, t_mli)
(** Merge of the parameters of two functions/methods/classes, one for
   a .mli, another for a .ml.
   The parameters in the .mli are completed by the names in the .ml. *)
(* Walk the two parameter lists in lockstep, merging element-wise with
   merge_param_info; once either list is exhausted, the remainder of
   the other list is kept unchanged. *)
let rec merge_parameters param_mli param_ml =
  match param_mli, param_ml with
  | [], rest | rest, [] -> rest
  | hd_mli :: tl_mli, hd_ml :: tl_ml ->
      merge_param_info hd_mli hd_ml :: merge_parameters tl_mli tl_ml
(** Merge of two t_class, one for a .mli, another for the .ml.
   The .mli class is completed with the information in the .ml class. *)
(* Merge of two t_class, one for a .mli, another for the .ml.  The
   .mli class [mli] is completed, in place, with the information found
   in the .ml class [ml]. *)
let merge_classes merge_options mli ml =
  mli.cl_info <- merge_info_opt merge_options mli.cl_info ml.cl_info;
  mli.cl_loc <- { mli.cl_loc with loc_impl = ml.cl_loc.loc_impl } ;
  mli.cl_parameters <- merge_parameters mli.cl_parameters ml.cl_parameters;
  (* we must reassociate comments in @param to the corresponding
     parameters because the associated comment of a parameter may have
     been changed by the merge. *)
  Odoc_class.class_update_parameters_text mli;
  (* merge values (class attributes) *)
  List.iter
    (fun a ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Class_attribute a2 ->
                  if a2.att_value.val_name = a.att_value.val_name then
                    (
                     a.att_value.val_info <- merge_info_opt merge_options
                         a.att_value.val_info a2.att_value.val_info;
                     a.att_value.val_loc <- { a.att_value.val_loc with loc_impl = a2.att_value.val_loc.loc_impl } ;
                     if !Odoc_global.keep_code then
                       a.att_value.val_code <- a2.att_value.val_code;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last attribute with this name defined in the implementation *)
            (List.rev (Odoc_class.class_elements ml))
        in
        ()
      with
        Not_found ->
          (* no matching attribute in the implementation: keep the .mli one as is *)
          ()
    )
    (Odoc_class.class_attributes mli);
  (* merge methods *)
  List.iter
    (fun m ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Class_method m2 ->
                  if m2.met_value.val_name = m.met_value.val_name then
                    (
                     m.met_value.val_info <- merge_info_opt
                         merge_options m.met_value.val_info m2.met_value.val_info;
                     m.met_value.val_loc <- { m.met_value.val_loc with loc_impl = m2.met_value.val_loc.loc_impl } ;
                     (* merge the parameter names *)
                     m.met_value.val_parameters <- (merge_parameters
                                                      m.met_value.val_parameters
                                                      m2.met_value.val_parameters) ;
                     (* we must reassociate comments in @param to the corresponding
                        parameters because the associated comment of a parameter
                        may have been changed by the merge. *)
                     Odoc_value.update_value_parameters_text m.met_value;
                     if !Odoc_global.keep_code then
                       m.met_value.val_code <- m2.met_value.val_code;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last method with this name defined in the implementation *)
            (List.rev (Odoc_class.class_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_class.class_methods mli)
(** Merge of two t_class_type, one for a .mli, another for the .ml.
   The .mli class type is completed with the information in the .ml
   class type. *)
(* Merge of two t_class_type, one for a .mli, another for the .ml.
   The .mli class type [mli] is completed, in place, with the
   information found in the .ml class type [ml]. *)
let merge_class_types merge_options mli ml =
  mli.clt_info <- merge_info_opt merge_options mli.clt_info ml.clt_info;
  mli.clt_loc <- { mli.clt_loc with loc_impl = ml.clt_loc.loc_impl } ;
  (* merge values (class attributes) *)
  List.iter
    (fun a ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Class_attribute a2 ->
                  if a2.att_value.val_name = a.att_value.val_name then
                    (
                     a.att_value.val_info <- merge_info_opt merge_options
                         a.att_value.val_info a2.att_value.val_info;
                     a.att_value.val_loc <- { a.att_value.val_loc with loc_impl = a2.att_value.val_loc.loc_impl } ;
                     if !Odoc_global.keep_code then
                       a.att_value.val_code <- a2.att_value.val_code;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last attribute with this name defined in the implementation *)
            (List.rev (Odoc_class.class_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_class.class_type_attributes mli);
  (* merge methods *)
  List.iter
    (fun m ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Class_method m2 ->
                  if m2.met_value.val_name = m.met_value.val_name then
                    (
                     m.met_value.val_info <- merge_info_opt
                         merge_options m.met_value.val_info m2.met_value.val_info;
                     m.met_value.val_loc <- { m.met_value.val_loc with loc_impl = m2.met_value.val_loc.loc_impl } ;
                     (* merge the parameter names *)
                     m.met_value.val_parameters <- (merge_parameters
                                                      m.met_value.val_parameters
                                                      m2.met_value.val_parameters) ;
                     (* we must reassociate comments in @param to the corresponding
                        parameters because the associated comment of a parameter
                        may have been changed by the merge. *)
                     Odoc_value.update_value_parameters_text m.met_value;
                     if !Odoc_global.keep_code then
                       m.met_value.val_code <- m2.met_value.val_code;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last method with this name defined in the implementation *)
            (List.rev (Odoc_class.class_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_class.class_type_methods mli)
(** Merge of two t_module_type, one for a .mli, another for the .ml.
   The .mli module type is completed with the information in the .ml
   module type. *)
(* Merge of two t_module_type, one for a .mli, another for the .ml.
   The .mli module type [mli] is completed, in place, with the
   information found in the .ml module type [ml].  Each kind of
   element (exception, type, module, module type, value, class, class
   type) listed in [mli] is matched by name against the elements of
   [ml] and merged. *)
let rec merge_module_types merge_options mli ml =
  mli.mt_info <- merge_info_opt merge_options mli.mt_info ml.mt_info;
  mli.mt_loc <- { mli.mt_loc with loc_impl = ml.mt_loc.loc_impl } ;
  (* merge exceptions *)
  List.iter
    (fun ex ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_exception ex2 ->
                  if ex2.ex_name = ex.ex_name then
                    (
                     ex.ex_info <- merge_info_opt merge_options ex.ex_info ex2.ex_info;
                     ex.ex_loc <- { ex.ex_loc with loc_impl = ex2.ex_loc.loc_impl } ;
                     ex.ex_code <- (match ex.ex_code with None -> ex2.ex_code | _ -> ex.ex_code) ;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last exception with this name defined in the implementation *)
            (List.rev (Odoc_module.module_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_type_exceptions mli);
  (* merge types *)
  List.iter
    (fun ty ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_type ty2 ->
                  if ty2.ty_name = ty.ty_name then
                    (
                     merge_types merge_options ty ty2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last type with this name defined in the implementation *)
            (List.rev (Odoc_module.module_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_type_types mli);
  (* merge submodules *)
  List.iter
    (fun m ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_module m2 ->
                  if m2.m_name = m.m_name then
                    (
                     ignore (merge_modules merge_options m m2);
                     (* merge_modules already updates [m] in place:
                        m.m_info <- merge_info_opt merge_options m.m_info m2.m_info;
                        m.m_loc <- { m.m_loc with loc_impl = m2.m_loc.loc_impl } ;
                     *)
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last module with this name defined in the implementation *)
            (List.rev (Odoc_module.module_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_type_modules mli);
  (* merge module types *)
  List.iter
    (fun m ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_module_type m2 ->
                  if m2.mt_name = m.mt_name then
                    (
                     merge_module_types merge_options m m2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last module type with this name defined in the implementation *)
            (List.rev (Odoc_module.module_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_type_module_types mli);
  (* A VOIR : merge included modules ? *)
  (* merge values *)
  List.iter
    (fun v ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_value v2 ->
                  if v2.val_name = v.val_name then
                    (
                     v.val_info <- merge_info_opt merge_options v.val_info v2.val_info ;
                     v.val_loc <- { v.val_loc with loc_impl = v2.val_loc.loc_impl } ;
                     (* in the .mli we don't know any parameters so we add the ones in the .ml *)
                     v.val_parameters <- (merge_parameters
                                            v.val_parameters
                                            v2.val_parameters) ;
                     (* we must reassociate comments in @param to the corresponding
                        parameters because the associated comment of a parameter
                        may have been changed by the merge. *)
                     Odoc_value.update_value_parameters_text v;
                     if !Odoc_global.keep_code then
                       v.val_code <- v2.val_code;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last value with this name defined in the implementation *)
            (List.rev (Odoc_module.module_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_type_values mli);
  (* merge classes *)
  List.iter
    (fun c ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_class c2 ->
                  if c2.cl_name = c.cl_name then
                    (
                     merge_classes merge_options c c2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last class with this name defined in the implementation *)
            (List.rev (Odoc_module.module_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_type_classes mli);
  (* merge class types *)
  List.iter
    (fun c ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_class_type c2 ->
                  if c2.clt_name = c.clt_name then
                    (
                     merge_class_types merge_options c c2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last class type with this name defined in the implementation *)
            (List.rev (Odoc_module.module_type_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_type_class_types mli)
(** Merge of two t_module, one for a .mli, another for the .ml.
   The .mli module is completed with the information in the .ml
   module. *)
(* Merge of two t_module, one for a .mli, another for the .ml.  The
   .mli module [mli] is completed, in place, with the information
   found in the .ml module [ml], and is returned. *)
and merge_modules merge_options mli ml =
  mli.m_info <- merge_info_opt merge_options mli.m_info ml.m_info;
  mli.m_loc <- { mli.m_loc with loc_impl = ml.m_loc.loc_impl } ;
  (* union of the top-level dependencies of the two files, without duplicates *)
  let rec remove_doubles acc = function
      [] -> acc
    | h :: q ->
        if List.mem h acc then remove_doubles acc q
        else remove_doubles (h :: acc) q
  in
  mli.m_top_deps <- remove_doubles mli.m_top_deps ml.m_top_deps ;
  (* keep the available source code (implementation and interface),
     preferring the .mli side when both are present *)
  let code =
    if !Odoc_global.keep_code then
      match mli.m_code, ml.m_code with
        Some s, _ -> Some s
      | _, Some s -> Some s
      | _ -> None
    else
      None
  in
  let code_intf =
    if !Odoc_global.keep_code then
      match mli.m_code_intf, ml.m_code_intf with
        Some s, _ -> Some s
      | _, Some s -> Some s
      | _ -> None
    else
      None
  in
  mli.m_code <- code;
  mli.m_code_intf <- code_intf;
  (* merge exceptions *)
  List.iter
    (fun ex ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_exception ex2 ->
                  if ex2.ex_name = ex.ex_name then
                    (
                     ex.ex_info <- merge_info_opt merge_options ex.ex_info ex2.ex_info;
                     (* bug fix: take the implementation location from [ex2]
                        (the .ml element); the previous code assigned
                        [ex.ex_loc.loc_impl] to itself, a no-op (compare
                        with the same loop in [merge_module_types]). *)
                     ex.ex_loc <- { ex.ex_loc with loc_impl = ex2.ex_loc.loc_impl } ;
                     ex.ex_code <- (match ex.ex_code with None -> ex2.ex_code | _ -> ex.ex_code) ;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last exception with this name defined in the implementation *)
            (List.rev (Odoc_module.module_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_exceptions mli);
  (* merge types *)
  List.iter
    (fun ty ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_type ty2 ->
                  if ty2.ty_name = ty.ty_name then
                    (
                     merge_types merge_options ty ty2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last type with this name defined in the implementation *)
            (List.rev (Odoc_module.module_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_types mli);
  (* merge submodules *)
  List.iter
    (fun m ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_module m2 ->
                  if m2.m_name = m.m_name then
                    (
                     ignore (merge_modules merge_options m m2);
                     (* merge_modules already updates [m] in place:
                        m.m_info <- merge_info_opt merge_options m.m_info m2.m_info;
                        m.m_loc <- { m.m_loc with loc_impl = m2.m_loc.loc_impl } ;
                     *)
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last module with this name defined in the implementation *)
            (List.rev (Odoc_module.module_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_modules mli);
  (* merge module types *)
  List.iter
    (fun m ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_module_type m2 ->
                  if m2.mt_name = m.mt_name then
                    (
                     merge_module_types merge_options m m2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last module type with this name defined in the implementation *)
            (List.rev (Odoc_module.module_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_module_types mli);
  (* A VOIR : merge included modules ? *)
  (* merge values *)
  List.iter
    (fun v ->
      try
        let _ = List.find
            (fun v2 ->
              if v2.val_name = v.val_name then
                (
                 v.val_info <- merge_info_opt merge_options v.val_info v2.val_info ;
                 v.val_loc <- { v.val_loc with loc_impl = v2.val_loc.loc_impl } ;
                 (* in the .mli we don't know any parameters so we add the ones in the .ml *)
                 v.val_parameters <- (merge_parameters
                                        v.val_parameters
                                        v2.val_parameters) ;
                 (* we must reassociate comments in @param to the corresponding
                    parameters because the associated comment of a parameter
                    may have been changed by the merge. *)
                 Odoc_value.update_value_parameters_text v;
                 if !Odoc_global.keep_code then
                   v.val_code <- v2.val_code;
                 true
                )
              else
                false
            )
            (* we look for the last value with this name defined in the implementation *)
            (List.rev (Odoc_module.module_values ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_values mli);
  (* merge classes *)
  List.iter
    (fun c ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_class c2 ->
                  if c2.cl_name = c.cl_name then
                    (
                     merge_classes merge_options c c2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last class with this name defined in the implementation *)
            (List.rev (Odoc_module.module_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_classes mli);
  (* merge class types *)
  List.iter
    (fun c ->
      try
        let _ = List.find
            (fun ele ->
              match ele with
                Element_class_type c2 ->
                  if c2.clt_name = c.clt_name then
                    (
                     merge_class_types merge_options c c2;
                     true
                    )
                  else
                    false
              | _ ->
                  false
            )
            (* we look for the last class type with this name defined in the implementation *)
            (List.rev (Odoc_module.module_elements ml))
        in
        ()
      with
        Not_found ->
          ()
    )
    (Odoc_module.module_class_types mli);
  mli
(* Merge a list of t_module values: each module name that appears once
   as an interface and once as an implementation is collapsed into a
   single merged module.  Raises Failure when a name appears with two
   interfaces, two implementations, or more than twice. *)
let merge merge_options modules_list =
  let rec iter = function
      [] -> []
    | m :: q ->
        (* look for another module with the same name *)
        let (l_same, l_others) = List.partition
            (fun m2 -> m.m_name = m2.m_name)
            q
        in
        match l_same with
          [] ->
            (* no other module to merge with *)
            m :: (iter l_others)
        | m2 :: [] ->
            (
             (* we can merge m with m2 if there is an implementation
                and an interface. *)
             (* [f] flips the interface flag when merging in the
                inverse (.ml into .mli) direction *)
             let f b = if !Odoc_global.inverse_merge_ml_mli then not b else b in
             match f m.m_is_interface, f m2.m_is_interface with
               true, false -> (merge_modules merge_options m m2) :: (iter l_others)
             | false, true -> (merge_modules merge_options m2 m) :: (iter l_others)
             | false, false ->
                 if !Odoc_global.inverse_merge_ml_mli then
                   (* two Module.t's for the .mli ! *)
                   raise (Failure (Odoc_messages.two_interfaces m.m_name))
                 else
                   (* two Module.t's for the .ml ! *)
                   raise (Failure (Odoc_messages.two_implementations m.m_name))
             | true, true ->
                 if !Odoc_global.inverse_merge_ml_mli then
                   (* two Module.t's for the .ml ! *)
                   raise (Failure (Odoc_messages.two_implementations m.m_name))
                 else
                   (* two Module.t's for the .mli ! *)
                   raise (Failure (Odoc_messages.two_interfaces m.m_name))
            )
        | _ ->
            (* too many Module.t ! *)
            raise (Failure (Odoc_messages.too_many_module_objects m.m_name))
  in
  iter modules_list
| null | https://raw.githubusercontent.com/OCamlPro/OCamlPro-OCaml-Branch/3a522985649389f89dac73e655d562c54f0456a5/inline-more/ocamldoc/odoc_merge.ml | ocaml | *********************************************************************
OCamldoc
*********************************************************************
we must reassociate comments in @param to the the corresponding
parameters because the associated comment of a parameter may have been changed y the merge.
merge values
we look for the last attribute with this name defined in the implementation
merge methods
merge the parameter names
we must reassociate comments in @param to the corresponding
parameters because the associated comment of a parameter may have been changed by the merge.
we look for the last method with this name defined in the implementation
merge values
we look for the last attribute with this name defined in the implementation
merge methods
we must reassociate comments in @param to the the corresponding
parameters because the associated comment of a parameter may have been changed y the merge.
we look for the last method with this name defined in the implementation
merge exceptions
we look for the last exception with this name defined in the implementation
merge types
we look for the last type with this name defined in the implementation
merge submodules
we look for the last module with this name defined in the implementation
merge module types
we look for the last module with this name defined in the implementation
A VOIR : merge included modules ?
merge values
we must reassociate comments in @param to the the corresponding
parameters because the associated comment of a parameter may have been changed y the merge.
we look for the last value with this name defined in the implementation
merge classes
we look for the last value with this name defined in the implementation
merge class types
we look for the last value with this name defined in the implementation
merge exceptions
we look for the last exception with this name defined in the implementation
merge types
we look for the last type with this name defined in the implementation
merge submodules
we look for the last module with this name defined in the implementation
merge module types
we look for the last module with this name defined in the implementation
A VOIR : merge included modules ?
merge values
we must reassociate comments in @param to the the corresponding
parameters because the associated comment of a parameter may have been changed y the merge.
we look for the last value with this name defined in the implementation
merge classes
we look for the last value with this name defined in the implementation
merge class types
we look for the last value with this name defined in the implementation
look for another module with the same name
no other module to merge with | , projet Cristal , INRIA Rocquencourt
(* Copyright 2001 Institut National de Recherche en Informatique et
   en Automatique.  All rights reserved.  This file is distributed
   under the terms of the Q Public License version 1.0. *)

(* $Id$ *)

(** Merge of information from [.ml] and [.mli] for a module. *)
open Odoc_types
module Name = Odoc_name
open Odoc_parameter
open Odoc_value
open Odoc_type
open Odoc_exception
open Odoc_class
open Odoc_module
(* Group the @before tags of [l] by version: for each distinct version
   (first-occurrence order is preserved) the texts of all the entries
   carrying that version are concatenated, separated by a single raw
   space. *)
let merge_before_tags l =
  let rec iter acc = function
    [] -> List.rev acc
  | (v, text) :: q ->
      (* split the remaining entries into those with the same version
         and the others *)
      let (l1, l2) = List.partition
          (fun (v2,_) -> v = v2) q
      in
      let acc =
        (* append the texts of the duplicates to this entry's text *)
        let text =
          List.fold_left
            (fun acc t -> acc @ [Raw " "] @ t)
            text (List.map snd l1)
        in
        (v, text) :: acc
      in
      iter acc l2
  in
  iter [] l
;;
(** Merge two Odoc_types.info structures, completing the information of
   the first one with the information in the second one.
   The merge treatment depends on a given merge_option list.
   @return the new info structure. *)
(* Merge two Odoc_types.info structures, completing the information of
   [m1] with the information of [m2].  For each field, the two sides
   are combined only when the corresponding merge option is present in
   [merge_options]; otherwise the [m1] value wins (with [m2] used only
   when [m1] has nothing).  Returns the new info structure. *)
let merge_info merge_options (m1 : info) (m2 : info) =
  (* description: concatenated with a newline when Merge_description *)
  let new_desc_opt =
    match m1.i_desc, m2.i_desc with
      None, None -> None
    | None, Some d
    | Some d, None -> Some d
    | Some d1, Some d2 ->
        if List.mem Merge_description merge_options then
          Some (d1 @ (Newline :: d2))
        else
          Some d1
  in
  (* author lists: appended when Merge_author *)
  let new_authors =
    match m1.i_authors, m2.i_authors with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_author merge_options then
          l1 @ l2
        else
          l1
  in
  (* version strings: joined with a space when Merge_version *)
  let new_version =
    match m1.i_version , m2.i_version with
      None, None -> None
    | Some v, None
    | None, Some v -> Some v
    | Some v1, Some v2 ->
        if List.mem Merge_version merge_options then
          Some (v1^" "^v2)
        else
          Some v1
  in
  (* @see references: appended when Merge_see *)
  let new_sees =
    match m1.i_sees, m2.i_sees with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_see merge_options then
          l1 @ l2
        else
          l1
  in
  (* @since strings: joined with a space when Merge_since *)
  let new_since =
    match m1.i_since, m2.i_since with
      None, None -> None
    | Some v, None
    | None, Some v -> Some v
    | Some v1, Some v2 ->
        if List.mem Merge_since merge_options then
          Some (v1^" "^v2)
        else
          Some v1
  in
  (* @before tags are always merged, grouped by version *)
  let new_before = merge_before_tags (m1.i_before @ m2.i_before) in
  (* deprecation notices: concatenated when Merge_deprecated *)
  let new_dep =
    match m1.i_deprecated, m2.i_deprecated with
      None, None -> None
    | None, Some t
    | Some t, None -> Some t
    | Some t1, Some t2 ->
        if List.mem Merge_deprecated merge_options then
          Some (t1 @ (Newline :: t2))
        else
          Some t1
  in
  (* @param descriptions, keyed by parameter name.
     NOTE(review): when Merge_param is active, parameters documented
     only in [m1] are dropped (only shared and m2-only entries are
     kept) -- confirm this is intended. *)
  let new_params =
    match m1.i_params, m2.i_params with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_param merge_options then
          (
           (* split m2's entries into those also present in m1 and the rest *)
           let l_in_m1_and_m2, l_in_m2_only = List.partition
               (fun (param2, _) -> List.mem_assoc param2 l1)
               l2
           in
           (* for shared parameters, concatenate the two descriptions *)
           let rec iter = function
               [] -> []
             | (param2, desc2) :: q ->
                 let desc1 = List.assoc param2 l1 in
                 (param2, desc1 @ (Newline :: desc2)) :: (iter q)
           in
           let l1_completed = iter l_in_m1_and_m2 in
           l1_completed @ l_in_m2_only
          )
        else
          l1
  in
  (* @raise descriptions, keyed by exception name (same remark as for
     i_params above regarding m1-only entries) *)
  let new_raised_exceptions =
    match m1.i_raised_exceptions, m2.i_raised_exceptions with
      [], [] -> []
    | l, []
    | [], l -> l
    | l1, l2 ->
        if List.mem Merge_raised_exception merge_options then
          (
           let l_in_m1_and_m2, l_in_m2_only = List.partition
               (fun (exc2, _) -> List.mem_assoc exc2 l1)
               l2
           in
           let rec iter = function
               [] -> []
             | (exc2, desc2) :: q ->
                 let desc1 = List.assoc exc2 l1 in
                 (exc2, desc1 @ (Newline :: desc2)) :: (iter q)
           in
           let l1_completed = iter l_in_m1_and_m2 in
           l1_completed @ l_in_m2_only
          )
        else
          l1
  in
  (* @return description: concatenated when Merge_return_value *)
  let new_rv =
    match m1.i_return_value, m2.i_return_value with
      None, None -> None
    | None, Some t
    | Some t, None -> Some t
    | Some t1, Some t2 ->
        if List.mem Merge_return_value merge_options then
          Some (t1 @ (Newline :: t2))
        else
          Some t1
  in
  (* custom tags: appended when Merge_custom *)
  let new_custom =
    match m1.i_custom, m2.i_custom with
      [], [] -> []
    | [], l
    | l, [] -> l
    | l1, l2 ->
        if List.mem Merge_custom merge_options then
          l1 @ l2
        else
          l1
  in
  {
    Odoc_types.i_desc = new_desc_opt ;
    Odoc_types.i_authors = new_authors ;
    Odoc_types.i_version = new_version ;
    Odoc_types.i_sees = new_sees ;
    Odoc_types.i_since = new_since ;
    Odoc_types.i_before = new_before ;
    Odoc_types.i_deprecated = new_dep ;
    Odoc_types.i_params = new_params ;
    Odoc_types.i_raised_exceptions = new_raised_exceptions ;
    Odoc_types.i_return_value = new_rv ;
    Odoc_types.i_custom = new_custom ;
  }
(** Merge of two optional info structures. *)
(* Lift merge_info to optional info structures: when only one side is
   present it is returned unchanged; when both are present they are
   merged according to [merge_options]. *)
let merge_info_opt merge_options mli_opt ml_opt =
  match mli_opt, ml_opt with
  | None, None -> None
  | Some i, None | None, Some i -> Some i
  | Some i1, Some i2 -> Some (merge_info merge_options i1 i2)
(** Merge of two t_type, one for a .mli, another for the .ml.
   The .mli type is completed with the information in the .ml type. *)
(* Merge of two t_type, one for a .mli, another for the .ml.  The .mli
   type [mli] is completed, in place, with the information found in
   the .ml type [ml].  Raises Failure when the two kinds are
   structurally incompatible, unless merging in the inverse
   (.ml into .mli) direction, where the mismatch is ignored. *)
let merge_types merge_options mli ml =
  mli.ty_info <- merge_info_opt merge_options mli.ty_info ml.ty_info;
  mli.ty_loc <- { mli.ty_loc with loc_impl = ml.ty_loc.loc_impl } ;
  mli.ty_code <- (match mli.ty_code with None -> ml.ty_code | _ -> mli.ty_code) ;
  match mli.ty_kind, ml.ty_kind with
    Type_abstract, _ ->
      ()
  | Type_variant l1, Type_variant l2 ->
      (* complete the comment of each .mli constructor with the .ml one *)
      let f cons =
        try
          let cons2 = List.find
              (fun c2 -> c2.vc_name = cons.vc_name)
              l2
          in
          let new_desc =
            match cons.vc_text, cons2.vc_text with
              None, None -> None
            | Some d, None
            | None, Some d -> Some d
            | Some d1, Some d2 ->
                if List.mem Merge_description merge_options then
                  Some (d1 @ d2)
                else
                  Some d1
          in
          cons.vc_text <- new_desc
        with
          Not_found ->
            (* constructor absent on the .ml side *)
            if !Odoc_global.inverse_merge_ml_mli then
              ()
            else
              raise (Failure (Odoc_messages.different_types mli.ty_name))
      in
      List.iter f l1
  | Type_record l1, Type_record l2 ->
      (* complete the comment of each .mli record field with the .ml one *)
      let f record =
        try
          let record2 = List.find
              (fun r -> r.rf_name = record.rf_name)
              l2
          in
          let new_desc =
            match record.rf_text, record2.rf_text with
              None, None -> None
            | Some d, None
            | None, Some d -> Some d
            | Some d1, Some d2 ->
                if List.mem Merge_description merge_options then
                  Some (d1 @ d2)
                else
                  Some d1
          in
          record.rf_text <- new_desc
        with
          Not_found ->
            (* field absent on the .ml side *)
            if !Odoc_global.inverse_merge_ml_mli then
              ()
            else
              raise (Failure (Odoc_messages.different_types mli.ty_name))
      in
      List.iter f l1
  | _ ->
      (* incompatible kinds *)
      if !Odoc_global.inverse_merge_ml_mli then
        ()
      else
        raise (Failure (Odoc_messages.different_types mli.ty_name))
(** Merge of two param_info, one from a .mli, one from a .ml.
   The text fields are not handled but will be recreated from the
   i_params field of the info structure.
   Here, if a parameter in the .mli has no name, we take the one
   from the .ml. When two parameters have two different forms,
   we take the one from the .mli. *)
(* Complete a .mli parameter description with its .ml counterpart: a
   missing name on the .mli side is taken from the .ml; when the two
   shapes disagree, the .mli side wins.  Text fields are not handled
   here: they are rebuilt later from the i_params field of the info
   structure. *)
let rec merge_param_info pi_mli pi_ml =
  match (pi_mli, pi_ml) with
    (Simple_name sn_mli, Simple_name sn_ml) ->
      (* an unnamed interface parameter gets the implementation name *)
      if sn_mli.sn_name = "" then
        Simple_name { sn_mli with sn_name = sn_ml.sn_name }
      else
        pi_mli
  | (Simple_name _, Tuple _) ->
      pi_mli
  | (Tuple (_, t_mli), Simple_name sn_ml) ->
      (* if we're here, then the tuple in the .mli has no parameter names;
         then we take the name of the parameter of the .ml
         and the type of the .mli. *)
      Simple_name { sn_ml with sn_type = t_mli }
  | (Tuple (l_mli, t_mli), Tuple (l_ml, _)) ->
      (* if the two tuples have different lengths
         (which should not occur), we return the pi_mli,
         without further investigation. *)
      if (List.length l_mli) <> (List.length l_ml) then
        pi_mli
      else
        let new_l = List.map2 merge_param_info l_mli l_ml in
        Tuple (new_l, t_mli)
(** Merge of the parameters of two functions/methods/classes, one for
   a .mli, another for a .ml.
   The parameters in the .mli are completed by the names in the .ml. *)
(* Walk the two parameter lists in lockstep, merging element-wise with
   merge_param_info; once either list is exhausted, the remainder of
   the other list is kept unchanged. *)
let rec merge_parameters param_mli param_ml =
  match param_mli, param_ml with
  | [], rest | rest, [] -> rest
  | hd_mli :: tl_mli, hd_ml :: tl_ml ->
      merge_param_info hd_mli hd_ml :: merge_parameters tl_mli tl_ml
(** Merge of two t_class, one for a .mli, another for the .ml.
   The .mli class is completed with the information in the .ml class. *)
let merge_classes merge_options mli ml =
mli.cl_info <- merge_info_opt merge_options mli.cl_info ml.cl_info;
mli.cl_loc <- { mli.cl_loc with loc_impl = ml.cl_loc.loc_impl } ;
mli.cl_parameters <- merge_parameters mli.cl_parameters ml.cl_parameters;
Odoc_class.class_update_parameters_text mli;
List.iter
(fun a ->
try
let _ = List.find
(fun ele ->
match ele with
Class_attribute a2 ->
if a2.att_value.val_name = a.att_value.val_name then
(
a.att_value.val_info <- merge_info_opt merge_options
a.att_value.val_info a2.att_value.val_info;
a.att_value.val_loc <- { a.att_value.val_loc with loc_impl = a2.att_value.val_loc.loc_impl } ;
if !Odoc_global.keep_code then
a.att_value.val_code <- a2.att_value.val_code;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_class.class_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_class.class_attributes mli);
List.iter
(fun m ->
try
let _ = List.find
(fun ele ->
match ele with
Class_method m2 ->
if m2.met_value.val_name = m.met_value.val_name then
(
m.met_value.val_info <- merge_info_opt
merge_options m.met_value.val_info m2.met_value.val_info;
m.met_value.val_loc <- { m.met_value.val_loc with loc_impl = m2.met_value.val_loc.loc_impl } ;
m.met_value.val_parameters <- (merge_parameters
m.met_value.val_parameters
m2.met_value.val_parameters) ;
Odoc_value.update_value_parameters_text m.met_value;
if !Odoc_global.keep_code then
m.met_value.val_code <- m2.met_value.val_code;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_class.class_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_class.class_methods mli)
(** Merge of two t_class_type, one for a .mli, another for the .ml.
   The .mli class type is completed with the information in the .ml
   class type. *)
let merge_class_types merge_options mli ml =
mli.clt_info <- merge_info_opt merge_options mli.clt_info ml.clt_info;
mli.clt_loc <- { mli.clt_loc with loc_impl = ml.clt_loc.loc_impl } ;
List.iter
(fun a ->
try
let _ = List.find
(fun ele ->
match ele with
Class_attribute a2 ->
if a2.att_value.val_name = a.att_value.val_name then
(
a.att_value.val_info <- merge_info_opt merge_options
a.att_value.val_info a2.att_value.val_info;
a.att_value.val_loc <- { a.att_value.val_loc with loc_impl = a2.att_value.val_loc.loc_impl } ;
if !Odoc_global.keep_code then
a.att_value.val_code <- a2.att_value.val_code;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_class.class_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_class.class_type_attributes mli);
List.iter
(fun m ->
try
let _ = List.find
(fun ele ->
match ele with
Class_method m2 ->
if m2.met_value.val_name = m.met_value.val_name then
(
m.met_value.val_info <- merge_info_opt
merge_options m.met_value.val_info m2.met_value.val_info;
m.met_value.val_loc <- { m.met_value.val_loc with loc_impl = m2.met_value.val_loc.loc_impl } ;
m.met_value.val_parameters <- (merge_parameters
m.met_value.val_parameters
m2.met_value.val_parameters) ;
Odoc_value.update_value_parameters_text m.met_value;
if !Odoc_global.keep_code then
m.met_value.val_code <- m2.met_value.val_code;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_class.class_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_class.class_type_methods mli)
(** Merge of two t_module_type, one for a .mli, another for the .ml.
   The .mli module type is completed with the information in the .ml
   module type. *)
let rec merge_module_types merge_options mli ml =
mli.mt_info <- merge_info_opt merge_options mli.mt_info ml.mt_info;
mli.mt_loc <- { mli.mt_loc with loc_impl = ml.mt_loc.loc_impl } ;
List.iter
(fun ex ->
try
let _ = List.find
(fun ele ->
match ele with
Element_exception ex2 ->
if ex2.ex_name = ex.ex_name then
(
ex.ex_info <- merge_info_opt merge_options ex.ex_info ex2.ex_info;
ex.ex_loc <- { ex.ex_loc with loc_impl = ex2.ex_loc.loc_impl } ;
ex.ex_code <- (match ex.ex_code with None -> ex2.ex_code | _ -> ex.ex_code) ;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_type_exceptions mli);
List.iter
(fun ty ->
try
let _ = List.find
(fun ele ->
match ele with
Element_type ty2 ->
if ty2.ty_name = ty.ty_name then
(
merge_types merge_options ty ty2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_type_types mli);
List.iter
(fun m ->
try
let _ = List.find
(fun ele ->
match ele with
Element_module m2 ->
if m2.m_name = m.m_name then
(
ignore (merge_modules merge_options m m2);
;
m.m_loc < - { m.m_loc with loc_impl = m2.m_loc.loc_impl } ;
m.m_info <- merge_info_opt merge_options m.m_info m2.m_info;
m.m_loc <- { m.m_loc with loc_impl = m2.m_loc.loc_impl } ;
*)
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_type_modules mli);
List.iter
(fun m ->
try
let _ = List.find
(fun ele ->
match ele with
Element_module_type m2 ->
if m2.mt_name = m.mt_name then
(
merge_module_types merge_options m m2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_type_module_types mli);
List.iter
(fun v ->
try
let _ = List.find
(fun ele ->
match ele with
Element_value v2 ->
if v2.val_name = v.val_name then
(
v.val_info <- merge_info_opt merge_options v.val_info v2.val_info ;
v.val_loc <- { v.val_loc with loc_impl = v2.val_loc.loc_impl } ;
in the .mli we do n't know any parameters so we add the ones in the .ml
v.val_parameters <- (merge_parameters
v.val_parameters
v2.val_parameters) ;
Odoc_value.update_value_parameters_text v;
if !Odoc_global.keep_code then
v.val_code <- v2.val_code;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_type_values mli);
List.iter
(fun c ->
try
let _ = List.find
(fun ele ->
match ele with
Element_class c2 ->
if c2.cl_name = c.cl_name then
(
merge_classes merge_options c c2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_type_classes mli);
List.iter
(fun c ->
try
let _ = List.find
(fun ele ->
match ele with
Element_class_type c2 ->
if c2.clt_name = c.clt_name then
(
merge_class_types merge_options c c2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_type_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_type_class_types mli)
* merge of two t_module , one for a .mli , another for the .ml .
The .mli module is completed with the information in the .ml module .
The .mli module is completed with the information in the .ml module. *)
and merge_modules merge_options mli ml =
mli.m_info <- merge_info_opt merge_options mli.m_info ml.m_info;
mli.m_loc <- { mli.m_loc with loc_impl = ml.m_loc.loc_impl } ;
let rec remove_doubles acc = function
[] -> acc
| h :: q ->
if List.mem h acc then remove_doubles acc q
else remove_doubles (h :: acc) q
in
mli.m_top_deps <- remove_doubles mli.m_top_deps ml.m_top_deps ;
let code =
if !Odoc_global.keep_code then
match mli.m_code, ml.m_code with
Some s, _ -> Some s
| _, Some s -> Some s
| _ -> None
else
None
in
let code_intf =
if !Odoc_global.keep_code then
match mli.m_code_intf, ml.m_code_intf with
Some s, _ -> Some s
| _, Some s -> Some s
| _ -> None
else
None
in
mli.m_code <- code;
mli.m_code_intf <- code_intf;
List.iter
(fun ex ->
try
let _ = List.find
(fun ele ->
match ele with
Element_exception ex2 ->
if ex2.ex_name = ex.ex_name then
(
ex.ex_info <- merge_info_opt merge_options ex.ex_info ex2.ex_info;
ex.ex_loc <- { ex.ex_loc with loc_impl = ex.ex_loc.loc_impl } ;
ex.ex_code <- (match ex.ex_code with None -> ex2.ex_code | _ -> ex.ex_code) ;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_exceptions mli);
List.iter
(fun ty ->
try
let _ = List.find
(fun ele ->
match ele with
Element_type ty2 ->
if ty2.ty_name = ty.ty_name then
(
merge_types merge_options ty ty2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_types mli);
List.iter
(fun m ->
try
let _ = List.find
(fun ele ->
match ele with
Element_module m2 ->
if m2.m_name = m.m_name then
(
ignore (merge_modules merge_options m m2);
;
m.m_loc < - { m.m_loc with loc_impl = m2.m_loc.loc_impl } ;
m.m_info <- merge_info_opt merge_options m.m_info m2.m_info;
m.m_loc <- { m.m_loc with loc_impl = m2.m_loc.loc_impl } ;
*)
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_modules mli);
List.iter
(fun m ->
try
let _ = List.find
(fun ele ->
match ele with
Element_module_type m2 ->
if m2.mt_name = m.mt_name then
(
merge_module_types merge_options m m2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_module_types mli);
List.iter
(fun v ->
try
let _ = List.find
(fun v2 ->
if v2.val_name = v.val_name then
(
v.val_info <- merge_info_opt merge_options v.val_info v2.val_info ;
v.val_loc <- { v.val_loc with loc_impl = v2.val_loc.loc_impl } ;
in the .mli we do n't know any parameters so we add the ones in the .ml
v.val_parameters <- (merge_parameters
v.val_parameters
v2.val_parameters) ;
Odoc_value.update_value_parameters_text v;
if !Odoc_global.keep_code then
v.val_code <- v2.val_code;
true
)
else
false
)
(List.rev (Odoc_module.module_values ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_values mli);
List.iter
(fun c ->
try
let _ = List.find
(fun ele ->
match ele with
Element_class c2 ->
if c2.cl_name = c.cl_name then
(
merge_classes merge_options c c2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_classes mli);
List.iter
(fun c ->
try
let _ = List.find
(fun ele ->
match ele with
Element_class_type c2 ->
if c2.clt_name = c.clt_name then
(
merge_class_types merge_options c c2;
true
)
else
false
| _ ->
false
)
(List.rev (Odoc_module.module_elements ml))
in
()
with
Not_found ->
()
)
(Odoc_module.module_class_types mli);
mli
let merge merge_options modules_list =
let rec iter = function
[] -> []
| m :: q ->
let (l_same, l_others) = List.partition
(fun m2 -> m.m_name = m2.m_name)
q
in
match l_same with
[] ->
m :: (iter l_others)
| m2 :: [] ->
(
we can merge m with if there is an implementation
and an interface .
and an interface.*)
let f b = if !Odoc_global.inverse_merge_ml_mli then not b else b in
match f m.m_is_interface, f m2.m_is_interface with
true, false -> (merge_modules merge_options m m2) :: (iter l_others)
| false, true -> (merge_modules merge_options m2 m) :: (iter l_others)
| false, false ->
if !Odoc_global.inverse_merge_ml_mli then
two Module.ts for the .mli !
raise (Failure (Odoc_messages.two_interfaces m.m_name))
else
two Module.t for the .ml !
raise (Failure (Odoc_messages.two_implementations m.m_name))
| true, true ->
if !Odoc_global.inverse_merge_ml_mli then
two Module.t for the .ml !
raise (Failure (Odoc_messages.two_implementations m.m_name))
else
two Module.ts for the .mli !
raise (Failure (Odoc_messages.two_interfaces m.m_name))
)
| _ ->
two many Module.t !
raise (Failure (Odoc_messages.too_many_module_objects m.m_name))
in
iter modules_list
|
3290cb46b6b8cd148d83d96664235e1fa016f2e1dc2dcab115372c7e76423297 | himura/twitter-conduit | Request.hs | # LANGUAGE DataKinds #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
# LANGUAGE TypeFamilies #
module Web.Twitter.Conduit.Request (
HasParam,
APIRequest (..),
) where
import Data.Aeson
import GHC.TypeLits (Symbol)
import Network.HTTP.Client.MultipartFormData
import qualified Network.HTTP.Types as HT
import Web.Twitter.Conduit.Request.Internal
-- $setup
-- >>> :set -XOverloadedStrings -XDataKinds -XTypeOperators
> > > import Control . Lens
-- >>> import Web.Twitter.Conduit.Parameters
> > > type SampleId = Integer
> > > type SampleApi = ' [ " count " ' : = Integer , " max_id " ' : = Integer ]
> > > let sampleApiRequest : : [ SampleId ] ; sampleApiRequest = APIRequest " GET " " " [ ]
-- | API request. You should use specific builder functions instead of building this directly.
--
-- For example, if there were a @SampleApi@ type and a builder function which named @sampleApiRequest@.
--
-- @
type Integer '
-- sampleApiRequest :: 'APIRequest' SampleApi [SampleId]
-- sampleApiRequest = 'APIRequest' \"GET\" \"https:\/\/api.twitter.com\/sample\/api.json\" []
type SampleApi = ' [ " count " ' : = Integer
-- , "max_id" ':= Integer
-- ]
--
-- @
--
We can obtain request params from @'APIRequest ' SampleApi [ SampleId]@ :
--
-- >>> sampleApiRequest ^. params
-- []
--
The second type parameter of the APIRequest represents the allowed parameters for the APIRequest .
For example , @sampleApiRequest@ has 2 @Integer@ parameters , that is " count " and " max_id " .
-- You can update those parameters by label lenses (@#count@ and @#max_id@ respectively)
--
> > > ( sampleApiRequest & # count ? ~ 100 & # max_id ? ~ 1234567890 ) ^. params
[ ( " max_id",PVInteger { unPVInteger = 1234567890}),("count",PVInteger { unPVInteger = 100 } ) ]
> > > ( sampleApiRequest & # count ? ~ 100 & # max_id ? ~ 1234567890 & # count .~ Nothing ) ^. params
[ ( " max_id",PVInteger { unPVInteger = 1234567890 } ) ]
data APIRequest (supports :: [Param Symbol *]) responseType
= APIRequest
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
}
| APIRequestMultipart
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _part :: [Part]
}
| APIRequestJSON
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _body :: Value
}
instance Parameters (APIRequest supports responseType) where
type SupportParameters (APIRequest supports responseType) = supports
params f (APIRequest m u pa) = APIRequest m u <$> f pa
params f (APIRequestMultipart m u pa prt) =
(\p -> APIRequestMultipart m u p prt) <$> f pa
params f (APIRequestJSON m u pa body) = (\p -> APIRequestJSON m u p body) <$> f pa
instance Show (APIRequest apiName responseType) where
show (APIRequest m u p) = "APIRequest " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestMultipart m u p _) = "APIRequestMultipart " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestJSON m u p _) = "APIRequestJSON " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
| null | https://raw.githubusercontent.com/himura/twitter-conduit/a327d7727faf2fb38f54f48ef0a61cbc5537b5d2/src/Web/Twitter/Conduit/Request.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE Rank2Types #
$setup
>>> :set -XOverloadedStrings -XDataKinds -XTypeOperators
>>> import Web.Twitter.Conduit.Parameters
| API request. You should use specific builder functions instead of building this directly.
For example, if there were a @SampleApi@ type and a builder function which named @sampleApiRequest@.
@
sampleApiRequest :: 'APIRequest' SampleApi [SampleId]
sampleApiRequest = 'APIRequest' \"GET\" \"https:\/\/api.twitter.com\/sample\/api.json\" []
, "max_id" ':= Integer
]
@
>>> sampleApiRequest ^. params
[]
You can update those parameters by label lenses (@#count@ and @#max_id@ respectively)
| # LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
module Web.Twitter.Conduit.Request (
HasParam,
APIRequest (..),
) where
import Data.Aeson
import GHC.TypeLits (Symbol)
import Network.HTTP.Client.MultipartFormData
import qualified Network.HTTP.Types as HT
import Web.Twitter.Conduit.Request.Internal
> > > import Control . Lens
> > > type SampleId = Integer
> > > type SampleApi = ' [ " count " ' : = Integer , " max_id " ' : = Integer ]
> > > let sampleApiRequest : : [ SampleId ] ; sampleApiRequest = APIRequest " GET " " " [ ]
type Integer '
type SampleApi = ' [ " count " ' : = Integer
We can obtain request params from @'APIRequest ' SampleApi [ SampleId]@ :
The second type parameter of the APIRequest represents the allowed parameters for the APIRequest .
For example , @sampleApiRequest@ has 2 @Integer@ parameters , that is " count " and " max_id " .
> > > ( sampleApiRequest & # count ? ~ 100 & # max_id ? ~ 1234567890 ) ^. params
[ ( " max_id",PVInteger { unPVInteger = 1234567890}),("count",PVInteger { unPVInteger = 100 } ) ]
> > > ( sampleApiRequest & # count ? ~ 100 & # max_id ? ~ 1234567890 & # count .~ Nothing ) ^. params
[ ( " max_id",PVInteger { unPVInteger = 1234567890 } ) ]
data APIRequest (supports :: [Param Symbol *]) responseType
= APIRequest
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
}
| APIRequestMultipart
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _part :: [Part]
}
| APIRequestJSON
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _body :: Value
}
instance Parameters (APIRequest supports responseType) where
type SupportParameters (APIRequest supports responseType) = supports
params f (APIRequest m u pa) = APIRequest m u <$> f pa
params f (APIRequestMultipart m u pa prt) =
(\p -> APIRequestMultipart m u p prt) <$> f pa
params f (APIRequestJSON m u pa body) = (\p -> APIRequestJSON m u p body) <$> f pa
instance Show (APIRequest apiName responseType) where
show (APIRequest m u p) = "APIRequest " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestMultipart m u p _) = "APIRequestMultipart " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestJSON m u p _) = "APIRequestJSON " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
|
6832eff4f24f32ed30f5e96f9bb8e96c9a3009f5de9a6ed3cb02e818f60bf5a7 | ocaml-multicore/tezos | ticket_lazy_storage_diff.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2021 , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Alpha_context
type error += Failed_to_load_big_map_value_type of Big_map.Id.t
let () =
let open Data_encoding in
register_error_kind
`Permanent
~id:"Failed_to_load_big_map_value_type"
~title:"Failed to load big-map value type"
~description:
"Failed to load big-map value type when computing ticket diffs."
~pp:(fun ppf big_map_id ->
Format.fprintf
ppf
"Failed to load big-map value type for big-map-id: '%a'"
Z.pp_print
(Big_map.Id.unparse_to_z big_map_id))
(obj1 (req "big_map_id" Big_map.Id.encoding))
(function
| Failed_to_load_big_map_value_type big_map_id -> Some big_map_id
| _ -> None)
(fun big_map_id -> Failed_to_load_big_map_value_type big_map_id)
(** Extracts the ticket-token and amount from an ex_ticket value. *)
let token_and_amount ctxt ex_ticket =
Gas.consume ctxt Ticket_costs.Constants.cost_token_and_amount_of_ticket
>|? fun ctxt ->
let (token, amount) = Ticket_token.token_and_amount_of_ex_ticket ex_ticket in
((token, Script_int.to_zint amount), ctxt)
(** Extracts the ticket-token and amount from an ex_ticket value and returns
the opposite of the amount. This is used to account for removal of tickets inside
big maps when either a ticket is taken out of a big map or a whole big map is
dropped. *)
let neg_token_and_amount ctxt ex_ticket =
token_and_amount ctxt ex_ticket >>? fun ((token, amount), ctxt) ->
Gas.consume ctxt (Ticket_costs.negate_cost amount) >|? fun ctxt ->
((token, Z.neg amount), ctxt)
let parse_value_type ctxt value_type =
Script_ir_translator.parse_big_map_value_ty
ctxt
~legacy:true
(Micheline.root value_type)
* Collects all ticket - token balances contained in the given node and prepends
them to the accumulator [ acc ] . The given [ get_token_and_amount ] function
extracts the ticket - token and amount ( either positive or negative ) from an
[ ex_ticket ] value , depending on whether the diff stems from adding or
removing a value containing tickets .
them to the accumulator [acc]. The given [get_token_and_amount] function
extracts the ticket-token and amount (either positive or negative) from an
[ex_ticket] value, depending on whether the diff stems from adding or
removing a value containing tickets. *)
let collect_token_diffs_of_node ctxt has_tickets node ~get_token_and_amount acc
=
Ticket_scanner.tickets_of_node
ctxt
(* It's currently not possible to have nested lazy structures, but this is
for future proofing. *)
~include_lazy:true
has_tickets
(Micheline.root node)
>>=? fun (ex_tickets, ctxt) ->
List.fold_left_e
(fun (acc, ctxt) ticket ->
get_token_and_amount ctxt ticket >|? fun (item, ctxt) ->
(item :: acc, ctxt))
(acc, ctxt)
ex_tickets
>>?= return
(** A module for keeping track of script-key-hashes. It's used for looking up
keys for multiple big-map updates referencing the same key.
*)
module Key_hash_map = Carbonated_map.Make (struct
type t = Script_expr_hash.t
let compare = Script_expr_hash.compare
let compare_cost _ = Ticket_costs.Constants.cost_compare_ticket_hash
end)
* Collects all ticket - token diffs from a big - map update and prepends them
to the accumulator [ acc ] .
to the accumulator [acc]. *)
let collect_token_diffs_of_big_map_update ctxt ~big_map_id has_tickets
{Lazy_storage_kind.Big_map.key = _; key_hash; value} already_updated acc =
let collect_token_diffs_of_node_option ctxt ~get_token_and_amount expr_opt acc
=
match expr_opt with
| Some expr ->
collect_token_diffs_of_node
ctxt
has_tickets
expr
~get_token_and_amount
acc
| None -> return (acc, ctxt)
in
First check if the key - hash has already been updated , in that case pull the
value from the [ already_updated ] map . Note that this should not happen with
the current implementation of big - map overlays as it guarantees that keys
are unique . The extra check is used for future proofing .
value from the [already_updated] map. Note that this should not happen with
the current implementation of big-map overlays as it guarantees that keys
are unique. The extra check is used for future proofing.
*)
( Key_hash_map.find ctxt key_hash already_updated >>?= fun (val_opt, ctxt) ->
match val_opt with
| Some updated_value -> return (updated_value, ctxt)
| None ->
(* Load tickets from the old value that was removed. *)
Big_map.get_opt ctxt big_map_id key_hash >|=? fun (ctxt, old_value) ->
(old_value, ctxt) )
>>=? fun (old_value, ctxt) ->
collect_token_diffs_of_node_option
ctxt
~get_token_and_amount:neg_token_and_amount
old_value
acc
>>=? fun (acc, ctxt) ->
Key_hash_map.update
ctxt
key_hash
(fun ctxt _ -> ok (Some value, ctxt))
already_updated
>>?= fun (already_updated, ctxt) ->
TODO : # 2303
Avoid re - parsing the value .
In order to find tickets from the new value , we need to parse it . It would
be more efficient if the value was already present .
Avoid re-parsing the value.
In order to find tickets from the new value, we need to parse it. It would
be more efficient if the value was already present.
*)
collect_token_diffs_of_node_option
ctxt
~get_token_and_amount:token_and_amount
value
acc
>|=? fun (tickets, ctxt) -> (tickets, already_updated, ctxt)
* Collects all ticket - token diffs from a list of big - map updates and prepends
them to the accumulator [ acc ] .
them to the accumulator [acc]. *)
let collect_token_diffs_of_big_map_updates ctxt big_map_id ~value_type updates
acc =
TODO : # 2303
Avoid re - parsing the value type .
We should have the non - serialized version of the value type .
Avoid re-parsing the value type.
We should have the non-serialized version of the value type.
*)
parse_value_type ctxt value_type
>>?= fun (Script_ir_translator.Ex_ty value_type, ctxt) ->
Ticket_scanner.type_has_tickets ctxt value_type
>>?= fun (has_tickets, ctxt) ->
List.fold_left_es
(fun (acc, already_updated, ctxt) update ->
collect_token_diffs_of_big_map_update
ctxt
~big_map_id
has_tickets
update
already_updated
acc)
(acc, Key_hash_map.empty, ctxt)
updates
>|=? fun (acc, _already_updated, ctxt) -> (acc, ctxt)
* Given a big - map i d , this function collects ticket - token diffs and prepends
them to the accumulator [ acc ] .
them to the accumulator [acc]. *)
let collect_token_diffs_of_big_map ctxt ~get_token_and_amount big_map_id acc =
Gas.consume ctxt Ticket_costs.Constants.cost_collect_tickets_step
>>?= fun ctxt ->
Big_map.exists ctxt big_map_id >>=? fun (ctxt, key_val_tys) ->
match key_val_tys with
| Some (_key_ty, value_ty) ->
TODO : # 2303
Avoid re - parsing the value type .
In order to find tickets from the value , we need to parse the value
type . It would be more efficient if the value preserved .
Avoid re-parsing the value type.
In order to find tickets from the value, we need to parse the value
type. It would be more efficient if the value preserved.
*)
parse_value_type ctxt value_ty
>>?= fun (Script_ir_translator.Ex_ty value_type, ctxt) ->
Ticket_scanner.type_has_tickets ctxt value_type
>>?= fun (has_tickets, ctxt) ->
(* Iterate over big-map items. *)
TODO : # 2316
Verify gas - model for [ Big_map.list_values ] .
This is to make sure that we pay sufficient gas for traversing the
values .
Verify gas-model for [Big_map.list_values].
This is to make sure that we pay sufficient gas for traversing the
values.
*)
Big_map.list_values ctxt big_map_id >>=? fun (ctxt, exprs) ->
List.fold_left_es
(fun (acc, ctxt) node ->
collect_token_diffs_of_node
ctxt
has_tickets
node
~get_token_and_amount
acc)
(acc, ctxt)
exprs
| None -> fail (Failed_to_load_big_map_value_type big_map_id)
* Collects ticket - token diffs from a big - map and a list of updates , and
prepends them to the given accumulator [ acc ] .
prepends them to the given accumulator [acc]. *)
let collect_token_diffs_of_big_map_and_updates ctxt big_map_id updates acc =
Gas.consume ctxt Ticket_costs.Constants.cost_collect_tickets_step
>>?= fun ctxt ->
Big_map.exists ctxt big_map_id >>=? fun (ctxt, key_val_opt) ->
match key_val_opt with
| Some (_val, value_type) ->
collect_token_diffs_of_big_map_updates
ctxt
big_map_id
~value_type
updates
acc
| None -> fail (Failed_to_load_big_map_value_type big_map_id)
* Inspects the given [ Lazy_storage.diffs_item ] and prepends all ticket - token
diffs , resulting from the updates , to the given accumulator [ acc ] .
diffs, resulting from the updates, to the given accumulator [acc]. *)
let collect_token_diffs_of_big_map_diff ctxt diff_item acc =
Gas.consume ctxt Ticket_costs.Constants.cost_collect_tickets_step
>>?= fun ctxt ->
match diff_item with
| Lazy_storage.Item (Lazy_storage_kind.Big_map, big_map_id, Remove) ->
(* Collect all removed tokens from the big-map. *)
collect_token_diffs_of_big_map
ctxt
~get_token_and_amount:neg_token_and_amount
big_map_id
acc
| Item (Lazy_storage_kind.Big_map, big_map_id, Update {init; updates}) -> (
match init with
| Lazy_storage.Existing ->
(* Collect token diffs from the updates to the big-map. *)
collect_token_diffs_of_big_map_and_updates ctxt big_map_id updates acc
| Copy {src} ->
(* Collect tokens diffs from the source of the copied big-map. *)
collect_token_diffs_of_big_map
ctxt
~get_token_and_amount:token_and_amount
src
acc
>>=? fun (acc, ctxt) ->
(* Collect token diffs from the updates to the copied big-map. *)
collect_token_diffs_of_big_map_and_updates ctxt src updates acc
| Alloc {key_type = _; value_type} ->
collect_token_diffs_of_big_map_updates
ctxt
big_map_id
~value_type
updates
acc)
| Item (Sapling_state, _, _) -> return (acc, ctxt)
let ticket_diffs_of_lazy_storage_diff ctxt diffs_items =
List.fold_left_es
(fun (acc, ctxt) diff_item ->
collect_token_diffs_of_big_map_diff ctxt diff_item acc)
([], ctxt)
diffs_items
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_alpha/lib_protocol/ticket_lazy_storage_diff.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* Extracts the ticket-token and amount from an ex_ticket value.
* Extracts the ticket-token and amount from an ex_ticket value and returns
the opposite of the amount. This is used to account for removal of tickets inside
big maps when either a ticket is taken out of a big map or a whole big map is
dropped.
It's currently not possible to have nested lazy structures, but this is
for future proofing.
* A module for keeping track of script-key-hashes. It's used for looking up
keys for multiple big-map updates referencing the same key.
Load tickets from the old value that was removed.
Iterate over big-map items.
Collect all removed tokens from the big-map.
Collect token diffs from the updates to the big-map.
Collect tokens diffs from the source of the copied big-map.
Collect token diffs from the updates to the copied big-map. | Copyright ( c ) 2021 , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Alpha_context
type error += Failed_to_load_big_map_value_type of Big_map.Id.t
let () =
let open Data_encoding in
register_error_kind
`Permanent
~id:"Failed_to_load_big_map_value_type"
~title:"Failed to load big-map value type"
~description:
"Failed to load big-map value type when computing ticket diffs."
~pp:(fun ppf big_map_id ->
Format.fprintf
ppf
"Failed to load big-map value type for big-map-id: '%a'"
Z.pp_print
(Big_map.Id.unparse_to_z big_map_id))
(obj1 (req "big_map_id" Big_map.Id.encoding))
(function
| Failed_to_load_big_map_value_type big_map_id -> Some big_map_id
| _ -> None)
(fun big_map_id -> Failed_to_load_big_map_value_type big_map_id)
let token_and_amount ctxt ex_ticket =
Gas.consume ctxt Ticket_costs.Constants.cost_token_and_amount_of_ticket
>|? fun ctxt ->
let (token, amount) = Ticket_token.token_and_amount_of_ex_ticket ex_ticket in
((token, Script_int.to_zint amount), ctxt)
let neg_token_and_amount ctxt ex_ticket =
token_and_amount ctxt ex_ticket >>? fun ((token, amount), ctxt) ->
Gas.consume ctxt (Ticket_costs.negate_cost amount) >|? fun ctxt ->
((token, Z.neg amount), ctxt)
let parse_value_type ctxt value_type =
Script_ir_translator.parse_big_map_value_ty
ctxt
~legacy:true
(Micheline.root value_type)
* Collects all ticket - token balances contained in the given node and prepends
them to the accumulator [ acc ] . The given [ get_token_and_amount ] function
extracts the ticket - token and amount ( either positive or negative ) from an
[ ex_ticket ] value , depending on whether the diff stems from adding or
removing a value containing tickets .
them to the accumulator [acc]. The given [get_token_and_amount] function
extracts the ticket-token and amount (either positive or negative) from an
[ex_ticket] value, depending on whether the diff stems from adding or
removing a value containing tickets. *)
let collect_token_diffs_of_node ctxt has_tickets node ~get_token_and_amount acc
=
Ticket_scanner.tickets_of_node
ctxt
~include_lazy:true
has_tickets
(Micheline.root node)
>>=? fun (ex_tickets, ctxt) ->
List.fold_left_e
(fun (acc, ctxt) ticket ->
get_token_and_amount ctxt ticket >|? fun (item, ctxt) ->
(item :: acc, ctxt))
(acc, ctxt)
ex_tickets
>>?= return
module Key_hash_map = Carbonated_map.Make (struct
type t = Script_expr_hash.t
let compare = Script_expr_hash.compare
let compare_cost _ = Ticket_costs.Constants.cost_compare_ticket_hash
end)
* Collects all ticket - token diffs from a big - map update and prepends them
to the accumulator [ acc ] .
to the accumulator [acc]. *)
let collect_token_diffs_of_big_map_update ctxt ~big_map_id has_tickets
{Lazy_storage_kind.Big_map.key = _; key_hash; value} already_updated acc =
let collect_token_diffs_of_node_option ctxt ~get_token_and_amount expr_opt acc
=
match expr_opt with
| Some expr ->
collect_token_diffs_of_node
ctxt
has_tickets
expr
~get_token_and_amount
acc
| None -> return (acc, ctxt)
in
First check if the key - hash has already been updated , in that case pull the
value from the [ already_updated ] map . Note that this should not happen with
the current implementation of big - map overlays as it guarantees that keys
are unique . The extra check is used for future proofing .
value from the [already_updated] map. Note that this should not happen with
the current implementation of big-map overlays as it guarantees that keys
are unique. The extra check is used for future proofing.
*)
( Key_hash_map.find ctxt key_hash already_updated >>?= fun (val_opt, ctxt) ->
match val_opt with
| Some updated_value -> return (updated_value, ctxt)
| None ->
Big_map.get_opt ctxt big_map_id key_hash >|=? fun (ctxt, old_value) ->
(old_value, ctxt) )
>>=? fun (old_value, ctxt) ->
collect_token_diffs_of_node_option
ctxt
~get_token_and_amount:neg_token_and_amount
old_value
acc
>>=? fun (acc, ctxt) ->
Key_hash_map.update
ctxt
key_hash
(fun ctxt _ -> ok (Some value, ctxt))
already_updated
>>?= fun (already_updated, ctxt) ->
TODO : # 2303
Avoid re - parsing the value .
In order to find tickets from the new value , we need to parse it . It would
be more efficient if the value was already present .
Avoid re-parsing the value.
In order to find tickets from the new value, we need to parse it. It would
be more efficient if the value was already present.
*)
collect_token_diffs_of_node_option
ctxt
~get_token_and_amount:token_and_amount
value
acc
>|=? fun (tickets, ctxt) -> (tickets, already_updated, ctxt)
* Collects all ticket - token diffs from a list of big - map updates and prepends
them to the accumulator [ acc ] .
them to the accumulator [acc]. *)
let collect_token_diffs_of_big_map_updates ctxt big_map_id ~value_type updates
acc =
TODO : # 2303
Avoid re - parsing the value type .
We should have the non - serialized version of the value type .
Avoid re-parsing the value type.
We should have the non-serialized version of the value type.
*)
parse_value_type ctxt value_type
>>?= fun (Script_ir_translator.Ex_ty value_type, ctxt) ->
Ticket_scanner.type_has_tickets ctxt value_type
>>?= fun (has_tickets, ctxt) ->
List.fold_left_es
(fun (acc, already_updated, ctxt) update ->
collect_token_diffs_of_big_map_update
ctxt
~big_map_id
has_tickets
update
already_updated
acc)
(acc, Key_hash_map.empty, ctxt)
updates
>|=? fun (acc, _already_updated, ctxt) -> (acc, ctxt)
* Given a big - map i d , this function collects ticket - token diffs and prepends
them to the accumulator [ acc ] .
them to the accumulator [acc]. *)
let collect_token_diffs_of_big_map ctxt ~get_token_and_amount big_map_id acc =
Gas.consume ctxt Ticket_costs.Constants.cost_collect_tickets_step
>>?= fun ctxt ->
Big_map.exists ctxt big_map_id >>=? fun (ctxt, key_val_tys) ->
match key_val_tys with
| Some (_key_ty, value_ty) ->
TODO : # 2303
Avoid re - parsing the value type .
In order to find tickets from the value , we need to parse the value
type . It would be more efficient if the value preserved .
Avoid re-parsing the value type.
In order to find tickets from the value, we need to parse the value
type. It would be more efficient if the value preserved.
*)
parse_value_type ctxt value_ty
>>?= fun (Script_ir_translator.Ex_ty value_type, ctxt) ->
Ticket_scanner.type_has_tickets ctxt value_type
>>?= fun (has_tickets, ctxt) ->
TODO : # 2316
Verify gas - model for [ Big_map.list_values ] .
This is to make sure that we pay sufficient gas for traversing the
values .
Verify gas-model for [Big_map.list_values].
This is to make sure that we pay sufficient gas for traversing the
values.
*)
Big_map.list_values ctxt big_map_id >>=? fun (ctxt, exprs) ->
List.fold_left_es
(fun (acc, ctxt) node ->
collect_token_diffs_of_node
ctxt
has_tickets
node
~get_token_and_amount
acc)
(acc, ctxt)
exprs
| None -> fail (Failed_to_load_big_map_value_type big_map_id)
* Collects ticket - token diffs from a big - map and a list of updates , and
prepends them to the given accumulator [ acc ] .
prepends them to the given accumulator [acc]. *)
let collect_token_diffs_of_big_map_and_updates ctxt big_map_id updates acc =
  (* Charge a constant gas cost for visiting this big map. *)
  Gas.consume ctxt Ticket_costs.Constants.cost_collect_tickets_step
  >>?= fun ctxt ->
  (* Look up the value type of the existing big map; [None] means the
     big map cannot be loaded and is reported as an error. *)
  Big_map.exists ctxt big_map_id >>=? fun (ctxt, key_val_opt) ->
  match key_val_opt with
  | Some (_val, value_type) ->
      collect_token_diffs_of_big_map_updates
        ctxt
        big_map_id
        ~value_type
        updates
        acc
  | None -> fail (Failed_to_load_big_map_value_type big_map_id)
(** Inspects the given [Lazy_storage.diffs_item] and prepends all ticket-token
    diffs, resulting from the updates, to the given accumulator [acc]. *)
let collect_token_diffs_of_big_map_diff ctxt diff_item acc =
  (* Charge a constant gas cost per diff item inspected. *)
  Gas.consume ctxt Ticket_costs.Constants.cost_collect_tickets_step
  >>?= fun ctxt ->
  match diff_item with
  | Lazy_storage.Item (Lazy_storage_kind.Big_map, big_map_id, Remove) ->
      (* The whole big map is removed: every contained ticket contributes
         a negative diff. *)
      collect_token_diffs_of_big_map
        ctxt
        ~get_token_and_amount:neg_token_and_amount
        big_map_id
        acc
  | Item (Lazy_storage_kind.Big_map, big_map_id, Update {init; updates}) -> (
      match init with
      | Lazy_storage.Existing ->
          collect_token_diffs_of_big_map_and_updates ctxt big_map_id updates acc
      | Copy {src} ->
          (* A copy duplicates the source's tickets (positive diff); the
             updates are then applied on top of the copied contents. *)
          collect_token_diffs_of_big_map
            ctxt
            ~get_token_and_amount:token_and_amount
            src
            acc
          >>=? fun (acc, ctxt) ->
          collect_token_diffs_of_big_map_and_updates ctxt src updates acc
      | Alloc {key_type = _; value_type} ->
          (* A fresh big map: only the updates can introduce tickets. *)
          collect_token_diffs_of_big_map_updates
            ctxt
            big_map_id
            ~value_type
            updates
            acc)
  | Item (Sapling_state, _, _) -> return (acc, ctxt)
(* Folds over all lazy-storage diff items, threading the gas-metered
   context and accumulating ticket-token diffs from an empty list. *)
let ticket_diffs_of_lazy_storage_diff ctxt diffs_items =
  let step (diffs, ctxt) item =
    collect_token_diffs_of_big_map_diff ctxt item diffs
  in
  List.fold_left_es step ([], ctxt) diffs_items
|
63065ced93c598d579e4c91d56a140b47e95548f514945cf11e1b6b08514f455 | armedbear/abcl | source-transform.lisp | ;;; source-transform.lisp
;;;
Copyright ( C ) 2004 - 2005
$ Id$
;;;
;;; This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version 2
of the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
;;;
;;; As a special exception, the copyright holders of this library give you
;;; permission to link this library with independent modules to produce an
;;; executable, regardless of the license terms of these independent
;;; modules, and to copy and distribute the resulting executable under
;;; terms of your choice, provided that you also meet, for each linked
;;; independent module, the terms and conditions of the license of that
;;; module. An independent module is a module which is not derived from
;;; or based on this library. If you modify this library, you may extend
;;; this exception to your version of the library, but you are not
;;; obligated to do so. If you do not wish to do so, delete this
;;; exception statement from your version.
(in-package #:system)
(export '(source-transform define-source-transform expand-source-transform))
(defun source-transform (name)
(get-function-info-value name :source-transform))
(defun set-source-transform (name transform)
(set-function-info-value name :source-transform transform))
(defsetf source-transform set-source-transform)
(defmacro define-source-transform (name lambda-list &rest body)
(let* ((form (gensym))
(env (gensym))
(block-name (if (symbolp name) name (cadr name)))
(body (parse-defmacro lambda-list form body name 'defmacro
:environment env
;; when we encounter an error
;; parsing the arguments in the call
;; (not in the difinition!), return
;; the arguments unmodified -- ie skip the
transform ( see also )
:error-fun `(lambda (&rest ignored)
(declare (ignore ignored))
(return-from ,block-name ,form))))
(expander
`(lambda (,form) (block ,block-name ,body))))
`(progn
(record-source-information-for-type ',name '(:source-transform ,name))
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf (source-transform ',name) ,expander)
',name))))
(defun expand-source-transform-1 (form)
(let ((expander nil)
(newdef nil))
(cond ((atom form)
(values form nil))
((and (consp (%car form))
(eq (caar form) 'SETF)
(setf expander (source-transform (%car form))))
(values (setf newdef (funcall expander form))
(not (eq newdef form))))
((and (symbolp (%car form))
(setf expander (source-transform (%car form))))
(values (setf newdef (funcall expander form))
(not (eq newdef form))))
(t
(values form nil)))))
(defun expand-source-transform (form)
(let ((expanded-p nil))
(loop
(multiple-value-bind (expansion exp-p) (expand-source-transform-1 form)
(if exp-p
(setf form expansion
expanded-p t)
(return))))
(values form expanded-p)))
| null | https://raw.githubusercontent.com/armedbear/abcl/0631ea551523bb93c06263e772fbe849008e2f68/src/org/armedbear/lisp/source-transform.lisp | lisp | source-transform.lisp
This program is free software; you can redistribute it and/or
either version 2
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version.
when we encounter an error
parsing the arguments in the call
(not in the difinition!), return
the arguments unmodified -- ie skip the | Copyright ( C ) 2004 - 2005
$ Id$
modify it under the terms of the GNU General Public License
of the License , or ( at your option ) any later version .
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
(in-package #:system)
(export '(source-transform define-source-transform expand-source-transform))
(defun source-transform (name)
(get-function-info-value name :source-transform))
(defun set-source-transform (name transform)
(set-function-info-value name :source-transform transform))
(defsetf source-transform set-source-transform)
(defmacro define-source-transform (name lambda-list &rest body)
(let* ((form (gensym))
(env (gensym))
(block-name (if (symbolp name) name (cadr name)))
(body (parse-defmacro lambda-list form body name 'defmacro
:environment env
transform ( see also )
:error-fun `(lambda (&rest ignored)
(declare (ignore ignored))
(return-from ,block-name ,form))))
(expander
`(lambda (,form) (block ,block-name ,body))))
`(progn
(record-source-information-for-type ',name '(:source-transform ,name))
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf (source-transform ',name) ,expander)
',name))))
(defun expand-source-transform-1 (form)
(let ((expander nil)
(newdef nil))
(cond ((atom form)
(values form nil))
((and (consp (%car form))
(eq (caar form) 'SETF)
(setf expander (source-transform (%car form))))
(values (setf newdef (funcall expander form))
(not (eq newdef form))))
((and (symbolp (%car form))
(setf expander (source-transform (%car form))))
(values (setf newdef (funcall expander form))
(not (eq newdef form))))
(t
(values form nil)))))
(defun expand-source-transform (form)
(let ((expanded-p nil))
(loop
(multiple-value-bind (expansion exp-p) (expand-source-transform-1 form)
(if exp-p
(setf form expansion
expanded-p t)
(return))))
(values form expanded-p)))
|
421fea4f27a0b0f2ca18e98669d0dce90630e7db539cea38d14523b40d6f828b | wireless-net/erlang-nommu | reltool_target.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 - 2013 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
-module(reltool_target).
%% Public
-export([
gen_config/2,
gen_app/1,
gen_rel/2,
gen_rel_files/2,
gen_boot/1,
gen_script/4,
gen_spec/1,
eval_spec/3,
gen_target/2,
install/2
]).
-include("reltool.hrl").
-include_lib("kernel/include/file.hrl").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Hardcoded internals about the kernel application
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Mandatory modules are modules that must be loaded before processes
%% can be started. These are a collection of modules from the kernel
%% and stdlib applications. Nowadays, error_handler dynamically loads
%% almost every module. The error_handler self must still be there
%% though.
%% Only error_handler strictly needs to be pre-loaded nowadays; it pulls
%% in everything else on demand.
mandatory_modules() ->
    [error_handler].
%% Kernel processes are specially treated by the init process. If a
%% kernel process terminates the whole system terminates.
%% KernelApp is the generated kernel .app term (see gen_app/1); it is
%% handed to application_controller at boot time.
kernel_processes(KernelApp) ->
    [
     {kernelProcess, heart, {heart, start, []}},
     {kernelProcess, error_logger , {error_logger, start_link, []}},
     {kernelProcess,
      application_controller,
      {application_controller, start, [KernelApp]}}
    ].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate the contents of a config file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Return {ok, ConfigTerm} for the system. InclDefs controls whether
%% values equal to their defaults are emitted as well.
gen_config(Sys, InclDefs) ->
    {ok, do_gen_config(Sys, InclDefs)}.
%% Build the {sys, ...} configuration term from the internal #sys{}
%% state. Each clause handles one record/shape of the config tree.
do_gen_config(#sys{root_dir = RootDir,
                   lib_dirs = LibDirs,
                   mod_cond = ModCond,
                   incl_cond = AppCond,
                   apps = Apps,
                   boot_rel = BootRel,
                   rels = Rels,
                   emu_name = EmuName,
                   profile = Profile,
                   incl_sys_filters = InclSysFiles,
                   excl_sys_filters = ExclSysFiles,
                   incl_app_filters = InclAppFiles,
                   excl_app_filters = ExclAppFiles,
                   incl_archive_filters = InclArchiveDirs,
                   excl_archive_filters = ExclArchiveDirs,
                   archive_opts = ArchiveOpts,
                   relocatable = Relocatable,
                   rel_app_type = RelAppType,
                   embedded_app_type = InclAppType,
                   app_file = AppFile,
                   debug_info = DebugInfo},
              InclDefs) ->
    %% erts is emitted as its own top-level item, not among the apps.
    ErtsItems =
        case lists:keyfind(erts, #app.name, Apps) of
            false ->
                [];
            Erts ->
                [{erts, do_gen_config(Erts, InclDefs)}]
        end,
    AppsItems =
        [do_gen_config(A, InclDefs)
         || A <- Apps,
            A#app.name =/= ?MISSING_APP_NAME,
            A#app.name =/= erts,
            A#app.is_escript =/= true],
    EscriptItems = [{escript,
                     A#app.active_dir,
                     emit(incl_cond, A#app.incl_cond, undefined, InclDefs)}
                    || A <- Apps, A#app.is_escript],
    %% Suppress the built-in default releases unless defaults are wanted.
    DefaultRels = reltool_utils:default_rels(),
    RelsItems =
        [{rel, R#rel.name, R#rel.vsn, do_gen_config(R, InclDefs)} ||
            R <- Rels],
    DefaultRelsItems =
        [{rel, R#rel.name, R#rel.vsn, do_gen_config(R, InclDefs)} ||
            R <- DefaultRels],
    RelsItems2 =
        case InclDefs of
            true -> RelsItems;
            false -> RelsItems -- DefaultRelsItems
        end,
    %% X extracts the regexp source strings from compiled filter lists.
    X = fun(List) -> [Re || #regexp{source = Re} <- List] end,
    {sys,
     emit(root_dir, RootDir, code:root_dir(), InclDefs) ++
     emit(lib_dirs, LibDirs, ?DEFAULT_LIBS, InclDefs) ++
     EscriptItems ++
     emit(mod_cond, ModCond, ?DEFAULT_MOD_COND, InclDefs) ++
     emit(incl_cond, AppCond, ?DEFAULT_INCL_COND, InclDefs) ++
     ErtsItems ++
     lists:flatten(AppsItems) ++
     emit(boot_rel, BootRel, ?DEFAULT_REL_NAME, InclDefs) ++
     RelsItems2 ++
     emit(emu_name, EmuName, ?DEFAULT_EMU_NAME, InclDefs) ++
     emit(relocatable, Relocatable, ?DEFAULT_RELOCATABLE, InclDefs) ++
     emit(profile, Profile, ?DEFAULT_PROFILE, InclDefs) ++
     emit(incl_sys_filters, X(InclSysFiles), reltool_utils:choose_default(incl_sys_filters, Profile, InclDefs), InclDefs) ++
     emit(excl_sys_filters, X(ExclSysFiles), reltool_utils:choose_default(excl_sys_filters, Profile, InclDefs), InclDefs) ++
     emit(incl_app_filters, X(InclAppFiles), reltool_utils:choose_default(incl_app_filters, Profile, InclDefs), InclDefs) ++
     emit(excl_app_filters, X(ExclAppFiles), reltool_utils:choose_default(excl_app_filters, Profile, InclDefs), InclDefs) ++
     emit(incl_archive_filters, X(InclArchiveDirs), ?DEFAULT_INCL_ARCHIVE_FILTERS, InclDefs) ++
     emit(excl_archive_filters, X(ExclArchiveDirs), ?DEFAULT_EXCL_ARCHIVE_FILTERS, InclDefs) ++
     emit(archive_opts, ArchiveOpts, ?DEFAULT_ARCHIVE_OPTS, InclDefs) ++
     emit(rel_app_type, RelAppType, ?DEFAULT_REL_APP_TYPE, InclDefs) ++
     emit(embedded_app_type, InclAppType, reltool_utils:choose_default(embedded_app_type, Profile, InclDefs), InclDefs) ++
     emit(app_file, AppFile, ?DEFAULT_APP_FILE, InclDefs) ++
     emit(debug_info, DebugInfo, ?DEFAULT_DEBUG_INFO, InclDefs)};
%% One application: emitted as {app, Name, Opts} (or nothing at all when
%% it has no explicit settings and is not included).
do_gen_config(#app{name = Name,
                   mod_cond = ModCond,
                   incl_cond = AppCond,
                   debug_info = DebugInfo,
                   app_file = AppFile,
                   incl_app_filters = InclAppFiles,
                   excl_app_filters = ExclAppFiles,
                   incl_archive_filters = InclArchiveDirs,
                   excl_archive_filters = ExclArchiveDirs,
                   archive_opts = ArchiveOpts,
                   use_selected_vsn = UseSelected,
                   vsn = Vsn,
                   active_dir = ActiveDir,
                   mods = Mods,
                   is_included = IsIncl},
              InclDefs) ->
    AppConfig =
        [
         emit(mod_cond, ModCond, undefined, InclDefs),
         emit(incl_cond, AppCond, undefined, InclDefs),
         emit(debug_info, DebugInfo, undefined, InclDefs),
         emit(app_file, AppFile, undefined, InclDefs),
         emit(incl_app_filters, InclAppFiles, undefined, InclDefs),
         emit(excl_app_filters, ExclAppFiles, undefined, InclDefs),
         emit(incl_archive_filters, InclArchiveDirs, undefined, InclDefs),
         emit(excl_archive_filters, ExclArchiveDirs, undefined, InclDefs),
         emit(archive_opts, ArchiveOpts, undefined, InclDefs),
         %% Pin version and/or lib dir depending on how it was selected.
         if
             IsIncl, InclDefs -> [{vsn, Vsn}, {lib_dir, ActiveDir}];
             UseSelected =:= vsn -> [{vsn, Vsn}];
             UseSelected =:= dir -> [{lib_dir, ActiveDir}];
             true -> []
         end,
         [do_gen_config(M, InclDefs) || M <- Mods]
        ],
    case lists:flatten(AppConfig) of
        FlatAppConfig when FlatAppConfig =/= []; IsIncl ->
            [{app, Name, FlatAppConfig}];
        [] ->
            []
    end;
%% One module: emitted as {mod, Name, Opts} when it carries settings.
do_gen_config(#mod{name = Name,
                   incl_cond = AppCond,
                   debug_info = DebugInfo,
                   is_included = IsIncl},
              InclDefs) ->
    ModConfig =
        [
         emit(incl_cond, AppCond, undefined, InclDefs),
         emit(debug_info, DebugInfo, undefined, InclDefs)
        ],
    case lists:flatten(ModConfig) of
        FlatModConfig when FlatModConfig =/= []; IsIncl ->
            [{mod, Name, FlatModConfig}];
        _ ->
            []
    end;
%% A release contributes the config of its release applications.
do_gen_config(#rel{name = _Name,
                   vsn = _Vsn,
                   rel_apps = RelApps},
              InclDefs) ->
    [do_gen_config(RA, InclDefs) || RA <- RelApps];
%% A release application is encoded as compactly as possible.
do_gen_config(#rel_app{name = Name,
                       app_type = Type,
                       incl_apps = InclApps},
              _InclDefs) ->
    case {Type, InclApps} of
        {undefined, undefined} -> Name;
        {undefined, _} -> {Name, InclApps};
        {_, undefined} -> {Name, Type};
        {_, _} -> {Name, Type, InclApps}
    end;
do_gen_config({Tag, Val}, InclDefs) ->
    emit(Tag, Val, undefined, InclDefs);
do_gen_config([], _InclDefs) ->
    [];
do_gen_config([H | T], InclDefs) ->
    lists:flatten([do_gen_config(H, InclDefs), do_gen_config(T, InclDefs)]).
%% Emit a single config item {Tag, Val} as a one-element list, or []
%% when the value is undefined or (unless defaults are requested via
%% InclDefs) equal to its default.
emit(_Tag, undefined, _Default, _InclDefs) ->
    [];
emit(Tag, Val, _Default, true) ->
    [{Tag, Val}];
emit(Tag, Val, Default, _InclDefs) when Val =/= Default ->
    [{Tag, Val}];
emit(_Tag, _Val, _Default, _InclDefs) ->
    [].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate the contents of an app file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Build a complete {application, Name, Props} term from the collected
%% #app_info{}. mod/start_phases are appended only when present.
gen_app(#app{name = Name,
             info = #app_info{description = Desc,
                              id = Id,
                              vsn = Vsn,
                              modules = Mods,
                              maxP = MaxP,
                              maxT = MaxT,
                              registered = Regs,
                              incl_apps = InclApps,
                              applications = ReqApps,
                              env = Env,
                              mod = StartMod,
                              start_phases = StartPhases}}) ->
    StartPhases2 =
        case StartPhases of
            undefined -> [];
            _ -> [{start_phases, StartPhases}]
        end,
    Tail =
        case StartMod of
            undefined -> StartPhases2;
            _ -> [{mod, StartMod} | StartPhases2]
        end,
    {application, Name,
     [{description, Desc},
      {vsn, Vsn},
      {id, Id},
      {modules, Mods},
      {registered, Regs},
      {applications, ReqApps},
      {included_applications, InclApps},
      {env, Env},
      {maxT, MaxT},
      {maxP, MaxP} |
      Tail]}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate the contents of a rel file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Return {ok, ReleaseTerm} for the release, or {error, Text} when the
%% application set cannot be merged consistently.
gen_rel(Rel, Sys) ->
    try
        MergedApps = merge_apps(Rel, Sys),
        {ok, do_gen_rel(Rel, Sys, MergedApps)}
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Build the {release, ...} term; erts must be among the configured
%% applications or generation fails.
do_gen_rel(#rel{name = RelName, vsn = RelVsn, rel_apps = RelApps},
           #sys{apps = Apps},
           MergedApps) ->
    ErtsName = erts,
    case lists:keysearch(ErtsName, #app.name, Apps) of
        {value, Erts} ->
            {release,
             {RelName, RelVsn},
             {ErtsName, Erts#app.vsn},
             [strip_rel_info(App, RelApps) || App <- MergedApps]};
        false ->
            reltool_utils:throw_error("Mandatory application ~w is "
                                      "not included",
                                      [ErtsName])
    end.
%% Encode one application entry of a .rel file in its most compact
%% allowed form: type and included applications are only spelled out
%% when they differ from the defaults.
strip_rel_info(#app{name = Name,
                    vsn = Vsn,
                    app_type = Type,
                    info = #app_info{incl_apps = AppInclApps}},
               RelApps) when Type =/= undefined ->
    RelInclApps = case lists:keyfind(Name,#rel_app.name,RelApps) of
                      #rel_app{incl_apps = RIA} when RIA =/= undefined -> RIA;
                      _ -> undefined
                  end,
    case {Type, RelInclApps} of
        {permanent, undefined} -> {Name, Vsn};
        {permanent, _} -> {Name, Vsn, AppInclApps};
        {_, undefined} -> {Name, Vsn, Type};
        {_, _} -> {Name, Vsn, Type, AppInclApps}
    end.
%% Compute the complete, dependency-ordered application list for a
%% release: mandatory apps first (always permanent), then the apps
%% listed in the release, then any remaining embedded apps.
merge_apps(#rel{name = RelName,
                rel_apps = RelApps},
           #sys{apps = Apps,
                rel_app_type = RelAppType,
                embedded_app_type = EmbAppType}) ->
    Mandatory = [kernel, stdlib],
    MergedApps = do_merge_apps(RelName, Mandatory, Apps, permanent, []),
    MergedApps2 = do_merge_apps(RelName, RelApps, Apps, RelAppType, MergedApps),
    Embedded =
        [A#app.name || A <- Apps,
                       EmbAppType =/= undefined,
                       A#app.is_included,
                       A#app.name =/= erts,
                       A#app.name =/= ?MISSING_APP_NAME,
                       not lists:keymember(A#app.name, #app.name, MergedApps2)],
    MergedApps3 = do_merge_apps(RelName, Embedded, Apps, EmbAppType, MergedApps2),
    RevMerged = lists:reverse(MergedApps3),
    MergedSortedUsedAndIncs = sort_used_and_incl_apps(RevMerged,RevMerged),
    sort_apps(MergedSortedUsedAndIncs).
%% Walk the (possibly bare-name) release application list, merging each
%% app's release settings with its #app{} record and transitively adding
%% its used and included applications. Acc is built in reverse order.
do_merge_apps(RelName, [#rel_app{name = Name} = RA | RelApps], Apps, RelAppType, Acc) ->
    case is_already_merged(Name, RelApps, Acc) of
        true ->
            do_merge_apps(RelName, RelApps, Apps, RelAppType, Acc);
        false ->
            {value, App} = lists:keysearch(Name, #app.name, Apps),
            MergedApp = merge_app(RelName, RA, RelAppType, App),
            ReqNames = (MergedApp#app.info)#app_info.applications,
            IncNames = (MergedApp#app.info)#app_info.incl_apps,
            Acc2 = [MergedApp | Acc],
            do_merge_apps(RelName, ReqNames ++ IncNames ++ RelApps,
                          Apps, RelAppType, Acc2)
    end;
%% Bare atom: wrap it into a default #rel_app{} and retry.
do_merge_apps(RelName, [Name | RelApps], Apps, RelAppType, Acc) ->
    case is_already_merged(Name, RelApps, Acc) of
        true ->
            do_merge_apps(RelName, RelApps, Apps, RelAppType, Acc);
        false ->
            RelApp = #rel_app{name = Name},
            do_merge_apps(RelName, [RelApp | RelApps], Apps, RelAppType, Acc)
    end;
do_merge_apps(_RelName, [], _Apps, _RelAppType, Acc) ->
    Acc.
%% Merge release-level settings for one application into its #app{}
%% record. Precedence for the app type: release entry, then the app's
%% own setting, then the system-wide release default. Included apps
%% from the release must be a subset of the .app file's
%% included_applications.
merge_app(RelName,
          #rel_app{name = Name,
                   app_type = Type,
                   incl_apps = InclApps0},
          RelAppType,
          App) ->
    Type2 =
        case {Type, App#app.app_type} of
            {undefined, undefined} -> RelAppType;
            {undefined, AppAppType} -> AppAppType;
            {_, _} -> Type
        end,
    Info = App#app.info,
    InclApps =
        case InclApps0 of
            undefined -> Info#app_info.incl_apps;
            _ -> InclApps0
        end,
    case InclApps -- Info#app_info.incl_apps of
        [] ->
            App#app{app_type = Type2, info = Info#app_info{incl_apps = InclApps}};
        BadIncl ->
            reltool_utils:throw_error("~w: These applications are "
                                      "used by release ~ts but are "
                                      "missing as included_applications "
                                      "in the app file: ~p",
                                      [Name, RelName, BadIncl])
    end.
%% True when Name still occurs later in the pending rel-app list or has
%% already been merged into the accumulator; the pending list is scanned
%% first, then the merged #app{} list.
is_already_merged(Name, [Name | _], _MergedApps) ->
    true;
is_already_merged(Name, [#rel_app{name = Name} | _], _MergedApps) ->
    true;
is_already_merged(Name, [_ | RelApps], MergedApps) ->
    is_already_merged(Name, RelApps, MergedApps);
is_already_merged(Name, [], [#app{name = Name} | _MergedApps]) ->
    true;
is_already_merged(Name, [] = RelApps, [_ | MergedApps]) ->
    is_already_merged(Name, RelApps, MergedApps);
is_already_merged(_Name, [], []) ->
    false.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate the contents of a boot file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Serialize a boot script term into the binary contents of a .boot file.
gen_boot({script, {_, _}, _} = Script) ->
    {ok, term_to_binary(Script)}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate the contents of a script file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate a boot script term for the release. PathFlag and Variables
%% control how code paths are rendered (see cr_path/3).
gen_script(Rel, Sys, PathFlag, Variables) ->
    try
        MergedApps = merge_apps(Rel, Sys),
        do_gen_script(Rel, Sys, MergedApps, PathFlag, Variables)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Build the {script, {Name, Vsn}, Instructions} term: preload, load
%% mandatory modules, load all application modules, start the kernel
%% processes, then load and start the applications. Included apps are
%% started by their including application, not by the boot script.
do_gen_script(#rel{name = RelName, vsn = RelVsn},
              #sys{apps = Apps},
              MergedApps,
              PathFlag,
              Variables) ->
    {value, Erts} = lists:keysearch(erts, #app.name, Apps),
    Preloaded = [Mod#mod.name || Mod <- Erts#app.mods],
    Mandatory = mandatory_modules(),
    Early = Mandatory ++ Preloaded,
    {value, KernelApp} = lists:keysearch(kernel, #app.name, MergedApps),
    InclApps = lists:flatmap(fun(#app{info = #app_info{incl_apps = I}}) ->
                                     I
                             end,
                             MergedApps),
    %% Create the script
    DeepList =
        [
         %% Register preloaded modules
         {preLoaded, lists:sort(Preloaded)},
         {progress, preloaded},
         %% Load mandatory modules
         {path, create_mandatory_path(MergedApps, PathFlag, Variables)},
         {primLoad, lists:sort(Mandatory)},
         {kernel_load_completed},
         {progress, kernel_load_completed},
         %% Load remaining modules
         [load_app_mods(A, Early, PathFlag, Variables) || A <- MergedApps],
         {progress, modules_loaded},
         %% Start kernel processes
         {path, create_path(MergedApps, PathFlag, Variables)},
         kernel_processes(gen_app(KernelApp)),
         {progress, init_kernel_started},
         %% Load applications
         [{apply, {application, load, [gen_app(A)]}} ||
             A = #app{name = Name, app_type = Type} <- MergedApps,
             Name =/= kernel,
             Type =/= none],
         {progress, applications_loaded},
         %% Start applications
         [{apply, {application, start_boot, [Name, Type]}} ||
             #app{name = Name, app_type = Type} <- MergedApps,
             Type =/= none,
             Type =/= load,
             not lists:member(Name, InclApps)],
         %% Apply user specific customizations
         {apply, {c, erlangrc, []}},
         {progress, started}
        ],
    {ok, {script, {RelName, RelVsn}, lists:flatten(DeepList)}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Produce the path/primLoad instructions for one application's included
%% modules, excluding those already loaded early (mandatory+preloaded).
load_app_mods(#app{mods = Mods0} = App, Mand, PathFlag, Variables) ->
    Path = cr_path(App, PathFlag, Variables),
    Mods = [M || #mod{name = M, is_included=true} <- Mods0,
                 not lists:member(M, Mand)],
    [{path, [filename:join([Path])]},
     {primLoad, lists:sort(Mods)}].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% sort_used_and_incl_apps(Apps, OrderedApps) -> Apps
%%   Apps = [#app{}]
%%   OrderedApps = [#app{}]
%%
%% OTP-4121, OTP-9984
%% (Tickets are written for systools, but needs to be implemented here
%% as well.)
%% Make sure that used and included applications are given in the same
%% order as in the release resource file (.rel). Otherwise load and
%% start instructions in the boot script, and consequently release
%% upgrade instructions in relup, may end up in the wrong order.
%% Re-order each app's used and included application lists to match the
%% overall release order (OrderedApps). Lists of length =< 1 are left
%% untouched since their order cannot matter.
sort_used_and_incl_apps([#app{info=Info} = App|Apps], OrderedApps) ->
    Incls2 =
        case Info#app_info.incl_apps of
            Incls when length(Incls)>1 ->
                sort_appl_list(Incls, OrderedApps);
            Incls ->
                Incls
        end,
    Uses2 =
        case Info#app_info.applications of
            Uses when length(Uses)>1 ->
                sort_appl_list(Uses, OrderedApps);
            Uses ->
                Uses
        end,
    App2 = App#app{info=Info#app_info{incl_apps=Incls2, applications=Uses2}},
    [App2|sort_used_and_incl_apps(Apps, OrderedApps)];
sort_used_and_incl_apps([], _OrderedApps) ->
    [].
%% Sort the application names in List by their position in Order.
sort_appl_list(List, Order) ->
    Indexed = find_pos(List, Order),
    [AppName || {_Pos, AppName} <- lists:keysort(1, Indexed)].
%% Pair each application name with its 1-based position in the ordered
%% #app{} list; crashes (as before) if a name is not present.
find_pos(Names, OrderedApps) ->
    [find_pos(1, Name, OrderedApps) || Name <- Names].

find_pos(Pos, Name, [#app{name = Name} | _Rest]) ->
    {Pos, Name};
find_pos(Pos, Name, [_Other | Rest]) ->
    find_pos(Pos + 1, Name, Rest).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Function : ) - > { ok , Apps ' } | throw({error , Error } )
Types : Apps = { { Name , , # application } ]
%% Purpose: Sort applications according to dependencies among
%% applications. If order doesn't matter, use the same
%% order as in the original list.
Alg . written by ( )
Mod . by mbj
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% sort_apps(Apps) -> Apps' | throws {error, Text}
%%
%% Topologically sort the applications so that every application comes
%% after the applications it uses or includes; where order does not
%% matter the original order is kept. Missing applications and circular
%% dependencies are collected and reported at the end.
sort_apps(Apps) ->
    sort_apps(Apps, [], [], []).

sort_apps([#app{name = Name, info = Info} = App | Apps],
          Missing,
          Circular,
          Visited) ->
    %% Pull out the not-yet-handled apps this one uses or includes.
    {Uses, Apps1, NotFnd1} =
        find_all(Name,
                 lists:reverse(Info#app_info.applications),
                 Apps,
                 Visited,
                 [],
                 []),
    {Incs, Apps2, NotFnd2} =
        find_all(Name,
                 lists:reverse(Info#app_info.incl_apps),
                 Apps1,
                 Visited,
                 [],
                 []),
    Missing1 = NotFnd1 ++ NotFnd2 ++ Missing,
    case Uses ++ Incs of
        [] ->
            %% No more app that must be started before this one is
            %% found; they are all already taken care of (and present
            %% in Visited list)
            [App | sort_apps(Apps, Missing1, Circular, [Name | Visited])];
        L ->
            %% The apps in L must be started before the app.
            %% Check if we have already taken care of some app in L,
            %% in that case we have a circular dependency.
            NewCircular = [N || #app{name = N} <- L, N2 <- Visited, N =:= N2],
            Circular1 = case NewCircular of
                            [] -> Circular;
                            _ -> [Name | NewCircular] ++ Circular
                        end,
            %% L must be started before N, try again, with all apps
            %% in L added before N.
            Apps3 = del_apps(NewCircular, L ++ [App | Apps2]),
            sort_apps(Apps3, Missing1, Circular1, [Name | Visited])
    end;
sort_apps([], [], [], _) ->
    [];
sort_apps([], Missing, [], _) ->
    %% this has already been checked before, but as we have the info...
    reltool_utils:throw_error("Undefined applications: ~p",
                              [make_set(Missing)]);
sort_apps([], [], Circular, _) ->
    reltool_utils:throw_error("Circular dependencies: ~p",
                              [make_set(Circular)]);
sort_apps([], Missing, Circular, _) ->
    %% Bug fix: the two string literals below used to be concatenated
    %% without any separator, rendering as
    %% "Circular dependencies: [...]Undefined applications: [...]".
    %% A newline now separates the two problem reports.
    reltool_utils:throw_error("Circular dependencies: ~p\n"
                              "Undefined applications: ~p\n",
                              [make_set(Circular), make_set(Missing)]).
%% Partition the dependency names of CheckingApp into the #app{} records
%% found in Apps (Found; removed from the returned Apps) and the names
%% that were not found or are skipped (NotFound). Already-visited names
%% are skipped entirely. Returns {Found, RemainingApps, NotFound}.
find_all(CheckingApp, [Name | Names], Apps, Visited, Found, NotFound) ->
    case lists:keyfind(Name, #app.name, Apps) of
        #app{info = Info} = App ->
            %% It is OK to have a dependency like
            %% X includes Y, Y uses X.
            case lists:member(CheckingApp, Info#app_info.incl_apps) of
                true ->
                    %% Reverse include-dependency: do not force ordering.
                    case lists:member(Name, Visited) of
                        true ->
                            find_all(CheckingApp,
                                     Names,
                                     Apps,
                                     Visited,
                                     Found,
                                     NotFound);
                        false ->
                            find_all(CheckingApp,
                                     Names,
                                     Apps,
                                     Visited,
                                     Found,
                                     [Name | NotFound])
                    end;
                false ->
                    find_all(CheckingApp,
                             Names,
                             Apps -- [App],
                             Visited,
                             [App|Found],
                             NotFound)
            end;
        false ->
            case lists:member(Name, Visited) of
                true ->
                    find_all(CheckingApp,
                             Names,
                             Apps,
                             Visited,
                             Found,
                             NotFound);
                false ->
                    find_all(CheckingApp,
                             Names,
                             Apps,
                             Visited,
                             Found,
                             [Name|NotFound])
            end
    end;
find_all(_CheckingApp, [], Apps, _Visited, Found, NotFound) ->
    {Found, Apps, NotFound}.
%% Delete every application whose name occurs in Names from Apps.
del_apps(Names, Apps) ->
    lists:foldl(fun(Name, Remaining) ->
                        lists:keydelete(Name, #app.name, Remaining)
                end,
                Apps,
                Names).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Create the load path used in the generated script.
If PathFlag is true a script intended to be used as a complete
%% system (e.g. in an embbeded system), i.e. all applications are
%% located under $ROOT/lib.
%% Otherwise all paths are set according to dir per application.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Create the complete path.
%% Build the full, duplicate-free code path covering all applications.
create_path(Apps, PathFlag, Variables) ->
    make_set([cr_path(App, PathFlag, Variables) || App <- Apps]).
%% Create the path to a specific application.
( The otp_build flag is only used for OTP internal system make )
%% Create the path entry for a single application. With PathFlag true
%% the path is rooted at $ROOT/lib (or at a boot variable when the app
%% lives under one); otp_build uses unversioned names; otherwise the
%% app's actual directory is used.
cr_path(#app{label = Label}, true, []) ->
    filename:join(["$ROOT", "lib", Label, "ebin"]);
cr_path(#app{name = Name, vsn = Vsn, label = Label, active_dir = Dir},
        true,
        Variables) ->
    Tail = [Label, "ebin"],
    case variable_dir(Dir, atom_to_list(Name), Vsn, Variables) of
        {ok, VarDir} ->
            filename:join([VarDir] ++ Tail);
        _ ->
            filename:join(["$ROOT", "lib"] ++ Tail)
    end;
cr_path(#app{name = Name}, otp_build, _) ->
    filename:join(["$ROOT", "lib", atom_to_list(Name), "ebin"]);
cr_path(#app{active_dir = Dir}, _, _) ->
    filename:join([Dir, "ebin"]).
%% If Dir lies under one of the boot variable roots, return {ok, Path}
%% with the variable reference ($Var) substituted for the root; false
%% when no variable matches.
variable_dir(Dir, Name, Vsn, [{Var,Path} | Variables]) ->
    case lists:prefix(Path, Dir) of
        true ->
            D0 = strip_prefix(Path, Dir),
            case strip_name_ebin(D0, Name, Vsn) of
                {ok, D} ->
                    {ok, filename:join(["\$" ++ Var] ++ D)};
                _ ->
                    %% We know at least that we are located
                    %% under the variable dir.
                    {ok, filename:join(["\$" ++ Var] ++ D0)}
            end;
        false ->
            variable_dir(Dir, Name, Vsn, Variables)
    end;
variable_dir(_Dir, _, _, []) ->
    false.
%% Drop the leading components of Dir that correspond to the prefix
%% Path; the remaining components are returned as a list.
strip_prefix(Path, Dir) ->
    lists:nthtail(length(filename:split(Path)), filename:split(Dir)).
%% Remove a trailing "Name[-Vsn]" directory (optionally followed by
%% "ebin") from the already-split path components Dir. Returns
%% {ok, RemainingComponents} or false when nothing matches.
strip_name_ebin(Dir, Name, Vsn) ->
    VersionedName = Name ++ "-" ++ Vsn,
    case lists:reverse(Dir) of
        ["ebin", Name | Rest] -> {ok, lists:reverse(Rest)};
        ["ebin", VersionedName | Rest] -> {ok, lists:reverse(Rest)};
        [Name | Rest] -> {ok, lists:reverse(Rest)};
        [VersionedName | Rest] -> {ok, lists:reverse(Rest)};
        _ -> false
    end.
%% Create the path to the kernel and stdlib applications.
%% Build the code path covering only the mandatory applications (kernel
%% and stdlib). Non-mandatory apps contribute an empty string which is
%% subsequently dropped by make_set/1.
create_mandatory_path(Apps, PathFlag, Variables) ->
    MandatoryApps = [kernel, stdlib],
    Paths = lists:map(
              fun(#app{name = AppName} = App) ->
                      case lists:member(AppName, MandatoryApps) of
                          true -> cr_path(App, PathFlag, Variables);
                          false -> ""
                      end
              end,
              Apps),
    make_set(Paths).
%% Remove duplicates (keeping first occurrences, compared with exact
%% equality) and drop empty strings.
make_set(Items) ->
    drop_dups(Items, []).

drop_dups([], _Seen) ->
    [];
drop_dups(["" | Rest], Seen) ->
    %% Ignore empty items.
    drop_dups(Rest, Seen);
drop_dups([Item | Rest], Seen) ->
    case lists:member(Item, Seen) of
        true -> drop_dups(Rest, Seen);
        false -> [Item | drop_dups(Rest, [Item | Seen])]
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate rel, script and boot files
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate the rel, script and boot files for every release into
%% TargetDir; returns ok/{error, Text} via eval_spec/3.
gen_rel_files(Sys, TargetDir) ->
    try
        Spec = spec_rel_files(Sys),
        eval_spec(Spec, Sys#sys.root_dir, TargetDir)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Collect the write_file spec entries for all releases of the system.
spec_rel_files(#sys{rels = Rels} = Sys) ->
    lists:append([do_spec_rel_files(R, Sys) || R <- Rels]).
%% Produce the .rel, .script and .boot file contents for one release as
%% write_file spec entries.
do_spec_rel_files(#rel{name = RelName} = Rel, Sys) ->
    RelFile = RelName ++ ".rel",
    ScriptFile = RelName ++ ".script",
    BootFile = RelName ++ ".boot",
    MergedApps = merge_apps(Rel, Sys),
    GenRel = do_gen_rel(Rel, Sys, MergedApps),
    Variables =
        case Sys#sys.excl_lib of
            otp_root ->
                %% All applications that are fetched from somewhere
                %% other than $OTP_ROOT/lib will get $RELTOOL_EXT_LIB
                %% as path prefix in the .script file.
                [{"RELTOOL_EXT_LIB",LibDir} || LibDir <- Sys#sys.lib_dirs] ++
                    [{"RELTOOL_EXT_LIB",filename:dirname(AppLibDir)} ||
                        #app{active_dir=AppLibDir,use_selected_vsn=dir}
                            <- MergedApps];
            _ ->
                []
        end,
    PathFlag = true,
    {ok, Script} = do_gen_script(Rel, Sys, MergedApps, PathFlag, Variables),
    {ok, BootBin} = gen_boot(Script),
    Date = date(),
    Time = time(),
    RelIoList = io_lib:format("%% rel generated at ~w ~w\n~p.\n\n",
                              [Date, Time, GenRel]),
    ScriptIoList = io_lib:format("%% script generated at ~w ~w\n~p.\n\n",
                                 [Date, Time, Script]),
    [
     {write_file, RelFile, RelIoList},
     {write_file, ScriptFile, ScriptIoList},
     {write_file, BootFile, BootBin}
    ].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate a complete target system
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate a complete target system under TargetDir.
%% Returns ok | {error, Reason}.
gen_target(Sys, TargetDir) ->
    try
        Spec = do_gen_spec(Sys),
        eval_spec(Spec, Sys#sys.root_dir, TargetDir)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Compute the target-system spec without evaluating it.
%% Returns {ok, Spec} | {error, Reason}.
gen_spec(Sys) ->
    try
        {ok, do_gen_spec(Sys)}
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Build the full spec tree for a target system: system files (bin,
%% erts, ...), the releases directory and the lib directory. When
%% excl_lib =:= otp_root only "lib" is produced and checked.
do_gen_spec(#sys{root_dir = RootDir,
                 excl_lib = ExclLib,
                 incl_sys_filters = InclRegexps,
                 excl_sys_filters = ExclRegexps,
                 relocatable = Relocatable,
                 apps = Apps} = Sys) ->
    RelFiles = spec_rel_files(Sys),
    {SysFiles, InclRegexps2, ExclRegexps2, Mandatory} =
        case ExclLib of
            otp_root ->
                {[],InclRegexps,ExclRegexps,["lib"]};
            _ ->
                %% Scan the root dir and derive the bin directory from
                %% the erts bin files.
                {create_dir, _, SF} = spec_dir(RootDir),
                {ER2, SF2} = strip_sys_files(Relocatable, SF, Apps, ExclRegexps),
                {IR2, BinFiles} =
                    spec_bin_files(Sys, SF, SF2, RelFiles, InclRegexps),
                SF3 = [{create_dir, "bin", BinFiles}] ++ SF2,
                {SF3,IR2,ER2,["bin","erts","lib"]}
        end,
    LibFiles = spec_lib_files(Sys),
    {BootVsn, StartFile} = spec_start_file(Sys),
    SysFiles2 =
        [{create_dir, "releases",
          [StartFile,
           {create_dir,BootVsn, RelFiles}]}] ++ SysFiles,
    SysFiles3 = filter_spec(SysFiles2, InclRegexps2, ExclRegexps2),
    %% Note: lib is appended after filtering, so it bypasses the
    %% system filters (applications have their own filters).
    SysFiles4 = SysFiles3 ++ [{create_dir, "lib", LibFiles}],
    check_sys(Mandatory, SysFiles4),
    SysFiles4.
%% Prune the scanned root-dir spec: keep only the selected erts
%% directory (handled by replace_dyn_erl/2), drop other erts-* dirs,
%% and remove releases/lib/bin (they are rebuilt separately). For a
%% relocatable system, erts bin sources are also excluded.
strip_sys_files(Relocatable, SysFiles, Apps, ExclRegexps) ->
    ExclRegexps2 =
        case Relocatable of
            true ->
                ExtraExcl = ["^erts.*/bin/.*src\$"],
                reltool_utils:decode_regexps(excl_sys_filters,
                                             {add, ExtraExcl},
                                             ExclRegexps);
            false ->
                ExclRegexps
        end,
    {value, Erts} = lists:keysearch(erts, #app.name, Apps),
    FilterErts =
        fun(Spec) ->
                File = element(2, Spec),
                case File of
                    "erts" ->
                        %% A bare "erts" entry means the source tree is
                        %% not installed.
                        reltool_utils:throw_error("This system is not installed. "
                                                  "The directory ~ts is missing.",
                                                  [Erts#app.label]);
                    _ when File =:= Erts#app.label ->
                        replace_dyn_erl(Relocatable, Spec);
                    "erts-" ++ _ ->
                        false;
                    _ ->
                        true
                end
        end,
    SysFiles2 = lists:zf(FilterErts, SysFiles),
    SysFiles3 = lists:foldl(fun(F, Acc) -> lists:keydelete(F, 2, Acc) end,
                            SysFiles2,
                            ["releases", "lib", "bin"]),
    {ExclRegexps2, SysFiles3}.
%% For a relocatable target, rewrite the erts spec so that dyn_erl
%% (when present) replaces erl, and any Windows erl.ini is dropped.
%% Returns true (keep unchanged) or {true, NewSpec} as expected by
%% lists:zf/2.
replace_dyn_erl(false, _ErtsSpec) ->
    true;
replace_dyn_erl(true, {create_dir, ErtsDir, ErtsFiles}) ->
    [{create_dir, _, BinFiles}] =
        safe_lookup_spec("bin", ErtsFiles),
    case lookup_spec("dyn_erl", BinFiles) of
        [] ->
            case lookup_spec("erl.ini", BinFiles) of
                [] ->
                    true;
                [{copy_file, ErlIni}] ->
                    %% Remove Windows .ini file
                    BinFiles2 = lists:keydelete(ErlIni, 2, BinFiles),
                    ErtsFiles2 =
                        lists:keyreplace("bin",
                                         2,
                                         ErtsFiles,
                                         {create_dir, "bin", BinFiles2}),
                    {true, {create_dir, ErtsDir, ErtsFiles2}}
            end;
        [{copy_file, DynErlExe}] ->
            %% Replace erl with dyn_erl
            ErlExe = "erl" ++ filename:extension(DynErlExe),
            BinFiles2 = lists:keydelete(DynErlExe, 2, BinFiles),
            DynErlExe2 = filename:join([ErtsDir, "bin", DynErlExe]),
            BinFiles3 = lists:keyreplace(ErlExe,
                                         2,
                                         BinFiles2,
                                         {copy_file, ErlExe, DynErlExe2}),
            ErtsFiles2 = lists:keyreplace("bin",
                                          2,
                                          ErtsFiles,
                                          {create_dir, "bin", BinFiles3}),
            {true, {create_dir, ErtsDir, ErtsFiles2}}
    end.
%% Build the spec for the target's top-level bin directory: escript
%% wrappers, copies of the erts bin files that also exist in the
%% original bin dir, and the boot files (with the boot release also
%% installed as start.boot).
%%
%% Fix: the explanatory comment before the OldBinFiles lookup had lost
%% its %% markers (stray text in the function body); restored.
spec_bin_files(Sys, AllSysFiles, StrippedSysFiles, RelFiles, InclRegexps) ->
    [{create_dir, ErtsLabel, ErtsFiles}] =
        safe_lookup_spec("erts", StrippedSysFiles),
    [{create_dir, _, BinFiles}] = safe_lookup_spec("bin", ErtsFiles),
    ErtsBin = filename:join([ErtsLabel, "bin"]),
    Escripts = spec_escripts(Sys, ErtsBin, BinFiles),
    Map = fun({copy_file, File}) ->
                  {copy_file, File, filename:join([ErtsBin, File])};
             ({copy_file, NewFile, OldFile}) ->
                  {_, OldFile2} =
                      abs_to_rel_path(ErtsBin,
                                      filename:join([ErtsBin, OldFile])),
                  {copy_file, NewFile, OldFile2}
          end,
    %% Only copy those bin files from erts/bin that also exist in bin
    [{create_dir, _, OldBinFiles}] = safe_lookup_spec("bin", AllSysFiles),
    GoodNames = [F || {copy_file, F} <- OldBinFiles,
                      not lists:suffix(".boot", F),
                      not lists:suffix(".script", F)],
    BinFiles2 = [Map(S) || S <- BinFiles,
                           lists:member(element(2, S), GoodNames)],
    BootFiles = [F || F <- RelFiles, lists:suffix(".boot", element(2, F))],
    [{write_file, _, BootRel}] =
        safe_lookup_spec(Sys#sys.boot_rel ++ ".boot", BootFiles),
    BootFiles2 = lists:keystore("start.boot",
                                2,
                                BootFiles,
                                {write_file, "start.boot", BootRel}),
    %% Make sure the escripts survive the system file filters.
    MakeRegexp =
        fun(File) -> "^bin/" ++ element(2, File) ++ "(|.escript)\$" end,
    ExtraIncl = lists:map(MakeRegexp, Escripts),
    InclRegexps2 = reltool_utils:decode_regexps(incl_sys_filters,
                                                {add, ExtraIncl},
                                                InclRegexps),
    {InclRegexps2, Escripts ++ BinFiles2 ++ BootFiles2}.
%% Collect the bin-dir specs for all included escript "applications".
spec_escripts(#sys{apps = Apps}, ErtsBin, BinFiles) ->
    Filter = fun(#app{is_escript = IsEscript,
                      is_included = IsIncl,
                      is_pre_included = IsPre,
                      name = Name,
                      active_dir = File}) ->
                     if
                         Name =:= ?MISSING_APP_NAME ->
                             false;
                         IsEscript =/= true ->
                             false;
                         IsIncl; IsPre ->
                             %% For an escript, active_dir holds the
                             %% escript file itself.
                             {true, do_spec_escript(File, ErtsBin, BinFiles)};
                         true ->
                             false
                     end
             end,
    lists:flatten(lists:zf(Filter, Apps)).
%% Spec for one escript: install the script itself next to a copy of
%% the escript executable named after the script's base name.
do_spec_escript(File, ErtsBin, BinFiles) ->
    [{copy_file, EscriptExe}] = safe_lookup_spec("escript", BinFiles),
    Base = filename:basename(File, ".escript"),
    ScriptSpec = {copy_file, Base ++ ".escript", File},
    ExeSpec = {copy_file,
               Base ++ filename:extension(EscriptExe),
               filename:join([ErtsBin, EscriptExe])},
    [ScriptSpec, ExeSpec].
%% Verify that every mandatory top-level entry exists in the spec.
check_sys(Mandatory, SysFiles) ->
    _ = [do_check_sys(Dir, SysFiles) || Dir <- Mandatory],
    ok.
%% Throw {error, Text} when no spec entry starts with Prefix.
do_check_sys(Prefix, Specs) ->
    Found = lookup_spec(Prefix, Specs),
    case Found =:= [] of
        true ->
            reltool_utils:throw_error("Mandatory system directory ~ts "
                                      "is not included",
                                      [Prefix]);
        false ->
            ok
    end.
%% Build the write_file spec for releases/start_erl.data, which holds
%% "<erts vsn> <boot release vsn>\n". Also returns the boot release
%% version (used as the releases subdirectory name).
spec_start_file(#sys{boot_rel = BootRelName,
                     rels = Rels,
                     apps = Apps}) ->
    {value, Erts} = lists:keysearch(erts, #app.name, Apps),
    {value, BootRel} = lists:keysearch(BootRelName, #rel.name, Rels),
    Data = Erts#app.vsn ++ " " ++ BootRel#rel.vsn ++ "\n",
    {BootRel#rel.vsn, {write_file, "start_erl.data", Data}}.
%% All spec entries whose name (second element) starts with Prefix.
lookup_spec(Prefix, Specs) ->
    [Spec || Spec <- Specs, lists:prefix(Prefix, element(2, Spec))].
%% Like lookup_spec/2 but throws {error, Text} when nothing matches.
safe_lookup_spec(Prefix, Specs) ->
    case lookup_spec(Prefix, Specs) of
        [] ->
            reltool_utils:throw_error("Mandatory system file ~ts is "
                                      "not included", [Prefix]);
        Matches ->
            Matches
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Specify applications
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Build the lib directory specs for all included (non-escript)
%% applications. With excl_lib =:= otp_root, applications living under
%% the OTP root are skipped and the kernel/stdlib check is waived.
spec_lib_files(#sys{root_dir = RootDir,
                    apps = Apps,
                    excl_lib = ExclLib} = Sys) ->
    Filter = fun(#app{is_escript = IsEscript, is_included = IsIncl,
                      is_pre_included = IsPre, name = Name,
                      active_dir = ActiveDir}) ->
                     if
                         Name =:= ?MISSING_APP_NAME ->
                             false;
                         IsEscript =/= false ->
                             false;
                         IsIncl; IsPre ->
                             case ExclLib of
                                 otp_root ->
                                     not lists:prefix(RootDir,ActiveDir);
                                 _ ->
                                     true
                             end;
                         true ->
                             false
                     end
             end,
    SelectedApps = lists:filter(Filter, Apps),
    case ExclLib of
        otp_root ->
            ok;
        _ ->
            check_apps([kernel, stdlib], SelectedApps)
    end,
    lists:flatten([spec_app(App, Sys) || App <- SelectedApps]).
%% Verify that every mandatory application name occurs in Apps;
%% throws {error, Text} otherwise.
check_apps([], _Apps) ->
    ok;
check_apps([Mandatory | Rest], Apps) ->
    case lists:keymember(Mandatory, #app.name, Apps) of
        true ->
            check_apps(Rest, Apps);
        false ->
            reltool_utils:throw_error("Mandatory application ~w is "
                                      "not included in ~p",
                                      [Mandatory, Apps])
    end.
%% Build the spec for one application directory: scan it, replace
%% ebin with the selected modules plus .app/.appup files, apply the
%% app-level file filters, and finally decide dir vs archive layout.
spec_app(#app{name = Name,
              mods = Mods,
              active_dir = SourceDir,
              incl_app_filters = AppInclRegexps,
              excl_app_filters = AppExclRegexps} = App,
         #sys{incl_app_filters = SysInclRegexps,
              excl_app_filters = SysExclRegexps,
              debug_info = SysDebugInfo} = Sys) ->
    %% List files recursively
    {create_dir, _, AppFiles} = spec_dir(SourceDir),
    %% Replace ebin
    AppUpFilename = atom_to_list(Name) ++ ".appup",
    EbinDir = filename:join([SourceDir, "ebin"]),
    OptAppUpFileSpec = spec_opt_copy_file(EbinDir, AppUpFilename),
    OptAppFileSpec = spec_app_file(App, Sys, EbinDir),
    ModSpecs = [spec_mod(M, SysDebugInfo) || M <- Mods,
                                             M#mod.is_included,
                                             M#mod.exists],
    NewEbin = {create_dir,
               "ebin",
               OptAppUpFileSpec ++ OptAppFileSpec ++ ModSpecs},
    AppFiles2 = lists:keystore("ebin", 2, AppFiles, NewEbin),
    %% Apply file filter
    InclRegexps = reltool_utils:default_val(AppInclRegexps, SysInclRegexps),
    ExclRegexps = reltool_utils:default_val(AppExclRegexps, SysExclRegexps),
    AppFiles3 = filter_spec(AppFiles2, InclRegexps, ExclRegexps),
    %% Regular top directory and/or archive
    spec_archive(App, Sys, AppFiles3).
%% Decide how an application's files are laid out on the target:
%% plain directory, .ez archive, or both (files not matched by the
%% archive filters end up in a plain directory next to the archive).
%%
%% Fix: ArchiveOpts was computed twice from the same expression; the
%% redundant second binding (a no-op re-match) has been removed.
spec_archive(#app{label = Label,
                  active_dir = SourceDir,
                  incl_archive_filters = AppInclArchiveDirs,
                  excl_archive_filters = AppExclArchiveDirs,
                  archive_opts = AppArchiveOpts},
             #sys{root_dir = RootDir,
                  incl_archive_filters = SysInclArchiveDirs,
                  excl_archive_filters = SysExclArchiveDirs,
                  archive_opts = SysArchiveOpts},
             Files) ->
    InclArchiveDirs =
        reltool_utils:default_val(AppInclArchiveDirs, SysInclArchiveDirs),
    ExclArchiveDirs =
        reltool_utils:default_val(AppExclArchiveDirs, SysExclArchiveDirs),
    ArchiveOpts =
        reltool_utils:default_val(AppArchiveOpts, SysArchiveOpts),
    Match = fun(F) -> match(element(2, F), InclArchiveDirs, ExclArchiveDirs) end,
    case lists:filter(Match, Files) of
        [] ->
            %% Nothing to archive
            [spec_create_dir(RootDir, SourceDir, Label, Files)];
        ArchiveFiles ->
            OptDir =
                case Files -- ArchiveFiles of
                    [] ->
                        [];
                    ExternalFiles ->
                        %% Files kept outside the archive get their own
                        %% plain directory.
                        [spec_create_dir(RootDir,
                                         SourceDir,
                                         Label,
                                         ExternalFiles)]
                end,
            ArchiveDir =
                spec_create_dir(RootDir, SourceDir, Label, ArchiveFiles),
            [{archive, Label ++ ".ez", ArchiveOpts, [ArchiveDir]} | OptDir]
    end.
%% Recursively scan Dir (via erl_prim_loader, so it also works inside
%% escripts/archives) into a create_dir/copy_file spec tree. Throws
%% {error, Text} on unreadable entries.
spec_dir(Dir) ->
    Base = filename:basename(Dir),
    case erl_prim_loader:read_file_info(Dir) of
        {ok, #file_info{type = directory}} ->
            case erl_prim_loader:list_dir(Dir) of
                {ok, Files} ->
                    %% Directory
                    {create_dir,
                     Base,
                     [spec_dir(filename:join([Dir, F])) || F <- Files]};
                error ->
                    reltool_utils:throw_error("list dir ~ts failed", [Dir])
            end;
        {ok, #file_info{type = regular}} ->
            %% Plain file
            {copy_file, Base};
        _ ->
            reltool_utils:throw_error("read file info ~ts failed", [Dir])
    end.
%% Spec for one module's beam file: plain copy when debug info is
%% kept, strip_beam when it is to be removed.
spec_mod(Mod, DebugInfo) ->
    BeamFile = atom_to_list(Mod#mod.name) ++ code:objfile_extension(),
    case reltool_utils:default_val(Mod#mod.debug_info, DebugInfo) of
        keep ->
            {copy_file, BeamFile};
        strip ->
            {strip_beam, BeamFile}
    end.
%% Spec for the application's .app file: keep the original, or
%% regenerate it with a restricted (strip) or full (all) module list.
%%
%% Refactor: the strip/all branches shared identical generation code;
%% factored into spec_gen_app_file/3.
spec_app_file(#app{name = Name,
                   info = Info,
                   mods = Mods,
                   app_file = AppFile} = App,
              #sys{app_file = SysAppFile},
              EbinDir) ->
    AppFilename = atom_to_list(Name) ++ ".app",
    case reltool_utils:default_val(AppFile, SysAppFile) of
        keep ->
            %% Copy if it exists
            spec_opt_copy_file(EbinDir, AppFilename);
        strip ->
            %% Only keep included modules that are also listed in the
            %% original app file
            ModNames = [M#mod.name || M <- Mods,
                                      M#mod.is_included,
                                      lists:member(M#mod.name,
                                                   Info#app_info.modules)],
            spec_gen_app_file(App, AppFilename, ModNames);
        all ->
            %% Include all included modules
            ModNames = [M#mod.name || M <- Mods, M#mod.is_included],
            spec_gen_app_file(App, AppFilename, ModNames)
    end.

%% Generate a replacement .app file spec listing exactly ModNames.
spec_gen_app_file(#app{info = Info} = App, AppFilename, ModNames) ->
    App2 = App#app{info = Info#app_info{modules = ModNames}},
    Contents = gen_app(App2),
    AppIoList = io_lib:format("%% app generated at ~w ~w\n~p.\n\n",
                              [date(), time(), Contents]),
    [{write_file, AppFilename, AppIoList}].
%% Optional copy: a one-element copy_file spec when the file exists,
%% otherwise the empty list.
spec_opt_copy_file(DirName, BaseName) ->
    FullPath = filename:join([DirName, BaseName]),
    case filelib:is_regular(FullPath, erl_prim_loader) of
        true  -> [{copy_file, BaseName}];
        false -> []
    end.
%% Create-dir spec for an application: a relative dir when SourceDir
%% is under RootDir/lib, otherwise an absolute-source variant.
spec_create_dir(RootDir, SourceDir, BaseDir, Files) ->
    LibDir = filename:join([RootDir, "lib"]),
    case abs_to_rel_path(LibDir, SourceDir) of
        {relative, RelDir} -> {create_dir, RelDir, Files};
        {absolute, AbsDir} -> {create_dir, BaseDir, AbsDir, Files}
    end.
%% Express SourcePath relative to RootDir when RootDir is a prefix of
%% it; otherwise return the original absolute path.
abs_to_rel_path(RootDir, SourcePath) ->
    abs_to_rel_path(filename:split(RootDir),
                    filename:split(SourcePath),
                    SourcePath).

abs_to_rel_path([Part | RootRest], [Part | SourceRest], SourcePath) ->
    %% Common leading component; keep stripping.
    abs_to_rel_path(RootRest, SourceRest, SourcePath);
abs_to_rel_path([], SourceRest, _SourcePath) ->
    {relative, filename:join(SourceRest)};
abs_to_rel_path(_RootRest, _SourceRest, SourcePath) ->
    {absolute, SourcePath}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Evaluate specification
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Evaluate a spec tree against an existing TargetDir. On a thrown
%% {error, Text}, everything created so far is cleaned up before the
%% error tuple is returned.
eval_spec(Spec, SourceDir, TargetDir) ->
    SourceDir2 = filename:absname(SourceDir),
    TargetDir2 = filename:absname(TargetDir),
    try
        case filelib:is_dir(TargetDir2) of
            true ->
                do_eval_spec(Spec, SourceDir2, SourceDir2, TargetDir2),
                ok;
            false ->
                {error, TargetDir2 ++ ": " ++ file:format_error(enoent)}
        end
    catch
        throw:{error, Text} ->
            cleanup_spec(Spec, TargetDir2),
            {error, Text}
    end.
%% Evaluate one spec node: create directories, copy/write/strip files
%% and build archives under TargetDir.
%%
%% Fix: the disabled source_dir clause had lost its %% comment markers
%% (leaving invalid tokens in the function); restored as comments.
do_eval_spec(List, OrigSourceDir, SourceDir, TargetDir) when is_list(List) ->
    lists:foreach(fun(F) ->
                          do_eval_spec(F, OrigSourceDir, SourceDir, TargetDir)
                  end,
                  List);
%% do_eval_spec({source_dir, SourceDir2, Spec}, OrigSourceDir, _SourceDir, TargetDir) ->
%%     %% Source dir is absolute or relative the original source dir
%%     SourceDir3 = filename:join([OrigSourceDir, SourceDir2]),
%%     do_eval_spec(Spec, OrigSourceDir, SourceDir3, TargetDir);
do_eval_spec({create_dir, Dir, Files}, OrigSourceDir, SourceDir, TargetDir) ->
    SourceDir2 = filename:join([SourceDir, Dir]),
    TargetDir2 = filename:join([TargetDir, Dir]),
    reltool_utils:create_dir(TargetDir2),
    do_eval_spec(Files, OrigSourceDir, SourceDir2, TargetDir2);
do_eval_spec({create_dir, Dir, OldDir, Files},
             OrigSourceDir,
             _SourceDir,
             TargetDir) ->
    %% Absolute-source variant: OldDir is resolved against the
    %% original source dir.
    SourceDir2 = filename:join([OrigSourceDir, OldDir]),
    TargetDir2 = filename:join([TargetDir, Dir]),
    reltool_utils:create_dir(TargetDir2),
    do_eval_spec(Files, SourceDir2, SourceDir2, TargetDir2);
do_eval_spec({archive, Archive, Options, Files},
             OrigSourceDir,
             SourceDir,
             TargetDir) ->
    %% Materialize the files in a tmp dir, zip them, then remove tmp.
    TmpSpec = {create_dir, "tmp", Files},
    TmpDir = filename:join([TargetDir, "tmp"]),
    reltool_utils:create_dir(TmpDir),
    do_eval_spec(Files, OrigSourceDir, SourceDir, TmpDir),
    ArchiveFile = filename:join([TargetDir, Archive]),
    Files2 = [element(2, F) || F <- Files],
    Res = zip:create(ArchiveFile, Files2, [{cwd, TmpDir} | Options]),
    cleanup_spec(TmpSpec, TargetDir),
    case Res of
        {ok, _} ->
            ok;
        {error, Reason} ->
            reltool_utils:throw_error("create archive ~ts failed: ~p",
                                      [ArchiveFile, Reason])
    end;
do_eval_spec({copy_file, File}, _OrigSourceDir, SourceDir, TargetDir) ->
    SourceFile = filename:join([SourceDir, File]),
    TargetFile = filename:join([TargetDir, File]),
    reltool_utils:copy_file(SourceFile, TargetFile);
do_eval_spec({copy_file, File, OldFile},
             OrigSourceDir,
             _SourceDir,
             TargetDir) ->
    SourceFile = filename:join([OrigSourceDir, OldFile]),
    TargetFile = filename:join([TargetDir, File]),
    reltool_utils:copy_file(SourceFile, TargetFile);
do_eval_spec({write_file, File, IoList},
             _OrigSourceDir,
             _SourceDir,
             TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    reltool_utils:write_file(TargetFile, IoList);
do_eval_spec({strip_beam, File}, _OrigSourceDir, SourceDir, TargetDir) ->
    SourceFile = filename:join([SourceDir, File]),
    TargetFile = filename:join([TargetDir, File]),
    BeamBin = reltool_utils:read_file(SourceFile),
    {ok, {_, BeamBin2}} = beam_lib:strip(BeamBin),
    reltool_utils:write_file(TargetFile, BeamBin2).
%% Best-effort removal of everything a spec created under TargetDir
%% (errors from file:delete/del_dir are ignored).
%%
%% Fix: the disabled source_dir clause had lost its %% comment markers
%% (leaving invalid tokens in the function); restored as comments.
cleanup_spec(List, TargetDir) when is_list(List) ->
    lists:foreach(fun(F) -> cleanup_spec(F, TargetDir) end, List);
%% cleanup_spec({source_dir, _SourceDir, Spec}, TargetDir) ->
%%     cleanup_spec(Spec, TargetDir);
cleanup_spec({create_dir, Dir, Files}, TargetDir) ->
    TargetDir2 = filename:join([TargetDir, Dir]),
    cleanup_spec(Files, TargetDir2),
    file:del_dir(TargetDir2);
cleanup_spec({create_dir, Dir, _OldDir, Files}, TargetDir) ->
    TargetDir2 = filename:join([TargetDir, Dir]),
    cleanup_spec(Files, TargetDir2),
    file:del_dir(TargetDir2);
cleanup_spec({archive, Archive, _Options, Files}, TargetDir) ->
    TargetFile = filename:join([TargetDir, Archive]),
    file:delete(TargetFile),
    TmpDir = filename:join([TargetDir, "tmp"]),
    cleanup_spec(Files, TmpDir),
    file:del_dir(TmpDir);
cleanup_spec({copy_file, File}, TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    file:delete(TargetFile);
cleanup_spec({copy_file, NewFile, _OldFile}, TargetDir) ->
    TargetFile = filename:join([TargetDir, NewFile]),
    file:delete(TargetFile);
cleanup_spec({write_file, File, _IoList}, TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    file:delete(TargetFile);
cleanup_spec({strip_beam, File}, TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    file:delete(TargetFile).
%% Filter a spec tree through include/exclude regexps, starting with
%% an empty path prefix.
filter_spec(List, InclRegexps, ExclRegexps) ->
    do_filter_spec("", List, InclRegexps, ExclRegexps).
%% Walk the spec tree, keeping entries whose accumulated path matches
%% the include regexps and not the exclude regexps. A directory or
%% archive is kept empty when it matches itself even though all of
%% its children were filtered out.
%%
%% Fix: the disabled source_dir clause had lost its %% comment markers
%% (leaving invalid tokens in the function); restored as comments.
do_filter_spec(Path, List, InclRegexps, ExclRegexps) when is_list(List) ->
    lists:zf(fun(File) ->
                     do_filter_spec(Path, File, InclRegexps, ExclRegexps)
             end,
             List);
%% do_filter_spec(Path, {source_dir, _SourceDir, Spec}, InclRegexps, ExclRegexps) ->
%%     do_filter_spec(Path, Spec, InclRegexps, ExclRegexps);
do_filter_spec(Path, {create_dir, Dir, Files}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, Dir),
    case do_filter_spec(Path2, Files, InclRegexps, ExclRegexps) of
        [] ->
            case match(Path2, InclRegexps, ExclRegexps) of
                true ->
                    {true, {create_dir, Dir, []}};
                false ->
                    false
            end;
        Files2 when is_list(Files2) ->
            {true, {create_dir, Dir, Files2}}
    end;
do_filter_spec(Path,
               {create_dir, NewDir, OldDir, Files},
               InclRegexps,
               ExclRegexps) ->
    Path2 = opt_join(Path, NewDir),
    case do_filter_spec(Path2, Files, InclRegexps, ExclRegexps) of
        [] ->
            case match(Path2, InclRegexps, ExclRegexps) of
                true ->
                    {true, {create_dir, NewDir, OldDir, []}};
                false ->
                    false
            end;
        Files2 when is_list(Files2) ->
            {true, {create_dir, NewDir, OldDir, Files2}}
    end;
do_filter_spec(Path,
               {archive, Archive, Options, Files},
               InclRegexps,
               ExclRegexps) ->
    case do_filter_spec(Path, Files, InclRegexps, ExclRegexps) of
        [] ->
            case match(Path, InclRegexps, ExclRegexps) of
                true ->
                    {true, {archive, Archive, Options, []}};
                false ->
                    false
            end;
        Files2 when is_list(Files2) ->
            {true, {archive, Archive, Options, Files2}}
    end;
do_filter_spec(Path, {copy_file, File}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, File),
    match(Path2, InclRegexps, ExclRegexps);
do_filter_spec(Path,
               {copy_file, NewFile, _OldFile},
               InclRegexps,
               ExclRegexps) ->
    Path2 = opt_join(Path, NewFile),
    match(Path2, InclRegexps, ExclRegexps);
do_filter_spec(Path, {write_file, File, _IoList}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, File),
    match(Path2, InclRegexps, ExclRegexps);
do_filter_spec(Path, {strip_beam, File}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, File),
    match(Path2, InclRegexps, ExclRegexps).
%% Join File onto Path; an empty Path leaves File unchanged.
opt_join([], File) ->
    File;
opt_join(Path, File) ->
    filename:join(Path, File).
%% A string matches when it matches at least one include regexp and
%% none of the exclude regexps.
%%
%% Fix: the "Match at least one regexp" comment and the disabled
%% io:format debug line had lost their %% markers; restored.
match(String, InclRegexps, ExclRegexps) ->
    match(String, InclRegexps) andalso not match(String, ExclRegexps).

%% Match at least one regexp
match(_String, []) ->
    false;
match(String, [#regexp{source = _, compiled = MP} | Regexps]) ->
    %% io:format("Regexp: ~p ~p\n", [String, Regexp]),
    case re:run(String, MP, [{capture, none}]) of
        nomatch -> match(String, Regexps);
        match -> true
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Old style installation
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Old-style installation of a previously generated target system.
%% Returns ok | {error, Reason}.
install(RelName, TargetDir) ->
    try
        do_install(RelName, TargetDir)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Read releases/start_erl.data to learn the erts and release
%% versions, then either write bin/erl.ini (Windows) or substitute
%% the *.src start scripts (other platforms), and finally create the
%% RELEASES file via release_handler.
do_install(RelName, TargetDir) ->
    TargetDir2 = filename:absname(TargetDir),
    RelDir = filename:join([TargetDir2, "releases"]),
    DataFile = filename:join([RelDir, "start_erl.data"]),
    Bin = reltool_utils:read_file(DataFile),
    case string:tokens(binary_to_list(Bin), " \n") of
        [ErlVsn, RelVsn | _] ->
            ErtsBinDir = filename:join([TargetDir2, "erts-" ++ ErlVsn, "bin"]),
            BinDir = filename:join([TargetDir2, "bin"]),
            case os:type() of
                {win32, _} ->
                    NativeRootDir = nativename(TargetDir2),
                    NativeErtsBinDir = nativename(ErtsBinDir),
                    IniData0 = ["[erlang]\r\n",
                                "Bindir=", NativeErtsBinDir, "\r\n",
                                "Progname=erl\r\n",
                                "Rootdir=", NativeRootDir, "\r\n"],
                    IniData = unicode:characters_to_binary(IniData0),
                    IniFile = filename:join([BinDir, "erl.ini"]),
                    ok = file:write_file(IniFile, IniData);
                _ ->
                    subst_src_scripts(start_scripts(),
                                      ErtsBinDir,
                                      BinDir,
                                      [{"FINAL_ROOTDIR", TargetDir2},
                                       {"EMU", "beam"}],
                                      [preserve])
            end,
            RelFile = filename:join([RelDir, RelVsn, RelName ++ ".rel"]),
            ok = release_handler:create_RELEASES(TargetDir2, RelFile),
            ok;
        _ ->
            reltool_utils:throw_error("~ts: Illegal data file syntax",[DataFile])
    end.
%% Native (OS-specific) form of Dir with backslashes doubled, so the
%% result is safe to embed in erl.ini.
nativename(Dir) ->
    escape_backslash(filename:nativename(Dir)).
%% Double every backslash in the string (ini-file escaping).
escape_backslash(Chars) ->
    lists:flatmap(fun($\\) -> [$\\, $\\];
                     (C)   -> [C]
                  end,
                  Chars).
%% Substitute variables in each named start script from SrcDir into
%% DestDir.
subst_src_scripts(Scripts, SrcDir, DestDir, Vars, Opts) ->
    _ = [subst_src_script(Script, SrcDir, DestDir, Vars, Opts)
         || Script <- Scripts],
    ok.
%% Substitute variables in SrcDir/Script.src, writing DestDir/Script.
subst_src_script(Script, SrcDir, DestDir, Vars, Opts) ->
    SrcFile = filename:join([SrcDir, Script ++ ".src"]),
    DestFile = filename:join([DestDir, Script]),
    subst_file(SrcFile, DestFile, Vars, Opts).
%% Read Src, substitute %Var% occurrences per Vars, and write Dest.
%% With the preserve option, Dest also gets Src's file info (notably
%% the executable mode bits of start scripts).
subst_file(Src, Dest, Vars, Opts) ->
    Bin = reltool_utils:read_file(Src),
    Chars = subst(binary_to_list(Bin), Vars),
    reltool_utils:write_file(Dest, Chars),
    case lists:member(preserve, Opts) of
        true ->
            FileInfo = reltool_utils:read_file_info(Src),
            reltool_utils:write_file_info(Dest, FileInfo);
        false ->
            ok
    end.
%% subst(Str, Vars)
%% Vars = [{Var, Val}]
%% Var = Val = string()
%% Substitute all occurrences of %Var% for Val in Str, using the list
%% of variables in Vars. An unknown %Var% is kept verbatim.
%%
%% Reconstructed: the $% character literals in these clauses had been
%% mis-parsed as comment starts, mangling the source. Also fixed the
%% end-of-input clause, which wrapped the accumulator in an extra
%% list and thereby emitted an unterminated %Var un-reversed.
subst(Str, Vars) ->
    subst(Str, Vars, []).

subst([$%, C | Rest], Vars, Result) when $A =< C, C =< $Z ->
    subst_var([C | Rest], Vars, Result, []);
subst([$%, C | Rest], Vars, Result) when $a =< C, C =< $z ->
    subst_var([C | Rest], Vars, Result, []);
subst([$%, C | Rest], Vars, Result) when C == $_ ->
    subst_var([C | Rest], Vars, Result, []);
subst([C | Rest], Vars, Result) ->
    %% Result accumulates the output in reverse order.
    subst(Rest, Vars, [C | Result]);
subst([], _Vars, Result) ->
    lists:reverse(Result).

subst_var([$% | Rest], Vars, Result, VarAcc) ->
    Key = lists:reverse(VarAcc),
    case lists:keyfind(Key, 1, Vars) of
        {Key, Value} ->
            subst(Rest, Vars, lists:reverse(Value, Result));
        false ->
            %% Unknown variable: emit "%Key%" unchanged.
            subst(Rest, Vars, [$% | VarAcc ++ [$% | Result]])
    end;
subst_var([C | Rest], Vars, Result, VarAcc) ->
    subst_var(Rest, Vars, Result, [C | VarAcc]);
subst_var([], Vars, Result, VarAcc) ->
    %% Input ended inside "%Var" with no closing %; emit it verbatim.
    subst([], Vars, VarAcc ++ [$% | Result]).
%% Names of the start scripts substituted into the target's bin dir.
start_scripts() ->
    ["erl", "start", "start_erl"].
| null | https://raw.githubusercontent.com/wireless-net/erlang-nommu/79f32f81418e022d8ad8e0e447deaea407289926/lib/reltool/src/reltool_target.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Public
Hardcoded internals about the kernel application
Mandatory modules are modules that must be loaded before processes
can be started. These are a collection of modules from the kernel
and stdlib applications. Nowadays, error_handler dynamically loads
almost every module. The error_handler self must still be there
though.
Kernel processes are specially treated by the init process. If a
kernel process terminates the whole system terminates.
Generate the contents of a config file
io:format("~p(~p):\n\t~p\n\t~p\n",
Generate the contents of an app file
Generate the contents of a rel file
Generate the contents of a boot file
Generate the contents of a script file
Create the script
Load mandatory modules
Load remaining modules
Start kernel processes
Load applications
Start applications
Apply user specific customizations
Apps = [#app{}]
(Tickets are written for systools, but needs to be implemented here
as well.)
Make sure that used and included applications are given in the same
order as in the release resource file (.rel). Otherwise load and
start instructions in the boot script, and consequently release
Purpose: Sort applications according to dependencies among
applications. If order doesn't matter, use the same
order as in the original list.
No more app that must be started before this one is
found; they are all already taken care of (and present
in Visited list)
The apps in L must be started before the app.
in that case we have a circular dependency.
L must be started before N, try again, with all apps
this has already been checked before, but as we have the info...
It is OK to have a dependency like
Create the load path used in the generated script.
system (e.g. in an embbeded system), i.e. all applications are
located under $ROOT/lib.
Otherwise all paths are set according to dir per application.
Create the complete path.
Create the path to a specific application.
We know at least that we are located
under the variable dir.
Create the path to the kernel and stdlib applications.
Ignore empty items.
Generate rel, script and boot files
All applications that are fetched from somewhere
other than $OTP_ROOT/lib will get $RELTOOL_EXT_LIB
as path prefix in the .script file.
Generate a complete target system
Remove Windows .ini file
Replace erl with dyn_erl
io:format("lookup fail ~ts:\n\t~p\n", [Prefix, Specs]),
Specify applications
List files recursively
Replace ebin
Apply file filter
Regular top directory and/or archive
Nothing to archive
Directory
Plain file
Copy if it exists
Remove non-included modules
Generate new file
Include all included modules
Generate new file
Evaluate specification
%% Source dir is absolute or relative the original source dir
cleanup_spec(Spec, TargetDir);
Old style installation
Var% for in Str , using the list
| VarAcc ++ [$% | Result]])
| Result]]). | Copyright Ericsson AB 2009 - 2013 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(reltool_target).
-export([
gen_config/2,
gen_app/1,
gen_rel/2,
gen_rel_files/2,
gen_boot/1,
gen_script/4,
gen_spec/1,
eval_spec/3,
gen_target/2,
install/2
]).
-include("reltool.hrl").
-include_lib("kernel/include/file.hrl").
%% Modules that must be loaded before processes can be started;
%% error_handler loads everything else on demand.
mandatory_modules() ->
    [error_handler].
%% Kernel processes started from the boot script; if any of these
%% terminates, the whole system terminates.
kernel_processes(KernelApp) ->
    [
     {kernelProcess, heart, {heart, start, []}},
     {kernelProcess, error_logger , {error_logger, start_link, []}},
     {kernelProcess,
      application_controller,
      {application_controller, start, [KernelApp]}}
    ].
%% Generate the reltool config term for Sys; InclDefs controls
%% whether values equal to their defaults are emitted.
gen_config(Sys, InclDefs) ->
    {ok, do_gen_config(Sys, InclDefs)}.
do_gen_config(#sys{root_dir = RootDir,
lib_dirs = LibDirs,
mod_cond = ModCond,
incl_cond = AppCond,
apps = Apps,
boot_rel = BootRel,
rels = Rels,
emu_name = EmuName,
profile = Profile,
incl_sys_filters = InclSysFiles,
excl_sys_filters = ExclSysFiles,
incl_app_filters = InclAppFiles,
excl_app_filters = ExclAppFiles,
incl_archive_filters = InclArchiveDirs,
excl_archive_filters = ExclArchiveDirs,
archive_opts = ArchiveOpts,
relocatable = Relocatable,
rel_app_type = RelAppType,
embedded_app_type = InclAppType,
app_file = AppFile,
debug_info = DebugInfo},
InclDefs) ->
ErtsItems =
case lists:keyfind(erts, #app.name, Apps) of
false ->
[];
Erts ->
[{erts, do_gen_config(Erts, InclDefs)}]
end,
AppsItems =
[do_gen_config(A, InclDefs)
|| A <- Apps,
A#app.name =/= ?MISSING_APP_NAME,
A#app.name =/= erts,
A#app.is_escript =/= true],
EscriptItems = [{escript,
A#app.active_dir,
emit(incl_cond, A#app.incl_cond, undefined, InclDefs)}
|| A <- Apps, A#app.is_escript],
DefaultRels = reltool_utils:default_rels(),
RelsItems =
[{rel, R#rel.name, R#rel.vsn, do_gen_config(R, InclDefs)} ||
R <- Rels],
DefaultRelsItems =
[{rel, R#rel.name, R#rel.vsn, do_gen_config(R, InclDefs)} ||
R <- DefaultRels],
RelsItems2 =
case InclDefs of
true -> RelsItems;
false -> RelsItems -- DefaultRelsItems
end,
X = fun(List) -> [Re || #regexp{source = Re} <- List] end,
{sys,
emit(root_dir, RootDir, code:root_dir(), InclDefs) ++
emit(lib_dirs, LibDirs, ?DEFAULT_LIBS, InclDefs) ++
EscriptItems ++
emit(mod_cond, ModCond, ?DEFAULT_MOD_COND, InclDefs) ++
emit(incl_cond, AppCond, ?DEFAULT_INCL_COND, InclDefs) ++
ErtsItems ++
lists:flatten(AppsItems) ++
emit(boot_rel, BootRel, ?DEFAULT_REL_NAME, InclDefs) ++
RelsItems2 ++
emit(emu_name, EmuName, ?DEFAULT_EMU_NAME, InclDefs) ++
emit(relocatable, Relocatable, ?DEFAULT_RELOCATABLE, InclDefs) ++
emit(profile, Profile, ?DEFAULT_PROFILE, InclDefs) ++
emit(incl_sys_filters, X(InclSysFiles), reltool_utils:choose_default(incl_sys_filters, Profile, InclDefs), InclDefs) ++
emit(excl_sys_filters, X(ExclSysFiles), reltool_utils:choose_default(excl_sys_filters, Profile, InclDefs), InclDefs) ++
emit(incl_app_filters, X(InclAppFiles), reltool_utils:choose_default(incl_app_filters, Profile, InclDefs), InclDefs) ++
emit(excl_app_filters, X(ExclAppFiles), reltool_utils:choose_default(excl_app_filters, Profile, InclDefs), InclDefs) ++
emit(incl_archive_filters, X(InclArchiveDirs), ?DEFAULT_INCL_ARCHIVE_FILTERS, InclDefs) ++
emit(excl_archive_filters, X(ExclArchiveDirs), ?DEFAULT_EXCL_ARCHIVE_FILTERS, InclDefs) ++
emit(archive_opts, ArchiveOpts, ?DEFAULT_ARCHIVE_OPTS, InclDefs) ++
emit(rel_app_type, RelAppType, ?DEFAULT_REL_APP_TYPE, InclDefs) ++
emit(embedded_app_type, InclAppType, reltool_utils:choose_default(embedded_app_type, Profile, InclDefs), InclDefs) ++
emit(app_file, AppFile, ?DEFAULT_APP_FILE, InclDefs) ++
emit(debug_info, DebugInfo, ?DEFAULT_DEBUG_INFO, InclDefs)};
do_gen_config(#app{name = Name,
mod_cond = ModCond,
incl_cond = AppCond,
debug_info = DebugInfo,
app_file = AppFile,
incl_app_filters = InclAppFiles,
excl_app_filters = ExclAppFiles,
incl_archive_filters = InclArchiveDirs,
excl_archive_filters = ExclArchiveDirs,
archive_opts = ArchiveOpts,
use_selected_vsn = UseSelected,
vsn = Vsn,
active_dir = ActiveDir,
mods = Mods,
is_included = IsIncl},
InclDefs) ->
AppConfig =
[
emit(mod_cond, ModCond, undefined, InclDefs),
emit(incl_cond, AppCond, undefined, InclDefs),
emit(debug_info, DebugInfo, undefined, InclDefs),
emit(app_file, AppFile, undefined, InclDefs),
emit(incl_app_filters, InclAppFiles, undefined, InclDefs),
emit(excl_app_filters, ExclAppFiles, undefined, InclDefs),
emit(incl_archive_filters, InclArchiveDirs, undefined, InclDefs),
emit(excl_archive_filters, ExclArchiveDirs, undefined, InclDefs),
emit(archive_opts, ArchiveOpts, undefined, InclDefs),
if
IsIncl, InclDefs -> [{vsn, Vsn}, {lib_dir, ActiveDir}];
UseSelected =:= vsn -> [{vsn, Vsn}];
UseSelected =:= dir -> [{lib_dir, ActiveDir}];
true -> []
end,
[do_gen_config(M, InclDefs) || M <- Mods]
],
case lists:flatten(AppConfig) of
FlatAppConfig when FlatAppConfig =/= []; IsIncl ->
[{app, Name, FlatAppConfig}];
[] ->
[]
end;
do_gen_config(#mod{name = Name,
incl_cond = AppCond,
debug_info = DebugInfo,
is_included = IsIncl},
InclDefs) ->
ModConfig =
[
emit(incl_cond, AppCond, undefined, InclDefs),
emit(debug_info, DebugInfo, undefined, InclDefs)
],
case lists:flatten(ModConfig) of
FlatModConfig when FlatModConfig =/= []; IsIncl ->
[{mod, Name, FlatModConfig}];
_ ->
[]
end;
do_gen_config(#rel{name = _Name,
vsn = _Vsn,
rel_apps = RelApps},
InclDefs) ->
[do_gen_config(RA, InclDefs) || RA <- RelApps];
do_gen_config(#rel_app{name = Name,
app_type = Type,
incl_apps = InclApps},
_InclDefs) ->
case {Type, InclApps} of
{undefined, undefined} -> Name;
{undefined, _} -> {Name, InclApps};
{_, undefined} -> {Name, Type};
{_, _} -> {Name, Type, InclApps}
end;
do_gen_config({Tag, Val}, InclDefs) ->
emit(Tag, Val, undefined, InclDefs);
do_gen_config([], _InclDefs) ->
[];
do_gen_config([H | T], InclDefs) ->
lists:flatten([do_gen_config(H, InclDefs), do_gen_config(T, InclDefs)]).
%% Emit a single {Tag, Val} config item, or nothing: undefined values
%% are always suppressed, and values equal to their default are
%% suppressed unless defaults were explicitly requested (InclDefs).
%%
%% Fix: the disabled io:format debug line had lost its %% comment
%% markers (stray tokens in the body); restored as a comment.
emit(Tag, Val, Default, InclDefs) ->
    %% io:format("~p(~p):\n\t~p\n\t~p\n",
    %%           [Tag, Val =/= Default, Val, Default]),
    if
        Val == undefined -> [];
        InclDefs -> [{Tag, Val}];
        Val =/= Default -> [{Tag, Val}];
        true -> []
    end.
gen_app(#app{name = Name,
info = #app_info{description = Desc,
id = Id,
vsn = Vsn,
modules = Mods,
maxP = MaxP,
maxT = MaxT,
registered = Regs,
incl_apps = InclApps,
applications = ReqApps,
env = Env,
mod = StartMod,
start_phases = StartPhases}}) ->
StartPhases2 =
case StartPhases of
undefined -> [];
_ -> [{start_phases, StartPhases}]
end,
Tail =
case StartMod of
undefined -> StartPhases2;
_ -> [{mod, StartMod} | StartPhases2]
end,
{application, Name,
[{description, Desc},
{vsn, Vsn},
{id, Id},
{modules, Mods},
{registered, Regs},
{applications, ReqApps},
{included_applications, InclApps},
{env, Env},
{maxT, MaxT},
{maxP, MaxP} |
Tail]}.
%% Generate the release term for one release.
%% Returns {ok, RelTerm} | {error, Reason}.
gen_rel(Rel, Sys) ->
    try
        MergedApps = merge_apps(Rel, Sys),
        {ok, do_gen_rel(Rel, Sys, MergedApps)}
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Build the {release, ...} term for a .rel file. Requires the erts
%% pseudo-application to be present in Sys so its version can be
%% recorded; throws {error, Text} otherwise.
do_gen_rel(#rel{name = RelName, vsn = RelVsn, rel_apps = RelApps},
           #sys{apps = Apps},
           MergedApps) ->
    ErtsName = erts,
    case lists:keysearch(ErtsName, #app.name, Apps) of
        {value, Erts} ->
            {release,
             {RelName, RelVsn},
             {ErtsName, Erts#app.vsn},
             [strip_rel_info(App, RelApps) || App <- MergedApps]};
        false ->
            reltool_utils:throw_error("Mandatory application ~w is "
                                      "not included",
                                      [ErtsName])
    end.
%% Reduce a merged #app{} to the compact tuple form used in .rel
%% files: {Name, Vsn}, optionally extended with the app type and/or
%% its include list. Type and includes are only emitted when they
%% differ from the defaults (permanent type, app-file include list).
strip_rel_info(#app{name = Name,
                    vsn = Vsn,
                    app_type = Type,
                    info = #app_info{incl_apps = AppInclApps}},
               RelApps) when Type =/= undefined ->
    %% A #rel_app{} include list overrides the one from the .app file.
    RelInclApps = case lists:keyfind(Name,#rel_app.name,RelApps) of
                      #rel_app{incl_apps = RIA} when RIA =/= undefined -> RIA;
                      _ -> undefined
                  end,
    case {Type, RelInclApps} of
        {permanent, undefined} -> {Name, Vsn};
        {permanent, _} -> {Name, Vsn, AppInclApps};
        {_, undefined} -> {Name, Vsn, Type};
        {_, _} -> {Name, Vsn, Type, AppInclApps}
    end.
%% Compute the complete, dependency-ordered application list for one
%% release: first the mandatory kernel/stdlib (always permanent),
%% then the apps listed in the release itself, then - when an
%% embedded_app_type is configured - all remaining included apps.
%% Finally the list is sorted so dependencies come first.
merge_apps(#rel{name = RelName,
                rel_apps = RelApps},
           #sys{apps = Apps,
                rel_app_type = RelAppType,
                embedded_app_type = EmbAppType}) ->
    Mandatory = [kernel, stdlib],
    MergedApps = do_merge_apps(RelName, Mandatory, Apps, permanent, []),
    MergedApps2 = do_merge_apps(RelName, RelApps, Apps, RelAppType, MergedApps),
    %% Extra apps to embed: included, not erts/missing, not merged yet.
    Embedded =
        [A#app.name || A <- Apps,
                       EmbAppType =/= undefined,
                       A#app.is_included,
                       A#app.name =/= erts,
                       A#app.name =/= ?MISSING_APP_NAME,
                       not lists:keymember(A#app.name, #app.name, MergedApps2)],
    MergedApps3 = do_merge_apps(RelName, Embedded, Apps, EmbAppType, MergedApps2),
    %% The accumulator is built in reverse; restore release order
    %% before sorting dependency lists and the apps themselves.
    RevMerged = lists:reverse(MergedApps3),
    MergedSortedUsedAndIncs = sort_used_and_incl_apps(RevMerged,RevMerged),
    sort_apps(MergedSortedUsedAndIncs).
%% Work through the list of release apps (#rel_app{} records or bare
%% names), merging each not-yet-seen app and prepending its own uses
%% and includes to the work list so transitive dependencies are
%% pulled in as well. The accumulator is built in reverse order.
do_merge_apps(RelName, [#rel_app{name = Name} = RA | RelApps], Apps, RelAppType, Acc) ->
    case is_already_merged(Name, RelApps, Acc) of
        true ->
            do_merge_apps(RelName, RelApps, Apps, RelAppType, Acc);
        false ->
            {value, App} = lists:keysearch(Name, #app.name, Apps),
            MergedApp = merge_app(RelName, RA, RelAppType, App),
            ReqNames = (MergedApp#app.info)#app_info.applications,
            IncNames = (MergedApp#app.info)#app_info.incl_apps,
            Acc2 = [MergedApp | Acc],
            do_merge_apps(RelName, ReqNames ++ IncNames ++ RelApps,
                          Apps, RelAppType, Acc2)
    end;
do_merge_apps(RelName, [Name | RelApps], Apps, RelAppType, Acc) ->
    case is_already_merged(Name, RelApps, Acc) of
        true ->
            do_merge_apps(RelName, RelApps, Apps, RelAppType, Acc);
        false ->
            %% Normalize a bare app name into a #rel_app{} and retry.
            RelApp = #rel_app{name = Name},
            do_merge_apps(RelName, [RelApp | RelApps], Apps, RelAppType, Acc)
    end;
do_merge_apps(_RelName, [], _Apps, _RelAppType, Acc) ->
    Acc.
%% Resolve the effective app type and include list for one app in a
%% release. Precedence: explicit #rel_app{} setting, then the per-app
%% setting, then the system-wide release default. Throws when the
%% release lists an included app that the .app file does not declare
%% in included_applications.
merge_app(RelName,
          #rel_app{name = Name,
                   app_type = Type,
                   incl_apps = InclApps0},
          RelAppType,
          App) ->
    Type2 =
        case {Type, App#app.app_type} of
            {undefined, undefined} -> RelAppType;
            {undefined, AppAppType} -> AppAppType;
            {_, _} -> Type
        end,
    Info = App#app.info,
    InclApps =
        case InclApps0 of
            undefined -> Info#app_info.incl_apps;
            _ -> InclApps0
        end,
    %% Every include requested by the release must also be declared
    %% in the .app file; anything left over is an error.
    case InclApps -- Info#app_info.incl_apps of
        [] ->
            App#app{app_type = Type2, info = Info#app_info{incl_apps = InclApps}};
        BadIncl ->
            reltool_utils:throw_error("~w: These applications are "
                                      "used by release ~ts but are "
                                      "missing as included_applications "
                                      "in the app file: ~p",
                                      [Name, RelName, BadIncl])
    end.
%% True when Name is still pending in the remaining work list
%% (either as a bare atom or as a #rel_app{}) or has already been
%% collected into the merged accumulator.
is_already_merged(Name, [Name | _], _MergedApps) ->
    true;
is_already_merged(Name, [#rel_app{name = Name} | _], _MergedApps) ->
    true;
is_already_merged(Name, [_ | RelApps], MergedApps) ->
    is_already_merged(Name, RelApps, MergedApps);
is_already_merged(Name, [], MergedApps) ->
    in_merged(Name, MergedApps).

%% Scan the already-merged apps for Name.
in_merged(Name, [#app{name = Name} | _]) ->
    true;
in_merged(Name, [_ | MergedApps]) ->
    in_merged(Name, MergedApps);
in_merged(_Name, []) ->
    false.
%% Serialize a boot script term into the binary .boot representation.
%% Only accepts a well-formed {script, {Name, Vsn}, Actions} term.
gen_boot({script, {_RelName, _RelVsn}, _Actions} = Script) ->
    {ok, term_to_binary(Script)}.
%% Generate the boot script term for a release.
%% Returns {ok, Script} or {error, Text} (internal errors are
%% signalled via throw and converted here).
gen_script(Rel, Sys, PathFlag, Variables) ->
    try
        do_gen_script(Rel, Sys, merge_apps(Rel, Sys), PathFlag, Variables)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Build the boot script term for one release: preloaded modules
%% first, then the mandatory modules, then all modules of the merged
%% apps, then the kernel processes, and finally the applications are
%% loaded and started in dependency order.
do_gen_script(#rel{name = RelName, vsn = RelVsn},
              #sys{apps = Apps},
              MergedApps,
              PathFlag,
              Variables) ->
    {value, Erts} = lists:keysearch(erts, #app.name, Apps),
    Preloaded = [Mod#mod.name || Mod <- Erts#app.mods],
    Mandatory = mandatory_modules(),
    Early = Mandatory ++ Preloaded,
    {value, KernelApp} = lists:keysearch(kernel, #app.name, MergedApps),
    InclApps = lists:flatmap(fun(#app{info = #app_info{incl_apps = I}}) ->
                                     I
                             end,
                             MergedApps),
    DeepList =
        [
         %% Register preloaded modules
         {preLoaded, lists:sort(Preloaded)},
         {progress, preloaded},
         %% Load mandatory modules
         {path, create_mandatory_path(MergedApps, PathFlag, Variables)},
         {primLoad, lists:sort(Mandatory)},
         {kernel_load_completed},
         {progress, kernel_load_completed},
         %% Load the remaining modules of all merged applications
         [load_app_mods(A, Early, PathFlag, Variables) || A <- MergedApps],
         {progress, modules_loaded},
         %% Start the kernel processes
         {path, create_path(MergedApps, PathFlag, Variables)},
         kernel_processes(gen_app(KernelApp)),
         {progress, init_kernel_started},
         %% Load the applications (kernel is already running)
         [{apply, {application, load, [gen_app(A)]}} ||
             A = #app{name = Name, app_type = Type} <- MergedApps,
             Name =/= kernel,
             Type =/= none],
         {progress, applications_loaded},
         %% Start the applications; included apps are started by
         %% their including application.
         [{apply, {application, start_boot, [Name, Type]}} ||
             #app{name = Name, app_type = Type} <- MergedApps,
             Type =/= none,
             Type =/= load,
             not lists:member(Name, InclApps)],
         %% Apply user-specific customizations (.erlang)
         {apply, {c, erlangrc, []}},
         {progress, started}
        ],
    {ok, {script, {RelName, RelVsn}, lists:flatten(DeepList)}}.
%% Boot-script instructions to load an app's included modules, minus
%% those already loaded early (preloaded and mandatory modules).
load_app_mods(#app{mods = Mods0} = App, Mand, PathFlag, Variables) ->
    Path = cr_path(App, PathFlag, Variables),
    Mods = [M || #mod{name = M, is_included=true} <- Mods0,
                 not lists:member(M, Mand)],
    [{path, [filename:join([Path])]},
     {primLoad, lists:sort(Mods)}].
%% sort_used_and_incl_apps(Apps, OrderedApps) -> Apps
%%   Apps = OrderedApps = [#app{}]
%% Sort the 'applications' and 'incl_apps' lists of each app
%% according to the application order of the release (OTP-4121,
%% OTP-9984): otherwise included applications in the .rel file and
%% upgrade instructions in relup, may end up in the wrong order.
%% Reorder each app's 'applications' and 'incl_apps' lists to follow
%% the overall order given by OrderedApps. Lists with fewer than two
%% elements are already trivially ordered and left untouched.
sort_used_and_incl_apps([#app{info=Info} = App|Apps], OrderedApps) ->
    Incls2 =
        case Info#app_info.incl_apps of
            Incls when length(Incls)>1 ->
                sort_appl_list(Incls, OrderedApps);
            Incls ->
                Incls
        end,
    Uses2 =
        case Info#app_info.applications of
            Uses when length(Uses)>1 ->
                sort_appl_list(Uses, OrderedApps);
            Uses ->
                Uses
        end,
    App2 = App#app{info=Info#app_info{incl_apps=Incls2, applications=Uses2}},
    [App2|sort_used_and_incl_apps(Apps, OrderedApps)];
sort_used_and_incl_apps([], _OrderedApps) ->
    [].
%% Order the names in List according to their position in Order.
sort_appl_list(List, Order) ->
    Tagged = lists:keysort(1, find_pos(List, Order)),
    [Name || {_Pos, Name} <- Tagged].
%% Pair each name with its (1-based) position in OrderedApps.
find_pos(Names, OrderedApps) ->
    [find_pos(1, Name, OrderedApps) || Name <- Names].

%% Locate one name; crashes (by design) when Name is not present.
find_pos(Pos, Name, [#app{name=Name}|_OrderedApps]) ->
    {Pos, Name};
find_pos(Pos, Name, [_OtherAppl|OrderedApps]) ->
    find_pos(Pos+1, Name, OrderedApps).
%% Function: sort_apps(Apps) -> {ok, Apps'} | throw({error, Error})
%% Types: Apps = [{{Name, Vsn}, #application}]
%% Purpose: Sort applications according to dependencies among
%%          applications. If order is undefined, use the given order.
%% Alg. written by Ulf Wiger 970917 (etxuwig@etxb.ericsson.se)
%% Mod. by mbj
%% Topologically sort the applications so that every app comes after
%% the apps it uses or includes. Missing and circular dependencies
%% are collected and reported via throw({error, Text}).
sort_apps(Apps) ->
    sort_apps(Apps, [], [], []).

sort_apps([#app{name = Name, info = Info} = App | Apps],
          Missing,
          Circular,
          Visited) ->
    {Uses, Apps1, NotFnd1} =
        find_all(Name,
                 lists:reverse(Info#app_info.applications),
                 Apps,
                 Visited,
                 [],
                 []),
    {Incs, Apps2, NotFnd2} =
        find_all(Name,
                 lists:reverse(Info#app_info.incl_apps),
                 Apps1,
                 Visited,
                 [],
                 []),
    Missing1 = NotFnd1 ++ NotFnd2 ++ Missing,
    case Uses ++ Incs of
        [] ->
            %% No pending dependencies; emit the app and mark it visited.
            [App | sort_apps(Apps, Missing1, Circular, [Name | Visited])];
        L ->
            %% The apps in L must be started before this app.
            %% Check if we have already taken care of some app in L;
            %% in that case we have a circular dependency.
            NewCircular = [N || #app{name = N} <- L, N2 <- Visited, N =:= N2],
            Circular1 = case NewCircular of
                            [] -> Circular;
                            _ -> [Name | NewCircular] ++ Circular
                        end,
            %% L must be started before the app; try again with all apps
            %% in L added before it.
            Apps3 = del_apps(NewCircular, L ++ [App | Apps2]),
            sort_apps(Apps3, Missing1, Circular1, [Name | Visited])
    end;
sort_apps([], [], [], _) ->
    [];
sort_apps([], Missing, [], _) ->
    reltool_utils:throw_error("Undefined applications: ~p",
                              [make_set(Missing)]);
sort_apps([], [], Circular, _) ->
    reltool_utils:throw_error("Circular dependencies: ~p",
                              [make_set(Circular)]);
sort_apps([], Missing, Circular, _) ->
    reltool_utils:throw_error("Circular dependencies: ~p"
                              "Undefined applications: ~p\n",
                              [make_set(Circular), make_set(Missing)]).
%% Collect from Apps the not-yet-emitted apps that CheckingApp
%% depends on. Returns {Found, RemainingApps, NotFound}, where
%% NotFound contains names that are neither in Apps nor already
%% visited.
find_all(CheckingApp, [Name | Names], Apps, Visited, Found, NotFound) ->
    case lists:keyfind(Name, #app.name, Apps) of
        #app{info = Info} = App ->
            %% It is OK to have a dependency like
            %% X includes Y, Y uses X.
            case lists:member(CheckingApp, Info#app_info.incl_apps) of
                true ->
                    case lists:member(Name, Visited) of
                        true ->
                            find_all(CheckingApp,
                                     Names,
                                     Apps,
                                     Visited,
                                     Found,
                                     NotFound);
                        false ->
                            find_all(CheckingApp,
                                     Names,
                                     Apps,
                                     Visited,
                                     Found,
                                     [Name | NotFound])
                    end;
                false ->
                    find_all(CheckingApp,
                             Names,
                             Apps -- [App],
                             Visited,
                             [App|Found],
                             NotFound)
            end;
        false ->
            %% Not pending: either already emitted (visited) or truly
            %% missing from the configuration.
            case lists:member(Name, Visited) of
                true ->
                    find_all(CheckingApp,
                             Names,
                             Apps,
                             Visited,
                             Found,
                             NotFound);
                false ->
                    find_all(CheckingApp,
                             Names,
                             Apps,
                             Visited,
                             Found,
                             [Name|NotFound])
            end
    end;
find_all(_CheckingApp, [], Apps, _Visited, Found, NotFound) ->
    {Found, Apps, NotFound}.
%% Remove the apps with the given names from Apps.
del_apps(Names, Apps) ->
    lists:foldl(fun(Name, Acc) ->
                        lists:keydelete(Name, #app.name, Acc)
                end,
                Apps,
                Names).
%% If PathFlag is true a script intended to be used as a complete
%% system (e.g. in an embedded system) is created.
%% Build the (deduplicated) code path for all apps in the release.
create_path(Apps, PathFlag, Variables) ->
    Paths = [cr_path(App, PathFlag, Variables) || App <- Apps],
    make_set(Paths).
%% (The otp_build flag is only used for OTP internal system make.)
%% Compute the code path entry for one app in the boot script.
%% PathFlag=true  -> path relative to $ROOT, or to a declared path
%%                   variable when the app lives under one;
%% otp_build      -> version-less $ROOT/lib/Name/ebin;
%% anything else  -> the app's actual directory.
cr_path(#app{label = Label}, true, []) ->
    filename:join(["$ROOT", "lib", Label, "ebin"]);
cr_path(#app{name = Name, vsn = Vsn, label = Label, active_dir = Dir},
        true,
        Variables) ->
    Tail = [Label, "ebin"],
    case variable_dir(Dir, atom_to_list(Name), Vsn, Variables) of
        {ok, VarDir} ->
            filename:join([VarDir] ++ Tail);
        _ ->
            filename:join(["$ROOT", "lib"] ++ Tail)
    end;
cr_path(#app{name = Name}, otp_build, _) ->
    filename:join(["$ROOT", "lib", atom_to_list(Name), "ebin"]);
cr_path(#app{active_dir = Dir}, _, _) ->
    filename:join([Dir, "ebin"]).
%% Find the first path variable {Var, Path} that is a prefix of Dir
%% and return {ok, "$Var/..."} with the remaining path (minus any
%% trailing Name[-Vsn]/ebin component); false when no variable
%% matches.
variable_dir(Dir, Name, Vsn, [{Var,Path} | Variables]) ->
    case lists:prefix(Path, Dir) of
        true ->
            D0 = strip_prefix(Path, Dir),
            case strip_name_ebin(D0, Name, Vsn) of
                {ok, D} ->
                    {ok, filename:join(["\$" ++ Var] ++ D)};
                _ ->
                    %% Can not strip ebin; keep the full remainder.
                    {ok, filename:join(["\$" ++ Var] ++ D0)}
            end;
        false ->
            variable_dir(Dir, Name, Vsn, Variables)
    end;
variable_dir(_Dir, _, _, []) ->
    false.
%% Return the path components of Dir that remain after removing the
%% leading components corresponding to Path. Assumes Path really is a
%% prefix of Dir (checked by the caller).
strip_prefix(Path, Dir) ->
    PrefixParts = filename:split(Path),
    DirParts = filename:split(Dir),
    lists:nthtail(length(PrefixParts), DirParts).
%% Drop a trailing Name[-Vsn] directory, optionally followed by
%% "ebin", from the path component list Dir. Returns {ok, Stripped}
%% or false when the tail does not match.
strip_name_ebin(Dir, Name, Vsn) ->
    FullName = Name ++ "-" ++ Vsn,
    case lists:reverse(Dir) of
        ["ebin", Last | Rev] when Last =:= Name; Last =:= FullName ->
            {ok, lists:reverse(Rev)};
        [Last | Rev] when Last =:= Name; Last =:= FullName ->
            {ok, lists:reverse(Rev)};
        _ ->
            false
    end.
%% Path entries for the mandatory applications (kernel and stdlib)
%% only; other apps map to "" which make_set/1 filters out.
create_mandatory_path(Apps, PathFlag, Variables) ->
    Mandatory = [kernel, stdlib],
    Paths = lists:map(
              fun(#app{name = Name} = App) ->
                      case lists:member(Name, Mandatory) of
                          true ->
                              cr_path(App, PathFlag, Variables);
                          false ->
                              ""
                      end
              end,
              Apps),
    make_set(Paths).
%% Remove duplicates (keeping first occurrences) and drop empty
%% string items. NOTE: the clause head for the "" case had been lost
%% in this copy, leaving an orphaned `make_set(T);` body; restored
%% here - create_mandatory_path/3 relies on "" being filtered out.
make_set([]) ->
    [];
make_set([""|T]) -> % Ignore empty items
    make_set(T);
make_set([H|T]) ->
    [H | [ Y || Y<- make_set(T),
                Y =/= H]].
%% Generate the .rel, .script and .boot files for every release in
%% the system into TargetDir. Returns ok | {error, Text}.
gen_rel_files(Sys, TargetDir) ->
    try
        eval_spec(spec_rel_files(Sys), Sys#sys.root_dir, TargetDir)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% File specs for the release files of all configured releases.
spec_rel_files(#sys{rels = Rels} = Sys) ->
    lists:flatmap(fun(Rel) -> do_spec_rel_files(Rel, Sys) end, Rels).
%% Produce write_file specs for one release's .rel, .script and .boot
%% files. When excl_lib == otp_root, external lib dirs are exposed to
%% the boot script via the RELTOOL_EXT_LIB path variable.
do_spec_rel_files(#rel{name = RelName} = Rel, Sys) ->
    RelFile = RelName ++ ".rel",
    ScriptFile = RelName ++ ".script",
    BootFile = RelName ++ ".boot",
    MergedApps = merge_apps(Rel, Sys),
    GenRel = do_gen_rel(Rel, Sys, MergedApps),
    Variables =
        case Sys#sys.excl_lib of
            otp_root ->
                [{"RELTOOL_EXT_LIB",LibDir} || LibDir <- Sys#sys.lib_dirs] ++
                [{"RELTOOL_EXT_LIB",filename:dirname(AppLibDir)} ||
                    #app{active_dir=AppLibDir,use_selected_vsn=dir}
                        <- MergedApps];
            _ ->
                []
        end,
    PathFlag = true,
    {ok, Script} = do_gen_script(Rel, Sys, MergedApps, PathFlag, Variables),
    {ok, BootBin} = gen_boot(Script),
    %% Stamp generated files with the generation date/time.
    Date = date(),
    Time = time(),
    RelIoList = io_lib:format("%% rel generated at ~w ~w\n~p.\n\n",
                              [Date, Time, GenRel]),
    ScriptIoList = io_lib:format("%% script generated at ~w ~w\n~p.\n\n",
                                 [Date, Time, Script]),
    [
     {write_file, RelFile, RelIoList},
     {write_file, ScriptFile, ScriptIoList},
     {write_file, BootFile, BootBin}
    ].
%% Create a complete target system under TargetDir according to Sys.
%% Returns ok | {error, Text}.
gen_target(Sys, TargetDir) ->
    try
        eval_spec(do_gen_spec(Sys), Sys#sys.root_dir, TargetDir)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Compute the target system spec without evaluating it.
%% Returns {ok, Spec} | {error, Text}.
gen_spec(Sys) ->
    try do_gen_spec(Sys) of
        Spec ->
            {ok, Spec}
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Build the full target-system spec: system files (erts, bin),
%% release files under releases/<BootVsn>, and the lib directory.
%% When excl_lib == otp_root only the lib part is produced. The
%% resulting spec is filtered with the system incl/excl regexps and
%% checked for the mandatory top-level directories.
do_gen_spec(#sys{root_dir = RootDir,
                 excl_lib = ExclLib,
                 incl_sys_filters = InclRegexps,
                 excl_sys_filters = ExclRegexps,
                 relocatable = Relocatable,
                 apps = Apps} = Sys) ->
    RelFiles = spec_rel_files(Sys),
    {SysFiles, InclRegexps2, ExclRegexps2, Mandatory} =
        case ExclLib of
            otp_root ->
                {[],InclRegexps,ExclRegexps,["lib"]};
            _ ->
                {create_dir, _, SF} = spec_dir(RootDir),
                {ER2, SF2} = strip_sys_files(Relocatable, SF, Apps, ExclRegexps),
                {IR2, BinFiles} =
                    spec_bin_files(Sys, SF, SF2, RelFiles, InclRegexps),
                SF3 = [{create_dir, "bin", BinFiles}] ++ SF2,
                {SF3,IR2,ER2,["bin","erts","lib"]}
        end,
    LibFiles = spec_lib_files(Sys),
    {BootVsn, StartFile} = spec_start_file(Sys),
    SysFiles2 =
        [{create_dir, "releases",
          [StartFile,
           {create_dir,BootVsn, RelFiles}]}] ++ SysFiles,
    %% lib is appended after filtering so it is always kept intact.
    SysFiles3 = filter_spec(SysFiles2, InclRegexps2, ExclRegexps2),
    SysFiles4 = SysFiles3 ++ [{create_dir, "lib", LibFiles}],
    check_sys(Mandatory, SysFiles4),
    SysFiles4.
%% Trim the raw root-dir spec: keep only the active erts-<vsn> dir
%% (adjusted for relocatable systems), drop other erts-* dirs, and
%% remove releases/lib/bin which are regenerated separately. For
%% relocatable systems the erts src files are also excluded via an
%% extra regexp. Throws when the installed erts dir is missing.
strip_sys_files(Relocatable, SysFiles, Apps, ExclRegexps) ->
    ExclRegexps2 =
        case Relocatable of
            true ->
                ExtraExcl = ["^erts.*/bin/.*src\$"],
                reltool_utils:decode_regexps(excl_sys_filters,
                                             {add, ExtraExcl},
                                             ExclRegexps);
            false ->
                ExclRegexps
        end,
    {value, Erts} = lists:keysearch(erts, #app.name, Apps),
    FilterErts =
        fun(Spec) ->
                File = element(2, Spec),
                case File of
                    "erts" ->
                        %% A bare "erts" dir means the system was
                        %% never installed.
                        reltool_utils:throw_error("This system is not installed. "
                                                  "The directory ~ts is missing.",
                                                  [Erts#app.label]);
                    _ when File =:= Erts#app.label ->
                        replace_dyn_erl(Relocatable, Spec);
                    "erts-" ++ _ ->
                        false;
                    _ ->
                        true
                end
        end,
    SysFiles2 = lists:zf(FilterErts, SysFiles),
    SysFiles3 = lists:foldl(fun(F, Acc) -> lists:keydelete(F, 2, Acc) end,
                            SysFiles2,
                            ["releases", "lib", "bin"]),
    {ExclRegexps2, SysFiles3}.
%% For relocatable systems, make the erts bin dir self-contained:
%% replace erl with dyn_erl when present (and drop dyn_erl itself),
%% or, on Windows-style installations, drop erl.ini. Non-relocatable
%% specs pass through unchanged. Returns a lists:zf-style result.
replace_dyn_erl(false, _ErtsSpec) ->
    true;
replace_dyn_erl(true, {create_dir, ErtsDir, ErtsFiles}) ->
    [{create_dir, _, BinFiles}] =
        safe_lookup_spec("bin", ErtsFiles),
    case lookup_spec("dyn_erl", BinFiles) of
        [] ->
            case lookup_spec("erl.ini", BinFiles) of
                [] ->
                    true;
                [{copy_file, ErlIni}] ->
                    %% Remove Windows-specific erl.ini.
                    BinFiles2 = lists:keydelete(ErlIni, 2, BinFiles),
                    ErtsFiles2 =
                        lists:keyreplace("bin",
                                         2,
                                         ErtsFiles,
                                         {create_dir, "bin", BinFiles2}),
                    {true, {create_dir, ErtsDir, ErtsFiles2}}
            end;
        [{copy_file, DynErlExe}] ->
            %% Replace erl with dyn_erl (relocatable launcher).
            ErlExe = "erl" ++ filename:extension(DynErlExe),
            BinFiles2 = lists:keydelete(DynErlExe, 2, BinFiles),
            DynErlExe2 = filename:join([ErtsDir, "bin", DynErlExe]),
            BinFiles3 = lists:keyreplace(ErlExe,
                                         2,
                                         BinFiles2,
                                         {copy_file, ErlExe, DynErlExe2}),
            ErtsFiles2 = lists:keyreplace("bin",
                                          2,
                                          ErtsFiles,
                                          {create_dir, "bin", BinFiles3}),
            {true, {create_dir, ErtsDir, ErtsFiles2}}
    end.
%% Build the spec for the top-level bin directory: copies of the
%% erts bin files that also exist in the original bin dir, the
%% escript apps, and the boot/start files for all releases. The
%% escripts are additionally whitelisted in the incl regexps.
spec_bin_files(Sys, AllSysFiles, StrippedSysFiles, RelFiles, InclRegexps) ->
    [{create_dir, ErtsLabel, ErtsFiles}] =
        safe_lookup_spec("erts", StrippedSysFiles),
    [{create_dir, _, BinFiles}] = safe_lookup_spec("bin", ErtsFiles),
    ErtsBin = filename:join([ErtsLabel, "bin"]),
    Escripts = spec_escripts(Sys, ErtsBin, BinFiles),
    Map = fun({copy_file, File}) ->
                  {copy_file, File, filename:join([ErtsBin, File])};
             ({copy_file, NewFile, OldFile}) ->
                  {_, OldFile2} =
                      abs_to_rel_path(ErtsBin,
                                      filename:join([ErtsBin, OldFile])),
                  {copy_file, NewFile, OldFile2}
          end,
    %% Do only copy those bin files from erts/bin that also exist in bin
    [{create_dir, _, OldBinFiles}] = safe_lookup_spec("bin", AllSysFiles),
    GoodNames = [F || {copy_file, F} <- OldBinFiles,
                      not lists:suffix(".boot", F),
                      not lists:suffix(".script", F)],
    BinFiles2 = [Map(S) || S <- BinFiles,
                           lists:member(element(2, S), GoodNames)],
    BootFiles = [F || F <- RelFiles, lists:suffix(".boot", element(2, F))],
    %% The boot_rel release also provides the default start.boot.
    [{write_file, _, BootRel}] =
        safe_lookup_spec(Sys#sys.boot_rel ++ ".boot", BootFiles),
    BootFiles2 = lists:keystore("start.boot",
                                2,
                                BootFiles,
                                {write_file, "start.boot", BootRel}),
    MakeRegexp =
        fun(File) -> "^bin/" ++ element(2, File) ++ "(|.escript)\$" end,
    ExtraIncl = lists:map(MakeRegexp, Escripts),
    InclRegexps2 = reltool_utils:decode_regexps(incl_sys_filters,
                                                {add, ExtraIncl},
                                                InclRegexps),
    {InclRegexps2, Escripts ++ BinFiles2 ++ BootFiles2}.
%% File specs for all included escript "apps": each contributes its
%% .escript source plus a copy of the escript executable.
spec_escripts(#sys{apps = Apps}, ErtsBin, BinFiles) ->
    Filter = fun(#app{is_escript = IsEscript,
                      is_included = IsIncl,
                      is_pre_included = IsPre,
                      name = Name,
                      active_dir = File}) ->
                     if
                         Name =:= ?MISSING_APP_NAME ->
                             false;
                         IsEscript =/= true ->
                             false;
                         IsIncl; IsPre ->
                             {true, do_spec_escript(File, ErtsBin, BinFiles)};
                         true ->
                             false
                     end
             end,
    lists:flatten(lists:zf(Filter, Apps)).
%% Specs for one escript: the <Base>.escript source file plus a copy
%% of the escript executable renamed to <Base>[.exe].
do_spec_escript(File, ErtsBin, BinFiles) ->
    [{copy_file, EscriptExe}] = safe_lookup_spec("escript", BinFiles),
    Base = filename:basename(File, ".escript"),
    [{copy_file, Base ++ ".escript", File},
     {copy_file, Base ++ filename:extension(EscriptExe),
      filename:join([ErtsBin, EscriptExe])}].
%% Verify that every mandatory top-level directory is present in the
%% spec; throws via do_check_sys/2 otherwise.
check_sys(Mandatory, SysFiles) ->
    lists:foreach(fun(Dir) -> do_check_sys(Dir, SysFiles) end, Mandatory).
%% Throw {error, Text} when no spec entry starts with Prefix.
do_check_sys(Prefix, Specs) ->
    case lookup_spec(Prefix, Specs) of
        [] ->
            reltool_utils:throw_error("Mandatory system directory ~ts "
                                      "is not included",
                                      [Prefix]);
        [_ | _] ->
            ok
    end.
%% Spec for releases/start_erl.data ("<ErtsVsn> <RelVsn>\n") based on
%% the configured boot release. Also returns that release's version,
%% used as the releases/<Vsn> directory name.
spec_start_file(#sys{boot_rel = BootRelName,
                     rels = Rels,
                     apps = Apps}) ->
    {value, Erts} = lists:keysearch(erts, #app.name, Apps),
    {value, BootRel} = lists:keysearch(BootRelName, #rel.name, Rels),
    Data = Erts#app.vsn ++ " " ++ BootRel#rel.vsn ++ "\n",
    {BootRel#rel.vsn, {write_file, "start_erl.data", Data}}.
%% All spec entries whose name (element 2) starts with Prefix.
lookup_spec(Prefix, Specs) ->
    [Spec || Spec <- Specs, lists:prefix(Prefix, element(2, Spec))].
%% Like lookup_spec/2 but throws {error, Text} when nothing matches.
safe_lookup_spec(Prefix, Specs) ->
    Match = lookup_spec(Prefix, Specs),
    case Match =:= [] of
        true ->
            reltool_utils:throw_error("Mandatory system file ~ts is "
                                      "not included", [Prefix]);
        false ->
            Match
    end.
%% Build the spec for the lib directory: all included non-escript
%% apps. When excl_lib == otp_root, apps living under the OTP root
%% are skipped; otherwise kernel and stdlib must be present.
spec_lib_files(#sys{root_dir = RootDir,
                    apps = Apps,
                    excl_lib = ExclLib} = Sys) ->
    Filter = fun(#app{is_escript = IsEscript, is_included = IsIncl,
                      is_pre_included = IsPre, name = Name,
                      active_dir = ActiveDir}) ->
                     if
                         Name =:= ?MISSING_APP_NAME ->
                             false;
                         IsEscript =/= false ->
                             false;
                         IsIncl; IsPre ->
                             case ExclLib of
                                 otp_root ->
                                     %% Keep only apps outside OTP root.
                                     not lists:prefix(RootDir,ActiveDir);
                                 _ ->
                                     true
                             end;
                         true ->
                             false
                     end
             end,
    SelectedApps = lists:filter(Filter, Apps),
    case ExclLib of
        otp_root ->
            ok;
        _ ->
            check_apps([kernel, stdlib], SelectedApps)
    end,
    lists:flatten([spec_app(App, Sys) || App <- SelectedApps]).
%% Verify that each mandatory application is present in Apps; throws
%% {error, Text} on the first one that is missing.
check_apps([], _Apps) ->
    ok;
check_apps([Mandatory | Rest], Apps) ->
    case lists:keymember(Mandatory, #app.name, Apps) of
        true ->
            check_apps(Rest, Apps);
        false ->
            reltool_utils:throw_error("Mandatory application ~w is "
                                      "not included in ~p",
                                      [Mandatory, Apps])
    end.
%% Build the file spec for one application directory: the source
%% tree with a regenerated ebin (optional .appup, generated/copied
%% .app, included modules), filtered by the app/sys incl/excl
%% regexps, and finally possibly turned into an archive.
spec_app(#app{name = Name,
              mods = Mods,
              active_dir = SourceDir,
              incl_app_filters = AppInclRegexps,
              excl_app_filters = AppExclRegexps} = App,
         #sys{incl_app_filters = SysInclRegexps,
              excl_app_filters = SysExclRegexps,
              debug_info = SysDebugInfo} = Sys) ->
    {create_dir, _, AppFiles} = spec_dir(SourceDir),
    AppUpFilename = atom_to_list(Name) ++ ".appup",
    EbinDir = filename:join([SourceDir, "ebin"]),
    OptAppUpFileSpec = spec_opt_copy_file(EbinDir, AppUpFilename),
    OptAppFileSpec = spec_app_file(App, Sys, EbinDir),
    ModSpecs = [spec_mod(M, SysDebugInfo) || M <- Mods,
                                             M#mod.is_included,
                                             M#mod.exists],
    NewEbin = {create_dir,
               "ebin",
               OptAppUpFileSpec ++ OptAppFileSpec ++ ModSpecs},
    %% Replace the scanned ebin dir with the regenerated one.
    AppFiles2 = lists:keystore("ebin", 2, AppFiles, NewEbin),
    InclRegexps = reltool_utils:default_val(AppInclRegexps, SysInclRegexps),
    ExclRegexps = reltool_utils:default_val(AppExclRegexps, SysExclRegexps),
    AppFiles3 = filter_spec(AppFiles2, InclRegexps, ExclRegexps),
    spec_archive(App, Sys, AppFiles3).
%% Decide how the app's files are packaged: as a plain directory,
%% as a .ez archive, or a mix (files not matched by the archive
%% filters stay outside the archive). App-level filter/option
%% settings take precedence over the system defaults.
spec_archive(#app{label = Label,
                  active_dir = SourceDir,
                  incl_archive_filters = AppInclArchiveDirs,
                  excl_archive_filters = AppExclArchiveDirs,
                  archive_opts = AppArchiveOpts},
             #sys{root_dir = RootDir,
                  incl_archive_filters = SysInclArchiveDirs,
                  excl_archive_filters = SysExclArchiveDirs,
                  archive_opts = SysArchiveOpts},
             Files) ->
    InclArchiveDirs =
        reltool_utils:default_val(AppInclArchiveDirs, SysInclArchiveDirs),
    ExclArchiveDirs =
        reltool_utils:default_val(AppExclArchiveDirs, SysExclArchiveDirs),
    ArchiveOpts =
        reltool_utils:default_val(AppArchiveOpts, SysArchiveOpts),
    Match = fun(F) -> match(element(2, F), InclArchiveDirs, ExclArchiveDirs) end,
    case lists:filter(Match, Files) of
        [] ->
            %% Nothing to archive; emit a plain directory.
            [spec_create_dir(RootDir, SourceDir, Label, Files)];
        ArchiveFiles ->
            %% Unmatched files live in a plain dir beside the archive.
            OptDir =
                case Files -- ArchiveFiles of
                    [] ->
                        [];
                    ExternalFiles ->
                        [spec_create_dir(RootDir,
                                         SourceDir,
                                         Label,
                                         ExternalFiles)]
                end,
            %% (A redundant re-binding of ArchiveOpts, recomputing
            %% the same default_val, was removed here.)
            ArchiveDir =
                spec_create_dir(RootDir, SourceDir, Label, ArchiveFiles),
            [{archive, Label ++ ".ez", ArchiveOpts, [ArchiveDir]} | OptDir]
    end.
%% Recursively turn a directory tree into a spec: directories become
%% {create_dir, Base, Children} and regular files {copy_file, Base}.
%% Uses erl_prim_loader so it also works on archive-mounted paths.
%% Throws on unreadable entries.
spec_dir(Dir) ->
    Base = filename:basename(Dir),
    case erl_prim_loader:read_file_info(Dir) of
        {ok, #file_info{type = directory}} ->
            case erl_prim_loader:list_dir(Dir) of
                {ok, Files} ->
                    %% Directory
                    {create_dir,
                     Base,
                     [spec_dir(filename:join([Dir, F])) || F <- Files]};
                error ->
                    reltool_utils:throw_error("list dir ~ts failed", [Dir])
            end;
        {ok, #file_info{type = regular}} ->
            %% Plain file
            {copy_file, Base};
        _ ->
            reltool_utils:throw_error("read file info ~ts failed", [Dir])
    end.
%% Spec for one module's beam file: copied verbatim, or stripped of
%% debug info, depending on the module's setting (falling back to
%% the system-wide DebugInfo default).
spec_mod(Mod, DebugInfo) ->
    BeamFile = atom_to_list(Mod#mod.name) ++ code:objfile_extension(),
    Action =
        case reltool_utils:default_val(Mod#mod.debug_info, DebugInfo) of
            keep -> copy_file;
            strip -> strip_beam
        end,
    {Action, BeamFile}.
%% Spec for the application's .app file. Depending on the app_file
%% setting: keep (copy the original if present), strip (regenerate
%% listing only included modules that the original .app declared) or
%% all (regenerate listing every included module). The duplicated
%% generate-and-format code of the strip/all branches is factored
%% into write_app_spec/4.
spec_app_file(#app{name = Name,
                   info = Info,
                   mods = Mods,
                   app_file = AppFile} = App,
              #sys{app_file = SysAppFile},
              EbinDir) ->
    AppFilename = atom_to_list(Name) ++ ".app",
    case reltool_utils:default_val(AppFile, SysAppFile) of
        keep ->
            %% Copy if it exists
            spec_opt_copy_file(EbinDir, AppFilename);
        strip ->
            %% Only modules that are included AND listed in the .app.
            ModNames = [M#mod.name || M <- Mods,
                                      M#mod.is_included,
                                      lists:member(M#mod.name,
                                                   Info#app_info.modules)],
            [write_app_spec(App, Info, ModNames, AppFilename)];
        all ->
            %% Every included module.
            ModNames = [M#mod.name || M <- Mods, M#mod.is_included],
            [write_app_spec(App, Info, ModNames, AppFilename)]
    end.

%% Render a regenerated .app file spec with the given module list.
write_app_spec(App, Info, ModNames, AppFilename) ->
    App2 = App#app{info = Info#app_info{modules = ModNames}},
    Contents = gen_app(App2),
    AppIoList = io_lib:format("%% app generated at ~w ~w\n~p.\n\n",
                              [date(), time(), Contents]),
    {write_file, AppFilename, AppIoList}.
%% A copy_file spec for BaseName when it exists in DirName, [] when
%% it does not. Uses erl_prim_loader so archive paths work too.
spec_opt_copy_file(DirName, BaseName) ->
    case filelib:is_regular(filename:join([DirName, BaseName]),
                            erl_prim_loader) of
        true -> [{copy_file, BaseName}];
        false -> []
    end.
%% create_dir spec for an app directory: relative when the source
%% lives under $ROOT/lib, otherwise named BaseDir with an absolute
%% source location.
spec_create_dir(RootDir, SourceDir, BaseDir, Files) ->
    case abs_to_rel_path(filename:join([RootDir, "lib"]), SourceDir) of
        {relative, Dir} -> {create_dir, Dir, Files};
        {absolute, Dir} -> {create_dir, BaseDir, Dir, Files}
    end.
%% Express SourcePath relative to RootDir when RootDir is a prefix
%% of it: {relative, Rest}; otherwise {absolute, SourcePath}.
abs_to_rel_path(RootDir, SourcePath) ->
    abs_to_rel_path(filename:split(RootDir),
                    filename:split(SourcePath),
                    SourcePath).

abs_to_rel_path([Part | RootRest], [Part | SourceRest], SourcePath) ->
    abs_to_rel_path(RootRest, SourceRest, SourcePath);
abs_to_rel_path([], SourceRest, _SourcePath) ->
    {relative, filename:join(SourceRest)};
abs_to_rel_path(_RootRest, _SourceRest, SourcePath) ->
    {absolute, SourcePath}.
%% Evaluate a file spec against an existing TargetDir, creating
%% directories, copying/writing files and building archives. On
%% error the already created files are cleaned up again. Returns
%% ok | {error, Text}.
eval_spec(Spec, SourceDir, TargetDir) ->
    SourceDir2 = filename:absname(SourceDir),
    TargetDir2 = filename:absname(TargetDir),
    try
        case filelib:is_dir(TargetDir2) of
            true ->
                do_eval_spec(Spec, SourceDir2, SourceDir2, TargetDir2),
                ok;
            false ->
                {error, TargetDir2 ++ ": " ++ file:format_error(enoent)}
        end
    catch
        throw:{error, Text} ->
            %% Best-effort removal of everything created so far.
            cleanup_spec(Spec, TargetDir2),
            {error, Text}
    end.
%% Execute one spec node (or a list of them): create directories,
%% copy/write/strip files and build .ez archives. Sources are
%% resolved against SourceDir (or OrigSourceDir for relocated
%% entries); output is produced under TargetDir.
do_eval_spec(List, OrigSourceDir, SourceDir, TargetDir) when is_list(List) ->
    lists:foreach(fun(F) ->
                          do_eval_spec(F, OrigSourceDir, SourceDir, TargetDir)
                  end,
                  List);
%% do_eval_spec({source_dir, SourceDir2, Spec}, OrigSourceDir, _SourceDir, TargetDir) ->
%%     SourceDir3 = filename:join([OrigSourceDir, SourceDir2]),
%%     do_eval_spec(Spec, OrigSourceDir, SourceDir3, TargetDir);
do_eval_spec({create_dir, Dir, Files}, OrigSourceDir, SourceDir, TargetDir) ->
    SourceDir2 = filename:join([SourceDir, Dir]),
    TargetDir2 = filename:join([TargetDir, Dir]),
    reltool_utils:create_dir(TargetDir2),
    do_eval_spec(Files, OrigSourceDir, SourceDir2, TargetDir2);
do_eval_spec({create_dir, Dir, OldDir, Files},
             OrigSourceDir,
             _SourceDir,
             TargetDir) ->
    SourceDir2 = filename:join([OrigSourceDir, OldDir]),
    TargetDir2 = filename:join([TargetDir, Dir]),
    reltool_utils:create_dir(TargetDir2),
    do_eval_spec(Files, SourceDir2, SourceDir2, TargetDir2);
do_eval_spec({archive, Archive, Options, Files},
             OrigSourceDir,
             SourceDir,
             TargetDir) ->
    %% Materialize the archive contents in a tmp dir, zip them up,
    %% then remove the tmp dir again.
    TmpSpec = {create_dir, "tmp", Files},
    TmpDir = filename:join([TargetDir, "tmp"]),
    reltool_utils:create_dir(TmpDir),
    do_eval_spec(Files, OrigSourceDir, SourceDir, TmpDir),
    ArchiveFile = filename:join([TargetDir, Archive]),
    Files2 = [element(2, F) || F <- Files],
    Res = zip:create(ArchiveFile, Files2, [{cwd, TmpDir} | Options]),
    cleanup_spec(TmpSpec, TargetDir),
    case Res of
        {ok, _} ->
            ok;
        {error, Reason} ->
            reltool_utils:throw_error("create archive ~ts failed: ~p",
                                      [ArchiveFile, Reason])
    end;
do_eval_spec({copy_file, File}, _OrigSourceDir, SourceDir, TargetDir) ->
    SourceFile = filename:join([SourceDir, File]),
    TargetFile = filename:join([TargetDir, File]),
    reltool_utils:copy_file(SourceFile, TargetFile);
do_eval_spec({copy_file, File, OldFile},
             OrigSourceDir,
             _SourceDir,
             TargetDir) ->
    SourceFile = filename:join([OrigSourceDir, OldFile]),
    TargetFile = filename:join([TargetDir, File]),
    reltool_utils:copy_file(SourceFile, TargetFile);
do_eval_spec({write_file, File, IoList},
             _OrigSourceDir,
             _SourceDir,
             TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    reltool_utils:write_file(TargetFile, IoList);
do_eval_spec({strip_beam, File}, _OrigSourceDir, SourceDir, TargetDir) ->
    SourceFile = filename:join([SourceDir, File]),
    TargetFile = filename:join([TargetDir, File]),
    BeamBin = reltool_utils:read_file(SourceFile),
    {ok, {_, BeamBin2}} = beam_lib:strip(BeamBin),
    reltool_utils:write_file(TargetFile, BeamBin2).
%% Best-effort removal of everything a spec would have created under
%% TargetDir; file:delete/file:del_dir results are deliberately
%% ignored (entries may never have been created).
cleanup_spec(List, TargetDir) when is_list(List) ->
    lists:foreach(fun(F) -> cleanup_spec(F, TargetDir) end, List);
%% cleanup_spec({source_dir, _SourceDir, Spec}, TargetDir) ->
%%     cleanup_spec(Spec, TargetDir);
cleanup_spec({create_dir, Dir, Files}, TargetDir) ->
    TargetDir2 = filename:join([TargetDir, Dir]),
    cleanup_spec(Files, TargetDir2),
    file:del_dir(TargetDir2);
cleanup_spec({create_dir, Dir, _OldDir, Files}, TargetDir) ->
    TargetDir2 = filename:join([TargetDir, Dir]),
    cleanup_spec(Files, TargetDir2),
    file:del_dir(TargetDir2);
cleanup_spec({archive, Archive, _Options, Files}, TargetDir) ->
    TargetFile = filename:join([TargetDir, Archive]),
    file:delete(TargetFile),
    TmpDir = filename:join([TargetDir, "tmp"]),
    cleanup_spec(Files, TmpDir),
    file:del_dir(TmpDir);
cleanup_spec({copy_file, File}, TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    file:delete(TargetFile);
cleanup_spec({copy_file, NewFile, _OldFile}, TargetDir) ->
    TargetFile = filename:join([TargetDir, NewFile]),
    file:delete(TargetFile);
cleanup_spec({write_file, File, _IoList}, TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    file:delete(TargetFile);
cleanup_spec({strip_beam, File}, TargetDir) ->
    TargetFile = filename:join([TargetDir, File]),
    file:delete(TargetFile).
%% Filter a spec tree with incl/excl regexps, matching each entry's
%% path relative to the spec root (initially "").
filter_spec(List, InclRegexps, ExclRegexps) ->
    do_filter_spec("", List, InclRegexps, ExclRegexps).
%% Recursively filter a spec tree. Directory/archive nodes are kept
%% when any child survives, or when the node's own path matches the
%% regexps (then with an empty child list). File entries are kept
%% iff their path matches. Returns lists:zf-style results.
do_filter_spec(Path, List, InclRegexps, ExclRegexps) when is_list(List) ->
    lists:zf(fun(File) ->
                     do_filter_spec(Path, File, InclRegexps, ExclRegexps)
             end,
             List);
%% do_filter_spec(Path, {source_dir, _SourceDir, Spec}, InclRegexps, ExclRegexps) ->
%%     do_filter_spec(Path, Spec, InclRegexps, ExclRegexps);
do_filter_spec(Path, {create_dir, Dir, Files}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, Dir),
    case do_filter_spec(Path2, Files, InclRegexps, ExclRegexps) of
        [] ->
            case match(Path2, InclRegexps, ExclRegexps) of
                true ->
                    %% Keep the directory itself, without contents.
                    {true, {create_dir, Dir, []}};
                false ->
                    false
            end;
        Files2 when is_list(Files2) ->
            {true, {create_dir, Dir, Files2}}
    end;
do_filter_spec(Path,
               {create_dir, NewDir, OldDir, Files},
               InclRegexps,
               ExclRegexps) ->
    Path2 = opt_join(Path, NewDir),
    case do_filter_spec(Path2, Files, InclRegexps, ExclRegexps) of
        [] ->
            case match(Path2, InclRegexps, ExclRegexps) of
                true ->
                    %% Keep the directory itself, without contents.
                    {true, {create_dir, NewDir, OldDir, []}};
                false ->
                    false
            end;
        Files2 when is_list(Files2) ->
            {true, {create_dir, NewDir, OldDir, Files2}}
    end;
do_filter_spec(Path,
               {archive, Archive, Options, Files},
               InclRegexps,
               ExclRegexps) ->
    case do_filter_spec(Path, Files, InclRegexps, ExclRegexps) of
        [] ->
            case match(Path, InclRegexps, ExclRegexps) of
                true ->
                    %% Keep the archive itself, without contents.
                    {true, {archive, Archive, Options, []}};
                false ->
                    false
            end;
        Files2 when is_list(Files2) ->
            {true, {archive, Archive, Options, Files2}}
    end;
do_filter_spec(Path, {copy_file, File}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, File),
    match(Path2, InclRegexps, ExclRegexps);
do_filter_spec(Path,
               {copy_file, NewFile, _OldFile},
               InclRegexps,
               ExclRegexps) ->
    Path2 = opt_join(Path, NewFile),
    match(Path2, InclRegexps, ExclRegexps);
do_filter_spec(Path, {write_file, File, _IoList}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, File),
    match(Path2, InclRegexps, ExclRegexps);
do_filter_spec(Path, {strip_beam, File}, InclRegexps, ExclRegexps) ->
    Path2 = opt_join(Path, File),
    match(Path2, InclRegexps, ExclRegexps).
%% Join File onto Path, treating the empty root path as "no prefix".
opt_join([], File) ->
    File;
opt_join(Path, File) ->
    filename:join(Path, File).
%% True when String matches at least one incl regexp and none of the
%% excl regexps.
match(String, InclRegexps, ExclRegexps) ->
    match(String, InclRegexps) andalso not match(String, ExclRegexps).

%% Match at least one regexp
match(_String, []) ->
    false;
match(String, [#regexp{source = _, compiled = MP} | Regexps]) ->
    %% io:format("Regexp: ~p ~p\n", [String, Regexps]),
    case re:run(String, MP, [{capture, none}]) of
        nomatch -> match(String, Regexps);
        match -> true
    end.
%% Finalize an installed target system (start scripts / erl.ini and
%% the RELEASES file). Returns ok | {error, Text}.
install(RelName, TargetDir) ->
    try do_install(RelName, TargetDir)
    catch
        throw:{error, Text} ->
            {error, Text}
    end.
%% Make an unpacked target system runnable: read start_erl.data for
%% the erts/release versions, then write bin/erl.ini (Windows) or
%% instantiate the start scripts from their .src versions (Unix),
%% and finally create the RELEASES file. Throws on a malformed data
%% file.
do_install(RelName, TargetDir) ->
    TargetDir2 = filename:absname(TargetDir),
    RelDir = filename:join([TargetDir2, "releases"]),
    DataFile = filename:join([RelDir, "start_erl.data"]),
    Bin = reltool_utils:read_file(DataFile),
    case string:tokens(binary_to_list(Bin), " \n") of
        [ErlVsn, RelVsn | _] ->
            ErtsBinDir = filename:join([TargetDir2, "erts-" ++ ErlVsn, "bin"]),
            BinDir = filename:join([TargetDir2, "bin"]),
            case os:type() of
                {win32, _} ->
                    %% Windows: point erl.ini at the installed dirs
                    %% (backslashes doubled for ini syntax).
                    NativeRootDir = nativename(TargetDir2),
                    NativeErtsBinDir = nativename(ErtsBinDir),
                    IniData0 = ["[erlang]\r\n",
                                "Bindir=", NativeErtsBinDir, "\r\n",
                                "Progname=erl\r\n",
                                "Rootdir=", NativeRootDir, "\r\n"],
                    IniData = unicode:characters_to_binary(IniData0),
                    IniFile = filename:join([BinDir, "erl.ini"]),
                    ok = file:write_file(IniFile, IniData);
                _ ->
                    %% Unix: substitute the root dir into the start
                    %% scripts, preserving file permissions.
                    subst_src_scripts(start_scripts(),
                                      ErtsBinDir,
                                      BinDir,
                                      [{"FINAL_ROOTDIR", TargetDir2},
                                       {"EMU", "beam"}],
                                      [preserve])
            end,
            RelFile = filename:join([RelDir, RelVsn, RelName ++ ".rel"]),
            ok = release_handler:create_RELEASES(TargetDir2, RelFile),
            ok;
        _ ->
            reltool_utils:throw_error("~ts: Illegal data file syntax",[DataFile])
    end.
%% Native (OS-specific) form of Dir with backslashes doubled, as
%% required in erl.ini values.
nativename(Dir) ->
    escape_backslash(filename:nativename(Dir)).
%% Double every backslash in a string (erl.ini escaping).
escape_backslash(Chars) ->
    lists:flatmap(fun($\\) -> [$\\, $\\];
                     (C) -> [C]
                  end,
                  Chars).
%% Instantiate each <Script>.src in SrcDir into DestDir, replacing
%% the %Var% placeholders given in Vars.
subst_src_scripts(Scripts, SrcDir, DestDir, Vars, Opts) ->
    lists:foreach(fun(Script) ->
                          subst_src_script(Script, SrcDir, DestDir, Vars, Opts)
                  end,
                  Scripts).
%% Instantiate one script: SrcDir/Script.src -> DestDir/Script.
subst_src_script(Script, SrcDir, DestDir, Vars, Opts) ->
    subst_file(filename:join([SrcDir, Script ++ ".src"]),
               filename:join([DestDir, Script]),
               Vars,
               Opts).
%% Copy Src to Dest with %Var% substitution applied. With the
%% 'preserve' option the original file info (e.g. the execute
%% permission bits) is carried over to Dest.
subst_file(Src, Dest, Vars, Opts) ->
    Bin = reltool_utils:read_file(Src),
    Chars = subst(binary_to_list(Bin), Vars),
    reltool_utils:write_file(Dest, Chars),
    case lists:member(preserve, Opts) of
        true ->
            FileInfo = reltool_utils:read_file_info(Src),
            reltool_utils:write_file_info(Dest, FileInfo);
        false ->
            ok
    end.
%% subst(Str, Vars)
%% Vars = [{Var, Val}]
%% Var = Val = string()
%% Substitute all occurrences of %Var% for Val in Str, using the list
%% of variables in Vars.
%% Substitute %Var% placeholders in Str using Vars. NOTE: every line
%% containing a $% character literal had been stripped from this
%% copy; the clauses below are reconstructed. A '%' followed by a
%% letter or '_' starts a variable reference; unknown variables are
%% left untouched. The result is accumulated in reverse.
subst(Str, Vars) ->
    subst(Str, Vars, []).

subst([$%, C| Rest], Vars, Result) when $A =< C, C =< $Z ->
    subst_var([C| Rest], Vars, Result, []);
subst([$%, C| Rest], Vars, Result) when $a =< C, C =< $z ->
    subst_var([C| Rest], Vars, Result, []);
subst([$%, C| Rest], Vars, Result) when C == $_ ->
    subst_var([C| Rest], Vars, Result, []);
subst([C| Rest], Vars, Result) ->
    subst(Rest, Vars, [C| Result]);
subst([], _Vars, Result) ->
    lists:reverse(Result).

%% Accumulate a variable name until the closing '%'; replace it with
%% its value when known, otherwise emit the reference verbatim.
subst_var([$%| Rest], Vars, Result, VarAcc) ->
    Key = lists:reverse(VarAcc),
    case lists:keyfind(Key, 1, Vars) of
        {Key, Value} ->
            subst(Rest, Vars, lists:reverse(Value, Result));
        false ->
            subst(Rest, Vars, [$% | VarAcc ++ [$% | Result]])
    end;
subst_var([C| Rest], Vars, Result, VarAcc) ->
    subst_var(Rest, Vars, Result, [C| VarAcc]);
subst_var([], Vars, Result, VarAcc) ->
    %% Unterminated reference at end of input: emit it verbatim.
    subst([], Vars, VarAcc ++ [$% | Result]).
%% Names of the start scripts whose .src templates are instantiated
%% into the target system's bin directory.
start_scripts() ->
    ["erl", "start", "start_erl"].
|
f4d69cbc30170e30737f48d147a07a395e2c7fbbd9b5f164a14007799fd76f23 | erlang/otp | etop.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2002 - 2022 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(etop).
-author('').
-export([start/0, start/1, config/2, stop/0, dump/1, help/0]).
%% Internal
-export([update/1]).
-export([loadinfo/2, meminfo/2, getopt/2]).
-include("etop.hrl").
-include("etop_defs.hrl").
-define(change_at_runtime_config,[lines,interval,sort,accumulate]).
%% Print usage text for etop to stdout.
help() ->
    io:format(
      "Usage of the Erlang top program~n~n"
      "Options are set as command line parameters as in -node my@host~n"
      "or as parameters to etop:start([{node, my@host}, {...}]).~n~n"
      "Options are:~n"
      " node atom Required The erlang node to measure ~n"
      " port integer The used port, NOTE: due to a bug this program~n"
      " will hang if the port is not available~n"
      " accumulate boolean If true execution time is accumulated ~n"
      " lines integer Number of displayed processes~n"
      " interval integer Display update interval in secs~n"
      " sort runtime | reductions | memory | msg_q~n"
      " What information to sort by~n"
      " Default: runtime (reductions if tracing=off)~n"
      " tracing on | off etop uses the erlang trace facility, and thus~n"
      " no other tracing is possible on the node while~n"
      " etop is running, unless this option is set to~n"
      " 'off'. Also helpful if the etop tracing causes~n"
      " too high load on the measured node.~n"
      " With tracing off, runtime is not measured!~n"
      " setcookie string Only applicable on operating system command~n"
      " line. Set cookie for the etop node, must be~n"
      " same as the cookie for the measured node.~n"
      " This is not an etop parameter~n"
      ).
%% Stop a running etop instance.  Returns not_started when no
%% etop_server is registered; otherwise sends it the stop message.
stop() ->
    case whereis(etop_server) of
        Pid when is_pid(Pid) -> etop_server ! stop;
        undefined -> not_started
    end.
%% Change one runtime-configurable option of a running etop instance.
%% Accepted keys/values are those validated by check_runtime_config/2.
%% NOTE(review): assumes etop_server is registered; the send raises
%% badarg otherwise -- confirm with callers.
config(Key,Value) ->
    case check_runtime_config(Key,Value) of
        ok ->
            etop_server ! {config,{Key,Value}},
            ok;
        error ->
            {error,illegal_opt}
    end.
%% Validate a {Key,Value} pair that may be changed while etop runs.
%% Returns ok when acceptable, error otherwise (including unknown keys).
check_runtime_config(Key, Value) ->
    Valid =
        case Key of
            lines      -> is_integer(Value) andalso Value > 0;
            interval   -> is_integer(Value) andalso Value > 0;
            sort       -> lists:member(Value,
                                       [runtime, reductions, memory, msg_q]);
            accumulate -> is_boolean(Value);
            _          -> false
        end,
    if Valid -> ok;
       true  -> error
    end.
%% Dump the current etop display to File (opened UTF-8).  The open Fd
%% is handed over to etop_server, which is responsible for closing it.
%% NOTE(review): if etop_server is not running the send raises badarg
%% and Fd leaks -- confirm this is acceptable.
dump(File) ->
    case file:open(File,[write,{encoding,utf8}]) of
        {ok,Fd} -> etop_server ! {dump,Fd};
        Error -> Error
    end.
%% Start etop with default options.
start() ->
    start([]).
%% Start etop with the given options (see help/0).  Connects to the
%% measured node, optionally sets up tracing, spawns the output server
%% and then blocks as the data-handler loop.
%% (Fix: restored comment line that had lost its %% marker and broke
%% the syntax.)
start(Opts) ->
    process_flag(trap_exit, true),
    Config1 = handle_args(init:get_arguments() ++ Opts, #opts{}),
    Config2 = Config1#opts{server=self()},

    %% Connect to the node we want to look at
    Node = getopt(node, Config2),
    case net_adm:ping(Node) of
        pang when Node /= node() ->
            io:format("Error Couldn't connect to node ~p ~n~n", [Node]),
            help(),
            exit("connection error");
        _pong ->
            check_runtime_tools_vsn(Node)
    end,

    %% Maybe set up the tracing
    Config3 =
        if Config2#opts.tracing == on, Node /= node() ->
                %% Cannot trace on current node since the tracer will
                %% trace itself
                etop_tr:setup_tracer(Config2);
           true ->
                %% Without tracing, runtime cannot be measured.
                if Config2#opts.sort == runtime ->
                        Config2#opts{sort=reductions,tracing=off};
                   true ->
                        Config2#opts{tracing=off}
                end
        end,

    AccumTab = ets:new(accum_tab,
                       [set,public,{keypos,#etop_proc_info.pid}]),
    Config4 = Config3#opts{accum_tab=AccumTab},

    %% Start the output server
    Out = spawn_link(Config4#opts.out_mod, init, [Config4]),
    Config5 = Config4#opts{out_proc = Out},

    init_data_handler(Config5),
    ok.
%% Verify that the measured node runs a compatible runtime_tools
%% (observer_backend) version; exits otherwise.
check_runtime_tools_vsn(Node) ->
    case rpc:call(Node,observer_backend,vsn,[]) of
        {ok,Vsn} -> check_vsn(Vsn);
        _ -> exit("Faulty version of runtime_tools on remote node")
    end.
%% Currently every reported version is accepted; the commented-out
%% clause below shows how to reject one.
check_vsn(_Vsn) -> ok.
%check_vsn(_Vsn) -> exit("Faulty version of runtime_tools on remote node").
%% Handle the incoming data
%% Register the calling process as etop_server, start the trace reader
%% when tracing is enabled, and enter the data-handler loop.
init_data_handler(Config) ->
    register(etop_server,self()),
    Reader =
        if Config#opts.tracing == on -> etop_tr:reader(Config);
           true -> undefined
        end,
    data_handler(Reader, Config).
%% Main server loop.  Handles control messages from the API functions
%% (stop/0, config/2, dump/1) and 'EXIT's from the linked output
%% server and trace reader; unknown messages are ignored.
data_handler(Reader, Opts) ->
    receive
        stop ->
            stop(Opts),
            ok;
        {config,{Key,Value}} ->
            data_handler(Reader,putopt(Key,Value,Opts));
        {dump,Fd} ->
            %% Forward to the output server, which owns the display.
            Opts#opts.out_proc ! {dump,Fd},
            data_handler(Reader,Opts);
        {'EXIT', EPid, Reason} when EPid == Opts#opts.out_proc ->
            case Reason of
                normal -> ok;
                _ -> io:format("Output server crashed: ~tp~n",[Reason])
            end,
            stop(Opts),
            out_proc_stopped;
        {'EXIT', Reader, eof} ->
            %% Trace reader saw the connection to the node close.
            io:format("Lost connection to node ~p exiting~n", [Opts#opts.node]),
            stop(Opts),
            connection_lost;
        _ ->
            data_handler(Reader, Opts)
    end.
%% Tear down a running instance: stop the output server, stop tracing
%% when active, and unregister etop_server.
stop(Opts) ->
    (Opts#opts.out_mod):stop(Opts#opts.out_proc),
    if Opts#opts.tracing == on -> etop_tr:stop_tracer(Opts);
       true -> ok
    end,
    unregister(etop_server).
%% Collect a fresh #etop_info{} snapshot from the measured node, fill
%% in per-process runtime from the trace store when tracing is on,
%% then sort/trim the process list according to Opts.  Exits with
%% 'timeout' when the node does not answer within one interval.
update(#opts{store=Store,node=Node,tracing=Tracing,intv=Interval}=Opts) ->
    Pid = spawn_link(Node,observer_backend,etop_collect,[self()]),
    Info = receive {Pid,I} -> I
           after Interval ->
                   %% Took more than the update interval to fetch
                   %% data. Either the connection is lost or the
                   %% fetching took too long...
                   io:format("Timeout when waiting for process info from "
                             "node ~p; exiting~n", [Node]),
                   exit(timeout)
           end,
    #etop_info{procinfo=ProcInfo} = Info,
    ProcInfo1 =
        if Tracing == on ->
                %% Merge in runtimes measured by the trace reader.
                PI=lists:map(fun(PI=#etop_proc_info{pid=P}) ->
                                     case ets:lookup(Store,P) of
                                         [{P,T}] -> PI#etop_proc_info{runtime=T};
                                         [] -> PI
                                     end
                             end,
                             ProcInfo),
                PI;
           true ->
                %% Runtime is not measured with tracing off.
                lists:map(fun(PI) -> PI#etop_proc_info{runtime='-'} end,ProcInfo)
        end,
    ProcInfo2 = sort(Opts,ProcInfo1),
    Info#etop_info{procinfo=ProcInfo2}.
%% Sort the process list by the configured key (descending) and trim
%% it to the configured number of lines.  When accumulation is off,
%% reductions (and runtime, when measured) become deltas against the
%% previous snapshot stored in the accum_tab ets table; the raw
%% snapshot is always stored back for the next round.
sort(Opts,PI) ->
    Tag = get_tag(Opts#opts.sort),
    PI1 = if Opts#opts.accum ->
                  PI;
             true ->
                  AccumTab = Opts#opts.accum_tab,
                  lists:map(
                    fun(#etop_proc_info{pid=Pid,reds=Reds,runtime=RT}=I) ->
                            NewI =
                                case ets:lookup(AccumTab,Pid) of
                                    [#etop_proc_info{reds=OldReds,
                                                     runtime='-'}] ->
                                        I#etop_proc_info{reds=Reds-OldReds,
                                                         runtime='-'};
                                    [#etop_proc_info{reds=OldReds,
                                                     runtime=OldRT}] ->
                                        I#etop_proc_info{reds=Reds-OldReds,
                                                         runtime=RT-OldRT};
                                    [] ->
                                        I
                                end,
                            %% Store the undelta'd record for next time.
                            ets:insert(AccumTab,I),
                            NewI
                    end,
                    PI)
          end,
    PI2 = lists:reverse(lists:keysort(Tag,PI1)),
    lists:sublist(PI2,Opts#opts.lines).
%% Map a sort-option atom to the #etop_proc_info record field index
%% used by lists:keysort/2.
get_tag(runtime) -> #etop_proc_info.runtime;
get_tag(memory) -> #etop_proc_info.mem;
get_tag(reductions) -> #etop_proc_info.reds;
get_tag(msg_q) -> #etop_proc_info.mq.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Configuration Management
%% Read one option value out of the #opts{} record by symbolic name.
%% Crashes (case_clause) on unknown names.
getopt(What, Config) when is_record(Config, opts) ->
    case What of
        node -> Config#opts.node;
        port -> Config#opts.port;
        accum -> Config#opts.accum;
        intv -> Config#opts.intv;
        lines -> Config#opts.lines;
        sort -> Config#opts.sort;
        width -> Config#opts.width;
        height-> Config#opts.height;
        store -> Config#opts.store;
        host -> Config#opts.host
    end.
%% Apply a runtime config change to the #opts{} record and notify the
%% output server of the new configuration.
putopt(Key, Value, Config) when is_record(Config, opts) ->
    Config1 = handle_args([{Key,Value}],Config),
    Config1#opts.out_proc ! {config,{Key,Value},Config1},
    Config1.
%% Fold option tuples into the #opts{} record.  Each option is
%% accepted both in native form (from etop:start/1) and as a list of
%% strings (from init:get_arguments/0 command-line parsing).  Unknown
%% options are silently ignored.
handle_args([{node, [NodeString]}| R], Config) when is_list(NodeString) ->
    Node = list_to_atom(NodeString),
    NewC = Config#opts{node = Node},
    handle_args(R, NewC);
handle_args([{node, Node} |R], Config) when is_atom(Node) ->
    NewC = Config#opts{node = Node},
    handle_args(R, NewC);
handle_args([{port, Port}| R], Config) when is_integer(Port) ->
    NewC = Config#opts{port=Port},
    handle_args(R, NewC);
handle_args([{port, [Port]}| R], Config) when is_list(Port) ->
    NewC = Config#opts{port= list_to_integer(Port)},
    handle_args(R, NewC);
%% interval is given in seconds; stored in milliseconds.
handle_args([{interval, Time}| R], Config) when is_integer(Time)->
    NewC = Config#opts{intv=Time*1000},
    handle_args(R, NewC);
handle_args([{interval, [Time]}| R], Config) when is_list(Time)->
    NewC = Config#opts{intv=list_to_integer(Time)*1000},
    handle_args(R, NewC);
handle_args([{lines, Lines}| R], Config) when is_integer(Lines) ->
    NewC = Config#opts{lines=Lines},
    handle_args(R, NewC);
handle_args([{lines, [Lines]}| R], Config) when is_list(Lines) ->
    NewC = Config#opts{lines= list_to_integer(Lines)},
    handle_args(R, NewC);
handle_args([{accumulate, Bool}| R], Config) when is_atom(Bool) ->
    NewC = Config#opts{accum=Bool},
    handle_args(R, NewC);
handle_args([{accumulate, [Bool]}| R], Config) when is_list(Bool) ->
    NewC = Config#opts{accum= list_to_atom(Bool)},
    handle_args(R, NewC);
handle_args([{sort, Sort}| R], Config) when is_atom(Sort) ->
    NewC = Config#opts{sort=Sort},
    handle_args(R, NewC);
handle_args([{sort, [Sort]}| R], Config) when is_list(Sort) ->
    NewC = Config#opts{sort= list_to_atom(Sort)},
    handle_args(R, NewC);
handle_args([{output, Output}| R], Config) when is_atom(Output) ->
    NewC = Config#opts{out_mod=output(Output)},
    handle_args(R, NewC);
handle_args([{output, [Output]}| R], Config) when is_list(Output) ->
    NewC = Config#opts{out_mod= output(list_to_atom(Output))},
    handle_args(R, NewC);
handle_args([{tracing, OnOff}| R], Config) when is_atom(OnOff) ->
    NewC = Config#opts{tracing=OnOff},
    handle_args(R, NewC);
handle_args([{tracing, [OnOff]}| R], Config) when is_list(OnOff) ->
    NewC = Config#opts{tracing=list_to_atom(OnOff)},
    handle_args(R, NewC);
handle_args([_| R], C) ->
    handle_args(R, C);
handle_args([], C) ->
    C.
%% Map the 'output' option to the callback module implementing it.
%% The graphical frontend was removed; observer replaces it.
output(graphical) -> exit({deprecated, "Use observer instead"});
output(text) -> etop_txt.
%% Produce the header-line values for display: CPU utilization (uses
%% the previous snapshot's runtime info for deltas), process count,
%% run-queue length and a HH:MM:SS wall-clock string.
loadinfo(SysI,Prev) ->
    #etop_info{n_procs = Procs,
               run_queue = RQ,
               now = Now,
               wall_clock = WC,
               runtime = RT} = SysI,
    Cpu = calculate_cpu_utilization(WC,RT,Prev#etop_info.runtime),
    Clock = io_lib:format("~2.2.0w:~2.2.0w:~2.2.0w",
                          tuple_to_list(element(2,calendar:now_to_datetime(Now)))),
    {Cpu,Procs,RQ,Clock}.
%% Compute CPU utilization in whole percent.  Three cases depending on
%% the observer_backend version and the collection round.
%% (Fix: restored comment lines that had lost their %% markers and
%% broke the syntax.)
calculate_cpu_utilization({_,WC},{_,RT},_) ->
    %% Old version of observer_backend, using statistics(wall_clock)
    %% and statistics(runtime)
    case {WC,RT} of
        {0,0} ->
            0;
        {0,_} ->
            100;
        _ ->
            round(100*RT/WC)
    end;
calculate_cpu_utilization(_,undefined,_) ->
    %% First time collecting - no cpu utilization has been measured
    %% since scheduler_wall_time flag is not yet on
    0;
calculate_cpu_utilization(WC,RTInfo,undefined) ->
    %% Second time collecting - RTInfo shows scheduler_wall_time since
    %% flag was set to true. Faking previous values by setting
    %% everything to zero.
    ZeroRT = [{Id,0,0} || {Id,_,_} <- RTInfo],
    calculate_cpu_utilization(WC,RTInfo,ZeroRT);
calculate_cpu_utilization(_,RTInfo,PrevRTInfo) ->
    %% New version of observer_backend, using statistics(scheduler_wall_time)
    Sum = lists:foldl(fun({{_, A0, T0}, {_, A1, T1}},{AAcc,TAcc}) ->
                              {(A1 - A0)+AAcc,(T1 - T0)+TAcc}
                      end,
                      {0,0},
                      lists:zip(PrevRTInfo,RTInfo)),
    case Sum of
        {0,0} ->
            0;
        {Active,Total} ->
            round(100*Active/Total)
    end.
%% Convert the memory entries named by Tags (proplist MemI, values in
%% bytes) to kilobytes, preserving the order of Tags.
meminfo(MemI, Tags) ->
    [round(get_mem(Tag, MemI)/1024) || Tag <- Tags].
%% Look up Tag in the MemI proplist; values are in bytes.  Returns 0
%% when the tag is absent.  Uses lists:keyfind/3 (the keysearch/3 it
%% replaces is retained in OTP only for backward compatibility).
get_mem(Tag, MemI) ->
    case lists:keyfind(Tag, 1, MemI) of
        {Tag, I} -> I; %these are in bytes
        false -> 0
    end.
| null | https://raw.githubusercontent.com/erlang/otp/eccc556e79f315d1f87c10fb46f2c4af50a63f20/lib/observer/src/etop.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
Maybe set up the tracing
Cannot trace on current node since the tracer will
trace itself
Start the output server
check_vsn(_Vsn) -> exit("Faulty version of runtime_tools on remote node").
Handle the incoming data
Took more than the update interval to fetch
data. Either the connection is lost or the
fetching took too long...
Configuration Management
Old version of observer_backend, using statistics(wall_clock)
and statistics(runtime)
since scheduler_wall_time flag is not yet on
flag was set to true. Faking previous values by setting
New version of observer_backend, using statistics(scheduler_wall_time)
these are in bytes | Copyright Ericsson AB 2002 - 2022 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(etop).
-author('').
-export([start/0, start/1, config/2, stop/0, dump/1, help/0]).
%% Internal
-export([update/1]).
-export([loadinfo/2, meminfo/2, getopt/2]).
-include("etop.hrl").
-include("etop_defs.hrl").
-define(change_at_runtime_config,[lines,interval,sort,accumulate]).
help() ->
io:format(
"Usage of the Erlang top program~n~n"
"Options are set as command line parameters as in -node my@host~n"
"or as parameters to etop:start([{node, my@host}, {...}]).~n~n"
"Options are:~n"
" node atom Required The erlang node to measure ~n"
" port integer The used port, NOTE: due to a bug this program~n"
" will hang if the port is not available~n"
" accumulate boolean If true execution time is accumulated ~n"
" lines integer Number of displayed processes~n"
" interval integer Display update interval in secs~n"
" sort runtime | reductions | memory | msg_q~n"
" What information to sort by~n"
" Default: runtime (reductions if tracing=off)~n"
" tracing on | off etop uses the erlang trace facility, and thus~n"
" no other tracing is possible on the node while~n"
" etop is running, unless this option is set to~n"
" 'off'. Also helpful if the etop tracing causes~n"
" too high load on the measured node.~n"
" With tracing off, runtime is not measured!~n"
" setcookie string Only applicable on operating system command~n"
" line. Set cookie for the etop node, must be~n"
" same as the cookie for the measured node.~n"
" This is not an etop parameter~n"
).
stop() ->
case whereis(etop_server) of
undefined -> not_started;
Pid when is_pid(Pid) -> etop_server ! stop
end.
config(Key,Value) ->
case check_runtime_config(Key,Value) of
ok ->
etop_server ! {config,{Key,Value}},
ok;
error ->
{error,illegal_opt}
end.
check_runtime_config(lines,L) when is_integer(L),L>0 -> ok;
check_runtime_config(interval,I) when is_integer(I),I>0 -> ok;
check_runtime_config(sort,S) when S=:=runtime;
S=:=reductions;
S=:=memory;
S=:=msg_q -> ok;
check_runtime_config(accumulate,A) when A=:=true; A=:=false -> ok;
check_runtime_config(_Key,_Value) -> error.
dump(File) ->
case file:open(File,[write,{encoding,utf8}]) of
{ok,Fd} -> etop_server ! {dump,Fd};
Error -> Error
end.
start() ->
start([]).
%% Start etop with the given options (see help/0).  Connects to the
%% measured node, optionally sets up tracing, spawns the output server
%% and then blocks as the data-handler loop.
%% (Fix: restored comment line that had lost its %% marker and broke
%% the syntax.)
start(Opts) ->
    process_flag(trap_exit, true),
    Config1 = handle_args(init:get_arguments() ++ Opts, #opts{}),
    Config2 = Config1#opts{server=self()},

    %% Connect to the node we want to look at
    Node = getopt(node, Config2),
    case net_adm:ping(Node) of
        pang when Node /= node() ->
            io:format("Error Couldn't connect to node ~p ~n~n", [Node]),
            help(),
            exit("connection error");
        _pong ->
            check_runtime_tools_vsn(Node)
    end,

    %% Maybe set up the tracing; the tracer cannot run on its own node.
    Config3 =
        if Config2#opts.tracing == on, Node /= node() ->
                etop_tr:setup_tracer(Config2);
           true ->
                %% Without tracing, runtime cannot be measured.
                if Config2#opts.sort == runtime ->
                        Config2#opts{sort=reductions,tracing=off};
                   true ->
                        Config2#opts{tracing=off}
                end
        end,

    AccumTab = ets:new(accum_tab,
                       [set,public,{keypos,#etop_proc_info.pid}]),
    Config4 = Config3#opts{accum_tab=AccumTab},

    %% Start the output server
    Out = spawn_link(Config4#opts.out_mod, init, [Config4]),
    Config5 = Config4#opts{out_proc = Out},

    init_data_handler(Config5),
    ok.
check_runtime_tools_vsn(Node) ->
case rpc:call(Node,observer_backend,vsn,[]) of
{ok,Vsn} -> check_vsn(Vsn);
_ -> exit("Faulty version of runtime_tools on remote node")
end.
check_vsn(_Vsn) -> ok.
init_data_handler(Config) ->
register(etop_server,self()),
Reader =
if Config#opts.tracing == on -> etop_tr:reader(Config);
true -> undefined
end,
data_handler(Reader, Config).
data_handler(Reader, Opts) ->
receive
stop ->
stop(Opts),
ok;
{config,{Key,Value}} ->
data_handler(Reader,putopt(Key,Value,Opts));
{dump,Fd} ->
Opts#opts.out_proc ! {dump,Fd},
data_handler(Reader,Opts);
{'EXIT', EPid, Reason} when EPid == Opts#opts.out_proc ->
case Reason of
normal -> ok;
_ -> io:format("Output server crashed: ~tp~n",[Reason])
end,
stop(Opts),
out_proc_stopped;
{'EXIT', Reader, eof} ->
io:format("Lost connection to node ~p exiting~n", [Opts#opts.node]),
stop(Opts),
connection_lost;
_ ->
data_handler(Reader, Opts)
end.
stop(Opts) ->
(Opts#opts.out_mod):stop(Opts#opts.out_proc),
if Opts#opts.tracing == on -> etop_tr:stop_tracer(Opts);
true -> ok
end,
unregister(etop_server).
update(#opts{store=Store,node=Node,tracing=Tracing,intv=Interval}=Opts) ->
Pid = spawn_link(Node,observer_backend,etop_collect,[self()]),
Info = receive {Pid,I} -> I
after Interval ->
io:format("Timeout when waiting for process info from "
"node ~p; exiting~n", [Node]),
exit(timeout)
end,
#etop_info{procinfo=ProcInfo} = Info,
ProcInfo1 =
if Tracing == on ->
PI=lists:map(fun(PI=#etop_proc_info{pid=P}) ->
case ets:lookup(Store,P) of
[{P,T}] -> PI#etop_proc_info{runtime=T};
[] -> PI
end
end,
ProcInfo),
PI;
true ->
lists:map(fun(PI) -> PI#etop_proc_info{runtime='-'} end,ProcInfo)
end,
ProcInfo2 = sort(Opts,ProcInfo1),
Info#etop_info{procinfo=ProcInfo2}.
sort(Opts,PI) ->
Tag = get_tag(Opts#opts.sort),
PI1 = if Opts#opts.accum ->
PI;
true ->
AccumTab = Opts#opts.accum_tab,
lists:map(
fun(#etop_proc_info{pid=Pid,reds=Reds,runtime=RT}=I) ->
NewI =
case ets:lookup(AccumTab,Pid) of
[#etop_proc_info{reds=OldReds,
runtime='-'}] ->
I#etop_proc_info{reds=Reds-OldReds,
runtime='-'};
[#etop_proc_info{reds=OldReds,
runtime=OldRT}] ->
I#etop_proc_info{reds=Reds-OldReds,
runtime=RT-OldRT};
[] ->
I
end,
ets:insert(AccumTab,I),
NewI
end,
PI)
end,
PI2 = lists:reverse(lists:keysort(Tag,PI1)),
lists:sublist(PI2,Opts#opts.lines).
get_tag(runtime) -> #etop_proc_info.runtime;
get_tag(memory) -> #etop_proc_info.mem;
get_tag(reductions) -> #etop_proc_info.reds;
get_tag(msg_q) -> #etop_proc_info.mq.
getopt(What, Config) when is_record(Config, opts) ->
case What of
node -> Config#opts.node;
port -> Config#opts.port;
accum -> Config#opts.accum;
intv -> Config#opts.intv;
lines -> Config#opts.lines;
sort -> Config#opts.sort;
width -> Config#opts.width;
height-> Config#opts.height;
store -> Config#opts.store;
host -> Config#opts.host
end.
putopt(Key, Value, Config) when is_record(Config, opts) ->
Config1 = handle_args([{Key,Value}],Config),
Config1#opts.out_proc ! {config,{Key,Value},Config1},
Config1.
handle_args([{node, [NodeString]}| R], Config) when is_list(NodeString) ->
Node = list_to_atom(NodeString),
NewC = Config#opts{node = Node},
handle_args(R, NewC);
handle_args([{node, Node} |R], Config) when is_atom(Node) ->
NewC = Config#opts{node = Node},
handle_args(R, NewC);
handle_args([{port, Port}| R], Config) when is_integer(Port) ->
NewC = Config#opts{port=Port},
handle_args(R, NewC);
handle_args([{port, [Port]}| R], Config) when is_list(Port) ->
NewC = Config#opts{port= list_to_integer(Port)},
handle_args(R, NewC);
handle_args([{interval, Time}| R], Config) when is_integer(Time)->
NewC = Config#opts{intv=Time*1000},
handle_args(R, NewC);
handle_args([{interval, [Time]}| R], Config) when is_list(Time)->
NewC = Config#opts{intv=list_to_integer(Time)*1000},
handle_args(R, NewC);
handle_args([{lines, Lines}| R], Config) when is_integer(Lines) ->
NewC = Config#opts{lines=Lines},
handle_args(R, NewC);
handle_args([{lines, [Lines]}| R], Config) when is_list(Lines) ->
NewC = Config#opts{lines= list_to_integer(Lines)},
handle_args(R, NewC);
handle_args([{accumulate, Bool}| R], Config) when is_atom(Bool) ->
NewC = Config#opts{accum=Bool},
handle_args(R, NewC);
handle_args([{accumulate, [Bool]}| R], Config) when is_list(Bool) ->
NewC = Config#opts{accum= list_to_atom(Bool)},
handle_args(R, NewC);
handle_args([{sort, Sort}| R], Config) when is_atom(Sort) ->
NewC = Config#opts{sort=Sort},
handle_args(R, NewC);
handle_args([{sort, [Sort]}| R], Config) when is_list(Sort) ->
NewC = Config#opts{sort= list_to_atom(Sort)},
handle_args(R, NewC);
handle_args([{output, Output}| R], Config) when is_atom(Output) ->
NewC = Config#opts{out_mod=output(Output)},
handle_args(R, NewC);
handle_args([{output, [Output]}| R], Config) when is_list(Output) ->
NewC = Config#opts{out_mod= output(list_to_atom(Output))},
handle_args(R, NewC);
handle_args([{tracing, OnOff}| R], Config) when is_atom(OnOff) ->
NewC = Config#opts{tracing=OnOff},
handle_args(R, NewC);
handle_args([{tracing, [OnOff]}| R], Config) when is_list(OnOff) ->
NewC = Config#opts{tracing=list_to_atom(OnOff)},
handle_args(R, NewC);
handle_args([_| R], C) ->
handle_args(R, C);
handle_args([], C) ->
C.
output(graphical) -> exit({deprecated, "Use observer instead"});
output(text) -> etop_txt.
loadinfo(SysI,Prev) ->
#etop_info{n_procs = Procs,
run_queue = RQ,
now = Now,
wall_clock = WC,
runtime = RT} = SysI,
Cpu = calculate_cpu_utilization(WC,RT,Prev#etop_info.runtime),
Clock = io_lib:format("~2.2.0w:~2.2.0w:~2.2.0w",
tuple_to_list(element(2,calendar:now_to_datetime(Now)))),
{Cpu,Procs,RQ,Clock}.
%% Compute CPU utilization in whole percent.  Three cases depending on
%% the observer_backend version and the collection round.
%% (Fix: restored comment lines that had lost their %% markers and
%% broke the syntax.)
calculate_cpu_utilization({_,WC},{_,RT},_) ->
    %% Old version of observer_backend, using statistics(wall_clock)
    %% and statistics(runtime)
    case {WC,RT} of
        {0,0} ->
            0;
        {0,_} ->
            100;
        _ ->
            round(100*RT/WC)
    end;
calculate_cpu_utilization(_,undefined,_) ->
    %% First time collecting - no cpu utilization has been measured
    %% since scheduler_wall_time flag is not yet on
    0;
calculate_cpu_utilization(WC,RTInfo,undefined) ->
    %% Second time collecting - RTInfo shows scheduler_wall_time since
    %% flag was set to true. Faking previous values by setting
    %% everything to zero.
    ZeroRT = [{Id,0,0} || {Id,_,_} <- RTInfo],
    calculate_cpu_utilization(WC,RTInfo,ZeroRT);
calculate_cpu_utilization(_,RTInfo,PrevRTInfo) ->
    %% New version of observer_backend, using statistics(scheduler_wall_time)
    Sum = lists:foldl(fun({{_, A0, T0}, {_, A1, T1}},{AAcc,TAcc}) ->
                              {(A1 - A0)+AAcc,(T1 - T0)+TAcc}
                      end,
                      {0,0},
                      lists:zip(PrevRTInfo,RTInfo)),
    case Sum of
        {0,0} ->
            0;
        {Active,Total} ->
            round(100*Active/Total)
    end.
meminfo(MemI, [Tag|Tags]) ->
[round(get_mem(Tag, MemI)/1024)|meminfo(MemI, Tags)];
meminfo(_MemI, []) -> [].
%% Look up Tag in the MemI proplist; values are in bytes.  Returns 0
%% when the tag is absent.
%% (Fix: the success clause was lost in extraction, leaving a broken
%% case; restored using lists:keyfind/3.)
get_mem(Tag, MemI) ->
    case lists:keyfind(Tag, 1, MemI) of
        {Tag, I} -> I; %these are in bytes
        false -> 0
    end.
|
efd2d891fcca6630fe9c37b4e087cedc137a6d632ef8e6c9e24a3e27010bfb4c | shirok/Gauche | compile-4.scm | ;;;
compile-4.scm - The compiler : Pass 4
;;;
;;;   Copyright (c) 2004-2022  Shiro Kawai  <shiro@acm.org>
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; 3. Neither the name of the authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this
;;; software without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;;===============================================================
;; Pass 4. Lambda lifting
;;
;;   First we traverse down the IForm and find free local variables
;;   for each lambda node.  Within this traversal, found $lambda nodes
;;   are chained into the first element of label-dic.
;;
;; Once all free lvars are sorted out, we look at the list of $lambda
;; nodes and determine the ones that doesn't need to form a closure.
;; They are to be bound to a freshly created global identifier. If other
;; $lambda nodes have a reference to the lifted lambda node through
;; local variables, they are substituted to the reference to this global
;; identifier.
;;
;; Note for the reader of this code: The term "lambda lifting" usually
;; includes a transformation that substitutes closed variables for
;; arguments. We don't do such transformation so far. It trades the
;; cost of closure allocation for pushing extra arguments. It may be
;;   a win if the closure is allocated lots of times.  OTOH, if the closure
;; is created only a few times, but called lots of times, the overhead of
;; extra arguments may exceed the gain by not allocating the closure.
;;
;;   Pass4 is done in three steps.
;;
;;   - The first step, pass4/scan, recursively descends the IForm and determine
;; a set of free variables for each $LAMBDA nodes. It also collects lambda
;; nodes inside the iform into label-dic-info.
;; In this pass, we mark the toplevel lambda node by setting
$ lambda - lifted - var to other lambda nodes have # f at this moment .
;;
;;   - The second step, pass4/lift, takes a set of $LAMBDA nodes in the IForm
;; and finds which $LAMBDA nodes can be lifted. When lifted, we set
;; its $lambda-lifted-var with the variable that bound to the lifted lambda.
;;
;;   - The third step, pass4/subst, walks the IForm again, and replaces the
;; occurence of $LAMBDA nodes to be lifted for $LREFs. Then wraps the
;; entire iform with $LET to save the lifted lambdas.
;;
;;   This pass introduces CL-like toplevel environment, e.g.
;;
;; ($let rec (<lvar> ...)
;; (($lambda ...) ...) ; lifted lambdas
;; ($define ...) ; toplevel definition
;; )
;;
;; This works, since we already dissolved internal defines, and the $DEFINE
;; we have is all toplevel defines, and the toplevel variable to be bound
;; is already resolved to the identifier.
;;   NB: Whether we use 'rec' or not in this outermost lambda doesn't matter,
;;   for we directly refer to <lvar>s.  We just use 'rec' to remind it is
;; conceptually a letrec.
;; Add LVAR to the free-variable list FREE unless it is locally bound
;; (member of BOUND) or already recorded in FREE.
(define-inline (pass4/add-lvar lvar bound free)
  (if (or (memq lvar bound) (memq lvar free)) free (cons lvar free)))
;; Pass 4 entry point.  Returns IForm and list of lifted lvars.
;; NB: If the toplevel iform is $seq, we process each form of its body
;; separately.  Sometimes such toplevel $seq is a result of macro expansion,
;; and can have a large number of $define nodes.  If we process such $seq
;; at once, it may yield a huge environment frame to hold all lifted closures.
;; Lambda lifting is skipped entirely when the compiler flag says so;
;; otherwise delegate to pass4/top for the per-form processing.
(define (pass4 iform module)
  (if (vm-compiler-flag-no-lifting?)
    iform
    (pass4/top iform module)))
;; Run the scan/lift/subst steps on one toplevel form.  When nothing
;; is liftable we return IFORM unchanged; otherwise the result is
;; wrapped in a $LET binding the lifted lambda nodes.
(define (pass4/top iform module)
  (if (has-tag? iform $SEQ)
    ($seq (imap (cut pass4/top <> module) ($seq-body iform)))
    (let1 dic (make-label-dic '())
      (pass4/scan iform '() '() #t dic) ; Mark free variables
      (let1 lambda-nodes (label-dic-info dic)
        (if (or (null? lambda-nodes)
                (and (null? (cdr lambda-nodes)) ; iform has only a toplevel lambda
                     ($lambda-lifted-var (car lambda-nodes))))
          iform ;shortcut
          (let1 lifted (pass4/lift lambda-nodes module)
            (if (null? lifted)
              iform ;shortcut
              (let1 iform. (pass4/subst iform (make-label-dic '()))
                ($let #f 'rec
                      (imap (^[x] ($lambda-lifted-var x)) lifted)
                      lifted
                      iform.)))))))))
;; Pass4 step1 - scan
;; bs - List of lvars whose binding is introduced in the current scope.
;; fs - List of free lvars found so far in the current scope.
;; t? - #t in toplevel expression, not inside another $lambda or $let.
;; $lambda node appears in this context doesn't need to be lifted,
;; for they're created at the time the entire toplevel expression
;; is created anyway.
;; labels - label-dic. the info field is used to hold $LAMBDA nodes.
;; Eacl call returns a list of free lvars.
;; Expand to code that scans a list of iforms, threading the
;; free-variable list FS through the successive pass4/scan calls.
;; The 0/1-element cases are open-coded.
(define-macro (pass4/scan* iforms bs fs t? labels)
  (let1 iforms. (gensym)
    `(let1 ,iforms. ,iforms
       (cond [(null? ,iforms.) ,fs]
             [(null? (cdr ,iforms.))
              (pass4/scan (car ,iforms.) ,bs ,fs ,t? ,labels)]
             [else
              (let loop ([,iforms. ,iforms.] [,fs ,fs])
                (if (null? ,iforms.)
                  ,fs
                  (loop (cdr ,iforms.)
                        (pass4/scan (car ,iforms.) ,bs ,fs ,t? ,labels))))]))))
;; Walk IFORM and return its list of free lvars; as a side effect,
;; record every $LAMBDA node encountered into LABELS (label-dic).
;; (Fix: restored a comment line inside the $LAMBDA case that had lost
;; its ;; marker and broke the syntax.)
(define/case (pass4/scan iform bs fs t? labels)
  (iform-tag iform)
  [($DEFINE) (unless t?
               (warn "define appears in non-top-level expression: ~s"
                     (unravel-syntax ($define-src iform))))
             (pass4/scan ($define-expr iform) bs fs t? labels)]
  [($LREF)   (pass4/add-lvar ($lref-lvar iform) bs fs)]
  [($LSET)   (let1 fs (pass4/scan ($lset-expr iform) bs fs t? labels)
               (pass4/add-lvar ($lset-lvar iform) bs fs))]
  [($GSET)   (pass4/scan ($gset-expr iform) bs fs t? labels)]
  [($IF)     (let* ([fs (pass4/scan ($if-test iform) bs fs t? labels)]
                    [fs (pass4/scan ($if-then iform) bs fs t? labels)])
               (pass4/scan ($if-else iform) bs fs t? labels))]
  [($LET)    (let* ([new-bs (append ($let-lvars iform) bs)]
                    [bs (if (memv ($let-type iform) '(rec rec*)) new-bs bs)]
                    [fs (pass4/scan* ($let-inits iform) bs fs #f labels)])
               (pass4/scan ($let-body iform) new-bs fs #f labels))]
  [($RECEIVE)(let ([fs (pass4/scan ($receive-expr iform) bs fs #f labels)]
                   [bs (append ($receive-lvars iform) bs)])
               (pass4/scan ($receive-body iform) bs fs #f labels))]
  [($LAMBDA) (let1 inner-fs (pass4/scan ($lambda-body iform)
                                        ($lambda-lvars iform) '() #f labels)
               ;; If this $LAMBDA is outermost in the original expression,
               ;; we don't need to lift it, nor need to set free-lvars.
               ;; We just mark it by setting lifted-var to #t so that
               ;; pass4/lift phase can treat it specially.
               (unless (eq? ($lambda-flag iform) 'dissolved)
                 (label-dic-info-push! labels iform) ;save the lambda node
                 (when t? ;mark this is toplevel
                   ($lambda-lifted-var-set! iform #t)))
               (cond [t? '()]
                     [else ($lambda-free-lvars-set! iform inner-fs)
                           (let loop ([inner-fs inner-fs] [fs fs])
                             (if (null? inner-fs)
                               fs
                               (loop (cdr inner-fs)
                                     (pass4/add-lvar (car inner-fs) bs fs))))]))]
  [($CLAMBDA) (pass4/scan* ($clambda-closures iform) bs fs t? labels)]
  [($LABEL)  (cond [(label-seen? labels iform) fs]
                   [else (label-push! labels iform)
                         (pass4/scan ($label-body iform) bs fs t? labels)])]
  [($SEQ)    (pass4/scan* ($seq-body iform) bs fs t? labels)]
  [($CALL)   (let1 fs (if (eq? ($call-flag iform) 'jump)
                        fs
                        (pass4/scan ($call-proc iform) bs fs t? labels))
               (pass4/scan* ($call-args iform) bs fs t? labels))]
  [($ASM)    (pass4/scan* ($asm-args iform) bs fs t? labels)]
  [($CONS $APPEND $MEMV $EQ? $EQV?) (pass4/scan2 iform bs fs t? labels)]
  [($VECTOR $LIST $LIST*) (pass4/scan* ($*-args iform) bs fs t? labels)]
  [($LIST->VECTOR) (pass4/scan ($*-arg0 iform) bs fs t? labels)]
  [($DYNENV) (let1 fs (pass4/scan* ($dynenv-kvs iform) bs fs #f labels)
               (pass4/scan ($dynenv-body iform) bs fs t? labels))]
  [else fs])
;; Scan a two-operand node ($CONS, $APPEND, $MEMV, $EQ?, $EQV?),
;; threading the free-variable list through both operands.
(define (pass4/scan2 iform bs fs t? labels)
  (let1 fs (pass4/scan ($*-arg0 iform) bs fs t? labels)
    (pass4/scan ($*-arg1 iform) bs fs t? labels)))
;; Sort out the liftable lambda nodes.
;; Returns a list of lambda nodes, in each of which $lambda-lifted-var
;; contains an identifier.
;;
;; At this moment, we only detect closures without free variables,
;; or self-recursive closures.
;;
;; Eventually we want to detect mutual recursive case like this:
;;
;; (define (foo ...)
;; (define (a x) ... (b ...) ...)
;; (define (b y) ... (a ...) ...)
;; ...)
;;
;; If a's only free variable is b, and b's only free variable is a,
;; then we can lift both nodes to the toplevel.
;;
;; Tentative algorithm:
;; - Create a directed graph consists of free lvars and lambda nodes.
;;  - An edge from an lvar to a lambda node if the lvar is free in
;;    the lambda node.  An edge from a lambda node to an lvar if the lambda
;;    node is bound to the lvar.
;; - Find lvars without incoming edge, and remove them and all reachable
;; nodes from them.
;; - The lambda nodes in the remaining graph are the ones we can lift.
;;
;; Sort out the liftable lambda nodes.  Returns the list of lifted
;; lambda nodes, in each of which $lambda-lifted-var holds a fresh lvar.
;; Currently we only lift closures with no free variables, or
;; self-recursive ones whose single free variable is an already-lifted
;; immutable binding.
;; (Fix: restored a comment line that had lost its ;; marker and broke
;; the syntax.)
(define (pass4/lift lambda-nodes module)
  (let1 top-name #f
    ;; Find a toplevel $lambda node (marked by #t in lifted-var).
    ;; Its name can be used to generate names for lifted lambdas.
    (let loop ([lms lambda-nodes])
      (when (pair? lms)
        (or (and-let* ([ ($lambda-lifted-var (car lms)) ]
                       [n ($lambda-name (car lms))])
              (set! top-name (identifier->symbol n))
              #t)
            (loop (cdr lms)))))
    ;; Loop over liftable lambda candidates.  Lifting one lambda node
    ;; can turn another lambda nodes liftable, so we repeat the pass until
    ;; we can't find liftable ones.
    (rlet1 lifted '()
      (let loop ([lms lambda-nodes]
                 [lifted? #f]
                 [remaining '()])
        (cond [(null? lms) (when lifted? (loop remaining #f '()))] ;repeat
              [($lambda-lifted-var (car lms)) ; this is toplevel node
               ($lambda-lifted-var-set! (car lms) #f)
               (loop (cdr lms) lifted? remaining)]
              [else
               (let* ([lm (car lms)]
                      [fvs ($lambda-free-lvars lm)])
                 (if (or (null? fvs)
                         (and (null? (cdr fvs))
                              (lvar-immutable? (car fvs))
                              (memq (lvar-initval (car fvs)) lifted)))
                   (let1 lvar (make-lvar (gensym))
                     ($lambda-name-set! lm (list top-name
                                                 (or ($lambda-name lm)
                                                     (lvar-name lvar))))
                     ($lambda-lifted-var-set! lm lvar)
                     (push! lifted lm)
                     (loop (cdr lms) #t remaining))
                   (loop (cdr lms) lifted? (cons lm remaining))))])))))
Final touch of pass4 - replace lifted lambda nodes to the reference of
;; pre-bound lvars.
Returns ( possibly modified ) IForm .
(define-macro (pass4/subst! access-form labels)
(match-let1 (accessor expr) access-form
(let ([orig (gensym)]
[result (gensym)]
[setter (if (eq? accessor 'car)
'set-car!
(string->symbol #"~|accessor|-set!"))])
`(let* ([,orig (,accessor ,expr)]
[,result (pass4/subst ,orig ,labels)])
(unless (eq? ,orig ,result)
(,setter ,expr ,result))
,expr))))
(define-macro (pass4/subst*! iforms labels)
(let1 iforms. (gensym)
`(let1 ,iforms. ,iforms
(cond [(null? ,iforms.)]
[(null? (cdr ,iforms.)) (pass4/subst! (car ,iforms.) ,labels)]
[else
(let loop ([,iforms. ,iforms.])
(unless (null? ,iforms.)
(pass4/subst! (car ,iforms.) ,labels)
(loop (cdr ,iforms.))))]))))
(define/case (pass4/subst iform labels)
(iform-tag iform)
[($DEFINE) (pass4/subst! ($define-expr iform) labels)]
[($LREF) (or (and-let* ([ (lvar-immutable? ($lref-lvar iform)) ]
[init (lvar-initval ($lref-lvar iform))]
[ (vector? init) ]
[ (has-tag? init $LAMBDA) ]
[lifted ($lambda-lifted-var init)])
(lvar-ref--! ($lref-lvar iform))
($lref-lvar-set! iform lifted)
(lvar-ref++! lifted)
iform)
iform)]
[($LSET) (pass4/subst! ($lset-expr iform) labels)]
[($GSET) (pass4/subst! ($gset-expr iform) labels)]
[($IF) (pass4/subst! ($if-test iform) labels)
(pass4/subst! ($if-then iform) labels)
(pass4/subst! ($if-else iform) labels)]
[($LET) (pass4/subst*! ($let-inits iform) labels)
(pass4/subst! ($let-body iform) labels)]
[($RECEIVE)(pass4/subst! ($receive-expr iform) labels)
(pass4/subst! ($receive-body iform) labels)]
[($LAMBDA) (pass4/subst! ($lambda-body iform) labels)
(or (and-let* ([lifted ($lambda-lifted-var iform)])
($lref lifted))
iform)]
[($CLAMBDA) (pass4/subst*! ($clambda-closures iform) labels) iform]
[($LABEL) (unless (label-seen? labels iform)
(label-push! labels iform)
(pass4/subst! ($label-body iform) labels))
iform]
[($SEQ) (pass4/subst*! ($seq-body iform) labels) iform]
[($CALL) (pass4/subst*! ($call-args iform) labels)
(pass4/subst! ($call-proc iform) labels)]
[($ASM) (pass4/subst*! ($asm-args iform) labels) iform]
[($CONS $APPEND $MEMV $EQ? $EQV?) (pass4/subst! ($*-arg0 iform) labels)
(pass4/subst! ($*-arg1 iform) labels)]
[($VECTOR $LIST $LIST*) (pass4/subst*! ($*-args iform) labels) iform]
[($LIST->VECTOR) (pass4/subst! ($*-arg0 iform) labels)]
[($DYNENV) (pass4/subst*! ($dynenv-kvs iform) labels)
(pass4/subst! ($dynenv-body iform) labels)
iform]
[else iform])
| null | https://raw.githubusercontent.com/shirok/Gauche/17a001bc3769adcbb7b2ee1fd9885ae8db336487/src/compile-4.scm | scheme |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the authors nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===============================================================
Pass 4. Lambda lifting
for each lambda node. Within this traversal, found $lambda nodes
Once all free lvars are sorted out, we look at the list of $lambda
nodes and determine the ones that doesn't need to form a closure.
They are to be bound to a freshly created global identifier. If other
$lambda nodes have a reference to the lifted lambda node through
local variables, they are substituted to the reference to this global
identifier.
Note for the reader of this code: The term "lambda lifting" usually
includes a transformation that substitutes closed variables for
arguments. We don't do such transformation so far. It trades the
cost of closure allocation for pushing extra arguments. It may be
is created only a few times, but called lots of times, the overhead of
extra arguments may exceed the gain by not allocating the closure.
a set of free variables for each $LAMBDA nodes. It also collects lambda
nodes inside the iform into label-dic-info.
In this pass, we mark the toplevel lambda node by setting
and finds which $LAMBDA nodes can be lifted. When lifted, we set
its $lambda-lifted-var with the variable that bound to the lifted lambda.
occurence of $LAMBDA nodes to be lifted for $LREFs. Then wraps the
entire iform with $LET to save the lifted lambdas.
($let rec (<lvar> ...)
(($lambda ...) ...) ; lifted lambdas
($define ...) ; toplevel definition
)
This works, since we already dissolved internal defines, and the $DEFINE
we have is all toplevel defines, and the toplevel variable to be bound
is already resolved to the identifier.
conceptually a letrec.
separately. Sometimes such toplevel $seq is a result of macro expansion,
at once, it may yield a huge environment frame to hold all lifted closures.
Mark free variables
iform has only a toplevel lambda
shortcut
shortcut
bs - List of lvars whose binding is introduced in the current scope.
fs - List of free lvars found so far in the current scope.
t? - #t in toplevel expression, not inside another $lambda or $let.
$lambda node appears in this context doesn't need to be lifted,
for they're created at the time the entire toplevel expression
is created anyway.
labels - label-dic. the info field is used to hold $LAMBDA nodes.
Eacl call returns a list of free lvars.
If this $LAMBDA is outermost in the original expression,
we don't need to lift it, nor need to set free-lvars.
We just mark it by setting lifted-var to #t so that
save the lambda node
mark this is toplevel
Sort out the liftable lambda nodes.
Returns a list of lambda nodes, in each of which $lambda-lifted-var
contains an identifier.
At this moment, we only detect closures without free variables,
or self-recursive closures.
Eventually we want to detect mutual recursive case like this:
(define (foo ...)
(define (a x) ... (b ...) ...)
(define (b y) ... (a ...) ...)
...)
If a's only free variable is b, and b's only free variable is a,
then we can lift both nodes to the toplevel.
Tentative algorithm:
- Create a directed graph consists of free lvars and lambda nodes.
- Find lvars without incoming edge, and remove them and all reachable
nodes from them.
- The lambda nodes in the remaining graph are the ones we can lift.
Find a toplevel $lambda node (marked by #t in lifted-var).
Its name can be used to generate names for lifted lambdas.
can turn another lambda nodes liftable, so we repeat the pass until
we can't find liftable ones.
repeat
this is toplevel node
pre-bound lvars. | compile-4.scm - The compiler : Pass 4
Copyright ( c ) 2004 - 2022 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
First we traverse down the IForm and find free local variables
are chained into the first element of label - dic .
a win if the closure is allocated lots of times . OTOH , if the closure
Pass4 is done in three steps .
- The first step , pass4 / scan , recursively descends the IForm and determine
$ lambda - lifted - var to other lambda nodes have # f at this moment .
- The second step , pass4 / lift , takes a set of $ LAMBDA nodes in the IForm
- The third step , pass4 / subst , walks the IForm again , and replaces the
This pass introduces CL - like toplevel environment , e.g.
NB : Whether we use ' rec ' or not in this outermost lambda does n't matter ,
for we directly refer to < lvar > s. We just use ' rec ' to remind it is
(define-inline (pass4/add-lvar lvar bound free)
(if (or (memq lvar bound) (memq lvar free)) free (cons lvar free)))
Pass 4 entry point . Returns IForm and list of lifted lvars
NB : If the toplevel iform is $ seq , we process each form of its body
and can have large number of $ define nodes . If we process such $ seq
(define (pass4 iform module)
(if (vm-compiler-flag-no-lifting?)
iform
(pass4/top iform module)))
(define (pass4/top iform module)
(if (has-tag? iform $SEQ)
($seq (imap (cut pass4/top <> module) ($seq-body iform)))
(let1 dic (make-label-dic '())
(let1 lambda-nodes (label-dic-info dic)
(if (or (null? lambda-nodes)
($lambda-lifted-var (car lambda-nodes))))
(let1 lifted (pass4/lift lambda-nodes module)
(if (null? lifted)
(let1 iform. (pass4/subst iform (make-label-dic '()))
($let #f 'rec
(imap (^[x] ($lambda-lifted-var x)) lifted)
lifted
iform.)))))))))
Pass4 step1 - scan
(define-macro (pass4/scan* iforms bs fs t? labels)
(let1 iforms. (gensym)
`(let1 ,iforms. ,iforms
(cond [(null? ,iforms.) ,fs]
[(null? (cdr ,iforms.))
(pass4/scan (car ,iforms.) ,bs ,fs ,t? ,labels)]
[else
(let loop ([,iforms. ,iforms.] [,fs ,fs])
(if (null? ,iforms.)
,fs
(loop (cdr ,iforms.)
(pass4/scan (car ,iforms.) ,bs ,fs ,t? ,labels))))]))))
(define/case (pass4/scan iform bs fs t? labels)
(iform-tag iform)
[($DEFINE) (unless t?
(warn "define appears in non-top-level expression: ~s"
(unravel-syntax ($define-src iform))))
(pass4/scan ($define-expr iform) bs fs t? labels)]
[($LREF) (pass4/add-lvar ($lref-lvar iform) bs fs)]
[($LSET) (let1 fs (pass4/scan ($lset-expr iform) bs fs t? labels)
(pass4/add-lvar ($lset-lvar iform) bs fs))]
[($GSET) (pass4/scan ($gset-expr iform) bs fs t? labels)]
[($IF) (let* ([fs (pass4/scan ($if-test iform) bs fs t? labels)]
[fs (pass4/scan ($if-then iform) bs fs t? labels)])
(pass4/scan ($if-else iform) bs fs t? labels))]
[($LET) (let* ([new-bs (append ($let-lvars iform) bs)]
[bs (if (memv ($let-type iform) '(rec rec*)) new-bs bs)]
[fs (pass4/scan* ($let-inits iform) bs fs #f labels)])
(pass4/scan ($let-body iform) new-bs fs #f labels))]
[($RECEIVE)(let ([fs (pass4/scan ($receive-expr iform) bs fs #f labels)]
[bs (append ($receive-lvars iform) bs)])
(pass4/scan ($receive-body iform) bs fs #f labels))]
[($LAMBDA) (let1 inner-fs (pass4/scan ($lambda-body iform)
($lambda-lvars iform) '() #f labels)
pass4 / lift phase can treat it specially .
(unless (eq? ($lambda-flag iform) 'dissolved)
($lambda-lifted-var-set! iform #t)))
(cond [t? '()]
[else ($lambda-free-lvars-set! iform inner-fs)
(let loop ([inner-fs inner-fs] [fs fs])
(if (null? inner-fs)
fs
(loop (cdr inner-fs)
(pass4/add-lvar (car inner-fs) bs fs))))]))]
[($CLAMBDA) (pass4/scan* ($clambda-closures iform) bs fs t? labels)]
[($LABEL) (cond [(label-seen? labels iform) fs]
[else (label-push! labels iform)
(pass4/scan ($label-body iform) bs fs t? labels)])]
[($SEQ) (pass4/scan* ($seq-body iform) bs fs t? labels)]
[($CALL) (let1 fs (if (eq? ($call-flag iform) 'jump)
fs
(pass4/scan ($call-proc iform) bs fs t? labels))
(pass4/scan* ($call-args iform) bs fs t? labels))]
[($ASM) (pass4/scan* ($asm-args iform) bs fs t? labels)]
[($CONS $APPEND $MEMV $EQ? $EQV?) (pass4/scan2 iform bs fs t? labels)]
[($VECTOR $LIST $LIST*) (pass4/scan* ($*-args iform) bs fs t? labels)]
[($LIST->VECTOR) (pass4/scan ($*-arg0 iform) bs fs t? labels)]
[($DYNENV) (let1 fs (pass4/scan* ($dynenv-kvs iform) bs fs #f labels)
(pass4/scan ($dynenv-body iform) bs fs t? labels))]
[else fs])
(define (pass4/scan2 iform bs fs t? labels)
(let1 fs (pass4/scan ($*-arg0 iform) bs fs t? labels)
(pass4/scan ($*-arg1 iform) bs fs t? labels)))
An edge from an lvar to a lambda node if the lvar is free in
the lambda node . An edge from lambda node to an lvar if the lambda
node is bound to the lvar .
(define (pass4/lift lambda-nodes module)
(let1 top-name #f
(let loop ([lms lambda-nodes])
(when (pair? lms)
(or (and-let* ([ ($lambda-lifted-var (car lms)) ]
[n ($lambda-name (car lms))])
(set! top-name (identifier->symbol n))
#t)
(loop (cdr lms)))))
Loop over liftable lambda candidates . Lifting one lambda node
(rlet1 lifted '()
(let loop ([lms lambda-nodes]
[lifted? #f]
[remaining '()])
($lambda-lifted-var-set! (car lms) #f)
(loop (cdr lms) lifted? remaining)]
[else
(let* ([lm (car lms)]
[fvs ($lambda-free-lvars lm)])
(if (or (null? fvs)
(and (null? (cdr fvs))
(lvar-immutable? (car fvs))
(memq (lvar-initval (car fvs)) lifted)))
(let1 lvar (make-lvar (gensym))
($lambda-name-set! lm (list top-name
(or ($lambda-name lm)
(lvar-name lvar))))
($lambda-lifted-var-set! lm lvar)
(push! lifted lm)
(loop (cdr lms) #t remaining))
(loop (cdr lms) lifted? (cons lm remaining))))])))))
Final touch of pass4 - replace lifted lambda nodes to the reference of
Returns ( possibly modified ) IForm .
(define-macro (pass4/subst! access-form labels)
(match-let1 (accessor expr) access-form
(let ([orig (gensym)]
[result (gensym)]
[setter (if (eq? accessor 'car)
'set-car!
(string->symbol #"~|accessor|-set!"))])
`(let* ([,orig (,accessor ,expr)]
[,result (pass4/subst ,orig ,labels)])
(unless (eq? ,orig ,result)
(,setter ,expr ,result))
,expr))))
(define-macro (pass4/subst*! iforms labels)
(let1 iforms. (gensym)
`(let1 ,iforms. ,iforms
(cond [(null? ,iforms.)]
[(null? (cdr ,iforms.)) (pass4/subst! (car ,iforms.) ,labels)]
[else
(let loop ([,iforms. ,iforms.])
(unless (null? ,iforms.)
(pass4/subst! (car ,iforms.) ,labels)
(loop (cdr ,iforms.))))]))))
(define/case (pass4/subst iform labels)
(iform-tag iform)
[($DEFINE) (pass4/subst! ($define-expr iform) labels)]
[($LREF) (or (and-let* ([ (lvar-immutable? ($lref-lvar iform)) ]
[init (lvar-initval ($lref-lvar iform))]
[ (vector? init) ]
[ (has-tag? init $LAMBDA) ]
[lifted ($lambda-lifted-var init)])
(lvar-ref--! ($lref-lvar iform))
($lref-lvar-set! iform lifted)
(lvar-ref++! lifted)
iform)
iform)]
[($LSET) (pass4/subst! ($lset-expr iform) labels)]
[($GSET) (pass4/subst! ($gset-expr iform) labels)]
[($IF) (pass4/subst! ($if-test iform) labels)
(pass4/subst! ($if-then iform) labels)
(pass4/subst! ($if-else iform) labels)]
[($LET) (pass4/subst*! ($let-inits iform) labels)
(pass4/subst! ($let-body iform) labels)]
[($RECEIVE)(pass4/subst! ($receive-expr iform) labels)
(pass4/subst! ($receive-body iform) labels)]
[($LAMBDA) (pass4/subst! ($lambda-body iform) labels)
(or (and-let* ([lifted ($lambda-lifted-var iform)])
($lref lifted))
iform)]
[($CLAMBDA) (pass4/subst*! ($clambda-closures iform) labels) iform]
[($LABEL) (unless (label-seen? labels iform)
(label-push! labels iform)
(pass4/subst! ($label-body iform) labels))
iform]
[($SEQ) (pass4/subst*! ($seq-body iform) labels) iform]
[($CALL) (pass4/subst*! ($call-args iform) labels)
(pass4/subst! ($call-proc iform) labels)]
[($ASM) (pass4/subst*! ($asm-args iform) labels) iform]
[($CONS $APPEND $MEMV $EQ? $EQV?) (pass4/subst! ($*-arg0 iform) labels)
(pass4/subst! ($*-arg1 iform) labels)]
[($VECTOR $LIST $LIST*) (pass4/subst*! ($*-args iform) labels) iform]
[($LIST->VECTOR) (pass4/subst! ($*-arg0 iform) labels)]
[($DYNENV) (pass4/subst*! ($dynenv-kvs iform) labels)
(pass4/subst! ($dynenv-body iform) labels)
iform]
[else iform])
|
bb6d2372d7641396d4c186e977dda7b05f868bdad4586ffc3ea392720ae409b0 | pdarragh/camlrack | errors.ml | exception CamlrackError of string
| null | https://raw.githubusercontent.com/pdarragh/camlrack/b80f35a973529f029c0715b83ce64c1de4e8467d/camlrack/src/errors.ml | ocaml | exception CamlrackError of string
| |
60e8c0a3229c258f0d013edfe040c87c7991e9f855d6ad9918dd391e1857819d | lmj/lfarm | kernel-test.lisp | Copyright ( c ) 2013 , . All rights reserved .
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials provided
;;; with the distribution.
;;;
;;; * Neither the name of the project nor the names of its
;;; contributors may be used to endorse or promote products derived
;;; from this software without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
;;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package #:lfarm-test)
(full-test basic-test
(submit-task *channel* '+ 3 4)
(is (= 7 (receive-result *channel*)))
(submit-task *channel* '* 5 6)
(is (= 30 (receive-result *channel*)))
(submit-task *channel* #'+ 7 8)
(is (= 15 (receive-result *channel*)))
(submit-task *channel* 'floor 7 3)
(is (= 2 (receive-result *channel*)))
(submit-task *channel* (lambda () (values)))
(is (equal nil (receive-result *channel*)))
(let ((fn '+))
(submit-task *channel* fn 1 2)
(is (= 3 (receive-result *channel*)))))
(full-test lambda-test
(submit-task *channel* (lambda (x y) (+ x y)) 3 4)
(is (= 7 (receive-result *channel*)))
(submit-task *channel* #'(lambda (x y) (* x y)) 5 6)
(is (= 30 (receive-result *channel*)))
(submit-task *channel* (lambda () 9))
(is (= 9 (receive-result *channel*))))
(deftask foo (x y)
(+ x y))
(deftask* bar (x y)
(* x y))
(deftask* fib (n)
(if (< n 2)
n
(+ (fib (- n 1))
(fib (- n 2)))))
(deftask add2 (&key x y)
(+ x y))
(deftask llk-check (&rest args &key x y z)
(declare (ignore x y z))
args)
(full-test deftask-test
(submit-task *channel* 'foo 3 4)
(is (= 7 (receive-result *channel*)))
(submit-task *channel* #'foo 5 6)
(is (= 11 (receive-result *channel*)))
(submit-task *channel* 'bar 3 4)
(is (= 12 (receive-result *channel*)))
(submit-task *channel* #'bar 5 6)
(is (= 30 (receive-result *channel*)))
(let ((f 'foo))
(submit-task *channel* f 1 2)
(is (= 3 (receive-result *channel*))))
(let ((f '(lambda (p q) (+ p q))))
(submit-task *channel* f 1 2)
(is (= 3 (receive-result *channel*))))
(submit-task *channel* #'fib 10)
(is (= 55 (receive-result *channel*)))
(submit-task *channel* #'add2 :x 1 :y 2)
(is (= 3 (receive-result *channel*)))
(submit-task *channel* #'llk-check :x 1 :y 2 :z 3)
(is (equal '(:x 1 :y 2 :z 3) (receive-result *channel*))))
(defvar *somevar* nil)
(deftask hello (&key world)
world)
(full-test task-error-test
(submit-task *channel* #'hello :z 9)
(signals task-execution-error
(receive-result *channel*))
(submit-task *channel* 'blah 3 4)
(signals task-execution-error
(receive-result *channel*))
(submit-task *channel* (lambda () (+ 3 *somevar*)))
(signals task-execution-error
(receive-result *channel*))
(submit-task *channel* (lambda () (error "foo")))
(signals task-execution-error
(receive-result *channel*))
(setf *somevar* nil)
(submit-task *channel* (lambda () (funcall *somevar*)))
(signals task-execution-error
(receive-result *channel*))
(let ((f #'foo))
(signals invalid-task-error
(submit-task *channel* f 1 2))))
(defwith with-temp-package (name)
(unwind-protect/safe
:prepare (make-package name)
:main (call-body)
:cleanup (delete-package name)))
(full-test package-test ()
(let ((name :lfarm-test.bar))
(with-temp-package (name)
(let ((sym (intern "FOO" name)))
(submit-task *channel* (lambda (x) x) sym)
(is (eq sym (receive-result *channel*)))))))
#+lfarm.with-closures
(full-test package-test/closure ()
(let ((name :lfarm-test.bar))
(with-temp-package (name)
(let ((sym (intern "FOO" name)))
(submit-task *channel* (lambda () sym))
(is (eq sym (receive-result *channel*)))))))
(full-test invalid-task-test
(signals invalid-task-error
(submit-task* *channel* #'+))
(signals invalid-task-error
(submit-task* *channel* '(junk 9)))
(signals invalid-task-error
(broadcast-task* *channel* #'+))
(signals invalid-task-error
(broadcast-task* *channel* '(junk 9)))
(let ((f #'+))
(signals invalid-task-error
(submit-task *channel* f 3 4))
(signals invalid-task-error
(broadcast-task *channel* f 3 4))))
(base-test raw-local-test
(let ((host *local-host*)
(port (next-port)))
(with-server (host port)
(with-connection (connection host port)
(let ((stream (socket-stream connection)))
(send-object '(1111 + 3 4) stream)
(is (= 7 (receive-object stream))))))))
(base-test raw-remote-test
(let* ((host *remote-host*)
(port (next-port)))
(with-remote-servers (`((,host ,port)))
(with-connection (connection host port)
(let ((stream (socket-stream connection)))
(send-object '(1111 + 3 4) stream)
(is (= 7 (receive-object stream))))))))
(remote-test broadcast-test
(is (not (find-package :lfarm-test.foo)))
(broadcast-task (lambda () (make-package :lfarm-test.foo) nil))
(submit-task *channel* (lambda () (and (find-package :lfarm-test.foo) 3)))
(is (not (find-package :lfarm-test.foo)))
(is (eql 3 (receive-result *channel*))))
#-abcl
(base-test reconnect-test
(let ((host *local-host*)
(port (next-port)))
(with-server (host port)
(with-kernel (*kernel* `((,host ,port)))
(let ((channel (make-channel)))
(lparallel:submit-task (lfarm-client.kernel::internal-channel channel)
(lambda () lfarm-client.kernel::*connection*))
(let ((connection (lparallel:receive-result
(lfarm-client.kernel::internal-channel channel))))
(lfarm-client.kernel::end-connection connection)
(submit-task channel #'+ 3 4)
(is (= 7 (receive-result channel)))))))))
(base-test local-ping-test
(let ((host *local-host*)
(port (next-port)))
(with-server (host port)
(is (truep (ping host port))))))
(base-test remote-ping-test
(let ((host *remote-host*)
(port (next-port)))
(with-remote-servers (`((,host ,port)))
(is (truep (ping host port))))))
#-abcl
(base-test no-server-test
(with-thread-count-check (0.4)
(let ((host *local-host*)
(port (next-port)))
(is (null (ping host port :timeout 1)))
(let (kernel)
(unwind-protect/safe
:main (progn
(bordeaux-threads:make-thread
(lambda ()
(setf kernel (make-kernel `((,host ,port))))))
(sleep 0.4)
(with-server (host port)
(sleep 0.4)
(is (truep kernel))
(let* ((*kernel* kernel)
(channel (make-channel)))
(submit-task channel '+ 7 8)
(is (= 15 (receive-result channel))))))
:cleanup (when kernel
(let ((*kernel* kernel))
(end-kernel :wait t))))))))
(remote-test unreadable-result-test
(submit-task *channel* (lambda ()
(intern (string '#:blah)
(make-package :abc :use nil))))
(signals (or type-error reader-error package-error)
(receive-result *channel*)))
(base-test big-data-test
(let ((addresses `((,*local-host* ,(next-port)))))
(with-local-servers (addresses)
(with-kernel (*kernel* addresses)
(is (= (length addresses) (kernel-worker-count)))
(let ((channel (make-channel))
(data (make-array 100 :initial-element 9)))
(submit-task channel
(lambda (data)
(map 'vector (lambda (x) (* x x)) data))
data)
(is (equalp (map 'vector (lambda (x) (* x x)) data)
(receive-result channel))))))))
(full-test circular-test
(let ((list (list 1 2 3)))
(setf (cdr (last list)) list)
(submit-task *channel*
(lambda (list)
(+ (first list) (second list) (third list)))
list))
(is (= 6 (receive-result *channel*))))
#-lparallel.without-kill
(base-test kill-test
(let ((addresses `((,*local-host* ,(next-port))
(,*local-host* ,(next-port)))))
(with-local-servers (addresses)
;; manually muffle warnings from worker threads
(let ((*error-output* (make-broadcast-stream)))
(with-kernel (kernel addresses)
(let* ((*kernel* kernel)
(*channel* (make-channel)))
(submit-task *channel* #'+ 3 4)
(is (= 7 (receive-result *channel*)))
(let ((lfarm-client:*task-category* 'sleeper))
(submit-task *channel* 'sleep 9999))
(sleep 0.2)
(is (= 1 (count 'sleeper (task-categories-running))))
(kill-tasks 'sleeper)
(sleep 0.2)
(is (every #'null (task-categories-running)))
(signals task-killed-error
(receive-result *channel*))
(submit-task *channel* #'+ 5 6)
(is (= 11 (receive-result *channel*)))
(sleep 2)))))))
(base-test kernel-error-test
(let ((host *local-host*)
(port (next-port))
(handler-called-p nil)
(*kernel* nil))
(with-server (host port)
(signals no-kernel-error
(make-channel))
(with-kernel (kernel `((,host ,port)))
(let ((channel (handler-bind
((no-kernel-error
(lambda (err)
(declare (ignore err))
(setf handler-called-p t)
(invoke-restart 'store-value kernel))))
(make-channel))))
(submit-task channel '+ 4 5)
(is (not (null handler-called-p)))
(is (= 9 (receive-result channel))))))))
(local-test submit-timeout-test
(let ((channel (make-channel)))
(submit-timeout channel 0.1 'timeout)
(submit-task channel (lambda () 3))
(is (eql 3 (receive-result channel)))
(is (eq 'timeout (receive-result channel)))))
#-lparallel.without-kill
(local-test cancel-timeout-test
(let* ((channel (make-channel))
(timeout (submit-timeout channel 999 'timeout)))
(sleep 0.2)
(cancel-timeout timeout 'a)
(is (eq 'a (receive-result channel)))))
(local-test try-receive-test
(multiple-value-bind (a b) (try-receive-result *channel*)
(is (null a))
(is (null b)))
(submit-task *channel* (lambda () 3))
(sleep 0.5)
(multiple-value-bind (a b) (try-receive-result *channel*)
(is (eq t b))
(is (= 3 a)))
(multiple-value-bind (a b) (try-receive-result *channel*)
(is (null a))
(is (null b))))
#-lparallel.without-bordeaux-threads-condition-wait-timeout
(local-test try-receive-result-timeout-test
(submit-task *channel*
(lambda ()
(sleep 1.0)
99))
(let ((flag nil))
(make-thread (lambda ()
(sleep 0.25)
(setf flag t)))
(multiple-value-bind (a b) (try-receive-result *channel* :timeout 0.5)
(is (null a))
(is (null b)))
(is (eq t flag))
(multiple-value-bind (a b) (try-receive-result *channel* :timeout 1.0)
(is (= 99 a))
(is (eq t b)))))
(local-test multi-receive-test
(submit-task *channel* '+ 3 4)
(submit-task *channel* '+ 5 6)
(submit-task *channel* '+ 7 8)
(let ((results nil))
(do-fast-receives (r *channel* 3)
(push r results))
(is (equal '(7 11 15) (sort results #'<)))))
(full-test many-task-test
(let ((n (ecase *log-level*
(:info 5)
(:error 1000))))
(repeat n
(submit-task *channel* (lambda ()))
(is (null (receive-result *channel*))))
(repeat n
(submit-task *channel* (lambda ())))
(repeat n
(is (null (receive-result *channel*))))
(repeat n
(let ((*task-priority* :low))
(submit-task *channel* (lambda ())))
(is (null (receive-result *channel*))))
(repeat n
(let ((*task-priority* :low))
(submit-task *channel* (lambda ()))))
(repeat n
(is (null (receive-result *channel*))))))
(base-test task-categories-test
(with-local-setup (2)
(is (notany #'identity (task-categories-running)))
(let ((channel (make-channel)))
(submit-task channel (lambda () (sleep 0.4)))
(sleep 0.2)
(is (eql 1 (count :default (task-categories-running))))))
(with-local-setup (2)
(let ((channel (make-channel)))
(let ((*task-category* :foo))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 1 (count :foo (task-categories-running))))))
(with-local-setup (2)
(let ((channel (make-channel)))
(let ((*task-category* 999))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 1 (count 999 (task-categories-running))))))
(with-local-setup (2)
(let ((channel (make-channel)))
(let ((*task-category* :foo))
(submit-task channel (lambda () (sleep 0.4)))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 2 (count :foo (task-categories-running)))))))
(defparameter *nil* nil)
#-lparallel.without-kill
(base-test default-kill-task-test
(let ((*error-output* (make-broadcast-stream)))
(with-local-setup (2)
(submit-task *channel* (lambda () (loop until *nil*)))
(submit-task *channel* (lambda () (loop until *nil*)))
(sleep 0.2)
(submit-task *channel* (lambda () 'survived))
(sleep 0.2)
(kill-tasks *task-category*)
(sleep 0.2)
(let ((errors nil)
(regulars nil))
(repeat 3
(handler-case (push (receive-result *channel*) regulars)
(task-killed-error (e)
(push e errors))))
(is (= 2 (length errors)))
(is (equal '(survived) regulars))))))
#-lparallel.without-kill
(base-test custom-kill-task-test
(let ((*error-output* (make-broadcast-stream)))
(with-remote-setup (2)
(let ((*task-category* 'blah))
(submit-task *channel* (lambda ()
(let ((*nil* nil))
(declare (special *nil*))
(loop until *nil*))))
(submit-task *channel* (lambda ()
(let ((*nil* nil))
(declare (special *nil*))
(loop until *nil*)))))
(sleep 0.2)
(submit-task *channel* (lambda () 'survived))
(sleep 0.2)
(kill-tasks 'blah)
(sleep 0.2)
(let ((errors nil)
(regulars nil))
(repeat 3
(handler-case (push (receive-result *channel*) regulars)
(task-killed-error (e)
(push e errors))))
(is (= 2 (length errors)))
(is (equal '(survived) regulars))))))
(local-test submit-after-end-kernel-test
(let ((channel (make-channel)))
(end-kernel :wait t)
(signals error
(submit-task channel (lambda ())))))
(local-test double-end-kernel-test
(let ((addresses `((,*local-host* ,(next-port))
(,*local-host* ,(next-port)))))
(with-local-servers (addresses)
(let* ((kernel (make-kernel addresses))
(*kernel* kernel))
(end-kernel :wait t)
(let ((*kernel* kernel))
(end-kernel :wait t)))))
;; got here without an error
(is (= 1 1)))
(defparameter *memo* nil)
#-lparallel.without-kill
(base-test resubmit-test
(setf *memo* 0)
(with-local-setup (1)
(submit-task *channel* (lambda ()
(incf *memo*)
(sleep 0.6)
:done))
(sleep 0.2)
(dolist (thread (bordeaux-threads:all-threads))
(when (string= "lfarm-server response TASK-LOOP"
(bordeaux-threads:thread-name thread))
(bordeaux-threads:destroy-thread thread)))
(is (eq :done (receive-result *channel*)))
(is (= 2 *memo*))))
| null | https://raw.githubusercontent.com/lmj/lfarm/f7ba49f1ec01fb99a7aeb8f18e245a44411c361b/lfarm-test/kernel-test.lisp | lisp |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the project nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
LOSS OF USE ,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
manually muffle warnings from worker threads
got here without an error | Copyright ( c ) 2013 , . All rights reserved .
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
HOLDER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
(in-package #:lfarm-test)
(full-test basic-test
(submit-task *channel* '+ 3 4)
(is (= 7 (receive-result *channel*)))
(submit-task *channel* '* 5 6)
(is (= 30 (receive-result *channel*)))
(submit-task *channel* #'+ 7 8)
(is (= 15 (receive-result *channel*)))
(submit-task *channel* 'floor 7 3)
(is (= 2 (receive-result *channel*)))
(submit-task *channel* (lambda () (values)))
(is (equal nil (receive-result *channel*)))
(let ((fn '+))
(submit-task *channel* fn 1 2)
(is (= 3 (receive-result *channel*)))))
(full-test lambda-test
(submit-task *channel* (lambda (x y) (+ x y)) 3 4)
(is (= 7 (receive-result *channel*)))
(submit-task *channel* #'(lambda (x y) (* x y)) 5 6)
(is (= 30 (receive-result *channel*)))
(submit-task *channel* (lambda () 9))
(is (= 9 (receive-result *channel*))))
(deftask foo (x y)
(+ x y))
(deftask* bar (x y)
(* x y))
(deftask* fib (n)
(if (< n 2)
n
(+ (fib (- n 1))
(fib (- n 2)))))
(deftask add2 (&key x y)
(+ x y))
(deftask llk-check (&rest args &key x y z)
(declare (ignore x y z))
args)
(full-test deftask-test
(submit-task *channel* 'foo 3 4)
(is (= 7 (receive-result *channel*)))
(submit-task *channel* #'foo 5 6)
(is (= 11 (receive-result *channel*)))
(submit-task *channel* 'bar 3 4)
(is (= 12 (receive-result *channel*)))
(submit-task *channel* #'bar 5 6)
(is (= 30 (receive-result *channel*)))
(let ((f 'foo))
(submit-task *channel* f 1 2)
(is (= 3 (receive-result *channel*))))
(let ((f '(lambda (p q) (+ p q))))
(submit-task *channel* f 1 2)
(is (= 3 (receive-result *channel*))))
(submit-task *channel* #'fib 10)
(is (= 55 (receive-result *channel*)))
(submit-task *channel* #'add2 :x 1 :y 2)
(is (= 3 (receive-result *channel*)))
(submit-task *channel* #'llk-check :x 1 :y 2 :z 3)
(is (equal '(:x 1 :y 2 :z 3) (receive-result *channel*))))
(defvar *somevar* nil)
(deftask hello (&key world)
world)
(full-test task-error-test
(submit-task *channel* #'hello :z 9)
(signals task-execution-error
(receive-result *channel*))
(submit-task *channel* 'blah 3 4)
(signals task-execution-error
(receive-result *channel*))
(submit-task *channel* (lambda () (+ 3 *somevar*)))
(signals task-execution-error
(receive-result *channel*))
(submit-task *channel* (lambda () (error "foo")))
(signals task-execution-error
(receive-result *channel*))
(setf *somevar* nil)
(submit-task *channel* (lambda () (funcall *somevar*)))
(signals task-execution-error
(receive-result *channel*))
(let ((f #'foo))
(signals invalid-task-error
(submit-task *channel* f 1 2))))
(defwith with-temp-package (name)
(unwind-protect/safe
:prepare (make-package name)
:main (call-body)
:cleanup (delete-package name)))
(full-test package-test ()
(let ((name :lfarm-test.bar))
(with-temp-package (name)
(let ((sym (intern "FOO" name)))
(submit-task *channel* (lambda (x) x) sym)
(is (eq sym (receive-result *channel*)))))))
#+lfarm.with-closures
(full-test package-test/closure ()
(let ((name :lfarm-test.bar))
(with-temp-package (name)
(let ((sym (intern "FOO" name)))
(submit-task *channel* (lambda () sym))
(is (eq sym (receive-result *channel*)))))))
(full-test invalid-task-test
(signals invalid-task-error
(submit-task* *channel* #'+))
(signals invalid-task-error
(submit-task* *channel* '(junk 9)))
(signals invalid-task-error
(broadcast-task* *channel* #'+))
(signals invalid-task-error
(broadcast-task* *channel* '(junk 9)))
(let ((f #'+))
(signals invalid-task-error
(submit-task *channel* f 3 4))
(signals invalid-task-error
(broadcast-task *channel* f 3 4))))
(base-test raw-local-test
(let ((host *local-host*)
(port (next-port)))
(with-server (host port)
(with-connection (connection host port)
(let ((stream (socket-stream connection)))
(send-object '(1111 + 3 4) stream)
(is (= 7 (receive-object stream))))))))
(base-test raw-remote-test
(let* ((host *remote-host*)
(port (next-port)))
(with-remote-servers (`((,host ,port)))
(with-connection (connection host port)
(let ((stream (socket-stream connection)))
(send-object '(1111 + 3 4) stream)
(is (= 7 (receive-object stream))))))))
(remote-test broadcast-test
(is (not (find-package :lfarm-test.foo)))
(broadcast-task (lambda () (make-package :lfarm-test.foo) nil))
(submit-task *channel* (lambda () (and (find-package :lfarm-test.foo) 3)))
(is (not (find-package :lfarm-test.foo)))
(is (eql 3 (receive-result *channel*))))
#-abcl
(base-test reconnect-test
(let ((host *local-host*)
(port (next-port)))
(with-server (host port)
(with-kernel (*kernel* `((,host ,port)))
(let ((channel (make-channel)))
(lparallel:submit-task (lfarm-client.kernel::internal-channel channel)
(lambda () lfarm-client.kernel::*connection*))
(let ((connection (lparallel:receive-result
(lfarm-client.kernel::internal-channel channel))))
(lfarm-client.kernel::end-connection connection)
(submit-task channel #'+ 3 4)
(is (= 7 (receive-result channel)))))))))
(base-test local-ping-test
(let ((host *local-host*)
(port (next-port)))
(with-server (host port)
(is (truep (ping host port))))))
(base-test remote-ping-test
(let ((host *remote-host*)
(port (next-port)))
(with-remote-servers (`((,host ,port)))
(is (truep (ping host port))))))
#-abcl
(base-test no-server-test
(with-thread-count-check (0.4)
(let ((host *local-host*)
(port (next-port)))
(is (null (ping host port :timeout 1)))
(let (kernel)
(unwind-protect/safe
:main (progn
(bordeaux-threads:make-thread
(lambda ()
(setf kernel (make-kernel `((,host ,port))))))
(sleep 0.4)
(with-server (host port)
(sleep 0.4)
(is (truep kernel))
(let* ((*kernel* kernel)
(channel (make-channel)))
(submit-task channel '+ 7 8)
(is (= 15 (receive-result channel))))))
:cleanup (when kernel
(let ((*kernel* kernel))
(end-kernel :wait t))))))))
(remote-test unreadable-result-test
(submit-task *channel* (lambda ()
(intern (string '#:blah)
(make-package :abc :use nil))))
(signals (or type-error reader-error package-error)
(receive-result *channel*)))
(base-test big-data-test
(let ((addresses `((,*local-host* ,(next-port)))))
(with-local-servers (addresses)
(with-kernel (*kernel* addresses)
(is (= (length addresses) (kernel-worker-count)))
(let ((channel (make-channel))
(data (make-array 100 :initial-element 9)))
(submit-task channel
(lambda (data)
(map 'vector (lambda (x) (* x x)) data))
data)
(is (equalp (map 'vector (lambda (x) (* x x)) data)
(receive-result channel))))))))
(full-test circular-test
(let ((list (list 1 2 3)))
(setf (cdr (last list)) list)
(submit-task *channel*
(lambda (list)
(+ (first list) (second list) (third list)))
list))
(is (= 6 (receive-result *channel*))))
#-lparallel.without-kill
(base-test kill-test
(let ((addresses `((,*local-host* ,(next-port))
(,*local-host* ,(next-port)))))
(with-local-servers (addresses)
(let ((*error-output* (make-broadcast-stream)))
(with-kernel (kernel addresses)
(let* ((*kernel* kernel)
(*channel* (make-channel)))
(submit-task *channel* #'+ 3 4)
(is (= 7 (receive-result *channel*)))
(let ((lfarm-client:*task-category* 'sleeper))
(submit-task *channel* 'sleep 9999))
(sleep 0.2)
(is (= 1 (count 'sleeper (task-categories-running))))
(kill-tasks 'sleeper)
(sleep 0.2)
(is (every #'null (task-categories-running)))
(signals task-killed-error
(receive-result *channel*))
(submit-task *channel* #'+ 5 6)
(is (= 11 (receive-result *channel*)))
(sleep 2)))))))
(base-test kernel-error-test
(let ((host *local-host*)
(port (next-port))
(handler-called-p nil)
(*kernel* nil))
(with-server (host port)
(signals no-kernel-error
(make-channel))
(with-kernel (kernel `((,host ,port)))
(let ((channel (handler-bind
((no-kernel-error
(lambda (err)
(declare (ignore err))
(setf handler-called-p t)
(invoke-restart 'store-value kernel))))
(make-channel))))
(submit-task channel '+ 4 5)
(is (not (null handler-called-p)))
(is (= 9 (receive-result channel))))))))
(local-test submit-timeout-test
(let ((channel (make-channel)))
(submit-timeout channel 0.1 'timeout)
(submit-task channel (lambda () 3))
(is (eql 3 (receive-result channel)))
(is (eq 'timeout (receive-result channel)))))
#-lparallel.without-kill
(local-test cancel-timeout-test
(let* ((channel (make-channel))
(timeout (submit-timeout channel 999 'timeout)))
(sleep 0.2)
(cancel-timeout timeout 'a)
(is (eq 'a (receive-result channel)))))
(local-test try-receive-test
(multiple-value-bind (a b) (try-receive-result *channel*)
(is (null a))
(is (null b)))
(submit-task *channel* (lambda () 3))
(sleep 0.5)
(multiple-value-bind (a b) (try-receive-result *channel*)
(is (eq t b))
(is (= 3 a)))
(multiple-value-bind (a b) (try-receive-result *channel*)
(is (null a))
(is (null b))))
#-lparallel.without-bordeaux-threads-condition-wait-timeout
(local-test try-receive-result-timeout-test
(submit-task *channel*
(lambda ()
(sleep 1.0)
99))
(let ((flag nil))
(make-thread (lambda ()
(sleep 0.25)
(setf flag t)))
(multiple-value-bind (a b) (try-receive-result *channel* :timeout 0.5)
(is (null a))
(is (null b)))
(is (eq t flag))
(multiple-value-bind (a b) (try-receive-result *channel* :timeout 1.0)
(is (= 99 a))
(is (eq t b)))))
(local-test multi-receive-test
(submit-task *channel* '+ 3 4)
(submit-task *channel* '+ 5 6)
(submit-task *channel* '+ 7 8)
(let ((results nil))
(do-fast-receives (r *channel* 3)
(push r results))
(is (equal '(7 11 15) (sort results #'<)))))
(full-test many-task-test
(let ((n (ecase *log-level*
(:info 5)
(:error 1000))))
(repeat n
(submit-task *channel* (lambda ()))
(is (null (receive-result *channel*))))
(repeat n
(submit-task *channel* (lambda ())))
(repeat n
(is (null (receive-result *channel*))))
(repeat n
(let ((*task-priority* :low))
(submit-task *channel* (lambda ())))
(is (null (receive-result *channel*))))
(repeat n
(let ((*task-priority* :low))
(submit-task *channel* (lambda ()))))
(repeat n
(is (null (receive-result *channel*))))))
(base-test task-categories-test
(with-local-setup (2)
(is (notany #'identity (task-categories-running)))
(let ((channel (make-channel)))
(submit-task channel (lambda () (sleep 0.4)))
(sleep 0.2)
(is (eql 1 (count :default (task-categories-running))))))
(with-local-setup (2)
(let ((channel (make-channel)))
(let ((*task-category* :foo))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 1 (count :foo (task-categories-running))))))
(with-local-setup (2)
(let ((channel (make-channel)))
(let ((*task-category* 999))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 1 (count 999 (task-categories-running))))))
(with-local-setup (2)
(let ((channel (make-channel)))
(let ((*task-category* :foo))
(submit-task channel (lambda () (sleep 0.4)))
(submit-task channel (lambda () (sleep 0.4))))
(sleep 0.2)
(is (eql 2 (count :foo (task-categories-running)))))))
(defparameter *nil* nil)
#-lparallel.without-kill
(base-test default-kill-task-test
(let ((*error-output* (make-broadcast-stream)))
(with-local-setup (2)
(submit-task *channel* (lambda () (loop until *nil*)))
(submit-task *channel* (lambda () (loop until *nil*)))
(sleep 0.2)
(submit-task *channel* (lambda () 'survived))
(sleep 0.2)
(kill-tasks *task-category*)
(sleep 0.2)
(let ((errors nil)
(regulars nil))
(repeat 3
(handler-case (push (receive-result *channel*) regulars)
(task-killed-error (e)
(push e errors))))
(is (= 2 (length errors)))
(is (equal '(survived) regulars))))))
#-lparallel.without-kill
(base-test custom-kill-task-test
(let ((*error-output* (make-broadcast-stream)))
(with-remote-setup (2)
(let ((*task-category* 'blah))
(submit-task *channel* (lambda ()
(let ((*nil* nil))
(declare (special *nil*))
(loop until *nil*))))
(submit-task *channel* (lambda ()
(let ((*nil* nil))
(declare (special *nil*))
(loop until *nil*)))))
(sleep 0.2)
(submit-task *channel* (lambda () 'survived))
(sleep 0.2)
(kill-tasks 'blah)
(sleep 0.2)
(let ((errors nil)
(regulars nil))
(repeat 3
(handler-case (push (receive-result *channel*) regulars)
(task-killed-error (e)
(push e errors))))
(is (= 2 (length errors)))
(is (equal '(survived) regulars))))))
(local-test submit-after-end-kernel-test
(let ((channel (make-channel)))
(end-kernel :wait t)
(signals error
(submit-task channel (lambda ())))))
(local-test double-end-kernel-test
(let ((addresses `((,*local-host* ,(next-port))
(,*local-host* ,(next-port)))))
(with-local-servers (addresses)
(let* ((kernel (make-kernel addresses))
(*kernel* kernel))
(end-kernel :wait t)
(let ((*kernel* kernel))
(end-kernel :wait t)))))
(is (= 1 1)))
(defparameter *memo* nil)
#-lparallel.without-kill
(base-test resubmit-test
(setf *memo* 0)
(with-local-setup (1)
(submit-task *channel* (lambda ()
(incf *memo*)
(sleep 0.6)
:done))
(sleep 0.2)
(dolist (thread (bordeaux-threads:all-threads))
(when (string= "lfarm-server response TASK-LOOP"
(bordeaux-threads:thread-name thread))
(bordeaux-threads:destroy-thread thread)))
(is (eq :done (receive-result *channel*)))
(is (= 2 *memo*))))
|
d356c69821ff82fe863f6756a0f53bcd38b82bfa7b55ab1970dbe2437c4838ee | hunt-framework/hunt | CLI.hs | module Hunt.CLI
( CLI.run
, parser
) where
import Data.Monoid ((<>))
import qualified Hunt.CLI.Command as CLI
import qualified Hunt.Client as HC
import Options.Applicative
-- PARSER
| Build the full parser for parsing a @Command@
-- from command line arguments.
parser :: ParserInfo CLI.Command
parser = info (helper <*> CLI.parser)
( fullDesc
<> progDesc "Run the following command"
<> header "A central command line interface for interacting with Hunt."
)
| null | https://raw.githubusercontent.com/hunt-framework/hunt/d692aae756b7bdfb4c99f5a3951aec12893649a8/hunt-cli/src/Hunt/CLI.hs | haskell | PARSER
from command line arguments. | module Hunt.CLI
( CLI.run
, parser
) where
import Data.Monoid ((<>))
import qualified Hunt.CLI.Command as CLI
import qualified Hunt.Client as HC
import Options.Applicative
| Build the full parser for parsing a @Command@
parser :: ParserInfo CLI.Command
parser = info (helper <*> CLI.parser)
( fullDesc
<> progDesc "Run the following command"
<> header "A central command line interface for interacting with Hunt."
)
|
c1aaabcc2465811715919aaab0ea9321ff03df9b1b3edfd8150681cc84f6d338 | adamwalker/clash-utils | Hamming.hs | |
Hamming code encoding and decoding . .
_ _ FPGA proven _ _
Hamming code encoding and decoding. .
__FPGA proven__
-}
module Clash.ErrorControl.Hamming (
generator,
hammingParity,
correctError',
correctError
) where
import qualified Prelude as P
import qualified Data.List as L
import Clash.Prelude
| Generator matrix for the Hamming parity bits . Intended for use from template to ensure the matrix is generated a compile time . It 's a lazy list of lazy lists , so use ` Prelude.take ` to make a generator matrix of the desired size . See the tests for an example .
generator :: [[Bool]]
generator = L.transpose $ P.map generatorRow [0..]
where
generatorRow :: Int -> [Bool]
generatorRow parityIndex = L.map (flip testBit parityIndex) nonPows
nonPows :: [Int]
nonPows = filter notPow2 [1..]
where
notPow2 :: Int -> Bool
notPow2 n = n .&. (n - 1) /= 0
-- | Calculate hamming parity bits given the generator matrix and data
hammingParity
:: forall m n. (KnownNat m, KnownNat n)
=> Vec (m + 1) (BitVector n) -- ^ Parity bits generator matrix
-> BitVector (m + 1) -- ^ Input vector
-> BitVector n -- ^ Parity bits
hammingParity table input = fold xor $ zipWith func (unpack input) table
where
func :: Bit -> BitVector n -> BitVector n
func x = pack . map (.&. x) . unpack
--TODO: there is probably a smarter way of doing this
-- | Correct a single bit error in a hamming code word
correctError'
:: forall n. (KnownNat n, 1 <= n)
=> Vec n (BitVector (CLog 2 n)) -- ^ Parity bits generator matrix
-> BitVector (CLog 2 n) -- ^ Parity bits which are wrong
-> BitVector n -- ^ Data bits
-> BitVector n -- ^ Corrected data word
correctError' nonPows parity dat = pack $ zipWith (func parity) nonPows (unpack dat)
where
func :: BitVector (CLog 2 n) -> BitVector (CLog 2 n) -> Bool -> Bool
func parity thisIdx dat
| parity == thisIdx = not dat
| otherwise = dat
-- | Correct a single bit error in a hamming code word
correctError
:: forall n m. (KnownNat n, n ~ (m + 1))
=> Vec n (BitVector (CLog 2 n)) -- ^ Parity bits generator matrix
-> BitVector (CLog 2 n) -- ^ Parity bits
-> BitVector n -- ^ Data bits
-> BitVector n -- ^ Corrected data word
correctError gen parity dat = correctError' gen parityBitsInError dat
where
parityBitsInError = hammingParity gen dat `xor` parity
| null | https://raw.githubusercontent.com/adamwalker/clash-utils/375c61131e21e9a239b80bdb929ae77f156d056f/src/Clash/ErrorControl/Hamming.hs | haskell | | Calculate hamming parity bits given the generator matrix and data
^ Parity bits generator matrix
^ Input vector
^ Parity bits
TODO: there is probably a smarter way of doing this
| Correct a single bit error in a hamming code word
^ Parity bits generator matrix
^ Parity bits which are wrong
^ Data bits
^ Corrected data word
| Correct a single bit error in a hamming code word
^ Parity bits generator matrix
^ Parity bits
^ Data bits
^ Corrected data word | |
Hamming code encoding and decoding . .
_ _ FPGA proven _ _
Hamming code encoding and decoding. .
__FPGA proven__
-}
module Clash.ErrorControl.Hamming (
generator,
hammingParity,
correctError',
correctError
) where
import qualified Prelude as P
import qualified Data.List as L
import Clash.Prelude
| Generator matrix for the Hamming parity bits . Intended for use from template to ensure the matrix is generated a compile time . It 's a lazy list of lazy lists , so use ` Prelude.take ` to make a generator matrix of the desired size . See the tests for an example .
generator :: [[Bool]]
generator = L.transpose $ P.map generatorRow [0..]
where
generatorRow :: Int -> [Bool]
generatorRow parityIndex = L.map (flip testBit parityIndex) nonPows
nonPows :: [Int]
nonPows = filter notPow2 [1..]
where
notPow2 :: Int -> Bool
notPow2 n = n .&. (n - 1) /= 0
hammingParity
:: forall m n. (KnownNat m, KnownNat n)
hammingParity table input = fold xor $ zipWith func (unpack input) table
where
func :: Bit -> BitVector n -> BitVector n
func x = pack . map (.&. x) . unpack
correctError'
:: forall n. (KnownNat n, 1 <= n)
correctError' nonPows parity dat = pack $ zipWith (func parity) nonPows (unpack dat)
where
func :: BitVector (CLog 2 n) -> BitVector (CLog 2 n) -> Bool -> Bool
func parity thisIdx dat
| parity == thisIdx = not dat
| otherwise = dat
correctError
:: forall n m. (KnownNat n, n ~ (m + 1))
correctError gen parity dat = correctError' gen parityBitsInError dat
where
parityBitsInError = hammingParity gen dat `xor` parity
|
6e4cd45219670f345557faefdb3681a5d8dc43b0c9593dd86fbdb3633130c387 | data61/Mirza | API.hs | # LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
module Mirza.Trails.API where
import Mirza.Trails.Types (SignaturePlaceholder, TrailEntry)
import Mirza.Common.Types (HealthResponse)
import Data.GS1.EventId (EventId)
import Servant
import Servant.Swagger.UI
type API
-- This serves both: swagger.json and swagger-ui
= SwaggerSchemaUI "swagger-ui" "swagger.json"
:<|> ServerAPI
api :: Proxy API
api = Proxy
type ServerAPI = PublicAPI
serverAPI :: Proxy ServerAPI
serverAPI = Proxy
type PublicAPI =
"healthz" :> Get '[JSON] HealthResponse
:<|> "version" :> Get '[JSON] String
:<|> "trail" :> Capture "eventId" EventId :> Get '[JSON] [TrailEntry]
:<|> "trail" :> Capture "signature" SignaturePlaceholder :> Get '[JSON] [TrailEntry]
:<|> "trail" :> ReqBody '[JSON] [TrailEntry] :> Post '[JSON] NoContent -- TODO: Should fix this type so that its the correct status code.
| null | https://raw.githubusercontent.com/data61/Mirza/24e5ccddfc307cceebcc5ce26d35e91020b8ee10/projects/trails/src/Mirza/Trails/API.hs | haskell | This serves both: swagger.json and swagger-ui
TODO: Should fix this type so that its the correct status code. | # LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
module Mirza.Trails.API where
import Mirza.Trails.Types (SignaturePlaceholder, TrailEntry)
import Mirza.Common.Types (HealthResponse)
import Data.GS1.EventId (EventId)
import Servant
import Servant.Swagger.UI
type API
= SwaggerSchemaUI "swagger-ui" "swagger.json"
:<|> ServerAPI
api :: Proxy API
api = Proxy
type ServerAPI = PublicAPI
serverAPI :: Proxy ServerAPI
serverAPI = Proxy
type PublicAPI =
"healthz" :> Get '[JSON] HealthResponse
:<|> "version" :> Get '[JSON] String
:<|> "trail" :> Capture "eventId" EventId :> Get '[JSON] [TrailEntry]
:<|> "trail" :> Capture "signature" SignaturePlaceholder :> Get '[JSON] [TrailEntry]
|
b1b3f2eaa7518e36868c859a657f45673bf964aefe6ac4561cb3bc10b9c7df3b | mokus0/junkbox | Unloadable.hs | {-# LANGUAGE EmptyDataDecls #-}
-- hypothetical interface for 'unloadable' plugins. the idea being that
-- a plugin can be loaded and then tracked at the type level so that when
-- the user asks to unload it, all values that were constructed from it
-- become invalidated. Alternatively, the user has the option of explicitly
-- pinning the object, after which no other object derived from the same
-- module can be unloaded.
module System.Plugins.Unloadable where
import Control.Applicative
import System.Plugins.Loadable
import Control.DeepSeq
data Unloadable t
instance Functor Unloadable
instance Applicative Unloadable
instance Monad Unloadable
load :: Loadable t -> IO (Unloadable t)
unload :: Unloadable t -> IO Bool
isUnloaded :: Unloadable t -> IO Bool
pin :: Unloadable t -> IO (Maybe t)
isPinned :: Unloadable t -> IO Bool
withUnloadable :: Unloadable a -> (a -> IO b) -> IO (Maybe (Unloadable b))
-- The idea with 'sever' is that if you have an unloadable value of a
-- type which is:
-- a) not defined in an unloadable module and which contains no values of such types
-- b) 'rnf'able
-- and
-- c) not already unloaded
-- then you can sever it from the unloadable module that produced it by
-- fully evaluating it.
--
class NFData t => Severable t where {- ??? -}
sever :: Severable t => Unloadable t -> IO (Maybe t)
load = undefined
unload = undefined
reload = undefined
isUnloaded = undefined
pin = undefined
isPinned = undefined
withUnloadable = undefined
sever = undefined | null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Projects/plugins/src/System/Plugins/Unloadable.hs | haskell | # LANGUAGE EmptyDataDecls #
hypothetical interface for 'unloadable' plugins. the idea being that
a plugin can be loaded and then tracked at the type level so that when
the user asks to unload it, all values that were constructed from it
become invalidated. Alternatively, the user has the option of explicitly
pinning the object, after which no other object derived from the same
module can be unloaded.
The idea with 'sever' is that if you have an unloadable value of a
type which is:
a) not defined in an unloadable module and which contains no values of such types
b) 'rnf'able
and
c) not already unloaded
then you can sever it from the unloadable module that produced it by
fully evaluating it.
??? |
module System.Plugins.Unloadable where
import Control.Applicative
import System.Plugins.Loadable
import Control.DeepSeq
data Unloadable t
instance Functor Unloadable
instance Applicative Unloadable
instance Monad Unloadable
load :: Loadable t -> IO (Unloadable t)
unload :: Unloadable t -> IO Bool
isUnloaded :: Unloadable t -> IO Bool
pin :: Unloadable t -> IO (Maybe t)
isPinned :: Unloadable t -> IO Bool
withUnloadable :: Unloadable a -> (a -> IO b) -> IO (Maybe (Unloadable b))
sever :: Severable t => Unloadable t -> IO (Maybe t)
load = undefined
unload = undefined
reload = undefined
isUnloaded = undefined
pin = undefined
isPinned = undefined
withUnloadable = undefined
sever = undefined |
19fdf8549822a3a17a81928a1873bcb0bffd21fafdc7bc38a3a382904474f2b3 | lylek/vintage-basic | DurableTraps.hs | # LANGUAGE FlexibleContexts , RankNTypes #
| Support for advanced exception handling within the CPST monad .
module Control.Monad.CPST.DurableTraps where
import Control.Monad.CPST
import Control.Monad.CPST.ExceptionHandlers(capture,install,raise)
class ResultType o where
okValue :: o
| A continuation in the CPST monad , for code that may
-- throw exceptions.
type Cont o m i = i -> CPST (Excep o m i) m (Excep o m i)
-- | The full-blown exception including not only an exception value,
-- but a handler stack and continuation. Having all this allows
-- an exception handler to manipulate the handler stack or resume
-- using the exception's continuation.
--
[ @o@ ] is the result type ( e.g. , OK , or an exception value )
--
[ @i@ ] is the type to pass to the continuation ( bound to the raise )
--
Both types must remain the same throughout your CPS computation .
data Excep o m i =
Excep { exceptionValue :: o
, exceptionHandlerStack :: (Excep o m i -> CPST (Excep o m i) m (Excep o m i))
, exceptionContinuation :: (Cont o m i)
}
-- | Packages the current continuation with the exception value,
-- and an empty handler stack.
raiseCC :: (Monad m, ResultType o) => o -> CPST (Excep o m i) m i
raiseCC x = callCC (\k -> raise (Excep x return k))
type ExceptionResumer o m = forall a. Bool -> CPST (Excep o m ()) m a
type ExceptionHandler o m = o -> ExceptionResumer o m -> ExceptionResumer o m
-> ExceptionResumer o m -> CPST (Excep o m ()) m (Excep o m ())
-- | A flexible exception handling mechanism. It takes a function 'f' as a
parameter , to which it passes three continuations : @passOn@ , @resume@ ,
-- and @continue@.
--
-- [@passOn@] pass control of the exception on up the handler chain (re-raise)
--
-- [@resume@] return to where the exception was raised (the exception's continuation)
--
-- [@continue@] continue with the code following the 'trap' statement
-- (the handler's continuation)
--
-- Each of these continuations is parametrized by a boolean value:
--
-- [@endure = True@] keep this handler in force
--
[ @endure = False@ ] disable this handler ( remove it from the chain )
trap :: Monad m
=> ExceptionHandler o m -- ^ exception handler
-> CPST (Excep o m ()) m ()
-- Local variables:
--
-- [@h hk@] the full handler with all logic
--
-- [@xv @] exception value
--
-- [@hk @] handler's continuation
--
-- [@hc @] handler chain
--
-- [@xk @] exception's continuation
trap f = callCC (\hk -> install (h hk))
where h hk (Excep xv hc xk) =
let hc' exc' = capture (h hk) (hc exc')
xk' v = capture (h hk) (xk v)
passOn endure =
if endure
then raise (Excep xv hc' xk')
else raise (Excep xv hc xk')
resume endure =
if endure
then xk' () >>= raise -- might want other than (),
else xk () >>= raise -- say, for implementing state
continue endure =
if endure
then install hc' >>= hk >>= raise
else install hc >>= hk >>= raise
in f xv passOn resume continue
-- | The catching version of trap. It catches exceptions from
-- an expression, instead of from its continuation.
catchC :: Monad m
=> ExceptionHandler o m -- ^ exception handler
-> CPST (Excep o m ()) m (Excep o m ()) -- ^ computation in which to catch exceptions
-> CPST (Excep o m ()) m (Excep o m ())
catchC f m = callCC (\hk -> capture (h hk) m)
where h hk (Excep xv hc xk) =
let hc' exc' = capture (h hk) (hc exc')
xk' v = capture (h hk) (xk v) >>= hk
passOn endure =
if endure
then raise (Excep xv hc' xk')
else raise (Excep xv hc xk')
resume endure =
if endure
then xk' () >>= raise -- might want other than (),
else xk () >>= raise -- say, for implementing state
continue endure =
if endure
then install hc' >> hk (Excep xv hc' xk') >>= raise
else install hc >> hk (Excep xv hc xk') >>= raise
in f xv passOn resume continue
-- | Like abort, ends in the middle of a program. Can be resumed.
end :: (Monad m, ResultType o) => CPST (Excep o m i) m i
end = raiseCC okValue
-- | Can be added at the end of a @do@-sequence to make the return type
-- match the ultimate return type. It returns OK with a continuation that
-- regenerates itself.
done :: (Monad m, ResultType o) => CPST (Excep o m i) m (Excep o m i)
done = return (Excep okValue return (\_ -> done))
-- | Raises an exception that can't be resumed, and can be used in any context.
die :: (Monad m, ResultType o) => o -> CPST (Excep o m i) m a
die x = raise (Excep x return (error "cannot resume from a die"))
-- Note: getCC might seem like a nice idea, but it's untypable, since the continuation
-- has to take itself as an argument. An incorrect attempt:
--
-- getcc :: CPST o m (Cont m o i)
-- getcc = callCC (\k -> return k)
| null | https://raw.githubusercontent.com/lylek/vintage-basic/6cb46767aeb6d2c0b9da4175c7fa8c0ffc31b1c7/src/Control/Monad/CPST/DurableTraps.hs | haskell | throw exceptions.
| The full-blown exception including not only an exception value,
but a handler stack and continuation. Having all this allows
an exception handler to manipulate the handler stack or resume
using the exception's continuation.
| Packages the current continuation with the exception value,
and an empty handler stack.
| A flexible exception handling mechanism. It takes a function 'f' as a
and @continue@.
[@passOn@] pass control of the exception on up the handler chain (re-raise)
[@resume@] return to where the exception was raised (the exception's continuation)
[@continue@] continue with the code following the 'trap' statement
(the handler's continuation)
Each of these continuations is parametrized by a boolean value:
[@endure = True@] keep this handler in force
^ exception handler
Local variables:
[@h hk@] the full handler with all logic
[@xv @] exception value
[@hk @] handler's continuation
[@hc @] handler chain
[@xk @] exception's continuation
might want other than (),
say, for implementing state
| The catching version of trap. It catches exceptions from
an expression, instead of from its continuation.
^ exception handler
^ computation in which to catch exceptions
might want other than (),
say, for implementing state
| Like abort, ends in the middle of a program. Can be resumed.
| Can be added at the end of a @do@-sequence to make the return type
match the ultimate return type. It returns OK with a continuation that
regenerates itself.
| Raises an exception that can't be resumed, and can be used in any context.
Note: getCC might seem like a nice idea, but it's untypable, since the continuation
has to take itself as an argument. An incorrect attempt:
getcc :: CPST o m (Cont m o i)
getcc = callCC (\k -> return k) | # LANGUAGE FlexibleContexts , RankNTypes #
| Support for advanced exception handling within the CPST monad .
module Control.Monad.CPST.DurableTraps where
import Control.Monad.CPST
import Control.Monad.CPST.ExceptionHandlers(capture,install,raise)
class ResultType o where
okValue :: o
| A continuation in the CPST monad , for code that may
type Cont o m i = i -> CPST (Excep o m i) m (Excep o m i)
[ @o@ ] is the result type ( e.g. , OK , or an exception value )
[ @i@ ] is the type to pass to the continuation ( bound to the raise )
Both types must remain the same throughout your CPS computation .
data Excep o m i =
Excep { exceptionValue :: o
, exceptionHandlerStack :: (Excep o m i -> CPST (Excep o m i) m (Excep o m i))
, exceptionContinuation :: (Cont o m i)
}
raiseCC :: (Monad m, ResultType o) => o -> CPST (Excep o m i) m i
raiseCC x = callCC (\k -> raise (Excep x return k))
-- | A way of resuming after a handled exception.  The Bool flag says
-- whether the current handler should stay in force (True) or be
-- removed from the chain (False).
type ExceptionResumer o m = forall a. Bool -> CPST (Excep o m ()) m a

-- | An exception handler: receives the raised value plus three
-- resumers (@passOn@, @resume@ and @continue@ -- see 'trap').
type ExceptionHandler o m = o -> ExceptionResumer o m -> ExceptionResumer o m
                              -> ExceptionResumer o m -> CPST (Excep o m ()) m (Excep o m ())
parameter , to which it passes three continuations : @passOn@ , @resume@ ,
[ @endure = False@ ] disable this handler ( remove it from the chain )
-- | Install a durable exception handler around the continuation of this
-- call.  The handler gets the raised value @xv@ and three resumers, each
-- parametrized by an @endure@ flag: True keeps this handler in force,
-- False removes it from the chain.
--
-- NOTE(review): the type signature and the branches of @resume@ below
-- appear truncated by extraction; compare with the upstream
-- DurableTraps source before relying on this block.
trap :: Monad m
     -> CPST (Excep o m ()) m ()
trap f = callCC (\hk -> install (h hk))
  -- h hk is the full handler; hk is this trap's own continuation.
  where h hk (Excep xv hc xk) =
          -- Re-arm this handler around the saved chain / continuation.
          let hc' exc' = capture (h hk) (hc exc')
              xk' v = capture (h hk) (xk v)
              -- Hand the exception on to the next handler in the chain.
              passOn endure =
                if endure
                then raise (Excep xv hc' xk')
                else raise (Excep xv hc xk')
              resume endure =
                if endure
              -- Return control to the trap site (hk), re-raising its result.
              continue endure =
                if endure
                then install hc' >>= hk >>= raise
                else install hc >>= hk >>= raise
          in f xv passOn resume continue
-- | The catching version of 'trap': it catches exceptions raised from
-- the expression @m@ instead of from the continuation of the call.
--
-- NOTE(review): as in 'trap', the signature and the branches of
-- @resume@ appear truncated by extraction.
catchC :: Monad m
       -> CPST (Excep o m ()) m (Excep o m ())
catchC f m = callCC (\hk -> capture (h hk) m)
  where h hk (Excep xv hc xk) =
          let hc' exc' = capture (h hk) (hc exc')
              -- Resuming the raise site also returns through hk here.
              xk' v = capture (h hk) (xk v) >>= hk
              passOn endure =
                if endure
                then raise (Excep xv hc' xk')
                else raise (Excep xv hc xk')
              resume endure =
                if endure
              continue endure =
                if endure
                then install hc' >> hk (Excep xv hc' xk') >>= raise
                else install hc >> hk (Excep xv hc xk') >>= raise
          in f xv passOn resume continue
-- | Ends in the middle of a program by raising 'okValue'; because it
-- goes through 'raiseCC', the end point can be resumed.
end :: (Monad m, ResultType o) => CPST (Excep o m i) m i
end = raiseCC okValue
-- | Can be added at the end of a @do@-sequence to make the return type
-- match the ultimate return type: returns OK together with a
-- continuation that simply regenerates itself.
done :: (Monad m, ResultType o) => CPST (Excep o m i) m (Excep o m i)
done = return (Excep { exceptionValue        = okValue
                     , exceptionHandlerStack = return
                     , exceptionContinuation = const done })
-- | Raises an exception that can't be resumed, and can be used in any
-- context (the result type @a@ is unconstrained).
die :: (Monad m, ResultType o) => o -> CPST (Excep o m i) m a
die reason = raise (Excep { exceptionValue        = reason
                          , exceptionHandlerStack = return
                          , exceptionContinuation = error "cannot resume from a die" })
|
9a63f4d4a835f9cce9106dd5e9f280301c796b039e76156bed8770df12a5c656 | swarmpit/swarmpit | util.cljs | (ns material.component.list.util
(:refer-clojure :exclude [filter])
(:require [swarmpit.utils :refer [map-values]]))
(defn- primary-key
  "Return the :key of the first metadata entry whose :primary flag is
   true, or nil when no entry is marked primary."
  [render-metadata]
  (:key (first (clojure.core/filter #(true? (:primary %)) render-metadata))))
(defn render-keys
  "Collect the :key of every metadata entry into a vector, in order."
  [render-metadata]
  (mapv :key render-metadata))
(defn render-value?
  "Decide whether VALUE is worth rendering: falsy values are not,
   collections only when non-empty, everything else is."
  [value]
  (cond
    (not value)   false
    (coll? value) (not (empty? value))
    :else         true))
(defn filter
  "Keep only the items whose values contain the substring QUERY.
   Queries shorter than two characters match everything."
  [items query]
  (if (or (empty? query) (< (count query) 2))
    items
    (clojure.core/filter
     (fn [item]
       ;; an item matches when any of its values contains the query
       (some #(clojure.string/includes? % query)
             (map-values item)))
     items)))
| null | https://raw.githubusercontent.com/swarmpit/swarmpit/38ffbe08e717d8620bf433c99f2e85a9e5984c32/src/cljs/material/component/list/util.cljs | clojure | (ns material.component.list.util
(:refer-clojure :exclude [filter])
(:require [swarmpit.utils :refer [map-values]]))
;; NOTE(review): the four definitions below are a byte-for-byte duplicate
;; of the material.component.list.util definitions above (dataset /
;; extraction artifact); keep the two copies in sync if either changes.

;; Return the :key of the first entry marked :primary, or nil.
(defn- primary-key
  [render-metadata]
  (->> (clojure.core/filter #(= true (:primary %)) render-metadata)
       (first)
       :key))

;; Vector of every entry's :key, in order.
(defn render-keys
  [render-metadata]
  (->> render-metadata
       (map :key)
       (into [])))

;; Truthy, and non-empty when a collection.
(defn render-value?
  [value]
  (if value
    (if (coll? value)
      (not (empty? value))
      true)
    false))

;; Keep items whose values contain QUERY; queries under 2 chars match all.
(defn filter
  [items query]
  (if (or (empty? query)
          (< (count query) 2))
    items
    (clojure.core/filter
     (fn [item]
       (->> (map-values item)
            (clojure.core/filter #(clojure.string/includes? % query))
            (empty?)
            (not))) items)))
| |
1e0d1c9a00b913cc682aa56e9815dde3c499ec474fcb8c3cc5411c01159044ad | chumsley/jwacs | test-ugly-print.lisp | ;;;; test-ugly-print.lisp
;;;
;;; Tests for the ugly printer
;;;
;;; Copyright (c) 2005
;;; See LICENSE for full licensing details.
;;;
(in-package :jwacs-tests)
;;;; Helper functions
;; Render a parse-tree element with the ugly-printer, capturing the output.
(defun ugly-string (elm)
  "Uglyprint ELM to a string value instead of a stream"
  (with-output-to-string (s)
    (ugly-print elm s)))
;; NOTE(review): only *GENVAR-COUNTER* is rebound here; the docstring's
;; claim about continuation arguments presumably follows from that
;; counter being reset -- confirm against the generator code.
(defmacro with-fresh-genvar (&body body)
  "Make sure that GENVAR variable names will start from 0 and that
continuation arguments will have a known value"
  `(let* ((*genvar-counter* 0))
    ,@body))
;;;; Test categories
(defnote ugly-print "tests for the ugly printer")
(defnote uniquify "tests for the uniquify transformation")
;;;; Ugly-printer tests
;; A local-scope var declaration gets a generated JW0 name.
(deftest ugly-print/var-decl/1 :notes ugly-print
  (with-fresh-genvar
    (in-local-scope
      (ugly-string (test-parse "var x = 3;"))))
  "var JW0=3;")
;; In local scope even the function's own name is renamed.
(deftest ugly-print/function-decl/1 :notes ugly-print
  (with-fresh-genvar
    (in-local-scope
      (ugly-string (test-parse "function FOO(){}"))))
  "function JW0(){}")
(deftest ugly-print/function-decl/2 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "function FOO(x){}"))))
"function JW0(JW1){}")
;; Without in-local-scope the toplevel function name FOO is preserved;
;; only its parameter and local variable are renamed.
(deftest ugly-print/function-decl/3 :notes ugly-print
  (with-fresh-genvar
    (ugly-string (test-parse "function FOO(x){ var y = x; }")))
  "function FOO(JW0){var JW1=JW0;}")
(deftest ugly-print/function-decl/4 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "function FOO(){ FOO(); }"))))
"function JW0(){JW0();}")
(deftest ugly-print/function-decl/5 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(let ((jw::*pretty-mode* nil))
(jw::uglify-vars (test-parse "
function recursiveCount(i, n)
{
if(i > n)
return i - 1;
else
{
WScript.echo(i + '/' + n);
return recursiveCount(i + 1, n);
}
}")))))
#.(test-parse "
function JW0(JW1, JW2)
{
if(JW1 > JW2)
return JW1 - 1;
else
{
WScript.echo(JW1 + '/' + JW2);
return JW0(JW1 + 1, JW2);
}
}"))
;; ensure vardecls in blocks shadow function vars
;;
;; function foo(x) <-- this x could be JW0
;; {
;;   var x = 3; <-- this x should be JW1 not 0
;;   bar(x); <-- this x should be JW1
;; }
;;
;; A var declaration shadowing a parameter gets its own fresh name.
(deftest ugly-print/function-decl-arg-shadow/1 :notes ugly-print
  (with-fresh-genvar
    (ugly-string (test-parse "function FOO(x){ var x = 3; }")))
  "function FOO(JW0){var JW1=3;}")
;; Same shadowing, but in local scope the function name is renamed too,
;; and the use of x in the body resolves to the shadowing declaration.
(deftest ugly-print/function-decl-arg-shadow/2 :notes ugly-print
  (with-fresh-genvar
    (in-local-scope
      (ugly-string (test-parse "function FOO(x){ var x = 3; FOO(x);}"))))
  "function JW0(JW1){var JW2=3;JW0(JW2);}")
(deftest ugly-print/function-in-function/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x) {
function BAR(z) {
return z + y;
}
var y = 3;
BAR(3);
}")))
"function FOO(JW0){function JW2(JW3){return JW3+JW1;}var JW1=3;JW2(3);}")
(deftest ugly-print/function-in-function/2 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x) {
var y = 3;
function BAR(z) {
return z + y;
}
BAR(3);
}")))
"function FOO(JW0){var JW1=3;function JW2(JW3){return JW3+JW1;}JW2(3);}")
(deftest ugly-print/function-in-function-in-function/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x) {
function BAR(z) {
function BAZ(xz) {
return 3 + y;
}
return z + y + BAZ(3);
}
var y = 3;
BAR(3);
}")))
"function FOO(JW0){function JW2(JW3){function JW4(JW5){return 3+JW1;}return JW3+JW1+JW4(3);}var JW1=3;JW2(3);}")
(deftest ugly-print/blocks/1 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "{ var y = 3;
{
var x = 1;
}
x + y;
}"))))
"{var JW0=3;{var JW1=1;}JW1+JW0;}")
;; Free references (z) are left untouched; declared vars are renamed.
(deftest ugly-print/free-variables/1 :notes ugly-print
  (with-fresh-genvar
    (in-local-scope
      (ugly-string (test-parse "var x = 10;
var y = x + z;"))))
  "var JW0=10;var JW1=JW0+z;")
(deftest ugly-print/free-variables/2 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "var x = foo;
function bar(m)
{
var y=m*2;
if(y > x)
return bar(m--);
else
return m;
}")))
"var x=foo;function bar(JW0){var JW1=JW0*2;if(JW1>x)return bar(JW0--);else return JW0;}")
(deftest ugly-print/pretty-variable/1 :notes ugly-print
(with-fresh-genvar
(let ((jw::*pretty-mode* t))
(jw::uglify-vars (test-parse "
function fn(arg1, arg2)
{
function bar() { return 7; }
var foo = 10;
WScript.echo(foo + arg2);
}"))))
#.(test-parse "
function fn(arg1$0, arg2$1)
{
function bar$3() { return 7; }
var foo$2 = 10;
WScript.echo(foo$2 + arg2$1);
}"))
(deftest ugly-print/for-loop-does-not-create-new-scope/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "
var top = 10;
top < 100 ; top++ )
{
echo(top);
}")))
"var top=10;for(var top=0;top<100;top++)echo(top);")
(deftest ugly-print/for-loop-does-not-create-new-scope/2 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "
var top = 10;
for(var top in topVars)
{
echo(top);
}")))
"var top=10;for(var top in topVars)echo(top);")
;; Identifier matching is case-sensitive: the local `counter` must not
;; be confused with the toplevel `Counter` and is safely renamed.
(deftest ugly-print/case-sensitivity/1 :notes ugly-print
  (with-fresh-genvar
    (ugly-string (test-parse "
function Counter() {}
function foo()
{
var counter = new Counter;
}")))
  "function Counter(){}function foo(){var JW0=new Counter;}")
(deftest ugly-print/case-sensitivity/2 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "
function Counter() {}
var counter = new Counter;")))
;; Toplevel identifiers (including the `counter` var) should not be changed
;;;; Uniquify tests
(deftest uniquify/position-preservation/1 :notes uniquify
(with-fresh-genvar
(transform 'uniquify (parse "function foo(x) { var x = 10; x = 5;}")))
(#s(function-decl :name "foo" :parameters ("x$0")
:body (#s(var-decl-statement :var-decls (#s(var-decl :name "x$1"
:initializer #s(numeric-literal :value 10 :start 26 :end 28)
:start 22 :end 28))
:start 18 :end 29)
#s(binary-operator :op-symbol :assign
:left-arg #s(identifier :name "x$1"
:start 30 :end 31)
:right-arg #s(numeric-literal :value 5 :start 34 :end 35)
:start 30 :end 35))
:start 0 :end 35))) | null | https://raw.githubusercontent.com/chumsley/jwacs/c25adb3bb31fc2dc6e8c8a58346949ee400633d7/tests/test-ugly-print.lisp | lisp | test-ugly-print.lisp
Tests for the ugly printer
See LICENSE for full licensing details.
Helper functions
Test categories
Ugly-printer tests
function foo(x) <-- this x could be JW0
{
< -- this x should be JW1 not 0
bar(x); <-- this x should be JW1
}
"))))
}
}
top++ )
"))) | Copyright ( c ) 2005
(in-package :jwacs-tests)
;; NOTE(review): duplicate of ugly-string above (extraction artifact).
(defun ugly-string (elm)
  "Uglyprint ELM to a string value instead of a stream"
  (with-output-to-string (s)
    (ugly-print elm s)))
;; NOTE(review): duplicate of with-fresh-genvar above (extraction artifact).
(defmacro with-fresh-genvar (&body body)
  "Make sure that GENVAR variable names will start from 0 and that
continuation arguments will have a known value"
  `(let* ((*genvar-counter* 0))
    ,@body))
(defnote ugly-print "tests for the ugly printer")
(defnote uniquify "tests for the uniquify transformation")
(deftest ugly-print/var-decl/1 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "var x = 3;"))))
"var JW0=3;")
(deftest ugly-print/function-decl/1 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "function FOO(){}"))))
"function JW0(){}")
(deftest ugly-print/function-decl/2 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "function FOO(x){}"))))
"function JW0(JW1){}")
(deftest ugly-print/function-decl/3 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x){ var y = x; }")))
"function FOO(JW0){var JW1=JW0;}")
(deftest ugly-print/function-decl/4 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "function FOO(){ FOO(); }"))))
"function JW0(){JW0();}")
(deftest ugly-print/function-decl/5 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(let ((jw::*pretty-mode* nil))
(jw::uglify-vars (test-parse "
function recursiveCount(i, n)
{
if(i > n)
else
{
}
}")))))
#.(test-parse "
function JW0(JW1, JW2)
{
if(JW1 > JW2)
else
{
}
}"))
ensure vardecls in blocks shadow function vars
(deftest ugly-print/function-decl-arg-shadow/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x){ var x = 3; }")))
"function FOO(JW0){var JW1=3;}")
(deftest ugly-print/function-decl-arg-shadow/2 :notes ugly-print
(with-fresh-genvar
(in-local-scope
(ugly-string (test-parse "function FOO(x){ var x = 3; FOO(x);}"))))
"function JW0(JW1){var JW2=3;JW0(JW2);}")
(deftest ugly-print/function-in-function/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x) {
function BAR(z) {
}
}")))
"function FOO(JW0){function JW2(JW3){return JW3+JW1;}var JW1=3;JW2(3);}")
(deftest ugly-print/function-in-function/2 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x) {
function BAR(z) {
}
}")))
"function FOO(JW0){var JW1=3;function JW2(JW3){return JW3+JW1;}JW2(3);}")
(deftest ugly-print/function-in-function-in-function/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "function FOO(x) {
function BAR(z) {
function BAZ(xz) {
}
}
}")))
"function FOO(JW0){function JW2(JW3){function JW4(JW5){return 3+JW1;}return JW3+JW1+JW4(3);}var JW1=3;JW2(3);}")
(deftest ugly-print/blocks/1 :notes ugly-print
(with-fresh-genvar
(in-local-scope
{
}
}"))))
"{var JW0=3;{var JW1=1;}JW1+JW0;}")
(deftest ugly-print/free-variables/1 :notes ugly-print
(with-fresh-genvar
(in-local-scope
"var JW0=10;var JW1=JW0+z;")
(deftest ugly-print/free-variables/2 :notes ugly-print
(with-fresh-genvar
function bar(m)
{
if(y > x)
else
}")))
"var x=foo;function bar(JW0){var JW1=JW0*2;if(JW1>x)return bar(JW0--);else return JW0;}")
(deftest ugly-print/pretty-variable/1 :notes ugly-print
(with-fresh-genvar
(let ((jw::*pretty-mode* t))
(jw::uglify-vars (test-parse "
function fn(arg1, arg2)
{
}"))))
#.(test-parse "
function fn(arg1$0, arg2$1)
{
}"))
(deftest ugly-print/for-loop-does-not-create-new-scope/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "
{
}")))
"var top=10;for(var top=0;top<100;top++)echo(top);")
(deftest ugly-print/for-loop-does-not-create-new-scope/2 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "
for(var top in topVars)
{
}")))
"var top=10;for(var top in topVars)echo(top);")
(deftest ugly-print/case-sensitivity/1 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "
function Counter() {}
function foo()
{
}")))
"function Counter(){}function foo(){var JW0=new Counter;}")
(deftest ugly-print/case-sensitivity/2 :notes ugly-print
(with-fresh-genvar
(ugly-string (test-parse "
function Counter() {}
Toplevel identifiers ( including the ` counter ` var ) should not be changed
Uniquify tests
(deftest uniquify/position-preservation/1 :notes uniquify
(with-fresh-genvar
(transform 'uniquify (parse "function foo(x) { var x = 10; x = 5;}")))
(#s(function-decl :name "foo" :parameters ("x$0")
:body (#s(var-decl-statement :var-decls (#s(var-decl :name "x$1"
:initializer #s(numeric-literal :value 10 :start 26 :end 28)
:start 22 :end 28))
:start 18 :end 29)
#s(binary-operator :op-symbol :assign
:left-arg #s(identifier :name "x$1"
:start 30 :end 31)
:right-arg #s(numeric-literal :value 5 :start 34 :end 35)
:start 30 :end 35))
:start 0 :end 35))) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.