_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
fc69e1efbc248d3f0386e941a733c66b43d7fd30c32323fc573f8f314d35ddae | swtwsk/vinci-lang | FreeVariables.hs | module Core.FreeVariables (
freeVariablesProg,
freeVariablesExpr
) where
import Control.Monad.Reader
import qualified Data.Set as Set
import Data.Functor ((<&>))
import Core.AST
-- | A set of variable identifiers carrying annotation type @a@.
type VarSet a = Set.Set (VarId a)
-- | Free variables of a whole program: variables referenced in the
-- body that are neither the program's own name nor one of its
-- parameters.
freeVariablesProg :: (EquableFunctor a) => Prog a -> VarSet a
freeVariablesProg (Prog name params body) =
    freeVariablesExpr body (Set.fromList (name : params))
-- | Free variables of an expression, given the set of variables that
-- are already bound in the enclosing scope.
freeVariablesExpr :: (EquableFunctor a) => Expr a -> VarSet a -> VarSet a
freeVariablesExpr expr bound = runReader (freeVariables' expr) bound
-- | Compute the free variables of an expression.  The 'Reader'
-- environment carries the set of variables bound by enclosing
-- binders; a 'Var' is free exactly when it is not in that set.
freeVariables' :: (EquableFunctor a) => Expr a -> Reader (VarSet a) (VarSet a)
freeVariables' (Var v) = ask <&> \bound -> if Set.member v bound
    then Set.empty
    else Set.singleton v
freeVariables' (Lit _) = return Set.empty
freeVariables' (App e1 e2) =
    Set.union <$> freeVariables' e1 <*> freeVariables' e2
freeVariables' (If c e1 e2) = do
    fvc <- freeVariables' c
    fv1 <- freeVariables' e1
    fv2 <- freeVariables' e2
    return $ fvc `Set.union` fv1 `Set.union` fv2
-- Set.unions is total: unlike the previous foldl1 Set.union it does
-- not crash when a constructor or tuple has zero arguments.
freeVariables' (Cons _ exprs) = Set.unions <$> mapM freeVariables' exprs
freeVariables' (FieldGet _ e) = freeVariables' e
freeVariables' (TupleCons exprs) = Set.unions <$> mapM freeVariables' exprs
freeVariables' (TupleProj _i e) = freeVariables' e
-- @n@ is bound in the body @e2@ but not in its own definition @e1@
-- (this let is non-recursive).
freeVariables' (Let n e1 e2) = do
    fv1 <- freeVariables' e1
    fv2 <- local (Set.insert n) (freeVariables' e2)
    return $ fv1 `Set.union` fv2
-- A local function binds its own name (allowing recursion) plus its
-- arguments inside its body; only its name is bound in the
-- continuation @e2@.
freeVariables' (LetFun (Prog f args e1) e2) = do
    fv1 <- local (Set.union (Set.fromList args) . Set.insert f) (freeVariables' e1)
    fv2 <- local (Set.insert f) (freeVariables' e2)
    return $ fv1 `Set.union` fv2
freeVariables' (BinOp _ e1 e2) =
    Set.union <$> freeVariables' e1 <*> freeVariables' e2
freeVariables' (UnOp _ e) = freeVariables' e
| null | https://raw.githubusercontent.com/swtwsk/vinci-lang/9c7e01953e0b1cf135af7188e0c71fe6195bdfa1/src/Core/FreeVariables.hs | haskell | module Core.FreeVariables (
freeVariablesProg,
freeVariablesExpr
) where
import Control.Monad.Reader
import qualified Data.Set as Set
import Data.Functor ((<&>))
import Core.AST
type VarSet a = Set.Set (VarId a)
freeVariablesProg :: (EquableFunctor a) => Prog a -> VarSet a
freeVariablesProg (Prog f args e) = freeVariablesExpr e (Set.fromList (f:args))
freeVariablesExpr :: (EquableFunctor a) => Expr a -> VarSet a -> VarSet a
freeVariablesExpr e = runReader (freeVariables' e)
freeVariables' :: (EquableFunctor a) => Expr a -> Reader (VarSet a) (VarSet a)
freeVariables' (Var v) = ask <&> \s -> if Set.member v s
then Set.empty
else Set.singleton v
freeVariables' (Lit _) = return Set.empty
freeVariables' (App e1 e2) = do
fv1 <- freeVariables' e1
fv2 <- freeVariables' e2
return $ fv1 `Set.union` fv2
freeVariables' (If c e1 e2) = do
fvc <- freeVariables' c
fv1 <- freeVariables' e1
fv2 <- freeVariables' e2
return $ fvc `Set.union` fv1 `Set.union` fv2
freeVariables' (Cons _ exprs) = do
fvs <- mapM freeVariables' exprs
return $ foldl1 Set.union fvs
freeVariables' (FieldGet _ e) = freeVariables' e
freeVariables' (TupleCons exprs) = do
fvs <- mapM freeVariables' exprs
return $ foldl1 Set.union fvs
freeVariables' (TupleProj _i e) = freeVariables' e
freeVariables' (Let n e1 e2) = do
fv1 <- freeVariables' e1
fv2 <- local (Set.insert n) (freeVariables' e2)
return $ fv1 `Set.union` fv2
freeVariables' (LetFun (Prog f args e1) e2) = do
fv1 <- local (Set.union (Set.fromList args) . Set.insert f) (freeVariables' e1)
fv2 <- local (Set.insert f) (freeVariables' e2)
return $ fv1 `Set.union` fv2
freeVariables' (BinOp _ e1 e2) = do
fv1 <- freeVariables' e1
fv2 <- freeVariables' e2
return $ fv1 `Set.union` fv2
freeVariables' (UnOp _ e) = freeVariables' e
| |
163fac98892d7d143c754d7475fefdf63de4ca640332f391036ecc57ac241038 | andy128k/cl-gobject-introspection | trampoline.lisp | (in-package :gir)
(defvar *trampolines* (make-hash-table))
(defun make-trampoline (func &optional pointer)
(let ((ptr (or pointer (cffi:foreign-alloc :int))))
(setf (gethash (cffi:pointer-address ptr) *trampolines*) func)
ptr))
(defun trampoline-get-function (ptr)
(gethash (cffi:pointer-address ptr) *trampolines*))
(defun destroy-trampoline (ptr)
(when (not (cffi:null-pointer-p ptr))
(remhash (cffi:pointer-address ptr) *trampolines*)))
(cffi:defcallback destroy-trampoline :void ((ptr :pointer))
(destroy-trampoline ptr))
| null | https://raw.githubusercontent.com/andy128k/cl-gobject-introspection/13f7ea0c4b33ec0f91eed5131d271dc74f6ea3d2/src/trampoline.lisp | lisp | (in-package :gir)
(defvar *trampolines* (make-hash-table))
(defun make-trampoline (func &optional pointer)
(let ((ptr (or pointer (cffi:foreign-alloc :int))))
(setf (gethash (cffi:pointer-address ptr) *trampolines*) func)
ptr))
(defun trampoline-get-function (ptr)
(gethash (cffi:pointer-address ptr) *trampolines*))
(defun destroy-trampoline (ptr)
(when (not (cffi:null-pointer-p ptr))
(remhash (cffi:pointer-address ptr) *trampolines*)))
(cffi:defcallback destroy-trampoline :void ((ptr :pointer))
(destroy-trampoline ptr))
| |
81b3df90f8423584e02481a62eefd53e69e665fb9a2368f62581056e433b711a | ocaml/merlin | mreader_recover.mli | module Make
(Parser : MenhirLib.IncrementalEngine.EVERYTHING)
(Recovery : sig
val default_value : Location.t -> 'a Parser.symbol -> 'a
type action =
| Abort
| R of int
| S : 'a Parser.symbol -> action
| Sub of action list
type decision =
| Nothing
| One of action list
| Select of (int -> action list)
val depth : int array
val can_pop : 'a Parser.terminal -> bool
val recover : int -> decision
val guide : 'a Parser.symbol -> bool
val token_of_terminal : 'a Parser.terminal -> 'a -> Parser.token
val nullable : 'a Parser.nonterminal -> bool
end)
(Dump : sig
val symbol : unit -> Parser.xsymbol -> string
end) :
sig
type 'a candidate = {
line: int;
min_col: int;
max_col: int;
env: 'a Parser.env;
}
type 'a candidates = {
popped: Parser.xsymbol list;
shifted: Parser.xsymbol option;
final: 'a option;
candidates: 'a candidate list;
}
val attempt : 'a candidates ->
Parser.token * Lexing.position * Lexing.position ->
[> `Accept of 'a
| `Fail
| `Ok of 'a Parser.checkpoint * 'a Parser.env ]
val generate : 'a Parser.env -> 'a candidates
end
| null | https://raw.githubusercontent.com/ocaml/merlin/e576bc75f11323ec8489d2e58a701264f5a7fe0e/src/kernel/mreader_recover.mli | ocaml | module Make
(Parser : MenhirLib.IncrementalEngine.EVERYTHING)
(Recovery : sig
val default_value : Location.t -> 'a Parser.symbol -> 'a
type action =
| Abort
| R of int
| S : 'a Parser.symbol -> action
| Sub of action list
type decision =
| Nothing
| One of action list
| Select of (int -> action list)
val depth : int array
val can_pop : 'a Parser.terminal -> bool
val recover : int -> decision
val guide : 'a Parser.symbol -> bool
val token_of_terminal : 'a Parser.terminal -> 'a -> Parser.token
val nullable : 'a Parser.nonterminal -> bool
end)
(Dump : sig
val symbol : unit -> Parser.xsymbol -> string
end) :
sig
type 'a candidate = {
line: int;
min_col: int;
max_col: int;
env: 'a Parser.env;
}
type 'a candidates = {
popped: Parser.xsymbol list;
shifted: Parser.xsymbol option;
final: 'a option;
candidates: 'a candidate list;
}
val attempt : 'a candidates ->
Parser.token * Lexing.position * Lexing.position ->
[> `Accept of 'a
| `Fail
| `Ok of 'a Parser.checkpoint * 'a Parser.env ]
val generate : 'a Parser.env -> 'a candidates
end
| |
18ec27fe200a23ac20dc247cc80b0b8717f699b9d6dd1094e506cef06bb29ab9 | blindglobe/lisp-matrix | macros.lisp | (in-package :lisp-matrix)
(defmacro define-abstract-class (classname super-list &body body)
"A wrapper for DEFCLASS that lets you define abstract base classes.
If you try to instantiate an object of this class, a warning is signaled."
`(progn
(defclass ,classname ,super-list ,@body)
;; Protect against abstract class instantiation.
;; We could remove this programmatically later using a
;; compile-time constant (or even check the optimization options
;; and remove it if SAFETY is set low enough).
(defmethod initialize-instance :before ((x ,classname) &key)
(if (eql (type-of x) ',classname)
(warn "~A is an abstract base class and not to be instantiated."
(quote ',classname))))))
| null | https://raw.githubusercontent.com/blindglobe/lisp-matrix/f9c88dae132baf52884dd54612581d1331b82b40/src/macros.lisp | lisp | Protect against abstract class instantiation.
We could remove this programmatically later using a
compile-time constant (or even check the optimization options
and remove it if SAFETY is set low enough). | (in-package :lisp-matrix)
(defmacro define-abstract-class (classname super-list &body body)
"A wrapper for DEFCLASS that lets you define abstract base classes.
If you try to instantiate an object of this class, a warning is signaled."
`(progn
(defclass ,classname ,super-list ,@body)
(defmethod initialize-instance :before ((x ,classname) &key)
(if (eql (type-of x) ',classname)
(warn "~A is an abstract base class and not to be instantiated."
(quote ',classname))))))
|
ef482631b6900cbee65fcef57c07034869e21e5b699dcf720e309a341fc40cc5 | ocaml/uchar | testpkg.ml |
let () =
Format.printf "%a\n" Uchar.dump (Uchar.of_int 0x1F42B) | null | https://raw.githubusercontent.com/ocaml/uchar/f9988830581a1f233d32e79aaacf8af76ddb9613/test/testpkg.ml | ocaml |
let () =
Format.printf "%a\n" Uchar.dump (Uchar.of_int 0x1F42B) | |
eaeb6d083fd773fce85ec4fe8a597377483bcbfa1c4c4e065d0d968f85c31051 | slipstream/SlipStreamServer | resource_metadata_value_scope.cljc | (ns com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope
"schema definitions for the 'vscope' field of a ResourceMetadata resource"
(:require
[clojure.spec.alpha :as s]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-enumeration :as enumeration]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-item :as item]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-range :as range]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-single-value :as single-value]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-unit :as unit]
[spec-tools.core :as st]))
(s/def ::value-scope (s/or :unit ::unit/unit
:single-value ::single-value/single-value
:range ::range/range
:enumeration ::enumeration/enumeration
:item ::item/collection-item))
;; FIXME: This function shouldn't be necessary!
;; There is a problem when using the ::value-scope spec directly in the
s / map - of expression in st / spec . Validation throws an exception when
;; trying to validate against single-value or collection-item. Hiding
;; the details behind this function works, but clearly isn't ideal for
;; error reporting. The reason for the problem needs to be determined
;; and either worked around or fixed.
(defn valid-value?
[x]
(s/valid? ::value-scope x))
(s/def ::vscope
(st/spec {:spec (s/map-of keyword? valid-value? :min-count 1)
:json-schema/indexed false}))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi/src/com/sixsq/slipstream/ssclj/resources/spec/resource_metadata_value_scope.cljc | clojure | FIXME: This function shouldn't be necessary!
There is a problem when using the ::value-scope spec directly in the
trying to validate against single-value or collection-item. Hiding
the details behind this function works, but clearly isn't ideal for
error reporting. The reason for the problem needs to be determined
and either worked around or fixed. | (ns com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope
"schema definitions for the 'vscope' field of a ResourceMetadata resource"
(:require
[clojure.spec.alpha :as s]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-enumeration :as enumeration]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-item :as item]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-range :as range]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-single-value :as single-value]
[com.sixsq.slipstream.ssclj.resources.spec.resource-metadata-value-scope-unit :as unit]
[spec-tools.core :as st]))
(s/def ::value-scope (s/or :unit ::unit/unit
:single-value ::single-value/single-value
:range ::range/range
:enumeration ::enumeration/enumeration
:item ::item/collection-item))
s / map - of expression in st / spec . Validation throws an exception when
(defn valid-value?
[x]
(s/valid? ::value-scope x))
(s/def ::vscope
(st/spec {:spec (s/map-of keyword? valid-value? :min-count 1)
:json-schema/indexed false}))
|
27599d3cb73f8227a981f3a18fee23513e88cbe2e85059757dbbfb1fea1f0c1e | susisu/est-ocaml | printer.mli | open Core
module type Printer_intf = sig
module Config : sig
type options
type t
val options_of_sexp : Sexp.t -> options
val empty_options : options
val merge_options : options -> options -> options
val of_options : default:t -> options -> t
end
val print_to_channel : Config.t -> Out_channel.t -> Value.t -> unit
end
exception Print_error of string
module Table_config : sig
type options = {
strict : bool sexp_option;
separator: string sexp_option;
precision: int sexp_option;
default : float sexp_option;
transpose: bool sexp_option;
} [@@deriving sexp]
type t = {
strict : bool;
separator: string;
precision: int;
default : float;
transpose: bool;
}
val empty_options : options
val merge_options : options -> options -> options
val of_options : default:t -> options -> t
end
module Table : Printer_intf with module Config = Table_config
| null | https://raw.githubusercontent.com/susisu/est-ocaml/e610d07b166a51e5763aa4f7b12449ec0438071c/src/printer.mli | ocaml | open Core
module type Printer_intf = sig
module Config : sig
type options
type t
val options_of_sexp : Sexp.t -> options
val empty_options : options
val merge_options : options -> options -> options
val of_options : default:t -> options -> t
end
val print_to_channel : Config.t -> Out_channel.t -> Value.t -> unit
end
exception Print_error of string
module Table_config : sig
type options = {
strict : bool sexp_option;
separator: string sexp_option;
precision: int sexp_option;
default : float sexp_option;
transpose: bool sexp_option;
} [@@deriving sexp]
type t = {
strict : bool;
separator: string;
precision: int;
default : float;
transpose: bool;
}
val empty_options : options
val merge_options : options -> options -> options
val of_options : default:t -> options -> t
end
module Table : Printer_intf with module Config = Table_config
| |
1879aa15101e2478e0be8a719f9a572f050488ba925da5f2bedd5620534857f3 | treeowl/lazify | Internal.hs | # language AllowAmbiguousTypes #
{-# language CPP #-}
# language DataKinds #
{-# language DefaultSignatures #-}
{-# language FlexibleContexts #-}
# language FlexibleInstances #
# language MultiParamTypeClasses #
# language PolyKinds #
{-# language ScopedTypeVariables #-}
{-# language TypeFamilies #-}
{-# language TypeInType #-}
# language TypeOperators #
{-# language UndecidableInstances #-}
# language TypeApplications #
# OPTIONS_HADDOCK not - home #
| Record types in Haskell can be made lazy through lazy pattern
-- matching. This module offers functions for making them lazy
-- /generically/.
module Data.Lazify.Internal (
Lazifiable (..)
, GLazifiable (..)
, genericLazify
, ($~)
) where
import GHC.Generics
import Data.Functor.Product
import Data.Proxy
import Data.Functor.Identity (Identity)
import Control.Monad.Trans.Identity (IdentityT)
import Data.Functor.Compose (Compose)
import Data.Coerce (Coercible)
import Data.Type.Coercion (Coercion(..))
import Control.Applicative (Const)
import Data.Tagged (Tagged)
import GHC.Exts (TYPE)
import Data.Type.Equality ((:~:)(..))
import qualified Data.Monoid as M
import qualified Data.Semigroup as S
import Data.List.NonEmpty (NonEmpty)
import Data.Type.Equality ((:~~:)(..), type (~~))
import Type.Reflection (Typeable, TypeRep, typeRep)
import Data.Tree (Tree (..))
#if MIN_VERSION_base (4,15,0)
import GHC.Tuple (Solo)
#endif
import qualified Control.Applicative.Backwards as AppBackwards
import qualified Data.Functor.Reverse as TravReverse
import GHC.TypeLits
-- | A class for types that can be lazified. A generic
default is provided for convenience . To a type using
-- its generic representation, use 'genericLazify'.
class Lazifiable a where
-- | Lazily rewrap a record. Applying @lazify@ to a record and then
-- pattern matching on it strictly is equivalent to pattern matching
-- on it lazily.
--
-- @
-- strictFirst :: (a -> a') -> (a, b) -> (a', b)
-- strictFirst f (a, b) = (f a, b)
--
-- lazyFirst :: (a -> a') -> (a, b) -> (a', b)
lazyFirst f = strictFirst f .
-- -- Equivalently
-- lazyFirst f ~(a, b) = (f a, b)
-- @
lazify :: a -> a
default lazify :: (Generic a, GLazifiable a (Rep a)) => a -> a
lazify x = genericLazify x
-- | A 'Generic' representation that can be lazified.
class GLazifiable a f where
-- | Lazify a 'Generic' representation.
glazify :: f p -> f p
-- | Lazify a record using its generic representation.
--
-- Note that newtypes are treated specially: a newtype is lazified
by lazifying its /underlying/ type using its ' Lazifiable ' instance .
genericLazify :: forall a. (Generic a, GLazifiable a (Rep a)) => a -> a
genericLazify = to . glazify @a . from
-- | Apply a function to a lazified value.
--
Note to users of @TypeApplications@ : For GHC > = 9.0.1 , the representation
-- is marked as inferred. Before that, doing so is impossible and the
representation must be passed as the first type argument . I 'm sorry .
#if __GLASGOW_HASKELL__ >= 900
($~) :: forall {rep} a (b :: TYPE rep). Lazifiable a => (a -> b) -> a -> b
#else
($~) :: forall rep a (b :: TYPE rep). Lazifiable a => (a -> b) -> a -> b
#endif
f $~ a = f (lazify a)
-- Non-newtype cases
instance GLazifiable a f => GLazifiable a (D1 ('MetaData x y z 'False) f) where
glazify (M1 x) = M1 (glazify @a x)
instance GLazifiable a f => GLazifiable a (C1 c f) where
glazify (M1 x) = M1 (glazify @a x)
instance GLazifiable a f => GLazifiable a (S1 ('MetaSel _p _q _r 'DecidedLazy) f) where
glazify (M1 m) = M1 (glazify @a m)
instance TypeError ('Text "Can't lazify " ':<>: 'ShowType a ':<>: 'Text ":"
':$$: 'Text "It has a strict field.")
=> GLazifiable a (S1 ('MetaSel _p _q _r 'DecidedStrict) f) where
glazify _ = error "Unreachable"
instance TypeError ('Text "Can't lazify " ':<>: 'ShowType a ':<>: 'Text ":"
':$$: 'Text "It has a strict (unpacked) field.")
=> GLazifiable a (S1 ('MetaSel _p _q _r 'DecidedUnpack) f) where
glazify _ = error "Unreachable"
For a newtype , we need to whatever it * wraps *
instance Lazifiable c
=> GLazifiable a (D1 ('MetaData x y z 'True) (C1 _m (S1 _o (Rec0 c)))) where
glazify (M1 (M1 (M1 (K1 x)))) = M1 (M1 (M1 (K1 (lazify x))))
instance GLazifiable a (K1 i c) where
glazify x = x
instance GLazifiable a U1 where
glazify _ = U1
instance (GLazifiable a f, GLazifiable a g) => GLazifiable a (f :*: g) where
glazify ~(x :*: y) = glazify @a x :*: glazify @a y
-- There is no instance for V1 because an uninhabited datatype can't be
-- lazified.
--
There is no instance for f : + : can only be lazified if
-- one of its components is *strict* and *uninhabited* while the other
-- is lazifiable. Unfortunately, there are lots of ways this can
-- occur, leading to incompatible constraints.
instance TypeError ('Text "Can't lazify " ':<>: 'ShowType a ':<>: 'Text ":"
':$$: 'Text "It is a sum type.")
=> GLazifiable a (f :+: g) where
glazify _ = error "Unreachable"
-- Miscellaneous instances
instance Lazifiable (Proxy a)
instance Lazifiable (Product f g a)
instance Lazifiable a => Lazifiable (Identity a)
instance Lazifiable a => Lazifiable (Const a b)
instance Lazifiable b => Lazifiable (Tagged a b)
instance Lazifiable (f (g a)) => Lazifiable (Compose f g a)
instance Lazifiable (f (g a)) => Lazifiable ((f :.: g) a)
instance Lazifiable a => Lazifiable (S.First a)
instance Lazifiable a => Lazifiable (S.Last a)
instance Lazifiable a => Lazifiable (S.Min a)
instance Lazifiable a => Lazifiable (S.Max a)
instance Lazifiable a => Lazifiable (S.Product a)
instance Lazifiable a => Lazifiable (S.Sum a)
instance Lazifiable a => Lazifiable (S.Dual a)
instance Lazifiable a => Lazifiable (S.WrappedMonoid a)
instance Lazifiable (S.Arg a b)
instance Lazifiable (NonEmpty a)
instance Lazifiable (Tree a)
instance Lazifiable (f a) => Lazifiable (M.Alt f a)
#if MIN_VERSION_base(4,12,0)
instance Lazifiable (f a) => Lazifiable (M.Ap f a)
#endif
instance Lazifiable (f a) => Lazifiable (AppBackwards.Backwards f a)
instance Lazifiable (t a) => Lazifiable (TravReverse.Reverse t a)
instance Lazifiable (f a) => Lazifiable (IdentityT f a)
-- Singletons are generally lazifiable under sufficiently boring
-- conditions. These could, at least theoretically, help guide type
-- inference in some cases, if it's more convenient to explain
-- how one *could* get the singleton than to pin down its type
-- by hand.
instance a ~ b => Lazifiable (a :~: b) where
lazify _ = Refl
instance a ~~ b => Lazifiable (a :~~: b) where
lazify _ = HRefl
instance Typeable a => Lazifiable (TypeRep a) where
lazify _ = typeRep
instance Coercible a b => Lazifiable (Coercion a b) where
lazify _ = Coercion
-- Tuple instances
instance Lazifiable ()
#if MIN_VERSION_base (4,15,0)
instance Lazifiable (Solo a)
#endif
instance Lazifiable (a,b)
instance Lazifiable (a,b,c)
instance Lazifiable (a,b,c,d)
instance Lazifiable (a,b,c,d,e)
instance Lazifiable (a,b,c,d,e,f)
instance Lazifiable (a,b,c,d,e,f,g)
-- The below are written by hand because the generic
versions are too big for GHC to compile away the
cruft .
instance Lazifiable (a,b,c,d,e,f,g,h) where
lazify ~(a,b,c,d,e,f,g,h) = (a,b,c,d,e,f,g,h)
instance Lazifiable (a,b,c,d,e,f,g,h,i) where
lazify ~(a,b,c,d,e,f,g,h,i) = (a,b,c,d,e,f,g,h,i)
instance Lazifiable (a,b,c,d,e,f,g,h,i,j) where
lazify ~(a,b,c,d,e,f,g,h,i,j) = (a,b,c,d,e,f,g,h,i,j)
| null | https://raw.githubusercontent.com/treeowl/lazify/3a8edcd13e97779c0848d6eda29e71a15ef299c9/src/Data/Lazify/Internal.hs | haskell | # language CPP #
# language DefaultSignatures #
# language FlexibleContexts #
# language ScopedTypeVariables #
# language TypeFamilies #
# language TypeInType #
# language UndecidableInstances #
matching. This module offers functions for making them lazy
/generically/.
| A class for types that can be lazified. A generic
its generic representation, use 'genericLazify'.
| Lazily rewrap a record. Applying @lazify@ to a record and then
pattern matching on it strictly is equivalent to pattern matching
on it lazily.
@
strictFirst :: (a -> a') -> (a, b) -> (a', b)
strictFirst f (a, b) = (f a, b)
lazyFirst :: (a -> a') -> (a, b) -> (a', b)
-- Equivalently
lazyFirst f ~(a, b) = (f a, b)
@
| A 'Generic' representation that can be lazified.
| Lazify a 'Generic' representation.
| Lazify a record using its generic representation.
Note that newtypes are treated specially: a newtype is lazified
| Apply a function to a lazified value.
is marked as inferred. Before that, doing so is impossible and the
Non-newtype cases
There is no instance for V1 because an uninhabited datatype can't be
lazified.
one of its components is *strict* and *uninhabited* while the other
is lazifiable. Unfortunately, there are lots of ways this can
occur, leading to incompatible constraints.
Miscellaneous instances
Singletons are generally lazifiable under sufficiently boring
conditions. These could, at least theoretically, help guide type
inference in some cases, if it's more convenient to explain
how one *could* get the singleton than to pin down its type
by hand.
Tuple instances
The below are written by hand because the generic | # language AllowAmbiguousTypes #
# language DataKinds #
# language FlexibleInstances #
# language MultiParamTypeClasses #
# language PolyKinds #
# language TypeOperators #
# language TypeApplications #
# OPTIONS_HADDOCK not - home #
| Record types in Haskell can be made lazy through lazy pattern
module Data.Lazify.Internal (
Lazifiable (..)
, GLazifiable (..)
, genericLazify
, ($~)
) where
import GHC.Generics
import Data.Functor.Product
import Data.Proxy
import Data.Functor.Identity (Identity)
import Control.Monad.Trans.Identity (IdentityT)
import Data.Functor.Compose (Compose)
import Data.Coerce (Coercible)
import Data.Type.Coercion (Coercion(..))
import Control.Applicative (Const)
import Data.Tagged (Tagged)
import GHC.Exts (TYPE)
import Data.Type.Equality ((:~:)(..))
import qualified Data.Monoid as M
import qualified Data.Semigroup as S
import Data.List.NonEmpty (NonEmpty)
import Data.Type.Equality ((:~~:)(..), type (~~))
import Type.Reflection (Typeable, TypeRep, typeRep)
import Data.Tree (Tree (..))
#if MIN_VERSION_base (4,15,0)
import GHC.Tuple (Solo)
#endif
import qualified Control.Applicative.Backwards as AppBackwards
import qualified Data.Functor.Reverse as TravReverse
import GHC.TypeLits
default is provided for convenience . To a type using
class Lazifiable a where
lazyFirst f = strictFirst f .
lazify :: a -> a
default lazify :: (Generic a, GLazifiable a (Rep a)) => a -> a
lazify x = genericLazify x
class GLazifiable a f where
glazify :: f p -> f p
by lazifying its /underlying/ type using its ' Lazifiable ' instance .
genericLazify :: forall a. (Generic a, GLazifiable a (Rep a)) => a -> a
genericLazify = to . glazify @a . from
Note to users of @TypeApplications@ : For GHC > = 9.0.1 , the representation
representation must be passed as the first type argument . I 'm sorry .
#if __GLASGOW_HASKELL__ >= 900
($~) :: forall {rep} a (b :: TYPE rep). Lazifiable a => (a -> b) -> a -> b
#else
($~) :: forall rep a (b :: TYPE rep). Lazifiable a => (a -> b) -> a -> b
#endif
f $~ a = f (lazify a)
instance GLazifiable a f => GLazifiable a (D1 ('MetaData x y z 'False) f) where
glazify (M1 x) = M1 (glazify @a x)
instance GLazifiable a f => GLazifiable a (C1 c f) where
glazify (M1 x) = M1 (glazify @a x)
instance GLazifiable a f => GLazifiable a (S1 ('MetaSel _p _q _r 'DecidedLazy) f) where
glazify (M1 m) = M1 (glazify @a m)
instance TypeError ('Text "Can't lazify " ':<>: 'ShowType a ':<>: 'Text ":"
':$$: 'Text "It has a strict field.")
=> GLazifiable a (S1 ('MetaSel _p _q _r 'DecidedStrict) f) where
glazify _ = error "Unreachable"
instance TypeError ('Text "Can't lazify " ':<>: 'ShowType a ':<>: 'Text ":"
':$$: 'Text "It has a strict (unpacked) field.")
=> GLazifiable a (S1 ('MetaSel _p _q _r 'DecidedUnpack) f) where
glazify _ = error "Unreachable"
For a newtype , we need to whatever it * wraps *
instance Lazifiable c
=> GLazifiable a (D1 ('MetaData x y z 'True) (C1 _m (S1 _o (Rec0 c)))) where
glazify (M1 (M1 (M1 (K1 x)))) = M1 (M1 (M1 (K1 (lazify x))))
instance GLazifiable a (K1 i c) where
glazify x = x
instance GLazifiable a U1 where
glazify _ = U1
instance (GLazifiable a f, GLazifiable a g) => GLazifiable a (f :*: g) where
glazify ~(x :*: y) = glazify @a x :*: glazify @a y
There is no instance for f : + : can only be lazified if
instance TypeError ('Text "Can't lazify " ':<>: 'ShowType a ':<>: 'Text ":"
':$$: 'Text "It is a sum type.")
=> GLazifiable a (f :+: g) where
glazify _ = error "Unreachable"
instance Lazifiable (Proxy a)
instance Lazifiable (Product f g a)
instance Lazifiable a => Lazifiable (Identity a)
instance Lazifiable a => Lazifiable (Const a b)
instance Lazifiable b => Lazifiable (Tagged a b)
instance Lazifiable (f (g a)) => Lazifiable (Compose f g a)
instance Lazifiable (f (g a)) => Lazifiable ((f :.: g) a)
instance Lazifiable a => Lazifiable (S.First a)
instance Lazifiable a => Lazifiable (S.Last a)
instance Lazifiable a => Lazifiable (S.Min a)
instance Lazifiable a => Lazifiable (S.Max a)
instance Lazifiable a => Lazifiable (S.Product a)
instance Lazifiable a => Lazifiable (S.Sum a)
instance Lazifiable a => Lazifiable (S.Dual a)
instance Lazifiable a => Lazifiable (S.WrappedMonoid a)
instance Lazifiable (S.Arg a b)
instance Lazifiable (NonEmpty a)
instance Lazifiable (Tree a)
instance Lazifiable (f a) => Lazifiable (M.Alt f a)
#if MIN_VERSION_base(4,12,0)
instance Lazifiable (f a) => Lazifiable (M.Ap f a)
#endif
instance Lazifiable (f a) => Lazifiable (AppBackwards.Backwards f a)
instance Lazifiable (t a) => Lazifiable (TravReverse.Reverse t a)
instance Lazifiable (f a) => Lazifiable (IdentityT f a)
instance a ~ b => Lazifiable (a :~: b) where
lazify _ = Refl
instance a ~~ b => Lazifiable (a :~~: b) where
lazify _ = HRefl
instance Typeable a => Lazifiable (TypeRep a) where
lazify _ = typeRep
instance Coercible a b => Lazifiable (Coercion a b) where
lazify _ = Coercion
instance Lazifiable ()
#if MIN_VERSION_base (4,15,0)
instance Lazifiable (Solo a)
#endif
instance Lazifiable (a,b)
instance Lazifiable (a,b,c)
instance Lazifiable (a,b,c,d)
instance Lazifiable (a,b,c,d,e)
instance Lazifiable (a,b,c,d,e,f)
instance Lazifiable (a,b,c,d,e,f,g)
versions are too big for GHC to compile away the
cruft .
instance Lazifiable (a,b,c,d,e,f,g,h) where
lazify ~(a,b,c,d,e,f,g,h) = (a,b,c,d,e,f,g,h)
instance Lazifiable (a,b,c,d,e,f,g,h,i) where
lazify ~(a,b,c,d,e,f,g,h,i) = (a,b,c,d,e,f,g,h,i)
instance Lazifiable (a,b,c,d,e,f,g,h,i,j) where
lazify ~(a,b,c,d,e,f,g,h,i,j) = (a,b,c,d,e,f,g,h,i,j)
|
03c9cf7c84f23409e2e652475a4fbdcdbc740c33ada03d2b13a5736d0fd692ea | rurban/clisp | clclock.lisp | ;;; Adapted from -lisp.net/~crhodes/clx by...
Copyright ( C ) 2007 - 2008 < >
This is free software , distributed under the GNU GPL v2 +
(in-package :clx-demos)
(defun required-size (font &optional (extra-width 0) (extra-height 0))
(multiple-value-bind (width-R ascent-R)
(xlib:text-extents font "XVIIII XXXVIIII XXXVIIII")
(multiple-value-bind (width-I ascent-I)
(xlib:text-extents font "WWWW-WW-WW WW:WW:WW")
(values (+ extra-width (max width-R width-I))
(+ extra-height (max ascent-R ascent-I))))))
(defun romanize (arg)
(if (zerop arg)
"O"
(format nil "~@R" arg)))
(defun roman-time-string ()
(multiple-value-bind (s m h) (decode-universal-time (get-universal-time))
(format nil "~a ~a ~a" (romanize h) (romanize m) (romanize s))))
(defun iso-time-string ()
(multiple-value-bind (se mi ho da mo ye)
(decode-universal-time (get-universal-time))
(format nil "~d-~2,'0d-~2,'0dT~2,'0d:~2,'0d:~2,'0d" ye mo da ho mi se)))
(defun clclock (&key (font "fixed") (duration 100) (time-string :roman)
(background "midnightblue") (foreground "yellow")
(x 10) (y 10) (extra-width 20) (extra-height 20))
"Show a digital clock."
(xlib:with-open-display (dpy)
(let* ((screen (xlib:display-default-screen dpy))
(white-pixel (xlib:screen-white-pixel screen))
(colormap (xlib:screen-default-colormap screen))
(bg (xlib:alloc-color colormap
(xlib:lookup-color colormap background)))
(fg (xlib:alloc-color colormap
(xlib:lookup-color colormap foreground)))
(font-o (xlib:open-font dpy font))
(time-string-f (ecase time-string
(:roman #'roman-time-string)
(:iso #'iso-time-string))))
(multiple-value-bind (width height)
(required-size font-o extra-width extra-height)
(let* ((window (xlib:create-window
:parent (xlib:screen-root screen) :x x :y y
:width width :height height :background bg))
(gcontext (xlib:create-gcontext
:drawable window :fill-style :solid
:background white-pixel
:foreground fg :font font-o))
(background (xlib:create-gcontext
:drawable window :fill-style :solid
:background white-pixel
:foreground bg :font font-o)))
(xlib:map-window window)
(loop :for count :upfrom 0 :until (and duration (= count duration))
:for t-string = (funcall time-string-f)
:for string-width = (xlib:text-width gcontext t-string)
:do (xlib:draw-rectangle window background 0 0 width height :fill-p)
(xlib:draw-glyphs window gcontext
(ash (- width string-width extra-width) -1)
(- height (ash extra-height -1))
t-string)
(xlib:display-force-output dpy)
(sleep 1))
(xlib:free-colors colormap (list fg bg))
(xlib:close-font font-o)
(xlib:free-gcontext background)
(xlib:free-gcontext gcontext)
(xlib:unmap-window window)
(xlib:display-force-output dpy))))))
| null | https://raw.githubusercontent.com/rurban/clisp/75ed2995ff8f5364bcc18727cde9438cca4e7c2c/modules/clx/new-clx/demos/clclock.lisp | lisp | Adapted from -lisp.net/~crhodes/clx by... | Copyright ( C ) 2007 - 2008 < >
This is free software , distributed under the GNU GPL v2 +
(in-package :clx-demos)
(defun required-size (font &optional (extra-width 0) (extra-height 0))
(multiple-value-bind (width-R ascent-R)
(xlib:text-extents font "XVIIII XXXVIIII XXXVIIII")
(multiple-value-bind (width-I ascent-I)
(xlib:text-extents font "WWWW-WW-WW WW:WW:WW")
(values (+ extra-width (max width-R width-I))
(+ extra-height (max ascent-R ascent-I))))))
(defun romanize (arg)
(if (zerop arg)
"O"
(format nil "~@R" arg)))
(defun roman-time-string ()
(multiple-value-bind (s m h) (decode-universal-time (get-universal-time))
(format nil "~a ~a ~a" (romanize h) (romanize m) (romanize s))))
(defun iso-time-string ()
(multiple-value-bind (se mi ho da mo ye)
(decode-universal-time (get-universal-time))
(format nil "~d-~2,'0d-~2,'0dT~2,'0d:~2,'0d:~2,'0d" ye mo da ho mi se)))
(defun clclock (&key (font "fixed") (duration 100) (time-string :roman)
(background "midnightblue") (foreground "yellow")
(x 10) (y 10) (extra-width 20) (extra-height 20))
"Show a digital clock."
(xlib:with-open-display (dpy)
(let* ((screen (xlib:display-default-screen dpy))
(white-pixel (xlib:screen-white-pixel screen))
(colormap (xlib:screen-default-colormap screen))
(bg (xlib:alloc-color colormap
(xlib:lookup-color colormap background)))
(fg (xlib:alloc-color colormap
(xlib:lookup-color colormap foreground)))
(font-o (xlib:open-font dpy font))
(time-string-f (ecase time-string
(:roman #'roman-time-string)
(:iso #'iso-time-string))))
(multiple-value-bind (width height)
(required-size font-o extra-width extra-height)
(let* ((window (xlib:create-window
:parent (xlib:screen-root screen) :x x :y y
:width width :height height :background bg))
(gcontext (xlib:create-gcontext
:drawable window :fill-style :solid
:background white-pixel
:foreground fg :font font-o))
(background (xlib:create-gcontext
:drawable window :fill-style :solid
:background white-pixel
:foreground bg :font font-o)))
(xlib:map-window window)
(loop :for count :upfrom 0 :until (and duration (= count duration))
:for t-string = (funcall time-string-f)
:for string-width = (xlib:text-width gcontext t-string)
:do (xlib:draw-rectangle window background 0 0 width height :fill-p)
(xlib:draw-glyphs window gcontext
(ash (- width string-width extra-width) -1)
(- height (ash extra-height -1))
t-string)
(xlib:display-force-output dpy)
(sleep 1))
(xlib:free-colors colormap (list fg bg))
(xlib:close-font font-o)
(xlib:free-gcontext background)
(xlib:free-gcontext gcontext)
(xlib:unmap-window window)
(xlib:display-force-output dpy))))))
|
7cd2e1fe7662765851ff56d8082d84878bf295c340534449514a23bea5b23201 | dschrempf/elynx | Distance.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
-- Description : Compute distances between trees
Copyright : 2021
License : GPL-3.0 - or - later
--
-- Maintainer :
-- Stability : unstable
-- Portability : portable
--
Creation date : We d May 29 18:09:39 2019 .
--
- Symmetric ( Robinson - Foulds ) distance .
-- - Incompatible splits distance.
module TLynx.Distance.Distance
( distance,
)
where
import Control.Monad
( unless,
when,
)
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader hiding (local)
import Data.Bifunctor
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.List hiding (intersect)
import Data.Maybe
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Vector.Unboxed as V
import ELynx.Tools.ByteString
import ELynx.Tools.ELynx
import ELynx.Tools.Environment
import ELynx.Tools.Logger
import ELynx.Tree
import Statistics.Sample
import System.IO
import TLynx.Distance.Options
import TLynx.Parsers
import Text.Printf
median :: Ord a => [a] -> a
median xs = sort xs !! l2 where l2 = length xs `div` 2
pf :: String
pf = "%.3f"
header :: Int -> Int -> DistanceMeasure -> BL.ByteString
header n m d =
alignLeft (n + 2) "Tree 1"
<> alignLeft (n + 2) "Tree 2"
<> alignRight
(m + 2)
(BL.pack $ show d)
showTriplet ::
(PrintfArg a) => Int -> Int -> [String] -> (Int, Int, a) -> BL.ByteString
showTriplet n m args (i, j, d) = i' <> j' <> d'
where
i' = alignLeft (n + 2) $ BL.pack (args !! i)
j' = alignLeft (n + 2) $ BL.pack (args !! j)
d' = alignRight (m + 2) $ BL.pack (printf pf d)
-- Compute pairwise distances of a list of input trees. Use given distance
measure . Returns a triple , the first two elements are the indices of the
compared trees , the third is the distance .
pairwise ::
-- | Distance function
(a -> a -> b) ->
-- | Input trees
[a] ->
-- | (index i, index j, distance i j)
[(Int, Int, b)]
pairwise dist trs =
[ (i, j, dist x y)
| (i : is, x : xs) <- zip (tails [0 ..]) (tails trs),
(j, y) <- zip is xs
]
-- | Compute distance functions between phylogenetic trees.
distance :: ELynx DistanceArguments ()
distance = do
l <- localArguments <$> ask
let nwFormat = argsNewickFormat l
-- Determine output handle (stdout or file).
outH <- outHandle "results" ".out"
-- Master tree (in case it is given).
let mname = argsMasterTreeFile l
mtree <- case mname of
Nothing -> return Nothing
Just f -> do
logInfoS $ "Read master tree from file: " <> f <> "."
t <- liftIO $ parseTree nwFormat f
logInfoS "Compute distances between all trees and master tree."
return $ Just t
let tfps = argsInFiles l
(trees, names) <- case tfps of
[] -> error "No tree input files given."
[tf] -> do
logInfoS "Read trees from single file."
ts <- liftIO $ parseTrees nwFormat tf
logInfoS $ show (length ts) <> " trees found in file."
logInfoS "Trees are indexed with integers."
return (ts, map show [0 .. length ts - 1])
_ -> do
logInfoS "Read trees from files."
ts <- liftIO $ mapM (parseTree nwFormat) tfps
logInfoS "Trees are named according to their file names."
return (ts, tfps)
when (null trees) (error "Not enough trees found in files.")
when
(isNothing mtree && length trees == 1)
(error "Not enough trees found in files.")
-- when (isNothing mtree) $ logInfoS
-- "Compute pairwise distances between trees from different files."
logDebugS "The trees are:"
logDebugB $ BL.unlines $ map toNewick trees
-- Set the distance measure.
let dist = argsDistance l
case argsDistance l of
Symmetric -> logInfoS "Use symmetric (Robinson-Foulds) distance."
IncompatibleSplit val -> do
logInfoS "Use incompatible split distance."
logInfoS $
"Collapse nodes with support less than "
++ show val
++ "."
BranchScore -> logInfoS "Use branch score distance."
let distanceMeasure' ::
Tree Phylo Name ->
Tree Phylo Name ->
Double
distanceMeasure' t1 t2 = either error id $ case dist of
Symmetric -> second fromIntegral $ symmetric t1 t2
IncompatibleSplit val ->
second fromIntegral $
incompatibleSplits
(collapse val $ normalizeBranchSupport $ either error id $ toSupportTree t1)
(collapse val $ normalizeBranchSupport $ either error id $ toSupportTree t2)
BranchScore ->
branchScore
(normalizeF $ either error id $ toLengthTree t1)
(normalizeF $ either error id $ toLengthTree t2)
where
normalizeF = if argsNormalize l then normalizeBranchLengths else id
-- Possibly intersect trees before distance calculation.
when (argsIntersect l) $
logInfoS "Intersect trees before calculation of distances."
let distanceMeasure =
if argsIntersect l
then
( \t1 t2 -> case either error id $ intersect [t1, t2] of
[t1', t2'] -> distanceMeasure' t1' t2'
_ -> error "distance: Could not intersect trees."
)
else distanceMeasure'
-- Possibly normalize trees.
when (argsNormalize l) $
logInfoS "Normalize trees before calculation of distances."
let dsTriplets = case mtree of
Nothing -> pairwise distanceMeasure trees
Just masterTree -> [(0, i, distanceMeasure masterTree t') | (i, t') <- zip [1 ..] trees]
ds = map (\(_, _, x) -> x) dsTriplets
dsVec = V.fromList ds
liftIO $
hPutStrLn outH $
"Summary statistics of "
++ show dist
++ " Distance:"
liftIO $
T.hPutStrLn outH $
T.justifyLeft 10 ' ' "Mean: "
<> T.pack
(printf pf (mean dsVec))
liftIO $
T.hPutStrLn outH $
T.justifyLeft 10 ' ' "Median: "
<> T.pack
(printf pf (median ds))
liftIO $
T.hPutStrLn outH $
T.justifyLeft 10 ' ' "Variance: "
<> T.pack
(printf pf (variance dsVec))
unless
(argsSummaryStatistics l)
( do
let n = maximum $ 6 : map length names
m = length $ show dist
lift $ hPutStrLn outH ""
lift $ BL.hPutStrLn outH $ header n m dist
case mname of
Nothing ->
lift $
BL.hPutStr outH $
BL.unlines
(map (showTriplet n m names) dsTriplets)
Just mn ->
lift $
BL.hPutStr outH $
BL.unlines
(map (showTriplet n m (mn : names)) dsTriplets)
)
liftIO $ hClose outH
| null | https://raw.githubusercontent.com/dschrempf/elynx/4b6907b240d2f91bf7932c79ed38a469b5ed64ed/tlynx/src/TLynx/Distance/Distance.hs | haskell | # LANGUAGE OverloadedStrings #
|
Description : Compute distances between trees
Maintainer :
Stability : unstable
Portability : portable
- Incompatible splits distance.
Compute pairwise distances of a list of input trees. Use given distance
| Distance function
| Input trees
| (index i, index j, distance i j)
| Compute distance functions between phylogenetic trees.
Determine output handle (stdout or file).
Master tree (in case it is given).
when (isNothing mtree) $ logInfoS
"Compute pairwise distances between trees from different files."
Set the distance measure.
Possibly intersect trees before distance calculation.
Possibly normalize trees. |
Copyright : 2021
License : GPL-3.0 - or - later
Creation date : We d May 29 18:09:39 2019 .
- Symmetric ( Robinson - Foulds ) distance .
module TLynx.Distance.Distance
( distance,
)
where
import Control.Monad
( unless,
when,
)
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader hiding (local)
import Data.Bifunctor
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.List hiding (intersect)
import Data.Maybe
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Vector.Unboxed as V
import ELynx.Tools.ByteString
import ELynx.Tools.ELynx
import ELynx.Tools.Environment
import ELynx.Tools.Logger
import ELynx.Tree
import Statistics.Sample
import System.IO
import TLynx.Distance.Options
import TLynx.Parsers
import Text.Printf
median :: Ord a => [a] -> a
median xs = sort xs !! l2 where l2 = length xs `div` 2
pf :: String
pf = "%.3f"
header :: Int -> Int -> DistanceMeasure -> BL.ByteString
header n m d =
alignLeft (n + 2) "Tree 1"
<> alignLeft (n + 2) "Tree 2"
<> alignRight
(m + 2)
(BL.pack $ show d)
showTriplet ::
(PrintfArg a) => Int -> Int -> [String] -> (Int, Int, a) -> BL.ByteString
showTriplet n m args (i, j, d) = i' <> j' <> d'
where
i' = alignLeft (n + 2) $ BL.pack (args !! i)
j' = alignLeft (n + 2) $ BL.pack (args !! j)
d' = alignRight (m + 2) $ BL.pack (printf pf d)
measure . Returns a triple , the first two elements are the indices of the
compared trees , the third is the distance .
pairwise ::
(a -> a -> b) ->
[a] ->
[(Int, Int, b)]
pairwise dist trs =
[ (i, j, dist x y)
| (i : is, x : xs) <- zip (tails [0 ..]) (tails trs),
(j, y) <- zip is xs
]
distance :: ELynx DistanceArguments ()
distance = do
l <- localArguments <$> ask
let nwFormat = argsNewickFormat l
outH <- outHandle "results" ".out"
let mname = argsMasterTreeFile l
mtree <- case mname of
Nothing -> return Nothing
Just f -> do
logInfoS $ "Read master tree from file: " <> f <> "."
t <- liftIO $ parseTree nwFormat f
logInfoS "Compute distances between all trees and master tree."
return $ Just t
let tfps = argsInFiles l
(trees, names) <- case tfps of
[] -> error "No tree input files given."
[tf] -> do
logInfoS "Read trees from single file."
ts <- liftIO $ parseTrees nwFormat tf
logInfoS $ show (length ts) <> " trees found in file."
logInfoS "Trees are indexed with integers."
return (ts, map show [0 .. length ts - 1])
_ -> do
logInfoS "Read trees from files."
ts <- liftIO $ mapM (parseTree nwFormat) tfps
logInfoS "Trees are named according to their file names."
return (ts, tfps)
when (null trees) (error "Not enough trees found in files.")
when
(isNothing mtree && length trees == 1)
(error "Not enough trees found in files.")
logDebugS "The trees are:"
logDebugB $ BL.unlines $ map toNewick trees
let dist = argsDistance l
case argsDistance l of
Symmetric -> logInfoS "Use symmetric (Robinson-Foulds) distance."
IncompatibleSplit val -> do
logInfoS "Use incompatible split distance."
logInfoS $
"Collapse nodes with support less than "
++ show val
++ "."
BranchScore -> logInfoS "Use branch score distance."
let distanceMeasure' ::
Tree Phylo Name ->
Tree Phylo Name ->
Double
distanceMeasure' t1 t2 = either error id $ case dist of
Symmetric -> second fromIntegral $ symmetric t1 t2
IncompatibleSplit val ->
second fromIntegral $
incompatibleSplits
(collapse val $ normalizeBranchSupport $ either error id $ toSupportTree t1)
(collapse val $ normalizeBranchSupport $ either error id $ toSupportTree t2)
BranchScore ->
branchScore
(normalizeF $ either error id $ toLengthTree t1)
(normalizeF $ either error id $ toLengthTree t2)
where
normalizeF = if argsNormalize l then normalizeBranchLengths else id
when (argsIntersect l) $
logInfoS "Intersect trees before calculation of distances."
let distanceMeasure =
if argsIntersect l
then
( \t1 t2 -> case either error id $ intersect [t1, t2] of
[t1', t2'] -> distanceMeasure' t1' t2'
_ -> error "distance: Could not intersect trees."
)
else distanceMeasure'
when (argsNormalize l) $
logInfoS "Normalize trees before calculation of distances."
let dsTriplets = case mtree of
Nothing -> pairwise distanceMeasure trees
Just masterTree -> [(0, i, distanceMeasure masterTree t') | (i, t') <- zip [1 ..] trees]
ds = map (\(_, _, x) -> x) dsTriplets
dsVec = V.fromList ds
liftIO $
hPutStrLn outH $
"Summary statistics of "
++ show dist
++ " Distance:"
liftIO $
T.hPutStrLn outH $
T.justifyLeft 10 ' ' "Mean: "
<> T.pack
(printf pf (mean dsVec))
liftIO $
T.hPutStrLn outH $
T.justifyLeft 10 ' ' "Median: "
<> T.pack
(printf pf (median ds))
liftIO $
T.hPutStrLn outH $
T.justifyLeft 10 ' ' "Variance: "
<> T.pack
(printf pf (variance dsVec))
unless
(argsSummaryStatistics l)
( do
let n = maximum $ 6 : map length names
m = length $ show dist
lift $ hPutStrLn outH ""
lift $ BL.hPutStrLn outH $ header n m dist
case mname of
Nothing ->
lift $
BL.hPutStr outH $
BL.unlines
(map (showTriplet n m names) dsTriplets)
Just mn ->
lift $
BL.hPutStr outH $
BL.unlines
(map (showTriplet n m (mn : names)) dsTriplets)
)
liftIO $ hClose outH
|
b7b3890bb3841c183075a416cf32d8864ffe0699005d70228b4e402144d56fbf | chetmurthy/ensemble | emrg.ml | (**************************************************************)
EMV.ML
Author : , 12/96
Changes : Ohad Rodeh 1/2001
(**************************************************************)
open Printf
(**************************************************************)
type platform =
| Unix
| Other
let platform = match Sys.os_type with
| "Unix" -> Unix
| _ -> Other
let read_lines c =
let lines = ref [] in
try
while true do
let line = input_line c in
lines := line :: !lines
done ; []
with End_of_file ->
List.rev !lines
(**************************************************************)
let path = ref []
let files = ref []
let dest = ref None
let mlfile = ref false
let copy = ref false
let proc_file oc file =
(*
let file =
try find_path !path file with Not_found ->
exit 0
in
*)
let modul = Filename.basename file in
let modul = Filename.chop_extension modul in
let modul = String.capitalize modul in
if not (Sys.file_exists file) then
failwith "file does not exist" ;
if !mlfile then (
if not !copy then (
fprintf oc "module %s = %s\n" modul modul
) else (
fprintf oc "module %s = struct\n" modul ;
let ch = open_in file in
let lines = read_lines ch in
List.iter (fun line ->
fprintf oc " %s\n" line
) lines ;
fprintf oc "end\n\n"
)
) else (
fprintf oc "module %s : sig\n" modul ;
let ch = open_in file in
let lines = read_lines ch in
List.iter (fun line ->
fprintf oc " %s\n" line
) lines ;
fprintf oc "end\n\n"
)
let main () =
Arg.parse [
"-I",Arg.String(fun s -> path := s :: !path),": add dir to path" ;
"-o",Arg.String(fun s -> dest := Some s),"" ;
"-ml",Arg.Set mlfile,"" ;
"-mli",Arg.Clear mlfile,"";
"-copy",Arg.Set copy, "copy ML module bodies"
] (fun file -> files := file :: !files) "" ;
let dest =
match !dest with
| None -> failwith "no destination file given"
| Some dest -> dest
in
if Sys.file_exists dest then
Sys.remove dest ;
let oc = open_out_bin dest in
List.iter (proc_file oc) (List.rev !files) ;
flush oc ;
close_out oc ;
if platform = Unix then (
let cmd = sprintf "chmod -w %s" dest in
let _ = Sys.command cmd in
()
)
let _ = main ()
| null | https://raw.githubusercontent.com/chetmurthy/ensemble/8266a89e68be24a4aaa5d594662e211eeaa6dc89/ensemble/tools/emrg.ml | ocaml | ************************************************************
************************************************************
************************************************************
************************************************************
let file =
try find_path !path file with Not_found ->
exit 0
in
| EMV.ML
Author : , 12/96
Changes : Ohad Rodeh 1/2001
open Printf
type platform =
| Unix
| Other
let platform = match Sys.os_type with
| "Unix" -> Unix
| _ -> Other
let read_lines c =
let lines = ref [] in
try
while true do
let line = input_line c in
lines := line :: !lines
done ; []
with End_of_file ->
List.rev !lines
let path = ref []
let files = ref []
let dest = ref None
let mlfile = ref false
let copy = ref false
let proc_file oc file =
let modul = Filename.basename file in
let modul = Filename.chop_extension modul in
let modul = String.capitalize modul in
if not (Sys.file_exists file) then
failwith "file does not exist" ;
if !mlfile then (
if not !copy then (
fprintf oc "module %s = %s\n" modul modul
) else (
fprintf oc "module %s = struct\n" modul ;
let ch = open_in file in
let lines = read_lines ch in
List.iter (fun line ->
fprintf oc " %s\n" line
) lines ;
fprintf oc "end\n\n"
)
) else (
fprintf oc "module %s : sig\n" modul ;
let ch = open_in file in
let lines = read_lines ch in
List.iter (fun line ->
fprintf oc " %s\n" line
) lines ;
fprintf oc "end\n\n"
)
let main () =
Arg.parse [
"-I",Arg.String(fun s -> path := s :: !path),": add dir to path" ;
"-o",Arg.String(fun s -> dest := Some s),"" ;
"-ml",Arg.Set mlfile,"" ;
"-mli",Arg.Clear mlfile,"";
"-copy",Arg.Set copy, "copy ML module bodies"
] (fun file -> files := file :: !files) "" ;
let dest =
match !dest with
| None -> failwith "no destination file given"
| Some dest -> dest
in
if Sys.file_exists dest then
Sys.remove dest ;
let oc = open_out_bin dest in
List.iter (proc_file oc) (List.rev !files) ;
flush oc ;
close_out oc ;
if platform = Unix then (
let cmd = sprintf "chmod -w %s" dest in
let _ = Sys.command cmd in
()
)
let _ = main ()
|
73e68e38a897fbd17e710ff012debad6f07d6fbf9581c6a77884f31230160cde | thelema/ocaml-community | printtyped.ml | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, INRIA Saclay
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Tublic License version 1.0 .
(* *)
(***********************************************************************)
open Asttypes;;
open Format;;
open Lexing;;
open Location;;
open Typedtree;;
let fmt_position f l =
if l.pos_lnum = -1
then fprintf f "%s[%d]" l.pos_fname l.pos_cnum
else fprintf f "%s[%d,%d+%d]" l.pos_fname l.pos_lnum l.pos_bol
(l.pos_cnum - l.pos_bol)
;;
let fmt_location f loc =
fprintf f "(%a..%a)" fmt_position loc.loc_start fmt_position loc.loc_end;
if loc.loc_ghost then fprintf f " ghost";
;;
let rec fmt_longident_aux f x =
match x with
| Longident.Lident (s) -> fprintf f "%s" s;
| Longident.Ldot (y, s) -> fprintf f "%a.%s" fmt_longident_aux y s;
| Longident.Lapply (y, z) ->
fprintf f "%a(%a)" fmt_longident_aux y fmt_longident_aux z;
;;
let fmt_longident_noloc f x = fprintf f "\"%a\"" fmt_longident_aux x;;
let fmt_longident f x = fprintf f "\"%a\"" fmt_longident_aux x.txt;;
let fmt_ident = Ident.print
let rec fmt_path_aux f x =
match x with
| Path.Pident (s) -> fprintf f "%a" fmt_ident s;
| Path.Pdot (y, s, _pos) -> fprintf f "%a.%s" fmt_path_aux y s;
| Path.Papply (y, z) ->
fprintf f "%a(%a)" fmt_path_aux y fmt_path_aux z;
;;
let fmt_path f x = fprintf f "\"%a\"" fmt_path_aux x;;
let fmt_path_loc f x = fprintf f "\"%a\"" fmt_path_aux x.txt;;
let fmt_constant f x =
match x with
| Const_int (i) -> fprintf f "Const_int %d" i;
| Const_char (c) -> fprintf f "Const_char %02x" (Char.code c);
| Const_string (s) -> fprintf f "Const_string %S" s;
| Const_float (s) -> fprintf f "Const_float %s" s;
| Const_int32 (i) -> fprintf f "Const_int32 %ld" i;
| Const_int64 (i) -> fprintf f "Const_int64 %Ld" i;
| Const_nativeint (i) -> fprintf f "Const_nativeint %nd" i;
;;
let fmt_mutable_flag f x =
match x with
| Immutable -> fprintf f "Immutable";
| Mutable -> fprintf f "Mutable";
;;
let fmt_virtual_flag f x =
match x with
| Virtual -> fprintf f "Virtual";
| Concrete -> fprintf f "Concrete";
;;
let fmt_override_flag f x =
match x with
| Override -> fprintf f "Override";
| Fresh -> fprintf f "Fresh";
;;
let fmt_rec_flag f x =
match x with
| Nonrecursive -> fprintf f "Nonrec";
| Recursive -> fprintf f "Rec";
| Default -> fprintf f "Default";
;;
let fmt_direction_flag f x =
match x with
| Upto -> fprintf f "Up";
| Downto -> fprintf f "Down";
;;
let fmt_private_flag f x =
match x with
| Public -> fprintf f "Public";
| Private -> fprintf f "Private";
;;
let line i f s (*...*) =
fprintf f "%s" (String.make (2*i) ' ');
fprintf f s (*...*)
;;
let list i f ppf l =
match l with
| [] -> line i ppf "[]\n";
| _ :: _ ->
line i ppf "[\n";
List.iter (f (i+1) ppf) l;
line i ppf "]\n";
;;
let option i f ppf x =
match x with
| None -> line i ppf "None\n";
| Some x ->
line i ppf "Some\n";
f (i+1) ppf x;
;;
let longident i ppf li = line i ppf "%a\n" fmt_longident li;;
let path i ppf li = line i ppf "%a\n" fmt_path li;;
let ident i ppf li = line i ppf "%a\n" fmt_ident li;;
let string i ppf s = line i ppf "\"%s\"\n" s;;
let string_loc i ppf s = line i ppf "\"%s\"\n" s.txt;;
let bool i ppf x = line i ppf "%s\n" (string_of_bool x);;
let label i ppf x = line i ppf "label=\"%s\"\n" x;;
let rec core_type i ppf x =
line i ppf "core_type %a\n" fmt_location x.ctyp_loc;
let i = i+1 in
match x.ctyp_desc with
| Ttyp_any -> line i ppf "Ptyp_any\n";
| Ttyp_var (s) -> line i ppf "Ptyp_var %s\n" s;
| Ttyp_arrow (l, ct1, ct2) ->
line i ppf "Ptyp_arrow\n";
string i ppf l;
core_type i ppf ct1;
core_type i ppf ct2;
| Ttyp_tuple l ->
line i ppf "Ptyp_tuple\n";
list i core_type ppf l;
| Ttyp_constr (li, _, l) ->
line i ppf "Ptyp_constr %a\n" fmt_path li;
list i core_type ppf l;
| Ttyp_variant (l, closed, low) ->
line i ppf "Ptyp_variant closed=%s\n" (string_of_bool closed);
list i label_x_bool_x_core_type_list ppf l;
option i (fun i -> list i string) ppf low
| Ttyp_object (l) ->
line i ppf "Ptyp_object\n";
list i core_field_type ppf l;
| Ttyp_class (li, _, l, low) ->
line i ppf "Ptyp_class %a\n" fmt_path li;
list i core_type ppf l;
list i string ppf low
| Ttyp_alias (ct, s) ->
line i ppf "Ptyp_alias \"%s\"\n" s;
core_type i ppf ct;
| Ttyp_poly (sl, ct) ->
line i ppf "Ptyp_poly%a\n"
(fun ppf -> List.iter (fun x -> fprintf ppf " '%s" x)) sl;
core_type i ppf ct;
| Ttyp_package { pack_name = s; pack_fields = l } ->
line i ppf "Ptyp_package %a\n" fmt_path s;
list i package_with ppf l;
and package_with i ppf (s, t) =
line i ppf "with type %a\n" fmt_longident s;
core_type i ppf t
and core_field_type i ppf x =
line i ppf "core_field_type %a\n" fmt_location x.field_loc;
let i = i+1 in
match x.field_desc with
| Tcfield (s, ct) ->
line i ppf "Pfield \"%s\"\n" s;
core_type i ppf ct;
| Tcfield_var -> line i ppf "Pfield_var\n";
and pattern i ppf x =
line i ppf "pattern %a\n" fmt_location x.pat_loc;
let i = i+1 in
match x.pat_extra with
| (Tpat_unpack, _) :: rem ->
line i ppf "Tpat_unpack\n";
pattern i ppf { x with pat_extra = rem }
| (Tpat_constraint cty, _) :: rem ->
line i ppf "Tpat_constraint\n";
core_type i ppf cty;
pattern i ppf { x with pat_extra = rem }
| (Tpat_type (id, _), _) :: rem ->
line i ppf "Tpat_type %a\n" fmt_path id;
pattern i ppf { x with pat_extra = rem }
| [] ->
match x.pat_desc with
| Tpat_any -> line i ppf "Ppat_any\n";
| Tpat_var (s,_) -> line i ppf "Ppat_var \"%a\"\n" fmt_ident s;
| Tpat_alias (p, s,_) ->
line i ppf "Ppat_alias \"%a\"\n" fmt_ident s;
pattern i ppf p;
| Tpat_constant (c) -> line i ppf "Ppat_constant %a\n" fmt_constant c;
| Tpat_tuple (l) ->
line i ppf "Ppat_tuple\n";
list i pattern ppf l;
| Tpat_construct (li, _, po, explicity_arity) ->
line i ppf "Ppat_construct %a\n" fmt_longident li;
list i pattern ppf po;
bool i ppf explicity_arity;
| Tpat_variant (l, po, _) ->
line i ppf "Ppat_variant \"%s\"\n" l;
option i pattern ppf po;
| Tpat_record (l, c) ->
line i ppf "Ppat_record\n";
list i longident_x_pattern ppf l;
| Tpat_array (l) ->
line i ppf "Ppat_array\n";
list i pattern ppf l;
| Tpat_or (p1, p2, _) ->
line i ppf "Ppat_or\n";
pattern i ppf p1;
pattern i ppf p2;
| Tpat_lazy p ->
line i ppf "Ppat_lazy\n";
pattern i ppf p;
and expression_extra i ppf x =
match x with
| Texp_constraint (cto1, cto2) ->
line i ppf "Pexp_constraint\n";
option i core_type ppf cto1;
option i core_type ppf cto2;
| Texp_open (m, _, _) ->
line i ppf "Pexp_open \"%a\"\n" fmt_path m;
| Texp_poly cto ->
line i ppf "Pexp_poly\n";
option i core_type ppf cto;
| Texp_newtype s ->
line i ppf "Pexp_newtype \"%s\"\n" s;
and expression i ppf x =
line i ppf "expression %a\n" fmt_location x.exp_loc;
let i =
List.fold_left (fun i (extra,_) -> expression_extra i ppf extra; i+1)
(i+1) x.exp_extra
in
match x.exp_desc with
| Texp_ident (li,_,_) -> line i ppf "Pexp_ident %a\n" fmt_path li;
| Texp_instvar (_, li,_) -> line i ppf "Pexp_instvar %a\n" fmt_path li;
| Texp_constant (c) -> line i ppf "Pexp_constant %a\n" fmt_constant c;
| Texp_let (rf, l, e) ->
line i ppf "Pexp_let %a\n" fmt_rec_flag rf;
list i pattern_x_expression_def ppf l;
expression i ppf e;
| Texp_function (p, l, _partial) ->
line i ppf "Pexp_function \"%s\"\n" p;
(* option i expression ppf eo; *)
list i pattern_x_expression_case ppf l;
| Texp_apply (e, l) ->
line i ppf "Pexp_apply\n";
expression i ppf e;
list i label_x_expression ppf l;
| Texp_match (e, l, partial) ->
line i ppf "Pexp_match\n";
expression i ppf e;
list i pattern_x_expression_case ppf l;
| Texp_try (e, l) ->
line i ppf "Pexp_try\n";
expression i ppf e;
list i pattern_x_expression_case ppf l;
| Texp_tuple (l) ->
line i ppf "Pexp_tuple\n";
list i expression ppf l;
| Texp_construct (li, _, eo, b) ->
line i ppf "Pexp_construct %a\n" fmt_longident li;
list i expression ppf eo;
bool i ppf b;
| Texp_variant (l, eo) ->
line i ppf "Pexp_variant \"%s\"\n" l;
option i expression ppf eo;
| Texp_record (l, eo) ->
line i ppf "Pexp_record\n";
list i longident_x_expression ppf l;
option i expression ppf eo;
| Texp_field (e, li, _) ->
line i ppf "Pexp_field\n";
expression i ppf e;
longident i ppf li;
| Texp_setfield (e1, li, _, e2) ->
line i ppf "Pexp_setfield\n";
expression i ppf e1;
longident i ppf li;
expression i ppf e2;
| Texp_array (l) ->
line i ppf "Pexp_array\n";
list i expression ppf l;
| Texp_ifthenelse (e1, e2, eo) ->
line i ppf "Pexp_ifthenelse\n";
expression i ppf e1;
expression i ppf e2;
option i expression ppf eo;
| Texp_sequence (e1, e2) ->
line i ppf "Pexp_sequence\n";
expression i ppf e1;
expression i ppf e2;
| Texp_while (e1, e2) ->
line i ppf "Pexp_while\n";
expression i ppf e1;
expression i ppf e2;
| Texp_for (s, _, e1, e2, df, e3) ->
line i ppf "Pexp_for \"%a\" %a\n" fmt_ident s fmt_direction_flag df;
expression i ppf e1;
expression i ppf e2;
expression i ppf e3;
| Texp_when (e1, e2) ->
line i ppf "Pexp_when\n";
expression i ppf e1;
expression i ppf e2;
| Texp_send (e, Tmeth_name s, eo) ->
line i ppf "Pexp_send \"%s\"\n" s;
expression i ppf e;
option i expression ppf eo
| Texp_send (e, Tmeth_val s, eo) ->
line i ppf "Pexp_send \"%a\"\n" fmt_ident s;
expression i ppf e;
option i expression ppf eo
| Texp_new (li, _, _) -> line i ppf "Pexp_new %a\n" fmt_path li;
| Texp_setinstvar (_, s, _, e) ->
line i ppf "Pexp_setinstvar \"%a\"\n" fmt_path s;
expression i ppf e;
| Texp_override (_, l) ->
line i ppf "Pexp_override\n";
list i string_x_expression ppf l;
| Texp_letmodule (s, _, me, e) ->
line i ppf "Pexp_letmodule \"%a\"\n" fmt_ident s;
module_expr i ppf me;
expression i ppf e;
| Texp_assert (e) ->
line i ppf "Pexp_assert";
expression i ppf e;
| Texp_assertfalse ->
line i ppf "Pexp_assertfalse";
| Texp_lazy (e) ->
line i ppf "Pexp_lazy";
expression i ppf e;
| Texp_object (s, _) ->
line i ppf "Pexp_object";
class_structure i ppf s
| Texp_pack me ->
line i ppf "Pexp_pack";
module_expr i ppf me
and value_description i ppf x =
line i ppf "value_description\n";
core_type (i+1) ppf x.val_desc;
list (i+1) string ppf x.val_prim;
and string_option_underscore i ppf =
function
| Some x ->
string i ppf x.txt
| None ->
string i ppf "_"
(* Print a type declaration: its parameters, constraints, kind,
   privacy flag and manifest.  Labels mirror the parsetree field names
   (ptype_*) so output stays comparable with the parsetree printer. *)
and type_declaration i ppf x =
  line i ppf "type_declaration %a\n" fmt_location x.typ_loc;
  let i = i+1 in
  line i ppf "ptype_params =\n";
  list (i+1) string_option_underscore ppf x.typ_params;
  line i ppf "ptype_cstrs =\n";
  list (i+1) core_type_x_core_type_x_location ppf x.typ_cstrs;
  line i ppf "ptype_kind =\n";
  type_kind (i+1) ppf x.typ_kind;
  line i ppf "ptype_private = %a\n" fmt_private_flag x.typ_private;
  line i ppf "ptype_manifest =\n";
  option (i+1) core_type ppf x.typ_manifest;
(* Print a type kind: abstract, variant (constructor list) or record. *)
and type_kind i ppf x =
  match x with
  | Ttype_abstract ->
      line i ppf "Ptype_abstract\n"
  | Ttype_variant l ->
      line i ppf "Ptype_variant\n";
      list (i+1) string_x_core_type_list_x_location ppf l;
  | Ttype_record l ->
      line i ppf "Ptype_record\n";
      list (i+1) string_x_mutable_flag_x_core_type_x_location ppf l;
(* An exception declaration is printed as the list of its argument types. *)
and exception_declaration i ppf x = list i core_type ppf x
(* Print a class type: a constructor application, a signature,
   or a function ("label -> type -> class-type") arrow. *)
and class_type i ppf x =
  line i ppf "class_type %a\n" fmt_location x.cltyp_loc;
  let i = i+1 in
  match x.cltyp_desc with
  | Tcty_constr (li, _, l) ->
      line i ppf "Pcty_constr %a\n" fmt_path li;
      list i core_type ppf l;
  | Tcty_signature (cs) ->
      line i ppf "Pcty_signature\n";
      class_signature i ppf cs;
  | Tcty_fun (l, co, cl) ->
      line i ppf "Pcty_fun \"%s\"\n" l;
      core_type i ppf co;
      class_type i ppf cl;
(* Print a class signature: the self type, then each field one level deeper. *)
and class_signature i ppf cs =
  line i ppf "class_signature\n";
  core_type (i+1) ppf cs.csig_self;
  list (i+1) class_type_field ppf cs.csig_fields;
(* Print one class-type field: inheritance, (virtual) value,
   virtual/concrete method, or a type constraint. *)
and class_type_field i ppf x =
  let loc = x.ctf_loc in
  match x.ctf_desc with
  | Tctf_inher (ct) ->
      line i ppf "Pctf_inher\n";
      class_type i ppf ct;
  | Tctf_val (s, mf, vf, ct) ->
      line i ppf
        "Pctf_val \"%s\" %a %a %a\n" s
        fmt_mutable_flag mf fmt_virtual_flag vf fmt_location loc;
      core_type (i+1) ppf ct;
  | Tctf_virt (s, pf, ct) ->
      line i ppf
        "Pctf_virt \"%s\" %a %a\n" s fmt_private_flag pf fmt_location loc;
      core_type (i+1) ppf ct;
  | Tctf_meth (s, pf, ct) ->
      line i ppf
        "Pctf_meth \"%s\" %a %a\n" s fmt_private_flag pf fmt_location loc;
      core_type (i+1) ppf ct;
  | Tctf_cstr (ct1, ct2) ->
      line i ppf "Pctf_cstr %a\n" fmt_location loc;
      core_type i ppf ct1;
      core_type i ppf ct2;
(* Print a class description (class declarations inside signatures).
   Same shape as class_type_declaration except the body is a class_type. *)
and class_description i ppf x =
  line i ppf "class_description %a\n" fmt_location x.ci_loc;
  let i = i+1 in
  line i ppf "pci_virt = %a\n" fmt_virtual_flag x.ci_virt;
  line i ppf "pci_params =\n";
  string_list_x_location (i+1) ppf x.ci_params;
  line i ppf "pci_name = \"%s\"\n" x.ci_id_name.txt;
  line i ppf "pci_expr =\n";
  class_type (i+1) ppf x.ci_expr;
(* Print a class-type declaration; structurally identical to
   class_description apart from the heading line. *)
and class_type_declaration i ppf x =
  line i ppf "class_type_declaration %a\n" fmt_location x.ci_loc;
  let i = i+1 in
  line i ppf "pci_virt = %a\n" fmt_virtual_flag x.ci_virt;
  line i ppf "pci_params =\n";
  string_list_x_location (i+1) ppf x.ci_params;
  line i ppf "pci_name = \"%s\"\n" x.ci_id_name.txt;
  line i ppf "pci_expr =\n";
  class_type (i+1) ppf x.ci_expr;
(* Print a class expression.  The Tcl_fun printing code is still TODO
   and is kept commented out; in the damaged source the comment opener
   had been lost, leaving an unbalanced comment terminator — restored
   here so the region is a single balanced comment again. *)
and class_expr i ppf x =
  line i ppf "class_expr %a\n" fmt_location x.cl_loc;
  let i = i+1 in
  match x.cl_desc with
  | Tcl_ident (li, _, l) ->
      line i ppf "Pcl_constr %a\n" fmt_path li;
      list i core_type ppf l;
  | Tcl_structure (cs) ->
      line i ppf "Pcl_structure\n";
      class_structure i ppf cs;
  (* TODO: Tcl_fun case
     line i ppf "Pcl_fun\n";
     label i ppf l;
     option i expression ppf eo;
     pattern i ppf p;
     class_expr i ppf e;
  *)
  | Tcl_apply (ce, l) ->
      line i ppf "Pcl_apply\n";
      class_expr i ppf ce;
      list i label_x_expression ppf l;
  | Tcl_let (rf, l1, l2, ce) ->
      line i ppf "Pcl_let %a\n" fmt_rec_flag rf;
      list i pattern_x_expression_def ppf l1;
      list i ident_x_loc_x_expression_def ppf l2;
      class_expr i ppf ce;
  | Tcl_constraint (ce, Some ct, _, _, _) ->
      line i ppf "Pcl_constraint\n";
      class_expr i ppf ce;
      class_type i ppf ct;
  | Tcl_constraint (_, None, _, _, _) ->
      assert false (* TODO: is it possible? see how this is produced *)
(* Print a class body: the self pattern, then each field. *)
and class_structure i ppf { cstr_pat = p; cstr_fields = l } =
  line i ppf "class_structure\n";
  pattern (i+1) ppf p;
  list (i+1) class_field ppf l;
(* Print one class field.  NOTE(review): this region was damaged in the
   source — the function header was missing, a whitespace-mangled
   duplicate of the match arms was interleaved, and a stray comment
   terminator was left dangling.  Reconstructed from the intact copy of
   the arms; confirm against upstream printtyped.ml. *)
and class_field i ppf x =
  let loc = x.cf_loc in
  match x.cf_desc with
  | Tcf_inher (ovf, ce, so) ->
      line i ppf "Pcf_inher %a\n" fmt_override_flag ovf;
      class_expr (i+1) ppf ce;
      option (i+1) string ppf so;
  | Tcf_valvirt (s, mf, ct) ->
      line i ppf "Pcf_valvirt \"%s\" %a %a\n"
        s.txt fmt_mutable_flag mf fmt_location loc;
      core_type (i+1) ppf ct;
  | Tcf_val (s, mf, ovf, e) ->
      line i ppf "Pcf_val \"%s\" %a %a %a\n"
        s.txt fmt_mutable_flag mf fmt_override_flag ovf fmt_location loc;
      expression (i+1) ppf e;
  | Tcf_virt (s, pf, ct) ->
      line i ppf "Pcf_virt \"%s\" %a %a\n"
        s.txt fmt_private_flag pf fmt_location loc;
      core_type (i+1) ppf ct;
  | Tcf_meth (s, pf, ovf, e) ->
      line i ppf "Pcf_meth \"%s\" %a %a %a\n"
        s.txt fmt_private_flag pf fmt_override_flag ovf fmt_location loc;
      expression (i+1) ppf e;
  | Tcf_constr (ct1, ct2) ->
      line i ppf "Pcf_constr %a\n" fmt_location loc;
      core_type (i+1) ppf ct1;
      core_type (i+1) ppf ct2;
  | Tcf_init (e) ->
      line i ppf "Pcf_init\n";
      expression (i+1) ppf e;
(* Print a class declaration (structure items); the body is a class_expr,
   unlike class_description/class_type_declaration which hold class_types. *)
and class_declaration i ppf x =
  line i ppf "class_declaration %a\n" fmt_location x.ci_loc;
  let i = i+1 in
  line i ppf "pci_virt = %a\n" fmt_virtual_flag x.ci_virt;
  line i ppf "pci_params =\n";
  string_list_x_location (i+1) ppf x.ci_params;
  line i ppf "pci_name = \"%s\"\n" x.ci_id_name.txt;
  line i ppf "pci_expr =\n";
  class_expr (i+1) ppf x.ci_expr;
(* Print a module type: identifier, signature, functor, "with"
   constraints, or "module type of". *)
and module_type i ppf x =
  line i ppf "module_type %a\n" fmt_location x.mty_loc;
  let i = i+1 in
  match x.mty_desc with
  | Tmty_ident (li,_) -> line i ppf "Pmty_ident %a\n" fmt_path li;
  | Tmty_signature (s) ->
      line i ppf "Pmty_signature\n";
      signature i ppf s;
  | Tmty_functor (s, _, mt1, mt2) ->
      line i ppf "Pmty_functor \"%a\"\n" fmt_ident s;
      module_type i ppf mt1;
      module_type i ppf mt2;
  | Tmty_with (mt, l) ->
      line i ppf "Pmty_with\n";
      module_type i ppf mt;
      list i longident_x_with_constraint ppf l;
  | Tmty_typeof m ->
      line i ppf "Pmty_typeof\n";
      module_expr i ppf m;
(* A signature is printed as the list of its items. *)
and signature i ppf x = list i signature_item ppf x.sig_items
(* Print one signature item, dispatching on its description.
   Labels use the parsetree names (Psig_*) for comparability. *)
and signature_item i ppf x =
  line i ppf "signature_item %a\n" fmt_location x.sig_loc;
  let i = i+1 in
  match x.sig_desc with
  | Tsig_value (s, _, vd) ->
      line i ppf "Psig_value \"%a\"\n" fmt_ident s;
      value_description i ppf vd;
  | Tsig_type (l) ->
      line i ppf "Psig_type\n";
      list i string_x_type_declaration ppf l;
  | Tsig_exception (s, _, ed) ->
      line i ppf "Psig_exception \"%a\"\n" fmt_ident s;
      exception_declaration i ppf ed.exn_params;
  | Tsig_module (s, _, mt) ->
      line i ppf "Psig_module \"%a\"\n" fmt_ident s;
      module_type i ppf mt;
  | Tsig_recmodule decls ->
      line i ppf "Psig_recmodule\n";
      list i string_x_module_type ppf decls;
  | Tsig_modtype (s, _, md) ->
      line i ppf "Psig_modtype \"%a\"\n" fmt_ident s;
      modtype_declaration i ppf md;
  | Tsig_open (li,_) -> line i ppf "Psig_open %a\n" fmt_path li;
  | Tsig_include (mt, _) ->
      line i ppf "Psig_include\n";
      module_type i ppf mt;
  | Tsig_class (l) ->
      line i ppf "Psig_class\n";
      list i class_description ppf l;
  | Tsig_class_type (l) ->
      line i ppf "Psig_class_type\n";
      list i class_type_declaration ppf l;
(* Print a module-type declaration: abstract, or manifest with its body. *)
and modtype_declaration i ppf x =
  match x with
  | Tmodtype_abstract -> line i ppf "Pmodtype_abstract\n";
  | Tmodtype_manifest (mt) ->
      line i ppf "Pmodtype_manifest\n";
      module_type (i+1) ppf mt;
(* Print a "with" constraint: type or module, equation or substitution. *)
and with_constraint i ppf x =
  match x with
  | Twith_type (td) ->
      line i ppf "Pwith_type\n";
      type_declaration (i+1) ppf td;
  | Twith_typesubst (td) ->
      line i ppf "Pwith_typesubst\n";
      type_declaration (i+1) ppf td;
  | Twith_module (li,_) -> line i ppf "Pwith_module %a\n" fmt_path li;
  | Twith_modsubst (li,_) -> line i ppf "Pwith_modsubst %a\n" fmt_path li;
(* Print a module expression.  The implicit-constraint case is still
   TODO and is kept commented out; in the damaged source the comment
   opener had been lost, leaving an unbalanced comment terminator —
   restored here so the region is one balanced comment again. *)
and module_expr i ppf x =
  line i ppf "module_expr %a\n" fmt_location x.mod_loc;
  let i = i+1 in
  match x.mod_desc with
  | Tmod_ident (li,_) -> line i ppf "Pmod_ident %a\n" fmt_path li;
  | Tmod_structure (s) ->
      line i ppf "Pmod_structure\n";
      structure i ppf s;
  | Tmod_functor (s, _, mt, me) ->
      line i ppf "Pmod_functor \"%a\"\n" fmt_ident s;
      module_type i ppf mt;
      module_expr i ppf me;
  | Tmod_apply (me1, me2, _) ->
      line i ppf "Pmod_apply\n";
      module_expr i ppf me1;
      module_expr i ppf me2;
  | Tmod_constraint (me, _, Tmodtype_explicit mt, _) ->
      line i ppf "Pmod_constraint\n";
      module_expr i ppf me;
      module_type i ppf mt;
  (* TODO: implicit constraint case
     line i ppf "Pmod_constraint\n";
     module_expr i ppf me;
     module_type i ppf mt;
  *)
  | Tmod_unpack (e, _) ->
      line i ppf "Pmod_unpack\n";
      expression i ppf e;
(* A structure is printed as the list of its items. *)
and structure i ppf x = list i structure_item ppf x.str_items
(* Print one structure item, dispatching on its description.
   Labels use the parsetree names (Pstr_*) for comparability. *)
and structure_item i ppf x =
  line i ppf "structure_item %a\n" fmt_location x.str_loc;
  let i = i+1 in
  match x.str_desc with
  | Tstr_eval (e) ->
      line i ppf "Pstr_eval\n";
      expression i ppf e;
  | Tstr_value (rf, l) ->
      line i ppf "Pstr_value %a\n" fmt_rec_flag rf;
      list i pattern_x_expression_def ppf l;
  | Tstr_primitive (s, _, vd) ->
      line i ppf "Pstr_primitive \"%a\"\n" fmt_ident s;
      value_description i ppf vd;
  | Tstr_type l ->
      line i ppf "Pstr_type\n";
      list i string_x_type_declaration ppf l;
  | Tstr_exception (s, _, ed) ->
      line i ppf "Pstr_exception \"%a\"\n" fmt_ident s;
      exception_declaration i ppf ed.exn_params;
  | Tstr_exn_rebind (s, _, li, _) ->
      line i ppf "Pstr_exn_rebind \"%a\" %a\n" fmt_ident s fmt_path li;
  | Tstr_module (s, _, me) ->
      line i ppf "Pstr_module \"%a\"\n" fmt_ident s;
      module_expr i ppf me;
  | Tstr_recmodule bindings ->
      line i ppf "Pstr_recmodule\n";
      list i string_x_modtype_x_module ppf bindings;
  | Tstr_modtype (s, _, mt) ->
      line i ppf "Pstr_modtype \"%a\"\n" fmt_ident s;
      module_type i ppf mt;
  | Tstr_open (li, _) -> line i ppf "Pstr_open %a\n" fmt_path li;
  | Tstr_class (l) ->
      line i ppf "Pstr_class\n";
      list i class_declaration ppf (List.map (fun (cl, _,_) -> cl) l);
  | Tstr_class_type (l) ->
      line i ppf "Pstr_class_type\n";
      list i class_type_declaration ppf (List.map (fun (_, _, cl) -> cl) l);
  | Tstr_include (me, _) ->
      (* NOTE(review): "Pstr_include" lacks the trailing "\n" that every
         other label here has — confirm against upstream before changing. *)
      line i ppf "Pstr_include";
      module_expr i ppf me
(* Helper printers for the tuple shapes that occur in the lists above:
   each prints its identifier/label line, then recurses one level deeper. *)
and string_x_type_declaration i ppf (s, _, td) =
  ident i ppf s;
  type_declaration (i+1) ppf td;
and string_x_module_type i ppf (s, _, mty) =
  ident i ppf s;
  module_type (i+1) ppf mty;
and string_x_modtype_x_module i ppf (s, _, mty, modl) =
  ident i ppf s;
  module_type (i+1) ppf mty;
  module_expr (i+1) ppf modl;
and longident_x_with_constraint i ppf (li, _, wc) =
  line i ppf "%a\n" fmt_path li;
  with_constraint (i+1) ppf wc;
and core_type_x_core_type_x_location i ppf (ct1, ct2, l) =
  line i ppf "<constraint> %a\n" fmt_location l;
  core_type (i+1) ppf ct1;
  core_type (i+1) ppf ct2;
(* NOTE(review): r_opt is currently ignored except for the commented-out
   line below — confirm that is intentional. *)
and string_x_core_type_list_x_location i ppf (s, _, l, r_opt) =
  line i ppf "\"%a\"\n" fmt_ident s;
  list (i+1) core_type ppf l;
  (* option (i+1) core_type ppf r_opt; *)
and string_x_mutable_flag_x_core_type_x_location i ppf (s, _, mf, ct, loc) =
  line i ppf "\"%a\" %a %a\n" fmt_ident s fmt_mutable_flag mf fmt_location loc;
  core_type (i+1) ppf ct;
and string_list_x_location i ppf (l, loc) =
  line i ppf "<params> %a\n" fmt_location loc;
  list (i+1) string_loc ppf l;
and longident_x_pattern i ppf (li, _, p) =
  line i ppf "%a\n" fmt_longident li;
  pattern (i+1) ppf p;
(* More tuple-shape helpers: match cases, let definitions, overrides,
   labelled arguments, and polymorphic-variant row fields. *)
and pattern_x_expression_case i ppf (p, e) =
  line i ppf "<case>\n";
  pattern (i+1) ppf p;
  expression (i+1) ppf e;
and pattern_x_expression_def i ppf (p, e) =
  line i ppf "<def>\n";
  pattern (i+1) ppf p;
  expression (i+1) ppf e;
and string_x_expression i ppf (s, _, e) =
  line i ppf "<override> \"%a\"\n" fmt_path s;
  expression (i+1) ppf e;
and longident_x_expression i ppf (li, _, e) =
  line i ppf "%a\n" fmt_longident li;
  expression (i+1) ppf e;
and label_x_expression i ppf (l, e, _) =
  line i ppf "<label> \"%s\"\n" l;
  (* omitted arguments carry no expression; print nothing for them *)
  (match e with None -> () | Some e -> expression (i+1) ppf e)
and ident_x_loc_x_expression_def i ppf (l,_, e) =
  line i ppf "<def> \"%a\"\n" fmt_ident l;
  expression (i+1) ppf e;
and label_x_bool_x_core_type_list i ppf x =
  match x with
    Ttag (l, b, ctl) ->
      line i ppf "Rtag \"%s\" %s\n" l (string_of_bool b);
      list (i+1) core_type ppf ctl
  | Tinherit (ct) ->
      line i ppf "Rinherit\n";
      core_type (i+1) ppf ct
;;
(* Public entry points: dump a typed interface or implementation,
   starting at indentation level 0. *)
let interface ppf x = list 0 signature_item ppf x.sig_items;;
let implementation ppf x = list 0 structure_item ppf x.str_items;;
(* The module coercion attached to an implementation is not printed. *)
let implementation_with_coercion ppf (x, _) = implementation ppf x
| null | https://raw.githubusercontent.com/thelema/ocaml-community/ed0a2424bbf13d1b33292725e089f0d7ba94b540/typing/printtyped.ml | ocaml | *********************************************************************
OCaml
*********************************************************************
...
...
option i expression ppf eo;
option (i+1) core_type ppf r_opt; | , INRIA Saclay
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Tublic License version 1.0 .
open Asttypes;;
open Format;;
open Lexing;;
open Location;;
open Typedtree;;
let fmt_position f l =
if l.pos_lnum = -1
then fprintf f "%s[%d]" l.pos_fname l.pos_cnum
else fprintf f "%s[%d,%d+%d]" l.pos_fname l.pos_lnum l.pos_bol
(l.pos_cnum - l.pos_bol)
;;
let fmt_location f loc =
fprintf f "(%a..%a)" fmt_position loc.loc_start fmt_position loc.loc_end;
if loc.loc_ghost then fprintf f " ghost";
;;
let rec fmt_longident_aux f x =
match x with
| Longident.Lident (s) -> fprintf f "%s" s;
| Longident.Ldot (y, s) -> fprintf f "%a.%s" fmt_longident_aux y s;
| Longident.Lapply (y, z) ->
fprintf f "%a(%a)" fmt_longident_aux y fmt_longident_aux z;
;;
let fmt_longident_noloc f x = fprintf f "\"%a\"" fmt_longident_aux x;;
let fmt_longident f x = fprintf f "\"%a\"" fmt_longident_aux x.txt;;
let fmt_ident = Ident.print
let rec fmt_path_aux f x =
match x with
| Path.Pident (s) -> fprintf f "%a" fmt_ident s;
| Path.Pdot (y, s, _pos) -> fprintf f "%a.%s" fmt_path_aux y s;
| Path.Papply (y, z) ->
fprintf f "%a(%a)" fmt_path_aux y fmt_path_aux z;
;;
let fmt_path f x = fprintf f "\"%a\"" fmt_path_aux x;;
let fmt_path_loc f x = fprintf f "\"%a\"" fmt_path_aux x.txt;;
let fmt_constant f x =
match x with
| Const_int (i) -> fprintf f "Const_int %d" i;
| Const_char (c) -> fprintf f "Const_char %02x" (Char.code c);
| Const_string (s) -> fprintf f "Const_string %S" s;
| Const_float (s) -> fprintf f "Const_float %s" s;
| Const_int32 (i) -> fprintf f "Const_int32 %ld" i;
| Const_int64 (i) -> fprintf f "Const_int64 %Ld" i;
| Const_nativeint (i) -> fprintf f "Const_nativeint %nd" i;
;;
let fmt_mutable_flag f x =
match x with
| Immutable -> fprintf f "Immutable";
| Mutable -> fprintf f "Mutable";
;;
let fmt_virtual_flag f x =
match x with
| Virtual -> fprintf f "Virtual";
| Concrete -> fprintf f "Concrete";
;;
let fmt_override_flag f x =
match x with
| Override -> fprintf f "Override";
| Fresh -> fprintf f "Fresh";
;;
let fmt_rec_flag f x =
match x with
| Nonrecursive -> fprintf f "Nonrec";
| Recursive -> fprintf f "Rec";
| Default -> fprintf f "Default";
;;
let fmt_direction_flag f x =
match x with
| Upto -> fprintf f "Up";
| Downto -> fprintf f "Down";
;;
let fmt_private_flag f x =
match x with
| Public -> fprintf f "Public";
| Private -> fprintf f "Private";
;;
fprintf f "%s" (String.make (2*i) ' ');
;;
let list i f ppf l =
match l with
| [] -> line i ppf "[]\n";
| _ :: _ ->
line i ppf "[\n";
List.iter (f (i+1) ppf) l;
line i ppf "]\n";
;;
let option i f ppf x =
match x with
| None -> line i ppf "None\n";
| Some x ->
line i ppf "Some\n";
f (i+1) ppf x;
;;
let longident i ppf li = line i ppf "%a\n" fmt_longident li;;
let path i ppf li = line i ppf "%a\n" fmt_path li;;
let ident i ppf li = line i ppf "%a\n" fmt_ident li;;
let string i ppf s = line i ppf "\"%s\"\n" s;;
let string_loc i ppf s = line i ppf "\"%s\"\n" s.txt;;
let bool i ppf x = line i ppf "%s\n" (string_of_bool x);;
let label i ppf x = line i ppf "label=\"%s\"\n" x;;
let rec core_type i ppf x =
line i ppf "core_type %a\n" fmt_location x.ctyp_loc;
let i = i+1 in
match x.ctyp_desc with
| Ttyp_any -> line i ppf "Ptyp_any\n";
| Ttyp_var (s) -> line i ppf "Ptyp_var %s\n" s;
| Ttyp_arrow (l, ct1, ct2) ->
line i ppf "Ptyp_arrow\n";
string i ppf l;
core_type i ppf ct1;
core_type i ppf ct2;
| Ttyp_tuple l ->
line i ppf "Ptyp_tuple\n";
list i core_type ppf l;
| Ttyp_constr (li, _, l) ->
line i ppf "Ptyp_constr %a\n" fmt_path li;
list i core_type ppf l;
| Ttyp_variant (l, closed, low) ->
line i ppf "Ptyp_variant closed=%s\n" (string_of_bool closed);
list i label_x_bool_x_core_type_list ppf l;
option i (fun i -> list i string) ppf low
| Ttyp_object (l) ->
line i ppf "Ptyp_object\n";
list i core_field_type ppf l;
| Ttyp_class (li, _, l, low) ->
line i ppf "Ptyp_class %a\n" fmt_path li;
list i core_type ppf l;
list i string ppf low
| Ttyp_alias (ct, s) ->
line i ppf "Ptyp_alias \"%s\"\n" s;
core_type i ppf ct;
| Ttyp_poly (sl, ct) ->
line i ppf "Ptyp_poly%a\n"
(fun ppf -> List.iter (fun x -> fprintf ppf " '%s" x)) sl;
core_type i ppf ct;
| Ttyp_package { pack_name = s; pack_fields = l } ->
line i ppf "Ptyp_package %a\n" fmt_path s;
list i package_with ppf l;
and package_with i ppf (s, t) =
line i ppf "with type %a\n" fmt_longident s;
core_type i ppf t
and core_field_type i ppf x =
line i ppf "core_field_type %a\n" fmt_location x.field_loc;
let i = i+1 in
match x.field_desc with
| Tcfield (s, ct) ->
line i ppf "Pfield \"%s\"\n" s;
core_type i ppf ct;
| Tcfield_var -> line i ppf "Pfield_var\n";
and pattern i ppf x =
line i ppf "pattern %a\n" fmt_location x.pat_loc;
let i = i+1 in
match x.pat_extra with
| (Tpat_unpack, _) :: rem ->
line i ppf "Tpat_unpack\n";
pattern i ppf { x with pat_extra = rem }
| (Tpat_constraint cty, _) :: rem ->
line i ppf "Tpat_constraint\n";
core_type i ppf cty;
pattern i ppf { x with pat_extra = rem }
| (Tpat_type (id, _), _) :: rem ->
line i ppf "Tpat_type %a\n" fmt_path id;
pattern i ppf { x with pat_extra = rem }
| [] ->
match x.pat_desc with
| Tpat_any -> line i ppf "Ppat_any\n";
| Tpat_var (s,_) -> line i ppf "Ppat_var \"%a\"\n" fmt_ident s;
| Tpat_alias (p, s,_) ->
line i ppf "Ppat_alias \"%a\"\n" fmt_ident s;
pattern i ppf p;
| Tpat_constant (c) -> line i ppf "Ppat_constant %a\n" fmt_constant c;
| Tpat_tuple (l) ->
line i ppf "Ppat_tuple\n";
list i pattern ppf l;
| Tpat_construct (li, _, po, explicity_arity) ->
line i ppf "Ppat_construct %a\n" fmt_longident li;
list i pattern ppf po;
bool i ppf explicity_arity;
| Tpat_variant (l, po, _) ->
line i ppf "Ppat_variant \"%s\"\n" l;
option i pattern ppf po;
| Tpat_record (l, c) ->
line i ppf "Ppat_record\n";
list i longident_x_pattern ppf l;
| Tpat_array (l) ->
line i ppf "Ppat_array\n";
list i pattern ppf l;
| Tpat_or (p1, p2, _) ->
line i ppf "Ppat_or\n";
pattern i ppf p1;
pattern i ppf p2;
| Tpat_lazy p ->
line i ppf "Ppat_lazy\n";
pattern i ppf p;
and expression_extra i ppf x =
match x with
| Texp_constraint (cto1, cto2) ->
line i ppf "Pexp_constraint\n";
option i core_type ppf cto1;
option i core_type ppf cto2;
| Texp_open (m, _, _) ->
line i ppf "Pexp_open \"%a\"\n" fmt_path m;
| Texp_poly cto ->
line i ppf "Pexp_poly\n";
option i core_type ppf cto;
| Texp_newtype s ->
line i ppf "Pexp_newtype \"%s\"\n" s;
and expression i ppf x =
line i ppf "expression %a\n" fmt_location x.exp_loc;
let i =
List.fold_left (fun i (extra,_) -> expression_extra i ppf extra; i+1)
(i+1) x.exp_extra
in
match x.exp_desc with
| Texp_ident (li,_,_) -> line i ppf "Pexp_ident %a\n" fmt_path li;
| Texp_instvar (_, li,_) -> line i ppf "Pexp_instvar %a\n" fmt_path li;
| Texp_constant (c) -> line i ppf "Pexp_constant %a\n" fmt_constant c;
| Texp_let (rf, l, e) ->
line i ppf "Pexp_let %a\n" fmt_rec_flag rf;
list i pattern_x_expression_def ppf l;
expression i ppf e;
| Texp_function (p, l, _partial) ->
line i ppf "Pexp_function \"%s\"\n" p;
list i pattern_x_expression_case ppf l;
| Texp_apply (e, l) ->
line i ppf "Pexp_apply\n";
expression i ppf e;
list i label_x_expression ppf l;
| Texp_match (e, l, partial) ->
line i ppf "Pexp_match\n";
expression i ppf e;
list i pattern_x_expression_case ppf l;
| Texp_try (e, l) ->
line i ppf "Pexp_try\n";
expression i ppf e;
list i pattern_x_expression_case ppf l;
| Texp_tuple (l) ->
line i ppf "Pexp_tuple\n";
list i expression ppf l;
| Texp_construct (li, _, eo, b) ->
line i ppf "Pexp_construct %a\n" fmt_longident li;
list i expression ppf eo;
bool i ppf b;
| Texp_variant (l, eo) ->
line i ppf "Pexp_variant \"%s\"\n" l;
option i expression ppf eo;
| Texp_record (l, eo) ->
line i ppf "Pexp_record\n";
list i longident_x_expression ppf l;
option i expression ppf eo;
| Texp_field (e, li, _) ->
line i ppf "Pexp_field\n";
expression i ppf e;
longident i ppf li;
| Texp_setfield (e1, li, _, e2) ->
line i ppf "Pexp_setfield\n";
expression i ppf e1;
longident i ppf li;
expression i ppf e2;
| Texp_array (l) ->
line i ppf "Pexp_array\n";
list i expression ppf l;
| Texp_ifthenelse (e1, e2, eo) ->
line i ppf "Pexp_ifthenelse\n";
expression i ppf e1;
expression i ppf e2;
option i expression ppf eo;
| Texp_sequence (e1, e2) ->
line i ppf "Pexp_sequence\n";
expression i ppf e1;
expression i ppf e2;
| Texp_while (e1, e2) ->
line i ppf "Pexp_while\n";
expression i ppf e1;
expression i ppf e2;
| Texp_for (s, _, e1, e2, df, e3) ->
line i ppf "Pexp_for \"%a\" %a\n" fmt_ident s fmt_direction_flag df;
expression i ppf e1;
expression i ppf e2;
expression i ppf e3;
| Texp_when (e1, e2) ->
line i ppf "Pexp_when\n";
expression i ppf e1;
expression i ppf e2;
| Texp_send (e, Tmeth_name s, eo) ->
line i ppf "Pexp_send \"%s\"\n" s;
expression i ppf e;
option i expression ppf eo
| Texp_send (e, Tmeth_val s, eo) ->
line i ppf "Pexp_send \"%a\"\n" fmt_ident s;
expression i ppf e;
option i expression ppf eo
| Texp_new (li, _, _) -> line i ppf "Pexp_new %a\n" fmt_path li;
| Texp_setinstvar (_, s, _, e) ->
line i ppf "Pexp_setinstvar \"%a\"\n" fmt_path s;
expression i ppf e;
| Texp_override (_, l) ->
line i ppf "Pexp_override\n";
list i string_x_expression ppf l;
| Texp_letmodule (s, _, me, e) ->
line i ppf "Pexp_letmodule \"%a\"\n" fmt_ident s;
module_expr i ppf me;
expression i ppf e;
| Texp_assert (e) ->
line i ppf "Pexp_assert";
expression i ppf e;
| Texp_assertfalse ->
line i ppf "Pexp_assertfalse";
| Texp_lazy (e) ->
line i ppf "Pexp_lazy";
expression i ppf e;
| Texp_object (s, _) ->
line i ppf "Pexp_object";
class_structure i ppf s
| Texp_pack me ->
line i ppf "Pexp_pack";
module_expr i ppf me
and value_description i ppf x =
line i ppf "value_description\n";
core_type (i+1) ppf x.val_desc;
list (i+1) string ppf x.val_prim;
and string_option_underscore i ppf =
function
| Some x ->
string i ppf x.txt
| None ->
string i ppf "_"
and type_declaration i ppf x =
line i ppf "type_declaration %a\n" fmt_location x.typ_loc;
let i = i+1 in
line i ppf "ptype_params =\n";
list (i+1) string_option_underscore ppf x.typ_params;
line i ppf "ptype_cstrs =\n";
list (i+1) core_type_x_core_type_x_location ppf x.typ_cstrs;
line i ppf "ptype_kind =\n";
type_kind (i+1) ppf x.typ_kind;
line i ppf "ptype_private = %a\n" fmt_private_flag x.typ_private;
line i ppf "ptype_manifest =\n";
option (i+1) core_type ppf x.typ_manifest;
and type_kind i ppf x =
match x with
| Ttype_abstract ->
line i ppf "Ptype_abstract\n"
| Ttype_variant l ->
line i ppf "Ptype_variant\n";
list (i+1) string_x_core_type_list_x_location ppf l;
| Ttype_record l ->
line i ppf "Ptype_record\n";
list (i+1) string_x_mutable_flag_x_core_type_x_location ppf l;
and exception_declaration i ppf x = list i core_type ppf x
and class_type i ppf x =
line i ppf "class_type %a\n" fmt_location x.cltyp_loc;
let i = i+1 in
match x.cltyp_desc with
| Tcty_constr (li, _, l) ->
line i ppf "Pcty_constr %a\n" fmt_path li;
list i core_type ppf l;
| Tcty_signature (cs) ->
line i ppf "Pcty_signature\n";
class_signature i ppf cs;
| Tcty_fun (l, co, cl) ->
line i ppf "Pcty_fun \"%s\"\n" l;
core_type i ppf co;
class_type i ppf cl;
and class_signature i ppf { csig_self = ct; csig_fields = l } =
line i ppf "class_signature\n";
core_type (i+1) ppf ct;
list (i+1) class_type_field ppf l;
and class_type_field i ppf x =
let loc = x.ctf_loc in
match x.ctf_desc with
| Tctf_inher (ct) ->
line i ppf "Pctf_inher\n";
class_type i ppf ct;
| Tctf_val (s, mf, vf, ct) ->
line i ppf
"Pctf_val \"%s\" %a %a %a\n" s
fmt_mutable_flag mf fmt_virtual_flag vf fmt_location loc;
core_type (i+1) ppf ct;
| Tctf_virt (s, pf, ct) ->
line i ppf
"Pctf_virt \"%s\" %a %a\n" s fmt_private_flag pf fmt_location loc;
core_type (i+1) ppf ct;
| Tctf_meth (s, pf, ct) ->
line i ppf
"Pctf_meth \"%s\" %a %a\n" s fmt_private_flag pf fmt_location loc;
core_type (i+1) ppf ct;
| Tctf_cstr (ct1, ct2) ->
line i ppf "Pctf_cstr %a\n" fmt_location loc;
core_type i ppf ct1;
core_type i ppf ct2;
and class_description i ppf x =
line i ppf "class_description %a\n" fmt_location x.ci_loc;
let i = i+1 in
line i ppf "pci_virt = %a\n" fmt_virtual_flag x.ci_virt;
line i ppf "pci_params =\n";
string_list_x_location (i+1) ppf x.ci_params;
line i ppf "pci_name = \"%s\"\n" x.ci_id_name.txt;
line i ppf "pci_expr =\n";
class_type (i+1) ppf x.ci_expr;
and class_type_declaration i ppf x =
line i ppf "class_type_declaration %a\n" fmt_location x.ci_loc;
let i = i+1 in
line i ppf "pci_virt = %a\n" fmt_virtual_flag x.ci_virt;
line i ppf "pci_params =\n";
string_list_x_location (i+1) ppf x.ci_params;
line i ppf "pci_name = \"%s\"\n" x.ci_id_name.txt;
line i ppf "pci_expr =\n";
class_type (i+1) ppf x.ci_expr;
and class_expr i ppf x =
line i ppf "class_expr %a\n" fmt_location x.cl_loc;
let i = i+1 in
match x.cl_desc with
| Tcl_ident (li, _, l) ->
line i ppf "Pcl_constr %a\n" fmt_path li;
list i core_type ppf l;
| Tcl_structure (cs) ->
line i ppf "Pcl_structure\n";
class_structure i ppf cs;
TODO
line i ppf " Pcl_fun\n " ;
label i ppf l ;
option i expression ppf eo ;
pattern i ppf p ;
class_expr i ppf e ;
label i ppf l;
option i expression ppf eo;
pattern i ppf p;
class_expr i ppf e; *)
| Tcl_apply (ce, l) ->
line i ppf "Pcl_apply\n";
class_expr i ppf ce;
list i label_x_expression ppf l;
| Tcl_let (rf, l1, l2, ce) ->
line i ppf "Pcl_let %a\n" fmt_rec_flag rf;
list i pattern_x_expression_def ppf l1;
list i ident_x_loc_x_expression_def ppf l2;
class_expr i ppf ce;
| Tcl_constraint (ce, Some ct, _, _, _) ->
line i ppf "Pcl_constraint\n";
class_expr i ppf ce;
class_type i ppf ct;
| Tcl_constraint (_, None, _, _, _) -> assert false
TODO : is it possible ? see
and class_structure i ppf { cstr_pat = p; cstr_fields = l } =
line i ppf "class_structure\n";
pattern (i+1) ppf p;
list (i+1) class_field ppf l;
TODO
let loc = x.cf_loc in
match x.cf_desc with
| Tcf_inher ( ovf , ce , so ) - >
line i ppf " Pcf_inher % a\n " fmt_override_flag ovf ;
class_expr ( i+1 ) ppf ce ;
option ( i+1 ) string ppf so ;
| Tcf_valvirt ( s , mf , ct ) - >
line i ppf " Pcf_valvirt \"%s\ " % a % a\n "
s.txt fmt_mutable_flag mf fmt_location loc ;
core_type ( i+1 ) ppf ct ;
| Tcf_val ( s , mf , ovf , e ) - >
line i ppf " Pcf_val \"%s\ " % a % a % a\n "
s.txt fmt_mutable_flag mf fmt_override_flag ovf fmt_location loc ;
expression ( i+1 ) ppf e ;
| Tcf_virt ( s , pf , ct ) - >
line i ppf " Pcf_virt \"%s\ " % a % a\n "
s.txt fmt_private_flag ;
core_type ( i+1 ) ppf ct ;
| Tcf_meth ( s , pf , ovf , e ) - >
line i ppf " Pcf_meth \"%s\ " % a % a % a\n "
s.txt fmt_private_flag pf fmt_override_flag ovf fmt_location loc ;
expression ( i+1 ) ppf e ;
| Tcf_constr ( ct1 , ct2 ) - >
line i ppf " Pcf_constr % a\n " fmt_location loc ;
core_type ( i+1 ) ppf ct1 ;
core_type ( i+1 ) ppf ct2 ;
| Tcf_init ( e ) - >
line i ppf " Pcf_init\n " ;
expression ( i+1 ) ppf e ;
match x.cf_desc with
| Tcf_inher (ovf, ce, so) ->
line i ppf "Pcf_inher %a\n" fmt_override_flag ovf;
class_expr (i+1) ppf ce;
option (i+1) string ppf so;
| Tcf_valvirt (s, mf, ct) ->
line i ppf "Pcf_valvirt \"%s\" %a %a\n"
s.txt fmt_mutable_flag mf fmt_location loc;
core_type (i+1) ppf ct;
| Tcf_val (s, mf, ovf, e) ->
line i ppf "Pcf_val \"%s\" %a %a %a\n"
s.txt fmt_mutable_flag mf fmt_override_flag ovf fmt_location loc;
expression (i+1) ppf e;
| Tcf_virt (s, pf, ct) ->
line i ppf "Pcf_virt \"%s\" %a %a\n"
s.txt fmt_private_flag pf fmt_location loc;
core_type (i+1) ppf ct;
| Tcf_meth (s, pf, ovf, e) ->
line i ppf "Pcf_meth \"%s\" %a %a %a\n"
s.txt fmt_private_flag pf fmt_override_flag ovf fmt_location loc;
expression (i+1) ppf e;
| Tcf_constr (ct1, ct2) ->
line i ppf "Pcf_constr %a\n" fmt_location loc;
core_type (i+1) ppf ct1;
core_type (i+1) ppf ct2;
| Tcf_init (e) ->
line i ppf "Pcf_init\n";
expression (i+1) ppf e;
*)
and class_declaration i ppf x =
line i ppf "class_declaration %a\n" fmt_location x.ci_loc;
let i = i+1 in
line i ppf "pci_virt = %a\n" fmt_virtual_flag x.ci_virt;
line i ppf "pci_params =\n";
string_list_x_location (i+1) ppf x.ci_params;
line i ppf "pci_name = \"%s\"\n" x.ci_id_name.txt;
line i ppf "pci_expr =\n";
class_expr (i+1) ppf x.ci_expr;
and module_type i ppf x =
line i ppf "module_type %a\n" fmt_location x.mty_loc;
let i = i+1 in
match x.mty_desc with
| Tmty_ident (li,_) -> line i ppf "Pmty_ident %a\n" fmt_path li;
| Tmty_signature (s) ->
line i ppf "Pmty_signature\n";
signature i ppf s;
| Tmty_functor (s, _, mt1, mt2) ->
line i ppf "Pmty_functor \"%a\"\n" fmt_ident s;
module_type i ppf mt1;
module_type i ppf mt2;
| Tmty_with (mt, l) ->
line i ppf "Pmty_with\n";
module_type i ppf mt;
list i longident_x_with_constraint ppf l;
| Tmty_typeof m ->
line i ppf "Pmty_typeof\n";
module_expr i ppf m;
and signature i ppf x = list i signature_item ppf x.sig_items
and signature_item i ppf x =
line i ppf "signature_item %a\n" fmt_location x.sig_loc;
let i = i+1 in
match x.sig_desc with
| Tsig_value (s, _, vd) ->
line i ppf "Psig_value \"%a\"\n" fmt_ident s;
value_description i ppf vd;
| Tsig_type (l) ->
line i ppf "Psig_type\n";
list i string_x_type_declaration ppf l;
| Tsig_exception (s, _, ed) ->
line i ppf "Psig_exception \"%a\"\n" fmt_ident s;
exception_declaration i ppf ed.exn_params;
| Tsig_module (s, _, mt) ->
line i ppf "Psig_module \"%a\"\n" fmt_ident s;
module_type i ppf mt;
| Tsig_recmodule decls ->
line i ppf "Psig_recmodule\n";
list i string_x_module_type ppf decls;
| Tsig_modtype (s, _, md) ->
line i ppf "Psig_modtype \"%a\"\n" fmt_ident s;
modtype_declaration i ppf md;
| Tsig_open (li,_) -> line i ppf "Psig_open %a\n" fmt_path li;
| Tsig_include (mt, _) ->
line i ppf "Psig_include\n";
module_type i ppf mt;
| Tsig_class (l) ->
line i ppf "Psig_class\n";
list i class_description ppf l;
| Tsig_class_type (l) ->
line i ppf "Psig_class_type\n";
list i class_type_declaration ppf l;
and modtype_declaration i ppf x =
match x with
| Tmodtype_abstract -> line i ppf "Pmodtype_abstract\n";
| Tmodtype_manifest (mt) ->
line i ppf "Pmodtype_manifest\n";
module_type (i+1) ppf mt;
and with_constraint i ppf x =
match x with
| Twith_type (td) ->
line i ppf "Pwith_type\n";
type_declaration (i+1) ppf td;
| Twith_typesubst (td) ->
line i ppf "Pwith_typesubst\n";
type_declaration (i+1) ppf td;
| Twith_module (li,_) -> line i ppf "Pwith_module %a\n" fmt_path li;
| Twith_modsubst (li,_) -> line i ppf "Pwith_modsubst %a\n" fmt_path li;
(* Print a module expression: its source location, then a tag naming the
   expression kind, followed by its components one level deeper. *)
and module_expr i ppf x =
  line i ppf "module_expr %a\n" fmt_location x.mod_loc;
  let i = i+1 in
  match x.mod_desc with
  | Tmod_ident (li,_) -> line i ppf "Pmod_ident %a\n" fmt_path li;
  | Tmod_structure (s) ->
      line i ppf "Pmod_structure\n";
      structure i ppf s;
  | Tmod_functor (s, _, mt, me) ->
      line i ppf "Pmod_functor \"%a\"\n" fmt_ident s;
      module_type i ppf mt;
      module_expr i ppf me;
  | Tmod_apply (me1, me2, _) ->
      line i ppf "Pmod_apply\n";
      module_expr i ppf me1;
      module_expr i ppf me2;
  | Tmod_constraint (me, _, Tmodtype_explicit mt, _) ->
      line i ppf "Pmod_constraint\n";
      module_expr i ppf me;
      module_type i ppf mt;
  (* TODO: implicit-constraint case not printed yet.  The lines below were
     disabled code; the opening comment delimiter had been lost, which left
     them as raw, ill-formed code — restored here as a comment.
     line i ppf " Pmod_constraint\n " ;
     module_expr i ppf me ;
     module_type i ppf mt ;
     module_expr i ppf me;
     module_type i ppf mt; *)
  | Tmod_unpack (e, _) ->
      line i ppf "Pmod_unpack\n";
      expression i ppf e;
(* Print a module structure by printing each of its items in order. *)
and structure i ppf x = list i structure_item ppf x.str_items

(* Print one structure item: its source location, then a tag naming the
   item kind, followed by the item's components one level deeper. *)
and structure_item i ppf x =
  line i ppf "structure_item %a\n" fmt_location x.str_loc;
  (* all children print at depth i+1 *)
  let i = i+1 in
  match x.str_desc with
  | Tstr_eval (e) ->
      line i ppf "Pstr_eval\n";
      expression i ppf e;
  | Tstr_value (rf, l) ->
      line i ppf "Pstr_value %a\n" fmt_rec_flag rf;
      list i pattern_x_expression_def ppf l;
  | Tstr_primitive (s, _, vd) ->
      line i ppf "Pstr_primitive \"%a\"\n" fmt_ident s;
      value_description i ppf vd;
  | Tstr_type l ->
      line i ppf "Pstr_type\n";
      list i string_x_type_declaration ppf l;
  | Tstr_exception (s, _, ed) ->
      line i ppf "Pstr_exception \"%a\"\n" fmt_ident s;
      exception_declaration i ppf ed.exn_params;
  | Tstr_exn_rebind (s, _, li, _) ->
      line i ppf "Pstr_exn_rebind \"%a\" %a\n" fmt_ident s fmt_path li;
  | Tstr_module (s, _, me) ->
      line i ppf "Pstr_module \"%a\"\n" fmt_ident s;
      module_expr i ppf me;
  | Tstr_recmodule bindings ->
      line i ppf "Pstr_recmodule\n";
      list i string_x_modtype_x_module ppf bindings;
  | Tstr_modtype (s, _, mt) ->
      line i ppf "Pstr_modtype \"%a\"\n" fmt_ident s;
      module_type i ppf mt;
  | Tstr_open (li, _) -> line i ppf "Pstr_open %a\n" fmt_path li;
  | Tstr_class (l) ->
      (* only the class declaration component of each triple is printed *)
      line i ppf "Pstr_class\n";
      list i class_declaration ppf (List.map (fun (cl, _,_) -> cl) l);
  | Tstr_class_type (l) ->
      line i ppf "Pstr_class_type\n";
      list i class_type_declaration ppf (List.map (fun (_, _, cl) -> cl) l);
  | Tstr_include (me, _) ->
      (* NOTE(review): no trailing "\n" here, unlike every other case —
         looks like a formatting bug; confirm against the line helper. *)
      line i ppf "Pstr_include";
      module_expr i ppf me
(* Helpers that print the component pairs/triples stored in the AST lists
   above.  Each prints an identifying header line at depth [i] and the
   nested payload at depth [i+1]. *)
and string_x_type_declaration i ppf (s, _, td) =
  ident i ppf s;
  type_declaration (i+1) ppf td;

and string_x_module_type i ppf (s, _, mty) =
  ident i ppf s;
  module_type (i+1) ppf mty;

and string_x_modtype_x_module i ppf (s, _, mty, modl) =
  ident i ppf s;
  module_type (i+1) ppf mty;
  module_expr (i+1) ppf modl;

and longident_x_with_constraint i ppf (li, _, wc) =
  line i ppf "%a\n" fmt_path li;
  with_constraint (i+1) ppf wc;

and core_type_x_core_type_x_location i ppf (ct1, ct2, l) =
  line i ppf "<constraint> %a\n" fmt_location l;
  core_type (i+1) ppf ct1;
  core_type (i+1) ppf ct2;

(* NOTE(review): [r_opt] is accepted but never printed — assumed
   intentional; confirm against the declaration of the list element. *)
and string_x_core_type_list_x_location i ppf (s, _, l, r_opt) =
  line i ppf "\"%a\"\n" fmt_ident s;
  list (i+1) core_type ppf l;

and string_x_mutable_flag_x_core_type_x_location i ppf (s, _, mf, ct, loc) =
  line i ppf "\"%a\" %a %a\n" fmt_ident s fmt_mutable_flag mf fmt_location loc;
  core_type (i+1) ppf ct;

and string_list_x_location i ppf (l, loc) =
  line i ppf "<params> %a\n" fmt_location loc;
  list (i+1) string_loc ppf l;

and longident_x_pattern i ppf (li, _, p) =
  line i ppf "%a\n" fmt_longident li;
  pattern (i+1) ppf p;

(* One match case: pattern, then the guarded expression. *)
and pattern_x_expression_case i ppf (p, e) =
  line i ppf "<case>\n";
  pattern (i+1) ppf p;
  expression (i+1) ppf e;

(* One let binding: pattern, then the bound expression. *)
and pattern_x_expression_def i ppf (p, e) =
  line i ppf "<def>\n";
  pattern (i+1) ppf p;
  expression (i+1) ppf e;

and string_x_expression i ppf (s, _, e) =
  line i ppf "<override> \"%a\"\n" fmt_path s;
  expression (i+1) ppf e;

and longident_x_expression i ppf (li, _, e) =
  line i ppf "%a\n" fmt_longident li;
  expression (i+1) ppf e;

(* Labelled argument; the expression is optional (omitted argument). *)
and label_x_expression i ppf (l, e, _) =
  line i ppf "<label> \"%s\"\n" l;
  (match e with None -> () | Some e -> expression (i+1) ppf e)

and ident_x_loc_x_expression_def i ppf (l,_, e) =
  line i ppf "<def> \"%a\"\n" fmt_ident l;
  expression (i+1) ppf e;

(* Polymorphic-variant row field: a tag with its types, or inheritance. *)
and label_x_bool_x_core_type_list i ppf x =
  match x with
    Ttag (l, b, ctl) ->
      line i ppf "Rtag \"%s\" %s\n" l (string_of_bool b);
      list (i+1) core_type ppf ctl
  | Tinherit (ct) ->
      line i ppf "Rinherit\n";
      core_type (i+1) ppf ct
;;
(* Entry points: dump a typed interface / implementation at depth 0. *)
let interface ppf x = list 0 signature_item ppf x.sig_items;;

let implementation ppf x = list 0 structure_item ppf x.str_items;;

(* The coercion paired with the implementation is not printed. *)
let implementation_with_coercion ppf (x, _) = implementation ppf x
|
d92a9ac2bd54baad05b99a68c29546aca30ce2926c28858eedc8c7d85c774274 | RailsOnLisp/thot | thot-epoll.lisp | ;;
;; Thot - http web server
;; Copyright 2017,2018 Thomas de Grivel <> 0614550127
;;
(in-package :thot)
;; epoll infos
;; Bookkeeping for one epoll(7) instance: the epoll file descriptor plus
;; a table mapping each watched fd to the agent object handling it.
(defclass epoll-infos ()
  ((fd :initarg :fd
       :reader epoll-fd
       :type (unsigned-byte 31))        ; the epoll instance's own fd
   (agents :initform (make-hash-table)  ; fd -> agent
           :reader epoll-agents
           :type hash-table)))
;; Generic epoll agent class
;; Base class for anything watched by the event loop.  PENDING is set
;; once the agent has been deleted, so queued events are skipped.
(defclass agent ()
  ((fd :initarg :fd
       :reader agent-fd
       :type (unsigned-byte 31))  ; the socket/pipe fd being watched
   (pending :initform nil         ; t after epoll-del; ignore further events
            :accessor agent-pending
            :type boolean)))
;; Protocol implemented by every agent class:
(defgeneric agent-epoll-events (agent))  ; epoll event mask to register
(defgeneric agent-error (epoll agent))   ; EPOLLERR reported for the fd
(defgeneric agent-in (epoll agent))      ; fd became readable
(defgeneric agent-out (epoll agent))     ; fd became writable
;; Error condition carrying the agent on which the failure occurred.
(define-condition agent-error (error)
  ((agent :initarg :agent
          :reader agent-error-agent
          :type agent)))
;; Adding an agent

;; Place form for the agent registered under FD (usable with setf).
(defmacro get-agent (epoll fd)
  `(gethash ,fd (epoll-agents ,epoll)))
;; Forget the agent registered under FD in EPOLL's agent table.
(defun remove-agent (epoll fd)
  (declare (type epoll-infos epoll))
  (let ((table (epoll-agents epoll)))
    (remhash fd table)))
;; Register AGENT with the epoll instance: make its fd non-blocking,
;; record it in the agent table, and ask epoll to watch the events the
;; agent declares.  The fd itself is used as the epoll user data.
(defun epoll-add (epoll agent)
  (declare (type epoll-infos epoll))
  (let ((fd (agent-fd agent)))
    (set-nonblocking fd)
    (setf (get-agent epoll fd) agent)
    (epoll:add (epoll-fd epoll) fd
               (agent-epoll-events agent)
               :data-fd fd)))
;; Change the event mask the epoll instance watches for AGENT's fd.
(defun epoll-mod (epoll agent events)
  (declare (type epoll-infos epoll))
  (let* ((watched-fd (agent-fd agent)))
    (epoll:mod (epoll-fd epoll) watched-fd events :data-fd watched-fd)))
;; Tear down AGENT: mark it pending (so already-dequeued events are
;; ignored), unregister from epoll, shut down both socket directions,
;; close the fd and drop it from the agent table — in that order.
(defun epoll-del (epoll agent)
  (declare (type epoll-infos epoll))
  (setf (agent-pending agent) t)
  (let ((fd (agent-fd agent)))
    (epoll:del (epoll-fd epoll) fd)
    (socket:shutdown fd t t)
    (unistd:close fd)
    (remove-agent epoll fd)))
;; Worker agent

;; One connected HTTP client: its address, the request/reply pair and
;; the reader continuation that resumes request parsing when more input
;; arrives (nil once the request has been fully read).
(defclass worker (agent)
  ((addr :initarg :addr
         :reader worker-addr)
   (keep-alive :initform nil
               :accessor worker-keep-alive
               :type boolean)
   (reader-cont :initarg :reader-cont   ; resumable request parser
                :accessor worker-reader-cont
                :type (or null function))
   (reply :initarg :reply
          :accessor worker-reply
          :type reply)
   (request :initarg :request
            :accessor worker-request
            :type request)))
;; Workers care about readability, writability and errors.
(defmethod agent-epoll-events ((worker worker))
  (logior epoll:+in+ epoll:+out+ epoll:+err+))
;; Condition type for worker failures (currently unused directly).
(define-condition worker-error (agent-error)
  ())

;; An epoll error on a worker simply closes the connection.
(defmethod agent-error ((epoll epoll-infos) (worker worker))
  (epoll-del epoll worker))
;; Readable data on a worker socket: resume the reader continuation.
;; Its return value drives the connection state machine:
;;   nil         -> request fully read, stop reading
;;   :keep-alive -> start parsing the next request on the same socket
;;   a function  -> parsing suspended, store the new continuation
;;   :eof        -> peer closed, tear the connection down
;; With no continuation left, close once the reply buffer is drained.
(defmethod agent-in ((epoll epoll-infos) (worker worker))
  (let ((reader-cont (worker-reader-cont worker)))
    (if reader-cont
        (let ((result (funcall reader-cont)))
          (cond ((null result) (setf (worker-reader-cont worker) nil))
                ((eq :keep-alive result)
                 ;; reuse request/reply objects for the next request
                 (let ((request (worker-request worker))
                       (reply (worker-reply worker)))
                   (setf (worker-reader-cont worker)
                         (request-reader (reset-request request)
                                         (reset-reply reply)))))
                ((functionp result) (setf (worker-reader-cont worker)
                                          result))
                ((eq :eof result) (epoll-del epoll worker))
                (t (error "worker input error ~S" worker))))
        (when (= 0 (stream-output-length
                    (reply-stream (worker-reply worker))))
          (epoll-del epoll worker)))))
;; Writable worker socket: flush buffered reply bytes.  Close the
;; connection on :eof from the flush, or once nothing remains to write
;; and no reader continuation is pending.
;; (Fix: the original also bound the worker's request to a local that
;; was never used; the dead binding is removed.)
(defmethod agent-out ((epoll epoll-infos) (worker worker))
  (let* ((reply (worker-reply worker))
         (babel-stream (reply-stream reply))
         (stream (stream-underlying-stream babel-stream)))
    (case (stream-flush-output stream)
      ((:eof) (epoll-del epoll worker)))
    (unless (worker-reader-cont worker)
      (when (= 0 (stream-output-length stream))
        (epoll-del epoll worker)))))
;; Acceptor agent

;; The listening socket: only readability (incoming connections) and
;; errors are of interest.
(defclass acceptor (agent)
  ())

(defmethod agent-epoll-events ((agent acceptor))
  (logior epoll:+in+ epoll:+err+))

(define-condition acceptor-error (agent-error)
  ())

;; An error on the listening socket is fatal: signal it to the caller.
(defmethod agent-error ((epoll epoll-infos) (acceptor acceptor))
  (error 'acceptor-error :agent acceptor))
;; Build a worker for a freshly accepted connection FD from ADDR:
;; wrap the fd in buffered babel input/output streams, create the
;; request/reply objects and the initial reader continuation.
(defun make-worker (fd addr)
  (let* ((request-stream (babel-input-stream (unistd-input-stream fd)))
         (reply-stream (babel-output-stream
                        (multi-buffered-output-stream
                         (unistd-output-stream fd))))
         (request (make-instance 'request :stream request-stream
                                 :remote-addr (socket:sockaddr-to-string
                                               addr)))
         (reply (make-instance 'reply :stream reply-stream))
         (reader-cont (request-reader request reply)))
    (make-instance 'worker
                   :addr addr
                   :fd fd
                   :reader-cont reader-cont
                   :request request
                   :reply reply)))
;; Readable listening socket: accept a connection and register a new
;; worker for it.  :non-blocking means no connection was ready.
(defmethod agent-in ((epoll epoll-infos) (acceptor acceptor))
  (multiple-value-bind (fd addr) (socket:accept (agent-fd acceptor))
    (unless (eq :non-blocking fd)
      (let ((worker (make-worker fd addr)))
        (epoll-add epoll worker)))))
;; Control agent: a pipe used to ask the event loop to stop.
(defclass control (agent)
  ())

(defmethod agent-epoll-events ((agent control))
  epoll:+in+)

;; Any input on the control pipe requests shutdown of the loop.
(defmethod agent-in ((epoll epoll-infos) (agent control))
  (setq *stop* t))
;; Thread event loop

;; Run the epoll-driven accept/serve loop on LISTENFD until *stop* is
;; set.  PIPE, when given, is registered as a control agent so another
;; thread can wake the loop and request shutdown.
(defun acceptor-loop-epoll (listenfd &optional pipe)
  (declare (type unistd:file-descriptor listenfd))
  (epoll:with (epoll-fd)
    (let ((epoll (make-instance 'epoll-infos :fd epoll-fd)))
      (epoll-add epoll (make-instance 'acceptor :fd listenfd))
      (when pipe
        (epoll-add epoll (make-instance 'control :fd pipe)))
      (loop
         (when *stop*
           (return))
         ;; dispatch each ready fd to its agent; skip agents already
         ;; deleted (pending) during this batch of events
         (epoll:wait (events fd epoll-fd 10000 -1)
           (let ((agent (get-agent epoll fd)))
             (unless agent (error "bad epoll fd ~S" fd))
             (unless (= 0 (logand epoll:+err+ events))
               (agent-error epoll agent))
             (unless (or (agent-pending agent)
                         (= 0 (logand epoll:+in+ events)))
               (agent-in epoll agent))
             (unless (or (agent-pending agent)
                         (= 0 (logand epoll:+out+ events)))
               (agent-out epoll agent))))))))
;; Install the epoll acceptor loop as the active loop implementation,
;; but only on systems that actually expose epoll_create.
(defun maybe-configure-epoll ()
  (when (cffi:foreign-symbol-pointer "epoll_create")
    (setf *acceptor-loop* #'acceptor-loop-epoll)))

;; Select the loop implementation when this file is loaded.
(eval-when (:load-toplevel :execute)
  (maybe-configure-epoll))
#+nil
(trace
epoll:create epoll-add epoll-del
acceptor-loop-epoll make-worker agent-in agent-out agent-error
stream-flush-output unistd:c-write
stream-output-index
)
| null | https://raw.githubusercontent.com/RailsOnLisp/thot/9cfb6c228753a752d9326a8b39a77b77f790263c/thot-epoll.lisp | lisp |
Thot - http web server
Adding an agent
Worker agent
Acceptor agent
Thread event loop | Copyright 2017,2018 Thomas de Grivel < > 0614550127
(in-package :thot)
epoll infos
(defclass epoll-infos ()
((fd :initarg :fd
:reader epoll-fd
:type (unsigned-byte 31))
(agents :initform (make-hash-table)
:reader epoll-agents
:type hash-table)))
Generic epoll agent class
(defclass agent ()
((fd :initarg :fd
:reader agent-fd
:type (unsigned-byte 31))
(pending :initform nil
:accessor agent-pending
:type boolean)))
(defgeneric agent-epoll-events (agent))
(defgeneric agent-error (epoll agent))
(defgeneric agent-in (epoll agent))
(defgeneric agent-out (epoll agent))
(define-condition agent-error (error)
((agent :initarg :agent
:reader agent-error-agent
:type agent)))
(defmacro get-agent (epoll fd)
`(gethash ,fd (epoll-agents ,epoll)))
(defun remove-agent (epoll fd)
(declare (type epoll-infos epoll))
(remhash fd (epoll-agents epoll)))
(defun epoll-add (epoll agent)
(declare (type epoll-infos epoll))
(let ((fd (agent-fd agent)))
(set-nonblocking fd)
(setf (get-agent epoll fd) agent)
(epoll:add (epoll-fd epoll) fd
(agent-epoll-events agent)
:data-fd fd)))
(defun epoll-mod (epoll agent events)
(declare (type epoll-infos epoll))
(let ((fd (agent-fd agent)))
(epoll:mod (epoll-fd epoll) fd events :data-fd fd)))
(defun epoll-del (epoll agent)
(declare (type epoll-infos epoll))
(setf (agent-pending agent) t)
(let ((fd (agent-fd agent)))
(epoll:del (epoll-fd epoll) fd)
(socket:shutdown fd t t)
(unistd:close fd)
(remove-agent epoll fd)))
(defclass worker (agent)
((addr :initarg :addr
:reader worker-addr)
(keep-alive :initform nil
:accessor worker-keep-alive
:type boolean)
(reader-cont :initarg :reader-cont
:accessor worker-reader-cont
:type (or null function))
(reply :initarg :reply
:accessor worker-reply
:type reply)
(request :initarg :request
:accessor worker-request
:type request)))
(defmethod agent-epoll-events ((worker worker))
(logior epoll:+in+ epoll:+out+ epoll:+err+))
(define-condition worker-error (agent-error)
())
(defmethod agent-error ((epoll epoll-infos) (worker worker))
(epoll-del epoll worker))
(defmethod agent-in ((epoll epoll-infos) (worker worker))
(let ((reader-cont (worker-reader-cont worker)))
(if reader-cont
(let ((result (funcall reader-cont)))
(cond ((null result) (setf (worker-reader-cont worker) nil))
((eq :keep-alive result)
(let ((request (worker-request worker))
(reply (worker-reply worker)))
(setf (worker-reader-cont worker)
(request-reader (reset-request request)
(reset-reply reply)))))
((functionp result) (setf (worker-reader-cont worker)
result))
((eq :eof result) (epoll-del epoll worker))
(t (error "worker input error ~S" worker))))
(when (= 0 (stream-output-length
(reply-stream (worker-reply worker))))
(epoll-del epoll worker)))))
(defmethod agent-out ((epoll epoll-infos) (worker worker))
(let* ((request (worker-request worker))
(reply (worker-reply worker))
(babel-stream (reply-stream reply))
(stream (stream-underlying-stream babel-stream)))
(case (stream-flush-output stream)
((:eof) (epoll-del epoll worker)))
(unless (worker-reader-cont worker)
(when (= 0 (stream-output-length stream))
(epoll-del epoll worker)))))
(defclass acceptor (agent)
())
(defmethod agent-epoll-events ((agent acceptor))
(logior epoll:+in+ epoll:+err+))
(define-condition acceptor-error (agent-error)
())
(defmethod agent-error ((epoll epoll-infos) (acceptor acceptor))
(error 'acceptor-error :agent acceptor))
(defun make-worker (fd addr)
(let* ((request-stream (babel-input-stream (unistd-input-stream fd)))
(reply-stream (babel-output-stream
(multi-buffered-output-stream
(unistd-output-stream fd))))
(request (make-instance 'request :stream request-stream
:remote-addr (socket:sockaddr-to-string
addr)))
(reply (make-instance 'reply :stream reply-stream))
(reader-cont (request-reader request reply)))
(make-instance 'worker
:addr addr
:fd fd
:reader-cont reader-cont
:request request
:reply reply)))
(defmethod agent-in ((epoll epoll-infos) (acceptor acceptor))
(multiple-value-bind (fd addr) (socket:accept (agent-fd acceptor))
(unless (eq :non-blocking fd)
(let ((worker (make-worker fd addr)))
(epoll-add epoll worker)))))
(defclass control (agent)
())
(defmethod agent-epoll-events ((agent control))
epoll:+in+)
(defmethod agent-in ((epoll epoll-infos) (agent control))
(setq *stop* t))
(defun acceptor-loop-epoll (listenfd &optional pipe)
(declare (type unistd:file-descriptor listenfd))
(epoll:with (epoll-fd)
(let ((epoll (make-instance 'epoll-infos :fd epoll-fd)))
(epoll-add epoll (make-instance 'acceptor :fd listenfd))
(when pipe
(epoll-add epoll (make-instance 'control :fd pipe)))
(loop
(when *stop*
(return))
(epoll:wait (events fd epoll-fd 10000 -1)
(let ((agent (get-agent epoll fd)))
(unless agent (error "bad epoll fd ~S" fd))
(unless (= 0 (logand epoll:+err+ events))
(agent-error epoll agent))
(unless (or (agent-pending agent)
(= 0 (logand epoll:+in+ events)))
(agent-in epoll agent))
(unless (or (agent-pending agent)
(= 0 (logand epoll:+out+ events)))
(agent-out epoll agent))))))))
(defun maybe-configure-epoll ()
(when (cffi:foreign-symbol-pointer "epoll_create")
(setf *acceptor-loop* #'acceptor-loop-epoll)))
(eval-when (:load-toplevel :execute)
(maybe-configure-epoll))
#+nil
(trace
epoll:create epoll-add epoll-del
acceptor-loop-epoll make-worker agent-in agent-out agent-error
stream-flush-output unistd:c-write
stream-output-index
)
|
3782f6c38dcb44d96884a00bd739f36e95fb2430018aaaf95bc89ecc05eec5cf | mirage/ptt | nec.mli | * { 1 : A DKIM signer as a SMTP server . }
This module implements a server which signs incoming emails with a private
RSA key . It re - sends emails with the computed DKIM field .
This module implements a server which signs incoming emails with a private
RSA key. It re-sends emails with the computed DKIM field.
*)
module Make
(Random : Mirage_random.S)
(Time : Mirage_time.S)
(Mclock : Mirage_clock.MCLOCK)
(Pclock : Mirage_clock.PCLOCK)
(Resolver : Ptt.Sigs.RESOLVER with type +'a io = 'a Lwt.t)
(Stack : Tcpip.Stack.V4V6) : sig
val fiber :
?limit:int
-> ?stop:Lwt_switch.t
-> ?locals:Ptt.Relay_map.t
-> port:int
-> tls:Tls.Config.client
-> Stack.TCP.t
-> Resolver.t
-> Mirage_crypto_pk.Rsa.priv * Dkim.unsigned Dkim.dkim
-> Ptt.Logic.info
-> unit Lwt.t
end
| null | https://raw.githubusercontent.com/mirage/ptt/c4851958be6a3a98c5c267ae963e10cb41eb72e4/lib/nec.mli | ocaml | * { 1 : A DKIM signer as a SMTP server . }
This module implements a server which signs incoming emails with a private
RSA key . It re - sends emails with the computed DKIM field .
This module implements a server which signs incoming emails with a private
RSA key. It re-sends emails with the computed DKIM field.
*)
module Make
(Random : Mirage_random.S)
(Time : Mirage_time.S)
(Mclock : Mirage_clock.MCLOCK)
(Pclock : Mirage_clock.PCLOCK)
(Resolver : Ptt.Sigs.RESOLVER with type +'a io = 'a Lwt.t)
(Stack : Tcpip.Stack.V4V6) : sig
val fiber :
?limit:int
-> ?stop:Lwt_switch.t
-> ?locals:Ptt.Relay_map.t
-> port:int
-> tls:Tls.Config.client
-> Stack.TCP.t
-> Resolver.t
-> Mirage_crypto_pk.Rsa.priv * Dkim.unsigned Dkim.dkim
-> Ptt.Logic.info
-> unit Lwt.t
end
| |
9bc1ba635860c2a06976e25c86afde74774987680f2f89c0221c6f5064b4a8b0 | iand675/hs-opentelemetry | Processor.hs | -----------------------------------------------------------------------------
-----------------------------------------------------------------------------
|
Module : OpenTelemetry . Processor
Copyright : ( c ) , 2021
License : BSD-3
Description : Hooks for performing actions on the start and end of recording spans
Maintainer :
Stability : experimental
Portability : non - portable ( GHC extensions )
Span processor is an interface which allows hooks for span start and end method invocations . The span processors are invoked only when is true .
Built - in span processors are responsible for batching and conversion of spans to exportable representation and passing batches to exporters .
Span processors can be registered directly on SDK TracerProvider and they are invoked in the same order as they were registered .
Each processor registered on TracerProvider is a start of pipeline that consist of span processor and optional exporter . SDK MUST allow to end each pipeline with individual exporter .
SDK MUST allow users to implement and configure custom processors and decorate built - in processors for advanced scenarios such as tagging or filtering .
Module : OpenTelemetry.Processor
Copyright : (c) Ian Duncan, 2021
License : BSD-3
Description : Hooks for performing actions on the start and end of recording spans
Maintainer : Ian Duncan
Stability : experimental
Portability : non-portable (GHC extensions)
Span processor is an interface which allows hooks for span start and end method invocations. The span processors are invoked only when IsRecording is true.
Built-in span processors are responsible for batching and conversion of spans to exportable representation and passing batches to exporters.
Span processors can be registered directly on SDK TracerProvider and they are invoked in the same order as they were registered.
Each processor registered on TracerProvider is a start of pipeline that consist of span processor and optional exporter. SDK MUST allow to end each pipeline with individual exporter.
SDK MUST allow users to implement and configure custom processors and decorate built-in processors for advanced scenarios such as tagging or filtering.
-}
module OpenTelemetry.Processor (
Processor (..),
ShutdownResult (..),
) where
import OpenTelemetry.Internal.Trace.Types
| null | https://raw.githubusercontent.com/iand675/hs-opentelemetry/b08550db292ca0d8b9ce9156988e6d08dd6a2e61/api/src/OpenTelemetry/Processor.hs | haskell | ---------------------------------------------------------------------------
--------------------------------------------------------------------------- |
|
Module : OpenTelemetry . Processor
Copyright : ( c ) , 2021
License : BSD-3
Description : Hooks for performing actions on the start and end of recording spans
Maintainer :
Stability : experimental
Portability : non - portable ( GHC extensions )
Span processor is an interface which allows hooks for span start and end method invocations . The span processors are invoked only when is true .
Built - in span processors are responsible for batching and conversion of spans to exportable representation and passing batches to exporters .
Span processors can be registered directly on SDK TracerProvider and they are invoked in the same order as they were registered .
Each processor registered on TracerProvider is a start of pipeline that consist of span processor and optional exporter . SDK MUST allow to end each pipeline with individual exporter .
SDK MUST allow users to implement and configure custom processors and decorate built - in processors for advanced scenarios such as tagging or filtering .
Module : OpenTelemetry.Processor
Copyright : (c) Ian Duncan, 2021
License : BSD-3
Description : Hooks for performing actions on the start and end of recording spans
Maintainer : Ian Duncan
Stability : experimental
Portability : non-portable (GHC extensions)
Span processor is an interface which allows hooks for span start and end method invocations. The span processors are invoked only when IsRecording is true.
Built-in span processors are responsible for batching and conversion of spans to exportable representation and passing batches to exporters.
Span processors can be registered directly on SDK TracerProvider and they are invoked in the same order as they were registered.
Each processor registered on TracerProvider is a start of pipeline that consist of span processor and optional exporter. SDK MUST allow to end each pipeline with individual exporter.
SDK MUST allow users to implement and configure custom processors and decorate built-in processors for advanced scenarios such as tagging or filtering.
-}
module OpenTelemetry.Processor (
Processor (..),
ShutdownResult (..),
) where
import OpenTelemetry.Internal.Trace.Types
|
3c7ef52e46a9821001ee7a77044574793b240599de0b5f8408b4f42ce0ef1776 | mcorbin/riemann-cond-dt-plugin | core_test.clj | (ns riemann-cond-dt.core-test
(:require [riemann-cond-dt.core :refer :all]
[riemann.time :refer :all]
[riemann.time.controlled :refer :all]
[riemann.test :refer [test-stream]]
[clojure.test :refer :all]))
;; Tests for (above threshold dt): fire once the metric has stayed
;; strictly above `threshold` for at least `dt` seconds.
;; (Fix: one event map had lost its "{:time 10 ;;" prefix to
;; comment-stripping, leaving unbalanced braces; reconstructed from the
;; surviving comment text "reset current-time to 10".)
(deftest above-test
  (testing "do nothing"
    (test-stream (above 10 5) [] [])
    (test-stream (above 10 5) [{:time 0
                                :metric 9}
                               {:time 5
                                :metric 11}
                               {:time 6
                                :metric 11}] [])
    (test-stream (above 10 5) [{:time 0
                                :metric 11}
                               {:time 5
                                :metric 9}
                               {:time 11
                                :metric 12}] [])
    (test-stream (above 10 5) [{:time 0
                                :metric 9}
                               {:metric 11}
                               {:time 6
                                :metric 11}] [])
    (test-stream (above 10 5) [{:time 0
                                :metric 12}
                               {:time 5
                                :metric 12}
                               {:time 4
                                :metric 9}
                               {:time 8
                                :metric 11}] [])
    (test-stream (above 10 5) [{:time 0
                                :metric 12}
                               {:metric 12}
                               {:time 5
                                :metric 11}
                               {:time 4 ;; too old
                                :metric 12}
                               {:time 6
                                :metric 9}
                               {:time 16
                                :metric 12}
                               {:time 10
                                :metric 11}] []))
  (testing "fire alert"
    (test-stream (above 10 5) [{:time 0
                                :metric 11}
                               {:time 6
                                :metric 11}] [{:time 6
                                               :metric 11}])
    (test-stream (above 10 5) [{:time 0
                                :metric 2}
                               {:time 6
                                :metric 11}
                               {:time 12
                                :metric 11}] [{:time 12
                                               :metric 11}])
    (test-stream (above 10 5) [{:time 0
                                :metric 11}
                               {:time 7
                                :metric 4}
                               {:time 9
                                :metric 11}
                               {:time 15
                                :metric 12}] [{:time 15
                                               :metric 12}])
    (test-stream (above 10 5) [{:time 0
                                :metric 11}
                               {:time 7
                                :metric 4}
                               {:time 9 ;; event ok
                                :metric 11}
                               {:time 15 ;; fire event
                                :metric 12}
                               {:time 20 ;; fire event
                                :metric 12}
                               {:time 20 ;; fire event
                                :metric 12}
                               {:time 8 ;; too old
                                :metric 9}
                               {:time 10 ;; old but reset current-time to 10
                                :metric 9}
                               {:time 11 ;; event ok
                                :metric 12}
                               {:time 17 ;; fire event
                                :metric 12}] [{:time 15
                                               :metric 12}
                                              {:time 20
                                               :metric 12}
                                              {:time 20
                                               :metric 12}
                                              {:time 17
                                               :metric 12}])
    (test-stream (above 10 5) [{:time 0
                                :metric 11}
                               {:time 7
                                :metric 4}
                               ;; ignore old ok event
                               {:time 1
                                :metric 12}
                               ;; ignore old ok event
                               {:time 6
                                :metric 12}
                               {:time 9
                                :metric 11}
                               {:time 15
                                :metric 12}] [{:time 15
                                               :metric 12}])
    (test-stream (above 10 5) [{:time 0
                                :metric 11}
                               {:time 7
                                :metric 4}
                               ;; ignored
                               {:time 6
                                :metric 8}
                               {:time 7.1
                                :metric 11}
                               {:time 15
                                :metric 12}] [{:time 15
                                               :metric 12}])))
;; Tests for (below threshold dt): fire once the metric has stayed
;; strictly below `threshold` for at least `dt` seconds.
(deftest below-test
  (testing "do nothing"
    (test-stream (below 10 5) [] [])
    (test-stream (below 10 5) [{:time 0
                                :metric 11}
                               {:time 5
                                :metric 9}
                               {:time 6
                                :metric 9}] [])
    (test-stream (below 10 5) [{:time 0
                                :metric 9}
                               {:time 5
                                :metric 11}
                               {:time 11
                                :metric 7}] [])
    ;; events without :time are ignored
    (test-stream (below 10 5) [{:time 0
                                :metric 11}
                               {:metric 9}
                               {:time 6
                                :metric 9}] []))
  (testing "fire alert"
    (test-stream (below 10 5) [{:time 0
                                :metric 9}
                               {:time 6
                                :metric 9}] [{:time 6
                                              :metric 9}])
    (test-stream (below 10 5) [{:time 0
                                :metric 9}
                               {:time 7
                                :metric 15}
                               {:time 9
                                :metric 9}
                               {:time 15
                                :metric 7}] [{:time 15
                                              :metric 7}])))
;; Tests for (between low high dt): fire once the metric has stayed
;; inside (low, high) for at least `dt` seconds.
(deftest between-test
  (testing "do nothing"
    (test-stream (between 10 20 5) [] [])
    (test-stream (between 10 20 5) [{:time 0
                                     :metric 9}
                                    {:time 5
                                     :metric 11}
                                    {:time 6
                                     :metric 11}] [])
    (test-stream (between 10 20 5) [{:time 0
                                     :metric 11}
                                    {:time 1
                                     :metric 11}
                                    {:time 5
                                     :metric 21}
                                    {:time 11
                                     :metric 12}] [])
    ;; events without :time are ignored
    (test-stream (between 10 20 5) [{:time 0
                                     :metric 9}
                                    {:metric 15}
                                    {:time 6
                                     :metric 15}] []))
  (testing "fire alert"
    (test-stream (between 10 20 5) [{:time 0
                                     :metric 15}
                                    {:time 6
                                     :metric 19}] [{:time 6
                                                    :metric 19}])
    (test-stream (between 10 20 5) [{:time 0
                                     :metric 11}
                                    {:time 7
                                     :metric 30}
                                    {:time 9
                                     :metric 15}
                                    {:time 15
                                     :metric 12}] [{:time 15
                                                    :metric 12}])))
;; Tests for (outside low high dt): fire once the metric has stayed
;; outside [low, high] for at least `dt` seconds.
(deftest outside-test
  (testing "do nothing"
    (test-stream (outside 10 20 5) [] [])
    (test-stream (outside 10 20 5) [{:time 0
                                     :metric 10}
                                    {:time 5
                                     :metric 20}
                                    {:time 6
                                     :metric 15}] [])
    (test-stream (outside 10 20 5) [{:time 0
                                     :metric 11}
                                    {:time 1
                                     :metric 11}
                                    {:time 5
                                     :metric 21}
                                    {:time 11
                                     :metric 12}] [])
    ;; events without :time are ignored
    (test-stream (outside 10 20 5) [{:time 0
                                     :metric 9}
                                    {:metric 15}
                                    {:time 6
                                     :metric 15}] []))
  (testing "fire alert"
    (test-stream (outside 10 20 5) [{:time 0
                                     :metric 9}
                                    {:time 6
                                     :metric 21}] [{:time 6
                                                    :metric 21}])
    (test-stream (outside 10 20 5) [{:time 0
                                     :metric 9}
                                    {:time 7
                                     :metric 16}
                                    {:time 9
                                     :metric 1}
                                    {:time 15
                                     :metric 2}] [{:time 15
                                                   :metric 2}])))
;; Tests for (critical dt): fire once events have stayed in state
;; "critical" for at least `dt` seconds.
(deftest critical-test
  (testing "do nothing"
    (test-stream (critical 5) [] [])
    (test-stream (critical 5) [{:time 0
                                :state "critical"}
                               {:time 5
                                :state "ok"}
                               {:time 6
                                :state "ok"}] [])
    (test-stream (critical 5) [{:time 0
                                :state "ok"}
                               {:time 1
                                :state "ok"}
                               {:time 5
                                :state "critical"}
                               {:time 11
                                :state "ok"}] [])
    ;; NOTE(review): {:state 15} has no :time and a non-string state;
    ;; presumably deliberate malformed-event input — confirm.
    (test-stream (critical 5) [{:time 0
                                :state "critical"}
                               {:state 15}
                               {:time 6
                                :state "ok"}] []))
  (testing "fire alert"
    (test-stream (critical 5) [{:time 0
                                :state "critical"}
                               {:time 6
                                :state "critical"}] [{:time 6
                                                      :state "critical"}])
    (test-stream (critical 5) [{:time 0
                                :state "critical"}
                               {:time 7
                                :state "ok"}
                               {:time 9
                                :state "critical"}
                               {:time 15
                                :state "critical"}] [{:time 15
                                                      :state "critical"}])))
| null | https://raw.githubusercontent.com/mcorbin/riemann-cond-dt-plugin/a584ac0b44be91b11c0255687c7f6cd90d7c1e68/test/riemann_cond_dt/core_test.clj | clojure | too old
event ok
fire event
fire event
fire event
too old
event ok
fire event
ignore old ok event
ignore old ok event
ignored | (ns riemann-cond-dt.core-test
(:require [riemann-cond-dt.core :refer :all]
[riemann.time :refer :all]
[riemann.time.controlled :refer :all]
[riemann.test :refer [test-stream]]
[clojure.test :refer :all]))
(deftest above-test
(testing "do nothing"
(test-stream (above 10 5) [] [])
(test-stream (above 10 5) [{:time 0
:metric 9}
{:time 5
:metric 11}
{:time 6
:metric 11}] [])
(test-stream (above 10 5) [{:time 0
:metric 11}
{:time 5
:metric 9}
{:time 11
:metric 12}] [])
(test-stream (above 10 5) [{:time 0
:metric 9}
{:metric 11}
{:time 6
:metric 11}] [])
(test-stream (above 10 5) [{:time 0
:metric 12}
{:time 5
:metric 12}
{:time 4
:metric 9}
{:time 8
:metric 11}] [])
(test-stream (above 10 5) [{:time 0
:metric 12}
{:metric 12}
{:time 5
:metric 11}
:metric 12}
{:time 6
:metric 9}
{:time 16
:metric 12}
{:time 10
:metric 11}] []))
(testing "fire alert"
(test-stream (above 10 5) [{:time 0
:metric 11}
{:time 6
:metric 11}] [{:time 6
:metric 11}])
(test-stream (above 10 5) [{:time 0
:metric 2}
{:time 6
:metric 11}
{:time 12
:metric 11}] [{:time 12
:metric 11}])
(test-stream (above 10 5) [{:time 0
:metric 11}
{:time 7
:metric 4}
{:time 9
:metric 11}
{:time 15
:metric 12}] [{:time 15
:metric 12}])
(test-stream (above 10 5) [{:time 0
:metric 11}
{:time 7
:metric 4}
:metric 11}
:metric 12}
:metric 12}
:metric 12}
:metric 9}
old but reset current - time to 10
:metric 9}
:metric 12}
:metric 12}] [{:time 15
:metric 12}
{:time 20
:metric 12}
{:time 20
:metric 12}
{:time 17
:metric 12}])
(test-stream (above 10 5) [{:time 0
:metric 11}
{:time 7
:metric 4}
{:time 1
:metric 12}
{:time 6
:metric 12}
{:time 9
:metric 11}
{:time 15
:metric 12}] [{:time 15
:metric 12}])
(test-stream (above 10 5) [{:time 0
:metric 11}
{:time 7
:metric 4}
{:time 6
:metric 8}
{:time 7.1
:metric 11}
{:time 15
:metric 12}] [{:time 15
:metric 12}])))
(deftest below-test
(testing "do nothing"
(test-stream (below 10 5) [] [])
(test-stream (below 10 5) [{:time 0
:metric 11}
{:time 5
:metric 9}
{:time 6
:metric 9}] [])
(test-stream (below 10 5) [{:time 0
:metric 9}
{:time 5
:metric 11}
{:time 11
:metric 7}] [])
(test-stream (below 10 5) [{:time 0
:metric 11}
{:metric 9}
{:time 6
:metric 9}] []))
(testing "fire alert"
(test-stream (below 10 5) [{:time 0
:metric 9}
{:time 6
:metric 9}] [{:time 6
:metric 9}])
(test-stream (below 10 5) [{:time 0
:metric 9}
{:time 7
:metric 15}
{:time 9
:metric 9}
{:time 15
:metric 7}] [{:time 15
:metric 7}])))
(deftest between-test
(testing "do nothing"
(test-stream (between 10 20 5) [] [])
(test-stream (between 10 20 5) [{:time 0
:metric 9}
{:time 5
:metric 11}
{:time 6
:metric 11}] [])
(test-stream (between 10 20 5) [{:time 0
:metric 11}
{:time 1
:metric 11}
{:time 5
:metric 21}
{:time 11
:metric 12}] [])
(test-stream (between 10 20 5) [{:time 0
:metric 9}
{:metric 15}
{:time 6
:metric 15}] []))
(testing "fire alert"
(test-stream (between 10 20 5) [{:time 0
:metric 15}
{:time 6
:metric 19}] [{:time 6
:metric 19}])
(test-stream (between 10 20 5) [{:time 0
:metric 11}
{:time 7
:metric 30}
{:time 9
:metric 15}
{:time 15
:metric 12}] [{:time 15
:metric 12}])))
(deftest outside-test
(testing "do nothing"
(test-stream (outside 10 20 5) [] [])
(test-stream (outside 10 20 5) [{:time 0
:metric 10}
{:time 5
:metric 20}
{:time 6
:metric 15}] [])
(test-stream (outside 10 20 5) [{:time 0
:metric 11}
{:time 1
:metric 11}
{:time 5
:metric 21}
{:time 11
:metric 12}] [])
(test-stream (outside 10 20 5) [{:time 0
:metric 9}
{:metric 15}
{:time 6
:metric 15}] []))
(testing "fire alert"
(test-stream (outside 10 20 5) [{:time 0
:metric 9}
{:time 6
:metric 21}] [{:time 6
:metric 21}])
(test-stream (outside 10 20 5) [{:time 0
:metric 9}
{:time 7
:metric 16}
{:time 9
:metric 1}
{:time 15
:metric 2}] [{:time 15
:metric 2}])))
(deftest critical-test
(testing "do nothing"
(test-stream (critical 5) [] [])
(test-stream (critical 5) [{:time 0
:state "critical"}
{:time 5
:state "ok"}
{:time 6
:state "ok"}] [])
(test-stream (critical 5) [{:time 0
:state "ok"}
{:time 1
:state "ok"}
{:time 5
:state "critical"}
{:time 11
:state "ok"}] [])
(test-stream (critical 5) [{:time 0
:state "critical"}
{:state 15}
{:time 6
:state "ok"}] []))
(testing "fire alert"
(test-stream (critical 5) [{:time 0
:state "critical"}
{:time 6
:state "critical"}] [{:time 6
:state "critical"}])
(test-stream (critical 5) [{:time 0
:state "critical"}
{:time 7
:state "ok"}
{:time 9
:state "critical"}
{:time 15
:state "critical"}] [{:time 15
:state "critical"}])))
|
79822550e89001bc291d708fba7ed6669e19dc6e4cc25651e6b2ee278927abad | karamellpelle/grid | ShadeSceneBegin.hs | grid is a game written in Haskell
Copyright ( C ) 2018
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
You should have received a copy of the GNU General Public License
-- along with grid. If not, see </>.
--
module Game.Run.RunData.Plain.ShadeSceneBegin
(
ShadeSceneBegin (..),
loadShadeSceneBegin,
unloadShadeSceneBegin,
) where
import MyPrelude
import File
import OpenGL
import OpenGL.Helpers
import OpenGL.Shade
-- | GPU resources for the scene-begin shader pass: the linked GLSL
--   program, the locations of its uniforms, and the vertex array/buffer
--   objects set up by 'loadShadeSceneBegin' (quad data built in 'makeVBO').
data ShadeSceneBegin =
    ShadeSceneBegin
    {
        shadeSceneBeginPrg :: !GLuint,               -- ^ linked shader program
        shadeSceneBeginUniProjModvMatrix :: !GLint,  -- ^ uniform @u_projmodv_matrix@
        shadeSceneBeginUniAlpha :: !GLint,           -- ^ uniform @u_alpha@
        shadeSceneBeginUniD :: !GLint,               -- ^ uniform @u_d@
        shadeSceneBeginUniDInv :: !GLint,            -- ^ uniform @u_d_inv@
        shadeSceneBeginUniTweak :: !GLint,           -- ^ uniform @u_tweak@
        shadeSceneBeginVAO :: !GLuint,               -- ^ vertex array object
        shadeSceneBeginVBO :: !GLuint                -- ^ vertex buffer object
    }
-- | Load and link the SceneBegin shader program, resolve its uniform
--   locations, and create the VAO/VBO holding the quad geometry.
loadShadeSceneBegin :: IO ShadeSceneBegin
loadShadeSceneBegin = do
    vsh <- fileStaticData "shaders/SceneBegin.vsh"
    fsh <- fileStaticData "shaders/SceneBegin.fsh"
    prg <- createPrg vsh fsh
               [ (attPos, "a_pos"),
                 (attTexCoord, "a_texcoord") ]
               [ (tex0, "u_tex") ]

    -- uniform locations
    uProjModvMatrix <- getUniformLocation prg "u_projmodv_matrix"
    uAlpha <- getUniformLocation prg "u_alpha"
    uD <- getUniformLocation prg "u_d"
    uDInv <- getUniformLocation prg "u_d_inv"
    uTweak <- getUniformLocation prg "u_tweak"

    -- vao
    -- (fix: this label had lost its "--" comment marker, leaving a bare
    -- `vao` expression statement that does not type-check in the do block)
    vao <- bindNewVAO
    glEnableVertexAttribArray attPos
    glEnableVertexAttribArray attTexCoord

    -- vbo
    vbo <- makeVBO

    return ShadeSceneBegin
           {
               shadeSceneBeginPrg = prg,
               shadeSceneBeginUniProjModvMatrix = uProjModvMatrix,
               shadeSceneBeginUniAlpha = uAlpha,
               shadeSceneBeginUniD = uD,
               shadeSceneBeginUniDInv = uDInv,
               shadeSceneBeginUniTweak = uTweak,
               shadeSceneBeginVAO = vao,
               shadeSceneBeginVBO = vbo
           }
-- | Release a 'ShadeSceneBegin'.  Currently a no-op: the GL objects are
--   left to be reclaimed together with the GL context.
unloadShadeSceneBegin :: ShadeSceneBegin -> IO ()
unloadShadeSceneBegin _ =
    return ()
-- | Create and fill a VBO with four quad vertices.  Each vertex occupies
--   @elemsize = 8@ bytes: four 'GLbyte' coordinates (x, y, z, w) followed
--   by two 'GLushort' texture coordinates (u, v).  The vertex order
--   (top-left, bottom-left, top-right, bottom-right) suggests a triangle
--   strip, but the draw mode is chosen by the caller -- confirm there.
makeVBO :: IO GLuint
makeVBO = do
    vbo <- bindNewBuf gl_ARRAY_BUFFER
    let elemsize = 4 * 1 + 2 * 2   -- 4 pos bytes + 2 ushort texcoords
        bytesize = 4 * elemsize    -- 4 vertices
    allocaBytes bytesize $ \ptr -> do
        -- vertex 0: pos (-1, 1, 0, 1), tex (0, 1)
        pokeByteOff ptr (0 + 0) (-1 :: GLbyte)
        pokeByteOff ptr (0 + 1) (1 :: GLbyte)
        pokeByteOff ptr (0 + 2) (0 :: GLbyte)
        pokeByteOff ptr (0 + 3) (1 :: GLbyte)
        pokeByteOff ptr (0 + 4) (0 :: GLushort)
        pokeByteOff ptr (0 + 6) (1 :: GLushort)
        -- vertex 1: pos (-1, -1, 0, 1), tex (0, 0)
        pokeByteOff ptr (8 + 0) (-1 :: GLbyte)
        pokeByteOff ptr (8 + 1) (-1 :: GLbyte)
        pokeByteOff ptr (8 + 2) (0 :: GLbyte)
        pokeByteOff ptr (8 + 3) (1 :: GLbyte)
        pokeByteOff ptr (8 + 4) (0 :: GLushort)
        pokeByteOff ptr (8 + 6) (0 :: GLushort)
        -- vertex 2: pos (1, 1, 0, 1), tex (1, 1)
        pokeByteOff ptr (16 + 0) (1 :: GLbyte)
        pokeByteOff ptr (16 + 1) (1 :: GLbyte)
        pokeByteOff ptr (16 + 2) (0 :: GLbyte)
        pokeByteOff ptr (16 + 3) (1 :: GLbyte)
        pokeByteOff ptr (16 + 4) (1 :: GLushort)
        pokeByteOff ptr (16 + 6) (1 :: GLushort)
        -- vertex 3: pos (1, -1, 0, 1), tex (1, 0)
        pokeByteOff ptr (24 + 0) (1 :: GLbyte)
        pokeByteOff ptr (24 + 1) (-1 :: GLbyte)
        pokeByteOff ptr (24 + 2) (0 :: GLbyte)
        pokeByteOff ptr (24 + 3) (1 :: GLbyte)
        pokeByteOff ptr (24 + 4) (1 :: GLushort)
        pokeByteOff ptr (24 + 6) (0 :: GLushort)
        glBufferData gl_ARRAY_BUFFER (fI bytesize) ptr gl_DYNAMIC_DRAW
    -- NOTE(review): attPos reads only 3 components of the 4 bytes stored
    -- per position, so the 4th byte acts as padding -- confirm intended.
    glVertexAttribPointer attPos 3 gl_BYTE gl_FALSE 8 (mkPtrGLvoid 0)
    glVertexAttribPointer attTexCoord 2 gl_UNSIGNED_SHORT gl_FALSE
                          (8) (mkPtrGLvoid 4)
    return vbo
| null | https://raw.githubusercontent.com/karamellpelle/grid/56729e63ed6404fd6cfd6d11e73fa358f03c386f/source/Game/Run/RunData/Plain/ShadeSceneBegin.hs | haskell |
This file is part of grid.
grid is free software: you can redistribute it and/or modify
(at your option) any later version.
grid is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with grid. If not, see </>.
vbo | grid is a game written in Haskell
Copyright ( C ) 2018
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
module Game.Run.RunData.Plain.ShadeSceneBegin
(
ShadeSceneBegin (..),
loadShadeSceneBegin,
unloadShadeSceneBegin,
) where
import MyPrelude
import File
import OpenGL
import OpenGL.Helpers
import OpenGL.Shade
data ShadeSceneBegin =
ShadeSceneBegin
{
shadeSceneBeginPrg :: !GLuint,
shadeSceneBeginUniProjModvMatrix :: !GLint,
shadeSceneBeginUniAlpha :: !GLint,
shadeSceneBeginUniD :: !GLint,
shadeSceneBeginUniDInv :: !GLint,
shadeSceneBeginUniTweak :: !GLint,
shadeSceneBeginVAO :: !GLuint,
shadeSceneBeginVBO :: !GLuint
}
loadShadeSceneBegin :: IO ShadeSceneBegin
loadShadeSceneBegin = do
vsh <- fileStaticData "shaders/SceneBegin.vsh"
fsh <- fileStaticData "shaders/SceneBegin.fsh"
prg <- createPrg vsh fsh [ (attPos, "a_pos"),
(attTexCoord, "a_texcoord") ] [
(tex0, "u_tex") ]
uProjModvMatrix <- getUniformLocation prg "u_projmodv_matrix"
uAlpha <- getUniformLocation prg "u_alpha"
uD <- getUniformLocation prg "u_d"
uDInv <- getUniformLocation prg "u_d_inv"
uTweak <- getUniformLocation prg "u_tweak"
vao
vao <- bindNewVAO
glEnableVertexAttribArray attPos
glEnableVertexAttribArray attTexCoord
vbo <- makeVBO
return ShadeSceneBegin
{
shadeSceneBeginPrg = prg,
shadeSceneBeginUniProjModvMatrix = uProjModvMatrix,
shadeSceneBeginUniAlpha = uAlpha,
shadeSceneBeginUniD = uD,
shadeSceneBeginUniDInv = uDInv,
shadeSceneBeginUniTweak = uTweak,
shadeSceneBeginVAO = vao,
shadeSceneBeginVBO = vbo
}
unloadShadeSceneBegin :: ShadeSceneBegin -> IO ()
unloadShadeSceneBegin sh = do
return ()
makeVBO :: IO GLuint
makeVBO = do
vbo <- bindNewBuf gl_ARRAY_BUFFER
let elemsize = 4 * 1 + 2 * 2
bytesize = 4 * elemsize
allocaBytes bytesize $ \ptr -> do
pokeByteOff ptr (0 + 0) (-1 :: GLbyte)
pokeByteOff ptr (0 + 1) (1 :: GLbyte)
pokeByteOff ptr (0 + 2) (0 :: GLbyte)
pokeByteOff ptr (0 + 3) (1 :: GLbyte)
pokeByteOff ptr (0 + 4) (0 :: GLushort)
pokeByteOff ptr (0 + 6) (1 :: GLushort)
pokeByteOff ptr (8 + 0) (-1 :: GLbyte)
pokeByteOff ptr (8 + 1) (-1 :: GLbyte)
pokeByteOff ptr (8 + 2) (0 :: GLbyte)
pokeByteOff ptr (8 + 3) (1 :: GLbyte)
pokeByteOff ptr (8 + 4) (0 :: GLushort)
pokeByteOff ptr (8 + 6) (0 :: GLushort)
pokeByteOff ptr (16 + 0) (1 :: GLbyte)
pokeByteOff ptr (16 + 1) (1 :: GLbyte)
pokeByteOff ptr (16 + 2) (0 :: GLbyte)
pokeByteOff ptr (16 + 3) (1 :: GLbyte)
pokeByteOff ptr (16 + 4) (1 :: GLushort)
pokeByteOff ptr (16 + 6) (1 :: GLushort)
pokeByteOff ptr (24 + 0) (1 :: GLbyte)
pokeByteOff ptr (24 + 1) (-1 :: GLbyte)
pokeByteOff ptr (24 + 2) (0 :: GLbyte)
pokeByteOff ptr (24 + 3) (1 :: GLbyte)
pokeByteOff ptr (24 + 4) (1 :: GLushort)
pokeByteOff ptr (24 + 6) (0 :: GLushort)
glBufferData gl_ARRAY_BUFFER (fI bytesize) ptr gl_DYNAMIC_DRAW
glVertexAttribPointer attPos 3 gl_BYTE gl_FALSE 8 (mkPtrGLvoid 0)
glVertexAttribPointer attTexCoord 2 gl_UNSIGNED_SHORT gl_FALSE
(8) (mkPtrGLvoid 4)
return vbo
|
248262878fa76e001049a1b808938ad70cd8ca4ad2b4834db6e9ea4403dd2bcb | clojure-interop/google-cloud-clients | DatabaseAdminSettings$Builder.clj | (ns com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings$Builder
"Builder for DatabaseAdminSettings."
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.spanner.admin.database.v1 DatabaseAdminSettings$Builder]))
(defn drop-database-settings
"Returns the builder for the settings used for calls to dropDatabase.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.DropDatabaseRequest,com.google.protobuf.Empty>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.dropDatabaseSettings))))
(defn apply-to-all-unary-methods
"Applies the given settings updater function to all of the unary API methods in this service.
Note: This method does not support applying settings to streaming methods.
settings-updater - `com.google.api.core.ApiFunction`
returns: `com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings$Builder`
throws: java.lang.Exception"
(^com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings$Builder [^DatabaseAdminSettings$Builder this ^com.google.api.core.ApiFunction settings-updater]
(-> this (.applyToAllUnaryMethods settings-updater))))
(defn list-databases-settings
"Returns the builder for the settings used for calls to listDatabases.
returns: `com.google.api.gax.rpc.PagedCallSettings.Builder<com.google.spanner.admin.database.v1.ListDatabasesRequest,com.google.spanner.admin.database.v1.ListDatabasesResponse,com.google.cloud.spanner.admin.database.v1.DatabaseAdminClient$ListDatabasesPagedResponse>`"
(^com.google.api.gax.rpc.PagedCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.listDatabasesSettings))))
(defn test-iam-permissions-settings
"Returns the builder for the settings used for calls to testIamPermissions.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.iam.v1.TestIamPermissionsRequest,com.google.iam.v1.TestIamPermissionsResponse>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.testIamPermissionsSettings))))
(defn get-database-settings
"Returns the builder for the settings used for calls to getDatabase.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.GetDatabaseRequest,com.google.spanner.admin.database.v1.Database>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getDatabaseSettings))))
(defn create-database-operation-settings
"Returns the builder for the settings used for calls to createDatabase.
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallSettings.Builder<com.google.spanner.admin.database.v1.CreateDatabaseRequest,com.google.spanner.admin.database.v1.Database,com.google.spanner.admin.database.v1.CreateDatabaseMetadata>`"
([^DatabaseAdminSettings$Builder this]
(-> this (.createDatabaseOperationSettings))))
(defn set-iam-policy-settings
"Returns the builder for the settings used for calls to setIamPolicy.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.iam.v1.SetIamPolicyRequest,com.google.iam.v1.Policy>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.setIamPolicySettings))))
(defn update-database-ddl-settings
"Returns the builder for the settings used for calls to updateDatabaseDdl.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.updateDatabaseDdlSettings))))
(defn build
"returns: `com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings`
throws: java.io.IOException"
(^com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings [^DatabaseAdminSettings$Builder this]
(-> this (.build))))
(defn create-database-settings
"Returns the builder for the settings used for calls to createDatabase.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.CreateDatabaseRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.createDatabaseSettings))))
(defn get-stub-settings-builder
"returns: `com.google.cloud.spanner.admin.database.v1.stub.DatabaseAdminStubSettings$Builder`"
(^com.google.cloud.spanner.admin.database.v1.stub.DatabaseAdminStubSettings$Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getStubSettingsBuilder))))
(defn get-iam-policy-settings
"Returns the builder for the settings used for calls to getIamPolicy.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.iam.v1.GetIamPolicyRequest,com.google.iam.v1.Policy>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getIamPolicySettings))))
(defn update-database-ddl-operation-settings
"Returns the builder for the settings used for calls to updateDatabaseDdl.
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallSettings.Builder<com.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest,com.google.protobuf.Empty,com.google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata>`"
([^DatabaseAdminSettings$Builder this]
(-> this (.updateDatabaseDdlOperationSettings))))
(defn get-database-ddl-settings
"Returns the builder for the settings used for calls to getDatabaseDdl.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.GetDatabaseDdlRequest,com.google.spanner.admin.database.v1.GetDatabaseDdlResponse>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getDatabaseDdlSettings))))
| null | https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.spanner/src/com/google/cloud/spanner/admin/database/v1/DatabaseAdminSettings%24Builder.clj | clojure | (ns com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings$Builder
"Builder for DatabaseAdminSettings."
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.spanner.admin.database.v1 DatabaseAdminSettings$Builder]))
(defn drop-database-settings
"Returns the builder for the settings used for calls to dropDatabase.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.DropDatabaseRequest,com.google.protobuf.Empty>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.dropDatabaseSettings))))
(defn apply-to-all-unary-methods
"Applies the given settings updater function to all of the unary API methods in this service.
Note: This method does not support applying settings to streaming methods.
settings-updater - `com.google.api.core.ApiFunction`
returns: `com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings$Builder`
throws: java.lang.Exception"
(^com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings$Builder [^DatabaseAdminSettings$Builder this ^com.google.api.core.ApiFunction settings-updater]
(-> this (.applyToAllUnaryMethods settings-updater))))
(defn list-databases-settings
"Returns the builder for the settings used for calls to listDatabases.
returns: `com.google.api.gax.rpc.PagedCallSettings.Builder<com.google.spanner.admin.database.v1.ListDatabasesRequest,com.google.spanner.admin.database.v1.ListDatabasesResponse,com.google.cloud.spanner.admin.database.v1.DatabaseAdminClient$ListDatabasesPagedResponse>`"
(^com.google.api.gax.rpc.PagedCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.listDatabasesSettings))))
(defn test-iam-permissions-settings
"Returns the builder for the settings used for calls to testIamPermissions.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.iam.v1.TestIamPermissionsRequest,com.google.iam.v1.TestIamPermissionsResponse>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.testIamPermissionsSettings))))
(defn get-database-settings
"Returns the builder for the settings used for calls to getDatabase.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.GetDatabaseRequest,com.google.spanner.admin.database.v1.Database>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getDatabaseSettings))))
(defn create-database-operation-settings
"Returns the builder for the settings used for calls to createDatabase.
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallSettings.Builder<com.google.spanner.admin.database.v1.CreateDatabaseRequest,com.google.spanner.admin.database.v1.Database,com.google.spanner.admin.database.v1.CreateDatabaseMetadata>`"
([^DatabaseAdminSettings$Builder this]
(-> this (.createDatabaseOperationSettings))))
(defn set-iam-policy-settings
"Returns the builder for the settings used for calls to setIamPolicy.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.iam.v1.SetIamPolicyRequest,com.google.iam.v1.Policy>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.setIamPolicySettings))))
(defn update-database-ddl-settings
"Returns the builder for the settings used for calls to updateDatabaseDdl.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.updateDatabaseDdlSettings))))
(defn build
"returns: `com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings`
throws: java.io.IOException"
(^com.google.cloud.spanner.admin.database.v1.DatabaseAdminSettings [^DatabaseAdminSettings$Builder this]
(-> this (.build))))
(defn create-database-settings
"Returns the builder for the settings used for calls to createDatabase.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.CreateDatabaseRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.createDatabaseSettings))))
(defn get-stub-settings-builder
"returns: `com.google.cloud.spanner.admin.database.v1.stub.DatabaseAdminStubSettings$Builder`"
(^com.google.cloud.spanner.admin.database.v1.stub.DatabaseAdminStubSettings$Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getStubSettingsBuilder))))
(defn get-iam-policy-settings
"Returns the builder for the settings used for calls to getIamPolicy.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.iam.v1.GetIamPolicyRequest,com.google.iam.v1.Policy>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getIamPolicySettings))))
(defn update-database-ddl-operation-settings
"Returns the builder for the settings used for calls to updateDatabaseDdl.
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallSettings.Builder<com.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest,com.google.protobuf.Empty,com.google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata>`"
([^DatabaseAdminSettings$Builder this]
(-> this (.updateDatabaseDdlOperationSettings))))
(defn get-database-ddl-settings
"Returns the builder for the settings used for calls to getDatabaseDdl.
returns: `com.google.api.gax.rpc.UnaryCallSettings.Builder<com.google.spanner.admin.database.v1.GetDatabaseDdlRequest,com.google.spanner.admin.database.v1.GetDatabaseDdlResponse>`"
(^com.google.api.gax.rpc.UnaryCallSettings.Builder [^DatabaseAdminSettings$Builder this]
(-> this (.getDatabaseDdlSettings))))
| |
900ba9c7a39e315ef1fa3545afee0ae0021b889579a670af2c1ab37c25add36f | ekmett/algebra | Hyperbolic.hs | # LANGUAGE MultiParamTypeClasses , FlexibleInstances , TypeFamilies , UndecidableInstances , DeriveDataTypeable #
module Numeric.Coalgebra.Hyperbolic
( Hyperbolic(..)
, HyperBasis(..)
, Hyper(..)
) where
import Control.Applicative
import Control.Monad.Reader.Class
import Data.Data
import Data.Distributive
import Data.Functor.Bind
import Data.Functor.Rep
import Data.Foldable
import Data.Ix
import Data.Semigroup.Traversable
import Data.Semigroup.Foldable
import Data.Semigroup
import Data.Traversable
import Numeric.Algebra
import Numeric.Coalgebra.Hyperbolic.Class
import Prelude hiding ((-),(+),(*),negate,subtract, fromInteger, cosh, sinh)
-- complex basis
-- | Basis of the two-dimensional hyperbolic space: the cosh and sinh
--   components.
data HyperBasis = Cosh | Sinh deriving (Eq,Ord,Show,Read,Enum,Ix,Bounded,Data,Typeable)

-- | Coefficients over 'HyperBasis': @'Hyper' c s@ stores the 'Cosh'
--   component first and the 'Sinh' component second (see 'Representable').
data Hyper a = Hyper a a deriving (Eq,Show,Read,Data,Typeable)

instance Hyperbolic HyperBasis where
  cosh = Cosh
  sinh = Sinh

-- | 'cosh' and 'sinh' are the two unit coefficient vectors.
instance Rig r => Hyperbolic (Hyper r) where
  cosh = Hyper one zero
  sinh = Hyper zero one

-- | Same unit vectors in functional (basis-indexed) form.
instance Rig r => Hyperbolic (HyperBasis -> r) where
  cosh Sinh = zero
  cosh Cosh = one
  sinh Sinh = one
  sinh Cosh = zero

-- | 'Hyper' is a pair indexed by 'HyperBasis'; most instances below are
--   derived from this representation via the *Rep combinators.
instance Representable Hyper where
  type Rep Hyper = HyperBasis
  tabulate f = Hyper (f Cosh) (f Sinh)
  index (Hyper a _ ) Cosh = a
  index (Hyper _ b ) Sinh = b

instance Distributive Hyper where
  distribute = distributeRep

instance Functor Hyper where
  fmap f (Hyper a b) = Hyper (f a) (f b)

instance Apply Hyper where
  (<.>) = apRep

instance Applicative Hyper where
  pure = pureRep
  (<*>) = apRep

instance Bind Hyper where
  (>>-) = bindRep

instance Monad Hyper where
  return = pureRep
  (>>=) = bindRep

instance MonadReader HyperBasis Hyper where
  ask = askRep
  local = localRep

instance Foldable Hyper where
  foldMap f (Hyper a b) = f a `mappend` f b

instance Traversable Hyper where
  traverse f (Hyper a b) = Hyper <$> f a <*> f b

instance Foldable1 Hyper where
  foldMap1 f (Hyper a b) = f a <> f b

instance Traversable1 Hyper where
  traverse1 f (Hyper a b) = Hyper <$> f a <.> f b

-- additive / module structure is componentwise
instance Additive r => Additive (Hyper r) where
  (+) = addRep
  sinnum1p = sinnum1pRep

instance LeftModule r s => LeftModule r (Hyper s) where
  r .* Hyper a b = Hyper (r .* a) (r .* b)

instance RightModule r s => RightModule r (Hyper s) where
  Hyper a b *. r = Hyper (a *. r) (b *. r)

instance Monoidal r => Monoidal (Hyper r) where
  zero = zeroRep
  sinnum = sinnumRep

instance Group r => Group (Hyper r) where
  (-) = minusRep
  negate = negateRep
  subtract = subtractRep
  times = timesRep

instance Abelian r => Abelian (Hyper r)

instance Idempotent r => Idempotent (Hyper r)

-- | Partitions each component independently and combines the results.
instance Partitionable r => Partitionable (Hyper r) where
  partitionWith f (Hyper a b) = id =<<
    partitionWith (\a1 a2 ->
    partitionWith (\b1 b2 -> f (Hyper a1 b1) (Hyper a2 b2)) b) a
-- | The trivial diagonal algebra: basis elements multiply with themselves
--   and annihilate each other.
instance Semiring k => Algebra k HyperBasis where
  mult f = f' where
    fs = f Sinh Sinh
    fc = f Cosh Cosh
    f' Sinh = fs
    f' Cosh = fc

instance Semiring k => UnitalAlgebra k HyperBasis where
  unit = const

-- | The hyperbolic trigonometric coalgebra, encoding the angle-addition
--   identities (cosh*cosh and sinh*sinh feed 'Cosh'; cross terms feed 'Sinh').
instance (Commutative k, Semiring k) => Coalgebra k HyperBasis where
  comult f = f' where
    fs = f Sinh
    fc = f Cosh
    f' Sinh Sinh = fc
    f' Sinh Cosh = fs
    f' Cosh Sinh = fs
    f' Cosh Cosh = fc

instance (Commutative k, Semiring k) => CounitalCoalgebra k HyperBasis where
  counit f = f Cosh

instance (Commutative k, Semiring k) => Bialgebra k HyperBasis

-- | Involution: conjugate the 'Cosh' part, negate the 'Sinh' part.
instance (Commutative k, Group k, InvolutiveSemiring k) => InvolutiveAlgebra k HyperBasis where
  inv f = f' where
    afc = adjoint (f Cosh)
    nfs = negate (f Sinh)
    f' Cosh = afc
    f' Sinh = nfs

instance (Commutative k, Group k, InvolutiveSemiring k) => InvolutiveCoalgebra k HyperBasis where
  coinv = inv

instance (Commutative k, Group k, InvolutiveSemiring k) => HopfAlgebra k HyperBasis where
  antipode = inv

-- multiplication on 'Hyper' values is induced from the coalgebra above
instance (Commutative k, Semiring k) => Multiplicative (Hyper k) where
  (*) = mulRep

instance (Commutative k, Semiring k) => Commutative (Hyper k)

instance (Commutative k, Semiring k) => Semiring (Hyper k)

instance (Commutative k, Rig k) => Unital (Hyper k) where
  one = Hyper one zero

-- numeric literals embed into the 'Cosh' component
instance (Commutative r, Rig r) => Rig (Hyper r) where
  fromNatural n = Hyper (fromNatural n) zero

instance (Commutative r, Ring r) => Ring (Hyper r) where
  fromInteger n = Hyper (fromInteger n) zero

instance (Commutative r, Semiring r) => LeftModule (Hyper r) (Hyper r) where (.*) = (*)
instance (Commutative r, Semiring r) => RightModule (Hyper r) (Hyper r) where (*.) = (*)

instance (Commutative r, Group r, InvolutiveSemiring r) => InvolutiveMultiplication (Hyper r) where
  adjoint (Hyper a b) = Hyper (adjoint a) (negate b)

instance (Commutative r, Group r, InvolutiveSemiring r) => InvolutiveSemiring (Hyper r)
| the trivial diagonal algebra
| the hyperbolic trigonometric coalgebra | # LANGUAGE MultiParamTypeClasses , FlexibleInstances , TypeFamilies , UndecidableInstances , DeriveDataTypeable #
module Numeric.Coalgebra.Hyperbolic
( Hyperbolic(..)
, HyperBasis(..)
, Hyper(..)
) where
import Control.Applicative
import Control.Monad.Reader.Class
import Data.Data
import Data.Distributive
import Data.Functor.Bind
import Data.Functor.Rep
import Data.Foldable
import Data.Ix
import Data.Semigroup.Traversable
import Data.Semigroup.Foldable
import Data.Semigroup
import Data.Traversable
import Numeric.Algebra
import Numeric.Coalgebra.Hyperbolic.Class
import Prelude hiding ((-),(+),(*),negate,subtract, fromInteger, cosh, sinh)
data HyperBasis = Cosh | Sinh deriving (Eq,Ord,Show,Read,Enum,Ix,Bounded,Data,Typeable)
data Hyper a = Hyper a a deriving (Eq,Show,Read,Data,Typeable)
instance Hyperbolic HyperBasis where
cosh = Cosh
sinh = Sinh
instance Rig r => Hyperbolic (Hyper r) where
cosh = Hyper one zero
sinh = Hyper zero one
instance Rig r => Hyperbolic (HyperBasis -> r) where
cosh Sinh = zero
cosh Cosh = one
sinh Sinh = one
sinh Cosh = zero
instance Representable Hyper where
type Rep Hyper = HyperBasis
tabulate f = Hyper (f Cosh) (f Sinh)
index (Hyper a _ ) Cosh = a
index (Hyper _ b ) Sinh = b
instance Distributive Hyper where
distribute = distributeRep
instance Functor Hyper where
fmap f (Hyper a b) = Hyper (f a) (f b)
instance Apply Hyper where
(<.>) = apRep
instance Applicative Hyper where
pure = pureRep
(<*>) = apRep
instance Bind Hyper where
(>>-) = bindRep
instance Monad Hyper where
return = pureRep
(>>=) = bindRep
instance MonadReader HyperBasis Hyper where
ask = askRep
local = localRep
instance Foldable Hyper where
foldMap f (Hyper a b) = f a `mappend` f b
instance Traversable Hyper where
traverse f (Hyper a b) = Hyper <$> f a <*> f b
instance Foldable1 Hyper where
foldMap1 f (Hyper a b) = f a <> f b
instance Traversable1 Hyper where
traverse1 f (Hyper a b) = Hyper <$> f a <.> f b
instance Additive r => Additive (Hyper r) where
(+) = addRep
sinnum1p = sinnum1pRep
instance LeftModule r s => LeftModule r (Hyper s) where
r .* Hyper a b = Hyper (r .* a) (r .* b)
instance RightModule r s => RightModule r (Hyper s) where
Hyper a b *. r = Hyper (a *. r) (b *. r)
instance Monoidal r => Monoidal (Hyper r) where
zero = zeroRep
sinnum = sinnumRep
instance Group r => Group (Hyper r) where
(-) = minusRep
negate = negateRep
subtract = subtractRep
times = timesRep
instance Abelian r => Abelian (Hyper r)
instance Idempotent r => Idempotent (Hyper r)
instance Partitionable r => Partitionable (Hyper r) where
partitionWith f (Hyper a b) = id =<<
partitionWith (\a1 a2 ->
partitionWith (\b1 b2 -> f (Hyper a1 b1) (Hyper a2 b2)) b) a
instance Semiring k => Algebra k HyperBasis where
mult f = f' where
fs = f Sinh Sinh
fc = f Cosh Cosh
f' Sinh = fs
f' Cosh = fc
instance Semiring k => UnitalAlgebra k HyperBasis where
unit = const
instance (Commutative k, Semiring k) => Coalgebra k HyperBasis where
comult f = f' where
fs = f Sinh
fc = f Cosh
f' Sinh Sinh = fc
f' Sinh Cosh = fs
f' Cosh Sinh = fs
f' Cosh Cosh = fc
instance (Commutative k, Semiring k) => CounitalCoalgebra k HyperBasis where
counit f = f Cosh
instance (Commutative k, Semiring k) => Bialgebra k HyperBasis
instance (Commutative k, Group k, InvolutiveSemiring k) => InvolutiveAlgebra k HyperBasis where
inv f = f' where
afc = adjoint (f Cosh)
nfs = negate (f Sinh)
f' Cosh = afc
f' Sinh = nfs
instance (Commutative k, Group k, InvolutiveSemiring k) => InvolutiveCoalgebra k HyperBasis where
coinv = inv
instance (Commutative k, Group k, InvolutiveSemiring k) => HopfAlgebra k HyperBasis where
antipode = inv
instance (Commutative k, Semiring k) => Multiplicative (Hyper k) where
(*) = mulRep
instance (Commutative k, Semiring k) => Commutative (Hyper k)
instance (Commutative k, Semiring k) => Semiring (Hyper k)
instance (Commutative k, Rig k) => Unital (Hyper k) where
one = Hyper one zero
instance (Commutative r, Rig r) => Rig (Hyper r) where
fromNatural n = Hyper (fromNatural n) zero
instance (Commutative r, Ring r) => Ring (Hyper r) where
fromInteger n = Hyper (fromInteger n) zero
instance (Commutative r, Semiring r) => LeftModule (Hyper r) (Hyper r) where (.*) = (*)
instance (Commutative r, Semiring r) => RightModule (Hyper r) (Hyper r) where (*.) = (*)
instance (Commutative r, Group r, InvolutiveSemiring r) => InvolutiveMultiplication (Hyper r) where
adjoint (Hyper a b) = Hyper (adjoint a) (negate b)
instance (Commutative r, Group r, InvolutiveSemiring r) => InvolutiveSemiring (Hyper r)
|
fb61d3b916195d004efebcc46da2173e50dc3aef298a710ce9b1d2c088405857 | tolysz/ghcjs-stack | PreProcess.hs | -----------------------------------------------------------------------------
-- |
Module : Distribution . Simple . PreProcess
Copyright : ( c ) 2003 - 2005 , ,
-- License : BSD3
--
-- Maintainer :
-- Portability : portable
--
This defines a ' PreProcessor ' abstraction which represents a pre - processor
-- that can transform one kind of file into another. There is also a
-- 'PPSuffixHandler' which is a combination of a file extension and a function
for configuring a ' PreProcessor ' . It defines a bunch of known built - in
preprocessors like @cpp@ , @cpphs@ , @c2hs@ , , @happy@ , @alex@ etc and
-- lists them in 'knownSuffixHandlers'. On top of this it provides a function
-- for actually preprocessing some sources given a bunch of known suffix
-- handlers. This module is not as good as it could be, it could really do with
-- a rewrite to address some of the problems we have with pre-processors.
module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras,
knownSuffixHandlers, ppSuffixes,
PPSuffixHandler, PreProcessor(..),
mkSimplePreProcessor, runSimplePreProcessor,
ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs,
ppHappy, ppAlex, ppUnlit, platformDefines
)
where
import Distribution.Simple.PreProcess.Unlit
import Distribution.Package
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription as PD
import qualified Distribution.InstalledPackageInfo as Installed
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.CCompiler
import Distribution.Simple.Compiler
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Simple.Program
import Distribution.Simple.Test.LibV09
import Distribution.System
import Distribution.Text
import Distribution.Version
import Distribution.Verbosity
import Control.Monad
import Data.Maybe (fromMaybe)
import Data.List (nub, isSuffixOf)
import System.Directory (doesFileExist)
import System.Info (os, arch)
import System.FilePath (splitExtension, dropExtensions, (</>), (<.>),
takeDirectory, normalise, replaceExtension,
takeExtensions)
-- |The interface to a preprocessor, which may be implemented using an
-- external program, but need not be. The arguments are the name of
-- the input file, the name of the output file and a verbosity level.
-- Here is a simple example that merely prepends a comment to the given
-- source file:
--
> ppTestHandler : : PreProcessor
-- > ppTestHandler =
> PreProcessor {
-- > platformIndependent = True,
-- > runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
-- > do info verbosity (inFile++" has been preprocessed to "++outFile)
-- > stuff <- readFile inFile
-- > writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff)
> return ExitSuccess
--
-- We split the input and output file names into a base directory and the
-- rest of the file name. The input base dir is the path in the list of search
-- dirs that this file was found in. The output base dir is the build dir where
-- all the generated source files are put.
--
-- The reason for splitting it up this way is that some pre-processors don't
-- simply generate one output .hs file from one input file but have
-- dependencies on other generated files (notably c2hs, where building one
-- .hs file may require reading other files, and then compiling the .hs
-- file may require reading a generated .h file). In these cases the generated
-- files need to embed relative path names to each other (eg the generated .hs
-- file mentions the .h file in the FFI imports). This path must be relative to
-- the base directory where the generated files are located, it cannot be
-- relative to the top level of the build tree because the compilers do not
-- look for .h files relative to there, ie we do not use \"-I .\", instead we
-- use \"-I dist\/build\" (or whatever dist dir has been set by the user)
--
-- Most pre-processors do not care of course, so mkSimplePreProcessor and
-- runSimplePreProcessor functions handle the simple case.
--
data PreProcessor = PreProcessor {
      -- Is the output of the pre-processor platform independent? eg happy output
      -- is portable haskell but c2hs's output is platform dependent.
      -- This matters since only platform independent generated code can be
      -- included into a source tarball.
      platformIndependent :: Bool,

      -- TODO: deal with pre-processors that have implementation dependent output
      --       eg alex and happy have --ghc flags. However we can't really include
      --       ghc-specific code into supposedly portable source tarballs.

      runPreProcessor :: (FilePath, FilePath) -- Location of the source file relative to a base dir
                      -> (FilePath, FilePath) -- Output file name, relative to an output base dir
                      -> Verbosity            -- verbosity
                      -> IO ()                -- Should exit if the preprocessor fails
    }
-- | Function to determine paths to possible extra C sources for a
-- preprocessor: just takes the path to the build directory and uses
-- this to search for C sources with names that match the
-- preprocessor's output name format.
type PreProcessorExtras = FilePath -> IO [FilePath]
-- | Lift a function over full input\/output file paths into the
-- 'runPreProcessor' calling convention, which supplies each path as a
-- (base dir, relative path) pair. The two components of each pair are
-- joined and normalised before being handed to the simple function.
mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ())
                      -> (FilePath, FilePath)
                      -> (FilePath, FilePath) -> Verbosity -> IO ()
mkSimplePreProcessor simplePP (inBase, inRel) (outBase, outRel) verbosity =
    let fullInPath  = normalise (inBase  </> inRel)
        fullOutPath = normalise (outBase </> outRel)
    in simplePP fullInPath fullOutPath verbosity
-- | Run a 'PreProcessor' on a single input\/output file pair, using the
-- current directory as both the input and the output base directory.
runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity
                      -> IO ()
runSimplePreProcessor pp inFile outFile =
    runPreProcessor pp (".", inFile) (".", outFile)
-- |A preprocessor for turning non-Haskell files with the given extension
-- into plain Haskell source files.
-- The String is the file extension, given without the leading dot (eg "hsc").
type PPSuffixHandler
    = (String, BuildInfo -> LocalBuildInfo -> PreProcessor)
-- | Apply preprocessors to the sources from 'hsSourceDirs' for a given
-- component (lib, exe, or test suite).
preprocessComponent :: PackageDescription
                    -> Component
                    -> LocalBuildInfo
                    -> Bool             -- ^ preprocess for sdist rather than build
                    -> Verbosity
                    -> [PPSuffixHandler]
                    -> IO ()
preprocessComponent pd comp lbi isSrcDist verbosity handlers = case comp of
  -- Library: preprocess every exposed and other module.
  (CLib lib@Library{ libBuildInfo = bi }) -> do
    let dirs = hsSourceDirs bi ++ [autogenModulesDir lbi]
    setupMessage verbosity "Preprocessing library" (packageId pd)
    forM_ (map ModuleName.toFilePath $ libModules lib) $
      pre dirs (buildDir lbi) (localHandlers bi)
  -- Executable: preprocess its other-modules, then the main module itself
  -- (the main module's path is a file path, so searched only in hsSourceDirs).
  (CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do
    let exeDir = buildDir lbi </> nm </> nm ++ "-tmp"
        dirs   = hsSourceDirs bi ++ [autogenModulesDir lbi]
    setupMessage verbosity ("Preprocessing executable '" ++ nm ++ "' for") (packageId pd)
    forM_ (map ModuleName.toFilePath $ otherModules bi) $
      pre dirs exeDir (localHandlers bi)
    pre (hsSourceDirs bi) exeDir (localHandlers bi) $
      dropExtensions (modulePath exe)
  -- Test suite: the two supported interfaces get their own tmp dirs;
  -- the lib-style (V09) interface additionally needs a generated stub.
  CTest test@TestSuite{ testName = nm } -> do
    setupMessage verbosity ("Preprocessing test suite '" ++ nm ++ "' for") (packageId pd)
    case testInterface test of
      TestSuiteExeV10 _ f ->
          preProcessTest test f $ buildDir lbi </> testName test
              </> testName test ++ "-tmp"
      TestSuiteLibV09 _ _ -> do
          let testDir = buildDir lbi </> stubName test
                  </> stubName test ++ "-tmp"
          writeSimpleTestStub test testDir
          preProcessTest test (stubFilePath test) testDir
      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
                                    ++ "suite type " ++ display tt
  -- Benchmark: analogous to the exe-style test suite.
  CBench bm@Benchmark{ benchmarkName = nm } -> do
    setupMessage verbosity ("Preprocessing benchmark '" ++ nm ++ "' for") (packageId pd)
    case benchmarkInterface bm of
      BenchmarkExeV10 _ f ->
          preProcessBench bm f $ buildDir lbi </> benchmarkName bm
              </> benchmarkName bm ++ "-tmp"
      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
                                      ++ "type " ++ display tt
  where
    -- Suffixes that need no preprocessing at all.
    builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"]
    builtinCSuffixes = cSourceExtensions
    builtinSuffixes = builtinHaskellSuffixes ++ builtinCSuffixes
    -- Instantiate each suffix handler against this component's BuildInfo.
    localHandlers bi = [(ext, h bi lbi) | (ext, h) <- handlers]
    pre dirs dir lhndlrs fp =
      preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs
    preProcessTest test = preProcessComponent (testBuildInfo test)
                          (testModules test)
    preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm)
                         (benchmarkModules bm)
    -- Shared driver for test suites and benchmarks: preprocess the listed
    -- modules, then the main (exe) module found next to the output dir.
    preProcessComponent bi modules exePath dir = do
        let biHandlers = localHandlers bi
            sourceDirs = hsSourceDirs bi ++ [ autogenModulesDir lbi ]
        sequence_ [ preprocessFile sourceDirs dir isSrcDist
                (ModuleName.toFilePath modu) verbosity builtinSuffixes
                biHandlers
              | modu <- modules ]
        preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist
            (dropExtensions $ exePath) verbosity
            builtinSuffixes biHandlers
--TODO: try to list all the modules that could not be found
--      not just the first one. It's annoying and slow due to the need
--      to reconfigure after editing the .cabal file each time.
-- |Find the first extension of the file that exists, and preprocess it
-- if required.
preprocessFile
  :: [FilePath]               -- ^source directories
  -> FilePath                 -- ^build directory
  -> Bool                     -- ^preprocess for sdist
  -> FilePath                 -- ^module file name
  -> Verbosity                -- ^verbosity
  -> [String]                 -- ^builtin suffixes
  -> [(String, PreProcessor)] -- ^possible preprocessors
  -> IO ()
preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do
    -- look for files in the various source dirs with this module name
    -- and a file extension of a known preprocessor
    psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile
    case psrcFiles of
        -- no preprocessor file exists, look for an ordinary source file
        -- just to make sure one actually exists at all for this module.
        -- Note: by looking in the target/output build dir too, we allow
        -- source files to appear magically in the target build dir without
        -- any corresponding "real" source file. This lets custom Setup.hs
        -- files generate source modules directly into the build dir without
        -- the rest of the build system being aware of it (somewhat dodgy)
      Nothing -> do
                 bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile
                 case bsrcFiles of
                   Nothing -> die $ "can't find source for " ++ baseFile
                                 ++ " in " ++ intercalate ", " searchLoc
                   _       -> return ()
        -- found a pre-processable file in one of the source dirs
      Just (psrcLoc, psrcRelFile) -> do
            let (srcStem, ext) = splitExtension psrcRelFile
                psrcFile = psrcLoc </> psrcRelFile
                -- The lookup cannot fail: psrcRelFile was found via the
                -- handlers' own extensions.
                pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected")
                               (lookup (tailNotNull ext) handlers)
            -- Preprocessing files for 'sdist' is different from preprocessing
            -- for 'build'. When preprocessing for sdist we preprocess to
            -- avoid that the user has to have the preprocessors available.
            -- ATM, we don't have a way to specify which files are to be
            -- preprocessed and which not, so for sdist we only process
            -- platform independent files and put them into the 'buildLoc'
            -- (which we assume is set to the temp. directory that will become
            -- the tarball).
            --TODO: eliminate sdist variant, just supply different handlers
            when (not forSDist || forSDist && platformIndependent pp) $ do
              -- look for existing pre-processed source file in the dest dir to
              -- see if we really have to re-run the preprocessor.
              ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile
              recomp <- case ppsrcFiles of
                          Nothing -> return True
                          Just ppsrcFile ->
                              psrcFile `moreRecentFile` ppsrcFile
              when recomp $ do
                let destDir = buildLoc </> dirName srcStem
                createDirectoryIfMissingVerbose verbosity True destDir
                runPreProcessorWithHsBootHack pp
                   (psrcLoc, psrcRelFile)
                   (buildLoc, srcStem <.> "hs")

  where
    dirName = takeDirectory
    -- Drop the leading dot from an extension; total on the empty string.
    tailNotNull [] = []
    tailNotNull x  = tail x

    -- FIXME: This is a somewhat nasty hack. GHC requires that hs-boot files
    -- be in the same place as the hs files, so if we put the hs file in dist/
    -- then we need to copy the hs-boot file there too. This should probably be
    -- done another way. Possibly we should also be looking for .lhs-boot
    -- files, but I think that preprocessors only produce .hs files.
    runPreProcessorWithHsBootHack pp
      (inBaseDir, inRelativeFile)
      (outBaseDir, outRelativeFile) = do
        runPreProcessor pp
          (inBaseDir, inRelativeFile)
          (outBaseDir, outRelativeFile) verbosity

        exists <- doesFileExist inBoot
        when exists $ copyFileVerbose verbosity inBoot outBoot
      where
        inBoot  = replaceExtension inFile  "hs-boot"
        outBoot = replaceExtension outFile "hs-boot"

        inFile  = normalise (inBaseDir  </> inRelativeFile)
        outFile = normalise (outBaseDir </> outRelativeFile)
-- ------------------------------------------------------------
-- * known preprocessors
-- ------------------------------------------------------------
-- | Preprocessor for GreenCard (.gc) files: runs the external greencard
-- tool with FFI output ("-tffi"). Output is platform dependent.
ppGreenCard :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppGreenCard _ lbi = PreProcessor
  { platformIndependent = False
  , runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
      rawSystemProgramConf verbosity greencardProgram (withPrograms lbi)
        ["-tffi", "-o" ++ outFile, inFile]
  }
-- This one is useful for preprocessors that can't handle literate source.
-- We also need a way to chain preprocessors.
ppUnlit :: PreProcessor
ppUnlit =
PreProcessor {
platformIndependent = True,
runPreProcessor = mkSimplePreProcessor $ \inFile outFile _verbosity ->
withUTF8FileContents inFile $ \contents ->
either (writeUTF8File outFile) die (unlit inFile contents)
}
-- | C preprocessor with no extra arguments; see 'ppCpp''.
ppCpp :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppCpp bi lbi = ppCpp' [] bi lbi
-- | C preprocessor with extra arguments. Uses the compiler's own cpp when
-- building with GHC (>= 6.6) or GHCJS, and falls back to cpphs otherwise.
ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor
ppCpp' extraArgs bi lbi =
    let args = getCppOptions bi lbi ++ extraArgs
    in case compilerFlavor (compiler lbi) of
         GHC   -> ppGhcCpp ghcProgram   (>= Version [6,6] []) args bi lbi
         GHCJS -> ppGhcCpp ghcjsProgram (const True)          args bi lbi
         _     -> ppCpphs args bi lbi
-- | Run the compiler itself (GHC or GHCJS) in cpp-only mode ("-E -cpp").
-- The @xHs@ predicate decides, from the compiler version, whether to pass
-- @-x hs@ so the input is treated as plain Haskell.
ppGhcCpp :: Program -> (Version -> Bool)
         -> [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor
ppGhcCpp program xHs extraArgs _bi lbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (prog, version, _) <- requireProgramVersion verbosity
                              program anyVersion (withPrograms lbi)
      rawSystemProgram verbosity prog $
          ["-E", "-cpp"]
          -- This is a bit of an ugly hack. We're going to
          -- unlit the file ourselves later on if appropriate,
          -- so we need GHC not to unlit it now or it'll get
          -- double-unlitted. In the future we might switch to
          -- using cpphs --unlit instead.
       ++ (if xHs version then ["-x", "hs"] else [])
       ++ [ "-optP-include", "-optP"++ (autogenModulesDir lbi </> cppHeaderName) ]
       ++ ["-o", outFile, inFile]
       ++ extraArgs
  }
-- | Run the external cpphs program as a C preprocessor. From cpphs 1.6 on
-- we can also force-include the generated cabal_macros header.
ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor
ppCpphs extraArgs _bi lbi = PreProcessor
  { platformIndependent = False
  , runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity
                                        cpphsProgram anyVersion (withPrograms lbi)
      let includeMacros
            | cpphsVersion >= Version [1,6] []
            = ["--include=" ++ (autogenModulesDir lbi </> cppHeaderName)]
            | otherwise
            = []
      rawSystemProgram verbosity cpphsProg $
           [ "-O" ++ outFile, inFile, "--noline", "--strip" ]
        ++ includeMacros
        ++ extraArgs
  }
-- | Preprocessor for .hsc files: runs the external hsc2hs tool, using gcc
-- as both C compiler and linker, with include/lib flags gathered from this
-- package and all its (installed) dependencies.
ppHsc2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppHsc2hs bi lbi =
    PreProcessor {
      platformIndependent = False,
      runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
        (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
        rawSystemProgramConf verbosity hsc2hsProgram (withPrograms lbi) $
            [ "--cc=" ++ programPath gccProg
            , "--ld=" ++ programPath gccProg ]

            -- Additional gcc options
         ++ [ "--cflag=" ++ opt | opt <- programDefaultArgs  gccProg
                                      ++ programOverrideArgs gccProg ]
         ++ [ "--lflag=" ++ opt | opt <- programDefaultArgs  gccProg
                                      ++ programOverrideArgs gccProg ]

            -- OSX frameworks:
         ++ [ what ++ "=-F" ++ opt
            | isOSX
            , opt <- nub (concatMap Installed.frameworkDirs pkgs)
            , what <- ["--cflag", "--lflag"] ]
         ++ [ "--lflag=" ++ arg
            | isOSX
            , opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs
            , arg <- ["-framework", opt] ]

            -- Note that on ELF systems, wherever we use -L, we must also use -R
            -- because presumably that -L dir is not on the normal path for the
            -- system's dynamic linker. This is needed because hsc2hs works by
            -- compiling a C program and then running it.

         ++ [ "--cflag=" ++ opt | opt <- platformDefines lbi ]

            -- Options from the current package:
         ++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs bi ]
         ++ [ "--cflag="   ++ opt | opt <- PD.ccOptions  bi
                                        ++ PD.cppOptions bi ]
         ++ [ "--cflag="   ++ opt | opt <-
                 [ "-I" ++ autogenModulesDir lbi,
                   "-include", autogenModulesDir lbi </> cppHeaderName ] ]
         ++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ]
         ++ [ "--lflag=-Wl,-R," ++ opt | isELF
                                       , opt <- PD.extraLibDirs bi ]
         ++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs  bi ]
         ++ [ "--lflag="   ++ opt | opt <- PD.ldOptions  bi ]

            -- Options from dependent packages
         ++ [ "--cflag=" ++ opt
            | pkg <- pkgs
            , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
                  ++ [ opt | opt <- Installed.ccOptions pkg ] ]
         ++ [ "--lflag=" ++ opt
            | pkg <- pkgs
            , opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs pkg ]
                  ++ [ "-Wl,-R," ++ opt | isELF
                                        , opt <- Installed.libraryDirs pkg ]
                  ++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ]
                  ++ [ opt | opt <- Installed.ldOptions pkg ] ]
         ++ ["-o", outFile, inFile]
    }
  where
    -- TODO: installedPkgs contains ALL dependencies associated with
    -- the package, but we really only want to look at packages for the
    -- *current* dependency. We should use PackageIndex.dependencyClosure
    -- on the direct depends of the component. Can't easily do that,
    -- because the signature of this function is wrong. Tracked with
    -- #2971 (which has a test case.)
    pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi))
    isOSX = case buildOS of OSX -> True; _ -> False
    isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True;
    packageHacks = case compilerFlavor (compiler lbi) of
      GHC   -> hackRtsPackage
      GHCJS -> hackRtsPackage
      _     -> id
    -- We don't link in the actual libraries of our dependencies, so
    -- the -u flags in the ldOptions of the rts package mean linking fails on
    -- OS X (it's ld is a tad stricter than gnu ld). Thus we remove the
    -- ldOptions for GHC's rts package:
    hackRtsPackage index =
      case PackageIndex.lookupPackageName index (PackageName "rts") of
        [(_, [rts])]
           -> PackageIndex.insert rts { Installed.ldOptions = [] } index
        _  -> error "No (or multiple) ghc rts package is registered!!"
ppHsc2hsExtras :: PreProcessorExtras
ppHsc2hsExtras buildBaseDir = filter ("_hsc.c" `isSuffixOf`) `fmap`
getDirectoryContentsRecursive buildBaseDir
-- | Preprocessor for .chs files: runs the external c2hs tool (>= 0.15),
-- with gcc as the C preprocessor. Unlike the simple preprocessors this
-- needs the raw (base dir, relative path) pairs so that c2hs can resolve
-- {#import#}s of other generated modules via --include.
ppC2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppC2hs bi lbi =
    PreProcessor {
      platformIndependent = False,
      runPreProcessor = \(inBaseDir, inRelativeFile)
                         (outBaseDir, outRelativeFile) verbosity -> do
        (c2hsProg, _, _) <- requireProgramVersion verbosity
                              c2hsProgram (orLaterVersion (Version [0,15] []))
                              (withPrograms lbi)
        (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
        rawSystemProgram verbosity c2hsProg $

            -- Options from the current package:
            [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ]
         ++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ]
         ++ [ "--cppopts=-include" ++ (autogenModulesDir lbi </> cppHeaderName) ]
         ++ [ "--include=" ++ outBaseDir ]

            -- Options from dependent packages
         ++ [ "--cppopts=" ++ opt
            | pkg <- pkgs
            , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
                  ++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg
                           -- only pass define and include flags through
                           , c `elem` "DIU" ] ]

            --TODO: install .chi files for packages, so we can --include
            -- those dirs here, for the dependencies

            -- input and output files
         ++ [ "--output-dir=" ++ outBaseDir
            , "--output=" ++ outRelativeFile
            , inBaseDir </> inRelativeFile ]
    }
  where
    pkgs = PackageIndex.topologicalOrder (installedPkgs lbi)
ppC2hsExtras :: PreProcessorExtras
ppC2hsExtras d = filter (\p -> takeExtensions p == ".chs.c") `fmap`
getDirectoryContentsRecursive d
-- TODO: perhaps use this with hsc2hs too
-- TODO: remove cc-options from cpphs for cabal-version: >= 1.10
-- | All the C-preprocessor flags relevant to this package: platform
-- defines, explicit cpp-options, include dirs, and any -D/-I/-U flags
-- that appear among the cc-options.
getCppOptions :: BuildInfo -> LocalBuildInfo -> [String]
getCppOptions bi lbi = concat
  [ platformDefines lbi
  , cppOptions bi
  , [ "-I" ++ dir | dir <- PD.includeDirs bi ]
  , [ opt | opt@('-':c:_) <- PD.ccOptions bi, c `elem` "DIU" ]
  ]
platformDefines :: LocalBuildInfo -> [String]
platformDefines lbi =
case compilerFlavor comp of
GHC ->
["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++
["-D" ++ os ++ "_BUILD_OS=1"] ++
["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
GHCJS ->
compatGlasgowHaskell ++
["-D__GHCJS__=" ++ versionInt version] ++
["-D" ++ os ++ "_BUILD_OS=1"] ++
["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
JHC -> ["-D__JHC__=" ++ versionInt version]
HaskellSuite {} ->
["-D__HASKELL_SUITE__"] ++
map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
_ -> []
where
comp = compiler lbi
Platform hostArch hostOS = hostPlatform lbi
version = compilerVersion comp
compatGlasgowHaskell =
maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v])
(compilerCompatVersion GHC comp)
-- TODO: move this into the compiler abstraction
FIXME : this forces GHC 's crazy 4.8.2 - > 408 convention on all
-- the other compilers. Check if that's really what they want.
versionInt :: Version -> String
versionInt (Version { versionBranch = [] }) = "1"
versionInt (Version { versionBranch = [n] }) = show n
versionInt (Version { versionBranch = n1:n2:_ })
6.8.x - > 608
6.10.x - > 610
let s1 = show n1
s2 = show n2
middle = case s2 of
_ : _ : _ -> ""
_ -> "0"
in s1 ++ middle ++ s2
osStr = case hostOS of
Linux -> ["linux"]
Windows -> ["mingw32"]
OSX -> ["darwin"]
FreeBSD -> ["freebsd"]
OpenBSD -> ["openbsd"]
NetBSD -> ["netbsd"]
DragonFly -> ["dragonfly"]
Solaris -> ["solaris2"]
AIX -> ["aix"]
HPUX -> ["hpux"]
IRIX -> ["irix"]
HaLVM -> []
IOS -> ["ios"]
Android -> ["android"]
Ghcjs -> ["ghcjs"]
Hurd -> ["hurd"]
OtherOS _ -> []
archStr = case hostArch of
I386 -> ["i386"]
X86_64 -> ["x86_64"]
PPC -> ["powerpc"]
PPC64 -> ["powerpc64"]
Sparc -> ["sparc"]
Arm -> ["arm"]
Mips -> ["mips"]
SH -> []
IA64 -> ["ia64"]
S390 -> ["s390"]
Alpha -> ["alpha"]
Hppa -> ["hppa"]
Rs6000 -> ["rs6000"]
M68k -> ["m68k"]
Vax -> ["vax"]
JavaScript -> ["javascript"]
OtherArch _ -> []
-- | Preprocessor for happy (.y, .ly) grammar files. The generated parser
-- is plain Haskell, hence platform independent.
ppHappy :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppHappy _ lbi =
    (standardPP lbi happyProgram flags) { platformIndependent = True }
  where
    -- "-agc" (array-based, GHC-optimised, with coercions) for GHC-like
    -- compilers; no extra flags otherwise.
    flags = case compilerFlavor (compiler lbi) of
              GHC   -> ["-agc"]
              GHCJS -> ["-agc"]
              _     -> []
-- | Preprocessor for alex (.x) lexer files. The generated lexer is plain
-- Haskell, hence platform independent.
ppAlex :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppAlex _ lbi =
    (standardPP lbi alexProgram flags) { platformIndependent = True }
  where
    -- "-g" (GHC-optimised output) for GHC-like compilers; nothing otherwise.
    flags = case compilerFlavor (compiler lbi) of
              GHC   -> ["-g"]
              GHCJS -> ["-g"]
              _     -> []
-- | Build a 'PreProcessor' from an external program invoked in the common
-- @prog args -o outFile inFile@ style.
standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor
standardPP lbi prog args = PreProcessor
  { platformIndependent = False
  , runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
      rawSystemProgramConf verbosity prog (withPrograms lbi)
        (args ++ ["-o", outFile, inFile])
  }
-- |Convenience function; get the suffixes of these preprocessors.
ppSuffixes :: [ PPSuffixHandler ] -> [String]
ppSuffixes = map fst
-- |Standard preprocessors: GreenCard, c2hs, hsc2hs, happy, alex and cpphs.
-- Each entry maps a source-file extension (without the dot) to the
-- preprocessor used for files with that extension.
knownSuffixHandlers :: [ PPSuffixHandler ]
knownSuffixHandlers =
  [ ("gc", ppGreenCard)
  , ("chs", ppC2hs)
  , ("hsc", ppHsc2hs)
  , ("x", ppAlex)
  , ("y", ppHappy)
  , ("ly", ppHappy)
  , ("cpphs", ppCpp)
  ]
-- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs.
-- Only c2hs and hsc2hs generate extra C sources alongside the .hs output.
knownExtrasHandlers :: [ PreProcessorExtras ]
knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ]
-- | Find any extra C sources generated by preprocessing that need to
-- be added to the component (addresses issue #238).
preprocessExtras :: Component
                 -> LocalBuildInfo
                 -> IO [FilePath]
preprocessExtras comp lbi = case comp of
  -- Libraries build straight into the build dir; the other component
  -- kinds each use their own "<name>/<name>-tmp" subdirectory.
  CLib _ -> pp $ buildDir lbi
  (CExe Executable { exeName = nm }) ->
    pp $ buildDir lbi </> nm </> nm ++ "-tmp"
  CTest test -> do
    case testInterface test of
      TestSuiteExeV10 _ _ ->
          pp $ buildDir lbi </> testName test </> testName test ++ "-tmp"
      TestSuiteLibV09 _ _ ->
          pp $ buildDir lbi </> stubName test </> stubName test ++ "-tmp"
      TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
                                    ++ "suite type " ++ display tt
  CBench bm -> do
    case benchmarkInterface bm of
      BenchmarkExeV10 _ _ ->
          pp $ buildDir lbi </> benchmarkName bm </> benchmarkName bm ++ "-tmp"
      BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
                                      ++ "type " ++ display tt
  where
    -- Run every known extras handler over the directory and make the
    -- resulting paths absolute relative to it.
    pp dir = (map (dir </>) . concat) `fmap` forM knownExtrasHandlers ($ dir)
| null | https://raw.githubusercontent.com/tolysz/ghcjs-stack/83d5be83e87286d984e89635d5926702c55b9f29/special/cabal/Cabal/Distribution/Simple/PreProcess.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD3
Maintainer :
Portability : portable
that can transform one kind of file into another. There is also a
'PPSuffixHandler' which is a combination of a file extension and a function
lists them in 'knownSuffixHandlers'. On top of this it provides a function
for actually preprocessing some sources given a bunch of known suffix
handlers. This module is not as good as it could be, it could really do with
a rewrite to address some of the problems we have with pre-processors.
|The interface to a preprocessor, which may be implemented using an
external program, but need not be. The arguments are the name of
the input file, the name of the output file and a verbosity level.
Here is a simple example that merely prepends a comment to the given
source file:
> ppTestHandler =
> platformIndependent = True,
> runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
> do info verbosity (inFile++" has been preprocessed to "++outFile)
> stuff <- readFile inFile
> writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff)
We split the input and output file names into a base directory and the
dirs that this file was found in. The output base dir is the build dir where
all the generated source files are put.
The reason for splitting it up this way is that some pre-processors don't
file may require reading a generated .h file). In these cases the generated
files need to embed relative path names to each other (eg the generated .hs
the base directory where the generated files are located, it cannot be
relative to the top level of the build tree because the compilers do not
Most pre-processors do not care of course, so mkSimplePreProcessor and
runSimplePreProcessor functions handle the simple case.
Is the output of the pre-processor platform independent? eg happy output
This matters since only platform independent generated code can be
inlcuded into a source tarball.
TODO: deal with pre-processors that have implementaion dependent output
ghc-specific code into supposedly portable source tarballs.
Location of the source file relative to a base dir
Output file name, relative to an output base dir
verbosity
Should exit if the preprocessor fails
| Function to determine paths to possible extra C sources for a
preprocessor: just takes the path to the build directory and uses
this to search for C sources with names that match the
preprocessor's output name format.
component (lib, exe, or test suite).
TODO: try to list all the modules that could not be found
to reconfigure after editing the .cabal file each time.
if required.
^source directories
^build directory
^preprocess for sdist
^module file name
^verbosity
^possible preprocessors
look for files in the various source dirs with this module name
and a file extension of a known preprocessor
no preprocessor file exists, look for an ordinary source file
just to make sure one actually exists at all for this module.
Note: by looking in the target/output build dir too, we allow
source files to appear magically in the target build dir without
any corresponding "real" source file. This lets custom Setup.hs
files generate source modules directly into the build dir without
the rest of the build system being aware of it (somewhat dodgy)
Preprocessing files for 'sdist' is different from preprocessing
for 'build'. When preprocessing for sdist we preprocess to
avoid that the user has to have the preprocessors available.
preprocessed and which not, so for sdist we only process
platform independent files and put them into the 'buildLoc'
(which we assume is set to the temp. directory that will become
the tarball).
TODO: eliminate sdist variant, just supply different handlers
look for existing pre-processed source file in the dest dir to
see if we really have to re-run the preprocessor.
be in the same place as the hs files, so if we put the hs file in dist/
then we need to copy the hs-boot file there too. This should probably be
done another way. Possibly we should also be looking for .lhs-boot
files, but I think that preprocessors only produce .hs files.
------------------------------------------------------------
* known preprocessors
------------------------------------------------------------
This one is useful for preprocessors that can't handle literate source.
We also need a way to chain preprocessors.
This is a bit of an ugly hack. We're going to
unlit the file ourselves later on if appropriate,
double-unlitted. In the future we might switch to
using cpphs --unlit instead.
Additional gcc options
Note that on ELF systems, wherever we use -L, we must also use -R
because presumably that -L dir is not on the normal path for the
compiling a C program and then running it.
Options from the current package:
Options from dependent packages
TODO: installedPkgs contains ALL dependencies associated with
the package, but we really only want to look at packages for the
*current* dependency. We should use PackageIndex.dependencyClosure
on the direct depends of the component. Can't easily do that,
because the signature of this function is wrong. Tracked with
the -u flags in the ldOptions of the rts package mean linking fails on
Options from the current package:
Options from dependent packages
TODO: install .chi files for packages, so we can --include
those dirs here, for the dependencies
input and output files
TODO: move this into the compiler abstraction
the other compilers. Check if that's really what they want.
|Convenience function; get the suffixes of these preprocessors.
| Find any extra C sources generated by preprocessing that need to | Module : Distribution . Simple . PreProcess
Copyright : ( c ) 2003 - 2005 , ,
This defines a ' PreProcessor ' abstraction which represents a pre - processor
for configuring a ' PreProcessor ' . It defines a bunch of known built - in
preprocessors like @cpp@ , @cpphs@ , @c2hs@ , , @happy@ , @alex@ etc and
module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras,
knownSuffixHandlers, ppSuffixes,
PPSuffixHandler, PreProcessor(..),
mkSimplePreProcessor, runSimplePreProcessor,
ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs,
ppHappy, ppAlex, ppUnlit, platformDefines
)
where
import Distribution.Simple.PreProcess.Unlit
import Distribution.Package
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription as PD
import qualified Distribution.InstalledPackageInfo as Installed
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.CCompiler
import Distribution.Simple.Compiler
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Simple.Program
import Distribution.Simple.Test.LibV09
import Distribution.System
import Distribution.Text
import Distribution.Version
import Distribution.Verbosity
import Control.Monad
import Data.Maybe (fromMaybe)
import Data.List (nub, isSuffixOf)
import System.Directory (doesFileExist)
import System.Info (os, arch)
import System.FilePath (splitExtension, dropExtensions, (</>), (<.>),
takeDirectory, normalise, replaceExtension,
takeExtensions)
> ppTestHandler : : PreProcessor
> PreProcessor {
> return ExitSuccess
rest of the file name . The input base dir is the path in the list of search
simply generate one output .hs file from one input file but have
dependencies on other generated files ( notably c2hs , where building one
.hs file may require reading other files , and then compiling the .hs
file mentions the .h file in the FFI imports ) . This path must be relative to
look for .h files relative to there , ie we do not use \"-I .\ " , instead we
use \"-I dist\/build\ " ( or whatever dist dir has been set by the user )
data PreProcessor = PreProcessor {
is portable haskell but 's output is platform dependent .
platformIndependent :: Bool,
eg and happy have flags . However we ca n't really inlcude
}
type PreProcessorExtras = FilePath -> IO [FilePath]
mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ())
-> (FilePath, FilePath)
-> (FilePath, FilePath) -> Verbosity -> IO ()
mkSimplePreProcessor simplePP
(inBaseDir, inRelativeFile)
(outBaseDir, outRelativeFile) verbosity = simplePP inFile outFile verbosity
where inFile = normalise (inBaseDir </> inRelativeFile)
outFile = normalise (outBaseDir </> outRelativeFile)
runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity
-> IO ()
runSimplePreProcessor pp inFile outFile verbosity =
runPreProcessor pp (".", inFile) (".", outFile) verbosity
|A preprocessor for turning non - Haskell files with the given extension
into plain source files .
type PPSuffixHandler
= (String, BuildInfo -> LocalBuildInfo -> PreProcessor)
| Apply preprocessors to the sources from ' hsSourceDirs ' for a given
preprocessComponent :: PackageDescription
-> Component
-> LocalBuildInfo
-> Bool
-> Verbosity
-> [PPSuffixHandler]
-> IO ()
preprocessComponent pd comp lbi isSrcDist verbosity handlers = case comp of
(CLib lib@Library{ libBuildInfo = bi }) -> do
let dirs = hsSourceDirs bi ++ [autogenModulesDir lbi]
setupMessage verbosity "Preprocessing library" (packageId pd)
forM_ (map ModuleName.toFilePath $ libModules lib) $
pre dirs (buildDir lbi) (localHandlers bi)
(CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do
let exeDir = buildDir lbi </> nm </> nm ++ "-tmp"
dirs = hsSourceDirs bi ++ [autogenModulesDir lbi]
setupMessage verbosity ("Preprocessing executable '" ++ nm ++ "' for") (packageId pd)
forM_ (map ModuleName.toFilePath $ otherModules bi) $
pre dirs exeDir (localHandlers bi)
pre (hsSourceDirs bi) exeDir (localHandlers bi) $
dropExtensions (modulePath exe)
CTest test@TestSuite{ testName = nm } -> do
setupMessage verbosity ("Preprocessing test suite '" ++ nm ++ "' for") (packageId pd)
case testInterface test of
TestSuiteExeV10 _ f ->
preProcessTest test f $ buildDir lbi </> testName test
</> testName test ++ "-tmp"
TestSuiteLibV09 _ _ -> do
let testDir = buildDir lbi </> stubName test
</> stubName test ++ "-tmp"
writeSimpleTestStub test testDir
preProcessTest test (stubFilePath test) testDir
TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
++ "suite type " ++ display tt
CBench bm@Benchmark{ benchmarkName = nm } -> do
setupMessage verbosity ("Preprocessing benchmark '" ++ nm ++ "' for") (packageId pd)
case benchmarkInterface bm of
BenchmarkExeV10 _ f ->
preProcessBench bm f $ buildDir lbi </> benchmarkName bm
</> benchmarkName bm ++ "-tmp"
BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
++ "type " ++ display tt
where
builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"]
builtinCSuffixes = cSourceExtensions
builtinSuffixes = builtinHaskellSuffixes ++ builtinCSuffixes
localHandlers bi = [(ext, h bi lbi) | (ext, h) <- handlers]
pre dirs dir lhndlrs fp =
preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs
preProcessTest test = preProcessComponent (testBuildInfo test)
(testModules test)
preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm)
(benchmarkModules bm)
preProcessComponent bi modules exePath dir = do
let biHandlers = localHandlers bi
sourceDirs = hsSourceDirs bi ++ [ autogenModulesDir lbi ]
sequence_ [ preprocessFile sourceDirs dir isSrcDist
(ModuleName.toFilePath modu) verbosity builtinSuffixes
biHandlers
| modu <- modules ]
preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist
(dropExtensions $ exePath) verbosity
builtinSuffixes biHandlers
not just the first one . It 's annoying and slow due to the need
|Find the first extension of the file that exists , and preprocess it
preprocessFile
^builtin suffixes
-> IO ()
preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do
psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile
case psrcFiles of
Nothing -> do
bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile
case bsrcFiles of
Nothing -> die $ "can't find source for " ++ baseFile
++ " in " ++ intercalate ", " searchLoc
_ -> return ()
found a pre - processable file in one of the source dirs
Just (psrcLoc, psrcRelFile) -> do
let (srcStem, ext) = splitExtension psrcRelFile
psrcFile = psrcLoc </> psrcRelFile
pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected")
(lookup (tailNotNull ext) handlers)
ATM , we do n't have a way to specify which files are to be
when (not forSDist || forSDist && platformIndependent pp) $ do
ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile
recomp <- case ppsrcFiles of
Nothing -> return True
Just ppsrcFile ->
psrcFile `moreRecentFile` ppsrcFile
when recomp $ do
let destDir = buildLoc </> dirName srcStem
createDirectoryIfMissingVerbose verbosity True destDir
runPreProcessorWithHsBootHack pp
(psrcLoc, psrcRelFile)
(buildLoc, srcStem <.> "hs")
where
dirName = takeDirectory
tailNotNull [] = []
tailNotNull x = tail x
FIXME : This is a somewhat nasty hack . GHC requires that hs - boot files
runPreProcessorWithHsBootHack pp
(inBaseDir, inRelativeFile)
(outBaseDir, outRelativeFile) = do
runPreProcessor pp
(inBaseDir, inRelativeFile)
(outBaseDir, outRelativeFile) verbosity
exists <- doesFileExist inBoot
when exists $ copyFileVerbose verbosity inBoot outBoot
where
inBoot = replaceExtension inFile "hs-boot"
outBoot = replaceExtension outFile "hs-boot"
inFile = normalise (inBaseDir </> inRelativeFile)
outFile = normalise (outBaseDir </> outRelativeFile)
ppGreenCard :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppGreenCard _ lbi
= PreProcessor {
platformIndependent = False,
runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
rawSystemProgramConf verbosity greencardProgram (withPrograms lbi)
(["-tffi", "-o" ++ outFile, inFile])
}
-- | A pre-processor that unlits a literate Haskell source file
-- (strips the literate-comment markup).  It is platform independent:
-- its output does not depend on the build machine.
ppUnlit :: PreProcessor
ppUnlit =
  PreProcessor {
    platformIndependent = True,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile _verbosity ->
      withUTF8FileContents inFile $ \contents ->
        -- NOTE: 'unlit' here returns Left for the successfully unlitted
        -- contents (which we write out) and Right for an error message
        -- (which aborts via 'die') -- hence this unusual 'either'
        -- orientation.
        either (writeUTF8File outFile) die (unlit inFile contents)
  }
ppCpp :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppCpp = ppCpp' []
ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor
ppCpp' extraArgs bi lbi =
case compilerFlavor (compiler lbi) of
GHC -> ppGhcCpp ghcProgram (>= Version [6,6] []) args bi lbi
GHCJS -> ppGhcCpp ghcjsProgram (const True) args bi lbi
_ -> ppCpphs args bi lbi
where cppArgs = getCppOptions bi lbi
args = cppArgs ++ extraArgs
-- | Use GHC (or GHCJS) itself as a C pre-processor, via @-E -cpp@.
-- The @xHs@ predicate decides, from the compiler version, whether the
-- @-x hs@ flag is supported and should be passed.
ppGhcCpp :: Program -> (Version -> Bool)
         -> [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor
ppGhcCpp program xHs extraArgs _bi lbi =
  PreProcessor {
    platformIndependent = False,
    runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
      (prog, version, _) <- requireProgramVersion verbosity
                              program anyVersion (withPrograms lbi)
      rawSystemProgram verbosity prog $
          ["-E", "-cpp"]
          -- We unlit the file ourselves later on if appropriate,
          -- so we need GHC not to unlit it now or it'll get
          -- double-unlitted; @-x hs@ forces GHC to treat the input
          -- as plain (non-literate) Haskell.
       ++ (if xHs version then ["-x", "hs"] else [])
       ++ [ "-optP-include", "-optP"++ (autogenModulesDir lbi </> cppHeaderName) ]
       ++ ["-o", outFile, inFile]
       ++ extraArgs
  }
ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> PreProcessor
ppCpphs extraArgs _bi lbi =
PreProcessor {
platformIndependent = False,
runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
(cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity
cpphsProgram anyVersion (withPrograms lbi)
rawSystemProgram verbosity cpphsProg $
("-O" ++ outFile) : inFile
: "--noline" : "--strip"
: (if cpphsVersion >= Version [1,6] []
then ["--include="++ (autogenModulesDir lbi </> cppHeaderName)]
else [])
++ extraArgs
}
ppHsc2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppHsc2hs bi lbi =
PreProcessor {
platformIndependent = False,
runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
(gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
rawSystemProgramConf verbosity hsc2hsProgram (withPrograms lbi) $
[ "--cc=" ++ programPath gccProg
, "--ld=" ++ programPath gccProg ]
++ [ "--cflag=" ++ opt | opt <- programDefaultArgs gccProg
++ programOverrideArgs gccProg ]
++ [ "--lflag=" ++ opt | opt <- programDefaultArgs gccProg
++ programOverrideArgs gccProg ]
OSX frameworks :
++ [ what ++ "=-F" ++ opt
| isOSX
, opt <- nub (concatMap Installed.frameworkDirs pkgs)
, what <- ["--cflag", "--lflag"] ]
++ [ "--lflag=" ++ arg
| isOSX
, opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs
, arg <- ["-framework", opt] ]
system 's dynamic linker . This is needed because hsc2hs works by
++ [ "--cflag=" ++ opt | opt <- platformDefines lbi ]
++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs bi ]
++ [ "--cflag=" ++ opt | opt <- PD.ccOptions bi
++ PD.cppOptions bi ]
++ [ "--cflag=" ++ opt | opt <-
[ "-I" ++ autogenModulesDir lbi,
"-include", autogenModulesDir lbi </> cppHeaderName ] ]
++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ]
++ [ "--lflag=-Wl,-R," ++ opt | isELF
, opt <- PD.extraLibDirs bi ]
++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs bi ]
++ [ "--lflag=" ++ opt | opt <- PD.ldOptions bi ]
++ [ "--cflag=" ++ opt
| pkg <- pkgs
, opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
++ [ opt | opt <- Installed.ccOptions pkg ] ]
++ [ "--lflag=" ++ opt
| pkg <- pkgs
, opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs pkg ]
++ [ "-Wl,-R," ++ opt | isELF
, opt <- Installed.libraryDirs pkg ]
++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ]
++ [ opt | opt <- Installed.ldOptions pkg ] ]
++ ["-o", outFile, inFile]
}
where
# 2971 ( which has a test case . )
pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi))
isOSX = case buildOS of OSX -> True; _ -> False
isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True;
packageHacks = case compilerFlavor (compiler lbi) of
GHC -> hackRtsPackage
GHCJS -> hackRtsPackage
_ -> id
We do n't link in the actual libraries of our dependencies , so
OS X ( it 's ld is a tad stricter than gnu ld ) . Thus we remove the
ldOptions for GHC 's rts package :
hackRtsPackage index =
case PackageIndex.lookupPackageName index (PackageName "rts") of
[(_, [rts])]
-> PackageIndex.insert rts { Installed.ldOptions = [] } index
_ -> error "No (or multiple) ghc rts package is registered!!"
ppHsc2hsExtras :: PreProcessorExtras
ppHsc2hsExtras buildBaseDir = filter ("_hsc.c" `isSuffixOf`) `fmap`
getDirectoryContentsRecursive buildBaseDir
ppC2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppC2hs bi lbi =
PreProcessor {
platformIndependent = False,
runPreProcessor = \(inBaseDir, inRelativeFile)
(outBaseDir, outRelativeFile) verbosity -> do
(c2hsProg, _, _) <- requireProgramVersion verbosity
c2hsProgram (orLaterVersion (Version [0,15] []))
(withPrograms lbi)
(gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
rawSystemProgram verbosity c2hsProg $
[ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ]
++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ]
++ [ "--cppopts=-include" ++ (autogenModulesDir lbi </> cppHeaderName) ]
++ [ "--include=" ++ outBaseDir ]
++ [ "--cppopts=" ++ opt
| pkg <- pkgs
, opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ]
++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg
, c `elem` "DIU" ] ]
++ [ "--output-dir=" ++ outBaseDir
, "--output=" ++ outRelativeFile
, inBaseDir </> inRelativeFile ]
}
where
pkgs = PackageIndex.topologicalOrder (installedPkgs lbi)
ppC2hsExtras :: PreProcessorExtras
ppC2hsExtras d = filter (\p -> takeExtensions p == ".chs.c") `fmap`
getDirectoryContentsRecursive d
TODO : perhaps use this with hsc2hs too
TODO : remove cc - options from cpphs for cabal - version : > = 1.10
getCppOptions :: BuildInfo -> LocalBuildInfo -> [String]
getCppOptions bi lbi
= platformDefines lbi
++ cppOptions bi
++ ["-I" ++ dir | dir <- PD.includeDirs bi]
++ [opt | opt@('-':c:_) <- PD.ccOptions bi, c `elem` "DIU"]
platformDefines :: LocalBuildInfo -> [String]
platformDefines lbi =
case compilerFlavor comp of
GHC ->
["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++
["-D" ++ os ++ "_BUILD_OS=1"] ++
["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
GHCJS ->
compatGlasgowHaskell ++
["-D__GHCJS__=" ++ versionInt version] ++
["-D" ++ os ++ "_BUILD_OS=1"] ++
["-D" ++ arch ++ "_BUILD_ARCH=1"] ++
map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
JHC -> ["-D__JHC__=" ++ versionInt version]
HaskellSuite {} ->
["-D__HASKELL_SUITE__"] ++
map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++
map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr
_ -> []
where
comp = compiler lbi
Platform hostArch hostOS = hostPlatform lbi
version = compilerVersion comp
compatGlasgowHaskell =
maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v])
(compilerCompatVersion GHC comp)
-- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all
-- the other compilers.
--
-- Render a compiler version as the integer CPP macro value: the first
-- component followed by the second component padded to two digits,
-- e.g. 6.8.x -> "608", 6.10.x -> "610".  No components yields "1",
-- a single component is shown as-is.
versionInt :: Version -> String
versionInt (Version { versionBranch = [] }) = "1"
versionInt (Version { versionBranch = [n] }) = show n
versionInt (Version { versionBranch = n1:n2:_ })
  = -- 6.8.x -> 608
    -- 6.10.x -> 610
    let s1 = show n1
        s2 = show n2
        -- Pad the second component to two digits.
        middle = case s2 of
                   _ : _ : _ -> ""
                   _         -> "0"
    in s1 ++ middle ++ s2
osStr = case hostOS of
Linux -> ["linux"]
Windows -> ["mingw32"]
OSX -> ["darwin"]
FreeBSD -> ["freebsd"]
OpenBSD -> ["openbsd"]
NetBSD -> ["netbsd"]
DragonFly -> ["dragonfly"]
Solaris -> ["solaris2"]
AIX -> ["aix"]
HPUX -> ["hpux"]
IRIX -> ["irix"]
HaLVM -> []
IOS -> ["ios"]
Android -> ["android"]
Ghcjs -> ["ghcjs"]
Hurd -> ["hurd"]
OtherOS _ -> []
archStr = case hostArch of
I386 -> ["i386"]
X86_64 -> ["x86_64"]
PPC -> ["powerpc"]
PPC64 -> ["powerpc64"]
Sparc -> ["sparc"]
Arm -> ["arm"]
Mips -> ["mips"]
SH -> []
IA64 -> ["ia64"]
S390 -> ["s390"]
Alpha -> ["alpha"]
Hppa -> ["hppa"]
Rs6000 -> ["rs6000"]
M68k -> ["m68k"]
Vax -> ["vax"]
JavaScript -> ["javascript"]
OtherArch _ -> []
ppHappy :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppHappy _ lbi = pp { platformIndependent = True }
where pp = standardPP lbi happyProgram (hcFlags hc)
hc = compilerFlavor (compiler lbi)
hcFlags GHC = ["-agc"]
hcFlags GHCJS = ["-agc"]
hcFlags _ = []
ppAlex :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppAlex _ lbi = pp { platformIndependent = True }
where pp = standardPP lbi alexProgram (hcFlags hc)
hc = compilerFlavor (compiler lbi)
hcFlags GHC = ["-g"]
hcFlags GHCJS = ["-g"]
hcFlags _ = []
standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor
standardPP lbi prog args =
PreProcessor {
platformIndependent = False,
runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity ->
rawSystemProgramConf verbosity prog (withPrograms lbi)
(args ++ ["-o", outFile, inFile])
}
ppSuffixes :: [ PPSuffixHandler ] -> [String]
ppSuffixes = map fst
|Standard preprocessors : GreenCard , c2hs , hsc2hs , happy , and cpphs .
knownSuffixHandlers :: [ PPSuffixHandler ]
knownSuffixHandlers =
[ ("gc", ppGreenCard)
, ("chs", ppC2hs)
, ("hsc", ppHsc2hs)
, ("x", ppAlex)
, ("y", ppHappy)
, ("ly", ppHappy)
, ("cpphs", ppCpp)
]
|Standard preprocessors with possible extra C sources : c2hs , hsc2hs .
knownExtrasHandlers :: [ PreProcessorExtras ]
knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ]
be added to the component ( addresses issue # 238 ) .
preprocessExtras :: Component
-> LocalBuildInfo
-> IO [FilePath]
preprocessExtras comp lbi = case comp of
CLib _ -> pp $ buildDir lbi
(CExe Executable { exeName = nm }) ->
pp $ buildDir lbi </> nm </> nm ++ "-tmp"
CTest test -> do
case testInterface test of
TestSuiteExeV10 _ _ ->
pp $ buildDir lbi </> testName test </> testName test ++ "-tmp"
TestSuiteLibV09 _ _ ->
pp $ buildDir lbi </> stubName test </> stubName test ++ "-tmp"
TestSuiteUnsupported tt -> die $ "No support for preprocessing test "
++ "suite type " ++ display tt
CBench bm -> do
case benchmarkInterface bm of
BenchmarkExeV10 _ _ ->
pp $ buildDir lbi </> benchmarkName bm </> benchmarkName bm ++ "-tmp"
BenchmarkUnsupported tt -> die $ "No support for preprocessing benchmark "
++ "type " ++ display tt
where
pp dir = (map (dir </>) . concat) `fmap` forM knownExtrasHandlers ($ dir)
|
67dca8312438c9d8fcfb04b082a614029a85c4023c075391a7905c5bfe1157f6 | informatimago/lisp | rfc2822.lisp | -*- coding : utf-8 -*-
;;;;****************************************************************************
FILE : rfc2822.lisp
;;;;LANGUAGE: Common-Lisp
;;;;SYSTEM: Common-Lisp
USER - INTERFACE :
;;;;DESCRIPTION
;;;;
;;;; See defpackage documentation string.
;;;;
< PJB > < >
MODIFICATIONS
2005 - 09 - 01 < PJB > Made use of ecma048 / iso6429 .
2004 - 08 - 17 < PJB > Created ( fetched basic functions from antispam.lisp ) .
;;;;LEGAL
AGPL3
;;;;
Copyright 2004 - 2016
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
;;;;
You should have received a copy of the GNU Affero General Public License
;;;; along with this program.  If not, see <https://www.gnu.org/licenses/>
;;;;****************************************************************************
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COMMON-LISP-USER")
(declaim (declaration also-use-packages))
(declaim (also-use-packages "COM.INFORMATIMAGO.COMMON-LISP.CESARUM.ECMA048"))
(defpackage "COM.INFORMATIMAGO.COMMON-LISP.RFC2822.RFC2822"
(:use "COMMON-LISP")
(:export "REMOVE-COMMENTS" "REMOVE-SPACES" "UNQUOTE")
(:documentation
"RFC0822/RFC2822 support funtions.
RFC0822/RFC2822 support funtions.
RFC822 STANDARD FOR THE FORMAT OF ARPA INTERNET TEXT MESSAGES
RFC2822 Internet Message Format
RFC822 in fixnum words:
In transmission, message lines are separated by CRLF.
Header lines are separated from body lines by an empty line (CRLFCRLF).
Header lines may be cut by replacing any space or tab by CRLF, (space or tab).
Field name consists of any ASCII printable character but space and colon,
followed by a colon.
Field body begins immediately after the colon. (Customary space included).
NOTE: rfc2822 forbid spaces between field name and colon,
but it IS possible in rfc822 to insert spaces here.
(For example, see Annex A of RFC822).
License:
AGPL3
Copyright Pascal J. Bourguignon 2004 - 2015
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program.
If not, see </>
"))
(in-package "COM.INFORMATIMAGO.COMMON-LISP.RFC2822.RFC2822")
(defparameter +space+ (character " ") "An ASCII SPACE character.")
(defparameter +tab+ (code-char #+mocl 9 #-mocl com.informatimago.common-lisp.cesarum.ecma048:ht) "An ASCII TABULATION character.")
(defparameter +cr+ (code-char #+mocl 13 #-mocl com.informatimago.common-lisp.cesarum.ecma048:cr) "An ASCII CARRIAGE-RETURN.")
(defparameter +lf+ (code-char #+mocl 10 #-mocl com.informatimago.common-lisp.cesarum.ecma048:lf) "An ASCII LINE-FEED.")
(defun unquote (value)
  "
RETURN:  A string where the unescaped quotes and the escape characters
         have been removed.
NOTE:    It is assumed that the value contains only one string, or none.
EXAMPLE: (unquote \"A string \\\\\" with \\\\\" a quoted word.\")
          --> \"A string \\\" with \\\" a quoted word.\"
"
  ;; Single left-to-right pass over VALUE, writing the kept characters
  ;; into TEMP.  The local macros operate on the loop variables:
  ;;   WHEN-CHAR -- test the current character (at index I) against CH;
  ;;   COPY      -- keep the current character and advance;
  ;;   SKIP      -- drop the current character and advance.
  (macrolet ((when-char (ch) (if (stringp ch)
                                 `(char= (character ,ch) (aref value i))
                                 `(char= ,ch (aref value i))))
             (copy () `(progn (setf (aref temp j) (aref value i))
                              (incf i) (incf j)))
             (skip () `(incf i)))
    ;; STATE is a stack; only :OUT (outside a string) and :QUOTE
    ;; (inside a double-quoted string) are ever pushed here.
    (do ((temp (make-string (length value)))
         (i 0)                           ; read index into VALUE
         (j 0)                           ; write index into TEMP
         (state '(:out)))                ;; :QUOTE :COMMENT
        ((<= (length value) i) (subseq temp 0 j))
      (case (car state)
        ((:out)
         (cond
           ;; Drop the backslash, keep the escaped character.
           ((when-char "\\") (skip) (copy))
           ;; Drop the opening quote, enter the in-string state.
           ((when-char "\"") (skip) (push :quote state))
           (t (copy))))
        ((:quote)
         (cond
           ((when-char "\\") (skip) (copy))
           ;; Drop the closing quote, return to the outer state.
           ((when-char "\"") (skip) (pop state))
           (t (copy)))) ))))
(defun remove-spaces (value)
  "
RETURN: A string with unquoted spaces and tabulations removed.
"
  ;; One pass over VALUE: characters are appended to OUT unless they
  ;; are a space or a tabulation occurring outside of a double-quoted
  ;; string.  A backslash always protects the character that follows
  ;; it, and the quote characters themselves are kept in the result.
  (let ((out (make-string (length value)))
        (in-string nil)
        (src 0)
        (dst 0))
    (flet ((keep ()
             ;; Copy the current character and advance both cursors.
             (setf (aref out dst) (aref value src))
             (incf src)
             (incf dst)))
      (loop :while (< src (length value))
            :do (let ((ch (aref value src)))
                  (cond ((char= ch #\\)
                         ;; Escape: keep the backslash and the next char.
                         (keep) (keep))
                        ((char= ch #\")
                         ;; Quotes are kept and toggle the string state.
                         (keep)
                         (setf in-string (not in-string)))
                        ((and (not in-string)
                              (or (char= ch #\Space) (char= ch +tab+)))
                         ;; Unquoted whitespace is dropped.
                         (incf src))
                        (t (keep))))))
    (subseq out 0 dst)))
(defun remove-comments (value)
  "
RETURN: A string with the RFC822 comments removed.
"
  ;;; Relevant grammar, quoted from RFC822:
  ;;;
  ;;; comment     =  "(" *(ctext / quoted-pair / comment) ")"
  ;;; ctext       =  <any CHAR excluding "(",   ; => may be folded
  ;;;                 ")", "\" & CR, & including
  ;;;                 linear-white-space>
  ;;; quoted-pair =  "\" CHAR                   ; may quote any char
  ;;; linear-white-space = 1*([CRLF] LWSP-char) ; semantics = SPACE
  ;;;                                           ; CRLF => folding
  ;;; CHAR        =  <any ASCII character>      ; (  0-177,  0.-127.)
  ;;;
  ;;; qtext       =  <any CHAR excepting <">,   ; => may be folded
  ;;;                 "\" & CR, and including
  ;;;                 linear-white-space>
  ;;; quoted-string = <"> *(qtext/quoted-pair) <"> ; Regular qtext or
  ;;;                                              ;   quoted chars.
  ;;
  ;; Single-pass state machine.  The local macros operate on the loop
  ;; variables: WHEN-CHAR tests the current character, COPY keeps it,
  ;; SKIP drops it.
  (macrolet ((when-char (ch) (if (stringp ch)
                                 `(char= (character ,ch) (aref value i))
                                 `(char= ,ch (aref value i))))
             (copy () `(progn (setf (aref temp j) (aref value i))
                              (incf i) (incf j)))
             (skip () `(incf i)))
    (do ((temp (make-string (length value)))
         (i 0)
         (j 0)
         (state '(:out)))             ;; :QUOTE :COMMENT
        ((<= (length value) i) (subseq temp 0 j))
      (case (car state)
        ((:out)
         (cond
           ((when-char "\\") (copy) (copy))
           ((when-char "\"") (copy) (push :quote state))
           ;; An unquoted "(" starts a comment; the parenthesis
           ;; itself is dropped.
           ((when-char "(") (skip) (push :comment state))
           (t (copy))))
        ((:quote)
         ;; Inside a quoted-string everything is kept verbatim,
         ;; parentheses included.
         (cond
           ((when-char "\\") (copy) (copy))
           ((when-char "\"") (copy) (pop state))
           (t (copy))))
        ((:comment)
         ;; Inside a comment everything is dropped.  Comments nest:
         ;; "(" pushes another :COMMENT state and ")" pops one.
         (cond
           ((when-char "\\") (skip) (skip))
           ((when-char "(") (skip) (push :comment state))
           ((when-char ")") (skip) (pop state))
           (t (skip))))))))
;;;; THE END ;;;;
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/common-lisp/rfc2822/rfc2822.lisp | lisp | ****************************************************************************
LANGUAGE: Common-Lisp
SYSTEM: Common-Lisp
DESCRIPTION
See defpackage documentation string.
LEGAL
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>
****************************************************************************
without even the implied warranty of
:QUOTE :COMMENT
:QUOTE :COMMENT
comment = "(" *(ctext / quoted-pair / comment) ")"
= > may be folded
")", "\" & CR, & including
linear-white-space>
may quote any char
linear-white-space = 1*([CRLF] LWSP-char) ; semantics = SPACE
; CRLF => folding
( 0 - 177 , 0.-127 . )
= > may be folded
"\" & CR, and including
linear-white-space>
Regular qtext or
; quoted chars.
:QUOTE :COMMENT
THE END ;;;; | -*- coding : utf-8 -*-
FILE : rfc2822.lisp
USER - INTERFACE :
< PJB > < >
MODIFICATIONS
2005 - 09 - 01 < PJB > Made use of ecma048 / iso6429 .
2004 - 08 - 17 < PJB > Created ( fetched basic functions from antispam.lisp ) .
AGPL3
Copyright 2004 - 2016
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(in-package "COMMON-LISP-USER")
(declaim (declaration also-use-packages))
(declaim (also-use-packages "COM.INFORMATIMAGO.COMMON-LISP.CESARUM.ECMA048"))
(defpackage "COM.INFORMATIMAGO.COMMON-LISP.RFC2822.RFC2822"
(:use "COMMON-LISP")
(:export "REMOVE-COMMENTS" "REMOVE-SPACES" "UNQUOTE")
(:documentation
"RFC0822/RFC2822 support funtions.
RFC0822/RFC2822 support funtions.
RFC822 STANDARD FOR THE FORMAT OF ARPA INTERNET TEXT MESSAGES
RFC2822 Internet Message Format
RFC822 in fixnum words:
In transmission, message lines are separated by CRLF.
Header lines are separated from body lines by an empty line (CRLFCRLF).
Header lines may be cut by replacing any space or tab by CRLF, (space or tab).
Field name consists of any ASCII printable character but space and colon,
followed by a colon.
Field body begins immediately after the colon. (Customary space included).
NOTE: rfc2822 forbid spaces between field name and colon,
but it IS possible in rfc822 to insert spaces here.
(For example, see Annex A of RFC822).
License:
AGPL3
Copyright Pascal J. Bourguignon 2004 - 2015
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program.
If not, see </>
"))
(in-package "COM.INFORMATIMAGO.COMMON-LISP.RFC2822.RFC2822")
(defparameter +space+ (character " ") "An ASCII SPACE character.")
(defparameter +tab+ (code-char #+mocl 9 #-mocl com.informatimago.common-lisp.cesarum.ecma048:ht) "An ASCII TABULATION character.")
(defparameter +cr+ (code-char #+mocl 13 #-mocl com.informatimago.common-lisp.cesarum.ecma048:cr) "An ASCII CARRIAGE-RETURN.")
(defparameter +lf+ (code-char #+mocl 10 #-mocl com.informatimago.common-lisp.cesarum.ecma048:lf) "An ASCII LINE-FEED.")
(defun unquote (value)
"
RETURN: A string where the quotes and escapes are moved.
NOTE: It is assumed that the value contains only one string, or none.
EXAMPLE: (unquote \"A string \\\\\" with \\\\\" a quoted word.\")
--> \"A string \\\" with \\\" a quoted word.\"
"
(macrolet ((when-char (ch) (if (stringp ch)
`(char= (character ,ch) (aref value i))
`(char= ,ch (aref value i))))
(copy () `(progn (setf (aref temp j) (aref value i))
(incf i) (incf j)))
(skip () `(incf i)))
(do ((temp (make-string (length value)))
(i 0)
(j 0)
((<= (length value) i) (subseq temp 0 j))
(case (car state)
((:out)
(cond
((when-char "\\") (skip) (copy))
((when-char "\"") (skip) (push :quote state))
(t (copy))))
((:quote)
(cond
((when-char "\\") (skip) (copy))
((when-char "\"") (skip) (pop state))
(t (copy)))) ))))
(defun remove-spaces (value)
"
RETURN: A string with unquoted spaces and tabulations removed.
"
(macrolet ((when-char (ch) (if (stringp ch)
`(char= (character ,ch) (aref value i))
`(char= ,ch (aref value i))))
(copy () `(progn (setf (aref temp j) (aref value i))
(incf i) (incf j)))
(skip () `(incf i)))
(do ((temp (make-string (length value)))
(i 0)
(j 0)
((<= (length value) i) (subseq temp 0 j))
(case (car state)
((:out)
(cond
((when-char "\\") (copy) (copy))
((when-char "\"") (copy) (push :quote state))
((or (when-char " ") (when-char +tab+)) (skip))
(t (copy))))
((:quote)
(cond
((when-char "\\") (copy) (copy))
((when-char "\"") (copy) (pop state))
(t (copy)))) ))))
(defun remove-comments (value)
"
RETURN: A string with the RFC822 comments removed.
"
(macrolet ((when-char (ch) (if (stringp ch)
`(char= (character ,ch) (aref value i))
`(char= ,ch (aref value i))))
(copy () `(progn (setf (aref temp j) (aref value i))
(incf i) (incf j)))
(skip () `(incf i)))
(do ((temp (make-string (length value)))
(i 0)
(j 0)
((<= (length value) i) (subseq temp 0 j))
(case (car state)
((:out)
(cond
((when-char "\\") (copy) (copy))
((when-char "\"") (copy) (push :quote state))
((when-char "(") (skip) (push :comment state))
(t (copy))))
((:quote)
(cond
((when-char "\\") (copy) (copy))
((when-char "\"") (copy) (pop state))
(t (copy))))
((:comment)
(cond
((when-char "\\") (skip) (skip))
((when-char "(") (skip) (push :comment state))
((when-char ")") (skip) (pop state))
(t (skip))))))))
|
685353d756e31d897ef1b734baab158c9fe1758f1168a1f5a396a8d470508c40 | OCamlPro/ezjs | ezjs_bind.ml | open Js_of_ocaml
type string_field = Js.js_string Js.t Js.prop
type int_field = int Js.prop
type float_field = float Js.prop
type bool_field = bool Js.prop
type 'a array_field = 'a Js.js_array Js.t Js.prop
let eval_js txt = ignore(Js.Unsafe.eval_string txt)
(* [link_js scriptUrl f] fetches the script at [scriptUrl] with an XHR
   request, evaluates its text in the page, then calls the continuation
   [f].  NOTE(review): "jsreq" is the request name given to
   [Ezjs_xhr.get]; its exact meaning is defined by [Ezjs_xhr]. *)
let link_js scriptUrl f =
  Ezjs_xhr.get "jsreq" scriptUrl (fun txt ->
      eval_js txt;
      f ())
(* [resolve_deps jsdeps baseurl f] loads, one after another, the scripts
   listed in the mutable dependency list [jsdeps] (paths are relative to
   [baseurl]); once the list is empty, the continuation [f] is called.

   [!jsdeps] is dereferenced again inside the load callback and its head
   compared with the one read before the request: the head is consumed
   and the chain continues only if it is unchanged.  If it changed in
   the meantime -- presumably because another resolution is in flight
   and already consumed it -- the chain stops and [f] is called
   immediately (TODO confirm the intended concurrency semantics). *)
let rec resolve_deps jsdeps baseurl f =
  match !jsdeps with
  | [] -> f ()
  | dep1 :: _tail ->
    link_js (baseurl ^ dep1) (fun () ->
        match !jsdeps with
        | [] -> f ()
        | dep2 :: tail ->
          if dep1 = dep2 then begin
            (* Head unchanged: consume it and continue with the rest. *)
            jsdeps := tail;
            resolve_deps jsdeps baseurl f
          end
          else
            f ())
| null | https://raw.githubusercontent.com/OCamlPro/ezjs/4dc09b1e9eeecd46f3717036303ac704cc49b9a1/libs/utils/ezjs_bind.ml | ocaml | open Js_of_ocaml
type string_field = Js.js_string Js.t Js.prop
type int_field = int Js.prop
type float_field = float Js.prop
type bool_field = bool Js.prop
type 'a array_field = 'a Js.js_array Js.t Js.prop
let eval_js txt = ignore(Js.Unsafe.eval_string txt)
let link_js scriptUrl f =
Ezjs_xhr.get "jsreq" scriptUrl (fun txt ->
eval_js txt;
f ())
let rec resolve_deps jsdeps baseurl f =
match !jsdeps with
| [] -> f ()
| dep1 :: _tail ->
link_js (baseurl ^ dep1) (fun () ->
match !jsdeps with
| [] -> f ()
| dep2 :: tail ->
if dep1 = dep2 then begin
jsdeps := tail;
resolve_deps jsdeps baseurl f
end
else
f ())
| |
aec1a34dfb4a79b718ce8a55b6f85e166153054c7ef0bfef91690b0e121501d4 | deadpendency/deadpendency | GHNodeId.hs | module Common.Model.GitHub.GHNodeId
( GHNodeId (..),
)
where
import Common.Aeson.Aeson (cleanJSONOptions)
import Data.Aeson
newtype GHNodeId = GHNodeId
{ _ntText :: Text
}
deriving stock (Show, Generic, Eq)
instance ToJSON GHNodeId where
toJSON = genericToJSON cleanJSONOptions
toEncoding = genericToEncoding cleanJSONOptions
instance FromJSON GHNodeId where
parseJSON = genericParseJSON cleanJSONOptions
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/common/src/Common/Model/GitHub/GHNodeId.hs | haskell | module Common.Model.GitHub.GHNodeId
( GHNodeId (..),
)
where
import Common.Aeson.Aeson (cleanJSONOptions)
import Data.Aeson
newtype GHNodeId = GHNodeId
{ _ntText :: Text
}
deriving stock (Show, Generic, Eq)
instance ToJSON GHNodeId where
toJSON = genericToJSON cleanJSONOptions
toEncoding = genericToEncoding cleanJSONOptions
instance FromJSON GHNodeId where
parseJSON = genericParseJSON cleanJSONOptions
| |
8f0a193472314b5a15eff341b9b79a9d9e929d50d2c52b4114fa6231e729c753 | tud-fop/vanda-haskell | XFSA.hs | -----------------------------------------------------------------------------
-- |
Copyright : ( c ) Technische Universität Dresden 2018
-- License : BSD-style
--
-- Stability : unknown
-- Portability : portable
-----------------------------------------------------------------------------
module Vanda.Grammar.XFSA (
module X
) where
import Vanda.Grammar.XFSA.XFSA as X
import Vanda.Grammar.XFSA.Closure as X
import Vanda.Grammar.XFSA.Language as X
| null | https://raw.githubusercontent.com/tud-fop/vanda-haskell/3214966361b6dbf178155950c94423eee7f9453e/library/Vanda/Grammar/XFSA.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style
Stability : unknown
Portability : portable
--------------------------------------------------------------------------- | Copyright : ( c ) Technische Universität Dresden 2018
module Vanda.Grammar.XFSA (
module X
) where
import Vanda.Grammar.XFSA.XFSA as X
import Vanda.Grammar.XFSA.Closure as X
import Vanda.Grammar.XFSA.Language as X
|
e64761623f22ddf7d1932efcd836d68cdd6c6c4a9d72d1bc45d6669f1a393fd5 | graninas/Hydra | Types.hs | {-|
States that are dependent on labyrinth shape,
player's previous actions, and state of player's inventory.
-}
{-# LANGUAGE DeriveAnyClass #-}
module Labyrinth.Types where
import Labyrinth.Prelude
import Labyrinth.Domain
import Labyrinth.KVDB.Model
type PlayerPos = Pos
type PlayerHP = Int
type BearPos = Pos
type PlayerHasTreasure = Bool
type PlayerHasTheMap = Bool
-- | Mutable inventory flags for the player.
data InventoryState = InventoryState
  { _treasureState :: StateVar PlayerHasTreasure  -- ^ Whether the player holds the treasure.
  , _theMapState :: StateVar PlayerHasTheMap      -- ^ Whether the player holds the map.
  }
-- | Coarse phase of a game session.  NOTE(review): the transitions are
-- driven by the game loop elsewhere; the descriptions below follow the
-- constructor names -- confirm against the interpreter.
data GameState
  = GameStart                             -- ^ A fresh game, before the first move.
  | GameFinished                          -- ^ The game is over.
  | PlayerMove                            -- ^ A regular player move is expected.
  | PlayerIsAboutLeaving                  -- ^ The player asked to leave the labyrinth.
  | PlayerIsAboutLossLeavingConfirmation  -- ^ Awaiting confirmation of a leave that forfeits the game.
  deriving (Show, Eq)
-- | The whole mutable state of the application.  All fields are
-- 'StateVar's except the KV-DB configuration, which is fixed for the
-- lifetime of the application.
data AppState = AppState
  { _labyrinth :: StateVar Labyrinth          -- ^ Current labyrinth structure.
  , _labBounds :: StateVar Bounds             -- ^ Bounds (size) of the labyrinth.
  , _labRenderTemplate :: StateVar LabRender  -- ^ Pristine render of the labyrinth; presumably the base that '_labRenderVar' is rebuilt from -- TODO confirm.
  , _labRenderVar :: StateVar LabRender       -- ^ The render currently being shown/updated.
  , _labWormholes :: StateVar Wormholes       -- ^ Wormholes present in the labyrinth.
  , _labTrailpoints :: StateVar Trailpoints   -- ^ Trailpoints present in the labyrinth.
  , _playerPos :: StateVar PlayerPos          -- ^ Current player position.
  , _playerHP :: StateVar PlayerHP            -- ^ Current player health points.
  , _bearPos :: StateVar BearPos              -- ^ Current bear position.
  , _playerInventory :: InventoryState        -- ^ Player inventory flags.
  , _gameState :: StateVar GameState          -- ^ Current phase of the game.
  , _gameMessages :: StateVar [String]        -- ^ Messages accumulated for display.
  , _kvdbConfig :: KVDBConfig LabKVDB         -- ^ Key-value DB configuration (immutable).
  }
-- | Application-level failures; each constructor carries a human-readable
-- description.  JSON instances come from @DeriveAnyClass@ via 'Generic'.
data AppException
  = NotImplemented String    -- ^ Feature not implemented.
  | NotSupported String      -- ^ Operation not supported.
  | InvalidOperation String  -- ^ Operation invalid in the current state.
  | GenerationError String   -- ^ Labyrinth generation failed.
  deriving (Show, Read, Eq, Ord, Generic, ToJSON, FromJSON, Exception)
| null | https://raw.githubusercontent.com/graninas/Hydra/60d591b1300528f5ffd93efa205012eebdd0286c/app/labyrinth/src/Labyrinth/Types.hs | haskell | |
States that are dependent on labyrinth shape,
player's previous actions, and state of player's inventory.
# LANGUAGE DeriveAnyClass # |
module Labyrinth.Types where
import Labyrinth.Prelude
import Labyrinth.Domain
import Labyrinth.KVDB.Model
type PlayerPos = Pos
type PlayerHP = Int
type BearPos = Pos
type PlayerHasTreasure = Bool
type PlayerHasTheMap = Bool
data InventoryState = InventoryState
{ _treasureState :: StateVar PlayerHasTreasure
, _theMapState :: StateVar PlayerHasTheMap
}
data GameState
= GameStart
| GameFinished
| PlayerMove
| PlayerIsAboutLeaving
| PlayerIsAboutLossLeavingConfirmation
deriving (Show, Eq)
data AppState = AppState
{ _labyrinth :: StateVar Labyrinth
, _labBounds :: StateVar Bounds
, _labRenderTemplate :: StateVar LabRender
, _labRenderVar :: StateVar LabRender
, _labWormholes :: StateVar Wormholes
, _labTrailpoints :: StateVar Trailpoints
, _playerPos :: StateVar PlayerPos
, _playerHP :: StateVar PlayerHP
, _bearPos :: StateVar BearPos
, _playerInventory :: InventoryState
, _gameState :: StateVar GameState
, _gameMessages :: StateVar [String]
, _kvdbConfig :: KVDBConfig LabKVDB
}
data AppException
= NotImplemented String
| NotSupported String
| InvalidOperation String
| GenerationError String
deriving (Show, Read, Eq, Ord, Generic, ToJSON, FromJSON, Exception)
|
8b0def354178969ef6e445c69545fd65052b4bd4acc1ae32a39cb79c8b692dd5 | mstksg/advent-of-code-2017 | Day10.hs | module AOC2017.Day10 (day10a, day10b, knothash) where
import AOC2017.Types (Challenge)
import AOC2017.Util (strip)
import Data.Bits (xor)
import Data.Char (ord)
import Data.List (foldl')
import Data.List.Split (chunksOf, splitOn)
import Data.Word (Word8)
import Text.Printf (printf)
import qualified Data.Vector.Storable as V
data HashState = HS { _hsVec :: V.Vector Word8
, _hsPos :: Word8
, _hsSkip :: Word8
}
-- | One knot-hash round: reverse the window of @n@ elements starting at the
-- current position (indices wrap mod 256 via 'Word8' arithmetic), then
-- advance the position by @n@ plus the current skip size and bump the skip.
step :: HashState -> Word8 -> HashState
step (HS v0 p0 s0) n = HS v1 p1 s1
  where
    -- offsets 0..n-1 shifted by the current position, wrapped into Word8
    ixes = fromIntegral . (+ p0) <$> init [0 .. n]
    vals = (v0 V.!) <$> ixes
    v1 = v0 V.// zip ixes (reverse vals)
    p1 = p0 + n + s0
    s1 = s0 + 1
-- | Fold every length through 'step', starting from the identity
-- permutation of 0..255, and return the resulting sparse-hash vector.
process :: [Word8] -> V.Vector Word8
process = _hsVec . foldl' step hs0
  where
    -- initial state: vector [0..255], position 0, skip size 0
    hs0 = HS (V.generate 256 fromIntegral) 0 0
-- | Part 1: parse comma-separated lengths, run one pass of knotting and
-- show the product of the first two elements of the resulting vector.
day10a :: Challenge
day10a = show . V.product . V.take 2
       . process
       . map read . splitOn ","
-- | Part 2: knot-hash the whitespace-stripped input and render each dense
-- byte as two lowercase hex digits.
day10b :: Challenge
day10b = concatMap (printf "%02x") . knothash . strip
-- | Full knot hash of a string: take the character codes, append the
-- standard salt, repeat that length sequence 64 times, run the rounds
-- ('process'), then XOR each 16-byte chunk of the sparse hash down to a
-- single byte (the dense hash).
knothash :: String -> [Word8]
knothash = map (foldr xor 0) . chunksOf 16 . V.toList . process
         . concat . replicate 64 . (++ salt)
         . map (fromIntegral . ord)
  where
    -- suffix mandated by the AoC 2017 Day 10 part 2 specification
    salt = [17, 31, 73, 47, 23]
| null | https://raw.githubusercontent.com/mstksg/advent-of-code-2017/f9e97680726e87be175cf423241da3048ef6564d/src/AOC2017/Day10.hs | haskell | module AOC2017.Day10 (day10a, day10b, knothash) where
import AOC2017.Types (Challenge)
import AOC2017.Util (strip)
import Data.Bits (xor)
import Data.Char (ord)
import Data.List (foldl')
import Data.List.Split (chunksOf, splitOn)
import Data.Word (Word8)
import Text.Printf (printf)
import qualified Data.Vector.Storable as V
data HashState = HS { _hsVec :: V.Vector Word8
, _hsPos :: Word8
, _hsSkip :: Word8
}
step :: HashState -> Word8 -> HashState
step (HS v0 p0 s0) n = HS v1 p1 s1
where
ixes = fromIntegral . (+ p0) <$> init [0 .. n]
vals = (v0 V.!) <$> ixes
v1 = v0 V.// zip ixes (reverse vals)
p1 = p0 + n + s0
s1 = s0 + 1
process :: [Word8] -> V.Vector Word8
process = _hsVec . foldl' step hs0
where
hs0 = HS (V.generate 256 fromIntegral) 0 0
day10a :: Challenge
day10a = show . V.product . V.take 2
. process
. map read . splitOn ","
day10b :: Challenge
day10b = concatMap (printf "%02x") . knothash . strip
knothash :: String -> [Word8]
knothash = map (foldr xor 0) . chunksOf 16 . V.toList . process
. concat . replicate 64 . (++ salt)
. map (fromIntegral . ord)
where
salt = [17, 31, 73, 47, 23]
| |
7d34ef63d567584389ac24050c33e7bc4842684d21555c46f7dbe0d8f3941264 | Oblosys/proxima | HsTokenScanner.hs |
module HsTokenScanner where
import HsToken
import UU.Scanner.Position
import List(sort)
import UU.Util.BinaryTrees
import CommonTypes
import Maybe
import Char
-- | True for the attribute-grammar escape character @\@@ that introduces
-- attribute references inside code fragments.
isAGesc c = c == '@'
-- | Scan an attribute-grammar code fragment starting at the given source
-- position, using the fixed keyword/operator/special-character tables below.
lexTokens :: Pos -> String -> [HsToken]
lexTokens = scanTokens keywordstxt keywordsops specialchars opchars
  where keywordstxt = []
        keywordsops = [".","=", ":=", ":","|","@"]
        specialchars = ";()[],_{}`"
        opchars = "!#$%&*+./<=>?@\\^|-~:"
-- | Scan an attribute-grammar code fragment into tokens.
--
-- Recognised forms:
--
--   * @\@field@ and @\@field.attr@ attribute references ('AGLocal'/'AGField'),
--   * @--@ line comments and nestable @{- ... -}@ block comments,
--   * string and character literals (with escape sequences),
--   * identifiers, operator symbols, numbers (decimal, octal @0o@,
--     hexadecimal @0x@) and the single characters listed in @specchars@.
--
-- Positions are threaded through so every token carries its source 'Pos';
-- malformed input yields 'Err' tokens instead of aborting the scan.
scanTokens :: [String] -> [String] -> String -> String -> Pos -> String -> [HsToken]
scanTokens keywordstxt keywordsops specchars opchars pos input
  = doScan pos input
  where
    -- Membership tests backed by a balanced binary tree for O(log n) lookup.
    locatein :: Ord a => [a] -> a -> Bool
    locatein es = isJust . btLocateIn compare (tab2tree (sort es))
    iskw = locatein keywordstxt
    isop = locatein keywordsops
    isSymbol = locatein specchars
    isOpsym = locatein opchars

    isIdStart c = isLower c || c == '_'
    isIdChar c = isAlphaNum c
              || c == '\''
              || c == '_'

    -- Consume an identifier tail, advancing the position by its length.
    scanIdent p s = let (name,rest) = span isIdChar s
                    in (name,advc (length name) p,rest)

    doScan p [] = []
    doScan p (c:s) | isSpace c = let (sp,next) = span isSpace s
                                 in doScan (foldl (flip updPos) p (c:sp)) next
    -- '@field' and '@field.attr' attribute references.
    doScan p (c:d:s) | isAGesc c && isIdStart d =
        let (fld,p2,rest) = scanIdent (advc 2 p) s
            field = d:fld
        in case rest of
             ('.':r:rs)
               | isIdStart r -> let (at,p3,rest2) = scanIdent (advc 2 p2) rs
                                    attr = r : at
                                in AGField (Ident field p) (Ident attr p) p Nothing : doScan p3 rest2
             _ -> AGLocal (Ident field p) p Nothing : doScan p2 rest
    -- Line comments run to the end of the line.
    doScan p ('-':'-':s) = doScan p (dropWhile (/= '\n') s)
    -- Nestable block comments, handled by 'lexNest'.
    doScan p ('{':'-':s) = advc' 2 p (lexNest doScan) s -- }
    doScan p ('"':ss)
      = let (s,swidth,rest) = scanString ss
        in if null rest || head rest /= '"'
           then Err "Unterminated string literal" p : advc' swidth p doScan rest
           else StrToken s p : advc' (swidth+2) p doScan (tail rest)
    doScan p ('\'':ss)
      = let (mc,cwidth,rest) = scanChar ss
        in case mc of
             Nothing -> Err "Error in character literal" p : advc' cwidth p doScan rest
             Just c -> if null rest || head rest /= '\''
                       then Err "Unterminated character literal" p : advc' (cwidth+1) p doScan rest
                       else CharToken [c] p : advc' (cwidth+2) p doScan (tail rest)
    doScan p cs@(c:s)
      | isIdStart c || isUpper c =
          let (name', p', s') = scanIdent (advc 1 p) s
              name = c:name'
              tok = if iskw name
                    then HsToken name p -- keyword
                    else if null name' && isSymbol c
                         then HsToken [c] p -- '_'
                         -- NOTE(review): this 'else' line was lost in the
                         -- archived copy of the file and is restored here.
                         else HsToken name p -- varid / conid
          in tok : doScan p' s'
      | isOpsym c =
          let (name, s') = span isOpsym cs
              -- Both guard arms build the same token; the isop test is kept
              -- only to document the keyword-operator distinction.
              tok | isop name = HsToken name p
                  | otherwise = HsToken name p
          in tok : doScan (foldl (flip updPos) p name) s'
      | isDigit c =
          let (base,digs,width,s') = getNumber cs
              number = case base of
                         8 -> "0o"++digs
                         10 -> digs
                         16 -> "0x"++digs
                         _ -> digs -- unreachable: getNumber yields 8, 10 or 16
          in HsToken number p : advc' width p doScan s'
      | isSymbol c = HsToken [c] p : advc' 1 p doScan s
      | otherwise = Err ("Unexpected character " ++ show c) p : updPos' c p doScan s

    -- Skip a (possibly nested) block comment, then resume with 'cont'.
    lexNest cont pos inp = lexNest' cont pos inp
      where lexNest' c p ('{':'-':s) = lexNest' (lexNest' c) (advc 2 p) s
            lexNest' c p ('-':'}':s) = c (advc 2 p) s
            lexNest' c p (x:s) = lexNest' c (updPos x p) s
            lexNest' _ _ [] = [Err "Unterminated nested comment" pos]

    -- Scan a string-literal body; returns (contents, width, rest).
    scanString [] = ("",0,[])
    scanString ('\\':'&':xs) = let (str,w,r) = scanString xs
                               in (str,w+2,r)
    scanString ('\'':xs) = let (str,w,r) = scanString xs
                           in ('\'': str,w+1,r)
    scanString xs = let (ch,cw,cr) = getchar xs
                        (str,w,r) = scanString cr
                    in maybe ("",0,xs) (\c -> (c:str,cw+w,r)) ch

    scanChar ('"' :xs) = (Just '"',1,xs)
    scanChar xs = getchar xs

    -- One literal character; Nothing on delimiters/newlines/tabs.
    getchar [] = (Nothing,0,[])
    getchar s@('\n':_ ) = (Nothing,0,s )
    getchar s@('\t':_ ) = (Nothing,0,s)
    getchar s@('\'':_ ) = (Nothing,0,s)
    getchar s@('"' :_ ) = (Nothing,0,s)
    getchar ('\\':xs) = let (c,l,r) = getEscChar xs
                        in (c,l+1,r)
    getchar (x:xs) = (Just x,1,xs)

    -- Escape sequences: numeric (must fit in 0..255) or a control mnemonic.
    getEscChar [] = (Nothing,0,[])
    getEscChar s@(x:xs) | isDigit x = let (base,n,len,rest) = getNumber s
                                          val = readn base n
                                      in if val >= 0 && val <= 255
                                         then (Just (chr val),len, rest)
                                         else (Nothing,1,rest)
                        | otherwise = case x `lookup` cntrChars of
                                        Nothing -> (Nothing,0,s)
                                        Just c -> (Just c,1,xs)
      where cntrChars = [('a','\a'),('b','\b'),('f','\f'),('n','\n'),('r','\r'),('t','\t')
                        ,('v','\v'),('\\','\\'),('"','\"'),('\'','\'')]

    readn base n = foldl (\r x -> value x + base * r) 0 n

    -- Recognise decimal, octal (0o/0O) and hex (0x/0X) literals; returns
    -- (base, digits, consumed width, rest).  Only called on nonempty input
    -- starting with a digit.
    getNumber cs@(c:s)
      | c /= '0' = num10
      | null s = const0
      | hs == 'x' || hs == 'X' = num16
      | hs == 'o' || hs == 'O' = num8
      | otherwise = num10
      where (hs:ts) = s
            const0 = (10, "0",1,s)
            num10 = let (n,r) = span isDigit cs
                    in (10,n,length n,r)
            num16 = readNum isHexaDigit ts 16
            num8 = readNum isOctalDigit ts 8
            readNum p ts tk
              = let nrs@(n,rs) = span p ts
                in if null n then const0
                   else (tk , n, 2+length n,rs)

    isHexaDigit d = isDigit d || (d >= 'A' && d <= 'F') || (d >= 'a' && d <= 'f')
    isOctalDigit d = d >= '0' && d <= '7'

    -- Digit value; only called on characters accepted by the digit tests.
    value c | isDigit c = ord c - ord '0'
            | isUpper c = ord c - ord 'A' + 10
            | isLower c = ord c - ord 'a' + 10
| null | https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/uuagc/src/HsTokenScanner.hs | haskell | }
keyword
'_' |
module HsTokenScanner where
import HsToken
import UU.Scanner.Position
import List(sort)
import UU.Util.BinaryTrees
import CommonTypes
import Maybe
import Char
isAGesc c = c == '@'
lexTokens :: Pos -> String -> [HsToken]
lexTokens = scanTokens keywordstxt keywordsops specialchars opchars
where keywordstxt = []
keywordsops = [".","=", ":=", ":","|","@"]
specialchars = ";()[],_{}`"
opchars = "!#$%&*+./<=>?@\\^|-~:"
scanTokens :: [String] -> [String] -> String -> String -> Pos -> String -> [HsToken]
scanTokens keywordstxt keywordsops specchars opchars pos input
= doScan pos input
where
locatein :: Ord a => [a] -> a -> Bool
locatein es = isJust . btLocateIn compare (tab2tree (sort es))
iskw = locatein keywordstxt
isop = locatein keywordsops
isSymbol = locatein specchars
isOpsym = locatein opchars
isIdStart c = isLower c || c == '_'
isIdChar c = isAlphaNum c
|| c == '\''
|| c == '_'
scanIdent p s = let (name,rest) = span isIdChar s
in (name,advc (length name) p,rest)
doScan p [] = []
doScan p (c:s) | isSpace c = let (sp,next) = span isSpace s
in doScan (foldl (flip updPos) p (c:sp)) next
doScan p (c:d:s) | isAGesc c && isIdStart d =
let (fld,p2,rest) = scanIdent (advc 2 p) s
field = d:fld
in case rest of
('.':r:rs)
| isIdStart r -> let (at,p3,rest2) = scanIdent (advc 2 p2) rs
attr = r : at
in AGField (Ident field p) (Ident attr p) p Nothing : doScan p3 rest2
_ -> AGLocal (Ident field p) p Nothing : doScan p2 rest
doScan p ('-':'-':s) = doScan p (dropWhile (/= '\n') s)
doScan p ('"':ss)
= let (s,swidth,rest) = scanString ss
in if null rest || head rest /= '"'
then Err "Unterminated string literal" p : advc' swidth p doScan rest
else StrToken s p : advc' (swidth+2) p doScan (tail rest)
doScan p ('\'':ss)
= let (mc,cwidth,rest) = scanChar ss
in case mc of
Nothing -> Err "Error in character literal" p : advc' cwidth p doScan rest
Just c -> if null rest || head rest /= '\''
then Err "Unterminated character literal" p : advc' (cwidth+1) p doScan rest
else CharToken [c] p : advc' (cwidth+2) p doScan (tail rest)
doScan p cs@(c:s)
| isIdStart c || isUpper c
= let (name', p', s') = scanIdent (advc 1 p) s
name = c:name'
tok = if iskw name
else if null name' && isSymbol c
varid / conid
in tok : doScan p' s'
| isOpsym c = let (name, s') = span isOpsym cs
tok | isop name = HsToken name p
| otherwise = HsToken name p
in tok : doScan (foldl (flip updPos) p name) s'
| isDigit c = let (base,digs,width,s') = getNumber cs
number = case base of
8 -> "0o"++digs
10 -> digs
16 -> "0x"++digs
in HsToken number p : advc' width p doScan s'
| isSymbol c = HsToken [c] p : advc' 1 p doScan s
| otherwise = Err ("Unexpected character " ++ show c) p : updPos' c p doScan s
lexNest cont pos inp = lexNest' cont pos inp
where lexNest' c p ('{':'-':s) = lexNest' (lexNest' c) (advc 2 p) s
lexNest' c p ('-':'}':s) = c (advc 2 p) s
lexNest' c p (x:s) = lexNest' c (updPos x p) s
lexNest' _ _ [] = [Err "Unterminated nested comment" pos]
scanString [] = ("",0,[])
scanString ('\\':'&':xs) = let (str,w,r) = scanString xs
in (str,w+2,r)
scanString ('\'':xs) = let (str,w,r) = scanString xs
in ('\'': str,w+1,r)
scanString xs = let (ch,cw,cr) = getchar xs
(str,w,r) = scanString cr
str' = maybe "" (:str) ch
in maybe ("",0,xs) (\c -> (c:str,cw+w,r)) ch
scanChar ('"' :xs) = (Just '"',1,xs)
scanChar xs = getchar xs
getchar [] = (Nothing,0,[])
getchar s@('\n':_ ) = (Nothing,0,s )
getchar s@('\t':_ ) = (Nothing,0,s)
getchar s@('\'':_ ) = (Nothing,0,s)
getchar s@('"' :_ ) = (Nothing,0,s)
getchar ('\\':xs) = let (c,l,r) = getEscChar xs
in (c,l+1,r)
getchar (x:xs) = (Just x,1,xs)
getEscChar [] = (Nothing,0,[])
getEscChar s@(x:xs) | isDigit x = let (base,n,len,rest) = getNumber s
val = readn base n
in if val >= 0 && val <= 255
then (Just (chr val),len, rest)
else (Nothing,1,rest)
| otherwise = case x `lookup` cntrChars of
Nothing -> (Nothing,0,s)
Just c -> (Just c,1,xs)
where cntrChars = [('a','\a'),('b','\b'),('f','\f'),('n','\n'),('r','\r'),('t','\t')
,('v','\v'),('\\','\\'),('"','\"'),('\'','\'')]
readn base n = foldl (\r x -> value x + base * r) 0 n
getNumber cs@(c:s)
| c /= '0' = num10
| null s = const0
| hs == 'x' || hs == 'X' = num16
| hs == 'o' || hs == 'O' = num8
| otherwise = num10
where (hs:ts) = s
const0 = (10, "0",1,s)
num10 = let (n,r) = span isDigit cs
in (10,n,length n,r)
num16 = readNum isHexaDigit ts 16
num8 = readNum isOctalDigit ts 8
readNum p ts tk
= let nrs@(n,rs) = span p ts
in if null n then const0
else (tk , n, 2+length n,rs)
isHexaDigit d = isDigit d || (d >= 'A' && d <= 'F') || (d >= 'a' && d <= 'f')
isOctalDigit d = d >= '0' && d <= '7'
value c | isDigit c = ord c - ord '0'
| isUpper c = ord c - ord 'A' + 10
| isLower c = ord c - ord 'a' + 10
|
6908671e6d78676044507102c4c1f5ea67bb5e2f07335962dfb9c79eaba0175a | SonyCSLParis/fcg-hybrids | de-render.lisp |
;; Copyright 2019-present
Sony Computer Science Laboratories Paris
( )
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;; -2.0
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;=========================================================================
(in-package :fcg)
(defmethod de-render ((utterance string) (mode (eql :english-hybrid))
                      &key (key :english) cxn-inventory (model "en") &allow-other-keys)
  "De-renders an English utterance into a transient structure enriched with
   functional (dependency) and constituent structure from the NLP tools."
  ;; MODEL is accepted for interface compatibility but not used here.
  (declare (ignorable mode cxn-inventory model))
  ;; Step 1: Get the dependency and constituent analysis:
  (multiple-value-bind (dependency-tree constituent-tree)
      (nlp-tools::get-english-sentence-analysis utterance)
    ;; NOTE(review): the archived copy of this method lost the LET* opener,
    ;; leaving unbalanced parentheses; it is restored here.
    (let* (;; Step 2: Use the dependency tree for segmenting the utterance into a list of strings:
           (utterance-as-list (nlp-tools::dp-build-utterance-as-list-from-dependency-tree dependency-tree))
           ;; Step 3: Use the list of strings for building a basic transient structure:
           (basic-transient-structure (de-render utterance-as-list :de-render-with-scope
                                                 :cxn-inventory cxn-inventory)))
      ;; Step 4: Expand the transient structure with information from the dependency tree:
      (setf basic-transient-structure
            (represent-functional-structure dependency-tree basic-transient-structure key *english-dependency-specs*))
      ;; Step 5: Expand the transient structure with information from the constituent tree:
      (setf basic-transient-structure
            (represent-constituent-structure constituent-tree basic-transient-structure key cxn-inventory))
      ;; Step 6: Return the enriched transient structure:
      basic-transient-structure)))
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
========================================================================= |
Sony Computer Science Laboratories Paris
( )
distributed under the License is distributed on an " AS IS " BASIS ,
(in-package :fcg)
(defmethod de-render ((utterance string) (mode (eql :english-hybrid))
                      &key (key :english) cxn-inventory (model "en") &allow-other-keys)
  "De-renders an English utterance into a transient structure enriched with
   functional (dependency) and constituent structure from the NLP tools."
  ;; MODEL is accepted for interface compatibility but not used here.
  (declare (ignorable mode cxn-inventory model))
  ;; Step 1: Get the dependency and constituent analysis:
  (multiple-value-bind (dependency-tree constituent-tree)
      (nlp-tools::get-english-sentence-analysis utterance)
    ;; NOTE(review): the archived copy of this method lost the LET* opener,
    ;; leaving unbalanced parentheses; it is restored here.
    (let* (;; Step 2: Use the dependency tree for segmenting the utterance into a list of strings:
           (utterance-as-list (nlp-tools::dp-build-utterance-as-list-from-dependency-tree dependency-tree))
           ;; Step 3: Use the list of strings for building a basic transient structure:
           (basic-transient-structure (de-render utterance-as-list :de-render-with-scope
                                                 :cxn-inventory cxn-inventory)))
      ;; Step 4: Expand the transient structure with information from the dependency tree:
      (setf basic-transient-structure
            (represent-functional-structure dependency-tree basic-transient-structure key *english-dependency-specs*))
      ;; Step 5: Expand the transient structure with information from the constituent tree:
      (setf basic-transient-structure
            (represent-constituent-structure constituent-tree basic-transient-structure key cxn-inventory))
      ;; Step 6: Return the enriched transient structure:
      basic-transient-structure)))
7fad6c49ff59ed5e2f832ddfe34ab453d060d5bb4673b81ef33ba6670ca312b5 | samply/blaze | executors.clj | (ns blaze.executors
(:import
[java.util.concurrent Executor ExecutorService Executors ThreadFactory]))
(set! *warn-on-reflection* true)
(defn executor? [x]
(instance? Executor x))
(defn executor-service? [x]
(instance? ExecutorService x))
(defn execute!
"Executes the function `f` at some time in the future."
[executor f]
(.execute ^Executor executor f))
(defn shutdown! [executor-service]
(.shutdown ^ExecutorService executor-service))
(defn shutdown? [executor-service]
(.isShutdown ^ExecutorService executor-service))
(defn terminated?
"Returns true if all tasks have completed following shut down.
Note that this function returns never true unless either `shutdown` or
`shutdown-now` was called first."
[executor-service]
(.isTerminated ^ExecutorService executor-service))
(defn await-termination
"Blocks until all tasks have completed execution after a shutdown request, or
the timeout occurs, or the current thread is interrupted, whichever happens
first."
[executor-service timeout unit]
(.awaitTermination ^ExecutorService executor-service timeout unit))
(defn- thread-name!
  "Advances the per-pool counter atom and renders the next thread name from
   the format template (which receives the counter value)."
  [thread-counter name-template]
  (let [n (swap! thread-counter inc)]
    (format name-template n)))
(defn cpu-bound-pool
  "Returns a thread pool with a fixed number of threads which is the number of
   available processors.  `name-template` is a format string receiving one
   integer argument; each pool thread is named from it."
  [name-template]
  (let [thread-counter (atom 0)]
    (Executors/newFixedThreadPool
      (.availableProcessors (Runtime/getRuntime))
      ;; custom factory so every thread gets a numbered, template-based name
      (reify ThreadFactory
        (newThread [_ r]
          (Thread. ^Runnable r ^String (thread-name! thread-counter
                                                     name-template)))))))
(defn io-pool
  "Returns a thread pool with a fixed number of threads which is suitable for
   I/O.  `n` is the pool size; `name-template` is a format string receiving
   one integer argument, used to name each pool thread."
  [n name-template]
  (let [thread-counter (atom 0)]
    (Executors/newFixedThreadPool
      n
      ;; custom factory so every thread gets a numbered, template-based name
      (reify ThreadFactory
        (newThread [_ r]
          (Thread. ^Runnable r ^String (thread-name! thread-counter
                                                     name-template)))))))
(defn single-thread-executor
  "Returns a single-threaded executor; the unary variant additionally names
   its worker thread with the given string."
  ([]
   (Executors/newSingleThreadExecutor))
  ([name]
   (Executors/newSingleThreadExecutor
     (reify ThreadFactory
       (newThread [_ r]
         (Thread. ^Runnable r ^String name))))))
| null | https://raw.githubusercontent.com/samply/blaze/41244588a59c5d5bf1070da1b263af7b46788268/modules/executor/src/blaze/executors.clj | clojure | (ns blaze.executors
(:import
[java.util.concurrent Executor ExecutorService Executors ThreadFactory]))
(set! *warn-on-reflection* true)
(defn executor? [x]
(instance? Executor x))
(defn executor-service? [x]
(instance? ExecutorService x))
(defn execute!
"Executes the function `f` at some time in the future."
[executor f]
(.execute ^Executor executor f))
(defn shutdown! [executor-service]
(.shutdown ^ExecutorService executor-service))
(defn shutdown? [executor-service]
(.isShutdown ^ExecutorService executor-service))
(defn terminated?
"Returns true if all tasks have completed following shut down.
Note that this function returns never true unless either `shutdown` or
`shutdown-now` was called first."
[executor-service]
(.isTerminated ^ExecutorService executor-service))
(defn await-termination
"Blocks until all tasks have completed execution after a shutdown request, or
the timeout occurs, or the current thread is interrupted, whichever happens
first."
[executor-service timeout unit]
(.awaitTermination ^ExecutorService executor-service timeout unit))
(defn- thread-name!
[thread-counter name-template]
(format name-template (swap! thread-counter inc)))
(defn cpu-bound-pool
"Returns a thread pool with a fixed number of threads which is the number of
available processors."
[name-template]
(let [thread-counter (atom 0)]
(Executors/newFixedThreadPool
(.availableProcessors (Runtime/getRuntime))
(reify ThreadFactory
(newThread [_ r]
(Thread. ^Runnable r ^String (thread-name! thread-counter
name-template)))))))
(defn io-pool
"Returns a thread pool with a fixed number of threads which is suitable for
I/O."
[n name-template]
(let [thread-counter (atom 0)]
(Executors/newFixedThreadPool
n
(reify ThreadFactory
(newThread [_ r]
(Thread. ^Runnable r ^String (thread-name! thread-counter
name-template)))))))
(defn single-thread-executor
([]
(Executors/newSingleThreadExecutor))
([name]
(Executors/newSingleThreadExecutor
(reify ThreadFactory
(newThread [_ r]
(Thread. ^Runnable r ^String name))))))
| |
b1ab2d4cd6600f4c38e80739a02027d2a946ac60c71d0555bf41970577205e7d | RBornat/jape | doubleclick.ml |
Copyright ( C ) 2003 - 19
This file is part of the proof engine , which is part of .
is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
( or look at ) .
Copyright (C) 2003-19 Richard Bornat & Bernard Sufrin
This file is part of the jape proof engine, which is part of jape.
Jape is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Jape is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with jape; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
(or look at ).
*)
open Sequent
open Listfuns
open Mappingfuns
open Tactic
open Optionfuns
type seq = Sequent.seq
and tactic = Tactictype.tactic
and term = Termtype.term
let seqmatch = seqmatch false
type dclick = DClickHyp | DClickConc
type doubleclickdef = dclick * tactic * seq
let doubleclickdefs : doubleclickdef list ref = ref []
(* Printable name of a double-click sense, for tracing/diagnostics. *)
let string_of_dclick sense =
  match sense with
  | DClickHyp -> "DClickHyp"
  | DClickConc -> "DClickConc"
(* Register a double-click binding (sense, tactic, pattern sequent).
   If a binding with the same sense and an equal sequent already exists,
   it is replaced in place; otherwise the new binding is appended. *)
let rec adddoubleclick (b, s, seq as p) =
  let rec insert =
    function
      [] -> [p]
    | (b', _, seq' as p') :: doubleclicks ->
        if b = b' && eqseqs (seq, seq') then p :: doubleclicks
        else p' :: insert doubleclicks
  in
  doubleclickdefs := insert !doubleclickdefs
(* Remove every binding whose sense is [b] and whose sequent equals [seq];
   entries differing in sense or sequent are kept.  NOTE(review): (<|) comes
   from Listfuns and appears to be a filter operator — confirm there. *)
let rec deldoubleclick (b, seq) =
  doubleclickdefs :=
    ((fun (b', _, seq') -> b <> b' || not (eqseqs (seq, seq'))) <|
     !doubleclickdefs)
(* Forget all registered double-click bindings. *)
let rec cleardoubleclicks () = doubleclickdefs := []
(* Find the first registered binding with the given sense whose pattern
   sequent matches [seq]; on success return its tactic with the match
   environment substituted in via remaptactic. *)
let rec matchdoubleclick sense seq =
  let rec match1 (sense', action', seq') =
    if sense = sense' then
      match seqmatch seq' seq empty with
        Some env -> Some (remaptactic env action')
      | None -> None
    else None
  in
  findfirst match1 !doubleclickdefs
| null | https://raw.githubusercontent.com/RBornat/jape/afe9f207e89e965636b43ef8fad38fd1f69737ae/distrib/camlengine/doubleclick.ml | ocaml |
Copyright ( C ) 2003 - 19
This file is part of the proof engine , which is part of .
is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
( or look at ) .
Copyright (C) 2003-19 Richard Bornat & Bernard Sufrin
This file is part of the jape proof engine, which is part of jape.
Jape is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Jape is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with jape; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
(or look at ).
*)
open Sequent
open Listfuns
open Mappingfuns
open Tactic
open Optionfuns
type seq = Sequent.seq
and tactic = Tactictype.tactic
and term = Termtype.term
let seqmatch = seqmatch false
type dclick = DClickHyp | DClickConc
type doubleclickdef = dclick * tactic * seq
let doubleclickdefs : doubleclickdef list ref = ref []
let string_of_dclick = function
| DClickHyp -> "DClickHyp"
| DClickConc -> "DClickConc"
let rec adddoubleclick (b, s, seq as p) =
let rec insert =
function
[] -> [p]
| (b', _, seq' as p') :: doubleclicks ->
if b = b' && eqseqs (seq, seq') then p :: doubleclicks
else p' :: insert doubleclicks
in
doubleclickdefs := insert !doubleclickdefs
let rec deldoubleclick (b, seq) =
doubleclickdefs :=
((fun (b', _, seq') -> b <> b' || not (eqseqs (seq, seq'))) <|
!doubleclickdefs)
let rec cleardoubleclicks () = doubleclickdefs := []
let rec matchdoubleclick sense seq =
let rec match1 (sense', action', seq') =
if sense = sense' then
match seqmatch seq' seq empty with
Some env -> Some (remaptactic env action')
| None -> None
else None
in
findfirst match1 !doubleclickdefs
| |
87a83f00bc615f38b84233063d72a65ab1274ee6c56be82b96538a3afece73d3 | mkoppmann/eselsohr | Command.hs | module Lib.App.Command
( -- * Article related
* * CreateArticle
CreateArticle (..)
, createArticle
* *
, ChangeArticleTitle (..)
, changeArticleTitle
-- ** ChangeArticleState
, MarkArticleAsRead (..)
, markArticleAsRead
, MarkArticleAsUnread (..)
, markArticleAsUnread
* * DeleteArticle
, DeleteArticle (..)
, deleteArticle
-- * Capability related
-- ** CreateUnlockLink
, CreateUnlockLink (..)
, createUnlockLink
* *
, DeleteUnlockLink (..)
, deleteUnlockLink
* *
, AddShareUnlockLinks (..)
, addShareUnlockLinks
-- ** DeleteShareUnlockLinks
, DeleteShareUnlockLinks (..)
, deleteShareUnlockLinks
* *
, AddShareArticleList (..)
, addShareArticleList
-- ** DeleteShareArticleList
, DeleteShareArticleList (..)
, deleteShareArticleList
-- ** AddShareArticle
, AddShareArticle (..)
, addShareArticle
-- ** DeleteShareArticle
, DeleteShareArticle (..)
, deleteShareArticle
-- ** RemoveExpiredCapabilities
, RemoveExpiredCapabilities (..)
, removeExpiredCapabilities
-- * Collection related
-- ** CreateCollection
, createCollection
) where
import Data.Time.Clock (UTCTime)
import Prelude hiding
( id
, state
)
import qualified Lib.App.Port as Port
import qualified Lib.Domain.Article as Art
import qualified Lib.Domain.Authorization as Authz
import qualified Lib.Domain.Capability as Cap
import qualified Lib.Domain.Repo.ArticleList as ArtRepo
import qualified Lib.Domain.Repo.CapabilityList as CapRepo
import qualified Lib.Domain.Repo.Collection as ColRepo
import Lib.App.Port
( MonadRandom
, MonadScraper
, MonadTime
)
import Lib.Domain.Article
( Article
, ArticleState
)
import Lib.Domain.Capability
( ArticlePerms
, ArticlesPerms
, Capability
, ObjectReference
, OverviewPerms
)
import Lib.Domain.Collection (Collection)
import Lib.Domain.Error (AppErrorType)
import Lib.Domain.Id (Id)
import Lib.Domain.Repo.ArticleList
( ArticleListAction
, ArticleListRepo
)
import Lib.Domain.Repo.CapabilityList (CapabilityListRepo)
import Lib.Domain.Repo.Collection (CollectionRepo)
import Lib.Domain.Uri (Uri)
type CommandResult a = Either AppErrorType a
------------------------------------------------------------------------
CreateArticle
------------------------------------------------------------------------
data CreateArticle = CreateArticle
{ colId :: !(Id Collection)
, uri :: !Uri
, objRef :: !ObjectReference
}
-- | Create a new article in the given collection: allocate a fresh id,
-- scrape the page title from the 'Uri', time-stamp the creation, and store
-- the article in 'Art.Unread' state.  Returns 'Left' if the capability in
-- @objRef@ does not authorize article creation or if the scraped title is
-- rejected by 'Art.titleFromText'.
createArticle :: (ArticleListRepo m, MonadScraper m, MonadTime m) => CreateArticle -> m (CommandResult (Id Article))
createArticle CreateArticle{..} = do
  artId <- ArtRepo.nextId
  title <- Port.scrapWebsite uri
  creation <- Port.getCurrentTime
  case mkAction artId title creation of
    Left err -> pure $ Left err
    Right action -> do
      ArtRepo.save colId action
      pure $ Right artId
 where
  -- Pure part: authorization check, title validation, action construction.
  mkAction :: Id Article -> Text -> UTCTime -> Either AppErrorType ArticleListAction
  mkAction id artTitle creation = do
    perm <- Authz.canCreateArticles objRef
    title <- Art.titleFromText artTitle
    let state = Art.Unread
        art = Art.Article{..}
    pure $ ArtRepo.addArticle perm id art
------------------------------------------------------------------------
------------------------------------------------------------------------
data ChangeArticleTitle = ChangeArticleTitle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, title :: !Text
, objRef :: !ObjectReference
}
changeArticleTitle :: (ArticleListRepo m) => ChangeArticleTitle -> m (CommandResult ())
changeArticleTitle ChangeArticleTitle{..} = case mkAction of
Left err -> pure $ Left err
Right action -> Right <$> ArtRepo.save colId action
where
mkAction :: Either AppErrorType ArticleListAction
mkAction = do
perm <- Authz.canChangeArticleTitle objRef artId
artTitle <- Art.titleFromText title
pure $ ArtRepo.changeArticleTitle perm artTitle
------------------------------------------------------------------------
ChangeArticleState
------------------------------------------------------------------------
data MarkArticleAsRead = MarkArticleAsRead
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, objRef :: !ObjectReference
}
markArticleAsRead :: (ArticleListRepo m) => MarkArticleAsRead -> m (CommandResult ())
markArticleAsRead MarkArticleAsRead{..} = changeArticleState colId artId objRef Art.Read
data MarkArticleAsUnread = MarkArticleAsUnread
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, objRef :: !ObjectReference
}
markArticleAsUnread :: (ArticleListRepo m) => MarkArticleAsUnread -> m (CommandResult ())
markArticleAsUnread MarkArticleAsUnread{..} = changeArticleState colId artId objRef Art.Unread
-- | Shared worker for 'markArticleAsRead' / 'markArticleAsUnread':
-- authorize the state change for this article, then persist the matching
-- repository action.
changeArticleState
  :: (ArticleListRepo m) => Id Collection -> Id Article -> ObjectReference -> ArticleState -> m (CommandResult ())
changeArticleState colId artId objRef artState = case Authz.canChangeArticleState objRef artId of
  Left err -> pure $ Left err
  Right perm -> Right <$> ArtRepo.save colId (action perm artState)
 where
  -- Map the requested target state onto the corresponding repo action.
  action perm Art.Unread = ArtRepo.markArticleAsUnread perm
  action perm Art.Read = ArtRepo.markArticleAsRead perm
------------------------------------------------------------------------
DeleteArticle
------------------------------------------------------------------------
data DeleteArticle = DeleteArticle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, objRef :: !ObjectReference
}
-- | Delete an article after checking that @objRef@ authorizes deletion of
-- this particular article.
deleteArticle :: (ArticleListRepo m) => DeleteArticle -> m (CommandResult ())
deleteArticle DeleteArticle{..} = case Authz.canDeleteArticle objRef artId of
  Left err -> pure $ Left err
  Right perm -> Right <$> ArtRepo.save colId (ArtRepo.removeArticle perm)
------------------------------------------------------------------------
-- CreateUnlockLink
------------------------------------------------------------------------
data CreateUnlockLink = CreateUnlockLink
{ colId :: !(Id Collection)
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
-- | Create an unlock-link capability (pointing at the default articles
-- reference) with an optional petname and expiration date.  Requires the
-- 'Authz.canCreateUnlockLinks' permission on @objRef@.
createUnlockLink :: (CapabilityListRepo m) => CreateUnlockLink -> m (CommandResult (Id Capability))
createUnlockLink CreateUnlockLink{..} = do
  capId <- CapRepo.nextId
  let cap = Cap.mkCapability capId Cap.defaultArticlesRef mPetname mExpDate
  case Authz.canCreateUnlockLinks objRef of
    Left err -> pure $ Left err
    Right perm -> do
      CapRepo.save colId (CapRepo.addUnlockLink perm capId cap)
      pure $ Right capId
------------------------------------------------------------------------
-- DeleteUnlockLink
------------------------------------------------------------------------
data DeleteUnlockLink = DeleteUnlockLink
{ colId :: !(Id Collection)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteUnlockLink :: (CapabilityListRepo m) => DeleteUnlockLink -> m (CommandResult ())
deleteUnlockLink DeleteUnlockLink{..} = case Authz.canDeleteUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeUnlockLink perm capId)
------------------------------------------------------------------------
-- AddShareUnlockLink
------------------------------------------------------------------------
data AddShareUnlockLinks = AddShareUnlockLinks
{ colId :: !(Id Collection)
, sharedPerms :: !OverviewPerms
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
addShareUnlockLinks :: (CapabilityListRepo m) => AddShareUnlockLinks -> m (CommandResult (Id Capability))
addShareUnlockLinks AddShareUnlockLinks{..} = do
capId <- CapRepo.nextId
case Authz.canShareUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> case mkAction capId perm of
Nothing -> pure Authz.unauthorized
Just action -> do
CapRepo.save colId action
pure $ Right capId
where
mkAction capId perm = do
sharedObjRef <- Cap.createSharedOverviewRef objRef sharedPerms
let cap = Cap.mkCapability capId sharedObjRef mPetname mExpDate
pure $ CapRepo.addShareUnlockLinks perm capId cap
------------------------------------------------------------------------
-- DeleteShareUnlockLink
------------------------------------------------------------------------
data DeleteShareUnlockLinks = DeleteShareUnlockLinks
{ colId :: !(Id Collection)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteShareUnlockLinks :: (CapabilityListRepo m) => DeleteShareUnlockLinks -> m (CommandResult ())
deleteShareUnlockLinks DeleteShareUnlockLinks{..} = case Authz.canShareUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeShareUnlockLinks perm capId)
------------------------------------------------------------------------
------------------------------------------------------------------------
data AddShareArticleList = AddShareArticleList
{ colId :: !(Id Collection)
, sharedPerms :: !ArticlesPerms
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
addShareArticleList :: (CapabilityListRepo m) => AddShareArticleList -> m (CommandResult (Id Capability))
addShareArticleList AddShareArticleList{..} = do
capId <- CapRepo.nextId
case Authz.canShareArticleList objRef of
Left err -> pure $ Left err
Right perm -> case mkAction capId perm of
Nothing -> pure Authz.unauthorized
Just action -> do
CapRepo.save colId action
pure $ Right capId
where
mkAction capId perm = do
sharedObjRef <- Cap.createSharedArticlesRef objRef sharedPerms
let cap = Cap.mkCapability capId sharedObjRef mPetname mExpDate
pure $ CapRepo.addShareArticleList perm capId cap
------------------------------------------------------------------------
DeleteShareArticleList
------------------------------------------------------------------------
data DeleteShareArticleList = DeleteShareArticleList
{ colId :: !(Id Collection)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteShareArticleList :: (CapabilityListRepo m) => DeleteShareArticleList -> m (CommandResult ())
deleteShareArticleList DeleteShareArticleList{..} = case Authz.canShareArticleList objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeShareArticleList perm capId)
------------------------------------------------------------------------
AddShareArticle
------------------------------------------------------------------------
data AddShareArticle = AddShareArticle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, sharedPerms :: !ArticlePerms
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
addShareArticle :: (CapabilityListRepo m) => AddShareArticle -> m (CommandResult (Id Capability))
addShareArticle AddShareArticle{..} = do
capId <- CapRepo.nextId
case Authz.canShareArticle objRef artId of
Left err -> pure $ Left err
Right perm -> case mkAction capId perm of
Nothing -> pure Authz.unauthorized
Just action -> do
CapRepo.save colId action
pure $ Right capId
where
mkAction capId perm = do
sharedObjRef <- Cap.createSharedArticleRef objRef sharedPerms artId
let cap = Cap.mkCapability capId sharedObjRef mPetname mExpDate
pure $ CapRepo.addShareArticle perm capId cap
------------------------------------------------------------------------
DeleteShareArticle
------------------------------------------------------------------------
data DeleteShareArticle = DeleteShareArticle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteShareArticle :: (CapabilityListRepo m) => DeleteShareArticle -> m (CommandResult ())
deleteShareArticle DeleteShareArticle{..} = case Authz.canShareArticle objRef artId of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeShareArticle perm capId)
------------------------------------------------------------------------
-- RemoveExpiredCapabilities
------------------------------------------------------------------------
data RemoveExpiredCapabilities = RemoveExpiredCapabilities
{ colId :: !(Id Collection)
, objRef :: !ObjectReference
}
removeExpiredCapabilities :: (CapabilityListRepo m, MonadTime m) => RemoveExpiredCapabilities -> m (CommandResult ())
removeExpiredCapabilities RemoveExpiredCapabilities{..} = do
curTime <- Port.getCurrentTime
case Authz.canDeleteUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeExpiredCapabilities perm curTime)
------------------------------------------------------------------------
CreateCollection
------------------------------------------------------------------------
createCollection :: (CollectionRepo m, MonadRandom m) => m (Id Collection, Id Capability)
createCollection = do
colId <- Port.getRandomId
collectionAlreadyExists <- ColRepo.exists colId
if collectionAlreadyExists then createCollection else createCollection' colId
where
createCollection' :: (CollectionRepo m, MonadRandom m) => Id Collection -> m (Id Collection, Id Capability)
createCollection' colId = do
capId <- Port.getRandomId
let cap = Cap.mkCapability capId Cap.defaultOverviewRef Nothing Nothing
ColRepo.createCollection colId capId cap
pure (colId, capId)
| null | https://raw.githubusercontent.com/mkoppmann/eselsohr/3bb8609199c1dfda94935e6dde0c46fc429de84e/src/Lib/App/Command.hs | haskell | * Article related
** ChangeArticleState
* Capability related
** CreateUnlockLink
** DeleteShareUnlockLinks
** DeleteShareArticleList
** AddShareArticle
** DeleteShareArticle
** RemoveExpiredCapabilities
* Collection related
** CreateCollection
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
CreateUnlockLink
----------------------------------------------------------------------
----------------------------------------------------------------------
DeleteUnlockLink
----------------------------------------------------------------------
----------------------------------------------------------------------
AddShareUnlockLink
----------------------------------------------------------------------
----------------------------------------------------------------------
DeleteShareUnlockLink
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
RemoveExpiredCapabilities
----------------------------------------------------------------------
----------------------------------------------------------------------
---------------------------------------------------------------------- | module Lib.App.Command
* * CreateArticle
CreateArticle (..)
, createArticle
* *
, ChangeArticleTitle (..)
, changeArticleTitle
, MarkArticleAsRead (..)
, markArticleAsRead
, MarkArticleAsUnread (..)
, markArticleAsUnread
* * DeleteArticle
, DeleteArticle (..)
, deleteArticle
, CreateUnlockLink (..)
, createUnlockLink
* *
, DeleteUnlockLink (..)
, deleteUnlockLink
* *
, AddShareUnlockLinks (..)
, addShareUnlockLinks
, DeleteShareUnlockLinks (..)
, deleteShareUnlockLinks
* *
, AddShareArticleList (..)
, addShareArticleList
, DeleteShareArticleList (..)
, deleteShareArticleList
, AddShareArticle (..)
, addShareArticle
, DeleteShareArticle (..)
, deleteShareArticle
, RemoveExpiredCapabilities (..)
, removeExpiredCapabilities
, createCollection
) where
import Data.Time.Clock (UTCTime)
import Prelude hiding
( id
, state
)
import qualified Lib.App.Port as Port
import qualified Lib.Domain.Article as Art
import qualified Lib.Domain.Authorization as Authz
import qualified Lib.Domain.Capability as Cap
import qualified Lib.Domain.Repo.ArticleList as ArtRepo
import qualified Lib.Domain.Repo.CapabilityList as CapRepo
import qualified Lib.Domain.Repo.Collection as ColRepo
import Lib.App.Port
( MonadRandom
, MonadScraper
, MonadTime
)
import Lib.Domain.Article
( Article
, ArticleState
)
import Lib.Domain.Capability
( ArticlePerms
, ArticlesPerms
, Capability
, ObjectReference
, OverviewPerms
)
import Lib.Domain.Collection (Collection)
import Lib.Domain.Error (AppErrorType)
import Lib.Domain.Id (Id)
import Lib.Domain.Repo.ArticleList
( ArticleListAction
, ArticleListRepo
)
import Lib.Domain.Repo.CapabilityList (CapabilityListRepo)
import Lib.Domain.Repo.Collection (CollectionRepo)
import Lib.Domain.Uri (Uri)
type CommandResult a = Either AppErrorType a
CreateArticle
data CreateArticle = CreateArticle
{ colId :: !(Id Collection)
, uri :: !Uri
, objRef :: !ObjectReference
}
createArticle :: (ArticleListRepo m, MonadScraper m, MonadTime m) => CreateArticle -> m (CommandResult (Id Article))
createArticle CreateArticle{..} = do
artId <- ArtRepo.nextId
title <- Port.scrapWebsite uri
creation <- Port.getCurrentTime
case mkAction artId title creation of
Left err -> pure $ Left err
Right action -> do
ArtRepo.save colId action
pure $ Right artId
where
mkAction :: Id Article -> Text -> UTCTime -> Either AppErrorType ArticleListAction
mkAction id artTitle creation = do
perm <- Authz.canCreateArticles objRef
title <- Art.titleFromText artTitle
let state = Art.Unread
art = Art.Article{..}
pure $ ArtRepo.addArticle perm id art
data ChangeArticleTitle = ChangeArticleTitle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, title :: !Text
, objRef :: !ObjectReference
}
changeArticleTitle :: (ArticleListRepo m) => ChangeArticleTitle -> m (CommandResult ())
changeArticleTitle ChangeArticleTitle{..} = case mkAction of
Left err -> pure $ Left err
Right action -> Right <$> ArtRepo.save colId action
where
mkAction :: Either AppErrorType ArticleListAction
mkAction = do
perm <- Authz.canChangeArticleTitle objRef artId
artTitle <- Art.titleFromText title
pure $ ArtRepo.changeArticleTitle perm artTitle
ChangeArticleState
data MarkArticleAsRead = MarkArticleAsRead
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, objRef :: !ObjectReference
}
markArticleAsRead :: (ArticleListRepo m) => MarkArticleAsRead -> m (CommandResult ())
markArticleAsRead MarkArticleAsRead{..} = changeArticleState colId artId objRef Art.Read
data MarkArticleAsUnread = MarkArticleAsUnread
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, objRef :: !ObjectReference
}
markArticleAsUnread :: (ArticleListRepo m) => MarkArticleAsUnread -> m (CommandResult ())
markArticleAsUnread MarkArticleAsUnread{..} = changeArticleState colId artId objRef Art.Unread
changeArticleState
:: (ArticleListRepo m) => Id Collection -> Id Article -> ObjectReference -> ArticleState -> m (CommandResult ())
changeArticleState colId artId objRef artState = case Authz.canChangeArticleState objRef artId of
Left err -> pure $ Left err
Right perm -> Right <$> ArtRepo.save colId (action perm artState)
where
action perm Art.Unread = ArtRepo.markArticleAsUnread perm
action perm Art.Read = ArtRepo.markArticleAsRead perm
DeleteArticle
data DeleteArticle = DeleteArticle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, objRef :: !ObjectReference
}
deleteArticle :: (ArticleListRepo m) => DeleteArticle -> m (CommandResult ())
deleteArticle DeleteArticle{..} = case Authz.canDeleteArticle objRef artId of
Left err -> pure $ Left err
Right perm -> Right <$> ArtRepo.save colId (ArtRepo.removeArticle perm)
data CreateUnlockLink = CreateUnlockLink
{ colId :: !(Id Collection)
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
createUnlockLink :: (CapabilityListRepo m) => CreateUnlockLink -> m (CommandResult (Id Capability))
createUnlockLink CreateUnlockLink{..} = do
capId <- CapRepo.nextId
let cap = Cap.mkCapability capId Cap.defaultArticlesRef mPetname mExpDate
case Authz.canCreateUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> do
CapRepo.save colId (CapRepo.addUnlockLink perm capId cap)
pure $ Right capId
data DeleteUnlockLink = DeleteUnlockLink
{ colId :: !(Id Collection)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteUnlockLink :: (CapabilityListRepo m) => DeleteUnlockLink -> m (CommandResult ())
deleteUnlockLink DeleteUnlockLink{..} = case Authz.canDeleteUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeUnlockLink perm capId)
data AddShareUnlockLinks = AddShareUnlockLinks
{ colId :: !(Id Collection)
, sharedPerms :: !OverviewPerms
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
addShareUnlockLinks :: (CapabilityListRepo m) => AddShareUnlockLinks -> m (CommandResult (Id Capability))
addShareUnlockLinks AddShareUnlockLinks{..} = do
capId <- CapRepo.nextId
case Authz.canShareUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> case mkAction capId perm of
Nothing -> pure Authz.unauthorized
Just action -> do
CapRepo.save colId action
pure $ Right capId
where
mkAction capId perm = do
sharedObjRef <- Cap.createSharedOverviewRef objRef sharedPerms
let cap = Cap.mkCapability capId sharedObjRef mPetname mExpDate
pure $ CapRepo.addShareUnlockLinks perm capId cap
data DeleteShareUnlockLinks = DeleteShareUnlockLinks
{ colId :: !(Id Collection)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteShareUnlockLinks :: (CapabilityListRepo m) => DeleteShareUnlockLinks -> m (CommandResult ())
deleteShareUnlockLinks DeleteShareUnlockLinks{..} = case Authz.canShareUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeShareUnlockLinks perm capId)
data AddShareArticleList = AddShareArticleList
{ colId :: !(Id Collection)
, sharedPerms :: !ArticlesPerms
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
addShareArticleList :: (CapabilityListRepo m) => AddShareArticleList -> m (CommandResult (Id Capability))
addShareArticleList AddShareArticleList{..} = do
capId <- CapRepo.nextId
case Authz.canShareArticleList objRef of
Left err -> pure $ Left err
Right perm -> case mkAction capId perm of
Nothing -> pure Authz.unauthorized
Just action -> do
CapRepo.save colId action
pure $ Right capId
where
mkAction capId perm = do
sharedObjRef <- Cap.createSharedArticlesRef objRef sharedPerms
let cap = Cap.mkCapability capId sharedObjRef mPetname mExpDate
pure $ CapRepo.addShareArticleList perm capId cap
DeleteShareArticleList
data DeleteShareArticleList = DeleteShareArticleList
{ colId :: !(Id Collection)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteShareArticleList :: (CapabilityListRepo m) => DeleteShareArticleList -> m (CommandResult ())
deleteShareArticleList DeleteShareArticleList{..} = case Authz.canShareArticleList objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeShareArticleList perm capId)
AddShareArticle
data AddShareArticle = AddShareArticle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, sharedPerms :: !ArticlePerms
, mPetname :: !(Maybe Text)
, mExpDate :: !(Maybe UTCTime)
, objRef :: !ObjectReference
}
addShareArticle :: (CapabilityListRepo m) => AddShareArticle -> m (CommandResult (Id Capability))
addShareArticle AddShareArticle{..} = do
capId <- CapRepo.nextId
case Authz.canShareArticle objRef artId of
Left err -> pure $ Left err
Right perm -> case mkAction capId perm of
Nothing -> pure Authz.unauthorized
Just action -> do
CapRepo.save colId action
pure $ Right capId
where
mkAction capId perm = do
sharedObjRef <- Cap.createSharedArticleRef objRef sharedPerms artId
let cap = Cap.mkCapability capId sharedObjRef mPetname mExpDate
pure $ CapRepo.addShareArticle perm capId cap
DeleteShareArticle
data DeleteShareArticle = DeleteShareArticle
{ colId :: !(Id Collection)
, artId :: !(Id Article)
, capId :: !(Id Capability)
, objRef :: !ObjectReference
}
deleteShareArticle :: (CapabilityListRepo m) => DeleteShareArticle -> m (CommandResult ())
deleteShareArticle DeleteShareArticle{..} = case Authz.canShareArticle objRef artId of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeShareArticle perm capId)
data RemoveExpiredCapabilities = RemoveExpiredCapabilities
{ colId :: !(Id Collection)
, objRef :: !ObjectReference
}
removeExpiredCapabilities :: (CapabilityListRepo m, MonadTime m) => RemoveExpiredCapabilities -> m (CommandResult ())
removeExpiredCapabilities RemoveExpiredCapabilities{..} = do
curTime <- Port.getCurrentTime
case Authz.canDeleteUnlockLinks objRef of
Left err -> pure $ Left err
Right perm -> Right <$> CapRepo.save colId (CapRepo.removeExpiredCapabilities perm curTime)
CreateCollection
createCollection :: (CollectionRepo m, MonadRandom m) => m (Id Collection, Id Capability)
createCollection = do
colId <- Port.getRandomId
collectionAlreadyExists <- ColRepo.exists colId
if collectionAlreadyExists then createCollection else createCollection' colId
where
createCollection' :: (CollectionRepo m, MonadRandom m) => Id Collection -> m (Id Collection, Id Capability)
createCollection' colId = do
capId <- Port.getRandomId
let cap = Cap.mkCapability capId Cap.defaultOverviewRef Nothing Nothing
ColRepo.createCollection colId capId cap
pure (colId, capId)
|
d426cb30a701b335636c00455be2bc2da21820b3e7e86b30d4411e503e51b890 | leviroth/ocaml-reddit-api | bounded_set.ml | open! Core
module Make (Hashable : Hashtbl.Key_plain) = struct
type t =
{ capacity : int
; hash_queue : (Hashable.t, unit) Hash_queue.t
}
let create ~capacity =
{ capacity
; hash_queue = Hash_queue.create (Hashtbl.Hashable.of_key (module Hashable))
}
;;
let mem { hash_queue; _ } value =
match Hash_queue.lookup_and_move_to_back hash_queue value with
| Some () -> true
| None -> false
;;
let add ({ hash_queue; capacity } as t) value =
match mem t value with
| true -> ()
| false ->
Hash_queue.enqueue_back_exn hash_queue value ();
(match Hash_queue.length hash_queue > capacity with
| false -> ()
| true -> Hash_queue.drop hash_queue `front)
;;
end
| null | https://raw.githubusercontent.com/leviroth/ocaml-reddit-api/a6c0cb2a325a26aa7c117e77d18ba9cb7c4a1f4b/reddit_api_async/bounded_set.ml | ocaml | open! Core
module Make (Hashable : Hashtbl.Key_plain) = struct
type t =
{ capacity : int
; hash_queue : (Hashable.t, unit) Hash_queue.t
}
let create ~capacity =
{ capacity
; hash_queue = Hash_queue.create (Hashtbl.Hashable.of_key (module Hashable))
}
;;
let mem { hash_queue; _ } value =
match Hash_queue.lookup_and_move_to_back hash_queue value with
| Some () -> true
| None -> false
;;
let add ({ hash_queue; capacity } as t) value =
match mem t value with
| true -> ()
| false ->
Hash_queue.enqueue_back_exn hash_queue value ();
(match Hash_queue.length hash_queue > capacity with
| false -> ()
| true -> Hash_queue.drop hash_queue `front)
;;
end
| |
90f2325e21c7849eff50f9fa2692a653f9f9937bfbaa062275e91ac75036ef86 | pezipink/Pisemble | stack.rkt | #lang pisemble
(require (for-syntax syntax/parse))
(define-syntax (regs stx)
(syntax-parse stx
[(_ ([new-reg old-reg:register]...) expr ...)
#'(let-syntax ([new-reg (make-rename-transformer #'old-reg)] ...)
expr ...)]))
(define-syntax (PUSH-REG-INNER stx)
(writeln stx)
(syntax-parse stx
[(_ mode rn:register ... )
#'{(Push-REG-INNER 0 mode rn ...)}]
[(_ used-bytes:nat mode rn:register ... )
#:when (= (syntax-e #'used-bytes) 16) ; finished slot
#'{(Push-REG-INNER 0 mode rn ...)}]
[(_ used-bytes:nat mode r:register-32 rn:register ... )
enough room for only 32bit reg
(let ([ num (syntax-e #'used-bytes)])
#`{ str r [sp @-4] !
(PUSH-REG-INNER #,(+ num 4) mode rn ...)})]
[(_ used-bytes:nat mode r:register rn:register ... )
#:when (<= (syntax-e #'used-bytes) 8) ; enough room for any reg
(let ([ num (syntax-e #'used-bytes)])
(syntax-parse #'r
[r:register-64
#`{ str r [sp @-8] !
(PUSH-REG-INNER #,(+ num 8) mode rn ...) }]
[r:register-32
#`{ str r [sp @-4] !
(PUSH-REG-INNER #,(+ num 4) mode rn ...)}]))]
[(_ used-bytes:nat mode rn:register ...+ )
;otherwise pad the sp with remaning bytes
#:with pad (datum->syntax this-syntax (- 16 (syntax-e #'used-bytes)))
#' { sub sp sp @pad
(PUSH-REG-INNER 0 mode rn ...)
}
]
[(_ used-bytes:nat mode )
;pad the sp with remaning bytes
#:with pad (datum->syntax this-syntax (- 16 (syntax-e #'used-bytes)))
#' {sub sp sp @pad } ]))
(define-syntax (PUSH stx)
(syntax-parse stx
[(_ r:register rn ...+)
#'{ str r [sp @-16] !
(PUSH rn ...) }]
[(_ r:register)
#'{ str r [sp @-16] !}]))
(define-syntax (PUSHH stx)
(syntax-parse stx
[(_ r:register)
#'{ strh r [sp @-16] !}]))
(define-syntax (PUSHB stx)
(syntax-parse stx
[(_ r:register)
#'{ strb r [sp @-16] !}]))
(define-syntax (POP stx)
(syntax-parse stx
[(_ r:register rn ...+)
#'{ ldr r [sp] @16
(POP rn ...) }]
[(_ r:register)
#'{ ldr r [sp] @16 }]))
(define-syntax (POPH stx)
(syntax-parse stx
[(_ r:register)
#'{ ldrh r [sp] @16 }]))
(define-syntax (POPB stx)
(syntax-parse stx
[(_ r:register)
#'{ ldrb r [sp] @16 }]))
(define-syntax (subr stx)
; define a subroutine. create a label for it and push/pop the supplied
; regs as a prologue/epilogue. rename regs to user-friendlty names.
;Of course, with a bit more work they could be automatically detected
(syntax-parse stx
[(_ subroutine-name:id
([new-reg old-reg:register] ...)
[used-reg:register ...]
code )
#:with (reg-rev ...)
(datum->syntax this-syntax (reverse (syntax->list #'(used-reg ...))))
#:with label
(let* ([sym (syntax-e #'subroutine-name)]
[str (symbol->string sym)]
[label-str (format ":~a" str)]
[label (string->symbol label-str)])
(datum->syntax this-syntax label))
#'(regs ([new-reg old-reg] ...)
(try-set-jump-source `label set-jump-source-current)
(PUSH x30) ; always preserve return address register
(PUSH used-reg) ...
code
(POP reg-rev) ...
(POP x30)
{ ret x30 })]))
(provide (all-defined-out))
| null | https://raw.githubusercontent.com/pezipink/Pisemble/63b1a6027af7e6e06d2facec019bed237696dce9/stack.rkt | racket | finished slot
enough room for any reg
otherwise pad the sp with remaning bytes
pad the sp with remaning bytes
define a subroutine. create a label for it and push/pop the supplied
regs as a prologue/epilogue. rename regs to user-friendlty names.
Of course, with a bit more work they could be automatically detected
always preserve return address register | #lang pisemble
(require (for-syntax syntax/parse))
(define-syntax (regs stx)
(syntax-parse stx
[(_ ([new-reg old-reg:register]...) expr ...)
#'(let-syntax ([new-reg (make-rename-transformer #'old-reg)] ...)
expr ...)]))
(define-syntax (PUSH-REG-INNER stx)
(writeln stx)
(syntax-parse stx
[(_ mode rn:register ... )
#'{(Push-REG-INNER 0 mode rn ...)}]
[(_ used-bytes:nat mode rn:register ... )
#'{(Push-REG-INNER 0 mode rn ...)}]
[(_ used-bytes:nat mode r:register-32 rn:register ... )
enough room for only 32bit reg
(let ([ num (syntax-e #'used-bytes)])
#`{ str r [sp @-4] !
(PUSH-REG-INNER #,(+ num 4) mode rn ...)})]
[(_ used-bytes:nat mode r:register rn:register ... )
(let ([ num (syntax-e #'used-bytes)])
(syntax-parse #'r
[r:register-64
#`{ str r [sp @-8] !
(PUSH-REG-INNER #,(+ num 8) mode rn ...) }]
[r:register-32
#`{ str r [sp @-4] !
(PUSH-REG-INNER #,(+ num 4) mode rn ...)}]))]
[(_ used-bytes:nat mode rn:register ...+ )
#:with pad (datum->syntax this-syntax (- 16 (syntax-e #'used-bytes)))
#' { sub sp sp @pad
(PUSH-REG-INNER 0 mode rn ...)
}
]
[(_ used-bytes:nat mode )
#:with pad (datum->syntax this-syntax (- 16 (syntax-e #'used-bytes)))
#' {sub sp sp @pad } ]))
(define-syntax (PUSH stx)
(syntax-parse stx
[(_ r:register rn ...+)
#'{ str r [sp @-16] !
(PUSH rn ...) }]
[(_ r:register)
#'{ str r [sp @-16] !}]))
(define-syntax (PUSHH stx)
(syntax-parse stx
[(_ r:register)
#'{ strh r [sp @-16] !}]))
(define-syntax (PUSHB stx)
(syntax-parse stx
[(_ r:register)
#'{ strb r [sp @-16] !}]))
(define-syntax (POP stx)
(syntax-parse stx
[(_ r:register rn ...+)
#'{ ldr r [sp] @16
(POP rn ...) }]
[(_ r:register)
#'{ ldr r [sp] @16 }]))
(define-syntax (POPH stx)
(syntax-parse stx
[(_ r:register)
#'{ ldrh r [sp] @16 }]))
(define-syntax (POPB stx)
(syntax-parse stx
[(_ r:register)
#'{ ldrb r [sp] @16 }]))
(define-syntax (subr stx)
(syntax-parse stx
[(_ subroutine-name:id
([new-reg old-reg:register] ...)
[used-reg:register ...]
code )
#:with (reg-rev ...)
(datum->syntax this-syntax (reverse (syntax->list #'(used-reg ...))))
#:with label
(let* ([sym (syntax-e #'subroutine-name)]
[str (symbol->string sym)]
[label-str (format ":~a" str)]
[label (string->symbol label-str)])
(datum->syntax this-syntax label))
#'(regs ([new-reg old-reg] ...)
(try-set-jump-source `label set-jump-source-current)
(PUSH used-reg) ...
code
(POP reg-rev) ...
(POP x30)
{ ret x30 })]))
(provide (all-defined-out))
|
d9f164946d84748e7f635839213422153867ba6175fccb7e47a83b602dc83fe6 | arttuka/reagent-material-ui | sos_rounded.cljs | (ns reagent-mui.icons.sos-rounded
"Imports @mui/icons-material/SosRounded as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def sos-rounded (create-svg-icon (e "path" #js {"d" "M13.5 7h-3c-1.1 0-2 .9-2 2v6c0 1.1.9 2 2 2h3c1.1 0 2-.9 2-2V9c0-1.1-.9-2-2-2zm0 8h-3V9h3v6zM3 9v2h2c1.1 0 2 .9 2 2v2c0 1.1-.9 2-2 2H2c-.55 0-1-.45-1-1s.45-1 1-1h3v-2H3c-1.1 0-2-.9-2-2V9c0-1.1.9-2 2-2h3c.55 0 1 .45 1 1s-.45 1-1 1H3zm16 0v2h2c1.1 0 2 .9 2 2v2c0 1.1-.9 2-2 2h-3c-.55 0-1-.45-1-1s.45-1 1-1h3v-2h-2c-1.1 0-2-.9-2-2V9c0-1.1.9-2 2-2h3c.55 0 1 .45 1 1s-.45 1-1 1h-3z"})
"SosRounded"))
| null | https://raw.githubusercontent.com/arttuka/reagent-material-ui/c7cd0d7c661ab9df5b0aed0213a6653a9a3f28ea/src/icons/reagent_mui/icons/sos_rounded.cljs | clojure | (ns reagent-mui.icons.sos-rounded
"Imports @mui/icons-material/SosRounded as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def sos-rounded (create-svg-icon (e "path" #js {"d" "M13.5 7h-3c-1.1 0-2 .9-2 2v6c0 1.1.9 2 2 2h3c1.1 0 2-.9 2-2V9c0-1.1-.9-2-2-2zm0 8h-3V9h3v6zM3 9v2h2c1.1 0 2 .9 2 2v2c0 1.1-.9 2-2 2H2c-.55 0-1-.45-1-1s.45-1 1-1h3v-2H3c-1.1 0-2-.9-2-2V9c0-1.1.9-2 2-2h3c.55 0 1 .45 1 1s-.45 1-1 1H3zm16 0v2h2c1.1 0 2 .9 2 2v2c0 1.1-.9 2-2 2h-3c-.55 0-1-.45-1-1s.45-1 1-1h3v-2h-2c-1.1 0-2-.9-2-2V9c0-1.1.9-2 2-2h3c.55 0 1 .45 1 1s-.45 1-1 1h-3z"})
"SosRounded"))
| |
4053f766a9fc043beea934ff9c9296efca6ba18fccdca6146e14d25209c2d3e1 | samply/blaze | spec.clj | (ns blaze.http.util.spec
(:require
[clojure.spec.alpha :as s]))
(s/def :blaze.http.header.element.param/name
string?)
(s/def :blaze.http.header.element.param/value
string?)
(s/def :blaze.http.header.element/param
(s/keys :req-un [:blaze.http.header.element.param/name
:blaze.http.header.element.param/value]))
(s/def :blaze.http.header.element/params
(s/coll-of :blaze.http.header.element/param))
(s/def :blaze.http.header.element/name
string?)
(s/def :blaze.http.header.element/value
string?)
(s/def :blaze.http.header/element
(s/keys :req-un [:blaze.http.header.element/name
:blaze.http.header.element/value]
:opt-un [:blaze.http.header.element/params]))
| null | https://raw.githubusercontent.com/samply/blaze/c479410a9198526453a0df769ab7db2e6d5dd654/modules/rest-util/src/blaze/http/util/spec.clj | clojure | (ns blaze.http.util.spec
(:require
[clojure.spec.alpha :as s]))
(s/def :blaze.http.header.element.param/name
string?)
(s/def :blaze.http.header.element.param/value
string?)
(s/def :blaze.http.header.element/param
(s/keys :req-un [:blaze.http.header.element.param/name
:blaze.http.header.element.param/value]))
(s/def :blaze.http.header.element/params
(s/coll-of :blaze.http.header.element/param))
(s/def :blaze.http.header.element/name
string?)
(s/def :blaze.http.header.element/value
string?)
(s/def :blaze.http.header/element
(s/keys :req-un [:blaze.http.header.element/name
:blaze.http.header.element/value]
:opt-un [:blaze.http.header.element/params]))
| |
136bbbfc9c666c802ed2bc2d7aa4538d498beb8f339dc28daa218c158572952f | abdulapopoola/SICPBook | helpers.scm | (define (pretty-print compiled-code)
(if (not (null? compiled-code))
(let ((first-instruction (car compiled-code)))
(if (not (symbol? first-instruction)) ;; is not a label?
(display " ")
(newline))
(begin
(print-to-screen first-instruction)
(pretty-print (cdr compiled-code))
(newline)))
'OK))
(define (print-to-screen . values)
(map
(lambda (value)
(display value)
(newline))
values))
| null | https://raw.githubusercontent.com/abdulapopoola/SICPBook/c8a0228ebf66d9c1ddc5ef1fcc1d05d8684f090a/Chapter%205/5.5/helpers.scm | scheme | is not a label? | (define (pretty-print compiled-code)
(if (not (null? compiled-code))
(let ((first-instruction (car compiled-code)))
(display " ")
(newline))
(begin
(print-to-screen first-instruction)
(pretty-print (cdr compiled-code))
(newline)))
'OK))
(define (print-to-screen . values)
(map
(lambda (value)
(display value)
(newline))
values))
|
95af6e9b37693c3efba56e356c34212b9175e030c2950e8ccedbb00070305a58 | raaz-crypto/raaz | XChaCha20Spec.hs | # OPTIONS_GHC -fno - warn - orphans #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
module Raaz.Cipher.XChaCha20Spec where
import Tests.Core
import qualified XChaCha20.Implementation as XI
import qualified ChaCha20.CPortable as CP
import System.IO.Unsafe( unsafePerformIO )
unsafeRun :: Memory mem => (mem -> IO a) -> a
unsafeRun = unsafePerformIO . withMemory
setup :: Key ChaCha20
-> Nounce XChaCha20
-> (Key ChaCha20, Nounce ChaCha20)
setup k n = unsafeRun setupMem
where setupMem :: CP.Internals -> IO (Key ChaCha20, Nounce ChaCha20)
setupMem mem = do initialise k mem
CP.xchacha20Setup n mem
(,) <$> extract (keyCell mem)
<*> extract (ivCell mem)
xinit :: Key XChaCha20
-> Nounce XChaCha20
-> (Key ChaCha20, Nounce ChaCha20)
xinit k n = unsafeRun xinitMem
where xinitMem :: XI.Internals -> IO (Key ChaCha20, Nounce ChaCha20)
xinitMem mem = do initialise k mem
initialise n mem
(,) <$> extract (keyCell $ XI.chacha20Internals mem)
<*> extract (ivCell $ XI.chacha20Internals mem)
mesg :: (Show k, Show n, Show kp, Show iv)
=> k -> n -> kp -> iv -> String
mesg k n kp iv = unwords ["for key:"
, shortened $ show k
, "and nounce:"
, shortened $ show n
, "the key should be"
, shortened $ show kp
, "and the internal nounce should be"
, show iv
]
setupSpec :: Key ChaCha20 -> Nounce XChaCha20 -> (Key ChaCha20, Nounce ChaCha20) -> Spec
setupSpec k n (kp,iv) = it msg $ setup k n `shouldBe` (kp,iv)
where msg = unwords ["setup:", mesg k n kp iv]
xinitSpec :: Key XChaCha20 -> Nounce XChaCha20 -> (Key ChaCha20,Nounce ChaCha20) -> Spec
xinitSpec k n (kp,iv) = it msg $ xinit k n `shouldBe` (kp,iv)
where msg = unwords ["xinit:", mesg k n kp iv]
spec :: Spec
spec = do
setupSpec
"00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f:10:11:12:13:14:15:16:17:18:19:1a:1b:1c:1d:1e:1f"
"00:00:00:09:00:00:00:4a:00:00:00:00:31:41:59:27:00:01:02:03:04:05:06:07"
("82413b42 27b27bfe d30e4250 8a877d73 a0f9e4d5 8a74a853 c12ec413 26d3ecdc", "00:00:00:00:00:01:02:03:04:05:06:07")
xinitSpec
"00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f:10:11:12:13:14:15:16:17:18:19:1a:1b:1c:1d:1e:1f"
"00:00:00:09:00:00:00:4a:00:00:00:00:31:41:59:27:00:01:02:03:04:05:06:07"
("82413b42 27b27bfe d30e4250 8a877d73 a0f9e4d5 8a74a853 c12ec413 26d3ecdc", "00:00:00:00:00:01:02:03:04:05:06:07")
| null | https://raw.githubusercontent.com/raaz-crypto/raaz/91799e1ae528e909ad921f6c0d6f51ebd8c7328f/tests/Raaz/Cipher/XChaCha20Spec.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE DataKinds # | # OPTIONS_GHC -fno - warn - orphans #
module Raaz.Cipher.XChaCha20Spec where
import Tests.Core
import qualified XChaCha20.Implementation as XI
import qualified ChaCha20.CPortable as CP
import System.IO.Unsafe( unsafePerformIO )
unsafeRun :: Memory mem => (mem -> IO a) -> a
unsafeRun = unsafePerformIO . withMemory
setup :: Key ChaCha20
-> Nounce XChaCha20
-> (Key ChaCha20, Nounce ChaCha20)
setup k n = unsafeRun setupMem
where setupMem :: CP.Internals -> IO (Key ChaCha20, Nounce ChaCha20)
setupMem mem = do initialise k mem
CP.xchacha20Setup n mem
(,) <$> extract (keyCell mem)
<*> extract (ivCell mem)
xinit :: Key XChaCha20
-> Nounce XChaCha20
-> (Key ChaCha20, Nounce ChaCha20)
xinit k n = unsafeRun xinitMem
where xinitMem :: XI.Internals -> IO (Key ChaCha20, Nounce ChaCha20)
xinitMem mem = do initialise k mem
initialise n mem
(,) <$> extract (keyCell $ XI.chacha20Internals mem)
<*> extract (ivCell $ XI.chacha20Internals mem)
mesg :: (Show k, Show n, Show kp, Show iv)
=> k -> n -> kp -> iv -> String
mesg k n kp iv = unwords ["for key:"
, shortened $ show k
, "and nounce:"
, shortened $ show n
, "the key should be"
, shortened $ show kp
, "and the internal nounce should be"
, show iv
]
setupSpec :: Key ChaCha20 -> Nounce XChaCha20 -> (Key ChaCha20, Nounce ChaCha20) -> Spec
setupSpec k n (kp,iv) = it msg $ setup k n `shouldBe` (kp,iv)
where msg = unwords ["setup:", mesg k n kp iv]
xinitSpec :: Key XChaCha20 -> Nounce XChaCha20 -> (Key ChaCha20,Nounce ChaCha20) -> Spec
xinitSpec k n (kp,iv) = it msg $ xinit k n `shouldBe` (kp,iv)
where msg = unwords ["xinit:", mesg k n kp iv]
spec :: Spec
spec = do
setupSpec
"00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f:10:11:12:13:14:15:16:17:18:19:1a:1b:1c:1d:1e:1f"
"00:00:00:09:00:00:00:4a:00:00:00:00:31:41:59:27:00:01:02:03:04:05:06:07"
("82413b42 27b27bfe d30e4250 8a877d73 a0f9e4d5 8a74a853 c12ec413 26d3ecdc", "00:00:00:00:00:01:02:03:04:05:06:07")
xinitSpec
"00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f:10:11:12:13:14:15:16:17:18:19:1a:1b:1c:1d:1e:1f"
"00:00:00:09:00:00:00:4a:00:00:00:00:31:41:59:27:00:01:02:03:04:05:06:07"
("82413b42 27b27bfe d30e4250 8a877d73 a0f9e4d5 8a74a853 c12ec413 26d3ecdc", "00:00:00:00:00:01:02:03:04:05:06:07")
|
af26f3ee165ec7e719adfcf652b717cd172648fb36bf577ceb4810d7634000c6 | freedict/tools | Token.hs |
- Language / Ding / Token.hs - token structures as produced by the lexer
-
- Copyright 2020 - 2021
-
- This file is part of ding2tei - haskell .
-
- ding2tei - haskell is free software : you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published
- by the Free Software Foundation , either version 3 of the License , or
- ( at your option ) any later version .
-
- ding2tei - haskell is distributed in the hope that it will be useful ,
- but WITHOUT ANY WARRANTY ; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
- GNU Affero General Public License for more details .
-
- You should have received a copy of the GNU Affero General Public License
- along with ding2tei - haskell . If not , see < / > .
- Language/Ding/Token.hs - token structures as produced by the lexer
-
- Copyright 2020-2021 Einhard Leichtfuß
-
- This file is part of ding2tei-haskell.
-
- ding2tei-haskell is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published
- by the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- ding2tei-haskell is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
-
- You should have received a copy of the GNU Affero General Public License
- along with ding2tei-haskell. If not, see </>.
-}
|
- Token types , as produced by the generated lexer and consumed by the
- happy generated parser ( and the header parser ) .
- Token types, as produced by the Alex generated lexer and consumed by the
- happy generated parser (and the header parser).
-}
module Language.Ding.Token
( Token(..)
, Position(..)
, Atom(..)
, tokenToString
, tokenToPosition
, tokenToLine
, tokenToColumn
) where
import Data.NatLang.Grammar (GramLexCategory)
import Language.Ding.Show.Grammar (showGLC)
-- | Token, as produced by the lexer. Annotated with any directly preceding
-- whitespace and the position in the input.
data Token
= Token
String -- ^ Preceding whitespace.
Position
Atom
| EmptyToken -- ^ Neutral element in the monoid.
-- Note:
-- * This instance's show function is not injective. Information on position
-- and preceding whitespace is dropped.
instance Show Token where
show (Token _ _ atom) = show atom
show EmptyToken = "EmptyToken"
Note : One can not simply use AlexPosn here , since this would introduce a
-- dependency cycle (in the current setup).
-- | Position of a token in the input, line and column.
data Position = Position
{ positionToLine :: Int
, positionToColumn :: Int
}
deriving Show
-- TODO: Consider to add an explicit constructor for each possible separator.
-- | The essential part of a `Token'.
data Atom = NL
| LangSep -- ^ "::"
| Vert
| Semi
| Comma
| Tilde
| Plus
| Wordswitch -- ^ "<>"
| StrongSlash -- ^ see `doc/ding.slashes'
| WeakSlash -- ^ see `doc/ding.slashes'
| DoubleSlash
| OBrace
| CBrace
| OBracket
| CBracket
| OParen
| CParen
| OAngle
| CAngle
| OSlash -- ^ see `doc/ding.slashes'
| CSlash -- ^ see `doc/ding.slashes'
| SlashSpecial String
| Abbrev String
| AbbrevPlural String
| GramKW GramLexCategory
| IntPronKW String
| Text String
| KW_to
| HeaderLine String
deriving Show
-- | Convert a token back to the string it represents excluding potential
-- delimiters and dropping the annotated preceding whitespace. Uses
-- `atomToString'.
tokenToString :: Token -> String
tokenToString (Token _ _ atom) = atomToString atom
tokenToString EmptyToken = ""
-- | Convert an atom back to the string that it represents, excluding any
-- delimiters (</>).
-- This function is not injective, in particular the distinction of different
-- kinds of slashes is lost.
atomToString :: Atom -> String
atomToString NL = "\n"
atomToString LangSep = "::"
atomToString Vert = "|"
atomToString Semi = ";"
atomToString Comma = ","
atomToString Tilde = "~"
atomToString Plus = "+"
atomToString Wordswitch = "<>"
atomToString StrongSlash = "/"
atomToString WeakSlash = "/"
atomToString DoubleSlash = "//"
atomToString OBrace = "{"
atomToString CBrace = "}"
atomToString OBracket = "["
atomToString CBracket = "]"
atomToString OParen = "("
atomToString CParen = ")"
atomToString OAngle = "<"
atomToString CAngle = ">"
atomToString OSlash = "/"
atomToString CSlash = "/"
atomToString (SlashSpecial s) = s -- pretty: "/ " ++ s ++ " /"
atomToString (Abbrev s) = s -- pretty: "/ " ++ s ++ " /"
atomToString (AbbrevPlural s) = s -- pretty: "/" ++ s ++ "/s"
-- showGLC always gives a list of length one in this context.
atomToString (GramKW gram) = head $ showGLC gram
atomToString (IntPronKW pron) = pron
atomToString (Text t) = t
atomToString KW_to = "to"
atomToString (HeaderLine l) = l
tokenToPosition :: Token -> Position
tokenToPosition EmptyToken = error "Tried to get position of empty token."
tokenToPosition (Token _ pos _) = pos
tokenToLine :: Token -> Int
tokenToLine = positionToLine . tokenToPosition
tokenToColumn :: Token -> Int
tokenToColumn = positionToColumn . tokenToPosition
-- All tokens have a string representation, which, together with the preceding
whitespace , identifies their value . Two tokens may hence be combined in a
-- natural way, into a canonical `Text' token.
instance Semigroup Token where
Join two tokens by concatenating their string representations , with the
-- correct whitespace in between.
(Token ws1 pos1 atom1) <> (Token ws2 _ atom2) =
Token ws1 pos1 (Text $ atomToString atom1 ++ ws2 ++ atomToString atom2)
EmptyToken is supposed to be a neutral element . Note that this means that
-- `EmptyToken <> tok' retains the whitespace from `tok'. See also
-- todo/parsing.elimination.
EmptyToken <> tok = tok
tok <> EmptyToken = tok
instance Monoid Token where
-- The unit in the token monoid is the empty 'Text' with no preceding white-
-- space.
mempty = EmptyToken
-- vi: ts=2 sw=2 et
| null | https://raw.githubusercontent.com/freedict/tools/3596640e6e0582cc5fb76a342e5d8e7413aa4b34/importers/ding2tei/src/Language/Ding/Token.hs | haskell | | Token, as produced by the lexer. Annotated with any directly preceding
whitespace and the position in the input.
^ Preceding whitespace.
^ Neutral element in the monoid.
Note:
* This instance's show function is not injective. Information on position
and preceding whitespace is dropped.
dependency cycle (in the current setup).
| Position of a token in the input, line and column.
TODO: Consider to add an explicit constructor for each possible separator.
| The essential part of a `Token'.
^ "::"
^ "<>"
^ see `doc/ding.slashes'
^ see `doc/ding.slashes'
^ see `doc/ding.slashes'
^ see `doc/ding.slashes'
| Convert a token back to the string it represents excluding potential
delimiters and dropping the annotated preceding whitespace. Uses
`atomToString'.
| Convert an atom back to the string that it represents, excluding any
delimiters (</>).
This function is not injective, in particular the distinction of different
kinds of slashes is lost.
pretty: "/ " ++ s ++ " /"
pretty: "/ " ++ s ++ " /"
pretty: "/" ++ s ++ "/s"
showGLC always gives a list of length one in this context.
All tokens have a string representation, which, together with the preceding
natural way, into a canonical `Text' token.
correct whitespace in between.
`EmptyToken <> tok' retains the whitespace from `tok'. See also
todo/parsing.elimination.
The unit in the token monoid is the empty 'Text' with no preceding white-
space.
vi: ts=2 sw=2 et |
- Language / Ding / Token.hs - token structures as produced by the lexer
-
- Copyright 2020 - 2021
-
- This file is part of ding2tei - haskell .
-
- ding2tei - haskell is free software : you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published
- by the Free Software Foundation , either version 3 of the License , or
- ( at your option ) any later version .
-
- ding2tei - haskell is distributed in the hope that it will be useful ,
- but WITHOUT ANY WARRANTY ; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
- GNU Affero General Public License for more details .
-
- You should have received a copy of the GNU Affero General Public License
- along with ding2tei - haskell . If not , see < / > .
- Language/Ding/Token.hs - token structures as produced by the lexer
-
- Copyright 2020-2021 Einhard Leichtfuß
-
- This file is part of ding2tei-haskell.
-
- ding2tei-haskell is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published
- by the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- ding2tei-haskell is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
-
- You should have received a copy of the GNU Affero General Public License
- along with ding2tei-haskell. If not, see </>.
-}
|
- Token types , as produced by the generated lexer and consumed by the
- happy generated parser ( and the header parser ) .
- Token types, as produced by the Alex generated lexer and consumed by the
- happy generated parser (and the header parser).
-}
module Language.Ding.Token
( Token(..)
, Position(..)
, Atom(..)
, tokenToString
, tokenToPosition
, tokenToLine
, tokenToColumn
) where
import Data.NatLang.Grammar (GramLexCategory)
import Language.Ding.Show.Grammar (showGLC)
data Token
= Token
Position
Atom
instance Show Token where
show (Token _ _ atom) = show atom
show EmptyToken = "EmptyToken"
Note : One can not simply use AlexPosn here , since this would introduce a
data Position = Position
{ positionToLine :: Int
, positionToColumn :: Int
}
deriving Show
data Atom = NL
| Vert
| Semi
| Comma
| Tilde
| Plus
| DoubleSlash
| OBrace
| CBrace
| OBracket
| CBracket
| OParen
| CParen
| OAngle
| CAngle
| SlashSpecial String
| Abbrev String
| AbbrevPlural String
| GramKW GramLexCategory
| IntPronKW String
| Text String
| KW_to
| HeaderLine String
deriving Show
tokenToString :: Token -> String
tokenToString (Token _ _ atom) = atomToString atom
tokenToString EmptyToken = ""
atomToString :: Atom -> String
atomToString NL = "\n"
atomToString LangSep = "::"
atomToString Vert = "|"
atomToString Semi = ";"
atomToString Comma = ","
atomToString Tilde = "~"
atomToString Plus = "+"
atomToString Wordswitch = "<>"
atomToString StrongSlash = "/"
atomToString WeakSlash = "/"
atomToString DoubleSlash = "//"
atomToString OBrace = "{"
atomToString CBrace = "}"
atomToString OBracket = "["
atomToString CBracket = "]"
atomToString OParen = "("
atomToString CParen = ")"
atomToString OAngle = "<"
atomToString CAngle = ">"
atomToString OSlash = "/"
atomToString CSlash = "/"
atomToString (GramKW gram) = head $ showGLC gram
atomToString (IntPronKW pron) = pron
atomToString (Text t) = t
atomToString KW_to = "to"
atomToString (HeaderLine l) = l
tokenToPosition :: Token -> Position
tokenToPosition EmptyToken = error "Tried to get position of empty token."
tokenToPosition (Token _ pos _) = pos
tokenToLine :: Token -> Int
tokenToLine = positionToLine . tokenToPosition
tokenToColumn :: Token -> Int
tokenToColumn = positionToColumn . tokenToPosition
whitespace , identifies their value . Two tokens may hence be combined in a
instance Semigroup Token where
Join two tokens by concatenating their string representations , with the
(Token ws1 pos1 atom1) <> (Token ws2 _ atom2) =
Token ws1 pos1 (Text $ atomToString atom1 ++ ws2 ++ atomToString atom2)
EmptyToken is supposed to be a neutral element . Note that this means that
EmptyToken <> tok = tok
tok <> EmptyToken = tok
instance Monoid Token where
mempty = EmptyToken
|
0d6c20159ece5f41df6644cb612ac0d706a2078d9bbcfa08c10a2e55647e944a | facebook/flow | exception.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type t = {
exn: exn;
backtrace: Printexc.raw_backtrace;
}
In ocaml , backtraces ( the path that the exception bubbled up after being thrown ) are stored as
* global state and NOT with the exception itself . This means the only safe place to ever read the
* backtrace is immediately after the exception is caught in the ` with ` block of a ` try ... with ` .
*
* Proper use of this module is something like
*
* try
* ...
* with exn - >
* let e = Exception.wrap exn in ( * DO THIS FIRST ! ! !
* global state and NOT with the exception itself. This means the only safe place to ever read the
* backtrace is immediately after the exception is caught in the `with` block of a `try...with`.
*
* Proper use of this module is something like
*
* try
* ...
* with exn ->
* let e = Exception.wrap exn in (* DO THIS FIRST!!! *)
* my_fun e; (* If this code throws internally it will overwrite the global backtrace *)
* Exception.reraise e
*)
let wrap exn =
let backtrace = Printexc.get_raw_backtrace () in
{ exn; backtrace }
(* The inverse of `wrap`, returns the wrapped `exn`. You might use this to pattern
match on the raw exception or print it, but should not reraise it since it
will not include the correct backtrace; use `reraise` or `to_exn` instead. *)
let unwrap { exn; backtrace = _ } = exn
let reraise { exn; backtrace } = Printexc.raise_with_backtrace exn backtrace
(** [raise_with_backtrace exn t] raises [exn] with the backtrace from [t].
This could be useful for reraising an exception with a new message,
without changing the backtrace. *)
let raise_with_backtrace exn { backtrace; _ } = reraise { exn; backtrace }
(* Converts back to an `exn` with the right backtrace. Generally, avoid this in favor of
the helpers in this module, like `to_string` and `get_backtrace_string`. *)
let to_exn t =
try reraise t with
| exn -> exn
Like ` wrap ` , but for the unusual case where you want to create an ` Exception `
for an un - raised ` exn ` , capturing its stack trace . If you 've caught an exception ,
you should use ` wrap ` instead , since it already has a stack trace .
for an un-raised `exn`, capturing its stack trace. If you've caught an exception,
you should use `wrap` instead, since it already has a stack trace. *)
let wrap_unraised ?(frames = 100) exn =
let frames =
if Printexc.backtrace_status () then
frames
else
0
in
let backtrace = Printexc.get_callstack frames in
{ exn; backtrace }
let get_ctor_string { exn; backtrace = _ } = Printexc.to_string exn
let get_backtrace_string { exn = _; backtrace } = Printexc.raw_backtrace_to_string backtrace
let to_string t =
let ctor = get_ctor_string t in
let bt = get_backtrace_string t in
if bt = "" then
ctor
else
ctor ^ "\n" ^ bt
let get_current_callstack_string n = Printexc.get_callstack n |> Printexc.raw_backtrace_to_string
(** `internal_get_backtrace_slots_of_callstack x y` returns the top `y`
backtrace slots after skipping the top `x` slots. Skipping slots is
useful for hiding our own exception printing functions. *)
let internal_get_backtrace_slots_of_callstack ~skip n =
(* skip `Printexc.get_callstack` *)
let skip = skip + 1 in
let n =
if max_int - skip < n then
max_int
else
n + skip
in
let callstack = Printexc.get_callstack n in
match Printexc.backtrace_slots callstack with
| Some callstack_slots ->
if Array.length callstack_slots > skip then
Some (Array.sub callstack_slots skip (Array.length callstack_slots - skip))
else
None
| None -> None
(** Appends the current callstack (up to `n` slots, after skipping `skip` frames) to
the given backtrace. *)
let internal_get_full_backtrace_slots ~skip n backtrace =
(* skip `internal_get_backtrace_slots_of_callstack` *)
let callstack_slots = internal_get_backtrace_slots_of_callstack ~skip:(skip + 1) n in
let backtrace_slots = Printexc.backtrace_slots backtrace in
match (backtrace_slots, callstack_slots) with
| (Some backtrace_slots, Some callstack_slots) ->
Some (Array.append backtrace_slots callstack_slots)
| (Some backtrace_slots, None) -> Some backtrace_slots
| (None, Some callstack_slots) -> Some callstack_slots
| (None, None) -> None
* the private ` Printexc.print_exception_backtrace `
let print_exception_backtrace outchan backtrace =
match backtrace with
| None -> Printf.fprintf outchan "(Program not linked with -g, cannot print stack backtrace)\n"
| Some a ->
for i = 0 to Array.length a - 1 do
match Printexc.Slot.format i a.(i) with
| None -> ()
| Some str -> Printf.fprintf outchan "%s\n" str
done
let print_full_backtrace outchan n { exn = _; backtrace } =
skip ` print_full_backtrace ` and ` internal_get_full_backtrace_slots `
let slots = internal_get_full_backtrace_slots ~skip:2 n backtrace in
print_exception_backtrace outchan slots
* the private ` Printexc.backtrace_to_string `
let backtrace_slots_to_string backtrace =
match backtrace with
| None -> "(Program not linked with -g, cannot print stack backtrace)\n"
| Some a ->
let b = Buffer.create 1024 in
for i = 0 to Array.length a - 1 do
match Printexc.Slot.format i a.(i) with
| None -> ()
| Some str -> Printf.bprintf b "%s\n" str
done;
Buffer.contents b
let get_full_backtrace_string n { exn = _; backtrace } =
skip ` get_full_backtrace_string ` and ` internal_get_full_backtrace_slots `
let slots = internal_get_full_backtrace_slots ~skip:2 n backtrace in
backtrace_slots_to_string slots
let register_printer = Printexc.register_printer
let record_backtrace = Printexc.record_backtrace
let protect ~f ~finally =
let res =
try f () with
| e ->
let e = wrap e in
finally ();
reraise e
in
finally ();
res
| null | https://raw.githubusercontent.com/facebook/flow/1232fa6f444797403095fc1f69dae7bf7789a313/src/hack_forked/utils/core/exception.ml | ocaml | DO THIS FIRST!!!
If this code throws internally it will overwrite the global backtrace
The inverse of `wrap`, returns the wrapped `exn`. You might use this to pattern
match on the raw exception or print it, but should not reraise it since it
will not include the correct backtrace; use `reraise` or `to_exn` instead.
* [raise_with_backtrace exn t] raises [exn] with the backtrace from [t].
This could be useful for reraising an exception with a new message,
without changing the backtrace.
Converts back to an `exn` with the right backtrace. Generally, avoid this in favor of
the helpers in this module, like `to_string` and `get_backtrace_string`.
* `internal_get_backtrace_slots_of_callstack x y` returns the top `y`
backtrace slots after skipping the top `x` slots. Skipping slots is
useful for hiding our own exception printing functions.
skip `Printexc.get_callstack`
* Appends the current callstack (up to `n` slots, after skipping `skip` frames) to
the given backtrace.
skip `internal_get_backtrace_slots_of_callstack` |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type t = {
exn: exn;
backtrace: Printexc.raw_backtrace;
}
In ocaml , backtraces ( the path that the exception bubbled up after being thrown ) are stored as
* global state and NOT with the exception itself . This means the only safe place to ever read the
* backtrace is immediately after the exception is caught in the ` with ` block of a ` try ... with ` .
*
* Proper use of this module is something like
*
* try
* ...
* with exn - >
* let e = Exception.wrap exn in ( * DO THIS FIRST ! ! !
* global state and NOT with the exception itself. This means the only safe place to ever read the
* backtrace is immediately after the exception is caught in the `with` block of a `try...with`.
*
* Proper use of this module is something like
*
* try
* ...
* with exn ->
* Exception.reraise e
*)
let wrap exn =
let backtrace = Printexc.get_raw_backtrace () in
{ exn; backtrace }
let unwrap { exn; backtrace = _ } = exn
let reraise { exn; backtrace } = Printexc.raise_with_backtrace exn backtrace
let raise_with_backtrace exn { backtrace; _ } = reraise { exn; backtrace }
let to_exn t =
try reraise t with
| exn -> exn
Like ` wrap ` , but for the unusual case where you want to create an ` Exception `
for an un - raised ` exn ` , capturing its stack trace . If you 've caught an exception ,
you should use ` wrap ` instead , since it already has a stack trace .
for an un-raised `exn`, capturing its stack trace. If you've caught an exception,
you should use `wrap` instead, since it already has a stack trace. *)
let wrap_unraised ?(frames = 100) exn =
let frames =
if Printexc.backtrace_status () then
frames
else
0
in
let backtrace = Printexc.get_callstack frames in
{ exn; backtrace }
let get_ctor_string { exn; backtrace = _ } = Printexc.to_string exn
let get_backtrace_string { exn = _; backtrace } = Printexc.raw_backtrace_to_string backtrace
let to_string t =
let ctor = get_ctor_string t in
let bt = get_backtrace_string t in
if bt = "" then
ctor
else
ctor ^ "\n" ^ bt
let get_current_callstack_string n = Printexc.get_callstack n |> Printexc.raw_backtrace_to_string
let internal_get_backtrace_slots_of_callstack ~skip n =
let skip = skip + 1 in
let n =
if max_int - skip < n then
max_int
else
n + skip
in
let callstack = Printexc.get_callstack n in
match Printexc.backtrace_slots callstack with
| Some callstack_slots ->
if Array.length callstack_slots > skip then
Some (Array.sub callstack_slots skip (Array.length callstack_slots - skip))
else
None
| None -> None
let internal_get_full_backtrace_slots ~skip n backtrace =
let callstack_slots = internal_get_backtrace_slots_of_callstack ~skip:(skip + 1) n in
let backtrace_slots = Printexc.backtrace_slots backtrace in
match (backtrace_slots, callstack_slots) with
| (Some backtrace_slots, Some callstack_slots) ->
Some (Array.append backtrace_slots callstack_slots)
| (Some backtrace_slots, None) -> Some backtrace_slots
| (None, Some callstack_slots) -> Some callstack_slots
| (None, None) -> None
* the private ` Printexc.print_exception_backtrace `
let print_exception_backtrace outchan backtrace =
match backtrace with
| None -> Printf.fprintf outchan "(Program not linked with -g, cannot print stack backtrace)\n"
| Some a ->
for i = 0 to Array.length a - 1 do
match Printexc.Slot.format i a.(i) with
| None -> ()
| Some str -> Printf.fprintf outchan "%s\n" str
done
let print_full_backtrace outchan n { exn = _; backtrace } =
skip ` print_full_backtrace ` and ` internal_get_full_backtrace_slots `
let slots = internal_get_full_backtrace_slots ~skip:2 n backtrace in
print_exception_backtrace outchan slots
* the private ` Printexc.backtrace_to_string `
let backtrace_slots_to_string backtrace =
match backtrace with
| None -> "(Program not linked with -g, cannot print stack backtrace)\n"
| Some a ->
let b = Buffer.create 1024 in
for i = 0 to Array.length a - 1 do
match Printexc.Slot.format i a.(i) with
| None -> ()
| Some str -> Printf.bprintf b "%s\n" str
done;
Buffer.contents b
let get_full_backtrace_string n { exn = _; backtrace } =
skip ` get_full_backtrace_string ` and ` internal_get_full_backtrace_slots `
let slots = internal_get_full_backtrace_slots ~skip:2 n backtrace in
backtrace_slots_to_string slots
let register_printer = Printexc.register_printer
let record_backtrace = Printexc.record_backtrace
let protect ~f ~finally =
let res =
try f () with
| e ->
let e = wrap e in
finally ();
reraise e
in
finally ();
res
|
dac91d14032294017533e52adc04a6a54867a7528700e8508a1bc82d91bcf369 | 15Galan/asignatura-204 | GraphDemoG.hs | import DataStructures.Graph.Graph
import DataStructures.Graph.GraphES as G
g1 :: Graph Int
g1 = mkGraphSuc [1,2,3,4] suc
where
suc 1 = [2,3]
suc 2 = [1,3]
suc 3 = [1,2,4]
suc 4 = [3]
g1' :: Graph Int
g1' = mkGraphEdges [1,2,3,4] [(1,2),(1,3),(2,3),(3,4)]
{-
g1 y g1':
1--2
\ |
3--4
-}
g2 :: Grafo Int
g2 = crearGrafoSucesores [1,2,3,4] suc
where
suc 1 = [2,3]
suc 2 = [1,3]
suc 3 = [1,2,4]
suc 4 = [3]
g2' :: Grafo Int
g2' = crearGrafoAristas [1,2,3,4] [(1,2),(1,3),(2,3),(3,4)] | null | https://raw.githubusercontent.com/15Galan/asignatura-204/894f33ff8e0f52a75d8f9ff15155c656f1a8f771/Recursos/data.structures/haskell/Demos/Graph/GraphDemoG.hs | haskell |
g1 y g1':
1--2
\ |
3--4
| import DataStructures.Graph.Graph
import DataStructures.Graph.GraphES as G
g1 :: Graph Int
g1 = mkGraphSuc [1,2,3,4] suc
where
suc 1 = [2,3]
suc 2 = [1,3]
suc 3 = [1,2,4]
suc 4 = [3]
g1' :: Graph Int
g1' = mkGraphEdges [1,2,3,4] [(1,2),(1,3),(2,3),(3,4)]
g2 :: Grafo Int
g2 = crearGrafoSucesores [1,2,3,4] suc
where
suc 1 = [2,3]
suc 2 = [1,3]
suc 3 = [1,2,4]
suc 4 = [3]
g2' :: Grafo Int
g2' = crearGrafoAristas [1,2,3,4] [(1,2),(1,3),(2,3),(3,4)] |
f613937476afceda5fbba05fdd5a9dc2ba64a58a89aa11344025ca3bc59eb3dc | sids/nerchuko | country.clj | (ns nerchuko-test.classification.naive-bayes.country
(:use nerchuko.classification
nerchuko.utils
nerchuko.helpers)
(:use clojure.test))
(def training-dataset [["chinese beijing chinese" :yes]
["chinese chinese shanghai" :yes]
["chinese macao" :yes]
["tokyo japan chinese" :no]])
(deftest naive-bayes-multinomial
(let [model (learn-model 'nerchuko.classifiers.naive-bayes.multinomial
training-dataset)]
(are [doc result] (= result
(scores model doc))
"chinese chinese chinese tokyo japan" {:yes 0.6897586117634673, :no 0.31024138823653274}
"" {:yes 3/4, :no 1/4})
(are [doc result] (= result
(classify model doc))
"chinese chinese chinese tokyo japan" :yes
"" :yes)))
(deftest naive-bayes-bernouli
(let [model (learn-model 'nerchuko.classifiers.naive-bayes.bernoulli
training-dataset)]
(are [doc result] (= result
(scores model doc))
"chinese chinese chinese tokyo japan" {:yes 0.19106678876165256, :no 0.8089332112383474}
"" {:yes 0.8831539824861842, :no 0.1168460175138158})
(are [doc result] (= result
(classify model doc))
"chinese chinese chinese tokyo japan" :no
"" :yes)))
(deftest naive-bayes-complement
(let [model (learn-model 'nerchuko.classifiers.naive-bayes.complement
training-dataset)]
(are [doc result] (= result
(scores model doc))
"chinese chinese chinese tokyo japan" {:yes 0.6897586117634675, :no 0.3102413882365326}
"" {:yes 3/4, :no 1/4})
(are [doc result] (= result
(classify model doc))
"chinese chinese chinese tokyo japan" :yes
"" :yes)))
| null | https://raw.githubusercontent.com/sids/nerchuko/8aa56497dd8e93e868713dd542667a56215522fb/test/nerchuko_test/classification/naive_bayes/country.clj | clojure | (ns nerchuko-test.classification.naive-bayes.country
(:use nerchuko.classification
nerchuko.utils
nerchuko.helpers)
(:use clojure.test))
(def training-dataset [["chinese beijing chinese" :yes]
["chinese chinese shanghai" :yes]
["chinese macao" :yes]
["tokyo japan chinese" :no]])
(deftest naive-bayes-multinomial
(let [model (learn-model 'nerchuko.classifiers.naive-bayes.multinomial
training-dataset)]
(are [doc result] (= result
(scores model doc))
"chinese chinese chinese tokyo japan" {:yes 0.6897586117634673, :no 0.31024138823653274}
"" {:yes 3/4, :no 1/4})
(are [doc result] (= result
(classify model doc))
"chinese chinese chinese tokyo japan" :yes
"" :yes)))
(deftest naive-bayes-bernouli
(let [model (learn-model 'nerchuko.classifiers.naive-bayes.bernoulli
training-dataset)]
(are [doc result] (= result
(scores model doc))
"chinese chinese chinese tokyo japan" {:yes 0.19106678876165256, :no 0.8089332112383474}
"" {:yes 0.8831539824861842, :no 0.1168460175138158})
(are [doc result] (= result
(classify model doc))
"chinese chinese chinese tokyo japan" :no
"" :yes)))
(deftest naive-bayes-complement
(let [model (learn-model 'nerchuko.classifiers.naive-bayes.complement
training-dataset)]
(are [doc result] (= result
(scores model doc))
"chinese chinese chinese tokyo japan" {:yes 0.6897586117634675, :no 0.3102413882365326}
"" {:yes 3/4, :no 1/4})
(are [doc result] (= result
(classify model doc))
"chinese chinese chinese tokyo japan" :yes
"" :yes)))
| |
38066005a2e8d008b417b2749b1877847b38dd665563b2fa1a5dd9d2b2ffce72 | airalab/hs-web3 | System.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Network.Polkadot.Rpc.System
Copyright : 2016 - 2021
-- License : Apache-2.0
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Polkadot RPC methods with `system` prefix.
--
module Network.Polkadot.Rpc.System where
import Data.Aeson (Object)
import Data.Text (Text)
import Network.JsonRpc.TinyClient (JsonRpc (..))
import Network.Polkadot.Rpc.Types (ChainType, Health, NodeRole,
PeerInfo)
-- | Adds a reserved peer.
addReservedPeer :: JsonRpc m
=> Text
-- ^ Peer URI
-> m Text
# INLINE addReservedPeer #
addReservedPeer = remote "system_addReservedPeer"
-- | Retrieves the chain.
chain :: JsonRpc m => m Text
# INLINE chain #
chain = remote "system_chain"
-- | Retrieves the chain type.
chainType :: JsonRpc m => m ChainType
# INLINE chainType #
chainType = remote "system_chainType"
-- | Return health status of the node.
health :: JsonRpc m => m Health
# INLINE health #
health = remote "system_health"
-- | The addresses include a trailing /p2p/ with the local PeerId,
-- and are thus suitable to be passed to addReservedPeer or as a bootnode address.
localListenAddresses :: JsonRpc m => m [Text]
# INLINE localListenAddresses #
localListenAddresses = remote "system_localListenAddresses"
| Returns the base58 - encoded PeerId of the node .
localPeerId :: JsonRpc m => m Text
# INLINE localPeerId #
localPeerId = remote "system_localPeerId"
-- | Retrieves the node name.
name :: JsonRpc m => m Text
# INLINE name #
name = remote "system_name"
-- | Returns current state of the network.
--
-- Warning: This API isn't stable.
networkState :: JsonRpc m => m Object
# INLINE networkState #
networkState = remote "system_networkState"
-- | Returns the roles the node is running as.
nodeRoles :: JsonRpc m => m [NodeRole]
# INLINE nodeRoles #
nodeRoles = remote "system_nodeRoles"
-- | Returns the currently connected peers.
peers :: JsonRpc m => m [PeerInfo]
# INLINE peers #
peers = remote "system_peers"
-- | Get a custom set of properties as a JSON object, defined in the chain spec.
properties :: JsonRpc m => m Object
# INLINE properties #
properties = remote "system_properties"
-- | Remove a reserved peer.
removeReservedPeer :: JsonRpc m
=> Text
-- ^ Peer URI
-> m Text
# INLINE removeReservedPeer #
removeReservedPeer = remote "system_removeReservedPeer"
-- | Retrieves the version of the node.
version :: JsonRpc m => m Text
# INLINE version #
version = remote "system_version"
| null | https://raw.githubusercontent.com/airalab/hs-web3/c03b86eb621f963886a78c39ee18bcec753f17ac/packages/polkadot/src/Network/Polkadot/Rpc/System.hs | haskell | # LANGUAGE OverloadedStrings #
|
Module : Network.Polkadot.Rpc.System
License : Apache-2.0
Maintainer :
Stability : experimental
Portability : portable
Polkadot RPC methods with `system` prefix.
| Adds a reserved peer.
^ Peer URI
| Retrieves the chain.
| Retrieves the chain type.
| Return health status of the node.
| The addresses include a trailing /p2p/ with the local PeerId,
and are thus suitable to be passed to addReservedPeer or as a bootnode address.
| Retrieves the node name.
| Returns current state of the network.
Warning: This API isn't stable.
| Returns the roles the node is running as.
| Returns the currently connected peers.
| Get a custom set of properties as a JSON object, defined in the chain spec.
| Remove a reserved peer.
^ Peer URI
| Retrieves the version of the node. | # LANGUAGE FlexibleContexts #
Copyright : 2016 - 2021
module Network.Polkadot.Rpc.System where
import Data.Aeson (Object)
import Data.Text (Text)
import Network.JsonRpc.TinyClient (JsonRpc (..))
import Network.Polkadot.Rpc.Types (ChainType, Health, NodeRole,
PeerInfo)
addReservedPeer :: JsonRpc m
=> Text
-> m Text
# INLINE addReservedPeer #
addReservedPeer = remote "system_addReservedPeer"
chain :: JsonRpc m => m Text
# INLINE chain #
chain = remote "system_chain"
chainType :: JsonRpc m => m ChainType
# INLINE chainType #
chainType = remote "system_chainType"
health :: JsonRpc m => m Health
# INLINE health #
health = remote "system_health"
localListenAddresses :: JsonRpc m => m [Text]
# INLINE localListenAddresses #
localListenAddresses = remote "system_localListenAddresses"
| Returns the base58 - encoded PeerId of the node .
localPeerId :: JsonRpc m => m Text
# INLINE localPeerId #
localPeerId = remote "system_localPeerId"
name :: JsonRpc m => m Text
# INLINE name #
name = remote "system_name"
networkState :: JsonRpc m => m Object
# INLINE networkState #
networkState = remote "system_networkState"
nodeRoles :: JsonRpc m => m [NodeRole]
# INLINE nodeRoles #
nodeRoles = remote "system_nodeRoles"
peers :: JsonRpc m => m [PeerInfo]
# INLINE peers #
peers = remote "system_peers"
properties :: JsonRpc m => m Object
# INLINE properties #
properties = remote "system_properties"
removeReservedPeer :: JsonRpc m
=> Text
-> m Text
# INLINE removeReservedPeer #
removeReservedPeer = remote "system_removeReservedPeer"
version :: JsonRpc m => m Text
# INLINE version #
version = remote "system_version"
|
cda5e655ff6949310e0b57773655875ab0ad49cfd42e9f5cd6b60d976ac7d5da | discus-lang/ddc | Guards.hs |
Suppress Data . Monoid warnings during GHC 8.4.1 transition
{-# OPTIONS -Wno-unused-imports #-}
| guards and nested patterns to match expressions .
module DDC.Source.Discus.Transform.Guards
( type S, evalState, newVar
, desugarModule)
where
import DDC.Source.Discus.Module
import DDC.Source.Discus.Exp
import Data.Monoid
import Data.Text (Text)
import Control.Monad
import qualified DDC.Data.SourcePos as SP
import qualified Control.Monad.State as S
import qualified Data.Text as Text
GHC 8.2 - > 8.4 transition .
import Data.Monoid (Monoid(..))
-------------------------------------------------------------------------------
| guards and nested patterns to match expressions .
desugarModule :: Module SourcePos -> S (Module SourcePos)
desugarModule mm
= do ts' <- mapM desugarTop $ moduleTops mm
return $ mm { moduleTops = ts' }
-------------------------------------------------------------------------------
| a top - level thing .
desugarTop :: Top SourcePos -> S (Top SourcePos)
desugarTop tt
= case tt of
TopClause sp c -> TopClause sp <$> desugarCl sp c
TopData{} -> return tt
TopType{} -> return tt
-------------------------------------------------------------------------------
| a clause .
desugarCl :: SP -> Clause -> S Clause
desugarCl _sp cc
= case cc of
SSig{}
-> return cc
SLet sp mt ps gxs
-> do (ps', gsParam) <- stripParamsToGuards ps
gxs' <- mapM (desugarGX sp >=> (return . cleanGX))
$ map (wrapGuards gsParam) gxs
return $ SLet sp mt ps' gxs'
-------------------------------------------------------------------------------
| an expression .
desugarX :: SP -> Exp -> S Exp
desugarX sp xx
= case xx of
-- Boilerplate.
XAnnot sp' x -> XAnnot sp' <$> desugarX sp' x
XPrim{} -> pure xx
XVar{} -> pure xx
XCon{} -> pure xx
XApp x1 r2 -> XApp <$> desugarX sp x1 <*> desugarArg sp r2
XLet lts x -> XLet <$> desugarLts sp lts <*> desugarX sp x
XCast c x -> XCast c <$> desugarX sp x
XDefix a xs -> XDefix a <$> mapM (desugarArg sp) xs
XInfixOp{} -> pure xx
XInfixVar{} -> pure xx
XWhere a x cls -> XWhere a <$> desugarX sp x
<*> mapM (desugarCl sp) cls
-- Desguar patterns in a term abstraction.
XAbs bParam xBody
-> case bParam of
MTerm PDefault _
-> XAbs bParam <$> desugarX sp xBody
MTerm PVar{} _
-> XAbs bParam <$> desugarX sp xBody
MTerm pat mtBind
-> do (b, u) <- newVar "scrut"
xBody' <- desugarX sp
$ XCase (XVar u) [AAltCase pat [GExp xBody]]
return $ XAbs (MTerm (PVar b) mtBind) xBody'
_ -> XAbs bParam <$> desugarX sp xBody
a case expression .
XCase xScrut alts
-- Simple alternatives are ones where we can determine whether they
-- match just based on the head pattern. If all the alternatives
-- in a case-expression are simple then we can convert directly
-- to core-level case expressions.
| all isSimpleAltCase alts
-> do xScrut' <- desugarX sp xScrut
alts' <- mapM (desugarAltCase sp) alts
return $ XAnnot sp
$ XCase xScrut' alts'
-- Complex alternatives are ones that have include a guard or some
-- other pattern that may fail, and require us to skip to the next
-- alternatives. These are compiled as per match expressions.
| otherwise
the scrutinee .
xScrut' <- desugarX sp xScrut
-- We bind the scrutinee to a new variable so we can
-- defer to it multiple times in the body of the match.
(b, u) <- newVar "xScrut"
-- At the start of each guarded expression we match against
-- the pattern from the original case alternative.
gxsAlt' <- mapM (desugarGX sp >=> (return . cleanGX))
$ concat [ map (GGuard (GPat p (XVar u))) gxs
| AAltCase p gxs <- alts]
the body of each alternative .
alts' <- mapM (desugarAltMatch sp)
$ [AAltMatch gx | gx <- gxsAlt']
-- Result contains a let-binding to bind the scrutinee,
-- then a match expression that implements the complex
-- case alternatives.
pure $ XAnnot sp
$ XLet (LLet (XBindVarMT b Nothing) xScrut')
$ XMatch sp alts'
$ makeXErrorDefault
(Text.pack $ SP.sourcePosSource sp)
(fromIntegral $ SP.sourcePosLine sp)
a match expression from the source code .
XMatch sp' alts xFail
-> do alts' <- mapM (desugarAltMatch sp') alts
xFail' <- desugarX sp' xFail
pure $ XMatch sp' alts' xFail'
lambda with a pattern for the parameter .
XAbsPat _a MSTerm PDefault mt x
-> XAnnot sp <$> XAbs (MTerm PDefault mt) <$> desugarX sp x
XAbsPat _a MSTerm (PVar b) mt x
-> XAnnot sp <$> XAbs (MTerm (PVar b) mt) <$> desugarX sp x
XAbsPat _a MSImplicit PDefault mt x
-> XAnnot sp <$> XAbs (MImplicit PDefault mt) <$> desugarX sp x
XAbsPat _a MSImplicit (PVar b) mt x
-> XAnnot sp <$> XAbs (MImplicit (PVar b) mt) <$> desugarX sp x
XAbsPat _a ps p mt x
-> do (b, u) <- newVar "xScrut"
x' <- desugarX sp x
case ps of
MSType
-> return xx
MSTerm
-> desugarX sp
$ XAnnot sp
$ XAbs (MTerm (PVar b) mt)
$ XCase (XVar u) [ AAltCase p [GExp x'] ]
MSImplicit
-> desugarX sp
$ XAnnot sp
$ XAbs (MImplicit (PVar b) mt)
$ XCase (XVar u) [ AAltCase p [GExp x'] ]
lambda case by inserting the intermediate variable .
XLamCase _a alts
-> do (b, u) <- newVar "x"
alts' <- mapM (desugarAltCase sp) alts
desugarX sp
$ XAnnot sp
$ XAbs (MTerm (PVar b) Nothing)
$ XCase (XVar u) alts'
XTuple a lxs
-> do let (ls, xs) = unzip lxs
xs' <- mapM (desugarX sp) xs
return $ XTuple a $ zip ls xs'
XRecord a lxs
-> do let (ls, xs) = unzip lxs
xs' <- mapM (desugarX sp) xs
return $ XRecord a $ zip ls xs'
XVariant a l x
-> do x' <- desugarX sp x
return $ XVariant a l x'
XArray a xs
-> do xs' <- mapM (desugarX sp) xs
return $ XArray a xs'
-- | Check if this is simple Case alternative, which means if the pattern
-- matches then we can run the expression on the right instead of needing
-- to skip to another alternative.
isSimpleAltCase :: AltCase -> Bool
isSimpleAltCase aa
= case aa of
AAltCase p [GExp _] -> isSimplePat p
_ -> False
-- | Simple patterns can be converted directly to core.
isSimplePat :: Pat -> Bool
isSimplePat pp
= case pp of
PDefault -> True
PAt{} -> False
PVar{} -> True
PData _ ps -> all isTrivialPat ps
-- | Trival patterns are the default one and variables,
-- and don't require an actual pattern to be matched.
isTrivialPat :: Pat -> Bool
isTrivialPat pp
= case pp of
PDefault -> True
PVar{} -> True
_ -> False
-------------------------------------------------------------------------------
-- | Desguar an argument.
desugarArg :: SP -> Arg -> S Arg
desugarArg sp arg
= case arg of
RType{} -> return arg
RWitness{} -> return arg
RTerm x -> RTerm <$> desugarX sp x
RImplicit arg' -> RImplicit <$> desugarArg sp arg'
-------------------------------------------------------------------------------
| some let bindings .
desugarLts :: SP -> Lets -> S Lets
desugarLts sp lts
= case lts of
LLet bm x -> LLet bm <$> desugarX sp x
LRec bxs
-> do let (bs, xs) = unzip bxs
xs' <- mapM (desugarX sp) xs
let bxs' = zip bs xs'
return $ LRec bxs'
LPrivate{} -> pure lts
LExtend{} -> pure lts
LGroup bRec cs -> LGroup bRec <$> mapM (desugarCl sp) cs
-------------------------------------------------------------------------------
| a guarded expression .
desugarGX :: SP -> GuardedExp -> S GuardedExp
desugarGX sp gx
= case gx of
GGuard (GPat p x) gxInner
-> do x' <- desugarX sp x
(g', gs') <- stripGuardToGuards (GPat p x')
gxInner' <- desugarGX sp gxInner
return $ GGuard g'
$ wrapGuards gs' gxInner'
GGuard g gx'
-> GGuard <$> desugarG sp g <*> desugarGX sp gx'
GExp x
-> GExp <$> desugarX sp x
| a guard .
desugarG :: SP -> Guard -> S Guard
desugarG sp g
= case g of
GPat p x -> GPat p <$> desugarX sp x
GPred x -> GPred <$> desugarX sp x
GDefault -> pure GDefault
-------------------------------------------------------------------------------
| a case alternative .
desugarAltCase :: SP -> AltCase -> S AltCase
desugarAltCase sp (AAltCase p gxs)
= do gxs' <- mapM (desugarGX sp >=> (return . cleanGX)) gxs
pure $ AAltCase p gxs'
| a match alternative .
desugarAltMatch :: SP -> AltMatch -> S AltMatch
desugarAltMatch sp (AAltMatch gx)
= do gx' <- (desugarGX sp >=> (return . cleanGX)) gx
pure $ AAltMatch gx'
-------------------------------------------------------------------------------
-- | Strip out patterns in the given parameter list,
-- yielding a list of guards that implement the patterns.
stripParamsToGuards :: [Param] -> S ([Param], [Guard])
stripParamsToGuards []
= return ([], [])
stripParamsToGuards (p:ps)
= case p of
MType{}
-> do (ps', gs) <- stripParamsToGuards ps
return (p : ps', gs)
MTerm b mt
-> stripValue MTerm b mt
MImplicit b mt
-> stripValue MImplicit b mt
where stripValue make p' mt
= case p' of
PDefault
-> do (ps', gs) <- stripParamsToGuards ps
return (p : ps', gs)
PVar _b
-> do (ps', gs) <- stripParamsToGuards ps
return (p : ps', gs)
PAt b p1
-> do (psParam', gsRest) <- stripParamsToGuards ps
(ps', gsData) <- stripPatsToGuards [p1]
let [p1'] = ps'
let Just u = takeBoundOfBind b
return ( make (PVar b) mt : psParam'
, GPat p1' (XVar u)
: (gsData ++ gsRest))
PData dc psData
-> do (psParam', gsRest) <- stripParamsToGuards ps
(psData', gsData) <- stripPatsToGuards psData
(b, u) <- newVar "p"
return ( make (PVar b) mt : psParam'
, GPat (PData dc psData') (XVar u)
: (gsData ++ gsRest))
-- | Strip out nested patterns from the given pattern list,
-- yielding a list of guards that implement the patterns.
stripPatsToGuards :: [Pat] -> S ([Pat], [Guard])
stripPatsToGuards []
= return ([], [])
stripPatsToGuards (p:ps)
= case p of
-- Match against defaults directly.
PDefault
-> do (ps', gs) <- stripPatsToGuards ps
return (p : ps', gs)
-- Match against vars directly.
PVar _b
-> do (ps', gs) <- stripPatsToGuards ps
return (p : ps', gs)
Strip at patterns .
PAt b p1
Strip the rest of the patterns .
(psRest', gsRest) <- stripPatsToGuards ps
-- Strip nested patterns from the argument.
(ps', gsData) <- stripPatsToGuards [p1]
let [p1'] = ps'
let Just u = takeBoundOfBind b
return ( PVar b : psRest'
, GPat p1' (XVar u)
: (gsData ++ gsRest))
Strip out nested patterns in the arguments of a data constructor .
PData dc psData
Strip the rest of the patterns .
(psRest', gsRest) <- stripPatsToGuards ps
Strip nested patterns out of the arguments .
(psData', gsData) <- stripPatsToGuards psData
-- Make a new name to bind the value we are matching against.
(b, u) <- newVar "p"
return ( PVar b : psRest'
, GPat (PData dc psData') (XVar u)
: (gsData ++ gsRest) )
-- | Like `stripPatsToGuards` but we take the whole enclosing guards.
-- This gives us access to the expression being scrutinised,
-- which we can match against directly without introducing a new variable.
stripGuardToGuards :: Guard -> S (Guard, [Guard])
stripGuardToGuards g
= case g of
-- Match against defaults and vars directly.
GPat PDefault _ -> return (g, [])
GPat PVar{} _ -> return (g, [])
-- As we alerady have the expression being matched we don't
-- need to introduce a new variable to name it.
GPat (PAt b p) x
-> do (ps', gsData) <- stripPatsToGuards [p]
let [p'] = ps'
let Just u = takeBoundOfBind b
return ( GPat (PVar b) x
, GPat p' (XVar u) : gsData)
GPat (PData dc psData) x
-> do (psData', gsData) <- stripPatsToGuards psData
return ( GPat (PData dc psData') x
, gsData)
GPred{} -> return (g, [])
GDefault{} -> return (g, [])
-- | Wrap more guards around the outside of a guarded expression.
wrapGuards :: [Guard] -> GuardedExp -> GuardedExp
wrapGuards [] gx = gx
wrapGuards (g : gs) gx = GGuard g (wrapGuards gs gx)
-- | Clean out default patterns from a guarded expression.
--
-- We end up with default patterns in guards when desugaring default
-- alternatives, but they serve no purpose in the desugared code.
cleanGX :: GuardedExp -> GuardedExp
cleanGX gx
= case gx of
GGuard GDefault gx' -> cleanGX gx'
GGuard g gx' -> GGuard g $ cleanGX gx'
GExp x -> GExp x
-------------------------------------------------------------------------------
-- | Source position.
type SP = SP.SourcePos
-- | State holding a variable name prefix and counter to
-- create fresh variable names.
type S = S.State (Text, Int)
-- | Evaluate a desguaring computation,
-- using the given prefix for freshly introduced variables.
evalState :: Text -> S a -> a
evalState n c
= S.evalState c (n, 0)
-- | Allocate a new named variable, yielding its associated bind and bound.
newVar :: Text -> S (Bind, Bound)
newVar pre
= do (n, i) <- S.get
let name = pre <> "$" <> n <> Text.pack (show i)
S.put (n, i + 1)
return (BName name, UName name)
| null | https://raw.githubusercontent.com/discus-lang/ddc/2baa1b4e2d43b6b02135257677671a83cb7384ac/src/s1/ddc-source-discus/DDC/Source/Discus/Transform/Guards.hs | haskell | # OPTIONS -Wno-unused-imports #
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Boilerplate.
Desguar patterns in a term abstraction.
Simple alternatives are ones where we can determine whether they
match just based on the head pattern. If all the alternatives
in a case-expression are simple then we can convert directly
to core-level case expressions.
Complex alternatives are ones that have include a guard or some
other pattern that may fail, and require us to skip to the next
alternatives. These are compiled as per match expressions.
We bind the scrutinee to a new variable so we can
defer to it multiple times in the body of the match.
At the start of each guarded expression we match against
the pattern from the original case alternative.
Result contains a let-binding to bind the scrutinee,
then a match expression that implements the complex
case alternatives.
| Check if this is simple Case alternative, which means if the pattern
matches then we can run the expression on the right instead of needing
to skip to another alternative.
| Simple patterns can be converted directly to core.
| Trival patterns are the default one and variables,
and don't require an actual pattern to be matched.
-----------------------------------------------------------------------------
| Desguar an argument.
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| Strip out patterns in the given parameter list,
yielding a list of guards that implement the patterns.
| Strip out nested patterns from the given pattern list,
yielding a list of guards that implement the patterns.
Match against defaults directly.
Match against vars directly.
Strip nested patterns from the argument.
Make a new name to bind the value we are matching against.
| Like `stripPatsToGuards` but we take the whole enclosing guards.
This gives us access to the expression being scrutinised,
which we can match against directly without introducing a new variable.
Match against defaults and vars directly.
As we alerady have the expression being matched we don't
need to introduce a new variable to name it.
| Wrap more guards around the outside of a guarded expression.
| Clean out default patterns from a guarded expression.
We end up with default patterns in guards when desugaring default
alternatives, but they serve no purpose in the desugared code.
-----------------------------------------------------------------------------
| Source position.
| State holding a variable name prefix and counter to
create fresh variable names.
| Evaluate a desguaring computation,
using the given prefix for freshly introduced variables.
| Allocate a new named variable, yielding its associated bind and bound. |
Suppress Data . Monoid warnings during GHC 8.4.1 transition
| guards and nested patterns to match expressions .
module DDC.Source.Discus.Transform.Guards
( type S, evalState, newVar
, desugarModule)
where
import DDC.Source.Discus.Module
import DDC.Source.Discus.Exp
import Data.Monoid
import Data.Text (Text)
import Control.Monad
import qualified DDC.Data.SourcePos as SP
import qualified Control.Monad.State as S
import qualified Data.Text as Text
GHC 8.2 - > 8.4 transition .
import Data.Monoid (Monoid(..))
| guards and nested patterns to match expressions .
desugarModule :: Module SourcePos -> S (Module SourcePos)
desugarModule mm
= do ts' <- mapM desugarTop $ moduleTops mm
return $ mm { moduleTops = ts' }
| a top - level thing .
desugarTop :: Top SourcePos -> S (Top SourcePos)
desugarTop tt
= case tt of
TopClause sp c -> TopClause sp <$> desugarCl sp c
TopData{} -> return tt
TopType{} -> return tt
| a clause .
desugarCl :: SP -> Clause -> S Clause
desugarCl _sp cc
= case cc of
SSig{}
-> return cc
SLet sp mt ps gxs
-> do (ps', gsParam) <- stripParamsToGuards ps
gxs' <- mapM (desugarGX sp >=> (return . cleanGX))
$ map (wrapGuards gsParam) gxs
return $ SLet sp mt ps' gxs'
| an expression .
desugarX :: SP -> Exp -> S Exp
desugarX sp xx
= case xx of
XAnnot sp' x -> XAnnot sp' <$> desugarX sp' x
XPrim{} -> pure xx
XVar{} -> pure xx
XCon{} -> pure xx
XApp x1 r2 -> XApp <$> desugarX sp x1 <*> desugarArg sp r2
XLet lts x -> XLet <$> desugarLts sp lts <*> desugarX sp x
XCast c x -> XCast c <$> desugarX sp x
XDefix a xs -> XDefix a <$> mapM (desugarArg sp) xs
XInfixOp{} -> pure xx
XInfixVar{} -> pure xx
XWhere a x cls -> XWhere a <$> desugarX sp x
<*> mapM (desugarCl sp) cls
XAbs bParam xBody
-> case bParam of
MTerm PDefault _
-> XAbs bParam <$> desugarX sp xBody
MTerm PVar{} _
-> XAbs bParam <$> desugarX sp xBody
MTerm pat mtBind
-> do (b, u) <- newVar "scrut"
xBody' <- desugarX sp
$ XCase (XVar u) [AAltCase pat [GExp xBody]]
return $ XAbs (MTerm (PVar b) mtBind) xBody'
_ -> XAbs bParam <$> desugarX sp xBody
a case expression .
XCase xScrut alts
| all isSimpleAltCase alts
-> do xScrut' <- desugarX sp xScrut
alts' <- mapM (desugarAltCase sp) alts
return $ XAnnot sp
$ XCase xScrut' alts'
| otherwise
the scrutinee .
xScrut' <- desugarX sp xScrut
(b, u) <- newVar "xScrut"
gxsAlt' <- mapM (desugarGX sp >=> (return . cleanGX))
$ concat [ map (GGuard (GPat p (XVar u))) gxs
| AAltCase p gxs <- alts]
the body of each alternative .
alts' <- mapM (desugarAltMatch sp)
$ [AAltMatch gx | gx <- gxsAlt']
pure $ XAnnot sp
$ XLet (LLet (XBindVarMT b Nothing) xScrut')
$ XMatch sp alts'
$ makeXErrorDefault
(Text.pack $ SP.sourcePosSource sp)
(fromIntegral $ SP.sourcePosLine sp)
a match expression from the source code .
XMatch sp' alts xFail
-> do alts' <- mapM (desugarAltMatch sp') alts
xFail' <- desugarX sp' xFail
pure $ XMatch sp' alts' xFail'
lambda with a pattern for the parameter .
XAbsPat _a MSTerm PDefault mt x
-> XAnnot sp <$> XAbs (MTerm PDefault mt) <$> desugarX sp x
XAbsPat _a MSTerm (PVar b) mt x
-> XAnnot sp <$> XAbs (MTerm (PVar b) mt) <$> desugarX sp x
XAbsPat _a MSImplicit PDefault mt x
-> XAnnot sp <$> XAbs (MImplicit PDefault mt) <$> desugarX sp x
XAbsPat _a MSImplicit (PVar b) mt x
-> XAnnot sp <$> XAbs (MImplicit (PVar b) mt) <$> desugarX sp x
XAbsPat _a ps p mt x
-> do (b, u) <- newVar "xScrut"
x' <- desugarX sp x
case ps of
MSType
-> return xx
MSTerm
-> desugarX sp
$ XAnnot sp
$ XAbs (MTerm (PVar b) mt)
$ XCase (XVar u) [ AAltCase p [GExp x'] ]
MSImplicit
-> desugarX sp
$ XAnnot sp
$ XAbs (MImplicit (PVar b) mt)
$ XCase (XVar u) [ AAltCase p [GExp x'] ]
lambda case by inserting the intermediate variable .
XLamCase _a alts
-> do (b, u) <- newVar "x"
alts' <- mapM (desugarAltCase sp) alts
desugarX sp
$ XAnnot sp
$ XAbs (MTerm (PVar b) Nothing)
$ XCase (XVar u) alts'
XTuple a lxs
-> do let (ls, xs) = unzip lxs
xs' <- mapM (desugarX sp) xs
return $ XTuple a $ zip ls xs'
XRecord a lxs
-> do let (ls, xs) = unzip lxs
xs' <- mapM (desugarX sp) xs
return $ XRecord a $ zip ls xs'
XVariant a l x
-> do x' <- desugarX sp x
return $ XVariant a l x'
XArray a xs
-> do xs' <- mapM (desugarX sp) xs
return $ XArray a xs'
isSimpleAltCase :: AltCase -> Bool
isSimpleAltCase aa
= case aa of
AAltCase p [GExp _] -> isSimplePat p
_ -> False
isSimplePat :: Pat -> Bool
isSimplePat pp
= case pp of
PDefault -> True
PAt{} -> False
PVar{} -> True
PData _ ps -> all isTrivialPat ps
isTrivialPat :: Pat -> Bool
isTrivialPat pp
= case pp of
PDefault -> True
PVar{} -> True
_ -> False
desugarArg :: SP -> Arg -> S Arg
desugarArg sp arg
= case arg of
RType{} -> return arg
RWitness{} -> return arg
RTerm x -> RTerm <$> desugarX sp x
RImplicit arg' -> RImplicit <$> desugarArg sp arg'
| some let bindings .
desugarLts :: SP -> Lets -> S Lets
desugarLts sp lts
= case lts of
LLet bm x -> LLet bm <$> desugarX sp x
LRec bxs
-> do let (bs, xs) = unzip bxs
xs' <- mapM (desugarX sp) xs
let bxs' = zip bs xs'
return $ LRec bxs'
LPrivate{} -> pure lts
LExtend{} -> pure lts
LGroup bRec cs -> LGroup bRec <$> mapM (desugarCl sp) cs
| a guarded expression .
desugarGX :: SP -> GuardedExp -> S GuardedExp
desugarGX sp gx
= case gx of
GGuard (GPat p x) gxInner
-> do x' <- desugarX sp x
(g', gs') <- stripGuardToGuards (GPat p x')
gxInner' <- desugarGX sp gxInner
return $ GGuard g'
$ wrapGuards gs' gxInner'
GGuard g gx'
-> GGuard <$> desugarG sp g <*> desugarGX sp gx'
GExp x
-> GExp <$> desugarX sp x
| a guard .
desugarG :: SP -> Guard -> S Guard
desugarG sp g
= case g of
GPat p x -> GPat p <$> desugarX sp x
GPred x -> GPred <$> desugarX sp x
GDefault -> pure GDefault
| a case alternative .
desugarAltCase :: SP -> AltCase -> S AltCase
desugarAltCase sp (AAltCase p gxs)
= do gxs' <- mapM (desugarGX sp >=> (return . cleanGX)) gxs
pure $ AAltCase p gxs'
| a match alternative .
desugarAltMatch :: SP -> AltMatch -> S AltMatch
desugarAltMatch sp (AAltMatch gx)
= do gx' <- (desugarGX sp >=> (return . cleanGX)) gx
pure $ AAltMatch gx'
stripParamsToGuards :: [Param] -> S ([Param], [Guard])
stripParamsToGuards []
= return ([], [])
stripParamsToGuards (p:ps)
= case p of
MType{}
-> do (ps', gs) <- stripParamsToGuards ps
return (p : ps', gs)
MTerm b mt
-> stripValue MTerm b mt
MImplicit b mt
-> stripValue MImplicit b mt
where stripValue make p' mt
= case p' of
PDefault
-> do (ps', gs) <- stripParamsToGuards ps
return (p : ps', gs)
PVar _b
-> do (ps', gs) <- stripParamsToGuards ps
return (p : ps', gs)
PAt b p1
-> do (psParam', gsRest) <- stripParamsToGuards ps
(ps', gsData) <- stripPatsToGuards [p1]
let [p1'] = ps'
let Just u = takeBoundOfBind b
return ( make (PVar b) mt : psParam'
, GPat p1' (XVar u)
: (gsData ++ gsRest))
PData dc psData
-> do (psParam', gsRest) <- stripParamsToGuards ps
(psData', gsData) <- stripPatsToGuards psData
(b, u) <- newVar "p"
return ( make (PVar b) mt : psParam'
, GPat (PData dc psData') (XVar u)
: (gsData ++ gsRest))
stripPatsToGuards :: [Pat] -> S ([Pat], [Guard])
stripPatsToGuards []
= return ([], [])
stripPatsToGuards (p:ps)
= case p of
PDefault
-> do (ps', gs) <- stripPatsToGuards ps
return (p : ps', gs)
PVar _b
-> do (ps', gs) <- stripPatsToGuards ps
return (p : ps', gs)
Strip at patterns .
PAt b p1
Strip the rest of the patterns .
(psRest', gsRest) <- stripPatsToGuards ps
(ps', gsData) <- stripPatsToGuards [p1]
let [p1'] = ps'
let Just u = takeBoundOfBind b
return ( PVar b : psRest'
, GPat p1' (XVar u)
: (gsData ++ gsRest))
Strip out nested patterns in the arguments of a data constructor .
PData dc psData
Strip the rest of the patterns .
(psRest', gsRest) <- stripPatsToGuards ps
Strip nested patterns out of the arguments .
(psData', gsData) <- stripPatsToGuards psData
(b, u) <- newVar "p"
return ( PVar b : psRest'
, GPat (PData dc psData') (XVar u)
: (gsData ++ gsRest) )
stripGuardToGuards :: Guard -> S (Guard, [Guard])
stripGuardToGuards g
= case g of
GPat PDefault _ -> return (g, [])
GPat PVar{} _ -> return (g, [])
GPat (PAt b p) x
-> do (ps', gsData) <- stripPatsToGuards [p]
let [p'] = ps'
let Just u = takeBoundOfBind b
return ( GPat (PVar b) x
, GPat p' (XVar u) : gsData)
GPat (PData dc psData) x
-> do (psData', gsData) <- stripPatsToGuards psData
return ( GPat (PData dc psData') x
, gsData)
GPred{} -> return (g, [])
GDefault{} -> return (g, [])
wrapGuards :: [Guard] -> GuardedExp -> GuardedExp
wrapGuards [] gx = gx
wrapGuards (g : gs) gx = GGuard g (wrapGuards gs gx)
cleanGX :: GuardedExp -> GuardedExp
cleanGX gx
= case gx of
GGuard GDefault gx' -> cleanGX gx'
GGuard g gx' -> GGuard g $ cleanGX gx'
GExp x -> GExp x
type SP = SP.SourcePos
type S = S.State (Text, Int)
evalState :: Text -> S a -> a
evalState n c
= S.evalState c (n, 0)
newVar :: Text -> S (Bind, Bound)
newVar pre
= do (n, i) <- S.get
let name = pre <> "$" <> n <> Text.pack (show i)
S.put (n, i + 1)
return (BName name, UName name)
|
328472bd4847263f19dc19619146a804e2c3f590d24ed2d3fd6d193bb773276d | haskell/cabal | cabal.test.hs | import Test.Cabal.Prelude
-- Points to dist.
main = cabalTest $
fails $ cabal "check" []
| null | https://raw.githubusercontent.com/haskell/cabal/1cfe7c4c7257aa7ae450209d34b4a359e6703a10/cabal-testsuite/PackageTests/Check/ConfiguredPackage/Paths/DistPoint/cabal.test.hs | haskell | Points to dist. | import Test.Cabal.Prelude
main = cabalTest $
fails $ cabal "check" []
|
8dc3beb822ef983c400eaa04c2694af6b16ef11e5e70190ebe3366cef518347d | mhkoji/Senn | base.lisp | (fiveam:in-suite* :senn.t)
| null | https://raw.githubusercontent.com/mhkoji/Senn/74f909bfe10ba360523be5b3b162688f21f5b333/senn/t/base.lisp | lisp | (fiveam:in-suite* :senn.t)
| |
acd5475a303f3d337f4d4d3eeeda59b995d61215379c4dbecaae888fbfef8e57 | simonmar/monad-par | TestHelpers.hs | {-# LANGUAGE BangPatterns #-}
module TestHelpers where
import Data.List
import Prelude hiding (catch)
import Control.Exception
import System.IO.Unsafe (unsafePerformIO)
import Data.IORef
import Data.Time.Clock
import Control.Monad.Par.Class
------------------------------------------------------------
-- Helpers
_ : : IO a - > Par a
_unsafeio :: ParFuture iv p => IO a -> p a
_unsafeio io = let x = unsafePerformIO io in
x `seq` return x
_waste_time :: Int -> Double
_waste_time n = loop n 1.00111
where
loop 0 !x = x
loop !n !x | x > 100.0 = loop (n-1) (x / 2)
loop !n !x = loop (n-1) (x + x * 0.5011)
-- This version watches the clock so it uses a constant amount of time
-- regardless of compile/interpret mode an opt lvl.
waste_time :: Double -> IO Double
waste_time seconds =
do strt <- getCurrentTime
let loop !x | x > 100.0 = chk (x / 2)
loop !x = chk (x + x * 0.5011)
chk !x = do t <- getCurrentTime
if diffUTCTime t strt >= realToFrac seconds
then return x
else loop x
loop 1.00111
-- Obviously this takes a lot longer if it's interpreted:
awhile = 300000000
awhile :: Integer
awhile = 3 * 1000 * 1000
awhile = 300000
atomicModifyIORef_ :: IORef a -> (a -> a) -> IO ()
atomicModifyIORef_ rf fn = atomicModifyIORef rf (\x -> (fn x, ()))
-- | Haskell doesn't offer a way to create a Handle for in-memory output.
So here we use IORefs instead ...
collectOutput :: (IORef [String] -> IO ()) -> IO String
collectOutput fn =
do c <- newIORef []
fn c
ls <- readIORef c
return (unlines (reverse ls))
prnt :: IORef [String] -> String -> IO ()
prnt ref str = atomicModifyIORef_ ref (str:)
-- _prnt :: IORef [String] -> String -> Par ()
_prnt :: ParFuture iv p => IORef [String] -> String -> p ()
_prnt ref = _unsafeio . prnt ref
-- -----------------------------------------------------------------------------
assertException : : ( Exception e , e ) = > e - > IO a - > IO ( )
assertException ex action =
-- handleJust isWanted (const $ return ()) $ do
-- action
-- assertFailure $ "Expected exception: " ++ show ex
-- where isWanted = guard . (== ex)
-- | Ensure that evaluating an expression returns an exception
-- containing one of the expected messages.
assertException :: [String] -> a -> IO ()
assertException msgs val = do
x <- catch (do evaluate val; return Nothing)
(\e -> do putStrLn$ "Good. Caught exception: " ++ show (e :: SomeException)
return (Just$ show e))
case x of
Nothing -> error "Failed to get an exception!"
Just s ->
if any (`isInfixOf` s) msgs
then return ()
else error$ "Got the wrong exception, expected to one of the strings: "++ show msgs
++ "\nInstead got this exception:\n " ++ show s
| null | https://raw.githubusercontent.com/simonmar/monad-par/9a25911e2004c66c1eb14dc200d1f5b0c2d83f0e/monad-par/tests/TestHelpers.hs | haskell | # LANGUAGE BangPatterns #
----------------------------------------------------------
Helpers
This version watches the clock so it uses a constant amount of time
regardless of compile/interpret mode an opt lvl.
Obviously this takes a lot longer if it's interpreted:
| Haskell doesn't offer a way to create a Handle for in-memory output.
_prnt :: IORef [String] -> String -> Par ()
-----------------------------------------------------------------------------
handleJust isWanted (const $ return ()) $ do
action
assertFailure $ "Expected exception: " ++ show ex
where isWanted = guard . (== ex)
| Ensure that evaluating an expression returns an exception
containing one of the expected messages. |
module TestHelpers where
import Data.List
import Prelude hiding (catch)
import Control.Exception
import System.IO.Unsafe (unsafePerformIO)
import Data.IORef
import Data.Time.Clock
import Control.Monad.Par.Class
_ : : IO a - > Par a
_unsafeio :: ParFuture iv p => IO a -> p a
_unsafeio io = let x = unsafePerformIO io in
x `seq` return x
_waste_time :: Int -> Double
_waste_time n = loop n 1.00111
where
loop 0 !x = x
loop !n !x | x > 100.0 = loop (n-1) (x / 2)
loop !n !x = loop (n-1) (x + x * 0.5011)
waste_time :: Double -> IO Double
waste_time seconds =
do strt <- getCurrentTime
let loop !x | x > 100.0 = chk (x / 2)
loop !x = chk (x + x * 0.5011)
chk !x = do t <- getCurrentTime
if diffUTCTime t strt >= realToFrac seconds
then return x
else loop x
loop 1.00111
awhile = 300000000
awhile :: Integer
awhile = 3 * 1000 * 1000
awhile = 300000
atomicModifyIORef_ :: IORef a -> (a -> a) -> IO ()
atomicModifyIORef_ rf fn = atomicModifyIORef rf (\x -> (fn x, ()))
So here we use IORefs instead ...
collectOutput :: (IORef [String] -> IO ()) -> IO String
collectOutput fn =
do c <- newIORef []
fn c
ls <- readIORef c
return (unlines (reverse ls))
prnt :: IORef [String] -> String -> IO ()
prnt ref str = atomicModifyIORef_ ref (str:)
_prnt :: ParFuture iv p => IORef [String] -> String -> p ()
_prnt ref = _unsafeio . prnt ref
assertException : : ( Exception e , e ) = > e - > IO a - > IO ( )
assertException ex action =
assertException :: [String] -> a -> IO ()
assertException msgs val = do
x <- catch (do evaluate val; return Nothing)
(\e -> do putStrLn$ "Good. Caught exception: " ++ show (e :: SomeException)
return (Just$ show e))
case x of
Nothing -> error "Failed to get an exception!"
Just s ->
if any (`isInfixOf` s) msgs
then return ()
else error$ "Got the wrong exception, expected to one of the strings: "++ show msgs
++ "\nInstead got this exception:\n " ++ show s
|
9d9f5c106447e3abb112ab2c4fa8b19032af4cac99e7741da5ce9dddb6c420a5 | esl/MongooseIM | mongoose_domain_api.erl | %% Main module other parts of MongooseIM should use to access the domain
%% management.
-module(mongoose_domain_api).
-include("mongoose_logger.hrl").
-export([init/0,
stop/0,
get_host_type/1]).
external domain API for GraphQL or REST handlers
-export([insert_domain/2,
delete_domain/2,
request_delete_domain/2,
disable_domain/1,
enable_domain/1,
get_domain_details/1,
check_host_type_and_get_domains/1]).
external domain admin API for GraphQL or REST handlers
-export([set_domain_password/2,
delete_domain_password/1]).
%% domain API
-export([get_domain_host_type/1,
get_all_static/0,
get_domains_by_host_type/1]).
%% domain admin API
-export([check_domain_password/2]).
%% subdomain API
-export([register_subdomain/3,
unregister_subdomain/2,
get_subdomain_host_type/1,
get_subdomain_info/1,
get_all_subdomains_for_domain/1]).
%% Helper for remove_domain
-export([remove_domain_wrapper/3,
do_delete_domain_in_progress/2]).
%% For testing
-export([get_all_dynamic/0]).
-ignore_xref([get_all_static/0]).
-ignore_xref([get_all_dynamic/0]).
-ignore_xref([stop/0]).
-type status() :: enabled | disabled | deleting.
-type domain() :: jid:lserver().
-type host_type() :: mongooseim:host_type().
-type subdomain_pattern() :: mongoose_subdomain_utils:subdomain_pattern().
-type remove_domain_acc() :: #{failed := [module()]}.
-type domain_info() :: #{domain := domain(), host_type => host_type(), status => status()}.
-type insert_result() :: {ok, domain_info()} |
{static | unknown_host_type | duplicate, iodata()}.
-type delete_result() :: {ok, domain_info()} |
{static | unknown_host_type | not_found | wrong_host_type, iodata()}.
-type set_status_result() :: {ok, domain_info()} |
{static | unknown_host_type | not_found | deleted, iodata()}.
-type get_domains_result() :: {ok, [domain()]} | {unknown_host_type, iodata()}.
-type get_domain_details_result() :: {ok, domain_info()} | {static | not_found, iodata()}.
-export_type([status/0, remove_domain_acc/0]).
-spec init() -> ok | {error, term()}.
init() ->
mongoose_domain_core:start(),
mongoose_subdomain_core:start(),
mongoose_lazy_routing:start().
%% Stops gen_servers, that are started from init/0
%% Does not fail, even if servers are already stopped
-spec stop() -> ok.
stop() ->
catch mongoose_domain_core:stop(),
catch mongoose_subdomain_core:stop(),
catch mongoose_lazy_routing:stop(),
ok.
-spec insert_domain(domain(), host_type()) -> insert_result().
insert_domain(Domain, HostType) ->
M = #{domain => Domain, host_type => HostType},
fold(M, [fun check_domain/1, fun check_host_type/1,
fun do_insert_domain/1, fun force_check/1, fun return_domain/1]).
-spec delete_domain(domain(), host_type()) -> delete_result().
delete_domain(Domain, HostType) ->
delete_domain(Domain, HostType, sync).
-spec request_delete_domain(domain(), host_type()) -> delete_result().
request_delete_domain(Domain, HostType) ->
delete_domain(Domain, HostType, async).
-spec delete_domain(domain(), host_type(), sync | async) -> delete_result().
delete_domain(Domain, HostType, RequestType) ->
M = #{domain => Domain, host_type => HostType, request_type => RequestType},
fold(M, [fun check_domain/1, fun check_host_type/1, fun set_domain_for_deletion/1,
fun force_check/1, fun do_delete_domain/1, fun return_domain/1]).
-spec disable_domain(domain()) -> set_status_result().
disable_domain(Domain) ->
M = #{domain => Domain, status => disabled},
fold(M, [fun check_domain/1, fun set_status/1, fun force_check/1, fun return_domain/1]).
-spec enable_domain(domain()) -> set_status_result().
enable_domain(Domain) ->
M = #{domain => Domain, status => enabled},
fold(M, [fun check_domain/1, fun set_status/1, fun force_check/1, fun return_domain/1]).
-spec check_host_type_and_get_domains(host_type()) -> get_domains_result().
check_host_type_and_get_domains(HostType) ->
M = #{host_type => HostType},
fold(M, [fun check_host_type/1, fun get_domains/1]).
-spec get_domain_details(domain()) -> get_domain_details_result().
get_domain_details(Domain) ->
M = #{domain => Domain},
fold(M, [fun check_domain/1, fun select_domain/1, fun return_domain/1]).
check_domain(M = #{domain := Domain}) ->
case mongoose_domain_core:is_static(Domain) of
true ->
{static, <<"Domain is static">>};
false ->
M
end.
check_host_type(M = #{host_type := HostType}) ->
case mongoose_domain_core:is_host_type_allowed(HostType) of
true ->
M;
false ->
{unknown_host_type, <<"Unknown host type">>}
end.
select_domain(M = #{domain := Domain}) ->
case mongoose_domain_sql:select_domain(Domain) of
{ok, DomainDetails} ->
maps:merge(M, DomainDetails);
{error, not_found} ->
{not_found, <<"Given domain does not exist">>}
end.
do_insert_domain(M = #{domain := Domain, host_type := HostType}) ->
case mongoose_domain_sql:insert_domain(Domain, HostType) of
ok ->
M;
{error, duplicate} ->
{duplicate, <<"Domain already exists">>}
end.
set_domain_for_deletion(M = #{domain := Domain, host_type := HostType}) ->
case mongoose_domain_sql:set_domain_for_deletion(Domain, HostType) of
ok ->
M;
{error, wrong_host_type} ->
{wrong_host_type, <<"Wrong host type was provided">>};
{error, not_found} ->
{not_found, <<"Given domain does not exist">>}
end.
force_check(M) ->
service_domain_db:force_check_for_updates(),
M.
do_delete_domain(M = #{domain := Domain, host_type := HostType, request_type := RequestType}) ->
mongoose_domain_sql:delete_domain_admin(Domain),
case RequestType of
sync ->
do_delete_domain_in_progress(Domain, HostType),
M#{status => deleted};
async ->
mongoose_domain_db_cleaner:request_delete_domain(Domain, HostType),
M#{status => deleting}
end.
set_status(M = #{domain := Domain, status := Status}) ->
case mongoose_domain_sql:set_status(Domain, Status) of
{error, unknown_host_type} ->
{unknown_host_type, <<"Unknown host type">>};
{error, domain_deleted} ->
{deleted, <<"Domain has been deleted">>};
{error, not_found} ->
{not_found, <<"Given domain does not exist">>};
ok ->
M
end.
return_domain(M) ->
{ok, maps:with([domain, host_type, status], M)}.
get_domains(#{host_type := HostType}) ->
{ok, get_domains_by_host_type(HostType)}.
-spec do_delete_domain_in_progress(domain(), host_type()) -> ok.
do_delete_domain_in_progress(Domain, HostType) ->
#{failed := []} = mongoose_hooks:remove_domain(HostType, Domain),
ok = mongoose_domain_sql:delete_domain(Domain, HostType).
Domain should be nameprepped using ` jid : '
-spec get_host_type(domain()) ->
{ok, host_type()} | {error, not_found}.
get_host_type(Domain) ->
case get_domain_host_type(Domain) of
{ok, HostType} -> {ok, HostType};
{error, not_found} ->
get_subdomain_host_type(Domain)
end.
Domain should be nameprepped using ` jid : '
-spec get_domain_host_type(domain()) ->
{ok, host_type()} | {error, not_found}.
get_domain_host_type(Domain) ->
mongoose_domain_core:get_host_type(Domain).
Subdomain should be nameprepped using ` jid : '
-spec get_subdomain_host_type(domain()) ->
{ok, host_type()} | {error, not_found}.
get_subdomain_host_type(Subdomain) ->
mongoose_subdomain_core:get_host_type(Subdomain).
Subdomain should be nameprepped using ` jid : '
-spec get_subdomain_info(domain()) ->
{ok, mongoose_subdomain_core:subdomain_info()} | {error, not_found}.
get_subdomain_info(Subdomain) ->
mongoose_subdomain_core:get_subdomain_info(Subdomain).
%% Get the list of the host_types provided during initialisation
%% This has complexity N, where N is the number of online domains.
-spec get_all_static() -> [{domain(), host_type()}].
get_all_static() ->
mongoose_domain_core:get_all_static().
Get domains , loaded from DB to this node
-spec get_all_dynamic() -> [{domain(), host_type()}].
get_all_dynamic() ->
mongoose_domain_core:get_all_dynamic().
%% Get the list of the host_types provided during initialisation
%% This has complexity N, where N is the number of online domains.
-spec get_domains_by_host_type(host_type()) -> [domain()].
get_domains_by_host_type(HostType) ->
mongoose_domain_core:get_domains_by_host_type(HostType).
-type password() :: binary().
-spec check_domain_password(domain(), password()) -> ok | {error, wrong_password | not_found}.
check_domain_password(Domain, Password) ->
case mongoose_domain_sql:select_domain_admin(Domain) of
{ok, {Domain, PassDetails}} ->
case do_check_domain_password(Password, PassDetails) of
true ->
ok;
false ->
{error, wrong_password}
end;
{error, not_found} ->
{error, not_found}
end.
do_check_domain_password(Password, PassDetails) ->
case mongoose_scram:deserialize(PassDetails) of
{ok, Scram} ->
mongoose_scram:check_password(Password, Scram);
{error, _Reason} ->
false
end.
-spec set_domain_password(domain(), password()) -> {ok | not_found, iodata()}.
set_domain_password(Domain, Password) ->
case get_host_type(Domain) of
{ok, _} ->
ok = mongoose_domain_sql:set_domain_admin(Domain, Password),
{ok, <<"Domain password set successfully">>};
{error, not_found} ->
{not_found, <<"Given domain does not exist or is disabled">>}
end.
-spec delete_domain_password(domain()) -> {ok, iodata()}.
delete_domain_password(Domain) ->
case mongoose_domain_sql:delete_domain_admin(Domain) of
ok ->
{ok, <<"Domain password deleted successfully">>};
{error, not_found} ->
{not_found, <<"Domain password does not exist">>}
end.
-spec register_subdomain(host_type(), subdomain_pattern(),
mongoose_packet_handler:t()) ->
ok | {error, already_registered | subdomain_already_exists}.
register_subdomain(HostType, SubdomainPattern, PacketHandler) ->
mongoose_subdomain_core:register_subdomain(HostType, SubdomainPattern,
PacketHandler).
-spec unregister_subdomain(host_type(), subdomain_pattern()) -> ok.
unregister_subdomain(HostType, SubdomainPattern) ->
mongoose_subdomain_core:unregister_subdomain(HostType, SubdomainPattern).
-spec get_all_subdomains_for_domain(domain()) ->
[mongoose_subdomain_core:subdomain_info()].
get_all_subdomains_for_domain(Domain) ->
mongoose_subdomain_core:get_all_subdomains_for_domain(Domain).
-spec remove_domain_wrapper(remove_domain_acc(), fun(() -> remove_domain_acc()), module()) ->
{ok | stop, remove_domain_acc()}.
remove_domain_wrapper(Acc, F, Module) ->
try F() of
Acc -> {ok, Acc}
catch C:R:S ->
?LOG_ERROR(#{what => hook_failed,
text => <<"Error running hook">>,
module => Module,
class => C, reason => R, stacktrace => S}),
{stop, Acc#{failed := [Module | maps:get(failed, Acc)]}}
end.
fold({_, _} = Result, _) ->
Result;
fold(M, [Step | Rest]) when is_map(M) ->
fold(Step(M), Rest).
| null | https://raw.githubusercontent.com/esl/MongooseIM/c863ca0a6109c782577a63e00510f634ca31d831/src/domain/mongoose_domain_api.erl | erlang | Main module other parts of MongooseIM should use to access the domain
management.
domain API
domain admin API
subdomain API
Helper for remove_domain
For testing
Stops gen_servers, that are started from init/0
Does not fail, even if servers are already stopped
Get the list of the host_types provided during initialisation
This has complexity N, where N is the number of online domains.
Get the list of the host_types provided during initialisation
This has complexity N, where N is the number of online domains. | -module(mongoose_domain_api).
-include("mongoose_logger.hrl").
-export([init/0,
stop/0,
get_host_type/1]).
external domain API for GraphQL or REST handlers
-export([insert_domain/2,
delete_domain/2,
request_delete_domain/2,
disable_domain/1,
enable_domain/1,
get_domain_details/1,
check_host_type_and_get_domains/1]).
external domain admin API for GraphQL or REST handlers
-export([set_domain_password/2,
delete_domain_password/1]).
-export([get_domain_host_type/1,
get_all_static/0,
get_domains_by_host_type/1]).
-export([check_domain_password/2]).
-export([register_subdomain/3,
unregister_subdomain/2,
get_subdomain_host_type/1,
get_subdomain_info/1,
get_all_subdomains_for_domain/1]).
-export([remove_domain_wrapper/3,
do_delete_domain_in_progress/2]).
-export([get_all_dynamic/0]).
-ignore_xref([get_all_static/0]).
-ignore_xref([get_all_dynamic/0]).
-ignore_xref([stop/0]).
-type status() :: enabled | disabled | deleting.
-type domain() :: jid:lserver().
-type host_type() :: mongooseim:host_type().
-type subdomain_pattern() :: mongoose_subdomain_utils:subdomain_pattern().
-type remove_domain_acc() :: #{failed := [module()]}.
-type domain_info() :: #{domain := domain(), host_type => host_type(), status => status()}.
-type insert_result() :: {ok, domain_info()} |
{static | unknown_host_type | duplicate, iodata()}.
-type delete_result() :: {ok, domain_info()} |
{static | unknown_host_type | not_found | wrong_host_type, iodata()}.
-type set_status_result() :: {ok, domain_info()} |
{static | unknown_host_type | not_found | deleted, iodata()}.
-type get_domains_result() :: {ok, [domain()]} | {unknown_host_type, iodata()}.
-type get_domain_details_result() :: {ok, domain_info()} | {static | not_found, iodata()}.
-export_type([status/0, remove_domain_acc/0]).
-spec init() -> ok | {error, term()}.
init() ->
mongoose_domain_core:start(),
mongoose_subdomain_core:start(),
mongoose_lazy_routing:start().
-spec stop() -> ok.
stop() ->
catch mongoose_domain_core:stop(),
catch mongoose_subdomain_core:stop(),
catch mongoose_lazy_routing:stop(),
ok.
-spec insert_domain(domain(), host_type()) -> insert_result().
insert_domain(Domain, HostType) ->
M = #{domain => Domain, host_type => HostType},
fold(M, [fun check_domain/1, fun check_host_type/1,
fun do_insert_domain/1, fun force_check/1, fun return_domain/1]).
-spec delete_domain(domain(), host_type()) -> delete_result().
delete_domain(Domain, HostType) ->
delete_domain(Domain, HostType, sync).
-spec request_delete_domain(domain(), host_type()) -> delete_result().
request_delete_domain(Domain, HostType) ->
delete_domain(Domain, HostType, async).
-spec delete_domain(domain(), host_type(), sync | async) -> delete_result().
delete_domain(Domain, HostType, RequestType) ->
M = #{domain => Domain, host_type => HostType, request_type => RequestType},
fold(M, [fun check_domain/1, fun check_host_type/1, fun set_domain_for_deletion/1,
fun force_check/1, fun do_delete_domain/1, fun return_domain/1]).
-spec disable_domain(domain()) -> set_status_result().
disable_domain(Domain) ->
M = #{domain => Domain, status => disabled},
fold(M, [fun check_domain/1, fun set_status/1, fun force_check/1, fun return_domain/1]).
-spec enable_domain(domain()) -> set_status_result().
enable_domain(Domain) ->
M = #{domain => Domain, status => enabled},
fold(M, [fun check_domain/1, fun set_status/1, fun force_check/1, fun return_domain/1]).
-spec check_host_type_and_get_domains(host_type()) -> get_domains_result().
check_host_type_and_get_domains(HostType) ->
M = #{host_type => HostType},
fold(M, [fun check_host_type/1, fun get_domains/1]).
-spec get_domain_details(domain()) -> get_domain_details_result().
get_domain_details(Domain) ->
M = #{domain => Domain},
fold(M, [fun check_domain/1, fun select_domain/1, fun return_domain/1]).
check_domain(M = #{domain := Domain}) ->
case mongoose_domain_core:is_static(Domain) of
true ->
{static, <<"Domain is static">>};
false ->
M
end.
check_host_type(M = #{host_type := HostType}) ->
case mongoose_domain_core:is_host_type_allowed(HostType) of
true ->
M;
false ->
{unknown_host_type, <<"Unknown host type">>}
end.
select_domain(M = #{domain := Domain}) ->
case mongoose_domain_sql:select_domain(Domain) of
{ok, DomainDetails} ->
maps:merge(M, DomainDetails);
{error, not_found} ->
{not_found, <<"Given domain does not exist">>}
end.
do_insert_domain(M = #{domain := Domain, host_type := HostType}) ->
case mongoose_domain_sql:insert_domain(Domain, HostType) of
ok ->
M;
{error, duplicate} ->
{duplicate, <<"Domain already exists">>}
end.
set_domain_for_deletion(M = #{domain := Domain, host_type := HostType}) ->
case mongoose_domain_sql:set_domain_for_deletion(Domain, HostType) of
ok ->
M;
{error, wrong_host_type} ->
{wrong_host_type, <<"Wrong host type was provided">>};
{error, not_found} ->
{not_found, <<"Given domain does not exist">>}
end.
force_check(M) ->
service_domain_db:force_check_for_updates(),
M.
do_delete_domain(M = #{domain := Domain, host_type := HostType, request_type := RequestType}) ->
mongoose_domain_sql:delete_domain_admin(Domain),
case RequestType of
sync ->
do_delete_domain_in_progress(Domain, HostType),
M#{status => deleted};
async ->
mongoose_domain_db_cleaner:request_delete_domain(Domain, HostType),
M#{status => deleting}
end.
set_status(M = #{domain := Domain, status := Status}) ->
case mongoose_domain_sql:set_status(Domain, Status) of
{error, unknown_host_type} ->
{unknown_host_type, <<"Unknown host type">>};
{error, domain_deleted} ->
{deleted, <<"Domain has been deleted">>};
{error, not_found} ->
{not_found, <<"Given domain does not exist">>};
ok ->
M
end.
return_domain(M) ->
{ok, maps:with([domain, host_type, status], M)}.
get_domains(#{host_type := HostType}) ->
{ok, get_domains_by_host_type(HostType)}.
-spec do_delete_domain_in_progress(domain(), host_type()) -> ok.
do_delete_domain_in_progress(Domain, HostType) ->
#{failed := []} = mongoose_hooks:remove_domain(HostType, Domain),
ok = mongoose_domain_sql:delete_domain(Domain, HostType).
Domain should be nameprepped using ` jid : '
-spec get_host_type(domain()) ->
{ok, host_type()} | {error, not_found}.
get_host_type(Domain) ->
case get_domain_host_type(Domain) of
{ok, HostType} -> {ok, HostType};
{error, not_found} ->
get_subdomain_host_type(Domain)
end.
Domain should be nameprepped using ` jid : '
-spec get_domain_host_type(domain()) ->
{ok, host_type()} | {error, not_found}.
get_domain_host_type(Domain) ->
mongoose_domain_core:get_host_type(Domain).
Subdomain should be nameprepped using ` jid : '
-spec get_subdomain_host_type(domain()) ->
{ok, host_type()} | {error, not_found}.
get_subdomain_host_type(Subdomain) ->
mongoose_subdomain_core:get_host_type(Subdomain).
Subdomain should be nameprepped using ` jid : '
-spec get_subdomain_info(domain()) ->
{ok, mongoose_subdomain_core:subdomain_info()} | {error, not_found}.
get_subdomain_info(Subdomain) ->
mongoose_subdomain_core:get_subdomain_info(Subdomain).
-spec get_all_static() -> [{domain(), host_type()}].
get_all_static() ->
mongoose_domain_core:get_all_static().
Get domains , loaded from DB to this node
-spec get_all_dynamic() -> [{domain(), host_type()}].
get_all_dynamic() ->
mongoose_domain_core:get_all_dynamic().
-spec get_domains_by_host_type(host_type()) -> [domain()].
get_domains_by_host_type(HostType) ->
mongoose_domain_core:get_domains_by_host_type(HostType).
-type password() :: binary().
-spec check_domain_password(domain(), password()) -> ok | {error, wrong_password | not_found}.
check_domain_password(Domain, Password) ->
case mongoose_domain_sql:select_domain_admin(Domain) of
{ok, {Domain, PassDetails}} ->
case do_check_domain_password(Password, PassDetails) of
true ->
ok;
false ->
{error, wrong_password}
end;
{error, not_found} ->
{error, not_found}
end.
do_check_domain_password(Password, PassDetails) ->
case mongoose_scram:deserialize(PassDetails) of
{ok, Scram} ->
mongoose_scram:check_password(Password, Scram);
{error, _Reason} ->
false
end.
-spec set_domain_password(domain(), password()) -> {ok | not_found, iodata()}.
set_domain_password(Domain, Password) ->
case get_host_type(Domain) of
{ok, _} ->
ok = mongoose_domain_sql:set_domain_admin(Domain, Password),
{ok, <<"Domain password set successfully">>};
{error, not_found} ->
{not_found, <<"Given domain does not exist or is disabled">>}
end.
-spec delete_domain_password(domain()) -> {ok, iodata()}.
delete_domain_password(Domain) ->
case mongoose_domain_sql:delete_domain_admin(Domain) of
ok ->
{ok, <<"Domain password deleted successfully">>};
{error, not_found} ->
{not_found, <<"Domain password does not exist">>}
end.
-spec register_subdomain(host_type(), subdomain_pattern(),
mongoose_packet_handler:t()) ->
ok | {error, already_registered | subdomain_already_exists}.
register_subdomain(HostType, SubdomainPattern, PacketHandler) ->
mongoose_subdomain_core:register_subdomain(HostType, SubdomainPattern,
PacketHandler).
-spec unregister_subdomain(host_type(), subdomain_pattern()) -> ok.
unregister_subdomain(HostType, SubdomainPattern) ->
mongoose_subdomain_core:unregister_subdomain(HostType, SubdomainPattern).
-spec get_all_subdomains_for_domain(domain()) ->
[mongoose_subdomain_core:subdomain_info()].
get_all_subdomains_for_domain(Domain) ->
mongoose_subdomain_core:get_all_subdomains_for_domain(Domain).
-spec remove_domain_wrapper(remove_domain_acc(), fun(() -> remove_domain_acc()), module()) ->
{ok | stop, remove_domain_acc()}.
remove_domain_wrapper(Acc, F, Module) ->
try F() of
Acc -> {ok, Acc}
catch C:R:S ->
?LOG_ERROR(#{what => hook_failed,
text => <<"Error running hook">>,
module => Module,
class => C, reason => R, stacktrace => S}),
{stop, Acc#{failed := [Module | maps:get(failed, Acc)]}}
end.
fold({_, _} = Result, _) ->
Result;
fold(M, [Step | Rest]) when is_map(M) ->
fold(Step(M), Rest).
|
f360523910b56084c731c6416e29e357d01916cf2edd0dfbf67853c3defb7d93 | jrheard/voke | state.cljs | (ns voke.state
"Contains functions that let Systems express an intent to modify the state of the game.
Systems can call add-entity!, update-entity!, and remove-entity! in their tick functions / event handlers.
These updates/removes will be queued, and will be processed by the core game loop in voke.core
at the end of every frame."
(:require [cljs.spec :as s]
[voke.events :refer [publish-event]]
[voke.specs]))
Specs
(s/def :game-state-event/type #{:add :remove :update})
(s/def :game-state-event/origin keyword?)
(s/def :game-state-event/entity :entity/entity)
(s/def :game-state-event/entity-id :entity/id)
(s/def :game-state-event/update-fn fn?)
(s/def :game-state-event/mode :game-state/mode)
(def -base-event-keys [:game-state-event/type :game-state-event/origin])
(defmulti event-type :game-state-event/type)
(defmethod event-type :add [_]
(s/keys :req (conj -base-event-keys :game-state-event/entity)))
(defmethod event-type :update [_]
(s/keys :req (conj -base-event-keys :game-state-event/entity-id :game-state-event/update-fn)))
(defmethod event-type :remove [_]
(s/keys :req (conj -base-event-keys :game-state-event/entity-id)))
(defmethod event-type :mode [_]
(s/keys :req (conj -base-event-keys :game-state-event/mode)))
(s/def :game-state-event/event (s/multi-spec event-type :game-state-event/type))
;; Private
(defn- event-with-type [event-type]
(s/and :game-state-event/event
#(= (% :game-state-event/type) event-type)))
(def ^:private buffer (atom []))
(defn process-events
[state
event-processing-fn
events]
(assoc state
:game-state/entities
TODO use into / transducers
(persistent!
(reduce event-processing-fn
(transient (state :game-state/entities))
events))))
(s/fdef process-events
:args (s/cat :state :game-state/game-state
:event-processing-fn fn?
:events (s/coll-of :game-state-event/event)))
(defn process-add-events
[state
add-events]
(process-events state
(fn [entities event]
(let [entity (event :game-state-event/entity)]
(publish-event {:event/type :entity-added
:entity entity})
(assoc! entities (entity :entity/id) entity)))
add-events))
(s/fdef process-add-events
:args (s/cat :state :game-state/game-state
:add-events (s/coll-of (event-with-type :add))))
(defn process-update-events
[state
update-events]
(process-events state
(fn [entities event]
(let [entity-id (event :game-state-event/entity-id)]
(assoc! entities
entity-id
((event :game-state-event/update-fn) (get entities entity-id)))))
update-events))
(s/fdef process-update-events
:args (s/cat :state :game-state/game-state
:update-events (s/coll-of (event-with-type :update))))
(defn process-remove-events
[state
remove-events]
(process-events state
(fn [entities remove-event]
(let [entity-id (remove-event :game-state-event/entity-id)]
(publish-event {:event/type :entity-removed
:entity-id entity-id})
(dissoc! entities entity-id)))
remove-events))
(s/fdef process-remove-events
:args (s/cat :state :game-state/game-state
:remove-events (s/coll-of (event-with-type :remove))))
(defn process-mode-events
[state mode-events]
(if (seq mode-events)
(assoc state
:game-state/mode
((last mode-events) :game-state-event/mode))
state))
;; Public (but only intended to be used by voke.core)
(defn make-game-state
[entities]
{:game-state/entities (into {}
(map (juxt :entity/id identity)
entities))
:game-state/mode :default})
(s/fdef make-game-state
:args (s/cat :entities (s/coll-of :entity/entity))
:ret :game-state/game-state)
(defn flush!
"Takes a :game-state/game-state and returns a :game-state/game-state to which all queued
:game-state-event/events have been applied.
Resets @buffer to []."
[state]
; i'm sure there's a more concise way to write this
(let [buffer-contents @buffer
add-events (filter #(= (% :game-state-event/type) :add) buffer-contents)
update-events (filter #(= (% :game-state-event/type) :update) buffer-contents)
remove-events (filter #(= (% :game-state-event/type) :remove) buffer-contents)
mode-events (filter #(= (% :game-state-event/type) :mode) buffer-contents)]
(reset! buffer [])
(-> state
(process-add-events add-events)
(process-update-events update-events)
(process-remove-events remove-events)
(process-mode-events mode-events))))
(s/fdef flush!
:args (s/cat :state :game-state/game-state)
:ret :game-state/game-state)
;; Public
(defn add-entity!
[entity
origin]
(swap! buffer conj #:game-state-event {:type :add
:origin origin
:entity entity}))
(s/fdef add-entity!
:args (s/cat :entity :entity/entity
:origin keyword?))
(defn update-entity!
"Queue an update to a particular entity.
Mainly intended to be used by systems' event handlers. System tick functions should return modified entities
rather than calling this function."
[entity-id
origin
update-fn]
this swap ! call takes a bit longer than i 'd like , shows up in profiles at around 3 % of
; the time we spend. consider replacing the buffer with (gasp) a regular js array. not worth it yet though
(swap! buffer conj #:game-state-event {:type :update
:origin origin
:entity-id entity-id
:update-fn update-fn}))
(s/fdef update-entity!
:args (s/cat :entity-id :entity/id
:origin keyword?
:update-fn fn?))
(defn remove-entity!
"Queue an entity's removal from the game."
[entity-id
origin]
(swap! buffer conj #:game-state-event {:type :remove
:origin origin
:entity-id entity-id}))
(s/fdef remove-entity!
:args (s/cat :entity-id :entity/id
:origin keyword?))
(defn update-mode!
[mode origin]
(swap! buffer conj #:game-state-event {:type :mode
:origin origin
:mode mode}))
| null | https://raw.githubusercontent.com/jrheard/voke/15b272955d214ce0c531fb2b8d645feb217255c2/src/voke/state.cljs | clojure | Private
Public (but only intended to be used by voke.core)
i'm sure there's a more concise way to write this
Public
the time we spend. consider replacing the buffer with (gasp) a regular js array. not worth it yet though | (ns voke.state
"Contains functions that let Systems express an intent to modify the state of the game.
Systems can call add-entity!, update-entity!, and remove-entity! in their tick functions / event handlers.
These updates/removes will be queued, and will be processed by the core game loop in voke.core
at the end of every frame."
(:require [cljs.spec :as s]
[voke.events :refer [publish-event]]
[voke.specs]))
Specs
(s/def :game-state-event/type #{:add :remove :update})
(s/def :game-state-event/origin keyword?)
(s/def :game-state-event/entity :entity/entity)
(s/def :game-state-event/entity-id :entity/id)
(s/def :game-state-event/update-fn fn?)
(s/def :game-state-event/mode :game-state/mode)
(def -base-event-keys [:game-state-event/type :game-state-event/origin])
(defmulti event-type :game-state-event/type)
(defmethod event-type :add [_]
(s/keys :req (conj -base-event-keys :game-state-event/entity)))
(defmethod event-type :update [_]
(s/keys :req (conj -base-event-keys :game-state-event/entity-id :game-state-event/update-fn)))
(defmethod event-type :remove [_]
(s/keys :req (conj -base-event-keys :game-state-event/entity-id)))
(defmethod event-type :mode [_]
(s/keys :req (conj -base-event-keys :game-state-event/mode)))
(s/def :game-state-event/event (s/multi-spec event-type :game-state-event/type))
(defn- event-with-type [event-type]
(s/and :game-state-event/event
#(= (% :game-state-event/type) event-type)))
(def ^:private buffer (atom []))
(defn process-events
[state
event-processing-fn
events]
(assoc state
:game-state/entities
TODO use into / transducers
(persistent!
(reduce event-processing-fn
(transient (state :game-state/entities))
events))))
(s/fdef process-events
:args (s/cat :state :game-state/game-state
:event-processing-fn fn?
:events (s/coll-of :game-state-event/event)))
(defn process-add-events
[state
add-events]
(process-events state
(fn [entities event]
(let [entity (event :game-state-event/entity)]
(publish-event {:event/type :entity-added
:entity entity})
(assoc! entities (entity :entity/id) entity)))
add-events))
(s/fdef process-add-events
:args (s/cat :state :game-state/game-state
:add-events (s/coll-of (event-with-type :add))))
(defn process-update-events
[state
update-events]
(process-events state
(fn [entities event]
(let [entity-id (event :game-state-event/entity-id)]
(assoc! entities
entity-id
((event :game-state-event/update-fn) (get entities entity-id)))))
update-events))
(s/fdef process-update-events
:args (s/cat :state :game-state/game-state
:update-events (s/coll-of (event-with-type :update))))
(defn process-remove-events
[state
remove-events]
(process-events state
(fn [entities remove-event]
(let [entity-id (remove-event :game-state-event/entity-id)]
(publish-event {:event/type :entity-removed
:entity-id entity-id})
(dissoc! entities entity-id)))
remove-events))
(s/fdef process-remove-events
:args (s/cat :state :game-state/game-state
:remove-events (s/coll-of (event-with-type :remove))))
(defn process-mode-events
[state mode-events]
(if (seq mode-events)
(assoc state
:game-state/mode
((last mode-events) :game-state-event/mode))
state))
(defn make-game-state
[entities]
{:game-state/entities (into {}
(map (juxt :entity/id identity)
entities))
:game-state/mode :default})
(s/fdef make-game-state
:args (s/cat :entities (s/coll-of :entity/entity))
:ret :game-state/game-state)
(defn flush!
"Takes a :game-state/game-state and returns a :game-state/game-state to which all queued
:game-state-event/events have been applied.
Resets @buffer to []."
[state]
(let [buffer-contents @buffer
add-events (filter #(= (% :game-state-event/type) :add) buffer-contents)
update-events (filter #(= (% :game-state-event/type) :update) buffer-contents)
remove-events (filter #(= (% :game-state-event/type) :remove) buffer-contents)
mode-events (filter #(= (% :game-state-event/type) :mode) buffer-contents)]
(reset! buffer [])
(-> state
(process-add-events add-events)
(process-update-events update-events)
(process-remove-events remove-events)
(process-mode-events mode-events))))
(s/fdef flush!
:args (s/cat :state :game-state/game-state)
:ret :game-state/game-state)
(defn add-entity!
[entity
origin]
(swap! buffer conj #:game-state-event {:type :add
:origin origin
:entity entity}))
(s/fdef add-entity!
:args (s/cat :entity :entity/entity
:origin keyword?))
(defn update-entity!
"Queue an update to a particular entity.
Mainly intended to be used by systems' event handlers. System tick functions should return modified entities
rather than calling this function."
[entity-id
origin
update-fn]
this swap ! call takes a bit longer than i 'd like , shows up in profiles at around 3 % of
(swap! buffer conj #:game-state-event {:type :update
:origin origin
:entity-id entity-id
:update-fn update-fn}))
(s/fdef update-entity!
:args (s/cat :entity-id :entity/id
:origin keyword?
:update-fn fn?))
(defn remove-entity!
"Queue an entity's removal from the game."
[entity-id
origin]
(swap! buffer conj #:game-state-event {:type :remove
:origin origin
:entity-id entity-id}))
(s/fdef remove-entity!
:args (s/cat :entity-id :entity/id
:origin keyword?))
(defn update-mode!
[mode origin]
(swap! buffer conj #:game-state-event {:type :mode
:origin origin
:mode mode}))
|
7d1b7af6e969480de8ec162dcc5f2c16fbe1ba025d38776de53256b800549c21 | robrix/sequoia | Assertion.hs | {-# LANGUAGE ConstraintKinds #-}
module Sequoia.Calculus.Assertion
( -- * Assertion
AssertionIntro
-- * Re-exports
, module Sequoia.Calculus.NotUntrue
, module Sequoia.Calculus.True
) where
import Sequoia.Calculus.NotUntrue
import Sequoia.Calculus.True
type AssertionIntro s = (NotUntrueIntro s, TrueIntro s)
| null | https://raw.githubusercontent.com/robrix/sequoia/ab008900ca0d7eece2e693d921a31e2e2518b15d/src/Sequoia/Calculus/Assertion.hs | haskell | # LANGUAGE ConstraintKinds #
* Assertion
* Re-exports | module Sequoia.Calculus.Assertion
AssertionIntro
, module Sequoia.Calculus.NotUntrue
, module Sequoia.Calculus.True
) where
import Sequoia.Calculus.NotUntrue
import Sequoia.Calculus.True
type AssertionIntro s = (NotUntrueIntro s, TrueIntro s)
|
70ee951e6f2100553e7fae8ab50209bbcb532fd29e42a985dc7983a08884ef47 | ocaml/oasis | OASISVersion.ml | (******************************************************************************)
OASIS : architecture for building OCaml libraries and applications
(* *)
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
the Free Software Foundation ; either version 2.1 of the License , or ( at
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *)
(* or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more *)
(* details. *)
(* *)
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
(******************************************************************************)
open OASISGettext
type t = string
type comparator =
| VGreater of t
| VGreaterEqual of t
| VEqual of t
| VLesser of t
| VLesserEqual of t
| VOr of comparator * comparator
| VAnd of comparator * comparator
(* Range of allowed characters *)
let is_digit c = '0' <= c && c <= '9'
let is_alpha c = ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')
let is_special = function | '.' | '+' | '-' | '~' -> true | _ -> false
let rec version_compare v1 v2 =
if v1 <> "" || v2 <> "" then
begin
(* Compare ascii string, using special meaning for version
* related char
*)
let val_ascii c =
if c = '~' then -1
else if is_digit c then 0
else if c = '\000' then 0
else if is_alpha c then Char.code c
else (Char.code c) + 256
in
let len1 = String.length v1 in
let len2 = String.length v2 in
let p = ref 0 in
(** Compare ascii part *)
let compare_vascii () =
let cmp = ref 0 in
while !cmp = 0 &&
!p < len1 && !p < len2 &&
not (is_digit v1.[!p] && is_digit v2.[!p]) do
cmp := (val_ascii v1.[!p]) - (val_ascii v2.[!p]);
incr p
done;
if !cmp = 0 && !p < len1 && !p = len2 then
val_ascii v1.[!p]
else if !cmp = 0 && !p = len1 && !p < len2 then
- (val_ascii v2.[!p])
else
!cmp
in
(** Compare digit part *)
let compare_digit () =
let extract_int v p =
let start_p = !p in
while !p < String.length v && is_digit v.[!p] do
incr p
done;
let substr =
String.sub v !p ((String.length v) - !p)
in
let res =
match String.sub v start_p (!p - start_p) with
| "" -> 0
| s -> int_of_string s
in
res, substr
in
let i1, tl1 = extract_int v1 (ref !p) in
let i2, tl2 = extract_int v2 (ref !p) in
i1 - i2, tl1, tl2
in
match compare_vascii () with
| 0 ->
begin
match compare_digit () with
| 0, tl1, tl2 ->
if tl1 <> "" && is_digit tl1.[0] then
1
else if tl2 <> "" && is_digit tl2.[0] then
-1
else
version_compare tl1 tl2
| n, _, _ ->
n
end
| n ->
n
end
else begin
0
end
let version_of_string str = str
let string_of_version t = t
let chop t =
try
let pos =
String.rindex t '.'
in
String.sub t 0 pos
with Not_found ->
t
let rec comparator_apply v op =
match op with
| VGreater cv ->
(version_compare v cv) > 0
| VGreaterEqual cv ->
(version_compare v cv) >= 0
| VLesser cv ->
(version_compare v cv) < 0
| VLesserEqual cv ->
(version_compare v cv) <= 0
| VEqual cv ->
(version_compare v cv) = 0
| VOr (op1, op2) ->
(comparator_apply v op1) || (comparator_apply v op2)
| VAnd (op1, op2) ->
(comparator_apply v op1) && (comparator_apply v op2)
let rec string_of_comparator =
function
| VGreater v -> "> "^(string_of_version v)
| VEqual v -> "= "^(string_of_version v)
| VLesser v -> "< "^(string_of_version v)
| VGreaterEqual v -> ">= "^(string_of_version v)
| VLesserEqual v -> "<= "^(string_of_version v)
| VOr (c1, c2) ->
(string_of_comparator c1)^" || "^(string_of_comparator c2)
| VAnd (c1, c2) ->
(string_of_comparator c1)^" && "^(string_of_comparator c2)
let rec varname_of_comparator =
let concat p v =
OASISUtils.varname_concat
p
(OASISUtils.varname_of_string
(string_of_version v))
in
function
| VGreater v -> concat "gt" v
| VLesser v -> concat "lt" v
| VEqual v -> concat "eq" v
| VGreaterEqual v -> concat "ge" v
| VLesserEqual v -> concat "le" v
| VOr (c1, c2) ->
(varname_of_comparator c1)^"_or_"^(varname_of_comparator c2)
| VAnd (c1, c2) ->
(varname_of_comparator c1)^"_and_"^(varname_of_comparator c2)
(* END EXPORT *)
open OASISUtils
open OASISVersion_types
module StringVersion =
struct
type t = string
let to_version = version_of_string
let compare s1 s2 = version_compare (to_version s1) (to_version s2)
let comparator_ge s c_opt =
let rec comparator_ge' v' =
let cmp v = version_compare v v' >= 0 in
function
| VEqual v
| VGreaterEqual v
| VGreater v -> cmp v
| VLesserEqual _
| VLesser _ -> false
| VOr (c1, c2) -> comparator_ge' v' c1 || comparator_ge' v' c2
| VAnd (c1, c2) -> comparator_ge' v' c1 && comparator_ge' v' c2
in
match c_opt with
| Some c -> comparator_ge' (to_version s) c
| None -> false
end
let comparator_of_string str =
let lexbuf =
Lexing.from_string str
in
let rec parse_aux =
function
| VCAnd (c1, c2) -> VAnd (parse_aux c1, parse_aux c2)
| VCOr (c1, c2) -> VOr (parse_aux c1, parse_aux c2)
| VCGt s -> VGreater (version_of_string s)
| VCGe s -> VGreaterEqual (version_of_string s)
| VCEq s -> VEqual (version_of_string s)
| VCLt s -> VLesser (version_of_string s)
| VCLe s -> VLesserEqual (version_of_string s)
in
try
parse_aux
(OASISVersion_parser.main
OASISVersion_lexer.token lexbuf)
with e ->
failwithf
(f_ "Error while parsing '%s': %s")
str
(Printexc.to_string e)
(* The comparator_reduce function transforms the given comparator into its
* disjunctive normal form considering, with all version in ascending order. It
* uses intervals of version to combine the terms of the comparator.
*)
let comparator_reduce =
(* Compare endpoints *)
let cmp_norm e1 e2 =
match e1, e2 with
| `BeforeFirst, `BeforeFirst | `AfterLast, `AfterLast -> `EQ
| `BeforeFirst, _ | _, `AfterLast -> `AB
| _, `BeforeFirst | `AfterLast, _ -> `BA
| `Version v1, `Version v2 ->
let d = version_compare v1 v2 in
if d = 0 then `EQ else if d < 0 then `AB else `BA
in
let split e1 e2 e3 tl =
match e2 with
| `Version v2 -> `Interval(e1, e2) :: `Point v2 :: `Interval(e2, e3) :: tl
| _ -> assert false
in
let pushif op acc b1 b2 e = if op b1 b2 then e :: acc else acc in
Combine heads of intervals and continue processing .
let rec combine op acc hd1 tl1 hd2 tl2 =
let id, cons, pacc = (fun i -> i), (fun i j -> i :: j), pushif op acc in
let m ?(acc=acc) ?(f1=id) ?(f2=id) () = merge op acc (f1 tl1) (f2 tl2) in
match hd1, hd2 with
| `Interval(e1, e2), `Interval (e3, e4) ->
begin
match cmp_norm e3 e1, cmp_norm e1 e4, cmp_norm e2 e4 with
| `BA, _, _ -> combine op acc hd2 tl2 hd1 tl1
| `EQ, _, `EQ -> m ~acc:(pacc true true hd1) ()
| `AB, `EQ, _ -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
| `AB, `BA, _ -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
| `AB, `AB, `BA -> m ~f1:(split e1 e4 e2) ~f2:(split e3 e1 e4) ()
| `AB, `AB, `EQ -> m ~f1:(cons hd1) ~f2:(split e3 e1 e4) ()
| `AB, `AB, `AB -> m ~f1:(cons hd1) ~f2:(split e3 e1 e4) ()
| `EQ, _, `BA -> m ~f1:(split e1 e4 e2) ~f2:(cons hd2) ()
| `EQ, _, `AB -> m ~f1:(cons hd1) ~f2:(split e3 e2 e4) ()
end
| `Interval(e1, e2), `Point v3 ->
begin
let e3 = `Version v3 in
match cmp_norm e3 e1, cmp_norm e2 e3 with
| `BA, `BA -> m ~f1:(split e1 e3 e2) ~f2:(cons hd2) ()
| (`EQ | `AB), _ -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
| _, (`EQ | `AB) -> m ~acc:(pacc true false hd1) ~f2:(cons hd2) ()
end
| `Point v1, `Point v2 ->
begin
match cmp_norm (`Version v1) (`Version v2) with
| `EQ -> m ~acc:(pacc true true hd1) ()
| `AB -> m ~acc:(pacc true false hd1) ~f2:(cons hd2) ()
| `BA -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
end
| `Point _, `Interval _ -> combine op acc hd2 tl2 hd1 tl1
(* Reduce a list of segment when we can find some patterns. *)
and reduce acc i =
match i with
| `Interval(e1, `Version v2) :: `Point v3
:: `Interval(`Version v4, e5) :: tl when v2 = v3 && v3 = v4 ->
reduce [] (List.rev_append acc (`Interval(e1, e5) :: tl))
| hd :: tl -> reduce (hd :: acc) tl
| [] -> List.rev acc
and merge op acc i1 i2 =
match i1, i2 with
| hd1 :: tl1, hd2 :: tl2 -> combine op acc hd1 tl1 hd2 tl2
| hd :: tl, [] -> merge op (pushif op acc true false hd) tl []
| [], hd :: tl -> merge op (pushif op acc false true hd) [] tl
| [], [] -> reduce [] (List.rev acc)
in
let rec of_comparator =
function
| VGreater v -> [`Interval(`Version v, `AfterLast)]
| VLesser v -> [`Interval(`BeforeFirst, `Version v)]
| VEqual v -> [`Point v]
| VGreaterEqual v -> [`Point v; `Interval(`Version v, `AfterLast)]
| VLesserEqual v -> [`Interval(`BeforeFirst, `Version v); `Point v]
| VOr (c1, c2) -> merge ( || ) [] (of_comparator c1) (of_comparator c2)
| VAnd (c1, c2) -> merge ( && ) [] (of_comparator c1) (of_comparator c2)
in
let to_comparator i =
let cmp_true = VOr(VLesserEqual "0", VGreaterEqual "0") in
let rec close_interval acc i c e =
match e, i with
| `Version v1, `Point v2 :: tl when v1 = v2 ->
combine_intervals acc tl c (VLesserEqual v1)
| `Version v, _ -> combine_intervals acc i c (VLesser v)
| `AfterLast, _ -> combine_intervals acc i c cmp_true
| `BeforeFirst, _ -> assert false
and combine_intervals acc i c1 c2 =
let vor c1 c2 = if c1 = cmp_true then c2 else VOr(c1, c2) in
match c1 = cmp_true, c2 = cmp_true with
| true, true -> map_intervals cmp_true i
| true, false -> map_intervals (vor acc c2) i
| false, true -> map_intervals (vor acc c1) i
| false, false -> map_intervals (vor acc (VAnd(c1, c2))) i
and map_intervals acc i =
match i with
| `Point v1 :: `Interval(`Version v2, e3) :: tl when v1 = v2 ->
close_interval acc tl (VGreaterEqual v2) e3
| `Interval(`BeforeFirst, e) :: tl -> close_interval acc tl cmp_true e
| `Interval(`Version v, e) :: tl -> close_interval acc tl (VGreater v) e
| `Interval(`AfterLast, _) :: _ -> assert false
| `Point v :: tl -> combine_intervals acc tl (VEqual v) cmp_true
| [] -> assert (acc <> cmp_true); acc
in
map_intervals cmp_true i
in
fun v -> to_comparator (of_comparator v)
open OASISValues
let value =
{
parse = (fun ~ctxt:_ s -> version_of_string s);
update = update_fail;
print = string_of_version;
}
let comparator_value =
{
parse = (fun ~ctxt:_ s -> comparator_of_string s);
update = update_fail;
print = string_of_comparator;
}
let odn_of_t = OASISDataNotation.of_string
let rec odn_of_comparator =
let open OASISDataNotation in
function
| VGreater v0 -> VRT ("OASISVersion.VGreater", [ odn_of_t v0 ])
| VGreaterEqual v0 ->
VRT ("OASISVersion.VGreaterEqual", [ odn_of_t v0 ])
| VEqual v0 -> VRT ("OASISVersion.VEqual", [ odn_of_t v0 ])
| VLesser v0 -> VRT ("OASISVersion.VLesser", [ odn_of_t v0 ])
| VLesserEqual v0 -> VRT ("OASISVersion.VLesserEqual", [ odn_of_t v0 ])
| VOr ((v1, v0)) ->
VRT ("OASISVersion.VOr",
[ odn_of_comparator v1; odn_of_comparator v0 ])
| VAnd ((v1, v0)) ->
VRT ("OASISVersion.VAnd",
[ odn_of_comparator v1; odn_of_comparator v0 ])
| null | https://raw.githubusercontent.com/ocaml/oasis/3d1a9421db92a0882ebc58c5df219b18c1e5681d/src/oasis/OASISVersion.ml | ocaml | ****************************************************************************
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
your option) any later version, with the OCaml static compilation
exception.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more
details.
****************************************************************************
Range of allowed characters
Compare ascii string, using special meaning for version
* related char
* Compare ascii part
* Compare digit part
END EXPORT
The comparator_reduce function transforms the given comparator into its
* disjunctive normal form considering, with all version in ascending order. It
* uses intervals of version to combine the terms of the comparator.
Compare endpoints
Reduce a list of segment when we can find some patterns. | OASIS : architecture for building OCaml libraries and applications
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
the Free Software Foundation ; either version 2.1 of the License , or ( at
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
open OASISGettext
type t = string
type comparator =
| VGreater of t
| VGreaterEqual of t
| VEqual of t
| VLesser of t
| VLesserEqual of t
| VOr of comparator * comparator
| VAnd of comparator * comparator
let is_digit c = '0' <= c && c <= '9'
let is_alpha c = ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')
let is_special = function | '.' | '+' | '-' | '~' -> true | _ -> false
let rec version_compare v1 v2 =
if v1 <> "" || v2 <> "" then
begin
let val_ascii c =
if c = '~' then -1
else if is_digit c then 0
else if c = '\000' then 0
else if is_alpha c then Char.code c
else (Char.code c) + 256
in
let len1 = String.length v1 in
let len2 = String.length v2 in
let p = ref 0 in
let compare_vascii () =
let cmp = ref 0 in
while !cmp = 0 &&
!p < len1 && !p < len2 &&
not (is_digit v1.[!p] && is_digit v2.[!p]) do
cmp := (val_ascii v1.[!p]) - (val_ascii v2.[!p]);
incr p
done;
if !cmp = 0 && !p < len1 && !p = len2 then
val_ascii v1.[!p]
else if !cmp = 0 && !p = len1 && !p < len2 then
- (val_ascii v2.[!p])
else
!cmp
in
let compare_digit () =
let extract_int v p =
let start_p = !p in
while !p < String.length v && is_digit v.[!p] do
incr p
done;
let substr =
String.sub v !p ((String.length v) - !p)
in
let res =
match String.sub v start_p (!p - start_p) with
| "" -> 0
| s -> int_of_string s
in
res, substr
in
let i1, tl1 = extract_int v1 (ref !p) in
let i2, tl2 = extract_int v2 (ref !p) in
i1 - i2, tl1, tl2
in
match compare_vascii () with
| 0 ->
begin
match compare_digit () with
| 0, tl1, tl2 ->
if tl1 <> "" && is_digit tl1.[0] then
1
else if tl2 <> "" && is_digit tl2.[0] then
-1
else
version_compare tl1 tl2
| n, _, _ ->
n
end
| n ->
n
end
else begin
0
end
let version_of_string str = str
let string_of_version t = t
let chop t =
try
let pos =
String.rindex t '.'
in
String.sub t 0 pos
with Not_found ->
t
let rec comparator_apply v op =
match op with
| VGreater cv ->
(version_compare v cv) > 0
| VGreaterEqual cv ->
(version_compare v cv) >= 0
| VLesser cv ->
(version_compare v cv) < 0
| VLesserEqual cv ->
(version_compare v cv) <= 0
| VEqual cv ->
(version_compare v cv) = 0
| VOr (op1, op2) ->
(comparator_apply v op1) || (comparator_apply v op2)
| VAnd (op1, op2) ->
(comparator_apply v op1) && (comparator_apply v op2)
let rec string_of_comparator =
function
| VGreater v -> "> "^(string_of_version v)
| VEqual v -> "= "^(string_of_version v)
| VLesser v -> "< "^(string_of_version v)
| VGreaterEqual v -> ">= "^(string_of_version v)
| VLesserEqual v -> "<= "^(string_of_version v)
| VOr (c1, c2) ->
(string_of_comparator c1)^" || "^(string_of_comparator c2)
| VAnd (c1, c2) ->
(string_of_comparator c1)^" && "^(string_of_comparator c2)
let rec varname_of_comparator =
let concat p v =
OASISUtils.varname_concat
p
(OASISUtils.varname_of_string
(string_of_version v))
in
function
| VGreater v -> concat "gt" v
| VLesser v -> concat "lt" v
| VEqual v -> concat "eq" v
| VGreaterEqual v -> concat "ge" v
| VLesserEqual v -> concat "le" v
| VOr (c1, c2) ->
(varname_of_comparator c1)^"_or_"^(varname_of_comparator c2)
| VAnd (c1, c2) ->
(varname_of_comparator c1)^"_and_"^(varname_of_comparator c2)
open OASISUtils
open OASISVersion_types
module StringVersion =
struct
type t = string
let to_version = version_of_string
let compare s1 s2 = version_compare (to_version s1) (to_version s2)
let comparator_ge s c_opt =
let rec comparator_ge' v' =
let cmp v = version_compare v v' >= 0 in
function
| VEqual v
| VGreaterEqual v
| VGreater v -> cmp v
| VLesserEqual _
| VLesser _ -> false
| VOr (c1, c2) -> comparator_ge' v' c1 || comparator_ge' v' c2
| VAnd (c1, c2) -> comparator_ge' v' c1 && comparator_ge' v' c2
in
match c_opt with
| Some c -> comparator_ge' (to_version s) c
| None -> false
end
let comparator_of_string str =
let lexbuf =
Lexing.from_string str
in
let rec parse_aux =
function
| VCAnd (c1, c2) -> VAnd (parse_aux c1, parse_aux c2)
| VCOr (c1, c2) -> VOr (parse_aux c1, parse_aux c2)
| VCGt s -> VGreater (version_of_string s)
| VCGe s -> VGreaterEqual (version_of_string s)
| VCEq s -> VEqual (version_of_string s)
| VCLt s -> VLesser (version_of_string s)
| VCLe s -> VLesserEqual (version_of_string s)
in
try
parse_aux
(OASISVersion_parser.main
OASISVersion_lexer.token lexbuf)
with e ->
failwithf
(f_ "Error while parsing '%s': %s")
str
(Printexc.to_string e)
let comparator_reduce =
let cmp_norm e1 e2 =
match e1, e2 with
| `BeforeFirst, `BeforeFirst | `AfterLast, `AfterLast -> `EQ
| `BeforeFirst, _ | _, `AfterLast -> `AB
| _, `BeforeFirst | `AfterLast, _ -> `BA
| `Version v1, `Version v2 ->
let d = version_compare v1 v2 in
if d = 0 then `EQ else if d < 0 then `AB else `BA
in
let split e1 e2 e3 tl =
match e2 with
| `Version v2 -> `Interval(e1, e2) :: `Point v2 :: `Interval(e2, e3) :: tl
| _ -> assert false
in
let pushif op acc b1 b2 e = if op b1 b2 then e :: acc else acc in
Combine heads of intervals and continue processing .
let rec combine op acc hd1 tl1 hd2 tl2 =
let id, cons, pacc = (fun i -> i), (fun i j -> i :: j), pushif op acc in
let m ?(acc=acc) ?(f1=id) ?(f2=id) () = merge op acc (f1 tl1) (f2 tl2) in
match hd1, hd2 with
| `Interval(e1, e2), `Interval (e3, e4) ->
begin
match cmp_norm e3 e1, cmp_norm e1 e4, cmp_norm e2 e4 with
| `BA, _, _ -> combine op acc hd2 tl2 hd1 tl1
| `EQ, _, `EQ -> m ~acc:(pacc true true hd1) ()
| `AB, `EQ, _ -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
| `AB, `BA, _ -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
| `AB, `AB, `BA -> m ~f1:(split e1 e4 e2) ~f2:(split e3 e1 e4) ()
| `AB, `AB, `EQ -> m ~f1:(cons hd1) ~f2:(split e3 e1 e4) ()
| `AB, `AB, `AB -> m ~f1:(cons hd1) ~f2:(split e3 e1 e4) ()
| `EQ, _, `BA -> m ~f1:(split e1 e4 e2) ~f2:(cons hd2) ()
| `EQ, _, `AB -> m ~f1:(cons hd1) ~f2:(split e3 e2 e4) ()
end
| `Interval(e1, e2), `Point v3 ->
begin
let e3 = `Version v3 in
match cmp_norm e3 e1, cmp_norm e2 e3 with
| `BA, `BA -> m ~f1:(split e1 e3 e2) ~f2:(cons hd2) ()
| (`EQ | `AB), _ -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
| _, (`EQ | `AB) -> m ~acc:(pacc true false hd1) ~f2:(cons hd2) ()
end
| `Point v1, `Point v2 ->
begin
match cmp_norm (`Version v1) (`Version v2) with
| `EQ -> m ~acc:(pacc true true hd1) ()
| `AB -> m ~acc:(pacc true false hd1) ~f2:(cons hd2) ()
| `BA -> m ~acc:(pacc false true hd2) ~f1:(cons hd1) ()
end
| `Point _, `Interval _ -> combine op acc hd2 tl2 hd1 tl1
and reduce acc i =
match i with
| `Interval(e1, `Version v2) :: `Point v3
:: `Interval(`Version v4, e5) :: tl when v2 = v3 && v3 = v4 ->
reduce [] (List.rev_append acc (`Interval(e1, e5) :: tl))
| hd :: tl -> reduce (hd :: acc) tl
| [] -> List.rev acc
and merge op acc i1 i2 =
match i1, i2 with
| hd1 :: tl1, hd2 :: tl2 -> combine op acc hd1 tl1 hd2 tl2
| hd :: tl, [] -> merge op (pushif op acc true false hd) tl []
| [], hd :: tl -> merge op (pushif op acc false true hd) [] tl
| [], [] -> reduce [] (List.rev acc)
in
let rec of_comparator =
function
| VGreater v -> [`Interval(`Version v, `AfterLast)]
| VLesser v -> [`Interval(`BeforeFirst, `Version v)]
| VEqual v -> [`Point v]
| VGreaterEqual v -> [`Point v; `Interval(`Version v, `AfterLast)]
| VLesserEqual v -> [`Interval(`BeforeFirst, `Version v); `Point v]
| VOr (c1, c2) -> merge ( || ) [] (of_comparator c1) (of_comparator c2)
| VAnd (c1, c2) -> merge ( && ) [] (of_comparator c1) (of_comparator c2)
in
let to_comparator i =
let cmp_true = VOr(VLesserEqual "0", VGreaterEqual "0") in
let rec close_interval acc i c e =
match e, i with
| `Version v1, `Point v2 :: tl when v1 = v2 ->
combine_intervals acc tl c (VLesserEqual v1)
| `Version v, _ -> combine_intervals acc i c (VLesser v)
| `AfterLast, _ -> combine_intervals acc i c cmp_true
| `BeforeFirst, _ -> assert false
and combine_intervals acc i c1 c2 =
let vor c1 c2 = if c1 = cmp_true then c2 else VOr(c1, c2) in
match c1 = cmp_true, c2 = cmp_true with
| true, true -> map_intervals cmp_true i
| true, false -> map_intervals (vor acc c2) i
| false, true -> map_intervals (vor acc c1) i
| false, false -> map_intervals (vor acc (VAnd(c1, c2))) i
and map_intervals acc i =
match i with
| `Point v1 :: `Interval(`Version v2, e3) :: tl when v1 = v2 ->
close_interval acc tl (VGreaterEqual v2) e3
| `Interval(`BeforeFirst, e) :: tl -> close_interval acc tl cmp_true e
| `Interval(`Version v, e) :: tl -> close_interval acc tl (VGreater v) e
| `Interval(`AfterLast, _) :: _ -> assert false
| `Point v :: tl -> combine_intervals acc tl (VEqual v) cmp_true
| [] -> assert (acc <> cmp_true); acc
in
map_intervals cmp_true i
in
fun v -> to_comparator (of_comparator v)
open OASISValues
let value =
{
parse = (fun ~ctxt:_ s -> version_of_string s);
update = update_fail;
print = string_of_version;
}
let comparator_value =
{
parse = (fun ~ctxt:_ s -> comparator_of_string s);
update = update_fail;
print = string_of_comparator;
}
let odn_of_t = OASISDataNotation.of_string
let rec odn_of_comparator =
let open OASISDataNotation in
function
| VGreater v0 -> VRT ("OASISVersion.VGreater", [ odn_of_t v0 ])
| VGreaterEqual v0 ->
VRT ("OASISVersion.VGreaterEqual", [ odn_of_t v0 ])
| VEqual v0 -> VRT ("OASISVersion.VEqual", [ odn_of_t v0 ])
| VLesser v0 -> VRT ("OASISVersion.VLesser", [ odn_of_t v0 ])
| VLesserEqual v0 -> VRT ("OASISVersion.VLesserEqual", [ odn_of_t v0 ])
| VOr ((v1, v0)) ->
VRT ("OASISVersion.VOr",
[ odn_of_comparator v1; odn_of_comparator v0 ])
| VAnd ((v1, v0)) ->
VRT ("OASISVersion.VAnd",
[ odn_of_comparator v1; odn_of_comparator v0 ])
|
f9707c72056a4ee3feea897c1258eb1490077ac89fd5509f82369c3d22f3bc63 | p2k/ecoinpool | ebitcoin_chain_data.erl |
%%
Copyright ( C ) 2011 Patrick " p2k " < >
%%
This file is part of ebitcoin .
%%
%% ebitcoin is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
%% (at your option) any later version.
%%
%% ebitcoin is distributed in the hope that it will be useful,
%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%% GNU General Public License for more details.
%%
You should have received a copy of the GNU General Public License
along with ebitcoin . If not , see < / > .
%%
-module(ebitcoin_chain_data).
-include("btc_protocol_records.hrl").
-export([
type_and_port/1,
network_magic/1,
genesis_block/1
]).
type_and_port(<<"btc">>) -> {bitcoin, 8333};
type_and_port(<<"btc_testnet">>) -> {bitcoin_testnet, 18333};
type_and_port(<<"nmc">>) -> {namecoin, 8334};
type_and_port(<<"nmc_testnet">>) -> {namecoin_testnet, 18334};
type_and_port(<<"ltc">>) -> {litecoin, 9333};
type_and_port(<<"ltc_testnet">>) -> {litecoin_testnet, 19333};
type_and_port(<<"fbx">>) -> {fairbrix, 8591};
type_and_port(_) -> undefined.
network_magic(bitcoin) ->
<<249,190,180,217>>;
network_magic(bitcoin_testnet) ->
<<250,191,181,218>>;
network_magic(namecoin) ->
<<249,190,180,254>>;
network_magic(namecoin_testnet) ->
<<250,191,181,254>>;
network_magic(litecoin) ->
<<251,192,182,219>>;
network_magic(litecoin_testnet) ->
<<252,193,183,220>>;
network_magic(fairbrix) ->
<<249,219,249,219>>.
genesis_block(bitcoin) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"Sl4eS6q4nzoyUYqIwxvIf2GPdmc+LMd6shJ7ev3tozs=">>),
timestamp = 16#495fab29,
bits = 16#1d00ffff,
nonce = 16#7c2bac1d
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1d00ffff, <<4>>, <<"The Times 03/Jan/2009 Chancellor on brink of second bailout for banks">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQRniv2w/lVIJxln8aZxMLcQXNaoKOA5CaZ5YuDqH2Hetkn2vD9M7zjE81UE5R7BEt5cOE33uguNV4pMcCtr8R1frA==">>)
}],
lock_time = 0
},
BlockHash = <<"000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f">>,
{BlockHash, Header, Tx};
genesis_block(namecoin) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"QcYtvZBoyJpElSXjzVrGGyDs4ow8OLPzWyFh8ObTyw0=">>),
timestamp = 16#4daa33c1,
bits = 16#1c007fff,
nonce = 16#a21ea192
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1c007fff, 522, <<"... choose what comes next. Lives of your own, or a return to chains. -- V">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQS2IDaQUM2Jn/u8To7lHoxFNKhVu0Y0OdY9I11HeWhdi29IcKI4zzZayU+hPvmioizZnQ1e6G3K\nvK/ONses9DzlrA==">>)
}],
lock_time = 0
},
BlockHash = <<"000000000062b72c5e2ceb45fbc8587e807c155b0da735e6483dfba2f0a9c770">>,
{BlockHash, Header, Tx};
genesis_block(litecoin) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"l937uua+l/1s3z58oTIyo6//I1Pim636t/cwEe3Uztk=">>),
timestamp = 16#4e8eaab9,
bits = 16#1d00ffff,
nonce = 16#7c3f51cd
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1d00ffff, <<4>>, <<"NY Times 05/Oct/2011 Steve Jobs, Apple", 226, 128, 153, "s Visionary, Dies at 56">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQQBhHEPpomtUCNpDIDzpJyPE/jUW4yFf7y8i8So5NPrSxD01GBPoI3OYBqvD0cCFv4bUYULSs8hsXnEUHCsewOprA==">>)
}],
lock_time = 0
},
BlockHash = <<"12a765e31ffd4059bada1e25190f6e98c99d9714d334efa41a195a7e7e04bfe2">>,
{BlockHash, Header, Tx};
genesis_block(fairbrix) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"QKYnJi7XFvDz1RBDFf4LYAv44yoCGSkpkWP3QVH6UrE=">>),
timestamp = 16#4e87e916,
bits = 16#1e0ffff0,
nonce = 16#16fbf1ed
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1d00ffff, <<4>>, <<"\"nytimes.com 10/1/2011 - Police Arrest Over 700 Protesters on Brooklyn Bridge\"">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQRniv2w/lVIJxln8aZxMLcQXNaoKOA5CaZ5YuDqH2Hetkn2vD9M7zjE81UE5R7BEt5cOE33uguNV4pMcCtr8R1frA==">>)
}],
lock_time = 0
},
BlockHash = <<"002a91713910bc96eb0edf237fcd2799d7a01186e1e96023e860bc70b3916200">>,
{BlockHash, Header, Tx}.
| null | https://raw.githubusercontent.com/p2k/ecoinpool/01ba76a7ab4b17b60cb0c525786fddef43ea80e1/apps/ebitcoin/src/ebitcoin_chain_data.erl | erlang |
ebitcoin is free software: you can redistribute it and/or modify
(at your option) any later version.
ebitcoin is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
|
Copyright ( C ) 2011 Patrick " p2k " < >
This file is part of ebitcoin .
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
along with ebitcoin . If not , see < / > .
-module(ebitcoin_chain_data).
-include("btc_protocol_records.hrl").
-export([
type_and_port/1,
network_magic/1,
genesis_block/1
]).
type_and_port(<<"btc">>) -> {bitcoin, 8333};
type_and_port(<<"btc_testnet">>) -> {bitcoin_testnet, 18333};
type_and_port(<<"nmc">>) -> {namecoin, 8334};
type_and_port(<<"nmc_testnet">>) -> {namecoin_testnet, 18334};
type_and_port(<<"ltc">>) -> {litecoin, 9333};
type_and_port(<<"ltc_testnet">>) -> {litecoin_testnet, 19333};
type_and_port(<<"fbx">>) -> {fairbrix, 8591};
type_and_port(_) -> undefined.
network_magic(bitcoin) ->
<<249,190,180,217>>;
network_magic(bitcoin_testnet) ->
<<250,191,181,218>>;
network_magic(namecoin) ->
<<249,190,180,254>>;
network_magic(namecoin_testnet) ->
<<250,191,181,254>>;
network_magic(litecoin) ->
<<251,192,182,219>>;
network_magic(litecoin_testnet) ->
<<252,193,183,220>>;
network_magic(fairbrix) ->
<<249,219,249,219>>.
genesis_block(bitcoin) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"Sl4eS6q4nzoyUYqIwxvIf2GPdmc+LMd6shJ7ev3tozs=">>),
timestamp = 16#495fab29,
bits = 16#1d00ffff,
nonce = 16#7c2bac1d
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1d00ffff, <<4>>, <<"The Times 03/Jan/2009 Chancellor on brink of second bailout for banks">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQRniv2w/lVIJxln8aZxMLcQXNaoKOA5CaZ5YuDqH2Hetkn2vD9M7zjE81UE5R7BEt5cOE33uguNV4pMcCtr8R1frA==">>)
}],
lock_time = 0
},
BlockHash = <<"000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f">>,
{BlockHash, Header, Tx};
genesis_block(namecoin) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"QcYtvZBoyJpElSXjzVrGGyDs4ow8OLPzWyFh8ObTyw0=">>),
timestamp = 16#4daa33c1,
bits = 16#1c007fff,
nonce = 16#a21ea192
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1c007fff, 522, <<"... choose what comes next. Lives of your own, or a return to chains. -- V">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQS2IDaQUM2Jn/u8To7lHoxFNKhVu0Y0OdY9I11HeWhdi29IcKI4zzZayU+hPvmioizZnQ1e6G3K\nvK/ONses9DzlrA==">>)
}],
lock_time = 0
},
BlockHash = <<"000000000062b72c5e2ceb45fbc8587e807c155b0da735e6483dfba2f0a9c770">>,
{BlockHash, Header, Tx};
genesis_block(litecoin) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"l937uua+l/1s3z58oTIyo6//I1Pim636t/cwEe3Uztk=">>),
timestamp = 16#4e8eaab9,
bits = 16#1d00ffff,
nonce = 16#7c3f51cd
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1d00ffff, <<4>>, <<"NY Times 05/Oct/2011 Steve Jobs, Apple", 226, 128, 153, "s Visionary, Dies at 56">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQQBhHEPpomtUCNpDIDzpJyPE/jUW4yFf7y8i8So5NPrSxD01GBPoI3OYBqvD0cCFv4bUYULSs8hsXnEUHCsewOprA==">>)
}],
lock_time = 0
},
BlockHash = <<"12a765e31ffd4059bada1e25190f6e98c99d9714d334efa41a195a7e7e04bfe2">>,
{BlockHash, Header, Tx};
genesis_block(fairbrix) ->
ZeroHash = binary:list_to_bin(lists:duplicate(32,0)),
Header = #btc_header{
version = 1,
hash_prev_block = ZeroHash,
hash_merkle_root = base64:decode(<<"QKYnJi7XFvDz1RBDFf4LYAv44yoCGSkpkWP3QVH6UrE=">>),
timestamp = 16#4e87e916,
bits = 16#1e0ffff0,
nonce = 16#16fbf1ed
},
Tx = #btc_tx{
version = 1,
tx_in = [#btc_tx_in{
prev_output_hash = ZeroHash,
prev_output_index = 16#ffffffff,
signature_script = [16#1d00ffff, <<4>>, <<"\"nytimes.com 10/1/2011 - Police Arrest Over 700 Protesters on Brooklyn Bridge\"">>],
sequence = 16#ffffffff
}],
tx_out = [#btc_tx_out{
value = 5000000000,
pk_script = base64:decode(<<"QQRniv2w/lVIJxln8aZxMLcQXNaoKOA5CaZ5YuDqH2Hetkn2vD9M7zjE81UE5R7BEt5cOE33uguNV4pMcCtr8R1frA==">>)
}],
lock_time = 0
},
BlockHash = <<"002a91713910bc96eb0edf237fcd2799d7a01186e1e96023e860bc70b3916200">>,
{BlockHash, Header, Tx}.
|
0fd91e2c220d1d201132ab1abd38eb85440a4b693ec2348a680dfb01ff5bea81 | subttle/regular | DFA.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ExistentialQuantification #
{-# OPTIONS_GHC -Wall #-}
module DFA where
import Algebra.Graph.Relation as Relation (stars)
import Data.Bool (bool)
import Data.Bool.Unicode ((∧), (∨))
import Data.Functor.Contravariant (Contravariant (..), Equivalence (..), Predicate (..))
import Data.Function (on)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import qualified Data.List as List
import qualified Data.Map as Map
import Data.Set as Set (Set, delete, elemAt, filter, map, powerSet, singleton)
import Data.Set.Unicode ((∅), (∈), (∉), (∖), (∪))
import Numeric.Algebra.Class (sumWith)
import Prelude hiding (map)
import Common (Set' (..), (×), (‥), equation, format, implies, intersects, palindrome, quoteWith, size', upToLength)
import Config (Configuration (..))
import Finite (Finite (..), Q (..), Σ (..), Init (..), Final (..), predicateToSet, representative)
import qualified NFA
import qualified EFA
import qualified GNFA
import qualified FA
import qualified DA
import qualified RegExp as RE
import Language (ℒ)
import qualified TransitionGraph as TG
-- Deterministic Finite Automaton
data DFA q s = -- q is the set of states, Q
-- s is the set of symbols Σ
DFA { delta ∷ (q, s) → q -- The (total) transition function, δ : Q × Σ → Q
, q0 ∷ q -- The initial state, q₀ ∈ Q
, fs ∷ Set q -- The final states, F ⊆ Q
}
A DFA constructor where the ` q ` type parameter is an existential
data SomeDFA s where
SomeDFA ∷ (Show q, Finite q) ⇒ DFA q s → SomeDFA s
instance (Finite q) ⇒ Q (DFA q s) q
instance (Finite s) ⇒ Σ (DFA q s) s
instance (Finite s) ⇒ Σ (SomeDFA s) s
instance Contravariant (DFA q) where
contramap ∷ (s → g) → DFA q g → DFA q s
contramap h ( DFA δ q₀ f ) = DFA ( , σ ) → δ ( q , h σ ) ) q₀ f
contramap h (DFA δ q₀ f) = DFA (\(q, σ) → (curry δ) q (h σ)) q₀ f
invhomimage ∷ (s → [g]) → DFA q g → DFA q s
invhomimage h (DFA δ q₀ f) = DFA (\(q, σ) → foldl (curry δ) q (h σ)) q₀ f
instance Contravariant SomeDFA where
contramap ∷ (g → s) → SomeDFA s → SomeDFA g
contramap h (SomeDFA m) = SomeDFA (contramap h m)
instance (Show q, Finite q, Show s, Finite s) ⇒ Show (DFA q s) where
show ∷ DFA q s → String
show m = quoteWith "( " " )" $ List.intercalate "\n, "
[ equation "Q " ((show . Set' . qs ) m)
, equation "Σ " ((show . Set' . sigma) m)
, quoteWith "δ : Q × Σ → Q" ((format . deltaToMap) m) "\n"
, equation "q₀" ((show . q0 ) m)
, equation "F " ((show . Set' . fs ) m)
]
instance (Show s, Finite s) ⇒ Show (SomeDFA s) where
show ∷ SomeDFA s → String
show (SomeDFA m) = show m
instance (Finite q, Finite s) ⇒ Configuration DFA q s q where
By construction of a DFA type this will be ` True `
deterministic ∷ DFA q s → Bool
deterministic = const True
codeterministic ∷ DFA q s → Bool
codeterministic = deterministic . FA.reversal . toFA
By construction of a DFA type this will be ` True `
complete ∷ DFA q s → Bool
complete = const True
occupied ∷ DFA q s → q → Set q
occupied = const singleton
deltaD ∷ DFA q s → ((q, s) → q)
deltaD = delta
initial ∷ DFA q s → q
initial = q0
final ∷ DFA q s → Set q
final = fs
Given a DFA , m , and a configuration , return what it yields in one step
(⊢) ∷ DFA q s → (q, [s]) → (q, [s])
(⊢) _ (q, []) = ( q , [])
(⊢) (DFA δ _ _) (q, σ : w ) = (δ (q, σ), w )
Determine which states are accessible in the given DFA , i.e.
-- { q ∈ Q | ∃w ∈ Σ★, δ★(q₀, w) = q }
accessible ∷ DFA q s → Set q
accessible m@(DFA _ q₀ _) = reachable m q₀
-- δ★ : Q × Σ★ → Q
-- "Extended delta" - The delta function extended from single symbols to strings (lists of symbols).
Take a DFA and a starting state , q , for that DFA , then compute the state p such that δ ★ (q , w ) = p
delta' ∷ DFA q s → (q, [s]) → q
delta' (DFA δ _ _) = uncurry (foldl (curry δ))
-- δ′′ : P(Q) × Σ★ → P(Q)
delta'' ∷ DFA q s → (Set q, [s]) → Set q
delta'' (DFA δ _ _) = uncurry (foldl (\states σ → map (\q → δ (q, σ)) states))
-- Evaluate a string
Take a DFA , m , and a string of symbols , w , and then compute the resulting state , q
-- δ★(q₀, w) = q
eval ∷ DFA q s → [s] → q
eval m@(DFA _ q₀ _) w = delta' m (q₀, w)
-- trace δ★(q, w)
traced ∷ DFA q s → [s] → (q, [(q, s)])
traced (DFA δ q₀ _) = List.mapAccumL (\q σ → (δ (q, σ), (q, σ))) q₀
Take a DFA , m , and a string , w , and decide if that string is accepted / recognized
-- m accepts a string w ∈ Σ★ iff δ★(q₀, w) ∈ F
accepts ∷ DFA q s → [s] → Bool
accepts m@(DFA _ _ f) w = eval m w ∈ f
Take a DFA , m , and a string , w , and decide if that string is not accepted
rejects ∷ DFA q s → [s] → Bool
rejects m@(DFA _ _ f) w = eval m w ∉ f
Convert the DFA to its Transition Graph .
-- N.B. information is lost in this conversion, i.e. q₀ and F will be dropped
toGraph ∷ DFA q s → TG.TG q s
toGraph (DFA δ _ _) = TG.TG (\s → stars (fmap (\q → (q, [δ (q, s)])) asList))
Determine if a string , w , synchronizes ( or " resets " ) a DFA , m
-- /~kisiel/auto/eppstein.pdf
A string , w , " resets " a DFA when ∃w ∈ Σ ★ , ∀q ∈ Q , δ ★ (q , w ) = p for some p ∈ Q
-- evaluate the same word from all states of Q, not just q₀
i.e. | { δ ★ (q , w ) | q ∈ Q } | = 1
synchronizes ∷ (Finite q, Finite s) ⇒ DFA q s → [s] → Bool
synchronizes m w = size ' ( delta '' m ( qs m , w ) ) = = 1
synchronizes m w = (==) 1 (size' (delta'' m (qs m, w)))
Lazily generate all the rejected strings of the given DFA
rejected ∷ (Finite q, Finite s) ⇒ DFA q s → [[s]]
rejected = language . complement
TODO can delete this because it is redundant given ` eval ` already defined ,
TODO however for now I 'm keeping it for reference
evaluate ∷ DFA q s → [s] → q
evaluate (DFA δ q₀ _) w = foldl (curry δ) q₀ w
Trace the path the DFA takes for a word
trace ∷ DFA q s → [s] → NonEmpty q
trace (DFA δ q₀ _) w = NE.scanl (curry δ) q₀ w
derivative
-- ∂σ(ℒ(m)) = { w | σw ∈ ℒ(m) }
derivative ∷ DFA q s → s → DFA q s
derivative (DFA δ q₀ f) σ = DFA δ ( (curry δ) q₀ σ) f
derivative extended to strings
derivative' ∷ (Finite q, Finite s) ⇒ DFA q s → [s] → DFA q s
derivative' (DFA δ q₀ f) w = DFA δ (foldl (curry δ) q₀ w) f
-- The "right language" of m wrt some state q
right ∷ DFA q s → q → DFA q s
right (DFA δ _ f) q = DFA δ q f
-- The "left language" of q
left ∷ DFA q s → q → DFA q s
left (DFA δ q₀ _) = DFA δ q₀ . singleton
transition ∷ (Finite q, Finite s) ⇒ DFA q s → s → (q → q)
transition (DFA δ _ _) σ = \q → δ (q, σ)
TODO name ?
transitionStar ∷ ( Finite q , Finite s ) ⇒ DFA q s → [ s ] → ( q → q )
-- transitionStar m w = \q → delta' m (q, w)
-- The equivalence relation formed on Q by indistinguishable states for m
Two states having the same right language are indistinguishable
-- they both lead to the same words being accepted.
indistinguishability ∷ (Finite q, Finite s) ⇒ DFA q s → Equivalence q
indistinguishability = Equivalence . (DFA.equal `on`) . right
domain ∷ (Finite q, Finite s) ⇒ DFA q s → Set (q, s)
domain m = qs m × sigma m
deltaToMap ∷ (Finite q, Finite s) ⇒ DFA q s → Map.Map (q, s) q
deltaToMap m@(DFA δ _ _) = Map.fromSet δ (domain m)
The transition table of the DFA 's δ function
table ∷ (Finite q, Finite s) ⇒ DFA q s → [((q, s), q)]
table = Map.toAscList . deltaToMap
ℒ(m ) is cofinite in Σ ★ iff the complement of ℒ(m ) ( in Σ ★ ) is finite .
cofinite ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
cofinite = Predicate (finite . complement)
Theorem ( Cerny , 1964 ): A DFA M is ( directable ) synchronizing iff ∀q ∈ Q , ∃p ∈ Q , ∃w ∈ Σ ★ : δ(q , w ) = δ(p , w )
-- That is, there exists a word w, such that evaluation of w from from any state, q, always ends up in the same state, p.
" A DFA is synchronizing if there exists a word that sends all states of the automaton to the same state . " -
synchronizing ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
synchronizing = Predicate (not . isZero . power)
where
-- FIXME supposed to be a non-empty set
TODO alter this to check for shortest path to get shortest reset word ?
power m@(DFA δ _ _) = DFA (\(states, σ) → map (\q → δ (q, σ)) states) (qs m) (map singleton (qs m))
A palindrome is a word w such that w = wʳ.
-- Let ℒ ⊆ Σ★, ℒ is palindromic if every word w ∈ ℒ is a palindrome.
ℒ(M ) is palindromic if and only if { x ∈ ℒ(M ) : |x| < 3n } is
palindromic , where n is the number of states of M.
TODO this is the ( untested ) naive implementation
palindromic ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
palindromic = Predicate (\m → all palindrome (upToLength (3 * size' (qs m)) (language m)))
An automaton M = ( S , I , δ , s₀ , F ) is said to be a permutation
automaton , or more simply a p - automaton , if and only if , a ) = δ(sⱼ , a ) , where sᵢ , sⱼ ∈ S , a ∈ I , implies that sᵢ = sⱼ.
Permutation Automata by
TODO untested
TODO better to place in src / FA.hs ?
permutation ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
permutation = Predicate (\m@(DFA δ _ _) → all (\(qᵢ, qⱼ) →
all (\σ → (δ (qᵢ, σ) == δ (qⱼ, σ)) `implies` (qᵢ == qⱼ)) (sigma m)
) (qs m × qs m))
Given two DFAs , decide if they produce the exact same language , i.e.
-- ℒ(m₁) ≟ ℒ(m₂)
equal ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
equal m₁ m₂ = contained m₁ m₂ ∧ contained m₂ m₁
Given two DFAs , m₁ and m₂ , decide if ℒ(m₁ ) ⊆ ℒ(m₂ )
contained ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
contained m = isZero . intersection m . complement
Given two DFAs , m₁ and m₂ ,
-- ℒ(m₁) ∩ ℒ(m₂) ≟ ∅
disjoint ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
disjoint = isZero ‥ intersection
Given two DFAs , m₁ and m₂ ,
-- ℒ(m₁) ∩ ℒ(m₂) ≠ ∅?
intersects ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
intersects = not ‥ DFA.disjoint
The difference of two DFAs , m₁ and m₂ , produces a new DFA , , such that
-- ℒ(m₃) = ℒ(m₁) − ℒ(m₂)
difference ∷ (Finite q, Finite p) ⇒ DFA q s → DFA p s → DFA (q, p) s
difference m₁@(DFA _ _ f₁) m₂@(DFA _ _ f₂) = (synchronous m₁ m₂) { fs = Set.filter (\(q, p) → (q ∈ f₁) ∧ (p ∉ f₂)) (qs m₁ × qs m₂) }
The union of two DFAs , m₁ and m₂ , produces a new DFA , , such that
-- ℒ(m₃) = ℒ(m₁) ∪ ℒ(m₂)
F = ( F₁ × Q₁ ) ( Q₂ × F₂ )
union ∷ (Finite q, Finite p) ⇒ DFA q s → DFA p s → DFA (q, p) s
union m₁@(DFA _ _ f₁) m₂@(DFA _ _ f₂) = (synchronous m₁ m₂) { fs = Set.filter (\(q, p) → (q ∈ f₁) ∨ (p ∈ f₂)) (qs m₁ × qs m₂) }
The instersection of two DFAs , m₁ and m₂ , produces a new DFA , , such that
-- ℒ(m₃) = ℒ(m₁) ∩ ℒ(m₂)
intersection ∷ (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (q, p) s
intersection = synchronous
-- The product construction
Essentially this runs two DFAs ( which both share the same alphabet ) " in parallel " together in lock step
synchronous ∷ (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (q, p) s
synchronous (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA (\((q, p), σ) → (δ₁ (q, σ), δ₂ (p, σ))) (q₀, p₀) (f₁ × f₂)
The asynchronous product of two DFA
Essentially this runs two DFAs with different alphabets " in parallel " independently
asynchronous ∷ ∀ q p s g . (Ord q, Ord p) ⇒ DFA q s → DFA p g → DFA (q, p) (Either s g)
asynchronous (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (q₀, p₀) (f₁ × f₂)
where
δ ∷ ((q, p), Either s g) → (q, p)
δ ((q, p), Left σ) = (δ₁ (q, σ), p )
δ ((q, p), Right γ) = (q, δ₂ (p, γ))
perfectShuffle ∷ ∀ q p s . (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (q, p, Bool) s
perfectShuffle (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (q₀, p₀, False) (Set.map (\(q, p) → (q, p, False)) (f₁ × f₂))
where
δ ∷ ((q, p, Bool), s) → (q, p, Bool)
δ ((q, p, False), σ) = (δ₁ (q, σ), p , True )
δ ((q, p, True ), σ) = ( q , δ₂ (p, σ), False)
import qualified Data . Can as Can ( Can ( .. ) )
import Data . Can
-- FIXME rename , consider /*chronous
asdf1 q . ( q , Ord p ) ⇒ DFA q s → DFA p g → DFA ( q , p ) ( Can s g )
asdf1 ( DFA δ₁ q₀ f₁ ) ( ) = DFA δ ( q₀ , p₀ ) ( f₁ × f₂ )
where
δ ∷ ( ( q , p ) , Can s g ) → ( q , p )
δ ( ( q , p ) , Can . Non ) = ( q , p )
δ ( ( q , p ) , ( Can . One σ ) ) = ( δ₁ ( q , σ ) , p )
δ ( ( q , p ) , ( Can . Eno γ ) ) = ( q , δ₂ ( p , γ ) )
δ ( ( q , p ) , ( Can . Two σ γ ) ) = ( δ₁ ( q , σ ) , δ₂ ( p , γ ) )
q . ( q , Ord p ) ⇒ DFA q s → DFA p s → DFA ( Can q p ) s
asdf2 ( DFA δ₁ q₀ f₁ ) ( ) = DFA δ ( Can . Two q₀ p₀ ) ( Set.map ( uncurry Can . Two ) ( f₁ × f₂ ) )
where
δ ∷ ( Can q p , s ) → Can q p
δ ( Can . Non , _ ) = Can . Non
δ ( Can . One q , σ ) = Can . One ( δ₁ ( q , σ ) )
δ ( Can . Eno p , σ ) = Can . Eno ( δ₂ ( p , σ ) )
δ ( Can . Two q p , σ ) = Can . Two ( δ₁ ( q , σ ) ) ( δ₂ ( p , σ ) )
import qualified Data.Can as Can (Can (..))
import Data.Can
-- FIXME rename, consider /*chronous
asdf1 ∷ ∀ q p s g . (Ord q, Ord p) ⇒ DFA q s → DFA p g → DFA (q, p) (Can s g)
asdf1 (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (q₀, p₀) (f₁ × f₂)
where
δ ∷ ((q, p), Can s g) → (q, p)
δ ((q, p), Can.Non ) = (q, p )
δ ((q, p), (Can.One σ )) = (δ₁ (q, σ), p )
δ ((q, p), (Can.Eno γ)) = (q, δ₂ (p, γ))
δ ((q, p), (Can.Two σ γ)) = (δ₁ (q, σ), δ₂ (p, γ))
asdf2 ∷ ∀ q p s g . (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (Can q p) s
asdf2 (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (Can.Two q₀ p₀) (Set.map (uncurry Can.Two) (f₁ × f₂))
where
δ ∷ (Can q p, s) → Can q p
δ (Can.Non , _) = Can.Non
δ (Can.One q , σ) = Can.One (δ₁ (q, σ))
δ (Can.Eno p, σ) = Can.Eno (δ₂ (p, σ))
δ (Can.Two q p, σ) = Can.Two (δ₁ (q, σ)) (δ₂ (p, σ))
-}
The symmetric difference ( " exclusive or " , or " xor " ) of two DFAs
ℒ(m₁ ) ⊕ ℒ(m₂ ) = ( ℒ(m₁ ) - ℒ(m₂ ) ) ( ℒ(m₂ ) - ℒ(m₁ ) )
xor ∷ (Finite q, Finite p) ⇒ DFA q s → DFA p s → DFA ((q, p), (p, q)) s
xor m₁ m₂ = DFA.difference m₁ m₂ `DFA.union` DFA.difference m₂ m₁
-- ℒʳ
reversal ∷ (Finite q, Finite s) ⇒ DFA q s → DFA (Set q) s
reversal = DFA.fromFA . FA.reversal . toFA
ℒ(m₁ ) / ℒ(m₂ ) = { w | w · x ∈ ℒ(m₁ ) ∈ ℒ(m₂ ) }
rquotient ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → DFA q s
rquotient m₁ m₂ = m₁ { fs = Set.filter (DFA.intersects m₂ . right m₁) (qs m₁) }
-- min(ℒ(m)) = ℒ(m) - ℒ(m)·Σ⁺ = { w | w ∈ ℒ(m) ∧ no proper prefix of w is in ℒ(m) }
-- a proper prefix of a string is a prefix of the string not empty and not equal to itself
min ∷ ∀ q s . (Ord q) ⇒ DFA q s → DFA (Either () q) s
min (DFA δ q₀ f) = DFA δ₁ (Right q₀) (Set.map Right f)
where
δ₁ ∷ (Either () q, s) → Either () q
δ₁ (Left (), _) = Left () -- `Left ()` is a dead state with no way to transition out
δ₁ (Right q, _) | q ∈ f = Left () -- delete transitions out of final states by sending to the new dead state
δ₁ (Right q, σ) = Right (δ (q, σ))
max(ℒ(m ) ) = { w | w ∈ ℒ(m ) ∧ ∀x ≠ ε , wx ∉ ℒ(m ) }
max ∷ (Finite q, Finite s) ⇒ DFA q s → DFA q s -- delete q because x cannot be ε
max m@(DFA _ _ f) = m { fs = Set.filter (\q → any (∈ delete q (reachable m q)) f) f }
-- Init(ℒ) = ℒ − (ℒ ∩ ℒΣ⁺) = { w ∈ Σ★ | wy ∈ ℒ for some y ∈ Σ★ }
-- F = { q | ∃w.δ★(q, w) ∈ F }
-- "Given a DFA M for ℒ, make each state q final if there is a path from q to a final state in the original machine"
init ∷ (Finite q, Finite s) ⇒ DFA q s → DFA q s
init m = m { fs = coaccessible m }
Given a DFA , m , return a new DFA , m ' , which recognizes only the rejected strings of m
-- such that ℒ(m') = Σ★ ∖ ℒ(m)
complement ∷ (Finite q) ⇒ DFA q s → DFA q s
complement m@(DFA _ _ f) = m { fs = qs m ∖ f }
minimization
-- Here we convert to FA to avoid introducing a new state with ε-transitions while reversing
-- The number of states, i.e. `size' (qs m)`, will increase but the number of accessible states will stay the same or decrease
-- N.B. `fromFA` performs the last determinization
TODO testme
FIXME need to map ` ( Set ( Set q ) ) ` back down to ` q ` or smaller
minimize ∷ (Finite q, Finite s) ⇒ DFA q s → DFA (Set (Set q)) s
minimize = DFA.fromFA . FA.codeterminization . toFA
-- Quotient automaton
FIXME see about necessarily starting with trim automaton , may have to return ` Maybe ( DFA q s ) `
FIXME or maybe something like trim the ` DFA ` as a ` SomeDFA `
quotient ∷ ∀ q s . (Finite q, Finite s) ⇒ DFA q s → DFA q s
quotient m@(DFA δ q₀ f) = DFA (rep . δ) (rep q₀) (Set.map rep f)
where
rep ∷ q → q
rep = representative (indistinguishability m)
The DFA , empty , which produces the empty language , such that
-- ℒ(empty) = ∅
empty ∷ DFA () s
empty = DFA (const ()) () (∅)
The DFA , epsilon , which produces the language , such that
-- ℒ(epsilon) = {ε}
epsilon ∷ DFA Bool s
epsilon = DFA (const False) True (singleton True)
Given a symbol of an alphabet , σ ∈ Σ , construct a DFA which recognizes only that symbol and nothing else , i.e.
ℒ(m ) = { σ }
literal ∷ ∀ s . (Ord s) ⇒ s → DFA Ordering s
literal σ = DFA δ LT (singleton EQ)
where
δ ∷ (Ordering, s) → Ordering
δ (LT, σ') | σ' == σ = EQ
δ _ = GT
fromSet ∷ ∀ s . (Ord s) ⇒ Set s → DFA Ordering s
fromSet s = DFA δ LT (singleton EQ)
where
δ ∷ (Ordering, s) → Ordering
δ (LT, σ) | σ ∈ s = EQ
δ _ = GT
TODO untested
toSet ∷ (Finite q, Finite s) ⇒ DFA q s → Set s
toSet m@(DFA δ _ _) = foldMap (\(q, σ) → bool (∅) (singleton σ) (δ (q, σ) ∈ useful m)) (useful m × sigma m)
dot ∷ (Finite s) ⇒ DFA Ordering s
dot = fromSet asSet
Convert an NFA with multiple start states to a DFA ( performs determinization )
fromFA ∷ (Finite q) ⇒ FA.FA q s → DFA (Set q) s
fromFA m@(FA.FA δ i f) = DFA (\(states, σ) → foldMap (\q → δ (q, σ)) states) i (Set.filter (Common.intersects f) (powerSet (qs m)))
fromCDFA ∷ (Finite q, Finite s) ⇒ FA.FA q s → Maybe (DFA q s)
fromCDFA m@(FA.FA δ i f) | complete m
∧ deterministic m = Just (DFA (\(q, σ) → elemAt 0 (δ (q, σ))) (elemAt 0 i) f)
fromCDFA _ = Nothing
Take an NFA , and use subset construction to convert it to an equivalent DFA ( performs determinization )
fromNFA ∷ (Finite q) ⇒ NFA.NFA q s → DFA (Set q) s
fromNFA m@(NFA.NFA δ q₀ f) = DFA (\(states, σ) → foldMap (\q → δ (q, σ)) states) -- for each occupied state,
-- transition to next state,
-- then union all the results
(singleton q₀)
(Set.filter (Common.intersects f) (powerSet (qs m)))
Take an EFA and use ( slightly modded ( See ( 2 . ) page 77 , HMU ) ) subset construction
to generate an equivalent DFA by " eliminating " epsilon transitions
fromEFA ∷ (Finite q) ⇒ EFA.EFA q s → DFA (Set q) s
fromEFA = fromNFA . NFA.fromEFA
Take a DFA , d , and convert it to an NFA , n , such that ℒ(d ) = ℒ(n )
toNFA ∷ DFA q s → NFA.NFA q s
toNFA (DFA δ q₀ f) = NFA.NFA (singleton . δ) q₀ f
-- min(ℒ(m)) = ℒ(m) - ℒ(m)·Σ⁺ = { w | w ∈ ℒ(m) ∧ no proper prefix of w is in ℒ(m) }
-- a proper prefix of a string is a prefix of the string not empty and not equal to itself
toNFAMin ∷ ∀ q s . (Ord q) ⇒ DFA q s → NFA.NFA q s
toNFAMin m@(DFA δ _ f) = (toNFA m) { NFA.delta = δ₁ }
where
δ₁ ∷ (q, s) → Set q
δ₁ (q, _) | q ∈ f = (∅) -- delete transitions out of final states
δ₁ (q, σ) = singleton (δ (q, σ))
toNFAShuffle ∷ ∀ q p s . (Ord q, Ord p) ⇒ DFA q s → DFA p s → NFA.NFA (q, p) s
toNFAShuffle (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = NFA.NFA δ (q₀, p₀) (f₁ × f₂)
where
δ ∷ ((q, p), s) → Set (q, p)
δ ((q, p), σ) = Set.singleton (δ₁ (q, σ), p )
∪ Set.singleton ( q , δ₂ (p, σ))
Take a DFA , d , and convert it to an EFA , e , such that ℒ(d ) = ℒ(e )
toEFA ∷ DFA q s → EFA.EFA q s
toEFA = NFA.toEFA . toNFA
-- cycle(ℒ) = { w₁·w₂ | w₂·w₁ ∈ ℒ }
A Second Course in Formal Languages and Automata Theory pg . 60
-- string conjugations
toEFACycle ∷ ∀ q s . (Finite q) ⇒ DFA q s → EFA.EFA (Either () (q, q, Bool)) s
toEFACycle m@(DFA δ q₀ f) = EFA.EFA δ₁ (Left ()) (Set.map (\q → Right (q, q, True)) (qs m))
where
δ₁ ∷ (Either () (q, q, Bool), Maybe s) → Set (Either () (q, q, Bool))
δ₁ (Left (), Nothing) = Set.map (\q → Right (q, q, False)) (qs m)
δ₁ (Right (q, p, False), Nothing) | q ∈ f = singleton (Right (q₀ , p, True))
δ₁ (Right (q, p, b), Just σ) = singleton (Right (δ (q, σ), p, b )) -- Simulation
δ₁ _ = (∅)
½ℒ = { w₁ | ∃ w₂ such that |w₁| = |w₂| ∧ w₁·w₂ ∈ ℒ ; w₁ ∈ Σ ★ , w₂ ∈ Σ ★ } .
A Second Course in Formal Languages and Automata Theory pg . 59
for all even length strings w ∈ ℒ , take the first half of w , producing ½ℒ
toEFAHalf ∷ ∀ q s . (Finite q, Finite s) ⇒ DFA q s → EFA.EFA (Either () (q, q, q)) s
toEFAHalf m@(DFA δ q₀ f) = EFA.EFA δ₁ (Left ()) (Set.map (\(q, qᶠ) → Right (q, q, qᶠ)) (qs m × f))
where
δ₁ ∷ (Either () (q, q, q), Maybe s) → Set (Either () (q, q, q))
δ₁ (Left (), Nothing) = Set.map (\q → Right (q, q₀, q)) (qs m)
δ₁ (Right (q, p, r), Just σ) = Set.map (\σ' → Right (q, δ (p, σ), δ (r, σ'))) (sigma m)
δ₁ _ = (∅)
toFA ∷ (Finite q) ⇒ DFA q s → FA.FA q s
toFA = NFA.toFA . toNFA
toDA ∷ (Ord q) ⇒ DFA q s → (DA.DA q s, q)
toDA (DFA δ q₀ f) = (DA.DA (Predicate (∈ f)) (curry δ), q₀)
fromDA ∷ (Finite q) ⇒ (DA.DA q s, q) → DFA q s
fromDA (DA.DA o t, q₀) = DFA (uncurry t) q₀ (predicateToSet o)
Convert a DFA to a Generalized Nondeterministic Finite Automaton with ε - transitions
δ(q₁ , σ ) = q₂ ⟺ δ'(q₁ , q₂ ) = σ
toGNFA ∷ ∀ q s . (Finite s, Ord q) ⇒ DFA q s → GNFA.GNFA q s
toGNFA m@(DFA δ q₀ f) = GNFA.GNFA δ'
where
δ' ∷ (Either Init q, Either Final q) → RE.RegExp s
Connect the new ( forced ) GNFA start state to q₀ with an ε .
δ' (Left (Init _), Right q₂) | q₂ == q₀ = RE.one
Connect the new ( forced ) GNFA final state to each element of f with an ε .
δ' (Right q₁, Left (Final _)) | q₁ ∈ f = RE.one
If and q₂ were connected ( often via multiple transitions ) in the DFA ,
-- lift all symbols into RE.Lit, and let multiple transitions be represented
by the union of said literals . If no transitions between q₁ and q₂ in DFA then , RE.zero .
δ' (Right q₁, Right q₂) = sumWith RE.Lit (Set.filter (\σ → δ (q₁, σ) == q₂) (sigma m))
Besides the explicitly given epsilon connections , no connections
-- to the new final state nor from the new start state should exist.
δ' _ = RE.zero
toRE ∷ (Finite q, Finite s) ⇒ DFA q s → RE.RegExp s
toRE = GNFA.toRE . toGNFA
toLanguage ∷ (Finite q, Finite s) ⇒ DFA q s → Language.ℒ s
toLanguage = RE.toLanguage . toRE
| null | https://raw.githubusercontent.com/subttle/regular/1a9e71cb1c43cb3215b9331efa98d4dcf21fbf9c/src/DFA.hs | haskell | # OPTIONS_GHC -Wall #
Deterministic Finite Automaton
q is the set of states, Q
s is the set of symbols Σ
The (total) transition function, δ : Q × Σ → Q
The initial state, q₀ ∈ Q
The final states, F ⊆ Q
{ q ∈ Q | ∃w ∈ Σ★, δ★(q₀, w) = q }
δ★ : Q × Σ★ → Q
"Extended delta" - The delta function extended from single symbols to strings (lists of symbols).
δ′′ : P(Q) × Σ★ → P(Q)
Evaluate a string
δ★(q₀, w) = q
trace δ★(q, w)
m accepts a string w ∈ Σ★ iff δ★(q₀, w) ∈ F
N.B. information is lost in this conversion, i.e. q₀ and F will be dropped
/~kisiel/auto/eppstein.pdf
evaluate the same word from all states of Q, not just q₀
∂σ(ℒ(m)) = { w | σw ∈ ℒ(m) }
The "right language" of m wrt some state q
The "left language" of q
transitionStar m w = \q → delta' m (q, w)
The equivalence relation formed on Q by indistinguishable states for m
they both lead to the same words being accepted.
That is, there exists a word w, such that evaluation of w from from any state, q, always ends up in the same state, p.
FIXME supposed to be a non-empty set
Let ℒ ⊆ Σ★, ℒ is palindromic if every word w ∈ ℒ is a palindrome.
ℒ(m₁) ≟ ℒ(m₂)
ℒ(m₁) ∩ ℒ(m₂) ≟ ∅
ℒ(m₁) ∩ ℒ(m₂) ≠ ∅?
ℒ(m₃) = ℒ(m₁) − ℒ(m₂)
ℒ(m₃) = ℒ(m₁) ∪ ℒ(m₂)
ℒ(m₃) = ℒ(m₁) ∩ ℒ(m₂)
The product construction
FIXME rename , consider /*chronous
FIXME rename, consider /*chronous
ℒʳ
min(ℒ(m)) = ℒ(m) - ℒ(m)·Σ⁺ = { w | w ∈ ℒ(m) ∧ no proper prefix of w is in ℒ(m) }
a proper prefix of a string is a prefix of the string not empty and not equal to itself
`Left ()` is a dead state with no way to transition out
delete transitions out of final states by sending to the new dead state
delete q because x cannot be ε
Init(ℒ) = ℒ − (ℒ ∩ ℒΣ⁺) = { w ∈ Σ★ | wy ∈ ℒ for some y ∈ Σ★ }
F = { q | ∃w.δ★(q, w) ∈ F }
"Given a DFA M for ℒ, make each state q final if there is a path from q to a final state in the original machine"
such that ℒ(m') = Σ★ ∖ ℒ(m)
Here we convert to FA to avoid introducing a new state with ε-transitions while reversing
The number of states, i.e. `size' (qs m)`, will increase but the number of accessible states will stay the same or decrease
N.B. `fromFA` performs the last determinization
Quotient automaton
ℒ(empty) = ∅
ℒ(epsilon) = {ε}
for each occupied state,
transition to next state,
then union all the results
min(ℒ(m)) = ℒ(m) - ℒ(m)·Σ⁺ = { w | w ∈ ℒ(m) ∧ no proper prefix of w is in ℒ(m) }
a proper prefix of a string is a prefix of the string not empty and not equal to itself
delete transitions out of final states
cycle(ℒ) = { w₁·w₂ | w₂·w₁ ∈ ℒ }
string conjugations
Simulation
lift all symbols into RE.Lit, and let multiple transitions be represented
to the new final state nor from the new start state should exist. | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ExistentialQuantification #
module DFA where
import Algebra.Graph.Relation as Relation (stars)
import Data.Bool (bool)
import Data.Bool.Unicode ((∧), (∨))
import Data.Functor.Contravariant (Contravariant (..), Equivalence (..), Predicate (..))
import Data.Function (on)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import qualified Data.List as List
import qualified Data.Map as Map
import Data.Set as Set (Set, delete, elemAt, filter, map, powerSet, singleton)
import Data.Set.Unicode ((∅), (∈), (∉), (∖), (∪))
import Numeric.Algebra.Class (sumWith)
import Prelude hiding (map)
import Common (Set' (..), (×), (‥), equation, format, implies, intersects, palindrome, quoteWith, size', upToLength)
import Config (Configuration (..))
import Finite (Finite (..), Q (..), Σ (..), Init (..), Final (..), predicateToSet, representative)
import qualified NFA
import qualified EFA
import qualified GNFA
import qualified FA
import qualified DA
import qualified RegExp as RE
import Language (ℒ)
import qualified TransitionGraph as TG
}
A DFA constructor where the ` q ` type parameter is an existential
data SomeDFA s where
SomeDFA ∷ (Show q, Finite q) ⇒ DFA q s → SomeDFA s
instance (Finite q) ⇒ Q (DFA q s) q
instance (Finite s) ⇒ Σ (DFA q s) s
instance (Finite s) ⇒ Σ (SomeDFA s) s
instance Contravariant (DFA q) where
contramap ∷ (s → g) → DFA q g → DFA q s
contramap h ( DFA δ q₀ f ) = DFA ( , σ ) → δ ( q , h σ ) ) q₀ f
contramap h (DFA δ q₀ f) = DFA (\(q, σ) → (curry δ) q (h σ)) q₀ f
invhomimage ∷ (s → [g]) → DFA q g → DFA q s
invhomimage h (DFA δ q₀ f) = DFA (\(q, σ) → foldl (curry δ) q (h σ)) q₀ f
instance Contravariant SomeDFA where
contramap ∷ (g → s) → SomeDFA s → SomeDFA g
contramap h (SomeDFA m) = SomeDFA (contramap h m)
instance (Show q, Finite q, Show s, Finite s) ⇒ Show (DFA q s) where
show ∷ DFA q s → String
show m = quoteWith "( " " )" $ List.intercalate "\n, "
[ equation "Q " ((show . Set' . qs ) m)
, equation "Σ " ((show . Set' . sigma) m)
, quoteWith "δ : Q × Σ → Q" ((format . deltaToMap) m) "\n"
, equation "q₀" ((show . q0 ) m)
, equation "F " ((show . Set' . fs ) m)
]
instance (Show s, Finite s) ⇒ Show (SomeDFA s) where
show ∷ SomeDFA s → String
show (SomeDFA m) = show m
instance (Finite q, Finite s) ⇒ Configuration DFA q s q where
By construction of a DFA type this will be ` True `
deterministic ∷ DFA q s → Bool
deterministic = const True
codeterministic ∷ DFA q s → Bool
codeterministic = deterministic . FA.reversal . toFA
By construction of a DFA type this will be ` True `
complete ∷ DFA q s → Bool
complete = const True
occupied ∷ DFA q s → q → Set q
occupied = const singleton
deltaD ∷ DFA q s → ((q, s) → q)
deltaD = delta
initial ∷ DFA q s → q
initial = q0
final ∷ DFA q s → Set q
final = fs
Given a DFA , m , and a configuration , return what it yields in one step
(⊢) ∷ DFA q s → (q, [s]) → (q, [s])
(⊢) _ (q, []) = ( q , [])
(⊢) (DFA δ _ _) (q, σ : w ) = (δ (q, σ), w )
Determine which states are accessible in the given DFA , i.e.
accessible ∷ DFA q s → Set q
accessible m@(DFA _ q₀ _) = reachable m q₀
Take a DFA and a starting state , q , for that DFA , then compute the state p such that δ ★ (q , w ) = p
delta' ∷ DFA q s → (q, [s]) → q
delta' (DFA δ _ _) = uncurry (foldl (curry δ))
delta'' ∷ DFA q s → (Set q, [s]) → Set q
delta'' (DFA δ _ _) = uncurry (foldl (\states σ → map (\q → δ (q, σ)) states))
Take a DFA , m , and a string of symbols , w , and then compute the resulting state , q
eval ∷ DFA q s → [s] → q
eval m@(DFA _ q₀ _) w = delta' m (q₀, w)
traced ∷ DFA q s → [s] → (q, [(q, s)])
traced (DFA δ q₀ _) = List.mapAccumL (\q σ → (δ (q, σ), (q, σ))) q₀
Take a DFA , m , and a string , w , and decide if that string is accepted / recognized
accepts ∷ DFA q s → [s] → Bool
accepts m@(DFA _ _ f) w = eval m w ∈ f
Take a DFA , m , and a string , w , and decide if that string is not accepted
rejects ∷ DFA q s → [s] → Bool
rejects m@(DFA _ _ f) w = eval m w ∉ f
Convert the DFA to its Transition Graph .
toGraph ∷ DFA q s → TG.TG q s
toGraph (DFA δ _ _) = TG.TG (\s → stars (fmap (\q → (q, [δ (q, s)])) asList))
Determine if a string , w , synchronizes ( or " resets " ) a DFA , m
A string , w , " resets " a DFA when ∃w ∈ Σ ★ , ∀q ∈ Q , δ ★ (q , w ) = p for some p ∈ Q
i.e. | { δ ★ (q , w ) | q ∈ Q } | = 1
synchronizes ∷ (Finite q, Finite s) ⇒ DFA q s → [s] → Bool
synchronizes m w = size ' ( delta '' m ( qs m , w ) ) = = 1
synchronizes m w = (==) 1 (size' (delta'' m (qs m, w)))
Lazily generate all the rejected strings of the given DFA
rejected ∷ (Finite q, Finite s) ⇒ DFA q s → [[s]]
rejected = language . complement
TODO can delete this because it is redundant given ` eval ` already defined ,
TODO however for now I 'm keeping it for reference
evaluate ∷ DFA q s → [s] → q
evaluate (DFA δ q₀ _) w = foldl (curry δ) q₀ w
Trace the path the DFA takes for a word
trace ∷ DFA q s → [s] → NonEmpty q
trace (DFA δ q₀ _) w = NE.scanl (curry δ) q₀ w
derivative
derivative ∷ DFA q s → s → DFA q s
derivative (DFA δ q₀ f) σ = DFA δ ( (curry δ) q₀ σ) f
derivative extended to strings
derivative' ∷ (Finite q, Finite s) ⇒ DFA q s → [s] → DFA q s
derivative' (DFA δ q₀ f) w = DFA δ (foldl (curry δ) q₀ w) f
right ∷ DFA q s → q → DFA q s
right (DFA δ _ f) q = DFA δ q f
left ∷ DFA q s → q → DFA q s
left (DFA δ q₀ _) = DFA δ q₀ . singleton
transition ∷ (Finite q, Finite s) ⇒ DFA q s → s → (q → q)
transition (DFA δ _ _) σ = \q → δ (q, σ)
TODO name ?
transitionStar ∷ ( Finite q , Finite s ) ⇒ DFA q s → [ s ] → ( q → q )
Two states having the same right language are indistinguishable
indistinguishability ∷ (Finite q, Finite s) ⇒ DFA q s → Equivalence q
indistinguishability = Equivalence . (DFA.equal `on`) . right
domain ∷ (Finite q, Finite s) ⇒ DFA q s → Set (q, s)
domain m = qs m × sigma m
deltaToMap ∷ (Finite q, Finite s) ⇒ DFA q s → Map.Map (q, s) q
deltaToMap m@(DFA δ _ _) = Map.fromSet δ (domain m)
The transition table of the DFA 's δ function
table ∷ (Finite q, Finite s) ⇒ DFA q s → [((q, s), q)]
table = Map.toAscList . deltaToMap
ℒ(m ) is cofinite in Σ ★ iff the complement of ℒ(m ) ( in Σ ★ ) is finite .
cofinite ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
cofinite = Predicate (finite . complement)
Theorem ( Cerny , 1964 ): A DFA M is ( directable ) synchronizing iff ∀q ∈ Q , ∃p ∈ Q , ∃w ∈ Σ ★ : δ(q , w ) = δ(p , w )
" A DFA is synchronizing if there exists a word that sends all states of the automaton to the same state . " -
synchronizing ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
synchronizing = Predicate (not . isZero . power)
where
TODO alter this to check for shortest path to get shortest reset word ?
power m@(DFA δ _ _) = DFA (\(states, σ) → map (\q → δ (q, σ)) states) (qs m) (map singleton (qs m))
A palindrome is a word w such that w = wʳ.
ℒ(M ) is palindromic if and only if { x ∈ ℒ(M ) : |x| < 3n } is
palindromic , where n is the number of states of M.
TODO this is the ( untested ) naive implementation
palindromic ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
palindromic = Predicate (\m → all palindrome (upToLength (3 * size' (qs m)) (language m)))
An automaton M = ( S , I , δ , s₀ , F ) is said to be a permutation
automaton , or more simply a p - automaton , if and only if , a ) = δ(sⱼ , a ) , where sᵢ , sⱼ ∈ S , a ∈ I , implies that sᵢ = sⱼ.
Permutation Automata by
TODO untested
TODO better to place in src / FA.hs ?
permutation ∷ (Finite q, Finite s) ⇒ Predicate (DFA q s)
permutation = Predicate (\m@(DFA δ _ _) → all (\(qᵢ, qⱼ) →
all (\σ → (δ (qᵢ, σ) == δ (qⱼ, σ)) `implies` (qᵢ == qⱼ)) (sigma m)
) (qs m × qs m))
Given two DFAs , decide if they produce the exact same language , i.e.
equal ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
equal m₁ m₂ = contained m₁ m₂ ∧ contained m₂ m₁
Given two DFAs , m₁ and m₂ , decide if ℒ(m₁ ) ⊆ ℒ(m₂ )
contained ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
contained m = isZero . intersection m . complement
Given two DFAs , m₁ and m₂ ,
disjoint ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
disjoint = isZero ‥ intersection
Given two DFAs , m₁ and m₂ ,
intersects ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → Bool
intersects = not ‥ DFA.disjoint
The difference of two DFAs , m₁ and m₂ , produces a new DFA , , such that
difference ∷ (Finite q, Finite p) ⇒ DFA q s → DFA p s → DFA (q, p) s
difference m₁@(DFA _ _ f₁) m₂@(DFA _ _ f₂) = (synchronous m₁ m₂) { fs = Set.filter (\(q, p) → (q ∈ f₁) ∧ (p ∉ f₂)) (qs m₁ × qs m₂) }
The union of two DFAs , m₁ and m₂ , produces a new DFA , , such that
F = ( F₁ × Q₁ ) ( Q₂ × F₂ )
union ∷ (Finite q, Finite p) ⇒ DFA q s → DFA p s → DFA (q, p) s
union m₁@(DFA _ _ f₁) m₂@(DFA _ _ f₂) = (synchronous m₁ m₂) { fs = Set.filter (\(q, p) → (q ∈ f₁) ∨ (p ∈ f₂)) (qs m₁ × qs m₂) }
The instersection of two DFAs , m₁ and m₂ , produces a new DFA , , such that
intersection ∷ (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (q, p) s
intersection = synchronous
Essentially this runs two DFAs ( which both share the same alphabet ) " in parallel " together in lock step
synchronous ∷ (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (q, p) s
synchronous (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA (\((q, p), σ) → (δ₁ (q, σ), δ₂ (p, σ))) (q₀, p₀) (f₁ × f₂)
The asynchronous product of two DFA
Essentially this runs two DFAs with different alphabets " in parallel " independently
asynchronous ∷ ∀ q p s g . (Ord q, Ord p) ⇒ DFA q s → DFA p g → DFA (q, p) (Either s g)
asynchronous (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (q₀, p₀) (f₁ × f₂)
where
δ ∷ ((q, p), Either s g) → (q, p)
δ ((q, p), Left σ) = (δ₁ (q, σ), p )
δ ((q, p), Right γ) = (q, δ₂ (p, γ))
perfectShuffle ∷ ∀ q p s . (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (q, p, Bool) s
perfectShuffle (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (q₀, p₀, False) (Set.map (\(q, p) → (q, p, False)) (f₁ × f₂))
where
δ ∷ ((q, p, Bool), s) → (q, p, Bool)
δ ((q, p, False), σ) = (δ₁ (q, σ), p , True )
δ ((q, p, True ), σ) = ( q , δ₂ (p, σ), False)
import qualified Data . Can as Can ( Can ( .. ) )
import Data . Can
asdf1 q . ( q , Ord p ) ⇒ DFA q s → DFA p g → DFA ( q , p ) ( Can s g )
asdf1 ( DFA δ₁ q₀ f₁ ) ( ) = DFA δ ( q₀ , p₀ ) ( f₁ × f₂ )
where
δ ∷ ( ( q , p ) , Can s g ) → ( q , p )
δ ( ( q , p ) , Can . Non ) = ( q , p )
δ ( ( q , p ) , ( Can . One σ ) ) = ( δ₁ ( q , σ ) , p )
δ ( ( q , p ) , ( Can . Eno γ ) ) = ( q , δ₂ ( p , γ ) )
δ ( ( q , p ) , ( Can . Two σ γ ) ) = ( δ₁ ( q , σ ) , δ₂ ( p , γ ) )
q . ( q , Ord p ) ⇒ DFA q s → DFA p s → DFA ( Can q p ) s
asdf2 ( DFA δ₁ q₀ f₁ ) ( ) = DFA δ ( Can . Two q₀ p₀ ) ( Set.map ( uncurry Can . Two ) ( f₁ × f₂ ) )
where
δ ∷ ( Can q p , s ) → Can q p
δ ( Can . Non , _ ) = Can . Non
δ ( Can . One q , σ ) = Can . One ( δ₁ ( q , σ ) )
δ ( Can . Eno p , σ ) = Can . Eno ( δ₂ ( p , σ ) )
δ ( Can . Two q p , σ ) = Can . Two ( δ₁ ( q , σ ) ) ( δ₂ ( p , σ ) )
import qualified Data.Can as Can (Can (..))
import Data.Can
asdf1 ∷ ∀ q p s g . (Ord q, Ord p) ⇒ DFA q s → DFA p g → DFA (q, p) (Can s g)
asdf1 (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (q₀, p₀) (f₁ × f₂)
where
δ ∷ ((q, p), Can s g) → (q, p)
δ ((q, p), Can.Non ) = (q, p )
δ ((q, p), (Can.One σ )) = (δ₁ (q, σ), p )
δ ((q, p), (Can.Eno γ)) = (q, δ₂ (p, γ))
δ ((q, p), (Can.Two σ γ)) = (δ₁ (q, σ), δ₂ (p, γ))
asdf2 ∷ ∀ q p s g . (Ord q, Ord p) ⇒ DFA q s → DFA p s → DFA (Can q p) s
asdf2 (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = DFA δ (Can.Two q₀ p₀) (Set.map (uncurry Can.Two) (f₁ × f₂))
where
δ ∷ (Can q p, s) → Can q p
δ (Can.Non , _) = Can.Non
δ (Can.One q , σ) = Can.One (δ₁ (q, σ))
δ (Can.Eno p, σ) = Can.Eno (δ₂ (p, σ))
δ (Can.Two q p, σ) = Can.Two (δ₁ (q, σ)) (δ₂ (p, σ))
-}
The symmetric difference ( " exclusive or " , or " xor " ) of two DFAs
ℒ(m₁ ) ⊕ ℒ(m₂ ) = ( ℒ(m₁ ) - ℒ(m₂ ) ) ( ℒ(m₂ ) - ℒ(m₁ ) )
xor ∷ (Finite q, Finite p) ⇒ DFA q s → DFA p s → DFA ((q, p), (p, q)) s
xor m₁ m₂ = DFA.difference m₁ m₂ `DFA.union` DFA.difference m₂ m₁
reversal ∷ (Finite q, Finite s) ⇒ DFA q s → DFA (Set q) s
reversal = DFA.fromFA . FA.reversal . toFA
ℒ(m₁ ) / ℒ(m₂ ) = { w | w · x ∈ ℒ(m₁ ) ∈ ℒ(m₂ ) }
rquotient ∷ (Finite q, Finite p, Finite s) ⇒ DFA q s → DFA p s → DFA q s
rquotient m₁ m₂ = m₁ { fs = Set.filter (DFA.intersects m₂ . right m₁) (qs m₁) }
min ∷ ∀ q s . (Ord q) ⇒ DFA q s → DFA (Either () q) s
min (DFA δ q₀ f) = DFA δ₁ (Right q₀) (Set.map Right f)
where
δ₁ ∷ (Either () q, s) → Either () q
δ₁ (Right q, σ) = Right (δ (q, σ))
max(ℒ(m ) ) = { w | w ∈ ℒ(m ) ∧ ∀x ≠ ε , wx ∉ ℒ(m ) }
max m@(DFA _ _ f) = m { fs = Set.filter (\q → any (∈ delete q (reachable m q)) f) f }
init ∷ (Finite q, Finite s) ⇒ DFA q s → DFA q s
init m = m { fs = coaccessible m }
Given a DFA , m , return a new DFA , m ' , which recognizes only the rejected strings of m
complement ∷ (Finite q) ⇒ DFA q s → DFA q s
complement m@(DFA _ _ f) = m { fs = qs m ∖ f }
minimization
TODO testme
FIXME need to map ` ( Set ( Set q ) ) ` back down to ` q ` or smaller
minimize ∷ (Finite q, Finite s) ⇒ DFA q s → DFA (Set (Set q)) s
minimize = DFA.fromFA . FA.codeterminization . toFA
FIXME see about necessarily starting with trim automaton , may have to return ` Maybe ( DFA q s ) `
FIXME or maybe something like trim the ` DFA ` as a ` SomeDFA `
quotient ∷ ∀ q s . (Finite q, Finite s) ⇒ DFA q s → DFA q s
quotient m@(DFA δ q₀ f) = DFA (rep . δ) (rep q₀) (Set.map rep f)
where
rep ∷ q → q
rep = representative (indistinguishability m)
The DFA , empty , which produces the empty language , such that
empty ∷ DFA () s
empty = DFA (const ()) () (∅)
The DFA , epsilon , which produces the language , such that
epsilon ∷ DFA Bool s
epsilon = DFA (const False) True (singleton True)
Given a symbol of an alphabet , σ ∈ Σ , construct a DFA which recognizes only that symbol and nothing else , i.e.
ℒ(m ) = { σ }
literal ∷ ∀ s . (Ord s) ⇒ s → DFA Ordering s
literal σ = DFA δ LT (singleton EQ)
where
δ ∷ (Ordering, s) → Ordering
δ (LT, σ') | σ' == σ = EQ
δ _ = GT
fromSet ∷ ∀ s . (Ord s) ⇒ Set s → DFA Ordering s
fromSet s = DFA δ LT (singleton EQ)
where
δ ∷ (Ordering, s) → Ordering
δ (LT, σ) | σ ∈ s = EQ
δ _ = GT
TODO untested
toSet ∷ (Finite q, Finite s) ⇒ DFA q s → Set s
toSet m@(DFA δ _ _) = foldMap (\(q, σ) → bool (∅) (singleton σ) (δ (q, σ) ∈ useful m)) (useful m × sigma m)
dot ∷ (Finite s) ⇒ DFA Ordering s
dot = fromSet asSet
Convert an NFA with multiple start states to a DFA ( performs determinization )
fromFA ∷ (Finite q) ⇒ FA.FA q s → DFA (Set q) s
fromFA m@(FA.FA δ i f) = DFA (\(states, σ) → foldMap (\q → δ (q, σ)) states) i (Set.filter (Common.intersects f) (powerSet (qs m)))
fromCDFA ∷ (Finite q, Finite s) ⇒ FA.FA q s → Maybe (DFA q s)
fromCDFA m@(FA.FA δ i f) | complete m
∧ deterministic m = Just (DFA (\(q, σ) → elemAt 0 (δ (q, σ))) (elemAt 0 i) f)
fromCDFA _ = Nothing
Take an NFA , and use subset construction to convert it to an equivalent DFA ( performs determinization )
fromNFA ∷ (Finite q) ⇒ NFA.NFA q s → DFA (Set q) s
(singleton q₀)
(Set.filter (Common.intersects f) (powerSet (qs m)))
Take an EFA and use ( slightly modded ( See ( 2 . ) page 77 , HMU ) ) subset construction
to generate an equivalent DFA by " eliminating " epsilon transitions
fromEFA ∷ (Finite q) ⇒ EFA.EFA q s → DFA (Set q) s
fromEFA = fromNFA . NFA.fromEFA
Take a DFA , d , and convert it to an NFA , n , such that ℒ(d ) = ℒ(n )
toNFA ∷ DFA q s → NFA.NFA q s
toNFA (DFA δ q₀ f) = NFA.NFA (singleton . δ) q₀ f
toNFAMin ∷ ∀ q s . (Ord q) ⇒ DFA q s → NFA.NFA q s
toNFAMin m@(DFA δ _ f) = (toNFA m) { NFA.delta = δ₁ }
where
δ₁ ∷ (q, s) → Set q
δ₁ (q, σ) = singleton (δ (q, σ))
toNFAShuffle ∷ ∀ q p s . (Ord q, Ord p) ⇒ DFA q s → DFA p s → NFA.NFA (q, p) s
toNFAShuffle (DFA δ₁ q₀ f₁) (DFA δ₂ p₀ f₂) = NFA.NFA δ (q₀, p₀) (f₁ × f₂)
where
δ ∷ ((q, p), s) → Set (q, p)
δ ((q, p), σ) = Set.singleton (δ₁ (q, σ), p )
∪ Set.singleton ( q , δ₂ (p, σ))
Take a DFA , d , and convert it to an EFA , e , such that ℒ(d ) = ℒ(e )
toEFA ∷ DFA q s → EFA.EFA q s
toEFA = NFA.toEFA . toNFA
A Second Course in Formal Languages and Automata Theory pg . 60
toEFACycle ∷ ∀ q s . (Finite q) ⇒ DFA q s → EFA.EFA (Either () (q, q, Bool)) s
toEFACycle m@(DFA δ q₀ f) = EFA.EFA δ₁ (Left ()) (Set.map (\q → Right (q, q, True)) (qs m))
where
δ₁ ∷ (Either () (q, q, Bool), Maybe s) → Set (Either () (q, q, Bool))
δ₁ (Left (), Nothing) = Set.map (\q → Right (q, q, False)) (qs m)
δ₁ (Right (q, p, False), Nothing) | q ∈ f = singleton (Right (q₀ , p, True))
δ₁ _ = (∅)
½ℒ = { w₁ | ∃ w₂ such that |w₁| = |w₂| ∧ w₁·w₂ ∈ ℒ ; w₁ ∈ Σ ★ , w₂ ∈ Σ ★ } .
A Second Course in Formal Languages and Automata Theory pg . 59
for all even length strings w ∈ ℒ , take the first half of w , producing ½ℒ
toEFAHalf ∷ ∀ q s . (Finite q, Finite s) ⇒ DFA q s → EFA.EFA (Either () (q, q, q)) s
toEFAHalf m@(DFA δ q₀ f) = EFA.EFA δ₁ (Left ()) (Set.map (\(q, qᶠ) → Right (q, q, qᶠ)) (qs m × f))
where
δ₁ ∷ (Either () (q, q, q), Maybe s) → Set (Either () (q, q, q))
δ₁ (Left (), Nothing) = Set.map (\q → Right (q, q₀, q)) (qs m)
δ₁ (Right (q, p, r), Just σ) = Set.map (\σ' → Right (q, δ (p, σ), δ (r, σ'))) (sigma m)
δ₁ _ = (∅)
toFA ∷ (Finite q) ⇒ DFA q s → FA.FA q s
toFA = NFA.toFA . toNFA
toDA ∷ (Ord q) ⇒ DFA q s → (DA.DA q s, q)
toDA (DFA δ q₀ f) = (DA.DA (Predicate (∈ f)) (curry δ), q₀)
fromDA ∷ (Finite q) ⇒ (DA.DA q s, q) → DFA q s
fromDA (DA.DA o t, q₀) = DFA (uncurry t) q₀ (predicateToSet o)
Convert a DFA to a Generalized Nondeterministic Finite Automaton with ε - transitions
δ(q₁ , σ ) = q₂ ⟺ δ'(q₁ , q₂ ) = σ
toGNFA ∷ ∀ q s . (Finite s, Ord q) ⇒ DFA q s → GNFA.GNFA q s
toGNFA m@(DFA δ q₀ f) = GNFA.GNFA δ'
where
δ' ∷ (Either Init q, Either Final q) → RE.RegExp s
Connect the new ( forced ) GNFA start state to q₀ with an ε .
δ' (Left (Init _), Right q₂) | q₂ == q₀ = RE.one
Connect the new ( forced ) GNFA final state to each element of f with an ε .
δ' (Right q₁, Left (Final _)) | q₁ ∈ f = RE.one
If and q₂ were connected ( often via multiple transitions ) in the DFA ,
by the union of said literals . If no transitions between q₁ and q₂ in DFA then , RE.zero .
δ' (Right q₁, Right q₂) = sumWith RE.Lit (Set.filter (\σ → δ (q₁, σ) == q₂) (sigma m))
Besides the explicitly given epsilon connections , no connections
δ' _ = RE.zero
toRE ∷ (Finite q, Finite s) ⇒ DFA q s → RE.RegExp s
toRE = GNFA.toRE . toGNFA
toLanguage ∷ (Finite q, Finite s) ⇒ DFA q s → Language.ℒ s
toLanguage = RE.toLanguage . toRE
|
b3ed54ff75ca0efb07b8da2ea2db5026bf97a6413c49de94356fe0445acf57e7 | chrisdone/prana | FFI.hs | # LANGUAGE ViewPatterns #
{-# LANGUAGE EmptyDataDeriving #-}
-- | FFI-related work.
module Prana.FFI where
import Data.Bifunctor
import Data.Function
import Data.List.NonEmpty (NonEmpty(..))
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Typeable
import Data.Validation
import qualified Module
import qualified Name
import qualified Outputable
import Prana.Rename
import Prana.Types
import qualified Type
data FFIError
= UnsupportedFFIType !Type.Type
| ExpectedFunType !Type.Type
| ExpectedConType !Type.Type
| NameResolveIssue !RenameFailure
| ExpectedUnboxedTuple !Type.Type !Name
| ExpectedUnboxedTupleType !Type.Type
| ExpectedStateRealWorld !Type.Type
| UnknownPrimFFIType !WiredInType
| InvalidFFIType !Name
deriving (Typeable)
instance Eq FFIError where (==) = on (==) show
instance Show FFIError where
show (ExpectedUnboxedTupleType ty) = "ExpectedUnboxedTupleType: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (UnsupportedFFIType ty) = Outputable.showSDocUnsafe (Outputable.ppr ty)
show (ExpectedFunType ty) =
"ExpectedFunType: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (ExpectedConType ty) =
"ExpectedConType: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (NameResolveIssue ty) = "NameResolveIssue: " ++ show ty
show (ExpectedUnboxedTuple typ ty) =
"ExpectedUnboxedTuple: " ++
Outputable.showSDocUnsafe (Outputable.ppr typ) ++ " " ++ show ty
show (ExpectedStateRealWorld ty) =
"ExpectedStateRealWorld: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (UnknownPrimFFIType x) = "UnknownPrimFFIType: " ++ show x
show (InvalidFFIType x) = "InvalidFFIType: " ++ show x
parseAcceptableFFIReturnType ::
Module.Module
-> Map Name WiredInType
-> Type.Type
-> Validation (NonEmpty FFIError) FFIReturnType
parseAcceptableFFIReturnType theModule wiredInTypes typ =
case Type.splitTyConApp_maybe typ of
Just (tyCon, Type.dropRuntimeRepArgs -> (stateRealWorld:ret)) -> do
bindValidation
(validationNel
(first NameResolveIssue (renameName theModule (Name.getName tyCon))))
(\name ->
case M.lookup name wiredInTypes of
Just (WiredIn_UnboxedTuple {}) ->
if Outputable.showSDocUnsafe (Outputable.ppr stateRealWorld) ==
"State# RealWorld"
then fmap
FFIUnboxedTupleOfStateRealWorldAnd
(maybe
(pure Nothing)
(fmap Just . parseFFIType theModule wiredInTypes)
(listToMaybe ret))
else Failure (pure (ExpectedStateRealWorld typ))
_ -> Failure (pure (ExpectedUnboxedTuple typ name)))
_ -> Failure (pure (ExpectedUnboxedTupleType typ))
parseFFIType ::
Module.Module
-> Map Name WiredInType
-> Type.Type
-> Validation (NonEmpty FFIError) FFIType
parseFFIType theModule wiredInTypes typ =
case Type.splitTyConApp_maybe typ of
Just (tyCon, Type.dropRuntimeRepArgs -> []) -> do
bindValidation
(validationNel
(first NameResolveIssue (renameName theModule (Name.getName tyCon))))
(\name ->
case M.lookup name wiredInTypes of
Just WiredIn_CharPrimTyConName -> pure FFI_Char
Just WiredIn_IntPrimTyConName -> pure FFI_Int
Just WiredIn_Int32PrimTyConName -> pure FFI_Int32
Just WiredIn_Int64PrimTyConName -> pure FFI_Int64
Just WiredIn_WordPrimTyConName -> pure FFI_Word
Just WiredIn_Word32PrimTyConName -> pure FFI_Word32
Just WiredIn_Word64PrimTyConName -> pure FFI_Word64
Just WiredIn_AddrPrimTyConName -> pure FFI_Addr
Just WiredIn_FloatPrimTyConName -> pure FFI_Float
Just WiredIn_DoublePrimTyConName -> pure FFI_Double
Just WiredIn_StablePtrPrimTyConName -> pure FFI_StablePtr
Just ty -> Failure (pure (UnknownPrimFFIType ty))
Nothing -> Failure (pure (InvalidFFIType name)))
Just (_, args) ->
error
("Didn't expect args... " ++
unlines (map (\x -> Outputable.showSDocUnsafe (Outputable.ppr x)) args))
_ -> Failure (pure (ExpectedUnboxedTupleType typ))
| null | https://raw.githubusercontent.com/chrisdone/prana/f2e45538937d326aff562b6d49296eaedd015662/prana-ghc/src/Prana/FFI.hs | haskell | # LANGUAGE EmptyDataDeriving #
| FFI-related work. | # LANGUAGE ViewPatterns #
module Prana.FFI where
import Data.Bifunctor
import Data.Function
import Data.List.NonEmpty (NonEmpty(..))
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Typeable
import Data.Validation
import qualified Module
import qualified Name
import qualified Outputable
import Prana.Rename
import Prana.Types
import qualified Type
data FFIError
= UnsupportedFFIType !Type.Type
| ExpectedFunType !Type.Type
| ExpectedConType !Type.Type
| NameResolveIssue !RenameFailure
| ExpectedUnboxedTuple !Type.Type !Name
| ExpectedUnboxedTupleType !Type.Type
| ExpectedStateRealWorld !Type.Type
| UnknownPrimFFIType !WiredInType
| InvalidFFIType !Name
deriving (Typeable)
instance Eq FFIError where (==) = on (==) show
instance Show FFIError where
show (ExpectedUnboxedTupleType ty) = "ExpectedUnboxedTupleType: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (UnsupportedFFIType ty) = Outputable.showSDocUnsafe (Outputable.ppr ty)
show (ExpectedFunType ty) =
"ExpectedFunType: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (ExpectedConType ty) =
"ExpectedConType: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (NameResolveIssue ty) = "NameResolveIssue: " ++ show ty
show (ExpectedUnboxedTuple typ ty) =
"ExpectedUnboxedTuple: " ++
Outputable.showSDocUnsafe (Outputable.ppr typ) ++ " " ++ show ty
show (ExpectedStateRealWorld ty) =
"ExpectedStateRealWorld: " ++ Outputable.showSDocUnsafe (Outputable.ppr ty)
show (UnknownPrimFFIType x) = "UnknownPrimFFIType: " ++ show x
show (InvalidFFIType x) = "InvalidFFIType: " ++ show x
parseAcceptableFFIReturnType ::
Module.Module
-> Map Name WiredInType
-> Type.Type
-> Validation (NonEmpty FFIError) FFIReturnType
parseAcceptableFFIReturnType theModule wiredInTypes typ =
case Type.splitTyConApp_maybe typ of
Just (tyCon, Type.dropRuntimeRepArgs -> (stateRealWorld:ret)) -> do
bindValidation
(validationNel
(first NameResolveIssue (renameName theModule (Name.getName tyCon))))
(\name ->
case M.lookup name wiredInTypes of
Just (WiredIn_UnboxedTuple {}) ->
if Outputable.showSDocUnsafe (Outputable.ppr stateRealWorld) ==
"State# RealWorld"
then fmap
FFIUnboxedTupleOfStateRealWorldAnd
(maybe
(pure Nothing)
(fmap Just . parseFFIType theModule wiredInTypes)
(listToMaybe ret))
else Failure (pure (ExpectedStateRealWorld typ))
_ -> Failure (pure (ExpectedUnboxedTuple typ name)))
_ -> Failure (pure (ExpectedUnboxedTupleType typ))
parseFFIType ::
Module.Module
-> Map Name WiredInType
-> Type.Type
-> Validation (NonEmpty FFIError) FFIType
parseFFIType theModule wiredInTypes typ =
case Type.splitTyConApp_maybe typ of
Just (tyCon, Type.dropRuntimeRepArgs -> []) -> do
bindValidation
(validationNel
(first NameResolveIssue (renameName theModule (Name.getName tyCon))))
(\name ->
case M.lookup name wiredInTypes of
Just WiredIn_CharPrimTyConName -> pure FFI_Char
Just WiredIn_IntPrimTyConName -> pure FFI_Int
Just WiredIn_Int32PrimTyConName -> pure FFI_Int32
Just WiredIn_Int64PrimTyConName -> pure FFI_Int64
Just WiredIn_WordPrimTyConName -> pure FFI_Word
Just WiredIn_Word32PrimTyConName -> pure FFI_Word32
Just WiredIn_Word64PrimTyConName -> pure FFI_Word64
Just WiredIn_AddrPrimTyConName -> pure FFI_Addr
Just WiredIn_FloatPrimTyConName -> pure FFI_Float
Just WiredIn_DoublePrimTyConName -> pure FFI_Double
Just WiredIn_StablePtrPrimTyConName -> pure FFI_StablePtr
Just ty -> Failure (pure (UnknownPrimFFIType ty))
Nothing -> Failure (pure (InvalidFFIType name)))
Just (_, args) ->
error
("Didn't expect args... " ++
unlines (map (\x -> Outputable.showSDocUnsafe (Outputable.ppr x)) args))
_ -> Failure (pure (ExpectedUnboxedTupleType typ))
|
2298250a14b89b658a65123591b19679e8483ec6234d16a98a1509c00a273ef7 | sansarip/owlbear | utilities.cljs | (ns owlbear.parser.utilities
(:require [oops.core :refer [oget]]))
(defn ctx->children-seq
"Given a context,
returns a flattened, depth-first traversed, lazy sequence
of all of the context's children"
[ctx]
(tree-seq #(oget % :?children) #(oget % :children) ctx))
(defn ctx->parent-seq
"Given a context,
recursively traverses the context's parents and
returns a vector of all the context's parents"
[ctx]
(loop [parent-ctx (oget ctx :?parentCtx)
parent-ctx-coll []]
(if parent-ctx
(recur (oget parent-ctx :?parentCtx) (conj parent-ctx-coll parent-ctx))
parent-ctx-coll)))
(defn sibling-ctxs
"Given a context,
returns the sibling contexts for that context"
[ctx]
(rest (concat (oget ctx :?parentCtx.?children)
;; Accounts for scenarios where an HTML element context has a parent HTML elements context
(oget ctx :?parentCtx.?parentCtx.?children))))
(defn some-sibling-ctx
"Given a context (and optionally a parent context),
returns the first sibling context that fulfills the predicate function"
[pred ctx]
(some pred (sibling-ctxs ctx)))
(defn range-in-ctx?
"Given a context, a start offset, and a stop offset,
returns true if the context is within the given range (inclusive)"
([ctx start]
(range-in-ctx? ctx start start))
([ctx start stop]
(and start
stop
(let [ctx-start (oget ctx :?start.?start)
ctx-stop (oget ctx :?stop.?stop)]
(and
ctx-start
ctx-stop
(apply <=
(concat [ctx-start]
(range start (inc stop))
[ctx-stop])))))))
(defn filter-current-ctxs
"Given a list of contexts and a character offset,
a lazy sequence of only the contexts containing the given offset"
[offset ctxs]
(filter #(range-in-ctx? % offset) ctxs))
(defn ctx->current-ctxs
"Given a context and a character offset,
return a lazy sequence of the contexts containing the given offset"
[ctx offset]
(filter-current-ctxs offset (ctx->children-seq ctx)))
(defn forward-ctx?
"Given a context and a second context,
returns true if the second context is positionall ahead of the first context"
[ctx forward-ctx]
(< (oget ctx :?stop.?stop) (oget forward-ctx :?start.?start)))
| null | https://raw.githubusercontent.com/sansarip/owlbear/b25d46e3f401f5fee739889e5bc604f6b9c00c41/src/cljs/owlbear/parser/utilities.cljs | clojure | Accounts for scenarios where an HTML element context has a parent HTML elements context | (ns owlbear.parser.utilities
(:require [oops.core :refer [oget]]))
(defn ctx->children-seq
"Given a context,
returns a flattened, depth-first traversed, lazy sequence
of all of the context's children"
[ctx]
(tree-seq #(oget % :?children) #(oget % :children) ctx))
(defn ctx->parent-seq
"Given a context,
recursively traverses the context's parents and
returns a vector of all the context's parents"
[ctx]
(loop [parent-ctx (oget ctx :?parentCtx)
parent-ctx-coll []]
(if parent-ctx
(recur (oget parent-ctx :?parentCtx) (conj parent-ctx-coll parent-ctx))
parent-ctx-coll)))
(defn sibling-ctxs
"Given a context,
returns the sibling contexts for that context"
[ctx]
(rest (concat (oget ctx :?parentCtx.?children)
(oget ctx :?parentCtx.?parentCtx.?children))))
(defn some-sibling-ctx
"Given a context (and optionally a parent context),
returns the first sibling context that fulfills the predicate function"
[pred ctx]
(some pred (sibling-ctxs ctx)))
(defn range-in-ctx?
"Given a context, a start offset, and a stop offset,
returns true if the context is within the given range (inclusive)"
([ctx start]
(range-in-ctx? ctx start start))
([ctx start stop]
(and start
stop
(let [ctx-start (oget ctx :?start.?start)
ctx-stop (oget ctx :?stop.?stop)]
(and
ctx-start
ctx-stop
(apply <=
(concat [ctx-start]
(range start (inc stop))
[ctx-stop])))))))
(defn filter-current-ctxs
"Given a list of contexts and a character offset,
a lazy sequence of only the contexts containing the given offset"
[offset ctxs]
(filter #(range-in-ctx? % offset) ctxs))
(defn ctx->current-ctxs
"Given a context and a character offset,
return a lazy sequence of the contexts containing the given offset"
[ctx offset]
(filter-current-ctxs offset (ctx->children-seq ctx)))
(defn forward-ctx?
"Given a context and a second context,
returns true if the second context is positionall ahead of the first context"
[ctx forward-ctx]
(< (oget ctx :?stop.?stop) (oget forward-ctx :?start.?start)))
|
9de38fd7543fc72e3ab0ba533902550c7f1bf45cb5a496624c4fb1735fca08b1 | athos/symbol-analyzer | extraction_test.clj | (ns symbol-analyzer.extraction-test
(:require [symbol-analyzer.extraction :refer :all]
[clojure.test :refer :all]
[clojure.core.match :refer [match]]))
(defn- install-data-readers [n]
(letfn [(make-annotator [id]
(fn [form]
(vary-meta form assoc :id id)))]
(dotimes [i n]
(alter-var-root #'default-data-readers assoc (symbol (str '$ i)) (make-annotator i)))))
(install-data-readers 16)
(defmacro matches [expr pat]
`(match ~expr
~pat true
:else false))
(defmacro extracted [form expected]
(let [v (gensym 'v)]
`(let [~v (extract '~form :ns *ns* :symbol-id-key :id)]
~@(for [[id info] expected]
`(is (matches (get ~v ~id) ~info))))))
(deftest extract-from-symbol-test
(extracted
#$0 cons
{0 {:type :var}})
(extracted
#$0 clojure.lang.IFn
{0 {:type :class :class (_ :guard #(= % clojure.lang.IFn))}})
(extracted
#$0 String
{0 {:type :class :class (_ :guard #(= % java.lang.String))}}))
(deftest extract-from-collection-test
(extracted
[#$0 cons [#$1 cons]]
{0 {:type :var}})
(extracted
{#$0 key #$1 val}
{0 {:type :var}, 1 {:type :var}})
(extracted
#{#$0 map}
{0 {:type :var}}))
(deftest extract-from-application-test
(extracted
(#$0 map #$1 identity [1 2 3])
{0 {:type :var}, 1 {:type :var}})
(extracted
(#$0 filter (#$1 complement #$2 even?) (#$3 range 10))
{0 {:type :var}, 1 {:type :var}, 2 {:type :var}, 3 {:type :var}})
(extracted
((#$0 constantly 0) 1)
{0 {:type :var}}))
(deftest extract-from-if-test
(extracted
(#$0 if (#$1 not true)
(#$2 println 'yes)
(#$3 println 'no))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}, 3 {:type :var}}))
(deftest extract-from-do-test
(extracted
(#$0 do (#$1 println 'foo)
(#$2 println 'bar))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}}))
(deftest extract-from-quote-test
(extracted
'(#$0 foo (#$1 bar))
{0 {:type :quoted}, 1 {:type :quoted}})
(extracted
'(#$0 let [#$1 x 2] (#$2 * #$3 x 3))
{0 {:type :quoted}, 1 {:type :quoted},
2 {:type :quoted}, 3 {:type :quoted}})
(extracted
'[#$0 foo [#$1 bar]]
{0 {:type :quoted}, 1 {:type :quoted}})
(extracted
'{#$0 foo {#$1 bar #$2 baz}}
{0 {:type :quoted}, 1 {:type :quoted}, 2 {:type :quoted}})
(extracted
'#{#$0 foo #{#$1 bar}}
{0 {:type :quoted}, 1 {:type :quoted}}))
(deftest extract-from-def-test
(extracted
(#$0 def #$1 foo)
{0 {:type :special}, 1 {:type :var}})
(extracted
(#$0 def #$1 foo (#$2 inc 0))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}})
#_(extracted
(def f (fn [] #$0 f))
{0 {:type :var}}))
(deftest extract-from-let-test
(extracted
(#$0 let [#$1 x (#$2 inc 0)
#$3 y (#$4 * #$5 x 2)]
(#$6 * #$7 y 2))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :var}, 3 {:type :local}
4 {:type :var}, 5 {:type :local}, 6 {:type :var}, 7 {:type :local}}))
(deftest extract-from-loop-recur-test
(extracted
(#$0 loop [#$1 x (#$2 inc 0)
#$3 y (#$4 * #$5 x 2)]
(#$6 recur (#$7 inc #$8 x) (#$9 * #$10 y 2)))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :var}, 3 {:type :local}
4 {:type :var}, 5 {:type :local}, 6 {:type :special}, 7 {:type :var}
8 {:type :local}, 9 {:type :var}, 10 {:type :local}}))
(deftest extract-from-fn-test
(extracted
(#$0 fn [#$1 x] (#$2 * #$3 x 2))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :var}, 3 {:type :local}})
(extracted
(fn [#$0 x #$1 & #$2 y] [#$3 x #$4 y])
{0 {:type :local}, 1 nil, 2 {:type :local}, 3 {:type :local},
4 {:type :local}})
(extracted
(fn [[#$0 x #$1 & #$2 y]] [#$3 x #$4 y])
{0 {:type :local}, 1 nil, 2 {:type :local}, 3 {:type :local},
4 {:type :local}})
(extracted
(fn #$0 f [#$1 x] (#$2 f #$3 x))
{0 {:type :local}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}})
(extracted
(fn ([#$0 x] (* #$1 x 2))
([#$2 x #$3 y] (* #$4 x #$5 y)))
{0 {:type :local}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}
4 {:type :local}, 5 {:type :local}})
(extracted
(fn #$0 f
([#$1 x] (#$2 f #$3 x 2))
([#$4 x #$5 y] (#$6 f #$7 x #$8 y)))
{0 {:type :local}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}
4 {:type :local}, 5 {:type :local}, 6 {:type :local}, 7 {:type :local}
8 {:type :local}}))
(deftest extract-from-letfn-test
(extracted
(#$0 letfn [(#$1 f [#$2 x] (#$3 g #$4 x))
(#$5 g [#$6 x] (#$7 f #$8 x))]
(#$9 f (#$10 g (#$11 inc 0))))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}
4 {:type :local}, 5 {:type :local}, 6 {:type :local}, 7 {:type :local}
8 {:type :local}, 9 {:type :local}, 10 {:type :local}, 11 {:type :var}}))
(deftest extract-from-set!-test
(extracted
(#$0 set! #$1 *warn-on-reflection* (#$2 not false))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}}))
(deftest extract-from-var-test
(extracted
#'#$0 cons
{0 {:type :var}})
(extracted
#'#$0 no-such-var-found
{0 nil}))
(deftest extract-from-throw-test
(extracted
(#$0 throw (#$1 ex-info "error!" {}))
{0 {:type :special}, 1 {:type :var}}))
(deftest extract-from-try-catch-finally-test
(extracted
(#$0 try
(#$1 println "foo")
(#$2 catch #$3 IllegalStateException #$4 e
(#$5 println #$6 e))
(#$7 catch #$8 Exception #$9 e
(#$10 println #$11 e))
(#$12 finally
(#$13 println "bar")))
{0 {:type :special}, 1 {:type :var}, 2 {:type :special}, 3 {:type :class}
4 {:type :local}, 5 {:type :var}, 6 {:type :local}, 7 {:type :special}
8 {:type :class}, 9 {:type :local}, 10 {:type :var}, 11 {:type :local}
12 {:type :special}, 13 {:type :var}}))
(deftest extract-from-monitor-enter-exit-test
(extracted
(let [o (new Object)]
(try
(#$0 monitor-enter #$1 o)
(finally
(#$2 monitor-exit #$3 o))))
{0 {:type :special}, 1 {:type :local}, 2 {:type :special}, 3 {:type :local}}))
(deftest extract-from-import-test
(extracted
(#$0 import '#$1 java.io.Reader)
{0 {:type :macro}, 1 nil}))
(deftest extract-from-new-test
(extracted
(#$0 new #$1 Integer (#$2 inc 0))
{0 {:type :special}, 1 {:type :class}, 2 {:type :var}}))
(deftest extract-from-dot-test
(extracted
(#$0 . #$1 System #$2 out)
{0 {:type :special}, 1 {:type :class}, 2 {:type :member}})
(extracted
(let [s "foo"]
(. #$0 s #$1 substring 0 (#$2 inc 0)))
{0 {:type :local}, 1 {:type :member}, 2 {:type :var}})
(extracted
(let [s "42"]
(. #$0 Long (#$1 parseLong #$2 s)))
{0 {:type :class}, 1 {:type :member}, 2 {:type :local}})
#_(extracted
(let [Integer "foo"]
(. #$0 Integer #$1 valueOf 1))
{0 {:type :class}, 1 {:type :member}}))
(deftest extract-from-java-interop-test
(extracted
(#$0 String. "foo")
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(#$0 Long/parseLong "42")
{0 {:type :member :class (_ :guard #(= % java.lang.Long))}})
(extracted
#$0 System/out
{0 {:type :member :class (_ :guard #(= % java.lang.System))}})
(extracted
(#$0 .member x 42)
{0 {:type :member}}))
(deftest extract-from-case-test
(extracted
(#$0 case '#$1 bar
#$2 foo (#$3 inc 0)
(#$4 bar #$5 baz) (#$6 dec 0)
(#$7 * 2 2))
{0 {:type :macro}, 1 {:type :quoted}, 2 {:type :quoted},
3 {:type :var}, 4 {:type :quoted}, 5 {:type :quoted},
6 {:type :var}, 7 {:type :var}})
(extracted
(case '[foo bar]
[#$0 foo #$1 bar] :vector
{#$2 foo #$3 bar} :map
#{#$4 foo #$5 bar} :set)
{0 {:type :quoted}, 1 {:type :quoted}, 2 {:type :quoted},
3 {:type :quoted}, 4 {:type :quoted}, 5 {:type :quoted}}))
(deftest extract-from-reify-test
(extracted
(#$0 reify
#$1 Runnable
(#$2 run [#$3 this] (#$4 println #$5 this))
#$6 clojure.lang.IFn
(#$7 invoke [#$8 this #$9 x] (#$10 inc #$11 x)))
{0 {:type :macro}, 1 {:type :class}, 2 {:type :member}, 3 {:type :local}
4 {:type :var}, 5 {:type :local}, 6 {:type :class}, 7 {:type :member}
8 {:type :local}, 9 {:type :local}, 10 {:type :var}, 11 {:type :local}}))
kludge for extraction from deftype form without errors
(in-ns 'user)
(deftype T [x y])
(in-ns 'symbol-analyzer.extraction-test)
(deftest extract-from-deftype-test
(extracted
(#$0 deftype #$1 T [#$2 x #$3 y]
#$4 Runnable
(#$5 run [#$6 this] (#$7 println #$8 this #$9 x))
#$10 clojure.lang.IFn
(#$11 invoke [#$12 this #$13 x] [#$14 x #$15 y]))
{0 {:type :macro}, 1 {:type :class}, 2 {:type :field}, 3 {:type :field}
4 {:type :class}, 5 {:type :member}, 6 {:type :local}, 7 {:type :var}
8 {:type :local}, 9 {:type :local}, 10 {:type :class}, 11 {:type :member}
12 {:type :local}, 13 {:type :local}, 14 {:type :local}, 15 {:type :local}}))
(deftest extract-from-meta-test
(extracted
^#$0 String x
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
^{:tag #$0 String} x
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(def ^#$0 String x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(def ^#$0 String x "hoge")
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(let [^#$0 String x "hoge"] x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(loop [^#$0 String x "hoge"] x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(fn ^#$0 String [^#$1 String x] x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}
1 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(reify
clojure.lang.IFn
(^#$0 Object invoke [this ^#$1 Object x] ^#$2 Object x))
{0 {:type :class :class (_ :guard #(= % java.lang.Object))}
1 {:type :class :class (_ :guard #(= % java.lang.Object))}
2 {:type :class :class (_ :guard #(= % java.lang.Object))}}))
| null | https://raw.githubusercontent.com/athos/symbol-analyzer/99c7090f5fda52ccd40404c8823614ea343e5a51/test/symbol_analyzer/extraction_test.clj | clojure | (ns symbol-analyzer.extraction-test
(:require [symbol-analyzer.extraction :refer :all]
[clojure.test :refer :all]
[clojure.core.match :refer [match]]))
(defn- install-data-readers [n]
(letfn [(make-annotator [id]
(fn [form]
(vary-meta form assoc :id id)))]
(dotimes [i n]
(alter-var-root #'default-data-readers assoc (symbol (str '$ i)) (make-annotator i)))))
(install-data-readers 16)
(defmacro matches [expr pat]
`(match ~expr
~pat true
:else false))
(defmacro extracted [form expected]
(let [v (gensym 'v)]
`(let [~v (extract '~form :ns *ns* :symbol-id-key :id)]
~@(for [[id info] expected]
`(is (matches (get ~v ~id) ~info))))))
(deftest extract-from-symbol-test
(extracted
#$0 cons
{0 {:type :var}})
(extracted
#$0 clojure.lang.IFn
{0 {:type :class :class (_ :guard #(= % clojure.lang.IFn))}})
(extracted
#$0 String
{0 {:type :class :class (_ :guard #(= % java.lang.String))}}))
(deftest extract-from-collection-test
(extracted
[#$0 cons [#$1 cons]]
{0 {:type :var}})
(extracted
{#$0 key #$1 val}
{0 {:type :var}, 1 {:type :var}})
(extracted
#{#$0 map}
{0 {:type :var}}))
(deftest extract-from-application-test
(extracted
(#$0 map #$1 identity [1 2 3])
{0 {:type :var}, 1 {:type :var}})
(extracted
(#$0 filter (#$1 complement #$2 even?) (#$3 range 10))
{0 {:type :var}, 1 {:type :var}, 2 {:type :var}, 3 {:type :var}})
(extracted
((#$0 constantly 0) 1)
{0 {:type :var}}))
(deftest extract-from-if-test
(extracted
(#$0 if (#$1 not true)
(#$2 println 'yes)
(#$3 println 'no))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}, 3 {:type :var}}))
(deftest extract-from-do-test
(extracted
(#$0 do (#$1 println 'foo)
(#$2 println 'bar))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}}))
(deftest extract-from-quote-test
(extracted
'(#$0 foo (#$1 bar))
{0 {:type :quoted}, 1 {:type :quoted}})
(extracted
'(#$0 let [#$1 x 2] (#$2 * #$3 x 3))
{0 {:type :quoted}, 1 {:type :quoted},
2 {:type :quoted}, 3 {:type :quoted}})
(extracted
'[#$0 foo [#$1 bar]]
{0 {:type :quoted}, 1 {:type :quoted}})
(extracted
'{#$0 foo {#$1 bar #$2 baz}}
{0 {:type :quoted}, 1 {:type :quoted}, 2 {:type :quoted}})
(extracted
'#{#$0 foo #{#$1 bar}}
{0 {:type :quoted}, 1 {:type :quoted}}))
(deftest extract-from-def-test
(extracted
(#$0 def #$1 foo)
{0 {:type :special}, 1 {:type :var}})
(extracted
(#$0 def #$1 foo (#$2 inc 0))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}})
#_(extracted
(def f (fn [] #$0 f))
{0 {:type :var}}))
(deftest extract-from-let-test
(extracted
(#$0 let [#$1 x (#$2 inc 0)
#$3 y (#$4 * #$5 x 2)]
(#$6 * #$7 y 2))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :var}, 3 {:type :local}
4 {:type :var}, 5 {:type :local}, 6 {:type :var}, 7 {:type :local}}))
(deftest extract-from-loop-recur-test
(extracted
(#$0 loop [#$1 x (#$2 inc 0)
#$3 y (#$4 * #$5 x 2)]
(#$6 recur (#$7 inc #$8 x) (#$9 * #$10 y 2)))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :var}, 3 {:type :local}
4 {:type :var}, 5 {:type :local}, 6 {:type :special}, 7 {:type :var}
8 {:type :local}, 9 {:type :var}, 10 {:type :local}}))
(deftest extract-from-fn-test
(extracted
(#$0 fn [#$1 x] (#$2 * #$3 x 2))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :var}, 3 {:type :local}})
(extracted
(fn [#$0 x #$1 & #$2 y] [#$3 x #$4 y])
{0 {:type :local}, 1 nil, 2 {:type :local}, 3 {:type :local},
4 {:type :local}})
(extracted
(fn [[#$0 x #$1 & #$2 y]] [#$3 x #$4 y])
{0 {:type :local}, 1 nil, 2 {:type :local}, 3 {:type :local},
4 {:type :local}})
(extracted
(fn #$0 f [#$1 x] (#$2 f #$3 x))
{0 {:type :local}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}})
(extracted
(fn ([#$0 x] (* #$1 x 2))
([#$2 x #$3 y] (* #$4 x #$5 y)))
{0 {:type :local}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}
4 {:type :local}, 5 {:type :local}})
(extracted
(fn #$0 f
([#$1 x] (#$2 f #$3 x 2))
([#$4 x #$5 y] (#$6 f #$7 x #$8 y)))
{0 {:type :local}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}
4 {:type :local}, 5 {:type :local}, 6 {:type :local}, 7 {:type :local}
8 {:type :local}}))
(deftest extract-from-letfn-test
(extracted
(#$0 letfn [(#$1 f [#$2 x] (#$3 g #$4 x))
(#$5 g [#$6 x] (#$7 f #$8 x))]
(#$9 f (#$10 g (#$11 inc 0))))
{0 {:type :macro}, 1 {:type :local}, 2 {:type :local}, 3 {:type :local}
4 {:type :local}, 5 {:type :local}, 6 {:type :local}, 7 {:type :local}
8 {:type :local}, 9 {:type :local}, 10 {:type :local}, 11 {:type :var}}))
(deftest extract-from-set!-test
(extracted
(#$0 set! #$1 *warn-on-reflection* (#$2 not false))
{0 {:type :special}, 1 {:type :var}, 2 {:type :var}}))
(deftest extract-from-var-test
(extracted
#'#$0 cons
{0 {:type :var}})
(extracted
#'#$0 no-such-var-found
{0 nil}))
(deftest extract-from-throw-test
(extracted
(#$0 throw (#$1 ex-info "error!" {}))
{0 {:type :special}, 1 {:type :var}}))
(deftest extract-from-try-catch-finally-test
(extracted
(#$0 try
(#$1 println "foo")
(#$2 catch #$3 IllegalStateException #$4 e
(#$5 println #$6 e))
(#$7 catch #$8 Exception #$9 e
(#$10 println #$11 e))
(#$12 finally
(#$13 println "bar")))
{0 {:type :special}, 1 {:type :var}, 2 {:type :special}, 3 {:type :class}
4 {:type :local}, 5 {:type :var}, 6 {:type :local}, 7 {:type :special}
8 {:type :class}, 9 {:type :local}, 10 {:type :var}, 11 {:type :local}
12 {:type :special}, 13 {:type :var}}))
(deftest extract-from-monitor-enter-exit-test
(extracted
(let [o (new Object)]
(try
(#$0 monitor-enter #$1 o)
(finally
(#$2 monitor-exit #$3 o))))
{0 {:type :special}, 1 {:type :local}, 2 {:type :special}, 3 {:type :local}}))
(deftest extract-from-import-test
(extracted
(#$0 import '#$1 java.io.Reader)
{0 {:type :macro}, 1 nil}))
(deftest extract-from-new-test
(extracted
(#$0 new #$1 Integer (#$2 inc 0))
{0 {:type :special}, 1 {:type :class}, 2 {:type :var}}))
(deftest extract-from-dot-test
(extracted
(#$0 . #$1 System #$2 out)
{0 {:type :special}, 1 {:type :class}, 2 {:type :member}})
(extracted
(let [s "foo"]
(. #$0 s #$1 substring 0 (#$2 inc 0)))
{0 {:type :local}, 1 {:type :member}, 2 {:type :var}})
(extracted
(let [s "42"]
(. #$0 Long (#$1 parseLong #$2 s)))
{0 {:type :class}, 1 {:type :member}, 2 {:type :local}})
#_(extracted
(let [Integer "foo"]
(. #$0 Integer #$1 valueOf 1))
{0 {:type :class}, 1 {:type :member}}))
(deftest extract-from-java-interop-test
(extracted
(#$0 String. "foo")
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(#$0 Long/parseLong "42")
{0 {:type :member :class (_ :guard #(= % java.lang.Long))}})
(extracted
#$0 System/out
{0 {:type :member :class (_ :guard #(= % java.lang.System))}})
(extracted
(#$0 .member x 42)
{0 {:type :member}}))
(deftest extract-from-case-test
(extracted
(#$0 case '#$1 bar
#$2 foo (#$3 inc 0)
(#$4 bar #$5 baz) (#$6 dec 0)
(#$7 * 2 2))
{0 {:type :macro}, 1 {:type :quoted}, 2 {:type :quoted},
3 {:type :var}, 4 {:type :quoted}, 5 {:type :quoted},
6 {:type :var}, 7 {:type :var}})
(extracted
(case '[foo bar]
[#$0 foo #$1 bar] :vector
{#$2 foo #$3 bar} :map
#{#$4 foo #$5 bar} :set)
{0 {:type :quoted}, 1 {:type :quoted}, 2 {:type :quoted},
3 {:type :quoted}, 4 {:type :quoted}, 5 {:type :quoted}}))
(deftest extract-from-reify-test
(extracted
(#$0 reify
#$1 Runnable
(#$2 run [#$3 this] (#$4 println #$5 this))
#$6 clojure.lang.IFn
(#$7 invoke [#$8 this #$9 x] (#$10 inc #$11 x)))
{0 {:type :macro}, 1 {:type :class}, 2 {:type :member}, 3 {:type :local}
4 {:type :var}, 5 {:type :local}, 6 {:type :class}, 7 {:type :member}
8 {:type :local}, 9 {:type :local}, 10 {:type :var}, 11 {:type :local}}))
kludge for extraction from deftype form without errors
(in-ns 'user)
(deftype T [x y])
(in-ns 'symbol-analyzer.extraction-test)
(deftest extract-from-deftype-test
(extracted
(#$0 deftype #$1 T [#$2 x #$3 y]
#$4 Runnable
(#$5 run [#$6 this] (#$7 println #$8 this #$9 x))
#$10 clojure.lang.IFn
(#$11 invoke [#$12 this #$13 x] [#$14 x #$15 y]))
{0 {:type :macro}, 1 {:type :class}, 2 {:type :field}, 3 {:type :field}
4 {:type :class}, 5 {:type :member}, 6 {:type :local}, 7 {:type :var}
8 {:type :local}, 9 {:type :local}, 10 {:type :class}, 11 {:type :member}
12 {:type :local}, 13 {:type :local}, 14 {:type :local}, 15 {:type :local}}))
(deftest extract-from-meta-test
(extracted
^#$0 String x
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
^{:tag #$0 String} x
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(def ^#$0 String x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(def ^#$0 String x "hoge")
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(let [^#$0 String x "hoge"] x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(loop [^#$0 String x "hoge"] x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(fn ^#$0 String [^#$1 String x] x)
{0 {:type :class :class (_ :guard #(= % java.lang.String))}
1 {:type :class :class (_ :guard #(= % java.lang.String))}})
(extracted
(reify
clojure.lang.IFn
(^#$0 Object invoke [this ^#$1 Object x] ^#$2 Object x))
{0 {:type :class :class (_ :guard #(= % java.lang.Object))}
1 {:type :class :class (_ :guard #(= % java.lang.Object))}
2 {:type :class :class (_ :guard #(= % java.lang.Object))}}))
| |
53e6cc711f8a8944a14cacbceb772b0a55b336e74fc7b8366cc20ddd240a6ab2 | mthbernardes/shaggy-rogers | lambda_test.clj | (ns shaggy-rogers.middleware.lambda-test
(:require [clojure.test :refer :all]
[shaggy-rogers.middleware.lambda :as lambda])
(:import (java.util HashMap)))
(deftest ->cljmap
(testing "testing ->cljmap conversion"
(is (= {:first-key "value"
:second-key ["value1"
"value2"]}
(lambda/->cljmap (HashMap. {"first-key" "value" "second-key" ["value1" "value2"]}))))))
| null | https://raw.githubusercontent.com/mthbernardes/shaggy-rogers/aa100bf81ec142503f69882aa811ef15fae4f027/test/shaggy_rogers/middleware/lambda_test.clj | clojure | (ns shaggy-rogers.middleware.lambda-test
(:require [clojure.test :refer :all]
[shaggy-rogers.middleware.lambda :as lambda])
(:import (java.util HashMap)))
(deftest ->cljmap
(testing "testing ->cljmap conversion"
(is (= {:first-key "value"
:second-key ["value1"
"value2"]}
(lambda/->cljmap (HashMap. {"first-key" "value" "second-key" ["value1" "value2"]}))))))
| |
7aa2b0af1351784dfedb59c8a55b7b77b25196b00bc21234b52072b6d2cd7289 | BBVA/ust2dsa | Parser.hs | |
Copyright 2020 Banco Bilbao Vizcaya Argentaria , S.A.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2020 Banco Bilbao Vizcaya Argentaria, S.A.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Text.UbuntuSecurityTracker.CVE.Parser
( cveParser
, parseWithErrors
) where
import Text.UbuntuSecurityTracker.CVE.ParserImpl (cveParser, parseWithErrors)
| null | https://raw.githubusercontent.com/BBVA/ust2dsa/0688651f77ebda312dac76ae6a1901357ab12907/src/Text/UbuntuSecurityTracker/CVE/Parser.hs | haskell | |
Copyright 2020 Banco Bilbao Vizcaya Argentaria , S.A.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2020 Banco Bilbao Vizcaya Argentaria, S.A.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Text.UbuntuSecurityTracker.CVE.Parser
( cveParser
, parseWithErrors
) where
import Text.UbuntuSecurityTracker.CVE.ParserImpl (cveParser, parseWithErrors)
| |
20f9f7e2cce28fd5e6f87043b00c68434d235d41ee8000ef9ad96f999a7b5c1b | swlkr/majestic-web | project.clj | (defproject {{name}} "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:min-lein-version "2.6.1"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/tools.namespace "0.2.11"]
[ring/ring-core "1.5.0"]
[ring/ring-devel "1.5.0"]
[ring/ring-defaults "0.2.3"]
[org.postgresql/postgresql "9.4-1201-jdbc41"]
[ragtime/ragtime.jdbc "0.6.3"]
[ring/ring-mock "0.3.0"]
[compojure "1.5.1"]
[http-kit "2.2.0"]
[yesql "0.5.3"]
[environ "1.1.0"]
[cheshire "5.7.0"]
[hiccup "1.0.5"]
[buddy "1.3.0"]]
:plugins [[lein-environ "1.0.3"]]
:main {{name}}.core
:source-paths ["src"]
:test-paths ["test"]
:aliases {"db/migrate" ["run" "-m" "{{name}}.migrations/migrate"]
"db/rollback" ["run" "-m" "{{name}}.migrations/rollback"]
"db/migration" ["run" "-m" "{{name}}.migrations/create"]
"db/crud" ["run" "-m" "{{name}}.migrations/crud"]}
:profiles {:uberjar {:aot :all
:uberjar-name "{{name}}.jar"}})
| null | https://raw.githubusercontent.com/swlkr/majestic-web/3d247f6b5ccc4ff3662a64544a4ca27beada971b/resources/leiningen/new/majestic_web/project.clj | clojure | (defproject {{name}} "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:min-lein-version "2.6.1"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/tools.namespace "0.2.11"]
[ring/ring-core "1.5.0"]
[ring/ring-devel "1.5.0"]
[ring/ring-defaults "0.2.3"]
[org.postgresql/postgresql "9.4-1201-jdbc41"]
[ragtime/ragtime.jdbc "0.6.3"]
[ring/ring-mock "0.3.0"]
[compojure "1.5.1"]
[http-kit "2.2.0"]
[yesql "0.5.3"]
[environ "1.1.0"]
[cheshire "5.7.0"]
[hiccup "1.0.5"]
[buddy "1.3.0"]]
:plugins [[lein-environ "1.0.3"]]
:main {{name}}.core
:source-paths ["src"]
:test-paths ["test"]
:aliases {"db/migrate" ["run" "-m" "{{name}}.migrations/migrate"]
"db/rollback" ["run" "-m" "{{name}}.migrations/rollback"]
"db/migration" ["run" "-m" "{{name}}.migrations/create"]
"db/crud" ["run" "-m" "{{name}}.migrations/crud"]}
:profiles {:uberjar {:aot :all
:uberjar-name "{{name}}.jar"}})
| |
974e2e8e941cfc02f6183fd760e9b10df974bd48cc879ab4919e70dcf86782bd | amosr/folderol | Folderol.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE TemplateHaskell #
module Bench.Audio.Folderol where
import Bench.Audio.Audio
import qualified Bench.Plumbing.Folderol as Plumbing
import qualified Folderol.Typed as Network
import qualified Folderol.Process as Process
import qualified Folderol.Splice as Splice
import qualified Folderol.Source as Source
import qualified Data.Vector.Unboxed as Unbox
runCompressor :: Unbox.Vector Double -> IO (Unbox.Vector Double)
runCompressor !xs = do
(ys,()) <- Plumbing.vectorAtMostIO (Unbox.length xs) $ \snkYs -> do
$$(Splice.fuse Splice.defaultFuseOptions $ do
x0 <- Network.source [|| Source.sourceOfVector xs ||]
squares <- Process.map [|| \x -> x * x ||] x0
avg <- Process.postscanl [|| expAvg ||] [||0||] squares
root <- Process.map [|| clipRoot ||] avg
out <- Process.zipWith [|| (*) ||] root x0
Network.sink out [|| snkYs ||])
return ys
runCompressorLop :: Unbox.Vector Double -> IO (Unbox.Vector Double)
runCompressorLop !xs = do
(ys,()) <- Plumbing.vectorAtMostIO (Unbox.length xs) $ \snkYs -> do
$$(Splice.fuse Splice.defaultFuseOptions $ do
x0 <- Network.source [|| Source.sourceOfVector xs ||]
x' <- Process.postscanl [|| lopass ||] [||0||] x0
squares <- Process.map [|| \x -> x * x ||] x'
avg <- Process.postscanl [|| expAvg ||] [||0||] squares
root <- Process.map [|| clipRoot ||] avg
out <- Process.zipWith [|| (*) ||] root x'
Network.sink out [|| snkYs ||])
return ys
| null | https://raw.githubusercontent.com/amosr/folderol/9b8c0cd30cfb798dadaa404cc66404765b1fc4fe/bench/Bench/Audio/Folderol.hs | haskell | # LANGUAGE BangPatterns # | # LANGUAGE TemplateHaskell #
module Bench.Audio.Folderol where
import Bench.Audio.Audio
import qualified Bench.Plumbing.Folderol as Plumbing
import qualified Folderol.Typed as Network
import qualified Folderol.Process as Process
import qualified Folderol.Splice as Splice
import qualified Folderol.Source as Source
import qualified Data.Vector.Unboxed as Unbox
runCompressor :: Unbox.Vector Double -> IO (Unbox.Vector Double)
runCompressor !xs = do
(ys,()) <- Plumbing.vectorAtMostIO (Unbox.length xs) $ \snkYs -> do
$$(Splice.fuse Splice.defaultFuseOptions $ do
x0 <- Network.source [|| Source.sourceOfVector xs ||]
squares <- Process.map [|| \x -> x * x ||] x0
avg <- Process.postscanl [|| expAvg ||] [||0||] squares
root <- Process.map [|| clipRoot ||] avg
out <- Process.zipWith [|| (*) ||] root x0
Network.sink out [|| snkYs ||])
return ys
runCompressorLop :: Unbox.Vector Double -> IO (Unbox.Vector Double)
runCompressorLop !xs = do
(ys,()) <- Plumbing.vectorAtMostIO (Unbox.length xs) $ \snkYs -> do
$$(Splice.fuse Splice.defaultFuseOptions $ do
x0 <- Network.source [|| Source.sourceOfVector xs ||]
x' <- Process.postscanl [|| lopass ||] [||0||] x0
squares <- Process.map [|| \x -> x * x ||] x'
avg <- Process.postscanl [|| expAvg ||] [||0||] squares
root <- Process.map [|| clipRoot ||] avg
out <- Process.zipWith [|| (*) ||] root x'
Network.sink out [|| snkYs ||])
return ys
|
9fe86e1f68c9436782ed0b9d3b1ea8a8fb6167958dd40211109dfaa6afc55c1c | robert-strandh/SICL | make-symbol-defun.lisp | (cl:in-package #:sicl-symbol)
(defun make-symbol (string)
(unless (stringp string)
(error 'type-error
:datum string
:expected-type 'string))
(make-instance 'symbol
:name string
:package nil))
| null | https://raw.githubusercontent.com/robert-strandh/SICL/837f2e8e436c42fba3e13f6d823136976a04e775/Code/Symbol/make-symbol-defun.lisp | lisp | (cl:in-package #:sicl-symbol)
(defun make-symbol (string)
(unless (stringp string)
(error 'type-error
:datum string
:expected-type 'string))
(make-instance 'symbol
:name string
:package nil))
| |
b74a634c199af99c2d641a35cce1a7c61bdb5694f090a11ed209c4bd997b8249 | brownplt/LambdaS5 | prelude.mli | type id = string
module Pos : sig
type t = Lexing.position * Lexing.position * bool (* start, end, is synthetic? *)
val dummy : t
val compare : t -> t -> int
val before : t -> t -> bool
val synth : t -> t
val synthetic : Lexing.position * Lexing.position -> t
val real : Lexing.position * Lexing.position -> t
val rangeToString : Lexing.position -> Lexing.position -> string
val string_of_pos : t -> string
val toLexPos : t -> Lexing.position * Lexing.position
val isSynthetic : t -> bool
val fname : t -> string
end
module IntSet : SetExt.S
with type elt = int
module IdSet : SetExt.S
with type elt = id
module IdHashtbl : Hashtbl.S
with type key = id
module PosSet : SetExt.S
with type elt = Pos.t
module PosMap : MapExt.S
with type key = Pos.t
module IdMap : MapExt.S
with type key = id
with type + ' a t = ' a IdMap.t
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a
val fold_right : ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b
val map : ('a -> 'b) -> 'a list -> 'b list
val second2 : ('b -> 'c) -> 'a * 'b -> 'a * 'c
val third3 : ('c -> 'd) -> 'a * 'b * 'c -> 'a * 'b * 'd
val snd3 : 'a * 'b * 'c -> 'b
val snd2 : 'a * 'b -> 'b
val fst2 : 'a * 'b -> 'a
val fst3 : 'a * 'b * 'c -> 'a
val thd3 : 'a * 'b * 'c -> 'c
val printf : ('a, out_channel, unit) format -> 'a
val eprintf : ('a, out_channel, unit) format -> 'a
val sprintf : ('a, unit, string) format -> 'a
val intersperse : 'a -> 'a list -> 'a list
val take_while : ('a -> bool) -> 'a list -> 'a list * 'a list
val match_while : ( 'a -> 'b option) -> 'a list -> 'b list * 'a list
* [ take n lst ] returns the first n elts of lst , or if there are less than n
* elts , lst .
* elts, lst. *)
val take : int -> 'a list -> 'a list
(** [nub lst] removes duplicates from the [lst]. Duplicates are identified by
structural equality. *)
val nub : 'a list -> 'a list
(** [iota n] returns the list of integers [0] through [n-1], inclusive. *)
val iota : int -> int list
val curry : ('a * 'b -> 'c) -> ('a -> 'b -> 'c)
val uncurry : ('a -> 'b -> 'c) -> ('a * 'b -> 'c)
Switches the order of args for a two - arg function
val flip : ('a -> 'b -> 'c) -> ('b -> 'a -> 'c)
* [ group cmp lst ] collects like elts of [ lst ] into lists using [ cmp ] to check equality .
* Returns a list of lists , where all like elts are in one sublist
* Returns a list of lists, where all like elts are in one sublist *)
val group : ('a -> 'a -> int) -> 'a list -> 'a list list
val list_of_option : 'a option -> 'a list
val null : 'a list -> bool
val last : 'a list -> 'a
Returns true if the second arg is substring of the first
val str_contains : string -> string -> bool
val identity : 'a -> 'a
val compose : ('a -> 'a) list -> 'a -> 'a
val apply : ('b -> 'a -> 'a) -> 'b list -> 'a -> 'a
val find_cycle : 'a -> ('a -> 'a option) -> ('a -> 'a -> bool) -> 'a list
val string_of_file : string -> string
| null | https://raw.githubusercontent.com/brownplt/LambdaS5/f0bf5c7baf1daa4ead4e398ba7d430bedb7de9cf/src/util/prelude.mli | ocaml | start, end, is synthetic?
* [nub lst] removes duplicates from the [lst]. Duplicates are identified by
structural equality.
* [iota n] returns the list of integers [0] through [n-1], inclusive. | type id = string
module Pos : sig
val dummy : t
val compare : t -> t -> int
val before : t -> t -> bool
val synth : t -> t
val synthetic : Lexing.position * Lexing.position -> t
val real : Lexing.position * Lexing.position -> t
val rangeToString : Lexing.position -> Lexing.position -> string
val string_of_pos : t -> string
val toLexPos : t -> Lexing.position * Lexing.position
val isSynthetic : t -> bool
val fname : t -> string
end
module IntSet : SetExt.S
with type elt = int
module IdSet : SetExt.S
with type elt = id
module IdHashtbl : Hashtbl.S
with type key = id
module PosSet : SetExt.S
with type elt = Pos.t
module PosMap : MapExt.S
with type key = Pos.t
module IdMap : MapExt.S
with type key = id
with type + ' a t = ' a IdMap.t
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a
val fold_right : ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b
val map : ('a -> 'b) -> 'a list -> 'b list
val second2 : ('b -> 'c) -> 'a * 'b -> 'a * 'c
val third3 : ('c -> 'd) -> 'a * 'b * 'c -> 'a * 'b * 'd
val snd3 : 'a * 'b * 'c -> 'b
val snd2 : 'a * 'b -> 'b
val fst2 : 'a * 'b -> 'a
val fst3 : 'a * 'b * 'c -> 'a
val thd3 : 'a * 'b * 'c -> 'c
val printf : ('a, out_channel, unit) format -> 'a
val eprintf : ('a, out_channel, unit) format -> 'a
val sprintf : ('a, unit, string) format -> 'a
val intersperse : 'a -> 'a list -> 'a list
val take_while : ('a -> bool) -> 'a list -> 'a list * 'a list
val match_while : ( 'a -> 'b option) -> 'a list -> 'b list * 'a list
* [ take n lst ] returns the first n elts of lst , or if there are less than n
* elts , lst .
* elts, lst. *)
val take : int -> 'a list -> 'a list
val nub : 'a list -> 'a list
val iota : int -> int list
val curry : ('a * 'b -> 'c) -> ('a -> 'b -> 'c)
val uncurry : ('a -> 'b -> 'c) -> ('a * 'b -> 'c)
Switches the order of args for a two - arg function
val flip : ('a -> 'b -> 'c) -> ('b -> 'a -> 'c)
* [ group cmp lst ] collects like elts of [ lst ] into lists using [ cmp ] to check equality .
* Returns a list of lists , where all like elts are in one sublist
* Returns a list of lists, where all like elts are in one sublist *)
val group : ('a -> 'a -> int) -> 'a list -> 'a list list
val list_of_option : 'a option -> 'a list
val null : 'a list -> bool
val last : 'a list -> 'a
Returns true if the second arg is substring of the first
val str_contains : string -> string -> bool
val identity : 'a -> 'a
val compose : ('a -> 'a) list -> 'a -> 'a
val apply : ('b -> 'a -> 'a) -> 'b list -> 'a -> 'a
val find_cycle : 'a -> ('a -> 'a option) -> ('a -> 'a -> bool) -> 'a list
val string_of_file : string -> string
|
ae2619cac93189955da816b3080659409c2b211c9d585263a6a40a068d897c61 | jaredloomis/Haskell-OpenGL | NewMatrix.hs | # LANGUAGE DataKinds #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TypeOperators #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
module Engine.Matrix.NewMatrix where
import Foreign.Storable (Storable(..))
import qualified Data.Vector.Storable as V
import GHC.TypeLits
import Numeric.LinearAlgebra.Static
newtype Matrix w h t = Matrix (L w h)
deriving (Num, Fractional, Floating)
newtype Vector l t = Vector (R l)
deriving (Num, Fractional, Floating)
instance (Sized t (R l) V.Vector) => Sized t (Vector l t) V.Vector where
konst = Vector . konst
unwrap (Vector r) = unwrap r
(<&>) :: (KnownNat w1, KnownNat w2, KnownNat h) =>
Matrix w1 h t -> Matrix w2 h t -> Matrix (w1+w2) h t
(<&>) (Matrix l1) (Matrix l2) = Matrix $ l1 —— l2
| null | https://raw.githubusercontent.com/jaredloomis/Haskell-OpenGL/5c7363bbc07c5064e49b608d689cda2cab99f3eb/src/Engine/Matrix/NewMatrix.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TypeOperators #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
module Engine.Matrix.NewMatrix where
import Foreign.Storable (Storable(..))
import qualified Data.Vector.Storable as V
import GHC.TypeLits
import Numeric.LinearAlgebra.Static
newtype Matrix w h t = Matrix (L w h)
deriving (Num, Fractional, Floating)
newtype Vector l t = Vector (R l)
deriving (Num, Fractional, Floating)
instance (Sized t (R l) V.Vector) => Sized t (Vector l t) V.Vector where
konst = Vector . konst
unwrap (Vector r) = unwrap r
(<&>) :: (KnownNat w1, KnownNat w2, KnownNat h) =>
Matrix w1 h t -> Matrix w2 h t -> Matrix (w1+w2) h t
(<&>) (Matrix l1) (Matrix l2) = Matrix $ l1 —— l2
| |
6e84e4cf8bb466555d7425d14ebd5b8581dadd39b7c3c27ade0aeb1e2565b1eb | AccelerateHS/accelerate-examples | Main.hs | # LANGUAGE ScopedTypeVariables , CPP , FlexibleContexts #
module Main where
import Data.Bits
import Properties
import Text.Printf
import Test.QuickCheck
import Data.Array.Accelerate
-- All tests, all element types (coffee time!)
--
main :: IO ()
main = do
mapM_ (\(s,t) -> printf "===> %s\n" s >> runTests t >> putStrLn "") $
[ ("Int", prop_integral (undefined :: Int) ++
prop_Int)
, ("Int8", prop_integral (undefined :: Int8))
, ("Int16", prop_integral (undefined :: Int16))
, ("Int32", prop_integral (undefined :: Int32))
, ("Int64", prop_integral (undefined :: Int64))
, ("Word", prop_integral (undefined :: Word))
, ("Word8", prop_integral (undefined :: Word8))
, ("Word16", prop_integral (undefined :: Word16))
, ("Word32", prop_integral (undefined :: Word32))
, ("Word64", prop_integral (undefined :: Word64))
, ("Float", prop_floating (undefined :: Float) ++
prop_Float)
, ("Double", prop_floating (undefined :: Double))
]
-- Execute a sequence of (name,test) pairs
--
runTests :: [(String, IO b)] -> IO ()
runTests tests = mapM_ (\(s,a) -> printf "%-25s: " s >> a) tests
-- The test sets
--
prop_integral :: forall a. (Integral a, Bits a, IsIntegral a, Elem a, Similar a, Arbitrary a, Arbitrary (Acc (Vector a)))
=> a -> [(String, IO ())]
prop_integral dummy =
[ test2 prop_Add
, test2 prop_Sub
, test2 prop_Mul
, test1 prop_Abs
, test1 prop_Negate
, test1 prop_Signum
, test2 prop_Quot
, test2 prop_Rem
, test2 prop_Idiv
, test2 prop_Mod
, test2 prop_Band
, test2 prop_BOr
, test2 prop_BXor
, test1 prop_BNot
, test1 prop_BShiftL
, test1 prop_BShiftR
, test1 prop_BRotateL
, test1 prop_BRotateR
, test2 prop_Lt
, test2 prop_Gt
, test2 prop_LtEq
, test2 prop_GtEq
, test2 prop_Eq
, test2 prop_NEq
, test2 prop_Min
, test2 prop_Max
]
++ prop_comps dummy
where
test1 (s,t) = (s, quickCheck (t :: [a] -> Property))
test2 (s,t) = (s, quickCheck (t :: [a] -> [a] -> Property))
prop_Int :: [(String, IO ())]
prop_Int =
[ test1 prop_intToFloat ]
where
test1 (s,t) = (s, quickCheck (t :: [Int] -> Property))
prop_floating :: forall a. (RealFrac a, IsFloating a, Elem a, Similar a, Arbitrary a, Arbitrary (Acc (Vector a)))
=> a -> [(String, IO ())]
prop_floating dummy =
[ test2 prop_Add
, test2 prop_Sub
, test2 prop_Mul
, test1 prop_Abs
, test1 prop_Negate
, test1 prop_Signum
, test2 prop_FDiv
, test1 prop_Recip
, test1 prop_Sin
, test1 prop_Cos
, test1 prop_Tan
, test1 prop_ASin
, test1 prop_ACos
, test1 prop_ATan
, test1 prop_ASinh
, test1 prop_ACosh
, test1 prop_ATanh
, test1 prop_Exp
, test1 prop_Sqrt
, test1 prop_Log
, test2 prop_Pow
, test2 prop_LogBase
, test2 prop_Lt
, test2 prop_Gt
, test2 prop_LtEq
, test2 prop_GtEq
, test2 prop_Eq
, test2 prop_NEq
, test2 prop_Min
, test2 prop_Max
]
++ prop_comps dummy
where
test1 (s,t) = (s, quickCheck (t :: [a] -> Property))
test2 (s,t) = (s, quickCheck (t :: [a] -> [a] -> Property))
prop_Float :: [(String, IO ())]
prop_Float =
[ test1 prop_roundFloatToInt
, test1 prop_truncateFloatToInt
]
where
test1 (s,t) = (s, quickCheck (t :: [Float] -> Property))
prop_comps :: forall a. (IsNum a, Ord a, Elem a, Similar a, Arbitrary a, Arbitrary (Acc (Vector a)))
=> a -> [(String, IO ())]
prop_comps _dummy =
[ test1 prop_Sum
, test1 prop_Product
, test1 prop_Minimum
, test1 prop_Maximum
, test1 prop_FoldSeg
, test2 prop_Zip
, testPair prop_FstUnzip
, testPair prop_SndUnzip
, test1 prop_Backpermute
, test1 prop_Scanl
, test1 prop_ScanlRdx
, test1 prop_Scanr
, test1 prop_ScanrRdx
, test1 prop_Square
, testSaxpy prop_Saxpy
, test2 prop_Dotp
, test1 prop_Filter
, testPair prop_MapAddPair
, testPair prop_ScanlPair
, testPair prop_ScanrPair
#ifdef ACCELERATE_CUDA_BACKEND
, ("arbitrary", quickCheck (test_arbitrary _dummy))
#endif
]
where
test1 (s,t) = (s, quickCheck (t :: [a] -> Property))
test2 (s,t) = (s, quickCheck (t :: [a] -> [a] -> Property))
testPair (s,t) = (s, quickCheck (t :: [(a,a)] -> Property)) -- mix and match types?
testSaxpy (s,t) = (s, quickCheck (t :: a -> [a] -> [a] -> Property))
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate-examples/a973ee423b5eadda6ef2e2504d2383f625e49821/examples/icebox/quickcheck-ast-old/Main.hs | haskell | All tests, all element types (coffee time!)
Execute a sequence of (name,test) pairs
The test sets
mix and match types? | # LANGUAGE ScopedTypeVariables , CPP , FlexibleContexts #
module Main where
import Data.Bits
import Properties
import Text.Printf
import Test.QuickCheck
import Data.Array.Accelerate
main :: IO ()
main = do
mapM_ (\(s,t) -> printf "===> %s\n" s >> runTests t >> putStrLn "") $
[ ("Int", prop_integral (undefined :: Int) ++
prop_Int)
, ("Int8", prop_integral (undefined :: Int8))
, ("Int16", prop_integral (undefined :: Int16))
, ("Int32", prop_integral (undefined :: Int32))
, ("Int64", prop_integral (undefined :: Int64))
, ("Word", prop_integral (undefined :: Word))
, ("Word8", prop_integral (undefined :: Word8))
, ("Word16", prop_integral (undefined :: Word16))
, ("Word32", prop_integral (undefined :: Word32))
, ("Word64", prop_integral (undefined :: Word64))
, ("Float", prop_floating (undefined :: Float) ++
prop_Float)
, ("Double", prop_floating (undefined :: Double))
]
runTests :: [(String, IO b)] -> IO ()
runTests tests = mapM_ (\(s,a) -> printf "%-25s: " s >> a) tests
prop_integral :: forall a. (Integral a, Bits a, IsIntegral a, Elem a, Similar a, Arbitrary a, Arbitrary (Acc (Vector a)))
=> a -> [(String, IO ())]
prop_integral dummy =
[ test2 prop_Add
, test2 prop_Sub
, test2 prop_Mul
, test1 prop_Abs
, test1 prop_Negate
, test1 prop_Signum
, test2 prop_Quot
, test2 prop_Rem
, test2 prop_Idiv
, test2 prop_Mod
, test2 prop_Band
, test2 prop_BOr
, test2 prop_BXor
, test1 prop_BNot
, test1 prop_BShiftL
, test1 prop_BShiftR
, test1 prop_BRotateL
, test1 prop_BRotateR
, test2 prop_Lt
, test2 prop_Gt
, test2 prop_LtEq
, test2 prop_GtEq
, test2 prop_Eq
, test2 prop_NEq
, test2 prop_Min
, test2 prop_Max
]
++ prop_comps dummy
where
test1 (s,t) = (s, quickCheck (t :: [a] -> Property))
test2 (s,t) = (s, quickCheck (t :: [a] -> [a] -> Property))
prop_Int :: [(String, IO ())]
prop_Int =
[ test1 prop_intToFloat ]
where
test1 (s,t) = (s, quickCheck (t :: [Int] -> Property))
prop_floating :: forall a. (RealFrac a, IsFloating a, Elem a, Similar a, Arbitrary a, Arbitrary (Acc (Vector a)))
=> a -> [(String, IO ())]
prop_floating dummy =
[ test2 prop_Add
, test2 prop_Sub
, test2 prop_Mul
, test1 prop_Abs
, test1 prop_Negate
, test1 prop_Signum
, test2 prop_FDiv
, test1 prop_Recip
, test1 prop_Sin
, test1 prop_Cos
, test1 prop_Tan
, test1 prop_ASin
, test1 prop_ACos
, test1 prop_ATan
, test1 prop_ASinh
, test1 prop_ACosh
, test1 prop_ATanh
, test1 prop_Exp
, test1 prop_Sqrt
, test1 prop_Log
, test2 prop_Pow
, test2 prop_LogBase
, test2 prop_Lt
, test2 prop_Gt
, test2 prop_LtEq
, test2 prop_GtEq
, test2 prop_Eq
, test2 prop_NEq
, test2 prop_Min
, test2 prop_Max
]
++ prop_comps dummy
where
test1 (s,t) = (s, quickCheck (t :: [a] -> Property))
test2 (s,t) = (s, quickCheck (t :: [a] -> [a] -> Property))
prop_Float :: [(String, IO ())]
prop_Float =
[ test1 prop_roundFloatToInt
, test1 prop_truncateFloatToInt
]
where
test1 (s,t) = (s, quickCheck (t :: [Float] -> Property))
prop_comps :: forall a. (IsNum a, Ord a, Elem a, Similar a, Arbitrary a, Arbitrary (Acc (Vector a)))
=> a -> [(String, IO ())]
prop_comps _dummy =
[ test1 prop_Sum
, test1 prop_Product
, test1 prop_Minimum
, test1 prop_Maximum
, test1 prop_FoldSeg
, test2 prop_Zip
, testPair prop_FstUnzip
, testPair prop_SndUnzip
, test1 prop_Backpermute
, test1 prop_Scanl
, test1 prop_ScanlRdx
, test1 prop_Scanr
, test1 prop_ScanrRdx
, test1 prop_Square
, testSaxpy prop_Saxpy
, test2 prop_Dotp
, test1 prop_Filter
, testPair prop_MapAddPair
, testPair prop_ScanlPair
, testPair prop_ScanrPair
#ifdef ACCELERATE_CUDA_BACKEND
, ("arbitrary", quickCheck (test_arbitrary _dummy))
#endif
]
where
test1 (s,t) = (s, quickCheck (t :: [a] -> Property))
test2 (s,t) = (s, quickCheck (t :: [a] -> [a] -> Property))
testSaxpy (s,t) = (s, quickCheck (t :: a -> [a] -> [a] -> Property))
|
9579296167ea9de4cf146b6af6f051b9d73bb2ac4b4569163f354af711d4a4c6 | osa1/StrictCore | Lint.hs | FIXME : This module needs some extra exports from CoreLint
module StrictCore.Lint
( lintCoreProgram
) where
--------------------------------------------------------------------------------
import Bag
import BasicTypes
import CoAxiom (Role (..))
import CoreLint hiding (lintExpr, lintSingleBinding, mkBadAltMsg, mkCaseAltMsg,
mkNewTyDataConAltMsg)
import qualified CoreSyn
import DataCon
import DynFlags
import ErrUtils
import Id
import Kind (classifiesTypeWithValues)
import Literal (literalType)
import Outputable
import TyCon
import TyCoRep
import Type
import TysWiredIn (mkTupleTy)
import StrictCore.Syntax
import Control.Monad (mapM_)
import Prelude hiding (id)
--------------------------------------------------------------------------------
-- | Returns (warnings, errors).
lintCoreProgram :: DynFlags -> [Var] -> [Bind] -> (Bag MsgDoc, Bag MsgDoc)
lintCoreProgram dflags in_scope binds
= initL dflags defaultLintFlags $
addLoc TopLevelBindings $
addInScopeVars in_scope $
addInScopeVars (bindersOfBinds binds) $
mapM_ lintBind binds
lintBind :: Bind -> LintM ()
lintBind (NonRec bndr rhs) = lintSingleBind bndr rhs
lintBind (Rec bs) = mapM_ (uncurry lintSingleBind) bs
--------------------------------------------------------------------------------
lintExpr :: Expr -> LintM Type
lintExpr (Var var)
= do var' <- lookupIdInScope var
return (idType var')
lintExpr (Lit lit)
= return (literalType lit)
lintExpr (MultiVal es)
= mkTupleTy Unboxed <$> mapM lintExpr es
lintExpr (Lam (ValBndrs as) body)
= addLoc (BodyOfLetRec as) $ -- FIXME: LambdaBodyOf wants an Id so can't use it here
lintBinders as $ \as' -> do
body_ty <- lintExpr body
return (FunTy (mkMultiValTy (map idType as')) body_ty)
lintExpr (Lam (TyBndr ty_var) body)
= addLoc (LambdaBodyOf ty_var) $
lintBinder ty_var $ \ty_var' -> do
body_ty <- lintExpr body
return (mkLamType ty_var' body_ty)
lintExpr (App fn args)
= do fun_ty <- lintExpr fn
lintApp fun_ty args
lintExpr (Eval bndrs rhs body)
= do rhs_ty <- lintExpr rhs
lintAndScopeIds bndrs $ \bndrs' -> do
let bndr_tys = mkMultiValTy (map idType bndrs')
ensureEqTys bndr_tys rhs_ty (mkEvalBndrsMsg bndr_tys rhs_ty bndrs rhs)
lintExpr body
lintExpr (Let (NonRec bndr rhs) body)
= do lintSingleBind bndr rhs
lintAndScopeId bndr $ \_ -> lintExpr body
lintExpr (Let (Rec binds) body)
= lintAndScopeIds (map fst binds) $ \_ -> do
mapM_ (uncurry lintSingleBind) binds
lintExpr body
lintExpr (Case scrt alt_ty alts)
= do scrt_ty <- lintAtom scrt
(alt_ty', _) <- lintInTy alt_ty
-- TODO: Run the tests if alts is empty.
-- Check the alternatives
mapM_ (lintAlt scrt_ty alt_ty') alts
return alt_ty'
lintExpr (Type ty)
= -- TODO: Not sure if this invariant still holds...
failWithL (text "Type found as expression" <+> ppr ty)
Copied from CoreLint with one line of change
lintExpr (Cast expr co)
= do expr_ty <- lintExpr expr
co' <- applySubstCo co
(_, k2, from_ty, to_ty, r) <- lintCoercion co'
lintL (classifiesTypeWithValues k2)
(text "Target of cast not # or *:" <+> ppr co)
lintRole co' Representational r
ensureEqTys from_ty expr_ty (mkCastErr expr co' from_ty expr_ty)
return to_ty
lintExpr (Coercion co)
= lintCoreExpr (CoreSyn.Coercion co)
lintSingleBind :: Id -> Expr -> LintM ()
lintSingleBind bndr val
= do ty <- lintValue val
lintIdBndr bndr (\_ -> return ()) -- Check match to RHS type
binder_ty <- applySubstTy (idType bndr)
ensureEqTys binder_ty ty (mkRhsMsg bndr (text "RHS") ty)
lintValue :: Expr -> LintM Type
lintValue = lintExpr -- TODO: check if expr is really a value
mkEvalBndrsMsg :: Type -> Type -> [Id] -> Expr -> MsgDoc
mkEvalBndrsMsg bndrs_ty expr_ty bndrs expr
= vcat [ text "Eval LHS and RHS types don't match",
text "LHS type:" <+> ppr bndrs_ty,
text "RHS type:" <+> ppr expr_ty,
text "Binders:" <+> ppr bndrs,
text "RHS:" <+> ppr expr ]
--------------------------------------------------------------------------------
lintAlt :: Type -> Type -> Alt -> LintM ()
lintAlt _ alt_ty (CoreSyn.DEFAULT, args, rhs)
= addLoc (RhsOf (head args)) $ do
lintL (null args) (mkDefaultArgsMsg args)
lintAltRhs alt_ty rhs
lintAlt scrt_ty alt_ty (CoreSyn.LitAlt lit, args, rhs)
= addLoc (BodyOfLetRec args) $ do
lintL (null args) (mkDefaultArgsMsg args)
let lit_ty = literalType lit
ensureEqTys lit_ty scrt_ty (mkBadPatMsg lit_ty scrt_ty)
lintAltRhs alt_ty rhs
lintAlt scrt_ty alt_ty alt@(CoreSyn.DataAlt con, args, rhs)
| isNewTyCon (dataConTyCon con)
= addErrL (mkNewTyDataConAltMsg scrt_ty alt)
| Just (tycon, tycon_arg_tys) <- splitTyConApp_maybe scrt_ty
= addLoc (BodyOfLetRec args) $ do
First instantiate the universally quantified
-- type variables of the data constructor we've already check
lintL (tycon == dataConTyCon con) (mkBadConMsg tycon con)
let con_payload_ty = piResultTys (dataConRepType con) tycon_arg_tys
-- And now bring the new binders into scope
lintBinders args $ \ args' -> do
FIXME : This line wo n't work , we need to translate DataCons to update
-- argument types.
-- lintAltBinders scrt_ty con_payload_ty args'
lintAltRhs alt_ty rhs
is wrong shape
= addErrL (mkBadAltMsg scrt_ty alt)
lintAltRhs :: Type -> Expr -> LintM ()
lintAltRhs ann_ty expr
= do actual_ty <- lintExpr expr
ensureEqTys actual_ty ann_ty (mkCaseAltMsg expr actual_ty ann_ty)
mkNewTyDataConAltMsg :: Type -> Alt -> MsgDoc
mkNewTyDataConAltMsg scrt_ty alt
= vcat [ text "Data alternative for newtype datacon",
text "Scrutinee type:" <+> ppr scrt_ty,
text "Alternative:" <+> pprAlt alt ]
mkBadAltMsg :: Type -> Alt -> MsgDoc
mkBadAltMsg scrt_ty alt
= vcat [ text "Data alternative when scrutinee is not a tycon application",
text "Scrutinee type:" <+> ppr scrt_ty,
text "Alternative:" <+> pprAlt alt ]
mkCaseAltMsg :: Expr -> Type -> Type -> MsgDoc
mkCaseAltMsg e ty1 ty2
= hang (text "Type of case alternatives not the same as the annotation on case:")
4 (vcat [ text "Actual type:" <+> ppr ty1,
text "Annotation on case:" <+> ppr ty2,
text "Alt Rhs:" <+> ppr e ])
--------------------------------------------------------------------------------
lintApp :: Type -> [Expr] -> LintM Type
lintApp fun_ty [Type ty]
= lintCoreArg fun_ty (CoreSyn.Type ty)
lintApp fun_ty [arg]
= do arg_ty <- lintExpr arg
lintValApp arg fun_ty arg_ty
lintApp fun_ty args
= do -- TODO: Make sure `args` doesn't have Type
arg_tys <- mapM lintExpr args
lintValApp args fun_ty (mkTupleTy Unboxed arg_tys)
--------------------------------------------------------------------------------
lintAtom :: Atom -> LintM Type
lintAtom (AVar id)
= return (idType id)
lintAtom (ALit lit)
= return (literalType lit)
lintAtom (AApp a ty)
= do fun_ty <- lintAtom a
ty' <- applySubstTy ty
lintTyApp fun_ty ty'
Copied from CoreLint
lintAtom (ACast a co)
= do atom_ty <- lintAtom a
co' <- applySubstCo co
(_, k2, from_ty, to_ty, r) <- lintCoercion co'
lintL (classifiesTypeWithValues k2)
(text "Target of cast not # or *:" <+> ppr co)
lintRole co' Representational r
ensureEqTys from_ty atom_ty (mkCastErr a co' from_ty atom_ty)
return to_ty
lintAtom (AType ty)
= lintCoreExpr (CoreSyn.Type ty)
--------------------------------------------------------------------------------
mkMultiValTy :: [Type] -> Type
mkMultiValTy [ty] = ty
mkMultiValTy tys = mkTupleTy Unboxed tys -- empty list OK
| null | https://raw.githubusercontent.com/osa1/StrictCore/f546099a0d345bc32297cd212edaf5197251886f/src/StrictCore/Lint.hs | haskell | ------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Returns (warnings, errors).
------------------------------------------------------------------------------
FIXME: LambdaBodyOf wants an Id so can't use it here
TODO: Run the tests if alts is empty.
Check the alternatives
TODO: Not sure if this invariant still holds...
Check match to RHS type
TODO: check if expr is really a value
------------------------------------------------------------------------------
type variables of the data constructor we've already check
And now bring the new binders into scope
argument types.
lintAltBinders scrt_ty con_payload_ty args'
------------------------------------------------------------------------------
TODO: Make sure `args` doesn't have Type
------------------------------------------------------------------------------
------------------------------------------------------------------------------
empty list OK | FIXME : This module needs some extra exports from CoreLint
module StrictCore.Lint
( lintCoreProgram
) where
import Bag
import BasicTypes
import CoAxiom (Role (..))
import CoreLint hiding (lintExpr, lintSingleBinding, mkBadAltMsg, mkCaseAltMsg,
mkNewTyDataConAltMsg)
import qualified CoreSyn
import DataCon
import DynFlags
import ErrUtils
import Id
import Kind (classifiesTypeWithValues)
import Literal (literalType)
import Outputable
import TyCon
import TyCoRep
import Type
import TysWiredIn (mkTupleTy)
import StrictCore.Syntax
import Control.Monad (mapM_)
import Prelude hiding (id)
lintCoreProgram :: DynFlags -> [Var] -> [Bind] -> (Bag MsgDoc, Bag MsgDoc)
lintCoreProgram dflags in_scope binds
= initL dflags defaultLintFlags $
addLoc TopLevelBindings $
addInScopeVars in_scope $
addInScopeVars (bindersOfBinds binds) $
mapM_ lintBind binds
lintBind :: Bind -> LintM ()
lintBind (NonRec bndr rhs) = lintSingleBind bndr rhs
lintBind (Rec bs) = mapM_ (uncurry lintSingleBind) bs
lintExpr :: Expr -> LintM Type
lintExpr (Var var)
= do var' <- lookupIdInScope var
return (idType var')
lintExpr (Lit lit)
= return (literalType lit)
lintExpr (MultiVal es)
= mkTupleTy Unboxed <$> mapM lintExpr es
lintExpr (Lam (ValBndrs as) body)
lintBinders as $ \as' -> do
body_ty <- lintExpr body
return (FunTy (mkMultiValTy (map idType as')) body_ty)
lintExpr (Lam (TyBndr ty_var) body)
= addLoc (LambdaBodyOf ty_var) $
lintBinder ty_var $ \ty_var' -> do
body_ty <- lintExpr body
return (mkLamType ty_var' body_ty)
lintExpr (App fn args)
= do fun_ty <- lintExpr fn
lintApp fun_ty args
lintExpr (Eval bndrs rhs body)
= do rhs_ty <- lintExpr rhs
lintAndScopeIds bndrs $ \bndrs' -> do
let bndr_tys = mkMultiValTy (map idType bndrs')
ensureEqTys bndr_tys rhs_ty (mkEvalBndrsMsg bndr_tys rhs_ty bndrs rhs)
lintExpr body
lintExpr (Let (NonRec bndr rhs) body)
= do lintSingleBind bndr rhs
lintAndScopeId bndr $ \_ -> lintExpr body
lintExpr (Let (Rec binds) body)
= lintAndScopeIds (map fst binds) $ \_ -> do
mapM_ (uncurry lintSingleBind) binds
lintExpr body
lintExpr (Case scrt alt_ty alts)
= do scrt_ty <- lintAtom scrt
(alt_ty', _) <- lintInTy alt_ty
mapM_ (lintAlt scrt_ty alt_ty') alts
return alt_ty'
lintExpr (Type ty)
failWithL (text "Type found as expression" <+> ppr ty)
Copied from CoreLint with one line of change
lintExpr (Cast expr co)
= do expr_ty <- lintExpr expr
co' <- applySubstCo co
(_, k2, from_ty, to_ty, r) <- lintCoercion co'
lintL (classifiesTypeWithValues k2)
(text "Target of cast not # or *:" <+> ppr co)
lintRole co' Representational r
ensureEqTys from_ty expr_ty (mkCastErr expr co' from_ty expr_ty)
return to_ty
lintExpr (Coercion co)
= lintCoreExpr (CoreSyn.Coercion co)
lintSingleBind :: Id -> Expr -> LintM ()
lintSingleBind bndr val
= do ty <- lintValue val
binder_ty <- applySubstTy (idType bndr)
ensureEqTys binder_ty ty (mkRhsMsg bndr (text "RHS") ty)
lintValue :: Expr -> LintM Type
mkEvalBndrsMsg :: Type -> Type -> [Id] -> Expr -> MsgDoc
mkEvalBndrsMsg bndrs_ty expr_ty bndrs expr
= vcat [ text "Eval LHS and RHS types don't match",
text "LHS type:" <+> ppr bndrs_ty,
text "RHS type:" <+> ppr expr_ty,
text "Binders:" <+> ppr bndrs,
text "RHS:" <+> ppr expr ]
lintAlt :: Type -> Type -> Alt -> LintM ()
lintAlt _ alt_ty (CoreSyn.DEFAULT, args, rhs)
= addLoc (RhsOf (head args)) $ do
lintL (null args) (mkDefaultArgsMsg args)
lintAltRhs alt_ty rhs
lintAlt scrt_ty alt_ty (CoreSyn.LitAlt lit, args, rhs)
= addLoc (BodyOfLetRec args) $ do
lintL (null args) (mkDefaultArgsMsg args)
let lit_ty = literalType lit
ensureEqTys lit_ty scrt_ty (mkBadPatMsg lit_ty scrt_ty)
lintAltRhs alt_ty rhs
lintAlt scrt_ty alt_ty alt@(CoreSyn.DataAlt con, args, rhs)
| isNewTyCon (dataConTyCon con)
= addErrL (mkNewTyDataConAltMsg scrt_ty alt)
| Just (tycon, tycon_arg_tys) <- splitTyConApp_maybe scrt_ty
= addLoc (BodyOfLetRec args) $ do
First instantiate the universally quantified
lintL (tycon == dataConTyCon con) (mkBadConMsg tycon con)
let con_payload_ty = piResultTys (dataConRepType con) tycon_arg_tys
lintBinders args $ \ args' -> do
FIXME : This line wo n't work , we need to translate DataCons to update
lintAltRhs alt_ty rhs
is wrong shape
= addErrL (mkBadAltMsg scrt_ty alt)
lintAltRhs :: Type -> Expr -> LintM ()
lintAltRhs ann_ty expr
= do actual_ty <- lintExpr expr
ensureEqTys actual_ty ann_ty (mkCaseAltMsg expr actual_ty ann_ty)
mkNewTyDataConAltMsg :: Type -> Alt -> MsgDoc
mkNewTyDataConAltMsg scrt_ty alt
= vcat [ text "Data alternative for newtype datacon",
text "Scrutinee type:" <+> ppr scrt_ty,
text "Alternative:" <+> pprAlt alt ]
mkBadAltMsg :: Type -> Alt -> MsgDoc
mkBadAltMsg scrt_ty alt
= vcat [ text "Data alternative when scrutinee is not a tycon application",
text "Scrutinee type:" <+> ppr scrt_ty,
text "Alternative:" <+> pprAlt alt ]
mkCaseAltMsg :: Expr -> Type -> Type -> MsgDoc
mkCaseAltMsg e ty1 ty2
= hang (text "Type of case alternatives not the same as the annotation on case:")
4 (vcat [ text "Actual type:" <+> ppr ty1,
text "Annotation on case:" <+> ppr ty2,
text "Alt Rhs:" <+> ppr e ])
lintApp :: Type -> [Expr] -> LintM Type
lintApp fun_ty [Type ty]
= lintCoreArg fun_ty (CoreSyn.Type ty)
lintApp fun_ty [arg]
= do arg_ty <- lintExpr arg
lintValApp arg fun_ty arg_ty
lintApp fun_ty args
arg_tys <- mapM lintExpr args
lintValApp args fun_ty (mkTupleTy Unboxed arg_tys)
lintAtom :: Atom -> LintM Type
lintAtom (AVar id)
= return (idType id)
lintAtom (ALit lit)
= return (literalType lit)
lintAtom (AApp a ty)
= do fun_ty <- lintAtom a
ty' <- applySubstTy ty
lintTyApp fun_ty ty'
Copied from CoreLint
lintAtom (ACast a co)
= do atom_ty <- lintAtom a
co' <- applySubstCo co
(_, k2, from_ty, to_ty, r) <- lintCoercion co'
lintL (classifiesTypeWithValues k2)
(text "Target of cast not # or *:" <+> ppr co)
lintRole co' Representational r
ensureEqTys from_ty atom_ty (mkCastErr a co' from_ty atom_ty)
return to_ty
lintAtom (AType ty)
= lintCoreExpr (CoreSyn.Type ty)
mkMultiValTy :: [Type] -> Type
mkMultiValTy [ty] = ty
|
a78750af5b48214cc80c10023a5e9489cbc54944a6025776282886b7b96b96e3 | fetburner/Coq2SML | dnet.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
* Generic discrimination net implementation over recursive
types . This module implements a association data structure similar
to tries but working on any types ( not just lists ) . It is a term
indexing datastructure , a generalization of the discrimination nets
described for example in W.W.McCune , 1992 , related also to
generalized tries [ , 2000 ] .
You can add pairs of ( term , identifier ) into a dnet , where the
identifier is * unique * , and search terms in a dnet filtering a
given pattern ( retrievial of instances ) . It returns all identifiers
associated with terms matching the pattern . It also works the other
way around : You provide a set of patterns and a term , and it
returns all patterns which the term matches ( retrievial of
generalizations ) . That 's why you provide * patterns * everywhere .
Warning 1 : Full unification does n't work as for now . Make sure the
set of metavariables in the structure and in the queries are
distincts , or you 'll get unexpected behaviours .
Warning 2 : This structure is perfect , i.e. the set of candidates
returned is equal to the set of solutions . Beware of
shifts and sorts subtyping though ( which makes the comparison not
symmetric , see term_dnet.ml ) .
The complexity of the search is ( almost ) the depth of the term .
To use it , you have to provide a module ( Datatype ) with the datatype
parametrized on the recursive argument . example :
type btree = type ' a btree0 =
| Leaf = = = > | Leaf
| Node of btree * btree | Node of ' a * ' a
types. This module implements a association data structure similar
to tries but working on any types (not just lists). It is a term
indexing datastructure, a generalization of the discrimination nets
described for example in W.W.McCune, 1992, related also to
generalized tries [Hinze, 2000].
You can add pairs of (term,identifier) into a dnet, where the
identifier is *unique*, and search terms in a dnet filtering a
given pattern (retrievial of instances). It returns all identifiers
associated with terms matching the pattern. It also works the other
way around : You provide a set of patterns and a term, and it
returns all patterns which the term matches (retrievial of
generalizations). That's why you provide *patterns* everywhere.
Warning 1: Full unification doesn't work as for now. Make sure the
set of metavariables in the structure and in the queries are
distincts, or you'll get unexpected behaviours.
Warning 2: This structure is perfect, i.e. the set of candidates
returned is equal to the set of solutions. Beware of DeBruijn
shifts and sorts subtyping though (which makes the comparison not
symmetric, see term_dnet.ml).
The complexity of the search is (almost) the depth of the term.
To use it, you have to provide a module (Datatype) with the datatype
parametrized on the recursive argument. example:
type btree = type 'a btree0 =
| Leaf ===> | Leaf
| Node of btree * btree | Node of 'a * 'a
*)
(** datatype you want to build a dnet on *)
module type Datatype =
sig
(** parametric datatype. ['a] is morally the recursive argument *)
type 'a t
(** non-recursive mapping of subterms *)
val map : ('a -> 'b) -> 'a t -> 'b t
val map2 : ('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
(** non-recursive folding of subterms *)
val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
val fold2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b t -> 'c t -> 'a
(** comparison of constructors *)
val compare : unit t -> unit t -> int
(** for each constructor, is it not-parametric on 'a? *)
val terminal : 'a t -> bool
* [ choose f w ] applies f on ONE of the subterms of w
val choose : ('a -> 'b) -> 'a t -> 'b
end
module type S =
sig
type t
(** provided identifier type *)
type ident
* provided
type meta
(** provided parametrized datastructure *)
type 'a structure
(** returned sets of solutions *)
module Idset : Set.S with type elt=ident
(** a pattern is a term where each node can be a unification
variable *)
type 'a pattern =
| Term of 'a
| Meta of meta
type term_pattern = 'a structure pattern as 'a
val empty : t
(** [add t w i] adds a new association (w,i) in t. *)
val add : t -> term_pattern -> ident -> t
(** [find_all t] returns all identifiers contained in t. *)
val find_all : t -> Idset.t
* [ fold_pattern f acc p dn ] folds f on each meta of p , passing the
meta and the sub - dnet under it . The result includes :
- Some set if identifiers were gathered on the leafs of the term
- None if the pattern contains no leaf ( only Metas at the leafs ) .
meta and the sub-dnet under it. The result includes:
- Some set if identifiers were gathered on the leafs of the term
- None if the pattern contains no leaf (only Metas at the leafs).
*)
val fold_pattern :
('a -> (Idset.t * meta * t) -> 'a) -> 'a -> term_pattern -> t -> Idset.t option * 'a
(** [find_match p t] returns identifiers of all terms matching p in
t. *)
val find_match : term_pattern -> t -> Idset.t
(** set operations on dnets *)
val inter : t -> t -> t
val union : t -> t -> t
(** apply a function on each identifier and node of terms in a dnet *)
val map : (ident -> ident) -> (unit structure -> unit structure) -> t -> t
end
module Make :
functor (T:Datatype) ->
functor (Ident:Set.OrderedType) ->
functor (Meta:Set.OrderedType) ->
S with type ident = Ident.t
and type meta = Meta.t
and type 'a structure = 'a T.t
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/lib/dnet.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* datatype you want to build a dnet on
* parametric datatype. ['a] is morally the recursive argument
* non-recursive mapping of subterms
* non-recursive folding of subterms
* comparison of constructors
* for each constructor, is it not-parametric on 'a?
* provided identifier type
* provided parametrized datastructure
* returned sets of solutions
* a pattern is a term where each node can be a unification
variable
* [add t w i] adds a new association (w,i) in t.
* [find_all t] returns all identifiers contained in t.
* [find_match p t] returns identifiers of all terms matching p in
t.
* set operations on dnets
* apply a function on each identifier and node of terms in a dnet | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Generic discrimination net implementation over recursive
types . This module implements a association data structure similar
to tries but working on any types ( not just lists ) . It is a term
indexing datastructure , a generalization of the discrimination nets
described for example in W.W.McCune , 1992 , related also to
generalized tries [ , 2000 ] .
You can add pairs of ( term , identifier ) into a dnet , where the
identifier is * unique * , and search terms in a dnet filtering a
given pattern ( retrievial of instances ) . It returns all identifiers
associated with terms matching the pattern . It also works the other
way around : You provide a set of patterns and a term , and it
returns all patterns which the term matches ( retrievial of
generalizations ) . That 's why you provide * patterns * everywhere .
Warning 1 : Full unification does n't work as for now . Make sure the
set of metavariables in the structure and in the queries are
distincts , or you 'll get unexpected behaviours .
Warning 2 : This structure is perfect , i.e. the set of candidates
returned is equal to the set of solutions . Beware of
shifts and sorts subtyping though ( which makes the comparison not
symmetric , see term_dnet.ml ) .
The complexity of the search is ( almost ) the depth of the term .
To use it , you have to provide a module ( Datatype ) with the datatype
parametrized on the recursive argument . example :
type btree = type ' a btree0 =
| Leaf = = = > | Leaf
| Node of btree * btree | Node of ' a * ' a
types. This module implements a association data structure similar
to tries but working on any types (not just lists). It is a term
indexing datastructure, a generalization of the discrimination nets
described for example in W.W.McCune, 1992, related also to
generalized tries [Hinze, 2000].
You can add pairs of (term,identifier) into a dnet, where the
identifier is *unique*, and search terms in a dnet filtering a
given pattern (retrievial of instances). It returns all identifiers
associated with terms matching the pattern. It also works the other
way around : You provide a set of patterns and a term, and it
returns all patterns which the term matches (retrievial of
generalizations). That's why you provide *patterns* everywhere.
Warning 1: Full unification doesn't work as for now. Make sure the
set of metavariables in the structure and in the queries are
distincts, or you'll get unexpected behaviours.
Warning 2: This structure is perfect, i.e. the set of candidates
returned is equal to the set of solutions. Beware of DeBruijn
shifts and sorts subtyping though (which makes the comparison not
symmetric, see term_dnet.ml).
The complexity of the search is (almost) the depth of the term.
To use it, you have to provide a module (Datatype) with the datatype
parametrized on the recursive argument. example:
type btree = type 'a btree0 =
| Leaf ===> | Leaf
| Node of btree * btree | Node of 'a * 'a
*)
(* Input signature: one "layer" of a recursive datatype, together with the
   traversals the dnet needs.  ['a] marks the recursive positions. *)
module type Datatype =
sig
  type 'a t
  val map : ('a -> 'b) -> 'a t -> 'b t
  val map2 : ('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
  val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
  val fold2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b t -> 'c t -> 'a
  (* Comparison of node heads only: recursive arguments are erased to unit. *)
  val compare : unit t -> unit t -> int
  (* NOTE(review): presumably true when the node has no subterm --
     confirm against the implementation. *)
  val terminal : 'a t -> bool
  (* [choose f w] applies f on ONE of the subterms of w *)
  val choose : ('a -> 'b) -> 'a t -> 'b
end
(* Output signature of the functor: a discrimination net mapping terms
   (patterns) to unique identifiers. *)
module type S =
sig
  (* The dnet itself. *)
  type t
  type ident

  (* provided *)
  type meta
  type 'a structure

  module Idset : Set.S with type elt=ident

  (* A pattern layer: either a concrete node or a unification variable. *)
  type 'a pattern =
  | Term of 'a
  | Meta of meta

  (* Recursive (equi-recursive) closing of the pattern type. *)
  type term_pattern = 'a structure pattern as 'a

  val empty : t
  (* [add t w i] associates identifier [i] to pattern [w] in [t]. *)
  val add : t -> term_pattern -> ident -> t
  (* All identifiers contained in the dnet. *)
  val find_all : t -> Idset.t

  (** [fold_pattern f acc p dn] folds f on each meta of p, passing the
      meta and the sub-dnet under it. The result includes:
      - Some set if identifiers were gathered on the leafs of the term
      - None if the pattern contains no leaf (only Metas at the leafs). *)
  val fold_pattern :
    ('a -> (Idset.t * meta * t) -> 'a) -> 'a -> term_pattern -> t -> Idset.t option * 'a

  (* Identifiers of all stored terms matching the given pattern. *)
  val find_match : term_pattern -> t -> Idset.t

  (* Set operations on dnets. *)
  val inter : t -> t -> t
  val union : t -> t -> t

  (* Apply a function on each identifier and node of terms in a dnet. *)
  val map : (ident -> ident) -> (unit structure -> unit structure) -> t -> t
end
(* Build a dnet over node structure [T], with ordered identifiers and
   ordered metavariables. *)
module Make :
  functor (T:Datatype) ->
  functor (Ident:Set.OrderedType) ->
  functor (Meta:Set.OrderedType) ->
    S with type ident = Ident.t
       and type meta = Meta.t
       and type 'a structure = 'a T.t
|
e5853aaaa2a5bf929a7a13485816782a6b2a66e0b4698f251dda624eac6291f9 | hipsleek/hipsleek | gLogViewWindow.ml | #include "xdebug.cppo"
(**/**)
open GUtil.SourceUtil
(**/**)
(** A read-only window displaying a textual log, with incremental search.
    Every match of the query is highlighted with [tag_results]; the match
    the user is currently positioned on is emphasised with [tag_current].
    Next/Previous cycle through the matches, wrapping around. *)
class log_view_window ?(title="Log") log () =
  (* Names of the text-buffer tags used for highlighting. *)
  let tag_results = "results" in
  let tag_current = "current" in
  let win = GWindow.window
      ~title
      ~height:600 ~width:850
      ~allow_shrink:true
      () in
  object (self)
    inherit GWindow.window win#as_window as super

    val log_view = GText.view ~editable:false ~wrap_mode:`WORD ()
    val search_field = GEdit.entry ~activates_default:true ()
    val status_lbl = GMisc.label ()
    (* Index (into [search_results]) of the currently selected match;
       set to -1 right after a fresh search, before the first "next". *)
    val mutable current_index = 0
    (* Positions of all matches of the current query. *)
    val mutable search_results = []
    (* Position of the currently selected match, if any. *)
    val mutable current_pos = None
    (* FIX(review): the extracted source had a bare [mutable] field here,
       which is not valid OCaml class syntax; restored the [val] keyword
       and the mangled [(fun () -> ())] default. *)
    val mutable clear_callback = (fun () -> ())

    initializer
      status_lbl#set_use_markup true;
      let h_separator = GMisc.separator `HORIZONTAL () in
      let v_separator = GMisc.separator `VERTICAL () in
      let log_panel = GUtil.create_scrolled_win log_view in
      log_view#buffer#set_text log;
      (* Bottom action bar: search entry, status label, navigation buttons. *)
      let action_panel = GPack.hbox ~spacing:10 ~border_width:10 () in
      let search_lbl = GMisc.label ~text:"Find:" () in
      action_panel#pack search_lbl#coerce;
      action_panel#pack ~expand:true search_field#coerce;
      action_panel#pack status_lbl#coerce;
      let next_btn = GButton.button ~label:"Next" () in
      let prev_btn = GButton.button ~label:"Previous" () in
      let buttons = GPack.button_box `HORIZONTAL () in
      buttons#pack next_btn#coerce;
      buttons#pack prev_btn#coerce;
      action_panel#pack buttons#coerce;
      let clear_btn = GButton.button ~label:"Clear " () in
      let close_btn = GButton.button ~label:"Close" () in
      let buttons = GPack.button_box `HORIZONTAL () in
      buttons#pack clear_btn#coerce;
      buttons#pack close_btn#coerce;
      action_panel#pack v_separator#coerce;
      action_panel#pack buttons#coerce;
      ignore (close_btn#connect#clicked ~callback:(fun _ -> self#misc#hide ()));
      let vbox = GPack.vbox ~packing:self#add () in
      vbox#pack ~expand:true log_panel#coerce;
      vbox#pack action_panel#coerce;
      ignore (log_view#buffer#create_tag ~name:tag_results [`BACKGROUND "yellow"]);
      ignore (log_view#buffer#create_tag ~name:tag_current [`BACKGROUND "orange"]);
      (* set event handlers *)
      ignore (search_field#connect#changed
                ~callback:self#update_search);
      ignore (search_field#connect#activate ~callback:self#find_next);
      ignore (next_btn#connect#clicked ~callback:self#find_next);
      ignore (prev_btn#connect#clicked ~callback:self#find_previous);
      (* FIX(review): the callback expression after "~callback:" was lost in
         extraction; wiring the Clear button to [clear_log], which matches
         the button's label -- confirm against the original source. *)
      ignore (clear_btn#connect#clicked ~callback:self#clear_log)

    (*****************
     * Public methods
     *****************)

    (** Empty the log buffer. *)
    method clear_log () =
      log_view#buffer#set_text ""
      (*clear_callback ();*)

    (** Replace the displayed log text. *)
    method set_log log =
      log_view#buffer#set_text log

    (******************
     * Private methods
     ******************)

    (* Re-run the search whenever the query changes; an all-blank query
       clears the highlighting and the status text instead. *)
    method private update_search () =
      let trimmed = Gen.SysUti.trim_str search_field#text in
      if String.length trimmed > 0 then
        let found = self#find_all (search_field#text) in
        if found then
          self#find_next ()
        else self#set_status "<span background='red'>0 of 0</span>"
      else
        (self#clear_highlight (); self#set_status "")

    (* Convert a character range into a pair of buffer iterators. *)
    method private pos2iters (pos: seg_pos) =
      let start = log_view#buffer#get_iter_at_char pos.start_char in
      let stop = log_view#buffer#get_iter_at_char pos.stop_char in
      start, stop

    method private apply_tag (tag: string) (pos: seg_pos) =
      let start, stop = self#pos2iters pos in
      log_view#buffer#apply_tag_by_name tag start stop

    method private remove_tag (tag: string) (pos: seg_pos) =
      let start, stop = self#pos2iters pos in
      log_view#buffer#remove_tag_by_name tag start stop

    (* Search [sub] over the whole buffer, highlight every hit and reset
       the navigation state; returns whether anything was found. *)
    method private find_all (sub: string) =
      (* clear current highlight *)
      self#clear_highlight ();
      (* search *)
      let doc = log_view#buffer#get_text () in
      let res = search doc sub in
      (* update current state and highlight all results *)
      search_results <- res;
      current_index <- -1;
      current_pos <- None;
      List.iter (self#apply_tag tag_results) res;
      let found = (List.length res) > 0 in
      found

    method private find_next () =
      if (List.length search_results) > 0 then
        let next_idx = (current_index + 1) mod (List.length search_results) in
        self#goto_search_result next_idx

    method private find_previous () =
      if (List.length search_results) > 0 then
        let length = List.length search_results in
        let prev_idx = (current_index - 1) mod length in
        let prev_idx = if prev_idx < 0 then length-1 else prev_idx in
        self#goto_search_result prev_idx

    (* Scroll to the [idx]-th match and move the "current" emphasis to it. *)
    method private goto_search_result idx =
      (* unhighlight current pos *)
      let () = match current_pos with
        | Some pos ->
          self#remove_tag tag_current pos;
          self#apply_tag tag_results pos
        | None -> ()
      in
      (* get next pos and its iter *)
      let pos = List.nth search_results idx in
      let iter = log_view#buffer#get_iter_at_char pos.start_char in
      (* scroll to and highlight it *)
      ignore (log_view#scroll_to_iter iter);
      self#apply_tag tag_current pos;
      (* update current state *)
      current_index <- idx;
      current_pos <- Some pos;
      self#set_status (Printf.sprintf "%d of %d" (idx+1) (List.length search_results))

    method private clear_highlight () =
      let start = log_view#buffer#get_iter `START in
      let stop = log_view#buffer#get_iter `END in
      log_view#buffer#remove_tag_by_name tag_current start stop;
      log_view#buffer#remove_tag_by_name tag_results start stop

    (*method private set_clear_callback ~callback =*)
    (*clear_callback <- callback*)

    (* Update the status label (Pango markup enabled). *)
    method private set_status (msg: string) =
      status_lbl#set_label msg
  end
| null | https://raw.githubusercontent.com/hipsleek/hipsleek/596f7fa7f67444c8309da2ca86ba4c47d376618c/bef_indent/gLogViewWindow.ml | ocaml | */*
*/*
set event handlers
****************
* Public methods
* **************
clear_callback ();
*****************
* Private methods
* ***************
clear current highlight
search
update current state and highlight all results
get next pos and it's iter
scroll to and highlight it
update current state
method private set_clear_callback ~callback =
clear_callback <- callback | #include "xdebug.cppo"
open GUtil.SourceUtil
(* A searchable, read-only log viewer window (duplicate "code" column of the
   same class above).  Matches of the query are tagged [tag_results]; the
   currently selected match is tagged [tag_current]. *)
class log_view_window ?(title="Log") log () =
  let tag_results = "results" in
  let tag_current = "current" in
  let win = GWindow.window
      ~title
      ~height:600 ~width:850
      ~allow_shrink:true
      () in
  object (self)
    inherit GWindow.window win#as_window as super
    val log_view = GText.view ~editable:false ~wrap_mode:`WORD ()
    val search_field = GEdit.entry ~activates_default:true ()
    val status_lbl = GMisc.label ()
    (* Index of the currently selected match in [search_results];
       -1 right after a fresh search. *)
    val mutable current_index = 0
    val mutable search_results = []
    val mutable current_pos = None
    (* NOTE(review): the next line lost its [val] keyword (and probably its
       comment delimiters) during extraction; it is not valid OCaml as
       written -- confirm against the original source. *)
    mutable clear_callback = ( fun ( ) - > ( ) )
    initializer
      status_lbl#set_use_markup true;
      let h_separator = GMisc.separator `HORIZONTAL () in
      let v_separator = GMisc.separator `VERTICAL () in
      let log_panel = GUtil.create_scrolled_win log_view in
      log_view#buffer#set_text log;
      (* Bottom action bar: search entry, status label, navigation buttons. *)
      let action_panel = GPack.hbox ~spacing:10 ~border_width:10 () in
      let search_lbl = GMisc.label ~text:"Find:" () in
      action_panel#pack search_lbl#coerce;
      action_panel#pack ~expand:true search_field#coerce;
      action_panel#pack status_lbl#coerce;
      let next_btn = GButton.button ~label:"Next" () in
      let prev_btn = GButton.button ~label:"Previous" () in
      let buttons = GPack.button_box `HORIZONTAL () in
      buttons#pack next_btn#coerce;
      buttons#pack prev_btn#coerce;
      action_panel#pack buttons#coerce;
      let clear_btn = GButton.button ~label:"Clear " ( ) in
      let close_btn = GButton.button ~label:"Close" () in
      let buttons = GPack.button_box `HORIZONTAL () in
      buttons#pack clear_btn#coerce ;
      buttons#pack close_btn#coerce;
      action_panel#pack v_separator#coerce;
      action_panel#pack buttons#coerce;
      ignore (close_btn#connect#clicked ~callback:(fun _ -> self#misc#hide ()));
      let vbox = GPack.vbox ~packing:self#add () in
      vbox#pack ~expand:true log_panel#coerce;
      vbox#pack action_panel#coerce;
      ignore (log_view#buffer#create_tag ~name:tag_results [`BACKGROUND "yellow"]);
      ignore (log_view#buffer#create_tag ~name:tag_current [`BACKGROUND "orange"]);
      (* Event handlers. *)
      ignore (search_field#connect#changed
                ~callback:self#update_search);
      ignore (search_field#connect#activate ~callback:self#find_next);
      ignore (next_btn#connect#clicked ~callback:self#find_next);
      ignore (prev_btn#connect#clicked ~callback:self#find_previous);
      (* NOTE(review): the callback expression after "~callback :" was lost
         in extraction; this line is incomplete as written -- confirm
         against the original source. *)
      ignore ( clear_btn#connect#clicked ~callback : )
    (* Empty the log buffer. *)
    method clear_log () =
      log_view#buffer#set_text ""
    (* Replace the displayed log text. *)
    method set_log log =
      log_view#buffer#set_text log
    (* Re-run the search when the query changes; a blank query clears
       highlighting and status. *)
    method private update_search () =
      let trimmed = Gen.SysUti.trim_str search_field#text in
      if String.length trimmed > 0 then
        let found = self#find_all (search_field#text) in
        if found then
          self#find_next ()
        else self#set_status "<span background='red'>0 of 0</span>"
      else
        (self#clear_highlight (); self#set_status "")
    (* Convert a character range into a pair of buffer iterators. *)
    method private pos2iters (pos: seg_pos) =
      let start = log_view#buffer#get_iter_at_char pos.start_char in
      let stop = log_view#buffer#get_iter_at_char pos.stop_char in
      start, stop
    method private apply_tag (tag: string) (pos: seg_pos) =
      let start, stop = self#pos2iters pos in
      log_view#buffer#apply_tag_by_name tag start stop
    method private remove_tag (tag: string) (pos: seg_pos) =
      let start, stop = self#pos2iters pos in
      log_view#buffer#remove_tag_by_name tag start stop
    (* Search [sub] over the buffer, highlight every hit, reset navigation
       state; returns whether anything was found. *)
    method private find_all (sub: string) =
      self#clear_highlight ();
      let doc = log_view#buffer#get_text () in
      let res = search doc sub in
      search_results <- res;
      current_index <- -1;
      current_pos <- None;
      List.iter (self#apply_tag tag_results) res;
      let found = (List.length res) > 0 in
      found
    method private find_next () =
      if (List.length search_results) > 0 then
        let next_idx = (current_index + 1) mod (List.length search_results) in
        self#goto_search_result next_idx
    method private find_previous () =
      if (List.length search_results) > 0 then
        let length = List.length search_results in
        let prev_idx = (current_index - 1) mod length in
        let prev_idx = if prev_idx < 0 then length-1 else prev_idx in
        self#goto_search_result prev_idx
    (* Scroll to the [idx]-th match and move the "current" emphasis onto it. *)
    method private goto_search_result idx =
      (* unhighlight current pos *)
      let () = match current_pos with
        | Some pos ->
          self#remove_tag tag_current pos;
          self#apply_tag tag_results pos
        | None -> ()
      in
      let pos = List.nth search_results idx in
      let iter = log_view#buffer#get_iter_at_char pos.start_char in
      ignore (log_view#scroll_to_iter iter);
      self#apply_tag tag_current pos;
      current_index <- idx;
      current_pos <- Some pos;
      self#set_status (Printf.sprintf "%d of %d" (idx+1) (List.length search_results))
    method private clear_highlight () =
      let start = log_view#buffer#get_iter `START in
      let stop = log_view#buffer#get_iter `END in
      log_view#buffer#remove_tag_by_name tag_current start stop;
      log_view#buffer#remove_tag_by_name tag_results start stop
    (* Update the status label (Pango markup enabled). *)
    method private set_status (msg: string) =
      status_lbl#set_label msg
  end
|
e6fe8b5910ee8c1a9ad87144a2cc4819018ac3eef57b91d21a6aac5a411a43a9 | huangz1990/SICP-answers | 52-square-limit.scm | 52-square-limit.scm
;; SICP picture-language "square limit": combine four transformed copies
;; of the n-level corner-split of PAINTER via square-of-four.
;;
;; FIX(review): restored from a garbled extraction -- the original text had
;; an ill-formed `let` binding list ((combine4 ...) flip-vect rotate180)
;; and the typo `flip-vect`; this is the standard SICP formulation.
(define (square-limit painter n)
  (let ((combine4 (square-of-four flip-horiz identity
                                  rotate180 flip-vert)))
    (combine4 (corner-split painter n))))
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp2/code/52-square-limit.scm | scheme | 52-square-limit.scm
(define (square-limit painter n)
(let ((combine4 (square-of-four identity flip-horiz)
flip-vect rotate180))
(combine4 (corner-split painter n))))
| |
a4852ebf6267be8ec02b35f1c62d5575814912e3135de5ab3a5935d165f233f9 | texmacs/markdown | srfi-9.scm | ;;; srfi-9.scm --- define-record-type
Copyright ( C ) 2001 , 2002 , 2006 Free Software Foundation , Inc.
;;
;; This library is free software; you can redistribute it and/or
;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
;;
;; This library is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; Lesser General Public License for more details.
;;
You should have received a copy of the GNU Lesser General Public
;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
;;; Commentary:
;; This module exports the syntactic form `define-record-type', which
;; is the means for creating record types defined in SRFI-9.
;;
;; The syntax of a record type definition is:
;;
;; <record type definition>
;; -> (define-record-type <type name>
;; (<constructor name> <field tag> ...)
;; <predicate name>
;; <field spec> ...)
;;
;; <field spec> -> (<field tag> <accessor name>)
;; -> (<field tag> <accessor name> <modifier name>)
;;
;; <field tag> -> <identifier>
;; <... name> -> <identifier>
;;
;; Usage example:
;;
;; guile> (use-modules (srfi srfi-9))
;; guile> (define-record-type :foo (make-foo x) foo?
;; (x get-x) (y get-y set-y!))
guile > ( define f ( make - foo 1 ) )
;; guile> f
# < : foo x : 1 y : # f >
;; guile> (get-x f)
1
guile > ( set - y ! f 2 )
2
;; guile> (get-y f)
2
;; guile> f
# < : foo x : 1 y : 2 >
;; guile> (foo? f)
;; #t
guile > ( foo ? 1 )
;; #f
;;; Code:
;; Guile module header: export only the `define-record-type' macro and
;; advertise SRFI-9 support to cond-expand.
(define-module (srfi srfi-9)
  :export-syntax (define-record-type))

(cond-expand-provide (current-module) '(srfi-9))
;; Expand a SRFI-9 record-type definition into Guile's procedural record
;; API (make-record-type, record-constructor, ...).  Generates, in order:
;;   - the record-type descriptor bound to TYPE-NAME
;;   - the constructor taking the listed field tags
;;   - the type predicate
;;   - one accessor (and optionally one modifier) per field spec
(define-macro (define-record-type type-name constructor/field-tag
                predicate-name . field-specs)
  `(begin
     (define ,type-name
       (make-record-type ',type-name ',(map car field-specs)))
     (define ,(car constructor/field-tag)
       (record-constructor ,type-name ',(cdr constructor/field-tag)))
     (define ,predicate-name
       (record-predicate ,type-name))
     ;; Per-field accessors and modifiers.
     ,@(map
        (lambda (spec)
          (cond
           ;; (field accessor)
           ((= (length spec) 2)
            `(define ,(cadr spec)
               (record-accessor ,type-name ',(car spec))))
           ;; (field accessor modifier)
           ((= (length spec) 3)
            `(begin
               (define ,(cadr spec)
                 (record-accessor ,type-name ',(car spec)))
               (define ,(caddr spec)
                 (record-modifier ,type-name ',(car spec)))))
           (else
            (error "invalid field spec " spec))))
        field-specs)))
;;; srfi-9.scm ends here
| null | https://raw.githubusercontent.com/texmacs/markdown/f1332bba6d0d23d3448921acc8ae3ddf905659f7/progs/srfi/srfi-9.scm | scheme | srfi-9.scm --- define-record-type
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
Commentary:
This module exports the syntactic form `define-record-type', which
is the means for creating record types defined in SRFI-9.
The syntax of a record type definition is:
<record type definition>
-> (define-record-type <type name>
(<constructor name> <field tag> ...)
<predicate name>
<field spec> ...)
<field spec> -> (<field tag> <accessor name>)
-> (<field tag> <accessor name> <modifier name>)
<field tag> -> <identifier>
<... name> -> <identifier>
Usage example:
guile> (use-modules (srfi srfi-9))
guile> (define-record-type :foo (make-foo x) foo?
(x get-x) (y get-y set-y!))
guile> f
guile> (get-x f)
guile> (get-y f)
guile> f
guile> (foo? f)
#t
#f
Code:
srfi-9.scm ends here |
Copyright ( C ) 2001 , 2002 , 2006 Free Software Foundation , Inc.
version 2.1 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
guile > ( define f ( make - foo 1 ) )
# < : foo x : 1 y : # f >
1
guile > ( set - y ! f 2 )
2
2
# < : foo x : 1 y : 2 >
guile > ( foo ? 1 )
;; Guile module header (duplicate "code" column copy): export the
;; `define-record-type' macro and advertise SRFI-9 to cond-expand.
(define-module (srfi srfi-9)
  :export-syntax (define-record-type))

(cond-expand-provide (current-module) '(srfi-9))
;; SRFI-9 record-type macro (duplicate "code" column copy): expands into
;; Guile's procedural record API -- descriptor, constructor, predicate,
;; then one accessor (and optional modifier) per field spec.
(define-macro (define-record-type type-name constructor/field-tag
                predicate-name . field-specs)
  `(begin
     (define ,type-name
       (make-record-type ',type-name ',(map car field-specs)))
     (define ,(car constructor/field-tag)
       (record-constructor ,type-name ',(cdr constructor/field-tag)))
     (define ,predicate-name
       (record-predicate ,type-name))
     ;; Per-field accessors and modifiers.
     ,@(map
        (lambda (spec)
          (cond
           ;; (field accessor)
           ((= (length spec) 2)
            `(define ,(cadr spec)
               (record-accessor ,type-name ',(car spec))))
           ;; (field accessor modifier)
           ((= (length spec) 3)
            `(begin
               (define ,(cadr spec)
                 (record-accessor ,type-name ',(car spec)))
               (define ,(caddr spec)
                 (record-modifier ,type-name ',(car spec)))))
           (else
            (error "invalid field spec " spec))))
        field-specs)))
|
fa3ff513eec4acc0562e2bb13603984f5bdfb445a61d2da0d4d521ce99312d9b | rbkmoney/fistful-server | ff_withdrawal_codec.erl | -module(ff_withdrawal_codec).
-behaviour(ff_codec).
-include_lib("fistful_proto/include/ff_proto_withdrawal_thrift.hrl").
-export([unmarshal_quote_params/1]).
-export([unmarshal_withdrawal_params/1]).
-export([marshal_withdrawal_params/1]).
-export([marshal_withdrawal_state/2]).
-export([marshal_event/1]).
-export([marshal/2]).
-export([unmarshal/2]).
%% API
%% Convert a thrift 'QuoteParams' record into the internal quote-params map.
%% Optional fields go through maybe_unmarshal/2 and the resulting map is
%% passed to genlib_map:compact/1.
-spec unmarshal_quote_params(ff_proto_withdrawal_thrift:'QuoteParams'()) -> ff_withdrawal:quote_params().
unmarshal_quote_params(Params) ->
    WalletID = Params#wthd_QuoteParams.wallet_id,
    CurrencyFrom = Params#wthd_QuoteParams.currency_from,
    CurrencyTo = Params#wthd_QuoteParams.currency_to,
    Body = Params#wthd_QuoteParams.body,
    DestinationID = Params#wthd_QuoteParams.destination_id,
    ExternalID = Params#wthd_QuoteParams.external_id,
    genlib_map:compact(#{
        wallet_id => unmarshal(id, WalletID),
        currency_from => unmarshal(currency_ref, CurrencyFrom),
        currency_to => unmarshal(currency_ref, CurrencyTo),
        body => unmarshal(cash, Body),
        destination_id => maybe_unmarshal(id, DestinationID),
        external_id => maybe_unmarshal(id, ExternalID)
    }).
%% Convert the internal withdrawal-params map into its thrift record.
%% Mandatory keys are read with maps:get/2 (crash if absent); optional
%% keys default to undefined and go through maybe_marshal/2.
-spec marshal_withdrawal_params(ff_withdrawal:params()) -> ff_proto_withdrawal_thrift:'WithdrawalParams'().
marshal_withdrawal_params(Params) ->
    ID = maps:get(id, Params),
    WalletID = maps:get(wallet_id, Params),
    DestinationID = maps:get(destination_id, Params),
    Body = maps:get(body, Params),
    ExternalID = maps:get(external_id, Params, undefined),
    Metadata = maps:get(metadata, Params, undefined),
    #wthd_WithdrawalParams{
        id = marshal(id, ID),
        wallet_id = marshal(id, WalletID),
        destination_id = marshal(id, DestinationID),
        body = marshal(cash, Body),
        external_id = maybe_marshal(id, ExternalID),
        metadata = maybe_marshal(ctx, Metadata)
    }.
%% Convert a thrift 'WithdrawalParams' record into the internal params map.
%% Optional fields go through maybe_unmarshal/2; the map is compacted.
-spec unmarshal_withdrawal_params(ff_proto_withdrawal_thrift:'WithdrawalParams'()) -> ff_withdrawal:params().
unmarshal_withdrawal_params(Params) ->
    ID = Params#wthd_WithdrawalParams.id,
    WalletID = Params#wthd_WithdrawalParams.wallet_id,
    DestinationID = Params#wthd_WithdrawalParams.destination_id,
    Body = Params#wthd_WithdrawalParams.body,
    Quote = Params#wthd_WithdrawalParams.quote,
    ExternalID = Params#wthd_WithdrawalParams.external_id,
    Metadata = Params#wthd_WithdrawalParams.metadata,
    genlib_map:compact(#{
        id => unmarshal(id, ID),
        wallet_id => unmarshal(id, WalletID),
        destination_id => unmarshal(id, DestinationID),
        body => unmarshal(cash, Body),
        quote => maybe_unmarshal(quote, Quote),
        external_id => maybe_unmarshal(id, ExternalID),
        metadata => maybe_unmarshal(ctx, Metadata)
    }).
%% Build the thrift 'WithdrawalState' view of a withdrawal aggregate:
%% core fields plus derived collections (sessions, effective cash flow,
%% adjustments) and the entity context supplied by the caller.
-spec marshal_withdrawal_state(ff_withdrawal:withdrawal_state(), ff_entity_context:context()) ->
    ff_proto_withdrawal_thrift:'WithdrawalState'().
marshal_withdrawal_state(WithdrawalState, Context) ->
    %% Derived data extracted up front via the ff_withdrawal accessors.
    CashFlow = ff_withdrawal:effective_final_cash_flow(WithdrawalState),
    Adjustments = ff_withdrawal:adjustments(WithdrawalState),
    Sessions = ff_withdrawal:sessions(WithdrawalState),
    #wthd_WithdrawalState{
        id = marshal(id, ff_withdrawal:id(WithdrawalState)),
        body = marshal(cash, ff_withdrawal:body(WithdrawalState)),
        wallet_id = marshal(id, ff_withdrawal:wallet_id(WithdrawalState)),
        destination_id = marshal(id, ff_withdrawal:destination_id(WithdrawalState)),
        route = maybe_marshal(route, ff_withdrawal:route(WithdrawalState)),
        external_id = maybe_marshal(id, ff_withdrawal:external_id(WithdrawalState)),
        domain_revision = maybe_marshal(domain_revision, ff_withdrawal:domain_revision(WithdrawalState)),
        party_revision = maybe_marshal(party_revision, ff_withdrawal:party_revision(WithdrawalState)),
        created_at = maybe_marshal(timestamp_ms, ff_withdrawal:created_at(WithdrawalState)),
        status = maybe_marshal(status, ff_withdrawal:status(WithdrawalState)),
        sessions = [marshal(session_state, S) || S <- Sessions],
        %% NOTE(review): effective_route mirrors the route field here --
        %% both are taken from ff_withdrawal:route/1.
        effective_route = maybe_marshal(route, ff_withdrawal:route(WithdrawalState)),
        effective_final_cash_flow = ff_cash_flow_codec:marshal(final_cash_flow, CashFlow),
        adjustments = [ff_withdrawal_adjustment_codec:marshal(adjustment_state, A) || A <- Adjustments],
        context = marshal(ctx, Context),
        metadata = marshal(ctx, ff_withdrawal:metadata(WithdrawalState)),
        quote = maybe_marshal(quote_state, ff_withdrawal:quote(WithdrawalState))
    }.
%% Wrap a machine event (event id + {ev, Timestamp, Change} tuple) into
%% its thrift 'Event' record.
-spec marshal_event(ff_withdrawal_machine:event()) -> ff_proto_withdrawal_thrift:'Event'().
marshal_event({EventID, {ev, Timestamp, Change}}) ->
    MarshaledID = ff_codec:marshal(event_id, EventID),
    OccuredAt = ff_codec:marshal(timestamp, Timestamp),
    MarshaledChange = marshal(change, Change),
    #wthd_Event{
        event_id = MarshaledID,
        occured_at = OccuredAt,
        change = MarshaledChange
    }.
%% Generic encoder: maps internal withdrawal values onto their thrift
%% representations, dispatching on the type tag in the first argument.
%% Unknown tags fall through to the shared ff_codec:marshal/2.
-spec marshal(ff_codec:type_name(), ff_codec:decoded_value()) -> ff_codec:encoded_value().
%% Element-wise marshalling of homogeneous lists.
marshal({list, T}, V) ->
    [marshal(T, E) || E <- V];
marshal(timestamped_change, {ev, Timestamp, Change}) ->
    #wthd_TimestampedChange{
        change = marshal(change, Change),
        occured_at = ff_codec:marshal(timestamp, Timestamp)
    };
%% --- event changes ---
marshal(change, {created, Withdrawal}) ->
    {created, #wthd_CreatedChange{withdrawal = marshal(withdrawal, Withdrawal)}};
marshal(change, {status_changed, Status}) ->
    {status_changed, #wthd_StatusChange{status = ff_withdrawal_status_codec:marshal(status, Status)}};
marshal(change, {p_transfer, TransferChange}) ->
    {transfer, #wthd_TransferChange{payload = ff_p_transfer_codec:marshal(change, TransferChange)}};
marshal(change, {session_started, SessionID}) ->
    {session, #wthd_SessionChange{id = SessionID, payload = marshal(session_event, started)}};
marshal(change, {session_finished, {SessionID, SessionResult}}) ->
    {session, #wthd_SessionChange{id = SessionID, payload = marshal(session_event, {finished, SessionResult})}};
marshal(change, {route_changed, Route}) ->
    {route, #wthd_RouteChange{route = marshal(route, Route)}};
marshal(change, {limit_check, Details}) ->
    {limit_check, #wthd_LimitCheckChange{details = ff_limit_check_codec:marshal(details, Details)}};
marshal(change, {resource_got, Resource}) ->
    {resource, {got, #wthd_ResourceGot{resource = marshal(resource, Resource)}}};
marshal(change, {adjustment, #{id := ID, payload := Payload}}) ->
    {adjustment, #wthd_AdjustmentChange{
        id = marshal(id, ID),
        payload = ff_withdrawal_adjustment_codec:marshal(change, Payload)
    }};
%% --- aggregates ---
marshal(withdrawal, Withdrawal) ->
    #wthd_Withdrawal{
        id = marshal(id, ff_withdrawal:id(Withdrawal)),
        body = marshal(cash, ff_withdrawal:body(Withdrawal)),
        wallet_id = marshal(id, ff_withdrawal:wallet_id(Withdrawal)),
        destination_id = marshal(id, ff_withdrawal:destination_id(Withdrawal)),
        route = maybe_marshal(route, ff_withdrawal:route(Withdrawal)),
        external_id = maybe_marshal(id, ff_withdrawal:external_id(Withdrawal)),
        domain_revision = maybe_marshal(domain_revision, ff_withdrawal:domain_revision(Withdrawal)),
        party_revision = maybe_marshal(party_revision, ff_withdrawal:party_revision(Withdrawal)),
        created_at = maybe_marshal(timestamp_ms, ff_withdrawal:created_at(Withdrawal)),
        metadata = maybe_marshal(ctx, ff_withdrawal:metadata(Withdrawal)),
        quote = maybe_marshal(quote_state, ff_withdrawal:quote(Withdrawal))
    };
%% Only version-1 routes are supported: provider_id is mandatory,
%% terminal_id optional, and a legacy provider id is derived for
%% backward compatibility.
marshal(route, Route) ->
    #{
        version := 1,
        provider_id := ProviderID
    } = Route,
    #wthd_Route{
        provider_id = marshal(provider_id, ProviderID),
        terminal_id = maybe_marshal(terminal_id, genlib_map:get(terminal_id, Route)),
        provider_id_legacy = marshal(string, get_legacy_provider_id(Route))
    };
marshal(status, Status) ->
    ff_withdrawal_status_codec:marshal(status, Status);
%% --- session events and results ---
marshal(session_event, started) ->
    {started, #wthd_SessionStarted{}};
marshal(session_event, {finished, Result}) ->
    {finished, #wthd_SessionFinished{result = marshal(session_result, Result)}};
marshal(session_result, success) ->
    {succeeded, #wthd_SessionSucceeded{}};
marshal(session_result, {success, TransactionInfo}) ->
    %% for backward compatibility with events stored in DB - take here.
    %% @see ff_adapter_withdrawal:rebind_transaction_info/1
    {succeeded, #wthd_SessionSucceeded{trx_info = marshal(transaction_info, TransactionInfo)}};
marshal(session_result, {failed, Failure}) ->
    {failed, #wthd_SessionFailed{failure = ff_codec:marshal(failure, Failure)}};
marshal(transaction_info, TrxInfo) ->
    ff_withdrawal_session_codec:marshal(transaction_info, TrxInfo);
marshal(session_state, Session) ->
    #wthd_SessionState{
        id = marshal(id, maps:get(id, Session)),
        result = maybe_marshal(session_result, maps:get(result, Session, undefined))
    };
%% --- quotes ---
marshal(quote_state, Quote) ->
    #wthd_QuoteState{
        cash_from = marshal(cash, maps:get(cash_from, Quote)),
        cash_to = marshal(cash, maps:get(cash_to, Quote)),
        % already formatted
        created_at = maps:get(created_at, Quote),
        expires_on = maps:get(expires_on, Quote),
        quote_data = maybe_marshal(msgpack, maps:get(quote_data, Quote, undefined)),
        route = maybe_marshal(route, maps:get(route, Quote, undefined)),
        resource = maybe_marshal(resource_descriptor, maps:get(resource_descriptor, Quote, undefined)),
        quote_data_legacy = marshal(ctx, #{})
    };
marshal(quote, Quote) ->
    #wthd_Quote{
        cash_from = marshal(cash, maps:get(cash_from, Quote)),
        cash_to = marshal(cash, maps:get(cash_to, Quote)),
        % already formatted
        created_at = maps:get(created_at, Quote),
        expires_on = maps:get(expires_on, Quote),
        quote_data = maybe_marshal(msgpack, genlib_map:get(quote_data, Quote)),
        route = maybe_marshal(route, genlib_map:get(route, Quote)),
        resource = maybe_marshal(resource_descriptor, genlib_map:get(resource_descriptor, Quote)),
        party_revision = maybe_marshal(party_revision, genlib_map:get(party_revision, Quote)),
        domain_revision = maybe_marshal(domain_revision, genlib_map:get(domain_revision, Quote)),
        operation_timestamp = maybe_marshal(timestamp_ms, genlib_map:get(operation_timestamp, Quote))
    };
marshal(ctx, Ctx) ->
    maybe_marshal(context, Ctx);
%% Fallback: defer to the shared codec for all other type tags.
marshal(T, V) ->
    ff_codec:marshal(T, V).
-spec unmarshal(ff_codec:type_name(), ff_codec:encoded_value()) -> ff_codec:decoded_value().
unmarshal({list, T}, V) ->
[unmarshal(T, E) || E <- V];
unmarshal(timestamped_change, TimestampedChange) ->
Timestamp = ff_codec:unmarshal(timestamp, TimestampedChange#wthd_TimestampedChange.occured_at),
Change = unmarshal(change, TimestampedChange#wthd_TimestampedChange.change),
{ev, Timestamp, Change};
unmarshal(repair_scenario, {add_events, #wthd_AddEventsRepair{events = Events, action = Action}}) ->
{add_events,
genlib_map:compact(#{
events => unmarshal({list, change}, Events),
action => maybe_unmarshal(complex_action, Action)
})};
unmarshal(change, {created, #wthd_CreatedChange{withdrawal = Withdrawal}}) ->
{created, unmarshal(withdrawal, Withdrawal)};
unmarshal(change, {status_changed, #wthd_StatusChange{status = Status}}) ->
{status_changed, unmarshal(status, Status)};
unmarshal(change, {transfer, #wthd_TransferChange{payload = TransferChange}}) ->
{p_transfer, ff_p_transfer_codec:unmarshal(change, TransferChange)};
unmarshal(change, {session, SessionChange}) ->
unmarshal(session_event, SessionChange);
unmarshal(change, {route, #wthd_RouteChange{route = Route}}) ->
{route_changed, unmarshal(route, Route)};
unmarshal(change, {limit_check, #wthd_LimitCheckChange{details = Details}}) ->
{limit_check, ff_limit_check_codec:unmarshal(details, Details)};
unmarshal(change, {resource, {got, #wthd_ResourceGot{resource = Resource}}}) ->
{resource_got, unmarshal(resource, Resource)};
unmarshal(change, {adjustment, Change}) ->
{adjustment, #{
id => unmarshal(id, Change#wthd_AdjustmentChange.id),
payload => ff_withdrawal_adjustment_codec:unmarshal(change, Change#wthd_AdjustmentChange.payload)
}};
unmarshal(withdrawal, Withdrawal = #wthd_Withdrawal{}) ->
ff_withdrawal:gen(#{
id => unmarshal(id, Withdrawal#wthd_Withdrawal.id),
body => unmarshal(cash, Withdrawal#wthd_Withdrawal.body),
params => genlib_map:compact(#{
wallet_id => unmarshal(id, Withdrawal#wthd_Withdrawal.wallet_id),
destination_id => unmarshal(id, Withdrawal#wthd_Withdrawal.destination_id),
quote => maybe_unmarshal(quote_state, Withdrawal#wthd_Withdrawal.quote)
}),
route => maybe_unmarshal(route, Withdrawal#wthd_Withdrawal.route),
external_id => maybe_unmarshal(id, Withdrawal#wthd_Withdrawal.external_id),
domain_revision => maybe_unmarshal(domain_revision, Withdrawal#wthd_Withdrawal.domain_revision),
party_revision => maybe_unmarshal(party_revision, Withdrawal#wthd_Withdrawal.party_revision),
created_at => maybe_unmarshal(timestamp_ms, Withdrawal#wthd_Withdrawal.created_at),
transfer_type => withdrawal,
metadata => maybe_unmarshal(ctx, Withdrawal#wthd_Withdrawal.metadata)
});
unmarshal(route, Route) ->
genlib_map:compact(#{
version => 1,
provider_id => unmarshal(provider_id, Route#wthd_Route.provider_id),
terminal_id => maybe_unmarshal(terminal_id, Route#wthd_Route.terminal_id),
provider_id_legacy => maybe_unmarshal(string, Route#wthd_Route.provider_id_legacy)
});
unmarshal(status, Status) ->
ff_withdrawal_status_codec:unmarshal(status, Status);
unmarshal(session_event, #wthd_SessionChange{id = ID, payload = {started, #wthd_SessionStarted{}}}) ->
{session_started, unmarshal(id, ID)};
unmarshal(session_event, #wthd_SessionChange{id = ID, payload = {finished, Finished}}) ->
#wthd_SessionFinished{result = Result} = Finished,
{session_finished, {unmarshal(id, ID), unmarshal(session_result, Result)}};
unmarshal(session_result, {succeeded, #wthd_SessionSucceeded{trx_info = undefined}}) ->
success;
unmarshal(session_result, {succeeded, #wthd_SessionSucceeded{trx_info = TransactionInfo}}) ->
for backward compatibility with events stored in DB - take here .
%% @see ff_adapter_withdrawal:rebind_transaction_info/1
{success, unmarshal(transaction_info, TransactionInfo)};
unmarshal(session_result, {failed, #wthd_SessionFailed{failure = Failure}}) ->
{failed, ff_codec:unmarshal(failure, Failure)};
unmarshal(transaction_info, TrxInfo) ->
ff_withdrawal_session_codec:unmarshal(transaction_info, TrxInfo);
unmarshal(session_state, Session) ->
genlib_map:compact(#{
id => unmarshal(id, Session#wthd_SessionState.id),
result => maybe_unmarshal(session_result, Session#wthd_SessionState.result)
});
unmarshal(quote_state, Quote) ->
genlib_map:compact(#{
cash_from => unmarshal(cash, Quote#wthd_QuoteState.cash_from),
cash_to => unmarshal(cash, Quote#wthd_QuoteState.cash_to),
created_at => Quote#wthd_QuoteState.created_at,
expires_on => Quote#wthd_QuoteState.expires_on,
route => maybe_unmarshal(route, Quote#wthd_QuoteState.route),
resource_descriptor => maybe_unmarshal(resource_descriptor, Quote#wthd_QuoteState.resource),
quote_data => maybe_unmarshal(msgpack, Quote#wthd_QuoteState.quote_data)
});
unmarshal(quote, Quote) ->
genlib_map:compact(#{
cash_from => unmarshal(cash, Quote#wthd_Quote.cash_from),
cash_to => unmarshal(cash, Quote#wthd_Quote.cash_to),
created_at => Quote#wthd_Quote.created_at,
expires_on => Quote#wthd_Quote.expires_on,
route => maybe_unmarshal(route, Quote#wthd_Quote.route),
resource_descriptor => maybe_unmarshal(resource_descriptor, Quote#wthd_Quote.resource),
quote_data => maybe_unmarshal(msgpack, Quote#wthd_Quote.quote_data),
domain_revision => maybe_unmarshal(domain_revision, Quote#wthd_Quote.domain_revision),
party_revision => maybe_unmarshal(party_revision, Quote#wthd_Quote.party_revision),
operation_timestamp => maybe_unmarshal(timestamp_ms, Quote#wthd_Quote.operation_timestamp)
});
unmarshal(ctx, Ctx) ->
maybe_unmarshal(context, Ctx);
unmarshal(T, V) ->
ff_codec:unmarshal(T, V).
%% Internals
maybe_unmarshal(_Type, undefined) ->
undefined;
maybe_unmarshal(Type, Value) ->
unmarshal(Type, Value).
maybe_marshal(_Type, undefined) ->
undefined;
maybe_marshal(Type, Value) ->
marshal(Type, Value).
get_legacy_provider_id(#{provider_id_legacy := Provider}) when is_binary(Provider) ->
Provider;
get_legacy_provider_id(#{provider_id := Provider}) when is_integer(Provider) ->
genlib:to_binary(Provider - 300).
%% TESTS
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-spec test() -> _.
-spec withdrawal_symmetry_test() -> _.
withdrawal_symmetry_test() ->
In = #wthd_Withdrawal{
id = genlib:unique(),
body = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
wallet_id = genlib:unique(),
destination_id = genlib:unique(),
external_id = genlib:unique(),
route = #wthd_Route{
provider_id = 1,
terminal_id = 7,
provider_id_legacy = <<"mocketbank">>
},
domain_revision = 1,
party_revision = 3,
created_at = <<"2099-01-01T00:00:00.123Z">>
},
?assertEqual(In, marshal(withdrawal, unmarshal(withdrawal, In))).
-spec withdrawal_params_symmetry_test() -> _.
withdrawal_params_symmetry_test() ->
In = #wthd_WithdrawalParams{
id = genlib:unique(),
body = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
wallet_id = genlib:unique(),
destination_id = genlib:unique(),
external_id = undefined
},
?assertEqual(In, marshal_withdrawal_params(unmarshal_withdrawal_params(In))).
-spec quote_state_symmetry_test() -> _.
quote_state_symmetry_test() ->
In = #wthd_QuoteState{
cash_from = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
cash_to = #'Cash'{
amount = 20202,
currency = #'CurrencyRef'{symbolic_code = <<"Pineapple Empire">>}
},
created_at = genlib:unique(),
expires_on = genlib:unique(),
quote_data = {arr, [{bin, genlib:unique()}, {i, 5}, {nl, #msgp_Nil{}}]},
route = #wthd_Route{
provider_id = 1,
terminal_id = 2,
provider_id_legacy = <<>>
},
resource = {bank_card, #'ResourceDescriptorBankCard'{bin_data_id = {arr, [{bin, genlib:unique()}]}}},
quote_data_legacy = #{}
},
?assertEqual(In, marshal(quote_state, unmarshal(quote_state, In))).
-spec quote_symmetry_test() -> _.
quote_symmetry_test() ->
In = #wthd_Quote{
cash_from = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
cash_to = #'Cash'{
amount = 20202,
currency = #'CurrencyRef'{symbolic_code = <<"Pineapple Empire">>}
},
created_at = genlib:unique(),
expires_on = genlib:unique(),
quote_data = {arr, [{bin, genlib:unique()}, {i, 5}, {nl, #msgp_Nil{}}]},
route = #wthd_Route{
provider_id = 1,
terminal_id = 2,
provider_id_legacy = <<"drovider">>
},
resource = {bank_card, #'ResourceDescriptorBankCard'{bin_data_id = {arr, [{bin, genlib:unique()}]}}},
domain_revision = 1,
party_revision = 2,
operation_timestamp = <<"2020-01-01T01:00:00Z">>
},
?assertEqual(In, marshal(quote, unmarshal(quote, In))).
-spec marshal_session_result_test_() -> _.
marshal_session_result_test_() ->
TransactionInfo = #{id => <<"ID">>, extra => #{<<"Hello">> => <<"World">>}},
TransactionInfoThrift = marshal(transaction_info, TransactionInfo),
Results = [
{success, TransactionInfo},
success
],
ResultsThrift = [
{succeeded, #wthd_SessionSucceeded{trx_info = TransactionInfoThrift}},
{succeeded, #wthd_SessionSucceeded{}}
],
[
?_assertEqual(ResultsThrift, marshal({list, session_result}, Results)),
?_assertEqual(Results, unmarshal({list, session_result}, ResultsThrift))
].
-spec unmarshal_repair_scenario_test() -> _.
unmarshal_repair_scenario_test() ->
Scenario = {
add_events,
#wthd_AddEventsRepair{
events = [
{status_changed, #wthd_StatusChange{
status = {pending, #wthd_status_Pending{}}
}}
],
action = #ff_repairer_ComplexAction{
timer =
{set_timer, #ff_repairer_SetTimerAction{
timer = {timeout, 0}
}}
}
}
},
?assertEqual(
{add_events, #{
events => [
{status_changed, pending}
],
action => [
{set_timer, {timeout, 0}}
]
}},
unmarshal(repair_scenario, Scenario)
).
-endif.
| null | https://raw.githubusercontent.com/rbkmoney/fistful-server/526adc789f4f6083dae7eb4d7ac2d9ba3c34845b/apps/ff_server/src/ff_withdrawal_codec.erl | erlang | API
@see ff_adapter_withdrawal:rebind_transaction_info/1
already formatted
already formatted
@see ff_adapter_withdrawal:rebind_transaction_info/1
Internals
TESTS | -module(ff_withdrawal_codec).
-behaviour(ff_codec).
-include_lib("fistful_proto/include/ff_proto_withdrawal_thrift.hrl").
-export([unmarshal_quote_params/1]).
-export([unmarshal_withdrawal_params/1]).
-export([marshal_withdrawal_params/1]).
-export([marshal_withdrawal_state/2]).
-export([marshal_event/1]).
-export([marshal/2]).
-export([unmarshal/2]).
-spec unmarshal_quote_params(ff_proto_withdrawal_thrift:'QuoteParams'()) -> ff_withdrawal:quote_params().
unmarshal_quote_params(Params) ->
genlib_map:compact(#{
wallet_id => unmarshal(id, Params#wthd_QuoteParams.wallet_id),
currency_from => unmarshal(currency_ref, Params#wthd_QuoteParams.currency_from),
currency_to => unmarshal(currency_ref, Params#wthd_QuoteParams.currency_to),
body => unmarshal(cash, Params#wthd_QuoteParams.body),
destination_id => maybe_unmarshal(id, Params#wthd_QuoteParams.destination_id),
external_id => maybe_unmarshal(id, Params#wthd_QuoteParams.external_id)
}).
-spec marshal_withdrawal_params(ff_withdrawal:params()) -> ff_proto_withdrawal_thrift:'WithdrawalParams'().
marshal_withdrawal_params(Params) ->
#wthd_WithdrawalParams{
id = marshal(id, maps:get(id, Params)),
wallet_id = marshal(id, maps:get(wallet_id, Params)),
destination_id = marshal(id, maps:get(destination_id, Params)),
body = marshal(cash, maps:get(body, Params)),
external_id = maybe_marshal(id, maps:get(external_id, Params, undefined)),
metadata = maybe_marshal(ctx, maps:get(metadata, Params, undefined))
}.
-spec unmarshal_withdrawal_params(ff_proto_withdrawal_thrift:'WithdrawalParams'()) -> ff_withdrawal:params().
unmarshal_withdrawal_params(Params) ->
genlib_map:compact(#{
id => unmarshal(id, Params#wthd_WithdrawalParams.id),
wallet_id => unmarshal(id, Params#wthd_WithdrawalParams.wallet_id),
destination_id => unmarshal(id, Params#wthd_WithdrawalParams.destination_id),
body => unmarshal(cash, Params#wthd_WithdrawalParams.body),
quote => maybe_unmarshal(quote, Params#wthd_WithdrawalParams.quote),
external_id => maybe_unmarshal(id, Params#wthd_WithdrawalParams.external_id),
metadata => maybe_unmarshal(ctx, Params#wthd_WithdrawalParams.metadata)
}).
-spec marshal_withdrawal_state(ff_withdrawal:withdrawal_state(), ff_entity_context:context()) ->
ff_proto_withdrawal_thrift:'WithdrawalState'().
marshal_withdrawal_state(WithdrawalState, Context) ->
CashFlow = ff_withdrawal:effective_final_cash_flow(WithdrawalState),
Adjustments = ff_withdrawal:adjustments(WithdrawalState),
Sessions = ff_withdrawal:sessions(WithdrawalState),
#wthd_WithdrawalState{
id = marshal(id, ff_withdrawal:id(WithdrawalState)),
body = marshal(cash, ff_withdrawal:body(WithdrawalState)),
wallet_id = marshal(id, ff_withdrawal:wallet_id(WithdrawalState)),
destination_id = marshal(id, ff_withdrawal:destination_id(WithdrawalState)),
route = maybe_marshal(route, ff_withdrawal:route(WithdrawalState)),
external_id = maybe_marshal(id, ff_withdrawal:external_id(WithdrawalState)),
domain_revision = maybe_marshal(domain_revision, ff_withdrawal:domain_revision(WithdrawalState)),
party_revision = maybe_marshal(party_revision, ff_withdrawal:party_revision(WithdrawalState)),
created_at = maybe_marshal(timestamp_ms, ff_withdrawal:created_at(WithdrawalState)),
status = maybe_marshal(status, ff_withdrawal:status(WithdrawalState)),
sessions = [marshal(session_state, S) || S <- Sessions],
effective_route = maybe_marshal(route, ff_withdrawal:route(WithdrawalState)),
effective_final_cash_flow = ff_cash_flow_codec:marshal(final_cash_flow, CashFlow),
adjustments = [ff_withdrawal_adjustment_codec:marshal(adjustment_state, A) || A <- Adjustments],
context = marshal(ctx, Context),
metadata = marshal(ctx, ff_withdrawal:metadata(WithdrawalState)),
quote = maybe_marshal(quote_state, ff_withdrawal:quote(WithdrawalState))
}.
-spec marshal_event(ff_withdrawal_machine:event()) -> ff_proto_withdrawal_thrift:'Event'().
marshal_event({EventID, {ev, Timestamp, Change}}) ->
#wthd_Event{
event_id = ff_codec:marshal(event_id, EventID),
occured_at = ff_codec:marshal(timestamp, Timestamp),
change = marshal(change, Change)
}.
-spec marshal(ff_codec:type_name(), ff_codec:decoded_value()) -> ff_codec:encoded_value().
marshal({list, T}, V) ->
[marshal(T, E) || E <- V];
marshal(timestamped_change, {ev, Timestamp, Change}) ->
#wthd_TimestampedChange{
change = marshal(change, Change),
occured_at = ff_codec:marshal(timestamp, Timestamp)
};
marshal(change, {created, Withdrawal}) ->
{created, #wthd_CreatedChange{withdrawal = marshal(withdrawal, Withdrawal)}};
marshal(change, {status_changed, Status}) ->
{status_changed, #wthd_StatusChange{status = ff_withdrawal_status_codec:marshal(status, Status)}};
marshal(change, {p_transfer, TransferChange}) ->
{transfer, #wthd_TransferChange{payload = ff_p_transfer_codec:marshal(change, TransferChange)}};
marshal(change, {session_started, SessionID}) ->
{session, #wthd_SessionChange{id = SessionID, payload = marshal(session_event, started)}};
marshal(change, {session_finished, {SessionID, SessionResult}}) ->
{session, #wthd_SessionChange{id = SessionID, payload = marshal(session_event, {finished, SessionResult})}};
marshal(change, {route_changed, Route}) ->
{route, #wthd_RouteChange{route = marshal(route, Route)}};
marshal(change, {limit_check, Details}) ->
{limit_check, #wthd_LimitCheckChange{details = ff_limit_check_codec:marshal(details, Details)}};
marshal(change, {resource_got, Resource}) ->
{resource, {got, #wthd_ResourceGot{resource = marshal(resource, Resource)}}};
marshal(change, {adjustment, #{id := ID, payload := Payload}}) ->
{adjustment, #wthd_AdjustmentChange{
id = marshal(id, ID),
payload = ff_withdrawal_adjustment_codec:marshal(change, Payload)
}};
marshal(withdrawal, Withdrawal) ->
#wthd_Withdrawal{
id = marshal(id, ff_withdrawal:id(Withdrawal)),
body = marshal(cash, ff_withdrawal:body(Withdrawal)),
wallet_id = marshal(id, ff_withdrawal:wallet_id(Withdrawal)),
destination_id = marshal(id, ff_withdrawal:destination_id(Withdrawal)),
route = maybe_marshal(route, ff_withdrawal:route(Withdrawal)),
external_id = maybe_marshal(id, ff_withdrawal:external_id(Withdrawal)),
domain_revision = maybe_marshal(domain_revision, ff_withdrawal:domain_revision(Withdrawal)),
party_revision = maybe_marshal(party_revision, ff_withdrawal:party_revision(Withdrawal)),
created_at = maybe_marshal(timestamp_ms, ff_withdrawal:created_at(Withdrawal)),
metadata = maybe_marshal(ctx, ff_withdrawal:metadata(Withdrawal)),
quote = maybe_marshal(quote_state, ff_withdrawal:quote(Withdrawal))
};
marshal(route, Route) ->
#{
version := 1,
provider_id := ProviderID
} = Route,
#wthd_Route{
provider_id = marshal(provider_id, ProviderID),
terminal_id = maybe_marshal(terminal_id, genlib_map:get(terminal_id, Route)),
provider_id_legacy = marshal(string, get_legacy_provider_id(Route))
};
marshal(status, Status) ->
ff_withdrawal_status_codec:marshal(status, Status);
marshal(session_event, started) ->
{started, #wthd_SessionStarted{}};
marshal(session_event, {finished, Result}) ->
{finished, #wthd_SessionFinished{result = marshal(session_result, Result)}};
marshal(session_result, success) ->
{succeeded, #wthd_SessionSucceeded{}};
marshal(session_result, {success, TransactionInfo}) ->
for backward compatibility with events stored in DB - take here .
{succeeded, #wthd_SessionSucceeded{trx_info = marshal(transaction_info, TransactionInfo)}};
marshal(session_result, {failed, Failure}) ->
{failed, #wthd_SessionFailed{failure = ff_codec:marshal(failure, Failure)}};
marshal(transaction_info, TrxInfo) ->
ff_withdrawal_session_codec:marshal(transaction_info, TrxInfo);
marshal(session_state, Session) ->
#wthd_SessionState{
id = marshal(id, maps:get(id, Session)),
result = maybe_marshal(session_result, maps:get(result, Session, undefined))
};
marshal(quote_state, Quote) ->
#wthd_QuoteState{
cash_from = marshal(cash, maps:get(cash_from, Quote)),
cash_to = marshal(cash, maps:get(cash_to, Quote)),
created_at = maps:get(created_at, Quote),
expires_on = maps:get(expires_on, Quote),
quote_data = maybe_marshal(msgpack, maps:get(quote_data, Quote, undefined)),
route = maybe_marshal(route, maps:get(route, Quote, undefined)),
resource = maybe_marshal(resource_descriptor, maps:get(resource_descriptor, Quote, undefined)),
quote_data_legacy = marshal(ctx, #{})
};
marshal(quote, Quote) ->
#wthd_Quote{
cash_from = marshal(cash, maps:get(cash_from, Quote)),
cash_to = marshal(cash, maps:get(cash_to, Quote)),
created_at = maps:get(created_at, Quote),
expires_on = maps:get(expires_on, Quote),
quote_data = maybe_marshal(msgpack, genlib_map:get(quote_data, Quote)),
route = maybe_marshal(route, genlib_map:get(route, Quote)),
resource = maybe_marshal(resource_descriptor, genlib_map:get(resource_descriptor, Quote)),
party_revision = maybe_marshal(party_revision, genlib_map:get(party_revision, Quote)),
domain_revision = maybe_marshal(domain_revision, genlib_map:get(domain_revision, Quote)),
operation_timestamp = maybe_marshal(timestamp_ms, genlib_map:get(operation_timestamp, Quote))
};
marshal(ctx, Ctx) ->
maybe_marshal(context, Ctx);
marshal(T, V) ->
ff_codec:marshal(T, V).
-spec unmarshal(ff_codec:type_name(), ff_codec:encoded_value()) -> ff_codec:decoded_value().
unmarshal({list, T}, V) ->
[unmarshal(T, E) || E <- V];
unmarshal(timestamped_change, TimestampedChange) ->
Timestamp = ff_codec:unmarshal(timestamp, TimestampedChange#wthd_TimestampedChange.occured_at),
Change = unmarshal(change, TimestampedChange#wthd_TimestampedChange.change),
{ev, Timestamp, Change};
unmarshal(repair_scenario, {add_events, #wthd_AddEventsRepair{events = Events, action = Action}}) ->
{add_events,
genlib_map:compact(#{
events => unmarshal({list, change}, Events),
action => maybe_unmarshal(complex_action, Action)
})};
unmarshal(change, {created, #wthd_CreatedChange{withdrawal = Withdrawal}}) ->
{created, unmarshal(withdrawal, Withdrawal)};
unmarshal(change, {status_changed, #wthd_StatusChange{status = Status}}) ->
{status_changed, unmarshal(status, Status)};
unmarshal(change, {transfer, #wthd_TransferChange{payload = TransferChange}}) ->
{p_transfer, ff_p_transfer_codec:unmarshal(change, TransferChange)};
unmarshal(change, {session, SessionChange}) ->
unmarshal(session_event, SessionChange);
unmarshal(change, {route, #wthd_RouteChange{route = Route}}) ->
{route_changed, unmarshal(route, Route)};
unmarshal(change, {limit_check, #wthd_LimitCheckChange{details = Details}}) ->
{limit_check, ff_limit_check_codec:unmarshal(details, Details)};
unmarshal(change, {resource, {got, #wthd_ResourceGot{resource = Resource}}}) ->
{resource_got, unmarshal(resource, Resource)};
unmarshal(change, {adjustment, Change}) ->
{adjustment, #{
id => unmarshal(id, Change#wthd_AdjustmentChange.id),
payload => ff_withdrawal_adjustment_codec:unmarshal(change, Change#wthd_AdjustmentChange.payload)
}};
unmarshal(withdrawal, Withdrawal = #wthd_Withdrawal{}) ->
ff_withdrawal:gen(#{
id => unmarshal(id, Withdrawal#wthd_Withdrawal.id),
body => unmarshal(cash, Withdrawal#wthd_Withdrawal.body),
params => genlib_map:compact(#{
wallet_id => unmarshal(id, Withdrawal#wthd_Withdrawal.wallet_id),
destination_id => unmarshal(id, Withdrawal#wthd_Withdrawal.destination_id),
quote => maybe_unmarshal(quote_state, Withdrawal#wthd_Withdrawal.quote)
}),
route => maybe_unmarshal(route, Withdrawal#wthd_Withdrawal.route),
external_id => maybe_unmarshal(id, Withdrawal#wthd_Withdrawal.external_id),
domain_revision => maybe_unmarshal(domain_revision, Withdrawal#wthd_Withdrawal.domain_revision),
party_revision => maybe_unmarshal(party_revision, Withdrawal#wthd_Withdrawal.party_revision),
created_at => maybe_unmarshal(timestamp_ms, Withdrawal#wthd_Withdrawal.created_at),
transfer_type => withdrawal,
metadata => maybe_unmarshal(ctx, Withdrawal#wthd_Withdrawal.metadata)
});
unmarshal(route, Route) ->
genlib_map:compact(#{
version => 1,
provider_id => unmarshal(provider_id, Route#wthd_Route.provider_id),
terminal_id => maybe_unmarshal(terminal_id, Route#wthd_Route.terminal_id),
provider_id_legacy => maybe_unmarshal(string, Route#wthd_Route.provider_id_legacy)
});
unmarshal(status, Status) ->
ff_withdrawal_status_codec:unmarshal(status, Status);
unmarshal(session_event, #wthd_SessionChange{id = ID, payload = {started, #wthd_SessionStarted{}}}) ->
{session_started, unmarshal(id, ID)};
unmarshal(session_event, #wthd_SessionChange{id = ID, payload = {finished, Finished}}) ->
#wthd_SessionFinished{result = Result} = Finished,
{session_finished, {unmarshal(id, ID), unmarshal(session_result, Result)}};
unmarshal(session_result, {succeeded, #wthd_SessionSucceeded{trx_info = undefined}}) ->
success;
unmarshal(session_result, {succeeded, #wthd_SessionSucceeded{trx_info = TransactionInfo}}) ->
for backward compatibility with events stored in DB - take here .
{success, unmarshal(transaction_info, TransactionInfo)};
unmarshal(session_result, {failed, #wthd_SessionFailed{failure = Failure}}) ->
{failed, ff_codec:unmarshal(failure, Failure)};
unmarshal(transaction_info, TrxInfo) ->
ff_withdrawal_session_codec:unmarshal(transaction_info, TrxInfo);
unmarshal(session_state, Session) ->
genlib_map:compact(#{
id => unmarshal(id, Session#wthd_SessionState.id),
result => maybe_unmarshal(session_result, Session#wthd_SessionState.result)
});
unmarshal(quote_state, Quote) ->
genlib_map:compact(#{
cash_from => unmarshal(cash, Quote#wthd_QuoteState.cash_from),
cash_to => unmarshal(cash, Quote#wthd_QuoteState.cash_to),
created_at => Quote#wthd_QuoteState.created_at,
expires_on => Quote#wthd_QuoteState.expires_on,
route => maybe_unmarshal(route, Quote#wthd_QuoteState.route),
resource_descriptor => maybe_unmarshal(resource_descriptor, Quote#wthd_QuoteState.resource),
quote_data => maybe_unmarshal(msgpack, Quote#wthd_QuoteState.quote_data)
});
unmarshal(quote, Quote) ->
genlib_map:compact(#{
cash_from => unmarshal(cash, Quote#wthd_Quote.cash_from),
cash_to => unmarshal(cash, Quote#wthd_Quote.cash_to),
created_at => Quote#wthd_Quote.created_at,
expires_on => Quote#wthd_Quote.expires_on,
route => maybe_unmarshal(route, Quote#wthd_Quote.route),
resource_descriptor => maybe_unmarshal(resource_descriptor, Quote#wthd_Quote.resource),
quote_data => maybe_unmarshal(msgpack, Quote#wthd_Quote.quote_data),
domain_revision => maybe_unmarshal(domain_revision, Quote#wthd_Quote.domain_revision),
party_revision => maybe_unmarshal(party_revision, Quote#wthd_Quote.party_revision),
operation_timestamp => maybe_unmarshal(timestamp_ms, Quote#wthd_Quote.operation_timestamp)
});
unmarshal(ctx, Ctx) ->
maybe_unmarshal(context, Ctx);
unmarshal(T, V) ->
ff_codec:unmarshal(T, V).
maybe_unmarshal(_Type, undefined) ->
undefined;
maybe_unmarshal(Type, Value) ->
unmarshal(Type, Value).
maybe_marshal(_Type, undefined) ->
undefined;
maybe_marshal(Type, Value) ->
marshal(Type, Value).
get_legacy_provider_id(#{provider_id_legacy := Provider}) when is_binary(Provider) ->
Provider;
get_legacy_provider_id(#{provider_id := Provider}) when is_integer(Provider) ->
genlib:to_binary(Provider - 300).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-spec test() -> _.
-spec withdrawal_symmetry_test() -> _.
withdrawal_symmetry_test() ->
In = #wthd_Withdrawal{
id = genlib:unique(),
body = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
wallet_id = genlib:unique(),
destination_id = genlib:unique(),
external_id = genlib:unique(),
route = #wthd_Route{
provider_id = 1,
terminal_id = 7,
provider_id_legacy = <<"mocketbank">>
},
domain_revision = 1,
party_revision = 3,
created_at = <<"2099-01-01T00:00:00.123Z">>
},
?assertEqual(In, marshal(withdrawal, unmarshal(withdrawal, In))).
-spec withdrawal_params_symmetry_test() -> _.
withdrawal_params_symmetry_test() ->
In = #wthd_WithdrawalParams{
id = genlib:unique(),
body = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
wallet_id = genlib:unique(),
destination_id = genlib:unique(),
external_id = undefined
},
?assertEqual(In, marshal_withdrawal_params(unmarshal_withdrawal_params(In))).
-spec quote_state_symmetry_test() -> _.
quote_state_symmetry_test() ->
In = #wthd_QuoteState{
cash_from = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
cash_to = #'Cash'{
amount = 20202,
currency = #'CurrencyRef'{symbolic_code = <<"Pineapple Empire">>}
},
created_at = genlib:unique(),
expires_on = genlib:unique(),
quote_data = {arr, [{bin, genlib:unique()}, {i, 5}, {nl, #msgp_Nil{}}]},
route = #wthd_Route{
provider_id = 1,
terminal_id = 2,
provider_id_legacy = <<>>
},
resource = {bank_card, #'ResourceDescriptorBankCard'{bin_data_id = {arr, [{bin, genlib:unique()}]}}},
quote_data_legacy = #{}
},
?assertEqual(In, marshal(quote_state, unmarshal(quote_state, In))).
-spec quote_symmetry_test() -> _.
quote_symmetry_test() ->
In = #wthd_Quote{
cash_from = #'Cash'{
amount = 10101,
currency = #'CurrencyRef'{symbolic_code = <<"Banana Republic">>}
},
cash_to = #'Cash'{
amount = 20202,
currency = #'CurrencyRef'{symbolic_code = <<"Pineapple Empire">>}
},
created_at = genlib:unique(),
expires_on = genlib:unique(),
quote_data = {arr, [{bin, genlib:unique()}, {i, 5}, {nl, #msgp_Nil{}}]},
route = #wthd_Route{
provider_id = 1,
terminal_id = 2,
provider_id_legacy = <<"drovider">>
},
resource = {bank_card, #'ResourceDescriptorBankCard'{bin_data_id = {arr, [{bin, genlib:unique()}]}}},
domain_revision = 1,
party_revision = 2,
operation_timestamp = <<"2020-01-01T01:00:00Z">>
},
?assertEqual(In, marshal(quote, unmarshal(quote, In))).
-spec marshal_session_result_test_() -> _.
marshal_session_result_test_() ->
TransactionInfo = #{id => <<"ID">>, extra => #{<<"Hello">> => <<"World">>}},
TransactionInfoThrift = marshal(transaction_info, TransactionInfo),
Results = [
{success, TransactionInfo},
success
],
ResultsThrift = [
{succeeded, #wthd_SessionSucceeded{trx_info = TransactionInfoThrift}},
{succeeded, #wthd_SessionSucceeded{}}
],
[
?_assertEqual(ResultsThrift, marshal({list, session_result}, Results)),
?_assertEqual(Results, unmarshal({list, session_result}, ResultsThrift))
].
-spec unmarshal_repair_scenario_test() -> _.
unmarshal_repair_scenario_test() ->
Scenario = {
add_events,
#wthd_AddEventsRepair{
events = [
{status_changed, #wthd_StatusChange{
status = {pending, #wthd_status_Pending{}}
}}
],
action = #ff_repairer_ComplexAction{
timer =
{set_timer, #ff_repairer_SetTimerAction{
timer = {timeout, 0}
}}
}
}
},
?assertEqual(
{add_events, #{
events => [
{status_changed, pending}
],
action => [
{set_timer, {timeout, 0}}
]
}},
unmarshal(repair_scenario, Scenario)
).
-endif.
|
aeebedfd140e2b228985fffe93ec4d25eb2a68bd7d6e5b2c03757bd1c55aecde | nponeccop/HNC | Types.hs | # OPTIONS_GHC -fno - warn - unused - matches #
module SPL.Types (T (..), C (..), St (..), InFun (..)) where
import Data.Map
-- type
data T =
T [Char]
| TT [T]
| TU [Char]
| TV [Char]
| TD [Char] [T]
| TS (Map [Char] T)
| TL
| TUL [T]
deriving (Eq, Show, Read)
-- code
data InFun =
InFun [Char] ([C] -> Map [Char] C -> C)
instance Show InFun where
show (InFun s f) = "InFun \""++s++"\""
instance Eq InFun where
(==) (InFun a f1) (InFun b f2) = (==) a b
data St =
K [C]
| S [[Char]]
| W [([Char], C)]
| D [Char]
| L
| R
deriving (Eq, Show)
data C =
CBool Bool
| CNum Int
| CStr [Char]
| CVal [Char]
| CL C St
| CInFun Int InFun
| CInfFun InFun
| CSpec [Char]
| CList [C]
| CPair [C]
| CDebug Int C
| CTyped T C
| CStruct (Map [Char] C)
| CF Integer
deriving (Eq, Show)
| null | https://raw.githubusercontent.com/nponeccop/HNC/d8447009a04c56ae2cba4c7c179e39384085ea00/SPL/Types.hs | haskell | type
code | # OPTIONS_GHC -fno - warn - unused - matches #
module SPL.Types (T (..), C (..), St (..), InFun (..)) where
import Data.Map
data T =
T [Char]
| TT [T]
| TU [Char]
| TV [Char]
| TD [Char] [T]
| TS (Map [Char] T)
| TL
| TUL [T]
deriving (Eq, Show, Read)
data InFun =
InFun [Char] ([C] -> Map [Char] C -> C)
instance Show InFun where
show (InFun s f) = "InFun \""++s++"\""
instance Eq InFun where
(==) (InFun a f1) (InFun b f2) = (==) a b
data St =
K [C]
| S [[Char]]
| W [([Char], C)]
| D [Char]
| L
| R
deriving (Eq, Show)
data C =
CBool Bool
| CNum Int
| CStr [Char]
| CVal [Char]
| CL C St
| CInFun Int InFun
| CInfFun InFun
| CSpec [Char]
| CList [C]
| CPair [C]
| CDebug Int C
| CTyped T C
| CStruct (Map [Char] C)
| CF Integer
deriving (Eq, Show)
|
8e8537592286c18a1daf01a4c80d71ca5eaa233b4bcdecff3eff9feed91c8dfd | shriram/mystery-languages | semantics.rkt | #lang racket
(require mystery-languages/make-semantics)
(provide (rename-out [mod-begin #%module-begin]
[ti #%top-interaction]))
(define-values (namespaces lang-print-names)
(make-namespaces-and-lang-print-names (list 'mystery-languages/mut-structs/L1/semantics
'mystery-languages/mut-structs/L2/semantics
'mystery-languages/mut-structs/L3/semantics)))
(define-syntax (multi-runner stx)
(syntax-case stx (TEST)
[(_ (TEST e r ...))
#`(test-output 'e (list 'r ...) namespaces)]
[(_ e)
#`(show-output 'e namespaces lang-print-names)]))
(define-syntax mod-begin
(λ (stx)
(syntax-case stx ()
[(_ b ...)
#'(#%printing-module-begin (multi-runner b) ...)])))
(define-syntax ti
(λ (stx)
(syntax-case stx ()
([_ . e]
#'(#%top-interaction . (multi-runner e))))))
| null | https://raw.githubusercontent.com/shriram/mystery-languages/86f717654c9ff05b7f838ea7ba1e63fb280d9387/mut-structs/semantics.rkt | racket | #lang racket
(require mystery-languages/make-semantics)
(provide (rename-out [mod-begin #%module-begin]
[ti #%top-interaction]))
(define-values (namespaces lang-print-names)
(make-namespaces-and-lang-print-names (list 'mystery-languages/mut-structs/L1/semantics
'mystery-languages/mut-structs/L2/semantics
'mystery-languages/mut-structs/L3/semantics)))
(define-syntax (multi-runner stx)
(syntax-case stx (TEST)
[(_ (TEST e r ...))
#`(test-output 'e (list 'r ...) namespaces)]
[(_ e)
#`(show-output 'e namespaces lang-print-names)]))
(define-syntax mod-begin
(λ (stx)
(syntax-case stx ()
[(_ b ...)
#'(#%printing-module-begin (multi-runner b) ...)])))
(define-syntax ti
(λ (stx)
(syntax-case stx ()
([_ . e]
#'(#%top-interaction . (multi-runner e))))))
| |
5a8717604104426d2cdb5252a779430392044afaa0babf76c6692088c76222b0 | manuel-serrano/hop | wiki.scm | ;*=====================================================================*/
* serrano / prgm / project / hop/2.1.x / weblets / wiki / wiki.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : Tue Mar 10 09:57:17 2009 * /
* Last change : Mon Mar 22 10:13:36 2010 ( serrano ) * /
* Copyright : 2009 - 10 * /
;* ------------------------------------------------------------- */
;* Wiki runtime system */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module wiki_client
(export (wiki-hide-toc-popup obj)
(wiki-show-toc-popup obj)))
;*---------------------------------------------------------------------*/
;* wiki-key-modifier ... */
;*---------------------------------------------------------------------*/
(define wiki-key-modifier #f)
(define wiki-anim-step 0.3)
(define wiki-anim-speed 5)
;*---------------------------------------------------------------------*/
;* wiki-keyup-handler ... */
;*---------------------------------------------------------------------*/
(define (wiki-keyup-handler event)
(when (= event.which 18) (set! wiki-key-modifier #f)))
;*---------------------------------------------------------------------*/
;* wiki-keydown-handler ... */
;*---------------------------------------------------------------------*/
(define (wiki-keydown-handler event)
(let ((k event.which))
(case k
((18)
;; alt
(set! wiki-key-modifier 'alt))
((36)
;; home
(window.scrollTo 0 0)
(stop-event-propagation event #f))
((35)
;; end
(window.scrollTo 0 (node-bounding-box-y (dom-get-element-by-id "wiki-foot")))
(stop-event-propagation event #f))
((82)
;; r
(when (eq? wiki-key-modifier 'alt)
(document.location.reload)
(stop-event-propagation event #f)))
((83)
;; s
(when (eq? wiki-key-modifier 'alt)
(let ((el (dom-get-element-by-id "wiki-toc-popup")))
(if (equal? (node-style-get el :display) "block")
(wiki-hide-toc-popup el)
(wiki-show-toc-popup el)))
(stop-event-propagation event #f))))))
;*---------------------------------------------------------------------*/
;* wiki-show-toc-popup ... */
;*---------------------------------------------------------------------*/
(define (wiki-show-toc-popup el)
(let ((m (format "~apx" (/ (main-window-height) 4))))
(node-style-set! el :left m)
(node-style-set! el :right m)
(let ((opacity (node-computed-style-get el :opacity)))
(if opacity
(let ((i 0))
(node-style-set! el :opacity 0)
(node-style-set! el :display "block")
(timeout wiki-anim-speed
(lambda ()
(node-style-set! el :opacity i)
(if (> (- opacity i) wiki-anim-step)
(begin
(set! i (+ i wiki-anim-step))
#t)
(begin
(node-style-set! el :opacity opacity)
#f)))))
(node-style-set! el :display "block")))))
;*---------------------------------------------------------------------*/
;* wiki-hide-menu ... */
;*---------------------------------------------------------------------*/
(define (wiki-hide-toc-popup el)
(let ((opacity (node-computed-style-get el :opacity)))
(if opacity
(let ((i 0))
(timeout wiki-anim-speed
(lambda ()
(node-style-set! el :opacity i)
(if (> i wiki-anim-step)
(begin
(set! i (- i wiki-anim-step))
#t)
(begin
(node-style-set! el :display "none")
(node-style-set! el :opacity opacity)
#f)))))
(node-style-set! el :display "none"))))
;*---------------------------------------------------------------------*/
;* event handlers */
;*---------------------------------------------------------------------*/
(add-event-listener! document "keydown" wiki-keydown-handler #t)
(add-event-listener! document "keyup" wiki-keyup-handler))
| null | https://raw.githubusercontent.com/manuel-serrano/hop/481cb10478286796addd2ec9ee29c95db27aa390/weblets/wiki/wiki.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* Wiki runtime system */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* wiki-key-modifier ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* wiki-keyup-handler ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* wiki-keydown-handler ... */
*---------------------------------------------------------------------*/
alt
home
end
r
s
*---------------------------------------------------------------------*/
* wiki-show-toc-popup ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* wiki-hide-menu ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* event handlers */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / hop/2.1.x / weblets / wiki / wiki.scm * /
* Author : * /
* Creation : Tue Mar 10 09:57:17 2009 * /
* Last change : Mon Mar 22 10:13:36 2010 ( serrano ) * /
* Copyright : 2009 - 10 * /
(module wiki_client
(export (wiki-hide-toc-popup obj)
(wiki-show-toc-popup obj)))
(define wiki-key-modifier #f)
(define wiki-anim-step 0.3)
(define wiki-anim-speed 5)
(define (wiki-keyup-handler event)
(when (= event.which 18) (set! wiki-key-modifier #f)))
(define (wiki-keydown-handler event)
(let ((k event.which))
(case k
((18)
(set! wiki-key-modifier 'alt))
((36)
(window.scrollTo 0 0)
(stop-event-propagation event #f))
((35)
(window.scrollTo 0 (node-bounding-box-y (dom-get-element-by-id "wiki-foot")))
(stop-event-propagation event #f))
((82)
(when (eq? wiki-key-modifier 'alt)
(document.location.reload)
(stop-event-propagation event #f)))
((83)
(when (eq? wiki-key-modifier 'alt)
(let ((el (dom-get-element-by-id "wiki-toc-popup")))
(if (equal? (node-style-get el :display) "block")
(wiki-hide-toc-popup el)
(wiki-show-toc-popup el)))
(stop-event-propagation event #f))))))
(define (wiki-show-toc-popup el)
(let ((m (format "~apx" (/ (main-window-height) 4))))
(node-style-set! el :left m)
(node-style-set! el :right m)
(let ((opacity (node-computed-style-get el :opacity)))
(if opacity
(let ((i 0))
(node-style-set! el :opacity 0)
(node-style-set! el :display "block")
(timeout wiki-anim-speed
(lambda ()
(node-style-set! el :opacity i)
(if (> (- opacity i) wiki-anim-step)
(begin
(set! i (+ i wiki-anim-step))
#t)
(begin
(node-style-set! el :opacity opacity)
#f)))))
(node-style-set! el :display "block")))))
(define (wiki-hide-toc-popup el)
(let ((opacity (node-computed-style-get el :opacity)))
(if opacity
(let ((i 0))
(timeout wiki-anim-speed
(lambda ()
(node-style-set! el :opacity i)
(if (> i wiki-anim-step)
(begin
(set! i (- i wiki-anim-step))
#t)
(begin
(node-style-set! el :display "none")
(node-style-set! el :opacity opacity)
#f)))))
(node-style-set! el :display "none"))))
(add-event-listener! document "keydown" wiki-keydown-handler #t)
(add-event-listener! document "keyup" wiki-keyup-handler))
|
780902e13eeeb2571bf32a319f2786c35caf725eabcc9ee3403ccdcd88fb136e | sgbj/MaximaSharp | zdotc.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 2edcbd958861 2012/05/30 03:34:52 toy $ "
" f2cl2.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl5.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ "
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ " )
;;; Using Lisp CMU Common Lisp 20d (20D Unicode)
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':array)
;;; (:array-slicing t) (:declare-common nil)
;;; (:float-format double-float))
(in-package :blas)
(defun zdotc (n zx incx zy incy)
(declare (type (array f2cl-lib:complex16 (*)) zy zx)
(type (f2cl-lib:integer4) incy incx n))
(f2cl-lib:with-multi-array-data
((zx f2cl-lib:complex16 zx-%data% zx-%offset%)
(zy f2cl-lib:complex16 zy-%data% zy-%offset%))
(prog ((i 0) (ix 0) (iy 0) (ztemp #C(0.0 0.0)) (zdotc #C(0.0 0.0)))
(declare (type (f2cl-lib:complex16) zdotc ztemp)
(type (f2cl-lib:integer4) iy ix i))
(setf ztemp (f2cl-lib:cmplx 0.0 0.0))
(setf zdotc (f2cl-lib:cmplx 0.0 0.0))
(if (<= n 0) (go end_label))
(if (and (= incx 1) (= incy 1)) (go label20))
(setf ix 1)
(setf iy 1)
(if (< incx 0)
(setf ix
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incx)
1)))
(if (< incy 0)
(setf iy
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incy)
1)))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i n) nil)
(tagbody
(setf ztemp
(+ ztemp
(*
(f2cl-lib:dconjg
(f2cl-lib:fref zx-%data% (ix) ((1 *)) zx-%offset%))
(f2cl-lib:fref zy-%data% (iy) ((1 *)) zy-%offset%))))
(setf ix (f2cl-lib:int-add ix incx))
(setf iy (f2cl-lib:int-add iy incy))
label10))
(setf zdotc ztemp)
(go end_label)
label20
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i n) nil)
(tagbody
(setf ztemp
(+ ztemp
(*
(f2cl-lib:dconjg
(f2cl-lib:fref zx-%data% (i) ((1 *)) zx-%offset%))
(f2cl-lib:fref zy-%data% (i) ((1 *)) zy-%offset%))))
label30))
(setf zdotc ztemp)
(go end_label)
end_label
(return (values zdotc nil nil nil nil nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::zdotc fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4))
:return-values '(nil nil nil nil nil)
:calls 'nil)))
| null | https://raw.githubusercontent.com/sgbj/MaximaSharp/75067d7e045b9ed50883b5eb09803b4c8f391059/Test/bin/Debug/Maxima-5.30.0/share/maxima/5.30.0/share/lapack/blas/zdotc.lisp | lisp | Compiled by f2cl version:
Using Lisp CMU Common Lisp 20d (20D Unicode)
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':array)
(:array-slicing t) (:declare-common nil)
(:float-format double-float)) | ( " f2cl1.l , v 2edcbd958861 2012/05/30 03:34:52 toy $ "
" f2cl2.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl5.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ "
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ " )
(in-package :blas)
(defun zdotc (n zx incx zy incy)
(declare (type (array f2cl-lib:complex16 (*)) zy zx)
(type (f2cl-lib:integer4) incy incx n))
(f2cl-lib:with-multi-array-data
((zx f2cl-lib:complex16 zx-%data% zx-%offset%)
(zy f2cl-lib:complex16 zy-%data% zy-%offset%))
(prog ((i 0) (ix 0) (iy 0) (ztemp #C(0.0 0.0)) (zdotc #C(0.0 0.0)))
(declare (type (f2cl-lib:complex16) zdotc ztemp)
(type (f2cl-lib:integer4) iy ix i))
(setf ztemp (f2cl-lib:cmplx 0.0 0.0))
(setf zdotc (f2cl-lib:cmplx 0.0 0.0))
(if (<= n 0) (go end_label))
(if (and (= incx 1) (= incy 1)) (go label20))
(setf ix 1)
(setf iy 1)
(if (< incx 0)
(setf ix
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incx)
1)))
(if (< incy 0)
(setf iy
(f2cl-lib:int-add
(f2cl-lib:int-mul (f2cl-lib:int-sub 1 n) incy)
1)))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i n) nil)
(tagbody
(setf ztemp
(+ ztemp
(*
(f2cl-lib:dconjg
(f2cl-lib:fref zx-%data% (ix) ((1 *)) zx-%offset%))
(f2cl-lib:fref zy-%data% (iy) ((1 *)) zy-%offset%))))
(setf ix (f2cl-lib:int-add ix incx))
(setf iy (f2cl-lib:int-add iy incy))
label10))
(setf zdotc ztemp)
(go end_label)
label20
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i n) nil)
(tagbody
(setf ztemp
(+ ztemp
(*
(f2cl-lib:dconjg
(f2cl-lib:fref zx-%data% (i) ((1 *)) zx-%offset%))
(f2cl-lib:fref zy-%data% (i) ((1 *)) zy-%offset%))))
label30))
(setf zdotc ztemp)
(go end_label)
end_label
(return (values zdotc nil nil nil nil nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::zdotc fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4))
:return-values '(nil nil nil nil nil)
:calls 'nil)))
|
43c3f635a505e33e96387aeddf1c1d9e7066cd8fe6cf4e3b7919e70e3bb1f578 | Deducteam/SizeChangeTool | call_extractor.ml | open Kernel
open Kernel.Term
open Rules
open Sign
open Sizematrix
open Callgraph
let rec dig_in_rhs : term -> (int * Basic.name * term array) list =
function
| Kind
| Type(_) -> assert false
| DB(_,_,_) -> []
| Const(_,f) -> [0, f, [||]]
| App(Const(_,f),u,l) ->
(0, f, Array.of_list (u::l))
:: List.concat (List.map dig_in_rhs (u::l))
| App(t,u,l) ->
List.concat (List.map dig_in_rhs (t::u::l))
| Lam(_,x,None,t) ->
List.map (fun (i,b,c) -> (i+1,b,c)) (dig_in_rhs t)
| Lam(_,x,Some ty,t) ->
(dig_in_rhs ty) @ (List.map (fun (i,b,c) -> (i+1,b,c)) (dig_in_rhs t))
| Pi(_,_,a,b) ->
(dig_in_rhs a) @ (List.map (fun (i,b,c) -> (i+1,b,c)) (dig_in_rhs b))
[ compare_term i t_l t_r ] returns Min1 if t_r is a subterm of t_l considering that t_r is under i lambdas , Zero if both terms are equal and Infi otherwise
let rec compare_term : int -> term -> term -> Cmp.t =
fun i t_l t_r ->
let rec comp_list : Cmp.t -> term list -> term list -> Cmp.t =
fun cur lp lt ->
match lp,lt with
| [], _ | _, [] -> cur
| a::l1, b::l2 ->
begin
match (compare_term i a b), cur with
| _ , Infi -> assert false
We are sure , that the current state [ cur ] can not contain a Infi , else the Infi would be the result of the function and no recursive call would be needed
| Infi, _ -> Infi
| Min1, _ -> comp_list Min1 l1 l2
| _ , Min1 -> comp_list Min1 l1 l2
| Zero, Zero -> comp_list Zero l1 l2
end
in
match t_l,t_r with
Two distinct variables are uncomparable
| DB (_,_,n), DB (_,_,m)
(* A variable when applied has the same size as if it was not applied *)
| DB (_,_,n), App(DB(_,_,m),_,_)
| App(DB (_,_,n),_,_), DB (_,_,m)
| App(DB (_,_,n),_,_), App(DB(_,_,m),_,_) ->
if n + i = m then Zero else Infi
| Lam(_,_,_,DB(_,_,n)), DB(_,_,m)
| Lam(_,_,_,App(DB(_,_,n),_,_)), DB(_,_,m)
| Lam(_,_,_,DB(_,_,n)), App(DB(_,_,m),_,_)
| Lam(_,_,_,App(DB(_,_,n),_,_)), App(DB(_,_,m),_,_) ->
if n + i = m + 1 then Zero else Infi
| App (Const(_,f),up,lp), App(Const(_,g),ut,lt) when f = g ->
begin
let res1 = comp_list Zero (up::lp) (ut::lt) in
let res2 =
Cmp.minus1 (
Cmp.mini (
List.map (fun t_ll -> compare_term i t_ll t_r) (up::lp)
)
) in
Cmp.plus res1 res2
end
| App (_,u,l),_ ->
Cmp.minus1
(Cmp.mini (List.map (fun t_ll -> compare_term i t_ll t_r) (u::l)))
| Lam(_,_,_,t_ll),Lam(_,_,_,t_rr) -> compare_term i t_ll t_rr
| _ -> Infi
let study_call : signature -> index -> term array ->
int -> index -> term array -> Cmp_matrix.t =
fun si fun_l arg_l nb fun_r arg_r ->
let h = arity_of (find_symb si fun_l).typ in
let w = arity_of (find_symb si fun_r).typ in
let matrix : Cmp_matrix.t =
{h; w; tab = Array.make_matrix h w Cmp.Infi} in
for i=0 to (min h (Array.length arg_l)) -1
do
for j=0 to (min w (Array.length arg_r)) -1
do
matrix.tab.(i).(j) <- compare_term nb arg_l.(i) arg_r.(j)
done
done;
matrix
(** Add the calls associated to a rule in the call graph *)
let add_rule : call_graph -> pre_rule -> call_graph =
fun gr r ->
let sign = gr.signature in
let ind_l = find_symbol_index sign r.head in
let new_calls = List.map
(fun (i,n,a) ->
let ind_r = find_symbol_index sign n in
ind_r,study_call sign ind_l r.args i ind_r a
) (dig_in_rhs r.rhs)
in
{ (List.fold_left
(fun g (callee,matrix) ->
add_call g {caller = ind_l; callee; matrix; rule_name=r.name }
) gr new_calls
) with signature = add_rule sign r
}
| null | https://raw.githubusercontent.com/Deducteam/SizeChangeTool/0a4db26ee1beed6ca7cf404ba6edd0539d540371/src/call_extractor.ml | ocaml | A variable when applied has the same size as if it was not applied
* Add the calls associated to a rule in the call graph | open Kernel
open Kernel.Term
open Rules
open Sign
open Sizematrix
open Callgraph
let rec dig_in_rhs : term -> (int * Basic.name * term array) list =
function
| Kind
| Type(_) -> assert false
| DB(_,_,_) -> []
| Const(_,f) -> [0, f, [||]]
| App(Const(_,f),u,l) ->
(0, f, Array.of_list (u::l))
:: List.concat (List.map dig_in_rhs (u::l))
| App(t,u,l) ->
List.concat (List.map dig_in_rhs (t::u::l))
| Lam(_,x,None,t) ->
List.map (fun (i,b,c) -> (i+1,b,c)) (dig_in_rhs t)
| Lam(_,x,Some ty,t) ->
(dig_in_rhs ty) @ (List.map (fun (i,b,c) -> (i+1,b,c)) (dig_in_rhs t))
| Pi(_,_,a,b) ->
(dig_in_rhs a) @ (List.map (fun (i,b,c) -> (i+1,b,c)) (dig_in_rhs b))
[ compare_term i t_l t_r ] returns Min1 if t_r is a subterm of t_l considering that t_r is under i lambdas , Zero if both terms are equal and Infi otherwise
let rec compare_term : int -> term -> term -> Cmp.t =
fun i t_l t_r ->
let rec comp_list : Cmp.t -> term list -> term list -> Cmp.t =
fun cur lp lt ->
match lp,lt with
| [], _ | _, [] -> cur
| a::l1, b::l2 ->
begin
match (compare_term i a b), cur with
| _ , Infi -> assert false
We are sure , that the current state [ cur ] can not contain a Infi , else the Infi would be the result of the function and no recursive call would be needed
| Infi, _ -> Infi
| Min1, _ -> comp_list Min1 l1 l2
| _ , Min1 -> comp_list Min1 l1 l2
| Zero, Zero -> comp_list Zero l1 l2
end
in
match t_l,t_r with
Two distinct variables are uncomparable
| DB (_,_,n), DB (_,_,m)
| DB (_,_,n), App(DB(_,_,m),_,_)
| App(DB (_,_,n),_,_), DB (_,_,m)
| App(DB (_,_,n),_,_), App(DB(_,_,m),_,_) ->
if n + i = m then Zero else Infi
| Lam(_,_,_,DB(_,_,n)), DB(_,_,m)
| Lam(_,_,_,App(DB(_,_,n),_,_)), DB(_,_,m)
| Lam(_,_,_,DB(_,_,n)), App(DB(_,_,m),_,_)
| Lam(_,_,_,App(DB(_,_,n),_,_)), App(DB(_,_,m),_,_) ->
if n + i = m + 1 then Zero else Infi
| App (Const(_,f),up,lp), App(Const(_,g),ut,lt) when f = g ->
begin
let res1 = comp_list Zero (up::lp) (ut::lt) in
let res2 =
Cmp.minus1 (
Cmp.mini (
List.map (fun t_ll -> compare_term i t_ll t_r) (up::lp)
)
) in
Cmp.plus res1 res2
end
| App (_,u,l),_ ->
Cmp.minus1
(Cmp.mini (List.map (fun t_ll -> compare_term i t_ll t_r) (u::l)))
| Lam(_,_,_,t_ll),Lam(_,_,_,t_rr) -> compare_term i t_ll t_rr
| _ -> Infi
let study_call : signature -> index -> term array ->
int -> index -> term array -> Cmp_matrix.t =
fun si fun_l arg_l nb fun_r arg_r ->
let h = arity_of (find_symb si fun_l).typ in
let w = arity_of (find_symb si fun_r).typ in
let matrix : Cmp_matrix.t =
{h; w; tab = Array.make_matrix h w Cmp.Infi} in
for i=0 to (min h (Array.length arg_l)) -1
do
for j=0 to (min w (Array.length arg_r)) -1
do
matrix.tab.(i).(j) <- compare_term nb arg_l.(i) arg_r.(j)
done
done;
matrix
let add_rule : call_graph -> pre_rule -> call_graph =
fun gr r ->
let sign = gr.signature in
let ind_l = find_symbol_index sign r.head in
let new_calls = List.map
(fun (i,n,a) ->
let ind_r = find_symbol_index sign n in
ind_r,study_call sign ind_l r.args i ind_r a
) (dig_in_rhs r.rhs)
in
{ (List.fold_left
(fun g (callee,matrix) ->
add_call g {caller = ind_l; callee; matrix; rule_name=r.name }
) gr new_calls
) with signature = add_rule sign r
}
|
c6042c3e8144c7fae973a8a78e212f8f1fedbc776147c6151d32067c46f5b941 | well-typed/large-records | R030.hs | # LANGUAGE TypeApplications #
#if PROFILE_CORESIZE
{-# OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #-}
#endif
#if PROFILE_TIMING
{-# OPTIONS_GHC -ddump-to-file -ddump-timings #-}
#endif
module Experiment.Induction_Tree_Phantom.Sized.R030 where
import Data.Proxy
import Common.EmptyClass_Tree_Phantom
import Common.HListOfSize.HL030
requiresInstance :: ()
requiresInstance = requireEmptyClass (Proxy @ExampleFields)
| null | https://raw.githubusercontent.com/well-typed/large-records/c6c2b51af11e90f30822543d7ce4d1cb28cee294/large-records-benchmarks/bench/experiments/Experiment/Induction_Tree_Phantom/Sized/R030.hs | haskell | # OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #
# OPTIONS_GHC -ddump-to-file -ddump-timings # | # LANGUAGE TypeApplications #
#if PROFILE_CORESIZE
#endif
#if PROFILE_TIMING
#endif
module Experiment.Induction_Tree_Phantom.Sized.R030 where
import Data.Proxy
import Common.EmptyClass_Tree_Phantom
import Common.HListOfSize.HL030
requiresInstance :: ()
requiresInstance = requireEmptyClass (Proxy @ExampleFields)
|
95fdf2ff414d755a62d34793df8644280ebefa7aefa1f95e6d6d18139575f562 | dolotech/erlang_server | pt_attain.erl | %%----------------------------------------------------
协议24 - 成就
%%
$ Id$
%%
%% @author Rolong<>
%%----------------------------------------------------
-module(pt_attain).
-export([handle/3]).
-include("common.hrl").
-include("hero.hrl").
-include("equ.hrl").
-include("prop.hrl").
领取成就
handle(24004, [Id], Rs) ->
Data = data_attain:get(Id),
PriceNum = util:get_val(num, Data),
if
Data == undefined ->
{ok, [127, 0, 0]};
true ->
case util:get_val(tid, Data) of
1 ->
NewDiamond = Rs#role.diamond + PriceNum ,
%% Rs1 = Rs#role{diamond = NewDiamond},
Rs1 = lib_role:add_attr(diamond, PriceNum, Rs),
Rs2 = lib_role:add_attr_ok(diamond, 27, Rs, Rs1),
lib_role:notice(Rs2),
{NextId, State, Rs0} = attain_next(Id, Rs2),
?DEBUG("NextId:~w, State:~w ", [NextId, State]),
{ok, [0, NextId, State], Rs0};
2 ->
NewGold = Rs#role.gold + PriceNum ,
Rs1 = Rs#role{gold = NewGold } ,
Rs1 = lib_role:add_attr(gold, PriceNum, Rs),
Rs2 = lib_role:add_attr_ok(gold, 27, Rs, Rs1),
lib_role:notice(Rs2),
{NextId, State, Rs0} = attain_next(Id, Rs2),
{ok, [0, NextId, State], Rs0};
3 ->
{ Atar , A , B , C } = Rs#role.luck ,
Luck = { Atar + PriceNum , A , B , C } ,
%% Rs1 = Rs#role{luck = Luck},
Rs1 = lib_role:add_attr(luck, PriceNum, Rs),
lib_role:notice(luck, Rs1),
{NextId, State, Rs2} = attain_next(Id, Rs1),
{ok, [0, NextId, State], Rs2};
Tid ->
case mod_item:add_item(Rs, Tid, PriceNum) of
{ok, Rs1, PA, EA} ->
mod_item:send_notice(Rs1#role.pid_sender, PA, EA),
{NextId, State, Rs2} = attain_next(Id, Rs1),
{ok, [0, NextId, State], Rs2#role{save = [role, items]}};
%% TODO:邮件系统
{error, full} ->
{ok, [3, 0, 0]};
{error, _} ->
{ok, [128, 0, 0]}
end
end
end;
%% 初始化成就
handle(24006, [], Rs) ->
IdList = data_attain:get(ids),
case Rs#role.attain of
[] ->
F1 = fun(Id) ->
Team = data_attain:get(Id),
Type = util:get_val(type, Team),
case util:get_val(start, Team, 0) of
1 ->
%% Condition = data_attain:get(condition, Team),
case util:get_val(next, Team, 0) of
0 -> {Id, 0, Type, 0, 0};
NextId ->
{Id, NextId, Type, 0, 0}
end;
0 -> 0
end
end,
AttainList1 = [F1(Id) || Id <- IdList, F1(Id) > 0],
?DEBUG("AttainList1:~w", [AttainList1]),
L2 = mod_attain:attain_today() ++ AttainList1,
?DEBUG("Attain_today:~w", [L2]),
L = [{Id, State} || {Id, _, _, _, State} <- L2],
?DEBUG("L:~w", [L]),
Rs1 = Rs#role{attain = L2},
{ok, [L], Rs1};
Attain ->
?DEBUG("Attain:~w", [Attain]),
F2 = fun(Id, NextId, Type, Condition, State) ->
case State =:= 2 of
true ->
Team = data_attain:get(Id),
S = util:get_val(condition, Team),
case util:get_val(next, Team, 0) of
0 -> {Id, 0, Type, Condition, 2};
Nid ->
case Condition >= S of
true ->
{Id, Nid, Type, Condition, 1};
false ->
{Id, Nid, Type, Condition, 0}
end
end;
false ->
{Id, NextId, Type, Condition, State}
end
end,
T = [F2(A,B,C,D,E) || {A,B,C,D,E} <- Attain],
Rs1 = Rs#role{attain = T},
L = [{Id, State} || {Id, _, _, _, State} <- T, State < 2],
{ok, [L], Rs1}
end;
handle(_Cmd, _Data, _RoleState) ->
{error, bad_request}.
%% === 私有函数 ===
下一个成就
attain_next(Id, Rs) ->
Data = data_attain:get(Id),
MyAttain = Rs#role.attain,
Lists1 = lists:keydelete(Id, 1, MyAttain),
%% 是否存在下一个成就
case util:get_val(next, Data, 0) of
0 ->
{A, B, C, D, _E} = lists:keyfind(Id, 1, MyAttain),
Lists2 = [{A,B,C,D,2} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{0, 0, Rs1};
NextId ->
Data2 = data_attain:get(NextId),
{_, _, Type, Condition, _} = lists:keyfind(Id, 1, MyAttain),
S = util:get_val(condition, Data2),
%% 是否已经完成可领取
case Condition >= S of
true ->
Lists2 = [{NextId, 0, Type, Condition, 0} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{NextId, 1, Rs1};
false ->
%% 是否存在下一个成就
case util:get_val(next, Data2, 0) of
0 ->
Lists2 = [{NextId, 0, Type, Condition, 0} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{NextId, 0, Rs1};
NextId2 ->
Lists2 = [{NextId, NextId2, Type, Condition, 0} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{NextId, 0, Rs1}
end
end
end.
% % 成就是否达到条件 , 更新完成状态 ,
%% attain_state(Type, Num, Rs) ->
%% MyAttain = Rs#role.attain,
case lists : keyfind(Type , 3 , MyAttain ) of
%% false -> Rs;
%% {Id, NextId, Type, Condition, State} ->
= data_attain : get(Id ) ,
S1 = util : get_val(condition , ) ,
%% %% Data2 = data_attain:get(NextId),
%% %% S2 = util:get_val(condition, Data2),
%% case Condition >= S1 of
%% true ->
Lists1 = { Id , NextId , Type , Condition + Num , State } ,
Lists2 = lists : keyreplace(Id , 1 , MyAttain , Lists1 ) ,
%% Rs#role{attain = Lists2};
%% false ->
%% case Condition + Num >= S1 of
%% true ->
Lists1 = { Id , NextId , Type , Condition + Num , 1 } ,
Lists2 = lists : keyreplace(Id , 1 , MyAttain , Lists1 ) ,
sender : pack_send(Rs#role.pid_sender , 24001 , [ I d ] ) ,
%% Rs#role{attain = Lists2};
%% false ->
%% Lists1 = {Id,NextId, Type, Condition + Num, State},
Lists2 = lists : keyreplace(Id , 1 , MyAttain , Lists1 ) ,
%% Rs#role{attain = Lists2}
%% end
%% end
%% end.
vim : set marker foldmarker=%%',%% . :
| null | https://raw.githubusercontent.com/dolotech/erlang_server/44ea3693317f60e18b19c9ddfa179307cbd646d7/src/pt/pt_attain.erl | erlang | ----------------------------------------------------
@author Rolong<>
----------------------------------------------------
Rs1 = Rs#role{diamond = NewDiamond},
Rs1 = Rs#role{luck = Luck},
TODO:邮件系统
初始化成就
Condition = data_attain:get(condition, Team),
=== 私有函数 ===
是否存在下一个成就
是否已经完成可领取
是否存在下一个成就
% 成就是否达到条件 , 更新完成状态 ,
attain_state(Type, Num, Rs) ->
MyAttain = Rs#role.attain,
false -> Rs;
{Id, NextId, Type, Condition, State} ->
%% Data2 = data_attain:get(NextId),
%% S2 = util:get_val(condition, Data2),
case Condition >= S1 of
true ->
Rs#role{attain = Lists2};
false ->
case Condition + Num >= S1 of
true ->
Rs#role{attain = Lists2};
false ->
Lists1 = {Id,NextId, Type, Condition + Num, State},
Rs#role{attain = Lists2}
end
end
end.
',%% . :
| 协议24 - 成就
$ Id$
-module(pt_attain).
-export([handle/3]).
-include("common.hrl").
-include("hero.hrl").
-include("equ.hrl").
-include("prop.hrl").
领取成就
handle(24004, [Id], Rs) ->
Data = data_attain:get(Id),
PriceNum = util:get_val(num, Data),
if
Data == undefined ->
{ok, [127, 0, 0]};
true ->
case util:get_val(tid, Data) of
1 ->
NewDiamond = Rs#role.diamond + PriceNum ,
Rs1 = lib_role:add_attr(diamond, PriceNum, Rs),
Rs2 = lib_role:add_attr_ok(diamond, 27, Rs, Rs1),
lib_role:notice(Rs2),
{NextId, State, Rs0} = attain_next(Id, Rs2),
?DEBUG("NextId:~w, State:~w ", [NextId, State]),
{ok, [0, NextId, State], Rs0};
2 ->
NewGold = Rs#role.gold + PriceNum ,
Rs1 = Rs#role{gold = NewGold } ,
Rs1 = lib_role:add_attr(gold, PriceNum, Rs),
Rs2 = lib_role:add_attr_ok(gold, 27, Rs, Rs1),
lib_role:notice(Rs2),
{NextId, State, Rs0} = attain_next(Id, Rs2),
{ok, [0, NextId, State], Rs0};
3 ->
{ Atar , A , B , C } = Rs#role.luck ,
Luck = { Atar + PriceNum , A , B , C } ,
Rs1 = lib_role:add_attr(luck, PriceNum, Rs),
lib_role:notice(luck, Rs1),
{NextId, State, Rs2} = attain_next(Id, Rs1),
{ok, [0, NextId, State], Rs2};
Tid ->
case mod_item:add_item(Rs, Tid, PriceNum) of
{ok, Rs1, PA, EA} ->
mod_item:send_notice(Rs1#role.pid_sender, PA, EA),
{NextId, State, Rs2} = attain_next(Id, Rs1),
{ok, [0, NextId, State], Rs2#role{save = [role, items]}};
{error, full} ->
{ok, [3, 0, 0]};
{error, _} ->
{ok, [128, 0, 0]}
end
end
end;
handle(24006, [], Rs) ->
IdList = data_attain:get(ids),
case Rs#role.attain of
[] ->
F1 = fun(Id) ->
Team = data_attain:get(Id),
Type = util:get_val(type, Team),
case util:get_val(start, Team, 0) of
1 ->
case util:get_val(next, Team, 0) of
0 -> {Id, 0, Type, 0, 0};
NextId ->
{Id, NextId, Type, 0, 0}
end;
0 -> 0
end
end,
AttainList1 = [F1(Id) || Id <- IdList, F1(Id) > 0],
?DEBUG("AttainList1:~w", [AttainList1]),
L2 = mod_attain:attain_today() ++ AttainList1,
?DEBUG("Attain_today:~w", [L2]),
L = [{Id, State} || {Id, _, _, _, State} <- L2],
?DEBUG("L:~w", [L]),
Rs1 = Rs#role{attain = L2},
{ok, [L], Rs1};
Attain ->
?DEBUG("Attain:~w", [Attain]),
F2 = fun(Id, NextId, Type, Condition, State) ->
case State =:= 2 of
true ->
Team = data_attain:get(Id),
S = util:get_val(condition, Team),
case util:get_val(next, Team, 0) of
0 -> {Id, 0, Type, Condition, 2};
Nid ->
case Condition >= S of
true ->
{Id, Nid, Type, Condition, 1};
false ->
{Id, Nid, Type, Condition, 0}
end
end;
false ->
{Id, NextId, Type, Condition, State}
end
end,
T = [F2(A,B,C,D,E) || {A,B,C,D,E} <- Attain],
Rs1 = Rs#role{attain = T},
L = [{Id, State} || {Id, _, _, _, State} <- T, State < 2],
{ok, [L], Rs1}
end;
handle(_Cmd, _Data, _RoleState) ->
{error, bad_request}.
下一个成就
attain_next(Id, Rs) ->
Data = data_attain:get(Id),
MyAttain = Rs#role.attain,
Lists1 = lists:keydelete(Id, 1, MyAttain),
case util:get_val(next, Data, 0) of
0 ->
{A, B, C, D, _E} = lists:keyfind(Id, 1, MyAttain),
Lists2 = [{A,B,C,D,2} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{0, 0, Rs1};
NextId ->
Data2 = data_attain:get(NextId),
{_, _, Type, Condition, _} = lists:keyfind(Id, 1, MyAttain),
S = util:get_val(condition, Data2),
case Condition >= S of
true ->
Lists2 = [{NextId, 0, Type, Condition, 0} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{NextId, 1, Rs1};
false ->
case util:get_val(next, Data2, 0) of
0 ->
Lists2 = [{NextId, 0, Type, Condition, 0} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{NextId, 0, Rs1};
NextId2 ->
Lists2 = [{NextId, NextId2, Type, Condition, 0} | Lists1],
Rs1 = Rs#role{attain = Lists2},
{NextId, 0, Rs1}
end
end
end.
case lists : keyfind(Type , 3 , MyAttain ) of
= data_attain : get(Id ) ,
S1 = util : get_val(condition , ) ,
Lists1 = { Id , NextId , Type , Condition + Num , State } ,
Lists2 = lists : keyreplace(Id , 1 , MyAttain , Lists1 ) ,
Lists1 = { Id , NextId , Type , Condition + Num , 1 } ,
Lists2 = lists : keyreplace(Id , 1 , MyAttain , Lists1 ) ,
sender : pack_send(Rs#role.pid_sender , 24001 , [ I d ] ) ,
Lists2 = lists : keyreplace(Id , 1 , MyAttain , Lists1 ) ,
|
b3bf0c543cef81421fa701d5a4db10cec37ea32b19c96cfcd48384261617a92e | Z572/guile-wlroots | drm-format-set.scm | (define-module (wlroots render drm-format-set)
#:use-module (oop goops)
#:duplicates (merge-accessors merge-generics replace warn-override-core warn last)
#:use-module (wlroots types)
#:use-module (wayland util)
#:use-module (bytestructures guile)
#:use-module ((system foreign) #:prefix ffi:)
#:use-module (wlroots utils)
#:use-module (wlroots util box)
#:export (wlr-drm-format-set-finish
wlr-drm-format-set-get
wlr-drm-format-set-has
wlr-drm-format-set-add
wlr-drm-format-set-intersect
.len .capacity .formats .format .modifiers))
(define-wlr-types-class wlr-drm-format ()
(format #:accessor .format)
(len #:accessor .len)
(capacity #:accessor .capacity)
(modifiers #:accessor .modifiers)
#:descriptor %wlr-drm-format-struct)
(define-wlr-types-class wlr-drm-format-set ()
(len #:accessor .len)
(capacity #:accessor .capacity)
(formats #:accessor .formats)
#:descriptor %wlr-drm-format-set-struct)
(define-wlr-procedure (wlr-drm-format-set-finish set)
(ffi:void "wlr_drm_format_set_finish" (list '*))
(% (unwrap-wlr-drm-format-set set)))
(define-wlr-procedure (wlr-drm-format-set-get set format)
('* "wlr_drm_format_set_get" (list '* ffi:uint32))
(wrap-wlr-drm-format (% (unwrap-wlr-drm-format-set set) format)))
(define-wlr-procedure (wlr-drm-format-set-has set format modifier)
(ffi:int8 "wlr_drm_format_set_has" (list '* ffi:uint32 ffi:uint64))
(not (zero? (% (unwrap-wlr-drm-format-set set) format modifier))))
(define-wlr-procedure (wlr-drm-format-set-add set format modifier)
(ffi:int8 "wlr_drm_format_set_add" (list '* ffi:uint32 ffi:uint64))
(not (zero? (% (unwrap-wlr-drm-format-set set) format modifier))))
(define-wlr-procedure (wlr-drm-format-set-intersect dst a b)
(ffi:int8 "wlr_drm_format_set_intersect" (list '* '* '*))
(not (zero?
(% (unwrap-wlr-drm-format-set dst)
(unwrap-wlr-drm-format-set a)
(unwrap-wlr-drm-format-set b)))))
| null | https://raw.githubusercontent.com/Z572/guile-wlroots/dc6cd05d5c46f811d75cbc30d1464820b19b1de8/wlroots/render/drm-format-set.scm | scheme | (define-module (wlroots render drm-format-set)
#:use-module (oop goops)
#:duplicates (merge-accessors merge-generics replace warn-override-core warn last)
#:use-module (wlroots types)
#:use-module (wayland util)
#:use-module (bytestructures guile)
#:use-module ((system foreign) #:prefix ffi:)
#:use-module (wlroots utils)
#:use-module (wlroots util box)
#:export (wlr-drm-format-set-finish
wlr-drm-format-set-get
wlr-drm-format-set-has
wlr-drm-format-set-add
wlr-drm-format-set-intersect
.len .capacity .formats .format .modifiers))
(define-wlr-types-class wlr-drm-format ()
(format #:accessor .format)
(len #:accessor .len)
(capacity #:accessor .capacity)
(modifiers #:accessor .modifiers)
#:descriptor %wlr-drm-format-struct)
(define-wlr-types-class wlr-drm-format-set ()
(len #:accessor .len)
(capacity #:accessor .capacity)
(formats #:accessor .formats)
#:descriptor %wlr-drm-format-set-struct)
(define-wlr-procedure (wlr-drm-format-set-finish set)
(ffi:void "wlr_drm_format_set_finish" (list '*))
(% (unwrap-wlr-drm-format-set set)))
(define-wlr-procedure (wlr-drm-format-set-get set format)
('* "wlr_drm_format_set_get" (list '* ffi:uint32))
(wrap-wlr-drm-format (% (unwrap-wlr-drm-format-set set) format)))
(define-wlr-procedure (wlr-drm-format-set-has set format modifier)
(ffi:int8 "wlr_drm_format_set_has" (list '* ffi:uint32 ffi:uint64))
(not (zero? (% (unwrap-wlr-drm-format-set set) format modifier))))
(define-wlr-procedure (wlr-drm-format-set-add set format modifier)
(ffi:int8 "wlr_drm_format_set_add" (list '* ffi:uint32 ffi:uint64))
(not (zero? (% (unwrap-wlr-drm-format-set set) format modifier))))
(define-wlr-procedure (wlr-drm-format-set-intersect dst a b)
(ffi:int8 "wlr_drm_format_set_intersect" (list '* '* '*))
(not (zero?
(% (unwrap-wlr-drm-format-set dst)
(unwrap-wlr-drm-format-set a)
(unwrap-wlr-drm-format-set b)))))
| |
c33246e52b367c36f1afbca959828d794d99344c0b8a9a1e5a15d08f53f9cddf | abcdw/rde | admin.scm | ;;; rde --- Reproducible development environment.
;;;
Copyright © 2022 < >
;;;
;;; This file is part of rde.
;;;
;;; rde is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; rde is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with rde. If not, see </>.
(define-module (rde system services admin)
#:use-module (rde serializers utils)
#:use-module (gnu services)
#:use-module (gnu services configuration)
#:use-module (gnu packages admin)
#:use-module (gnu system shadow)
#:use-module (guix packages)
#:use-module (guix gexp)
#:use-module (guix modules)
#:use-module (srfi srfi-1)
#:use-module (ice-9 match)
#:export (sudoers-service-type))
;;;
Sudoers .
;;;
(define default-sudoers-content
"\
root ALL=(ALL) ALL
%wheel ALL=(ALL) ALL\n")
(define (validated-sudoers-file file)
"Return a copy of FILE, a sudoers file, after checking that it is
syntactically correct."
(computed-file "sudoers"
(with-imported-modules '((guix build utils))
#~(begin
(use-modules (guix build utils))
(invoke #+(file-append sudo "/sbin/visudo")
"--check" "--file" #$file)
(copy-file #$file #$output)))))
(define (sudoers-file config)
`(("sudoers"
,(validated-sudoers-file
(mixed-text-file
"sudoers"
(serialize-gexp-text-config #f config))))))
(define sudoers-service-type
(service-type (name 'sudoers)
(extensions
(list (service-extension etc-service-type sudoers-file)))
(default-value (list default-sudoers-content))
(compose concatenate)
(extend append)
(description
"Manage the content of @file{/etc/sudoers}.")))
| null | https://raw.githubusercontent.com/abcdw/rde/faa6bbfda680c444a1ba6d07782b7665ef1ebc00/src/rde/system/services/admin.scm | scheme | rde --- Reproducible development environment.
This file is part of rde.
rde is free software; you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
rde is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with rde. If not, see </>.
| Copyright © 2022 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
(define-module (rde system services admin)
#:use-module (rde serializers utils)
#:use-module (gnu services)
#:use-module (gnu services configuration)
#:use-module (gnu packages admin)
#:use-module (gnu system shadow)
#:use-module (guix packages)
#:use-module (guix gexp)
#:use-module (guix modules)
#:use-module (srfi srfi-1)
#:use-module (ice-9 match)
#:export (sudoers-service-type))
Sudoers .
(define default-sudoers-content
"\
root ALL=(ALL) ALL
%wheel ALL=(ALL) ALL\n")
(define (validated-sudoers-file file)
"Return a copy of FILE, a sudoers file, after checking that it is
syntactically correct."
(computed-file "sudoers"
(with-imported-modules '((guix build utils))
#~(begin
(use-modules (guix build utils))
(invoke #+(file-append sudo "/sbin/visudo")
"--check" "--file" #$file)
(copy-file #$file #$output)))))
(define (sudoers-file config)
`(("sudoers"
,(validated-sudoers-file
(mixed-text-file
"sudoers"
(serialize-gexp-text-config #f config))))))
(define sudoers-service-type
(service-type (name 'sudoers)
(extensions
(list (service-extension etc-service-type sudoers-file)))
(default-value (list default-sudoers-content))
(compose concatenate)
(extend append)
(description
"Manage the content of @file{/etc/sudoers}.")))
|
4835c5a31abeae87ed9368e2e36ae6f34b3baa6fad3dea9680a627aafef84fdd | green-coder/girouette | comprehensive.cljc | (ns acme.frontend.comprehensive)
;; ---------------------------------------------------
;; Use {:retrieve :comprehensive} to collect those
;; (it's the default retrieval method)
;; ---------------------------------------------------
(defn compact-example []
[:h1.flex
[:div.flex-1 "hello"]
[:div.flex-2 "the"]
[:div.flex-3 "world"]])
| null | https://raw.githubusercontent.com/green-coder/girouette/6b1a23aafdd73beeabd81a884ee31c4bfbc1de99/example/reagent-demo/src/acme/frontend/comprehensive.cljc | clojure | ---------------------------------------------------
Use {:retrieve :comprehensive} to collect those
(it's the default retrieval method)
--------------------------------------------------- | (ns acme.frontend.comprehensive)
(defn compact-example []
[:h1.flex
[:div.flex-1 "hello"]
[:div.flex-2 "the"]
[:div.flex-3 "world"]])
|
d0cccc919010eb77d6a8a22e0ccfaad53867af0d0d5ece13b087ee6e89ed8009 | replikativ/konserve | common.clj | (ns benchmark.common
(:require [konserve.core :as k]
[konserve.filestore :refer [new-fs-store delete-store]]
[konserve.memory :refer [new-mem-store]]
[clojure.core.async :refer [<!!] :as async]))
;; Store
(def fs-store-path "/tmp/konserve-fs-bench")
(defmulti get-store
(fn [type] type))
(defmethod get-store :file [_]
(delete-store fs-store-path)
(<!! (new-fs-store fs-store-path)))
(defmethod get-store :memory [_]
(<!! (new-mem-store)))
(defn setup-store [type n]
(let [population (range n)
store (get-store type)]
(run! #(<!! (k/assoc store % nil))
population)
store))
; time measurements
(defmacro timed
"Evaluates expr. Returns the value of expr and the time in a map."
[expr]
`(let [start# (. System (nanoTime))
ret# ~expr]
(/ (double (- (. System (nanoTime)) start#)) 1000000.0)))
(defn statistics [times]
(let [vtimes (vec times)
n (count vtimes)
mean (/ (apply + times) n)]
{:mean mean
:median (nth (sort times) (int (/ n 2)))
:sd (->> times
(map #(* (- % mean) (- % mean)))
(apply +)
(* (/ 1.0 n))
Math/sqrt)
:observations vtimes}))
;; other
(defn transpose [vec-of-vecs]
(apply map vector vec-of-vecs))
(defmulti benchmark
(fn [function _stores _store-sizes _iterations] function))
(defmulti plots
(fn [function _data] function))
| null | https://raw.githubusercontent.com/replikativ/konserve/da09c0fcc066d0e13d6eded673a83382bb9f9e33/benchmark/src/benchmark/common.clj | clojure | Store
time measurements
other | (ns benchmark.common
(:require [konserve.core :as k]
[konserve.filestore :refer [new-fs-store delete-store]]
[konserve.memory :refer [new-mem-store]]
[clojure.core.async :refer [<!!] :as async]))
(def fs-store-path "/tmp/konserve-fs-bench")
(defmulti get-store
(fn [type] type))
(defmethod get-store :file [_]
(delete-store fs-store-path)
(<!! (new-fs-store fs-store-path)))
(defmethod get-store :memory [_]
(<!! (new-mem-store)))
(defn setup-store [type n]
(let [population (range n)
store (get-store type)]
(run! #(<!! (k/assoc store % nil))
population)
store))
(defmacro timed
"Evaluates expr. Returns the value of expr and the time in a map."
[expr]
`(let [start# (. System (nanoTime))
ret# ~expr]
(/ (double (- (. System (nanoTime)) start#)) 1000000.0)))
(defn statistics [times]
(let [vtimes (vec times)
n (count vtimes)
mean (/ (apply + times) n)]
{:mean mean
:median (nth (sort times) (int (/ n 2)))
:sd (->> times
(map #(* (- % mean) (- % mean)))
(apply +)
(* (/ 1.0 n))
Math/sqrt)
:observations vtimes}))
(defn transpose [vec-of-vecs]
(apply map vector vec-of-vecs))
(defmulti benchmark
(fn [function _stores _store-sizes _iterations] function))
(defmulti plots
(fn [function _data] function))
|
fdc9c2c47cc1672d70489d9e1130ef971a03046904bd876c85c3e52f499438ee | tengstrand/lein-polylith | time_test.clj | (ns leiningen.polylith.cmd.time-test
(:require [clojure.test :refer :all]
[leiningen.polylith.time :as time]))
(deftest milliseconds->minutes-and-seconds--less-than-a-minute--returns-seconds
(is (= "7.3 seconds"
(time/milliseconds->minutes-and-seconds 7345))))
(deftest milliseconds->minutes-and-seconds--more-then-a-minute--returns-minutes-and-seconds
(is (= "2 minutes 7.3 seconds"
(time/milliseconds->minutes-and-seconds 127345))))
| null | https://raw.githubusercontent.com/tengstrand/lein-polylith/27bf508a7b4806e4d2dfac01787e99edf2c1c306/test/leiningen/polylith/cmd/time_test.clj | clojure | (ns leiningen.polylith.cmd.time-test
(:require [clojure.test :refer :all]
[leiningen.polylith.time :as time]))
(deftest milliseconds->minutes-and-seconds--less-than-a-minute--returns-seconds
(is (= "7.3 seconds"
(time/milliseconds->minutes-and-seconds 7345))))
(deftest milliseconds->minutes-and-seconds--more-then-a-minute--returns-minutes-and-seconds
(is (= "2 minutes 7.3 seconds"
(time/milliseconds->minutes-and-seconds 127345))))
| |
bea06b5cc6fa5c4378777e5af5bd3618fb46bc723eb75fcd217d7958ef2439b5 | haskell/vector | append.hs | import qualified Data.Vector as U
import Data.Bits
main = print . U.sum . U.map (`shiftL` 2) $
(U.++) (U.replicate 10000000 (1::Int))
(U.replicate 10000000 (7::Int))
| null | https://raw.githubusercontent.com/haskell/vector/4c87e88f07aad166c6ae2ccb94fa539fbdd99a91/old-testsuite/microsuite/append.hs | haskell | import qualified Data.Vector as U
import Data.Bits
main = print . U.sum . U.map (`shiftL` 2) $
(U.++) (U.replicate 10000000 (1::Int))
(U.replicate 10000000 (7::Int))
| |
f051f00014c9b936ddf3a8e4955852b9a33cef146141218a9f47361a570d8c59 | juxt/roll | generate_config_sample.cljs | (ns roll.generate-config-sample
(:require [roll.samples]
[cljs.nodejs :as nodejs]))
(def fs (nodejs/require "fs"))
(defn- spit [f data]
(fs.writeFileSync f data))
(defn -main [& argv]
(spit "sample-config.edn" (roll.samples/generate-roll-config)))
| null | https://raw.githubusercontent.com/juxt/roll/1ef07d72f05b5604eec4f7d6a5dbf0d21ec3c8b3/test/roll/generate_config_sample.cljs | clojure | (ns roll.generate-config-sample
(:require [roll.samples]
[cljs.nodejs :as nodejs]))
(def fs (nodejs/require "fs"))
(defn- spit [f data]
(fs.writeFileSync f data))
(defn -main [& argv]
(spit "sample-config.edn" (roll.samples/generate-roll-config)))
| |
fae5a4833f39f7ae10548c8383593dd1b8f0751d7cc6d021876755c9673516e0 | emaphis/HtDP2e-solutions | ex073.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex073) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Ex . 73 :
Design the function posn - up - x , which consumes a Posn p and a Number n.
;; It produces a Posn like p with n in the x field.
;; Note Functions such as posn-up-x are often called updaters or functional
;; setters. They are extremely useful when you write large programs.
; Posn Number -> Posn
; produces a Posn given a Posn and a Number with the x field replaced by n
(check-expect (posn-up-x (make-posn 10 20) 30)
(make-posn 30 20))
; (define (posn-up-x p n) (make-posn 0 0)) ; stub
(define (posn-up-x p n)
(make-posn n (posn-y p)))
| null | https://raw.githubusercontent.com/emaphis/HtDP2e-solutions/ecb60b9a7bbf9b8999c0122b6ea152a3301f0a68/1-Fixed-Size-Data/05-Adding-Structure/ex073.rkt | racket | about the language level of this file in a form that our tools can easily process.
It produces a Posn like p with n in the x field.
Note Functions such as posn-up-x are often called updaters or functional
setters. They are extremely useful when you write large programs.
Posn Number -> Posn
produces a Posn given a Posn and a Number with the x field replaced by n
(define (posn-up-x p n) (make-posn 0 0)) ; stub | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex073) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Ex . 73 :
Design the function posn - up - x , which consumes a Posn p and a Number n.
(check-expect (posn-up-x (make-posn 10 20) 30)
(make-posn 30 20))
(define (posn-up-x p n)
(make-posn n (posn-y p)))
|
41102cb7e7a2ad908ccfe82d8b9e1b6d3498259b74fc7bc67790f5831dc4c00f | jrm-code-project/LISP-Machine | supdup-server.lisp | -*- Mode : LISP ; Package : SUPDUP ; Base:8 ; : ZL -*-
;;; This is a flavor definition generated by the window maker.
(DEFFLAVOR supdup-server-debug-frame
()
(TV:CONSTRAINT-FRAME)
(:DEFAULT-INIT-PLIST
:PANES '((ZMACS ZWEI:ZMACS-FRAME
:SAVE-BITS T)
(SUPDUP SUPDUP
:SAVE-BITS T)
(SERVER TV:WINDOW
:SAVE-BITS T))
:CONSTRAINTS '((SERVER-DEBUG (WHOLE)
((WHOLE :HORIZONTAL
(:EVEN)
(ZMACS right-side)
((ZMACS 0.75s0))
((right-side :VERTICAL
(:EVEN)
(SERVER SUPDUP)
((SERVER 0.33s0))
((SUPDUP :EVEN)))
))
))))
:SETTABLE-INSTANCE-VARIABLES)
(DEFMETHOD (supdup-server-debug-frame :AFTER :INIT) (&REST IGNORE)
(FUNCALL-SELF :SET-SELECTION-SUBSTITUTE (FUNCALL-SELF :GET-PANE 'ZMACS)))
(tv:add-system-key #\roman-iv 'supdup-server-debug-frame "Supdup Server")
(delete-initialization "SUPDUP" nil 'chaos:server-alist)
(add-initialization "SUPDUP"
'(process-run-function "SUPDUP Server" 'supdup-server)
NIL
'chaos:server-alist)
(defflavor serial-terminal-io
(input-io-buffer
output-io-buffer)
(si:bidirectional-stream)
:settable-instance-variables)
(defmethod (serial-terminal-io :after :init) (ignore)
(setq input-io-buffer (tv:make-io-buffer 1024.))
(setq output-io-buffer (tv:make-io-buffer 1024.))
)
(defmethod (serial-terminal-io :tyi) (&optional no-hang-p)
(tv:io-buffer-get input-io-buffer no-hang-p))
(defmethod (serial-terminal-io :untyi) (char)
(tv:io-buffer-unget input-io-buffer char))
(defmethod (serial-terminal-io :tyo) (char)
(tv:io-buffer-put output-io-buffer char))
(defvar supdup-server-lisp nil)
(defun supdup-server ()
(let ((conn (chaos:listen "SUPDUP"))
window)
(do ((w (send tv:main-screen :inferiors) (cdr w)))
((null w)
(chaos:reject conn "No debug window")
(return-from supdup-server nil))
(when (and (typep (car w) 'supdup-server-debug-frame)
(send (send (car w) :get-pane 'server) :exposed-p))
(setq window (send (car w) :get-pane 'server))
(return)))
(send window :clear-screen)
(chaos:accept conn)
(let ((net-stream (chaos:make-stream conn))
(term-stream (make-instance 'serial-terminal-io))
child error-instance)
(setq child (make-process "SUPDUP Server Input"))
(send child :preset 'supdup-server-input term-stream net-stream window)
(process-enable child)
(setq supdup-server-lisp (make-process "SUPDUP Server Lisp"))
(send supdup-server-lisp :preset 'supdup-server-lisp term-stream)
(process-enable supdup-server-lisp)
(unwind-protect
(condition-case (instance)
(supdup-server-output term-stream net-stream window)
(error (setq error-instance instance)))
(send net-stream :eof)
(chaos:close-conn conn (if error-instance (send error-instance :report nil) ""))
(without-interrupts
(if (send child :active-p)
(send child :kill))
(when (send supdup-server-lisp :active-p)
(send supdup-server-lisp :kill)
(setq supdup-server-lisp nil)))
))))
(defun supdup-server-lisp (stream)
(let ((*terminal-io* stream)
(*read-base* 8)
(*print-base* 8)
(*readtable* (copy-readtable si:standard-readtable))
(*package* (find-package "USER")))
(si:lisp-top-level1 stream)))
(defun supdup-server-output (term-stream net-stream *terminal-io*)
(let ((*read-base* 8)
(*print-base* 8)
(*readtable* (copy-readtable si:standard-readtable)))
(do ((char (tv:io-buffer-get (send term-stream :output-io-buffer))
(tv:io-buffer-get (send term-stream :output-io-buffer))))
(())
(send net-stream :tyo char)
(send net-stream :force-output))))
(defun supdup-server-input (term-stream net-stream *terminal-io*)
(let ((*read-base* 8)
(*print-base* 8)
(*readtable* (copy-readtable si:standard-readtable)))
(let ((nwords (dpb (get-18-bits net-stream) (byte 18. 0) -1)))
(if (or (< nwords -20)
(> nwords 0))
(ferror nil "bad number of words"))
(get-18-bits net-stream)
(when (not (zerop nwords))
(get-18-bits net-stream)
(if (not (= (get-18-bits net-stream) 7))
(ferror nil "not TCTYP 7"))
(incf nwords))
(when (not (zerop nwords))
;;lots of flags
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
(get-18-bits net-stream)
;;height in lines
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
(get-18-bits net-stream)
;;width in chars
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
;;scroll glitch
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
TTYSMT
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(do (())
((zerop nwords))
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords)))
(do ((char (send net-stream :tyi)
(send net-stream :tyi)))
(())
(tv:io-buffer-put (send term-stream :input-io-buffer) char)
)))
(defun get-18-bits (stream)
(let* ((b2 (send stream :tyi))
(b1 (send stream :tyi))
(b0 (send stream :tyi)))
(dpb b2
(byte 6 12.)
(dpb b1
(byte 6 6)
b0))))
| null | https://raw.githubusercontent.com/jrm-code-project/LISP-Machine/0a448d27f40761fafabe5775ffc550637be537b2/lambda/pace/hacks/supdup-server.lisp | lisp | Package : SUPDUP ; Base:8 ; : ZL -*-
This is a flavor definition generated by the window maker.
lots of flags
height in lines
width in chars
scroll glitch |
(DEFFLAVOR supdup-server-debug-frame
()
(TV:CONSTRAINT-FRAME)
(:DEFAULT-INIT-PLIST
:PANES '((ZMACS ZWEI:ZMACS-FRAME
:SAVE-BITS T)
(SUPDUP SUPDUP
:SAVE-BITS T)
(SERVER TV:WINDOW
:SAVE-BITS T))
:CONSTRAINTS '((SERVER-DEBUG (WHOLE)
((WHOLE :HORIZONTAL
(:EVEN)
(ZMACS right-side)
((ZMACS 0.75s0))
((right-side :VERTICAL
(:EVEN)
(SERVER SUPDUP)
((SERVER 0.33s0))
((SUPDUP :EVEN)))
))
))))
:SETTABLE-INSTANCE-VARIABLES)
(DEFMETHOD (supdup-server-debug-frame :AFTER :INIT) (&REST IGNORE)
(FUNCALL-SELF :SET-SELECTION-SUBSTITUTE (FUNCALL-SELF :GET-PANE 'ZMACS)))
(tv:add-system-key #\roman-iv 'supdup-server-debug-frame "Supdup Server")
(delete-initialization "SUPDUP" nil 'chaos:server-alist)
(add-initialization "SUPDUP"
'(process-run-function "SUPDUP Server" 'supdup-server)
NIL
'chaos:server-alist)
(defflavor serial-terminal-io
(input-io-buffer
output-io-buffer)
(si:bidirectional-stream)
:settable-instance-variables)
(defmethod (serial-terminal-io :after :init) (ignore)
(setq input-io-buffer (tv:make-io-buffer 1024.))
(setq output-io-buffer (tv:make-io-buffer 1024.))
)
(defmethod (serial-terminal-io :tyi) (&optional no-hang-p)
(tv:io-buffer-get input-io-buffer no-hang-p))
(defmethod (serial-terminal-io :untyi) (char)
(tv:io-buffer-unget input-io-buffer char))
(defmethod (serial-terminal-io :tyo) (char)
(tv:io-buffer-put output-io-buffer char))
(defvar supdup-server-lisp nil)
(defun supdup-server ()
(let ((conn (chaos:listen "SUPDUP"))
window)
(do ((w (send tv:main-screen :inferiors) (cdr w)))
((null w)
(chaos:reject conn "No debug window")
(return-from supdup-server nil))
(when (and (typep (car w) 'supdup-server-debug-frame)
(send (send (car w) :get-pane 'server) :exposed-p))
(setq window (send (car w) :get-pane 'server))
(return)))
(send window :clear-screen)
(chaos:accept conn)
(let ((net-stream (chaos:make-stream conn))
(term-stream (make-instance 'serial-terminal-io))
child error-instance)
(setq child (make-process "SUPDUP Server Input"))
(send child :preset 'supdup-server-input term-stream net-stream window)
(process-enable child)
(setq supdup-server-lisp (make-process "SUPDUP Server Lisp"))
(send supdup-server-lisp :preset 'supdup-server-lisp term-stream)
(process-enable supdup-server-lisp)
(unwind-protect
(condition-case (instance)
(supdup-server-output term-stream net-stream window)
(error (setq error-instance instance)))
(send net-stream :eof)
(chaos:close-conn conn (if error-instance (send error-instance :report nil) ""))
(without-interrupts
(if (send child :active-p)
(send child :kill))
(when (send supdup-server-lisp :active-p)
(send supdup-server-lisp :kill)
(setq supdup-server-lisp nil)))
))))
(defun supdup-server-lisp (stream)
(let ((*terminal-io* stream)
(*read-base* 8)
(*print-base* 8)
(*readtable* (copy-readtable si:standard-readtable))
(*package* (find-package "USER")))
(si:lisp-top-level1 stream)))
(defun supdup-server-output (term-stream net-stream *terminal-io*)
(let ((*read-base* 8)
(*print-base* 8)
(*readtable* (copy-readtable si:standard-readtable)))
(do ((char (tv:io-buffer-get (send term-stream :output-io-buffer))
(tv:io-buffer-get (send term-stream :output-io-buffer))))
(())
(send net-stream :tyo char)
(send net-stream :force-output))))
(defun supdup-server-input (term-stream net-stream *terminal-io*)
(let ((*read-base* 8)
(*print-base* 8)
(*readtable* (copy-readtable si:standard-readtable)))
(let ((nwords (dpb (get-18-bits net-stream) (byte 18. 0) -1)))
(if (or (< nwords -20)
(> nwords 0))
(ferror nil "bad number of words"))
(get-18-bits net-stream)
(when (not (zerop nwords))
(get-18-bits net-stream)
(if (not (= (get-18-bits net-stream) 7))
(ferror nil "not TCTYP 7"))
(incf nwords))
(when (not (zerop nwords))
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(when (not (zerop nwords))
TTYSMT
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords))
(do (())
((zerop nwords))
(get-18-bits net-stream)
(get-18-bits net-stream)
(incf nwords)))
(do ((char (send net-stream :tyi)
(send net-stream :tyi)))
(())
(tv:io-buffer-put (send term-stream :input-io-buffer) char)
)))
(defun get-18-bits (stream)
(let* ((b2 (send stream :tyi))
(b1 (send stream :tyi))
(b0 (send stream :tyi)))
(dpb b2
(byte 6 12.)
(dpb b1
(byte 6 6)
b0))))
|
b8ba53b3e42c63900d471428a56f4be9e1dc7e9f4ba9796e91800178835285f5 | samplecount/shake-language-c | BuildFlags.hs | Copyright 2012 - 2014 Samplecount S.L.
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
# LANGUAGE TemplateHaskell #
|
Description : Build flags record for building @C@ language projects
The ` BuildFlags ` record is an abstraction for various toolchain flags for
building executables and libraries from source files in a @C@-based language .
It 's intended to be toolchain - independent , but currently there 's a
bias towards binutils\/gcc / clang toolchains .
Description: Build flags record for building @C@ language projects
The `BuildFlags` record is an abstraction for various toolchain flags for
building executables and libraries from source files in a @C@-based language.
It's intended to be toolchain-independent, but currently there's a
bias towards binutils\/gcc/clang toolchains.
-}
module Development.Shake.Language.C.BuildFlags (
-- * Source Language
Language(..)
-- * Build flags
, BuildFlags
Poor man 's documentation for TH generated functions .
, systemIncludes -- | System include directories, referenced by @#include \<...\>@ in code and usually passed to the compiler with the @-isystem@ flag.
| User include directories , referenced by @#include " ... " @ in code and usually passed to the compiler with the @-I@ flag .
, defines -- | Preprocessor defines, a list of pairs of names with or without a value.
, preprocessorFlags -- | Other preprocessor flags.
, compilerFlags -- | Compiler flags, either generic ones or for a specific source 'Language'.
, libraryPath -- | Linker search path for libraries.
| List of libraries to link against . Note that you should use the library name without the @lib@ prefix and without extension .
, linkerFlags -- | Flags passed to the linker.
, localLibraries -- | Locally built static libraries to be linked against. See also the corresponding section in the <-language-c/blob/master/docs/Manual.md#locally-built-libraries manual>.
, archiverFlags -- | Flags passed to the object archiver.
-- ** Utilities for toolchain writers
, defineFlags
, compilerFlagsFor
-- ** Working with config files
, fromConfig
-- * Utilities
, (>>>=)
, append
, prepend
) where
import Control.Category ((>>>))
import Control.Monad
import Data.Char (isSpace)
import Data.Default.Class (Default(..))
import Data.List
import Data.List.Split
import Data.Maybe
import Data.Semigroup
import Development.Shake.Language.C.Language (Language(..))
import Development.Shake.Language.C.Label
import Development.Shake.Language.C.Util
| Record type for abstracting various toolchain command line flags .
` BuildFlags ` is an instance of ` Default ` , you can create a default record with
` def ` . ` BuildFlags ` is also an instance ` Monoid ` , you can create an empty record with
` mempty ` and append flags with ` mappend ` . ` def ` and ` mempty ` are synonyms :
> > > ( def : : BuildFlags ) = = ( : : BuildFlags )
True
Record accessors are ` Data . Label . Mono . Lens`es from the
< fclabels > package , which
makes accessing and modifying record fields a bit more convenient .
@fclabels@ was chosen over < lens >
because it has far fewer dependencies , which is convenient when installing
the Shake build system in a per - project cabal sandbox . We might switch to
@lens@ when it gets included in the platform .
There are two convenience functions for working with ` BuildFlags ` record fields
containing lists of flags , ` append ` and ` prepend ` . Since most combinators in
this library expect a function @BuildFlags - > BuildFlags@ , the following is a
common idiom :
@
buildFlags . append ` systemIncludes ` [ " path " ]
@
Note that when modifying the same record field , order of function composition
matters and you might want to use the arrow combinator ' > > > ' for appending in
source statement order :
> > > : {
get systemIncludes
$ append systemIncludes [ " path1 " ] . append systemIncludes [ " path2 " ]
$ mempty
:}
[ " path2","path1 " ]
> > > : {
get systemIncludes
$ append systemIncludes [ " path1 " ] > > > append systemIncludes [ " path2 " ]
$ mempty
:}
[ " path1","path2 " ]
See " Development . Shake . Language . C.Rules " for how to use ' BuildFlags ' in build
product rules .
`BuildFlags` is an instance of `Default`, you can create a default record with
`def`. `BuildFlags` is also an instance `Monoid`, you can create an empty record with
`mempty` and append flags with `mappend`. `def` and `mempty` are synonyms:
>>> (def :: BuildFlags) == (mempty :: BuildFlags)
True
Record accessors are `Data.Label.Mono.Lens`es from the
< fclabels> package, which
makes accessing and modifying record fields a bit more convenient.
@fclabels@ was chosen over < lens>
because it has far fewer dependencies, which is convenient when installing
the Shake build system in a per-project cabal sandbox. We might switch to
@lens@ when it gets included in the Haskell platform.
There are two convenience functions for working with `BuildFlags` record fields
containing lists of flags, `append` and `prepend`. Since most combinators in
this library expect a function @BuildFlags -> BuildFlags@, the following is a
common idiom:
@
buildFlags . append `systemIncludes` ["path"]
@
Note that when modifying the same record field, order of function composition
matters and you might want to use the arrow combinator '>>>' for appending in
source statement order:
>>> :{
get systemIncludes
$ append systemIncludes ["path1"] . append systemIncludes ["path2"]
$ mempty
:}
["path2","path1"]
>>> :{
get systemIncludes
$ append systemIncludes ["path1"] >>> append systemIncludes ["path2"]
$ mempty
:}
["path1","path2"]
See "Development.Shake.Language.C.Rules" for how to use 'BuildFlags' in build
product rules.
-}
data BuildFlags = BuildFlags {
_systemIncludes :: [FilePath]
, _userIncludes :: [FilePath]
, _defines :: [(String, Maybe String)]
, _preprocessorFlags :: [String]
, _compilerFlags :: [(Maybe Language, [String])]
, _libraryPath :: [FilePath]
, _libraries :: [String]
, _linkerFlags :: [String]
-- This is needed for linking against local libraries built by shake (the linker `needs' its inputs).
, _localLibraries :: [FilePath]
, _archiverFlags :: [String]
} deriving (Eq, Show)
mkLabel ''BuildFlags
defaultBuildFlags :: BuildFlags
defaultBuildFlags =
BuildFlags {
_systemIncludes = []
, _userIncludes = []
, _defines = []
, _preprocessorFlags = []
, _compilerFlags = []
, _libraryPath = []
, _libraries = []
, _linkerFlags = []
, _localLibraries = []
, _archiverFlags = []
}
instance Default BuildFlags where
def = defaultBuildFlags
instance Semigroup BuildFlags where
a <> b =
append systemIncludes (get systemIncludes a)
. append userIncludes (get userIncludes a)
. append defines (get defines a)
. append preprocessorFlags (get preprocessorFlags a)
. append compilerFlags (get compilerFlags a)
. append libraryPath (get libraryPath a)
. append libraries (get libraries a)
. append linkerFlags (get linkerFlags a)
. append localLibraries (get localLibraries a)
. append archiverFlags (get archiverFlags a)
$ b
instance Monoid BuildFlags where
mempty = defaultBuildFlags
mappend = (<>)
-- | Construct preprocessor flags from the 'defines' field of 'BuildFlags'.
defineFlags :: BuildFlags -> [String]
defineFlags = concatMapFlag "-D"
. map (\(a, b) -> maybe a (\b' -> a++"="++b') b)
. get defines
-- | Return a list of compiler flags for a specific source language.
compilerFlagsFor :: Maybe Language -> BuildFlags -> [String]
compilerFlagsFor lang = concat
. maybe (map snd . filter (isNothing.fst))
(mapMaybe . f) lang
. get compilerFlags
where f _ (Nothing, x) = Just x
f l (Just l', x) | l == l' = Just x
| otherwise = Nothing
-- | Construct a 'BuildFlags' modifier function from a config file.
--
-- See also "Development.Shake.Language.C.Config".
fromConfig :: (Functor m, Monad m) => (String -> m (Maybe String)) -> m (BuildFlags -> BuildFlags)
fromConfig getConfig = do
let parseConfig parser = fmap (maybe [] parser) . getConfig . ("BuildFlags."++)
config_systemIncludes <- parseConfig paths "systemIncludes"
config_userIncludes <- parseConfig paths "userIncludes"
config_defines <- parseConfig defines' "defines"
config_preprocessorFlags <- parseConfig flags "preprocessorFlags"
config_compilerFlags <- parseConfig ((:[]) . ((,)Nothing) . flags) "compilerFlags"
config_compilerFlags_c <- parseConfig ((:[]) . ((,)(Just C)) . flags) "compilerFlags.c"
config_compilerFlags_cxx <- parseConfig ((:[]) . ((,)(Just Cpp)) . flags) "compilerFlags.cxx"
config_libraryPath <- parseConfig paths "libraryPath"
config_libraries <- parseConfig flags "libraries"
config_linkerFlags <- parseConfig flags "linkerFlags"
config_localLibraries <- parseConfig paths "localLibraries"
config_archiverFlags <- parseConfig flags "archiverFlags"
return $ append systemIncludes config_systemIncludes
. append userIncludes config_userIncludes
. append defines config_defines
. append preprocessorFlags config_preprocessorFlags
. append compilerFlags (config_compilerFlags ++ config_compilerFlags_c ++ config_compilerFlags_cxx)
. append libraryPath config_libraryPath
. append libraries config_libraries
. append linkerFlags config_linkerFlags
. append localLibraries config_localLibraries
. append archiverFlags config_archiverFlags
where
flags = words' . dropWhile isSpace
paths = words' . dropWhile isSpace
define [] = error "Empty preprocessor definition"
define [k] = (k, Nothing)
define [k,v] = (k, Just v)
define (k:vs) = (k, Just (intercalate "=" vs))
defines' = map (define . splitOn "=") . flags
-- | Utility function for composing functions in a monad.
(>>>=) :: Monad m => m (a -> b) -> m (b -> c) -> m (a -> c)
(>>>=) = liftM2 (>>>)
| null | https://raw.githubusercontent.com/samplecount/shake-language-c/7eba37910bf711cc4c3fe6e7f065c8108858eea4/src/Development/Shake/Language/C/BuildFlags.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
* Source Language
* Build flags
| System include directories, referenced by @#include \<...\>@ in code and usually passed to the compiler with the @-isystem@ flag.
| Preprocessor defines, a list of pairs of names with or without a value.
| Other preprocessor flags.
| Compiler flags, either generic ones or for a specific source 'Language'.
| Linker search path for libraries.
| Flags passed to the linker.
| Locally built static libraries to be linked against. See also the corresponding section in the <-language-c/blob/master/docs/Manual.md#locally-built-libraries manual>.
| Flags passed to the object archiver.
** Utilities for toolchain writers
** Working with config files
* Utilities
This is needed for linking against local libraries built by shake (the linker `needs' its inputs).
| Construct preprocessor flags from the 'defines' field of 'BuildFlags'.
| Return a list of compiler flags for a specific source language.
| Construct a 'BuildFlags' modifier function from a config file.
See also "Development.Shake.Language.C.Config".
| Utility function for composing functions in a monad. | Copyright 2012 - 2014 Samplecount S.L.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
# LANGUAGE TemplateHaskell #
|
Description : Build flags record for building @C@ language projects
The ` BuildFlags ` record is an abstraction for various toolchain flags for
building executables and libraries from source files in a @C@-based language .
It 's intended to be toolchain - independent , but currently there 's a
bias towards binutils\/gcc / clang toolchains .
Description: Build flags record for building @C@ language projects
The `BuildFlags` record is an abstraction for various toolchain flags for
building executables and libraries from source files in a @C@-based language.
It's intended to be toolchain-independent, but currently there's a
bias towards binutils\/gcc/clang toolchains.
-}
module Development.Shake.Language.C.BuildFlags (
Language(..)
, BuildFlags
Poor man 's documentation for TH generated functions .
| User include directories , referenced by @#include " ... " @ in code and usually passed to the compiler with the @-I@ flag .
| List of libraries to link against . Note that you should use the library name without the @lib@ prefix and without extension .
, defineFlags
, compilerFlagsFor
, fromConfig
, (>>>=)
, append
, prepend
) where
import Control.Category ((>>>))
import Control.Monad
import Data.Char (isSpace)
import Data.Default.Class (Default(..))
import Data.List
import Data.List.Split
import Data.Maybe
import Data.Semigroup
import Development.Shake.Language.C.Language (Language(..))
import Development.Shake.Language.C.Label
import Development.Shake.Language.C.Util
| Record type for abstracting various toolchain command line flags .
` BuildFlags ` is an instance of ` Default ` , you can create a default record with
` def ` . ` BuildFlags ` is also an instance ` Monoid ` , you can create an empty record with
` mempty ` and append flags with ` mappend ` . ` def ` and ` mempty ` are synonyms :
> > > ( def : : BuildFlags ) = = ( : : BuildFlags )
True
Record accessors are ` Data . Label . Mono . Lens`es from the
< fclabels > package , which
makes accessing and modifying record fields a bit more convenient .
@fclabels@ was chosen over < lens >
because it has far fewer dependencies , which is convenient when installing
the Shake build system in a per - project cabal sandbox . We might switch to
@lens@ when it gets included in the platform .
There are two convenience functions for working with ` BuildFlags ` record fields
containing lists of flags , ` append ` and ` prepend ` . Since most combinators in
this library expect a function @BuildFlags - > BuildFlags@ , the following is a
common idiom :
@
buildFlags . append ` systemIncludes ` [ " path " ]
@
Note that when modifying the same record field , order of function composition
matters and you might want to use the arrow combinator ' > > > ' for appending in
source statement order :
> > > : {
get systemIncludes
$ append systemIncludes [ " path1 " ] . append systemIncludes [ " path2 " ]
$ mempty
:}
[ " path2","path1 " ]
> > > : {
get systemIncludes
$ append systemIncludes [ " path1 " ] > > > append systemIncludes [ " path2 " ]
$ mempty
:}
[ " path1","path2 " ]
See " Development . Shake . Language . C.Rules " for how to use ' BuildFlags ' in build
product rules .
`BuildFlags` is an instance of `Default`, you can create a default record with
`def`. `BuildFlags` is also an instance `Monoid`, you can create an empty record with
`mempty` and append flags with `mappend`. `def` and `mempty` are synonyms:
>>> (def :: BuildFlags) == (mempty :: BuildFlags)
True
Record accessors are `Data.Label.Mono.Lens`es from the
< fclabels> package, which
makes accessing and modifying record fields a bit more convenient.
@fclabels@ was chosen over < lens>
because it has far fewer dependencies, which is convenient when installing
the Shake build system in a per-project cabal sandbox. We might switch to
@lens@ when it gets included in the Haskell platform.
There are two convenience functions for working with `BuildFlags` record fields
containing lists of flags, `append` and `prepend`. Since most combinators in
this library expect a function @BuildFlags -> BuildFlags@, the following is a
common idiom:
@
buildFlags . append `systemIncludes` ["path"]
@
Note that when modifying the same record field, order of function composition
matters and you might want to use the arrow combinator '>>>' for appending in
source statement order:
>>> :{
get systemIncludes
$ append systemIncludes ["path1"] . append systemIncludes ["path2"]
$ mempty
:}
["path2","path1"]
>>> :{
get systemIncludes
$ append systemIncludes ["path1"] >>> append systemIncludes ["path2"]
$ mempty
:}
["path1","path2"]
See "Development.Shake.Language.C.Rules" for how to use 'BuildFlags' in build
product rules.
-}
data BuildFlags = BuildFlags {
_systemIncludes :: [FilePath]
, _userIncludes :: [FilePath]
, _defines :: [(String, Maybe String)]
, _preprocessorFlags :: [String]
, _compilerFlags :: [(Maybe Language, [String])]
, _libraryPath :: [FilePath]
, _libraries :: [String]
, _linkerFlags :: [String]
, _localLibraries :: [FilePath]
, _archiverFlags :: [String]
} deriving (Eq, Show)
mkLabel ''BuildFlags
defaultBuildFlags :: BuildFlags
defaultBuildFlags =
BuildFlags {
_systemIncludes = []
, _userIncludes = []
, _defines = []
, _preprocessorFlags = []
, _compilerFlags = []
, _libraryPath = []
, _libraries = []
, _linkerFlags = []
, _localLibraries = []
, _archiverFlags = []
}
instance Default BuildFlags where
def = defaultBuildFlags
instance Semigroup BuildFlags where
a <> b =
append systemIncludes (get systemIncludes a)
. append userIncludes (get userIncludes a)
. append defines (get defines a)
. append preprocessorFlags (get preprocessorFlags a)
. append compilerFlags (get compilerFlags a)
. append libraryPath (get libraryPath a)
. append libraries (get libraries a)
. append linkerFlags (get linkerFlags a)
. append localLibraries (get localLibraries a)
. append archiverFlags (get archiverFlags a)
$ b
instance Monoid BuildFlags where
mempty = defaultBuildFlags
mappend = (<>)
defineFlags :: BuildFlags -> [String]
defineFlags = concatMapFlag "-D"
. map (\(a, b) -> maybe a (\b' -> a++"="++b') b)
. get defines
compilerFlagsFor :: Maybe Language -> BuildFlags -> [String]
compilerFlagsFor lang = concat
. maybe (map snd . filter (isNothing.fst))
(mapMaybe . f) lang
. get compilerFlags
where f _ (Nothing, x) = Just x
f l (Just l', x) | l == l' = Just x
| otherwise = Nothing
fromConfig :: (Functor m, Monad m) => (String -> m (Maybe String)) -> m (BuildFlags -> BuildFlags)
fromConfig getConfig = do
let parseConfig parser = fmap (maybe [] parser) . getConfig . ("BuildFlags."++)
config_systemIncludes <- parseConfig paths "systemIncludes"
config_userIncludes <- parseConfig paths "userIncludes"
config_defines <- parseConfig defines' "defines"
config_preprocessorFlags <- parseConfig flags "preprocessorFlags"
config_compilerFlags <- parseConfig ((:[]) . ((,)Nothing) . flags) "compilerFlags"
config_compilerFlags_c <- parseConfig ((:[]) . ((,)(Just C)) . flags) "compilerFlags.c"
config_compilerFlags_cxx <- parseConfig ((:[]) . ((,)(Just Cpp)) . flags) "compilerFlags.cxx"
config_libraryPath <- parseConfig paths "libraryPath"
config_libraries <- parseConfig flags "libraries"
config_linkerFlags <- parseConfig flags "linkerFlags"
config_localLibraries <- parseConfig paths "localLibraries"
config_archiverFlags <- parseConfig flags "archiverFlags"
return $ append systemIncludes config_systemIncludes
. append userIncludes config_userIncludes
. append defines config_defines
. append preprocessorFlags config_preprocessorFlags
. append compilerFlags (config_compilerFlags ++ config_compilerFlags_c ++ config_compilerFlags_cxx)
. append libraryPath config_libraryPath
. append libraries config_libraries
. append linkerFlags config_linkerFlags
. append localLibraries config_localLibraries
. append archiverFlags config_archiverFlags
where
flags = words' . dropWhile isSpace
paths = words' . dropWhile isSpace
define [] = error "Empty preprocessor definition"
define [k] = (k, Nothing)
define [k,v] = (k, Just v)
define (k:vs) = (k, Just (intercalate "=" vs))
defines' = map (define . splitOn "=") . flags
(>>>=) :: Monad m => m (a -> b) -> m (b -> c) -> m (a -> c)
(>>>=) = liftM2 (>>>)
|
05eb7e4115209aecfffb836336d5c4441f3f9faa1b9bd3738a8b798a3a3ccc62 | zlatozar/study-paip | krep1.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : CH14 - FIRST ; Base : 10 -*-
;;; Code from Paradigms of Artificial Intelligence Programming
Copyright ( c ) 1991
File krep1.lisp : Knowledge representation code ; first version .
(in-package #:ch14-first)
;;; ____________________________________________________________________________
An ` nlist ' is implemented as a ( count . elements ) pair :
(defun make-empty-nlist ()
"Create a new, empty nlist."
(cons 0 nil))
(defun nlist-n (x) "The number of elements in an nlist." (car x))
(defun nlist-list (x) "The elements in an nlist." (cdr x))
(defun nlist-push (item nlist)
"Add a new element to an NLIST."
(incf (car nlist))
(push item (cdr nlist))
nlist)
;;; ____________________________________________________________________________
(defstruct (dtree (:type vector))
(first nil) (rest nil) (atoms nil) (var (make-empty-nlist)))
;;; ____________________________________________________________________________
Not all handle the closure properly , so change the local PREDICATES
;; to a global *predicates*
(defvar *predicates* nil)
(defun get-dtree (predicate)
"Fetch (or make) the `dtree' for this PREDICATE."
(cond ((get predicate 'dtree))
(t (push predicate *predicates*)
(setf (get predicate 'dtree) (make-dtree)))))
(defun clear-dtrees ()
"Remove all the dtrees for all the predicates."
(dolist (predicate *predicates*)
(setf (get predicate 'dtree) nil))
(setf *predicates* nil))
;;; ____________________________________________________________________________
(defun index (key)
"Store key in a `dtree' node. Key must be (predicate . args);
it is stored in the predicate's dtree."
(dtree-index key key (get-dtree (predicate key))))
(defun dtree-index (key value dtree)
"Index value under all atoms of KEY in dtree."
(cond
index on both first and rest
(dtree-index (first key) value
(or (dtree-first dtree)
(setf (dtree-first dtree) (make-dtree))))
(dtree-index (rest key) value
(or (dtree-rest dtree)
(setf (dtree-rest dtree) (make-dtree)))))
((null key)) ; don't index on nil
((variable-p key) ; index a variable
(nlist-push value (dtree-var dtree)))
(t ;; Make sure there is an nlist for this atom, and add to it
(nlist-push value (lookup-atom key dtree)))))
(defun lookup-atom (atom dtree)
"Return (or create) the nlist for this atom in dtree."
(or (lookup atom (dtree-atoms dtree))
(let ((new (make-empty-nlist)))
(push (cons atom new) (dtree-atoms dtree))
new)))
;;; ____________________________________________________________________________
(defun test-index ()
(let ((props '((p a b) (p a c) (p a ?x) (p b c)
(p b (f c)) (p a (f . ?x)))))
(clear-dtrees)
(mapc #'index props)
(write (list props (get-dtree 'p))
:circle t :array t :pretty t)
(values)))
;;; ____________________________________________________________________________
(defun fetch (query)
"Return a list of buckets potentially matching the QUERY,
which must be a relation of form (predicate . args)."
(dtree-fetch query (get-dtree (predicate query))
nil 0 nil most-positive-fixnum))
;;; ____________________________________________________________________________
(defun dtree-fetch (pat dtree var-list-in var-n-in best-list best-n)
"Return two values: a list-of-lists of possible matches to pat,
and the number of elements in the list-of-lists."
(if (or (null dtree) (null pat) (variable-p pat))
(values best-list best-n)
(let* ((var-nlist (dtree-var dtree))
(var-n (+ var-n-in (nlist-n var-nlist)))
(var-list (if (null (nlist-list var-nlist))
var-list-in
(cons (nlist-list var-nlist)
var-list-in))))
(cond
((>= var-n best-n) (values best-list best-n))
((atom pat) (dtree-atom-fetch pat dtree var-list var-n
best-list best-n))
(t (multiple-value-bind (list1 n1)
(dtree-fetch (first pat) (dtree-first dtree)
var-list var-n best-list best-n)
(dtree-fetch (rest pat) (dtree-rest dtree)
var-list var-n list1 n1)))))))
(defun dtree-atom-fetch (atom dtree var-list var-n best-list best-n)
"Return the answers indexed at this atom (along with the vars),
or return the previous best answer, if it is better."
(let ((atom-nlist (lookup atom (dtree-atoms dtree))))
(cond
((or (null atom-nlist) (null (nlist-list atom-nlist)))
(values var-list var-n))
((and atom-nlist (< (incf var-n (nlist-n atom-nlist)) best-n))
(values (cons (nlist-list atom-nlist) var-list) var-n))
(t (values best-list best-n)))))
;;; ____________________________________________________________________________
(proclaim '(inline mapc-retrieve))
(defun mapc-retrieve (fn query)
"For every fact that matches the query,
apply the function to the binding list."
(dolist (bucket (fetch query))
(dolist (answer bucket)
(let ((bindings (unify query answer)))
(unless (eq bindings fail)
(funcall fn bindings))))))
;;; ____________________________________________________________________________
(defun retrieve (query)
"Find all facts that match QUERY. Return a list of bindings."
(let ((answers nil))
(mapc-retrieve #'(lambda (bindings) (push bindings answers))
query)
answers))
(defun retrieve-matches (query)
"Find all facts that match QUERY.
Return a list of expressions that match the query."
(mapcar #'(lambda (bindings) (subst-bindings bindings query))
(retrieve query)))
;;; ____________________________________________________________________________
(defmacro query-bind (variables query &body body)
"Execute the body for each match to the QUERY.
Within the BODY, bind each variable."
(let* ((bindings (gensym "BINDINGS"))
(vars-and-vals
(mapcar
#'(lambda (var)
(list var `(subst-bindings ,bindings ',var)))
variables)))
`(mapc-retrieve
#'(lambda (,bindings)
(let ,vars-and-vals
,@body))
,query)))
| null | https://raw.githubusercontent.com/zlatozar/study-paip/dfa1ca6118f718f5d47d8c63cbb7b4cad23671e1/ch14/krep1.lisp | lisp | Syntax : COMMON - LISP ; Package : CH14 - FIRST ; Base : 10 -*-
Code from Paradigms of Artificial Intelligence Programming
first version .
____________________________________________________________________________
____________________________________________________________________________
____________________________________________________________________________
to a global *predicates*
____________________________________________________________________________
don't index on nil
index a variable
Make sure there is an nlist for this atom, and add to it
____________________________________________________________________________
____________________________________________________________________________
____________________________________________________________________________
____________________________________________________________________________
____________________________________________________________________________
____________________________________________________________________________ |
Copyright ( c ) 1991
(in-package #:ch14-first)
An ` nlist ' is implemented as a ( count . elements ) pair :
(defun make-empty-nlist ()
"Create a new, empty nlist."
(cons 0 nil))
(defun nlist-n (x) "The number of elements in an nlist." (car x))
(defun nlist-list (x) "The elements in an nlist." (cdr x))
(defun nlist-push (item nlist)
"Add a new element to an NLIST."
(incf (car nlist))
(push item (cdr nlist))
nlist)
(defstruct (dtree (:type vector))
(first nil) (rest nil) (atoms nil) (var (make-empty-nlist)))
Not all handle the closure properly , so change the local PREDICATES
(defvar *predicates* nil)
(defun get-dtree (predicate)
"Fetch (or make) the `dtree' for this PREDICATE."
(cond ((get predicate 'dtree))
(t (push predicate *predicates*)
(setf (get predicate 'dtree) (make-dtree)))))
(defun clear-dtrees ()
"Remove all the dtrees for all the predicates."
(dolist (predicate *predicates*)
(setf (get predicate 'dtree) nil))
(setf *predicates* nil))
(defun index (key)
it is stored in the predicate's dtree."
(dtree-index key key (get-dtree (predicate key))))
(defun dtree-index (key value dtree)
"Index value under all atoms of KEY in dtree."
(cond
index on both first and rest
(dtree-index (first key) value
(or (dtree-first dtree)
(setf (dtree-first dtree) (make-dtree))))
(dtree-index (rest key) value
(or (dtree-rest dtree)
(setf (dtree-rest dtree) (make-dtree)))))
(nlist-push value (dtree-var dtree)))
(nlist-push value (lookup-atom key dtree)))))
(defun lookup-atom (atom dtree)
"Return (or create) the nlist for this atom in dtree."
(or (lookup atom (dtree-atoms dtree))
(let ((new (make-empty-nlist)))
(push (cons atom new) (dtree-atoms dtree))
new)))
(defun test-index ()
(let ((props '((p a b) (p a c) (p a ?x) (p b c)
(p b (f c)) (p a (f . ?x)))))
(clear-dtrees)
(mapc #'index props)
(write (list props (get-dtree 'p))
:circle t :array t :pretty t)
(values)))
(defun fetch (query)
"Return a list of buckets potentially matching the QUERY,
which must be a relation of form (predicate . args)."
(dtree-fetch query (get-dtree (predicate query))
nil 0 nil most-positive-fixnum))
(defun dtree-fetch (pat dtree var-list-in var-n-in best-list best-n)
"Return two values: a list-of-lists of possible matches to pat,
and the number of elements in the list-of-lists."
(if (or (null dtree) (null pat) (variable-p pat))
(values best-list best-n)
(let* ((var-nlist (dtree-var dtree))
(var-n (+ var-n-in (nlist-n var-nlist)))
(var-list (if (null (nlist-list var-nlist))
var-list-in
(cons (nlist-list var-nlist)
var-list-in))))
(cond
((>= var-n best-n) (values best-list best-n))
((atom pat) (dtree-atom-fetch pat dtree var-list var-n
best-list best-n))
(t (multiple-value-bind (list1 n1)
(dtree-fetch (first pat) (dtree-first dtree)
var-list var-n best-list best-n)
(dtree-fetch (rest pat) (dtree-rest dtree)
var-list var-n list1 n1)))))))
(defun dtree-atom-fetch (atom dtree var-list var-n best-list best-n)
"Return the answers indexed at this atom (along with the vars),
or return the previous best answer, if it is better."
(let ((atom-nlist (lookup atom (dtree-atoms dtree))))
(cond
((or (null atom-nlist) (null (nlist-list atom-nlist)))
(values var-list var-n))
((and atom-nlist (< (incf var-n (nlist-n atom-nlist)) best-n))
(values (cons (nlist-list atom-nlist) var-list) var-n))
(t (values best-list best-n)))))
(proclaim '(inline mapc-retrieve))
(defun mapc-retrieve (fn query)
"For every fact that matches the query,
apply the function to the binding list."
(dolist (bucket (fetch query))
(dolist (answer bucket)
(let ((bindings (unify query answer)))
(unless (eq bindings fail)
(funcall fn bindings))))))
(defun retrieve (query)
"Find all facts that match QUERY. Return a list of bindings."
(let ((answers nil))
(mapc-retrieve #'(lambda (bindings) (push bindings answers))
query)
answers))
(defun retrieve-matches (query)
"Find all facts that match QUERY.
Return a list of expressions that match the query."
(mapcar #'(lambda (bindings) (subst-bindings bindings query))
(retrieve query)))
(defmacro query-bind (variables query &body body)
"Execute the body for each match to the QUERY.
Within the BODY, bind each variable."
(let* ((bindings (gensym "BINDINGS"))
(vars-and-vals
(mapcar
#'(lambda (var)
(list var `(subst-bindings ,bindings ',var)))
variables)))
`(mapc-retrieve
#'(lambda (,bindings)
(let ,vars-and-vals
,@body))
,query)))
|
b9f860df1f36f88a663fd6db5af0168dddd1918a58f4374cfb16b496954fd96d | jordanthayer/ocaml-search | high_obstacles.ml | (** For generating instances with many obstacles (but not mazes) *)
open Grid
type cell_types =
| Verboten
| Free
| Blocked
let make_empty_instance ?(i = -1) cost moves x y =
{ blocked = Array.create_matrix x y false;
costs = cost;
moves = moves;
goal = [(x-1), 0];
start = 0,0;
instance = i; }
let make_rand_h seed =
Random.init seed;
(fun n -> Random.float 1.)
let df_iface seed w =
Search_interface.make
~h:(make_rand_h seed)
~domain_expand:(make_expand w)
~key:key
~key_print:key_to_string
~equals:equals
~goal_p:(make_goal_p w)
~halt_on:[]
~get_sol_length:sol_length
~p_update:update_parent
(get_type w)
(make_root w)
(fun _ _ -> false)
(fun _ -> ())
let rec fill_in_path cells node =
let x,y = node.pos in
cells.(x).(y) <- Verboten;
if node != node.parent then fill_in_path cells node.parent
let gen_v1 seed costs moves x y obst_p =
let board = make_empty_instance costs moves x y in
let sface = df_iface seed board in
Verb.pe Verb.always "Solving Blank using dfs...\n";
let (s,_,_,_,_,_) = Depth_first_search.dups_hash_firstsol sface [||] in
Verb.pe Verb.always "Solved!\n";
let cells = Array.create_matrix x y Free in
(match s with
None -> failwith "Not possible"
| Some (p,_) -> fill_in_path cells p);
Verb.pe Verb.always "Traced path\n";
for x = 0 to (x - 1)
do
for y = 0 to (y - 1)
do
(match cells.(x).(y) with
| Verboten -> cells.(x).(y) <- Free
| _ -> (if (Random.float 1.) > obst_p
then cells.(x).(y) <- Free
else cells.(x).(y) <- Blocked))
done
done;
let blocked =
Array.map (fun ar ->
Array.map (fun e -> match e with
| Free -> false
| Blocked -> true
| _ -> failwith "Not Possible") ar) cells in
{ board with blocked = blocked }
let solvable on_path b board =
let problem = {b with blocked = (Wrarray.map_matrix board
(fun ele -> match ele with
Blocked -> true
| _ -> false))} in
let iface = Grid_interfaces.default_interface problem [] in
match Speedy.drop_dups iface [||] with
(None,_,_,_,_,_) -> (Verb.pe Verb.always "Cannot occlude\n";
false)
| (Some (p,f),_,_,_,_,_) ->
(Verb.pe Verb.always "Generating new path, %f\n" f;
Hashtbl.clear on_path;
let rec add_node p =
Hashtbl.add on_path p.pos true;
if p == p.parent then true
else add_node p.parent in
if Math.finite_p f
then add_node p
else false)
let gen_v2 seed costs moves maxx maxy obst_p =
let board = make_empty_instance costs moves maxx maxy in
let sface = df_iface seed board in
Verb.pe Verb.always "Solving Blank using dfs...\n";
let (s,_,_,_,_,_) = Depth_first_search.dups_hash_firstsol sface [||] in
Verb.pe Verb.always "Solved!\n";
let cells = Array.create_matrix maxx maxy Free
and to_add = ref []
and on_path = Hashtbl.create 3000 in
(match s with
None -> failwith "Not possible"
| Some (p,_) -> fill_in_path cells p);
Verb.pe Verb.always "Traced path\n";
for x = 0 to (maxx - 1) do
for y = 0 to (maxy - 1) do
(if y = 0 && (x = 0 || x = (maxx - 1))
then cells.(x).(y) <- Free
else (if (Random.float 1.) < obst_p
then (match cells.(x).(y) with
| Verboten -> to_add := (x,y)::!to_add
| _ -> cells.(x).(y) <- Blocked)
else cells.(x).(y) <- Free))
done
done;
List.iter (fun (x,y) ->
let test_board = Wrarray.copy_matrix cells in
test_board.(x).(y) <- Blocked;
if solvable on_path board test_board
then (cells.(x).(y) <- Blocked;
to_add := List.filter
(fun (x,y) ->
let t_val = Hashtbl.mem on_path (x,y) in
if not t_val then cells.(x).(y) <- Blocked;
t_val) !to_add)
else cells.(x).(y) <- Free) !to_add;
let blocked =
Array.map (fun ar ->
Array.map (fun e -> match e with
| Free -> false
| Blocked -> true
| _ -> failwith "Impossible") ar)
cells in
{ board with blocked = blocked }
let gen = gen_v1
EOF
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/grid/high_obstacles.ml | ocaml | * For generating instances with many obstacles (but not mazes) |
open Grid
type cell_types =
| Verboten
| Free
| Blocked
let make_empty_instance ?(i = -1) cost moves x y =
{ blocked = Array.create_matrix x y false;
costs = cost;
moves = moves;
goal = [(x-1), 0];
start = 0,0;
instance = i; }
let make_rand_h seed =
Random.init seed;
(fun n -> Random.float 1.)
let df_iface seed w =
Search_interface.make
~h:(make_rand_h seed)
~domain_expand:(make_expand w)
~key:key
~key_print:key_to_string
~equals:equals
~goal_p:(make_goal_p w)
~halt_on:[]
~get_sol_length:sol_length
~p_update:update_parent
(get_type w)
(make_root w)
(fun _ _ -> false)
(fun _ -> ())
let rec fill_in_path cells node =
let x,y = node.pos in
cells.(x).(y) <- Verboten;
if node != node.parent then fill_in_path cells node.parent
let gen_v1 seed costs moves x y obst_p =
let board = make_empty_instance costs moves x y in
let sface = df_iface seed board in
Verb.pe Verb.always "Solving Blank using dfs...\n";
let (s,_,_,_,_,_) = Depth_first_search.dups_hash_firstsol sface [||] in
Verb.pe Verb.always "Solved!\n";
let cells = Array.create_matrix x y Free in
(match s with
None -> failwith "Not possible"
| Some (p,_) -> fill_in_path cells p);
Verb.pe Verb.always "Traced path\n";
for x = 0 to (x - 1)
do
for y = 0 to (y - 1)
do
(match cells.(x).(y) with
| Verboten -> cells.(x).(y) <- Free
| _ -> (if (Random.float 1.) > obst_p
then cells.(x).(y) <- Free
else cells.(x).(y) <- Blocked))
done
done;
let blocked =
Array.map (fun ar ->
Array.map (fun e -> match e with
| Free -> false
| Blocked -> true
| _ -> failwith "Not Possible") ar) cells in
{ board with blocked = blocked }
let solvable on_path b board =
let problem = {b with blocked = (Wrarray.map_matrix board
(fun ele -> match ele with
Blocked -> true
| _ -> false))} in
let iface = Grid_interfaces.default_interface problem [] in
match Speedy.drop_dups iface [||] with
(None,_,_,_,_,_) -> (Verb.pe Verb.always "Cannot occlude\n";
false)
| (Some (p,f),_,_,_,_,_) ->
(Verb.pe Verb.always "Generating new path, %f\n" f;
Hashtbl.clear on_path;
let rec add_node p =
Hashtbl.add on_path p.pos true;
if p == p.parent then true
else add_node p.parent in
if Math.finite_p f
then add_node p
else false)
let gen_v2 seed costs moves maxx maxy obst_p =
let board = make_empty_instance costs moves maxx maxy in
let sface = df_iface seed board in
Verb.pe Verb.always "Solving Blank using dfs...\n";
let (s,_,_,_,_,_) = Depth_first_search.dups_hash_firstsol sface [||] in
Verb.pe Verb.always "Solved!\n";
let cells = Array.create_matrix maxx maxy Free
and to_add = ref []
and on_path = Hashtbl.create 3000 in
(match s with
None -> failwith "Not possible"
| Some (p,_) -> fill_in_path cells p);
Verb.pe Verb.always "Traced path\n";
for x = 0 to (maxx - 1) do
for y = 0 to (maxy - 1) do
(if y = 0 && (x = 0 || x = (maxx - 1))
then cells.(x).(y) <- Free
else (if (Random.float 1.) < obst_p
then (match cells.(x).(y) with
| Verboten -> to_add := (x,y)::!to_add
| _ -> cells.(x).(y) <- Blocked)
else cells.(x).(y) <- Free))
done
done;
List.iter (fun (x,y) ->
let test_board = Wrarray.copy_matrix cells in
test_board.(x).(y) <- Blocked;
if solvable on_path board test_board
then (cells.(x).(y) <- Blocked;
to_add := List.filter
(fun (x,y) ->
let t_val = Hashtbl.mem on_path (x,y) in
if not t_val then cells.(x).(y) <- Blocked;
t_val) !to_add)
else cells.(x).(y) <- Free) !to_add;
let blocked =
Array.map (fun ar ->
Array.map (fun e -> match e with
| Free -> false
| Blocked -> true
| _ -> failwith "Impossible") ar)
cells in
{ board with blocked = blocked }
let gen = gen_v1
EOF
|
2591b1f226a08b656db211ef7604cd6a3f6b79f7c40e38eb826c28c182b52f30 | NorfairKing/intray | HomeRSpec.hs | module Intray.Web.Server.Handler.HomeRSpec where
import Intray.Web.Server.Foundation
import Intray.Web.Server.TestUtils
import Test.Syd.Yesod
import TestImport
spec :: Spec
spec =
intrayWebServerSpec $
ydescribe "HomeR" $ do
yit "gets a 200 for non-logged-in user" $ do
get HomeR
statusIs 200
yit "gets a 200 for an example user" $
withExampleAccount_ $ do
get HomeR
statusIs 200
yit "gets a login page when clicking on the CTA" $ do
get HomeR
statusIs 200
get AccountR
statusIs 303
yit "gets to the account page when clicking on the CTA when already logged in" $ do
withExampleAccount_ $ do
get HomeR
statusIs 200
get AccountR
statusIs 200
| null | https://raw.githubusercontent.com/NorfairKing/intray/6a2422111a3d007b9b89f9eae5965ac7bf6224f8/intray-web-server/test/Intray/Web/Server/Handler/HomeRSpec.hs | haskell | module Intray.Web.Server.Handler.HomeRSpec where
import Intray.Web.Server.Foundation
import Intray.Web.Server.TestUtils
import Test.Syd.Yesod
import TestImport
spec :: Spec
spec =
intrayWebServerSpec $
ydescribe "HomeR" $ do
yit "gets a 200 for non-logged-in user" $ do
get HomeR
statusIs 200
yit "gets a 200 for an example user" $
withExampleAccount_ $ do
get HomeR
statusIs 200
yit "gets a login page when clicking on the CTA" $ do
get HomeR
statusIs 200
get AccountR
statusIs 303
yit "gets to the account page when clicking on the CTA when already logged in" $ do
withExampleAccount_ $ do
get HomeR
statusIs 200
get AccountR
statusIs 200
| |
98bbf102dbc15ab50e4c21d1ad778959f2beb86a70c553a31581c439cb37fe4e | zotonic/zotonic | action_wires_slide_fade_out.erl | @author < >
2009
%%
Based on code copyright ( c ) 2008 - 2009
Copyright 2009
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(action_wires_slide_fade_out).
-include_lib("zotonic_core/include/zotonic.hrl").
-export([render_action/4]).
render_action(TriggerId, TargetId, Args, Context) ->
action_wires_jquery_effect:render_action(TriggerId, TargetId, [{type,slide_fade_out}|Args], Context).
| null | https://raw.githubusercontent.com/zotonic/zotonic/852f627c28adf6e5212e8ad5383d4af3a2f25e3f/apps/zotonic_mod_wires/src/actions/action_wires_slide_fade_out.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | @author < >
2009
Based on code copyright ( c ) 2008 - 2009
Copyright 2009
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(action_wires_slide_fade_out).
-include_lib("zotonic_core/include/zotonic.hrl").
-export([render_action/4]).
render_action(TriggerId, TargetId, Args, Context) ->
action_wires_jquery_effect:render_action(TriggerId, TargetId, [{type,slide_fade_out}|Args], Context).
|
f42e3685c10fc935a9a8cbf5ded43dff4d8dca561e9a6c0d37edae5094401192 | fulcro-legacy/semantic-ui-wrapper | ui_dimmer_dimmable.cljs | (ns fulcrologic.semantic-ui.modules.dimmer.ui-dimmer-dimmable
(:require
[fulcrologic.semantic-ui.factory-helpers :as h]
["semantic-ui-react/dist/commonjs/modules/Dimmer/DimmerDimmable" :default DimmerDimmable]))
(def ui-dimmer-dimmable
"A dimmable sub-component for Dimmer.
Props:
- as (custom): An element type to render as (string or function).
- blurring (bool): A dimmable element can blur its contents.
- children (node): Primary content.
- className (string): Additional classes.
- content (custom): Shorthand for primary content.
- dimmed (bool): Controls whether or not the dim is displayed."
(h/factory-apply DimmerDimmable))
| null | https://raw.githubusercontent.com/fulcro-legacy/semantic-ui-wrapper/b0473480ddfff18496df086bf506099ac897f18f/semantic-ui-wrappers-shadow/src/main/fulcrologic/semantic_ui/modules/dimmer/ui_dimmer_dimmable.cljs | clojure | (ns fulcrologic.semantic-ui.modules.dimmer.ui-dimmer-dimmable
(:require
[fulcrologic.semantic-ui.factory-helpers :as h]
["semantic-ui-react/dist/commonjs/modules/Dimmer/DimmerDimmable" :default DimmerDimmable]))
(def ui-dimmer-dimmable
"A dimmable sub-component for Dimmer.
Props:
- as (custom): An element type to render as (string or function).
- blurring (bool): A dimmable element can blur its contents.
- children (node): Primary content.
- className (string): Additional classes.
- content (custom): Shorthand for primary content.
- dimmed (bool): Controls whether or not the dim is displayed."
(h/factory-apply DimmerDimmable))
| |
cb9b4ea04d8a2137a68b5d7bac0b400af1546d1fbcb3b255c123c32c4f6cf7b2 | pepeiborra/narradar | NarrowingProblem.hs | # LANGUAGE ScopedTypeVariables #
# LANGUAGE PatternGuards , ViewPatterns , NamedFieldPuns #
# LANGUAGE FlexibleContexts , FlexibleInstances #
# LANGUAGE TypeOperators #
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE UndecidableInstances , OverlappingInstances , TypeSynonymInstances #
{-# LANGUAGE GADTs #-}
module Narradar.Processor.NarrowingProblem where
import Control.Applicative
import Control.Exception
import qualified Control.RMonad as R
import Control.RMonad.AsMonad
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import Data.List ( (\\), sortBy)
import Data.Monoid
import qualified Data.Set as Set
import Data.Set (Set)
import Prelude hiding (mapM, pi)
import Narradar.Constraints.VariableCondition
import Narradar.Types.ArgumentFiltering (AF_, PolyHeuristic, Heuristic, MkHeu, mkHeu, isSoundAF, ApplyAF(..))
import qualified Narradar.Types.ArgumentFiltering as AF
import Narradar.Framework
import Narradar.Framework.GraphViz hiding (note)
import Narradar.Processor.UsableRules
import Narradar.Types as Narradar
import Narradar.Types.Problem.Narrowing
import Narradar.Types.Problem.NarrowingGoal
import Narradar.Utils
import Lattice
data NarrowingToRewritingICLP08 heu = NarrowingToRewritingICLP08 (MkHeu heu)
| NarrowingToRewritingICLP08_SCC (MkHeu heu)
instance ( PolyHeuristic heu id, Lattice (AF_ id), Ord id, Pretty id, Pretty (TermN id)
, Info info (Problem Narrowing (NTRS id))
, Info info (Problem Rewriting (NTRS id))
, Info info UsableRulesProof
, Info info (NarrowingToRewritingProof id)
, MkDPProblem base (NTRS id)
, Traversable (Problem base)
, NCap base id
, NUsableRules base id
) =>
Processor info (NarrowingToRewritingICLP08 heu) (Problem (MkNarrowing base) (NTRS id) ) (Problem base (NTRS id) ) where
applySearch (NarrowingToRewritingICLP08 mk) p
| null orProblems = [dontKnow (NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id) p]
| otherwise = orProblems
where (trs, dps) = (getR p, getP p)
heu = mkHeu mk p
u_p = iUsableRules p (rhs <$> rules dps)
afs = findGroundAF heu (AF.init u_p) u_p R.=<< Set.fromList(rules dps)
orProblems = [ singleP UsableRulesProof p u_p >>= \ p' ->
singleP (NarrowingToRewritingICLP08Proof af) p $
AF.apply af (getBaseProblem p')
| af <- Set.toList afs]
applySearch (NarrowingToRewritingICLP08_SCC mk) p
| null orProblems = [dontKnow (NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id) p]
| otherwise = orProblems
where (trs, dps) = (getR p, getP p)
heu = mkHeu mk p
u_p = iUsableRules p (rhs <$> rules dps)
afs = R.foldM (\af -> findGroundAF heu af u_p) (AF.init u_p) (rules dps)
orProblems = [ singleP UsableRulesProof p u_p >>= \ p' ->
singleP (NarrowingToRewritingICLP08Proof af) p' $
AF.apply af (getBaseProblem p')
| af <- Set.toList afs]
instance ( HasSignature (NProblem base id), id ~ SignatureId (NProblem base id)
, PolyHeuristic heu id, Lattice (AF_ id), Ord id, Pretty id, Pretty (TermN id)
, ApplyAF (NProblem base id)
, Info info (NProblem (MkNarrowingGoal id base) id)
, Info info (NProblem base id)
, Info info (NarrowingToRewritingProof id)
, MkDPProblem base (NTRS id), Traversable (Problem base)
, NUsableRules base id
, NCap base id
) =>
Processor info (NarrowingToRewritingICLP08 heu)
(NProblem (MkNarrowingGoal id base) id)
(NProblem base id)
where
applySearch (NarrowingToRewritingICLP08 mk) p@(getFramework -> NarrowingGoal _ pi_groundInfo0 _ base)
| null orProblems = [dontKnow (NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id) p]
| otherwise = orProblems
where heu = mkHeu mk p
af0 = AF.init p `mappend` AF.restrictTo (getConstructorSymbols p) pi_groundInfo0
afs = unEmbed $ do
af00 <- embed $ invariantEV heu p af0
let pi_groundInfo = AF.init p `mappend` AF.restrictTo (getConstructorSymbols p) af00
embed $ findGroundAF' heu pi_groundInfo af0 p R.=<< Set.fromList(rules $ getP p)
orProblems = [ singleP (NarrowingToRewritingICLP08Proof af) p $
AF.apply af (mkDerivedDPProblem base p)
| af <- Set.toList afs]
-- -----------
-- Proofs
-- -----------
data NarrowingToRewritingProof id where
NarrowingToRewritingICLP08Proof :: AF_ id -> NarrowingToRewritingProof id
NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id
instance Pretty id => Pretty (NarrowingToRewritingProof id) where
pPrint NarrowingToRewritingICLP08Fail = text "Failed to find an argument filtering that satisfies" $$
text "the one pair with a ground right hand side condition."
pPrint (NarrowingToRewritingICLP08Proof af) = text "Termination of the following rewriting DP problem" $$
text "implies termination of the original problem." $$
text "The following argument filtering was used:" $$
pPrint af
-- ---------------
-- building blocks
-- ---------------
findGroundAF heu af0 p (_:->r)
| isVar r = Set.empty
| otherwise = mkGround r R.>>= invariantEV heu p
where
TODO Fix : cut one at a time
where varsp = [noteV v | v <- vars (annotateWithPos t)]
-- | Takes a heuristic, an af with groundness information, an af to use as starting point, a problem and a rule,
findGroundAF' :: ( IsDPProblem typ, HasSignature (Problem typ (NarradarTRS t Var))
, Traversable t, HasId t, ApplyAF (Term t Var), Ord (Term t Var)
, id ~ TermId t, id ~ AFId (Term t Var), id ~ SignatureId (Problem typ (NarradarTRS t Var))
, Ord id, Pretty id, Lattice (AF_ id), Foldable (Problem typ)
, ApplyAF (Problem typ (NarradarTRS t Var))
, ApplyAF (Term (WithNote1 Position t) (WithNote Position Var))
, AFId (Term (WithNote1 Position t) (WithNote Position Var)) ~ id
) =>
Heuristic id
^ Groundness information
-> AF_ id -- ^ the argument filtering to constrain
-> Problem typ (NarradarTRS t Var)
-> Rule t Var -- ^ the rule to make ground
-> Set (AF_ id)
findGroundAF' heu pi_groundInfo af0 p (_:->r)
| isVar r = Set.empty
| otherwise = mkGround r R.>>= invariantEV heu p
where
TODO Fix : cut one at a time
where varsp = [noteV v | v <- vars (annotateWithPos t)] \\
[note v | v <- subterms (AF.apply pi_d $ annotateWithPos t)]
(pi_c,pi_d) = AF.splitCD p pi_groundInfo
| null | https://raw.githubusercontent.com/pepeiborra/narradar/bc53dcad9aee480ab3424a75239bac67e4794456/src/Narradar/Processor/NarrowingProblem.hs | haskell | # LANGUAGE GADTs #
-----------
Proofs
-----------
---------------
building blocks
---------------
| Takes a heuristic, an af with groundness information, an af to use as starting point, a problem and a rule,
^ the argument filtering to constrain
^ the rule to make ground | # LANGUAGE ScopedTypeVariables #
# LANGUAGE PatternGuards , ViewPatterns , NamedFieldPuns #
# LANGUAGE FlexibleContexts , FlexibleInstances #
# LANGUAGE TypeOperators #
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE UndecidableInstances , OverlappingInstances , TypeSynonymInstances #
module Narradar.Processor.NarrowingProblem where
import Control.Applicative
import Control.Exception
import qualified Control.RMonad as R
import Control.RMonad.AsMonad
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import Data.List ( (\\), sortBy)
import Data.Monoid
import qualified Data.Set as Set
import Data.Set (Set)
import Prelude hiding (mapM, pi)
import Narradar.Constraints.VariableCondition
import Narradar.Types.ArgumentFiltering (AF_, PolyHeuristic, Heuristic, MkHeu, mkHeu, isSoundAF, ApplyAF(..))
import qualified Narradar.Types.ArgumentFiltering as AF
import Narradar.Framework
import Narradar.Framework.GraphViz hiding (note)
import Narradar.Processor.UsableRules
import Narradar.Types as Narradar
import Narradar.Types.Problem.Narrowing
import Narradar.Types.Problem.NarrowingGoal
import Narradar.Utils
import Lattice
data NarrowingToRewritingICLP08 heu = NarrowingToRewritingICLP08 (MkHeu heu)
| NarrowingToRewritingICLP08_SCC (MkHeu heu)
instance ( PolyHeuristic heu id, Lattice (AF_ id), Ord id, Pretty id, Pretty (TermN id)
, Info info (Problem Narrowing (NTRS id))
, Info info (Problem Rewriting (NTRS id))
, Info info UsableRulesProof
, Info info (NarrowingToRewritingProof id)
, MkDPProblem base (NTRS id)
, Traversable (Problem base)
, NCap base id
, NUsableRules base id
) =>
Processor info (NarrowingToRewritingICLP08 heu) (Problem (MkNarrowing base) (NTRS id) ) (Problem base (NTRS id) ) where
applySearch (NarrowingToRewritingICLP08 mk) p
| null orProblems = [dontKnow (NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id) p]
| otherwise = orProblems
where (trs, dps) = (getR p, getP p)
heu = mkHeu mk p
u_p = iUsableRules p (rhs <$> rules dps)
afs = findGroundAF heu (AF.init u_p) u_p R.=<< Set.fromList(rules dps)
orProblems = [ singleP UsableRulesProof p u_p >>= \ p' ->
singleP (NarrowingToRewritingICLP08Proof af) p $
AF.apply af (getBaseProblem p')
| af <- Set.toList afs]
applySearch (NarrowingToRewritingICLP08_SCC mk) p
| null orProblems = [dontKnow (NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id) p]
| otherwise = orProblems
where (trs, dps) = (getR p, getP p)
heu = mkHeu mk p
u_p = iUsableRules p (rhs <$> rules dps)
afs = R.foldM (\af -> findGroundAF heu af u_p) (AF.init u_p) (rules dps)
orProblems = [ singleP UsableRulesProof p u_p >>= \ p' ->
singleP (NarrowingToRewritingICLP08Proof af) p' $
AF.apply af (getBaseProblem p')
| af <- Set.toList afs]
instance ( HasSignature (NProblem base id), id ~ SignatureId (NProblem base id)
, PolyHeuristic heu id, Lattice (AF_ id), Ord id, Pretty id, Pretty (TermN id)
, ApplyAF (NProblem base id)
, Info info (NProblem (MkNarrowingGoal id base) id)
, Info info (NProblem base id)
, Info info (NarrowingToRewritingProof id)
, MkDPProblem base (NTRS id), Traversable (Problem base)
, NUsableRules base id
, NCap base id
) =>
Processor info (NarrowingToRewritingICLP08 heu)
(NProblem (MkNarrowingGoal id base) id)
(NProblem base id)
where
applySearch (NarrowingToRewritingICLP08 mk) p@(getFramework -> NarrowingGoal _ pi_groundInfo0 _ base)
| null orProblems = [dontKnow (NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id) p]
| otherwise = orProblems
where heu = mkHeu mk p
af0 = AF.init p `mappend` AF.restrictTo (getConstructorSymbols p) pi_groundInfo0
afs = unEmbed $ do
af00 <- embed $ invariantEV heu p af0
let pi_groundInfo = AF.init p `mappend` AF.restrictTo (getConstructorSymbols p) af00
embed $ findGroundAF' heu pi_groundInfo af0 p R.=<< Set.fromList(rules $ getP p)
orProblems = [ singleP (NarrowingToRewritingICLP08Proof af) p $
AF.apply af (mkDerivedDPProblem base p)
| af <- Set.toList afs]
data NarrowingToRewritingProof id where
NarrowingToRewritingICLP08Proof :: AF_ id -> NarrowingToRewritingProof id
NarrowingToRewritingICLP08Fail :: NarrowingToRewritingProof id
instance Pretty id => Pretty (NarrowingToRewritingProof id) where
pPrint NarrowingToRewritingICLP08Fail = text "Failed to find an argument filtering that satisfies" $$
text "the one pair with a ground right hand side condition."
pPrint (NarrowingToRewritingICLP08Proof af) = text "Termination of the following rewriting DP problem" $$
text "implies termination of the original problem." $$
text "The following argument filtering was used:" $$
pPrint af
findGroundAF heu af0 p (_:->r)
| isVar r = Set.empty
| otherwise = mkGround r R.>>= invariantEV heu p
where
TODO Fix : cut one at a time
where varsp = [noteV v | v <- vars (annotateWithPos t)]
findGroundAF' :: ( IsDPProblem typ, HasSignature (Problem typ (NarradarTRS t Var))
, Traversable t, HasId t, ApplyAF (Term t Var), Ord (Term t Var)
, id ~ TermId t, id ~ AFId (Term t Var), id ~ SignatureId (Problem typ (NarradarTRS t Var))
, Ord id, Pretty id, Lattice (AF_ id), Foldable (Problem typ)
, ApplyAF (Problem typ (NarradarTRS t Var))
, ApplyAF (Term (WithNote1 Position t) (WithNote Position Var))
, AFId (Term (WithNote1 Position t) (WithNote Position Var)) ~ id
) =>
Heuristic id
^ Groundness information
-> Problem typ (NarradarTRS t Var)
-> Set (AF_ id)
findGroundAF' heu pi_groundInfo af0 p (_:->r)
| isVar r = Set.empty
| otherwise = mkGround r R.>>= invariantEV heu p
where
TODO Fix : cut one at a time
where varsp = [noteV v | v <- vars (annotateWithPos t)] \\
[note v | v <- subterms (AF.apply pi_d $ annotateWithPos t)]
(pi_c,pi_d) = AF.splitCD p pi_groundInfo
|
d6bbf2a03057169a34f5f733aa4e655d6502ae7a1982bbc499860cbb46c3ecaf | BranchTaken/Hemlock | array.mli | open Rudiments0
type 'a t = 'a array
include ContainerIntf.SPolyIter with type 'a t := 'a t
include FormattableIntf.SPoly with type 'a t := 'a t
val fmt: ?alt:bool -> ?width:uns -> ('a -> (module Fmt.Formatter) -> (module Fmt.Formatter))
-> 'a t -> (module Fmt.Formatter) -> (module Fmt.Formatter)
* [ fmt ~alt ~width fmt_a t ] uses the element formatter [ fmt_a ] to format a syntactically valid
array representation of [ t ] . If [ ~alt = true ] , the output is broken across multiple lines with
outermost indentation [ ~width ] ( elements are indented to [ ~width + 4 ] ) .
array representation of [t]. If [~alt=true], the output is broken across multiple lines with
outermost indentation [~width] (elements are indented to [~width + 4]). *)
(* Seeming excess verbosity is necessary for destructive type substitution. *)
* Cursor that supports arbitrary array element access . All operations are O(1 ) .
module Cursor : sig
include CursorIntf.SPolyIndex
with type 'a container := 'a t
with type 'a elm := 'a
end
val hash_fold: ('a -> Hash.State.t -> Hash.State.t) -> 'a t -> Hash.State.t -> Hash.State.t
(** [hash_fold hash_fold_a t state] incorporates the hash of [t] into [state] and returns the
resulting state. Array elements are sequentially hash-folded into the resulting state via
[hash_fold_a]. *)
val cmp: ('a -> 'a -> Cmp.t) -> 'a t -> 'a t -> Cmp.t
* Compare two arrays given the element comparison function . The array lengths may differ .
module Seq : sig
type 'a outer = 'a t
module type SMono = sig
type t
type elm
val to_array: t -> elm outer
end
module type SPoly = sig
type 'a t
type 'a elm
val to_array: 'a t -> 'a elm outer
end
module type SPoly2 = sig
type ('a, 'cmp) t
type 'a elm
val to_array: ('a, 'cmp) t -> 'a elm outer
end
module type SPoly3 = sig
type ('k, 'v, 'cmp) t
type 'k key
type 'v value
val to_array: ('k, 'v, 'cmp) t -> ('k key * 'v value) outer
end
(** Efficiently convert a sequence of fixed element type with known length to an array. *)
module MakeMono (T : SeqIntf.IMonoDef) : SMono
with type t := T.t
with type elm := T.elm
(** Efficiently convert a reversed sequence of fixed element type with known length to an array.
*)
module MakeMonoRev (T : SeqIntf.IMonoDef) : SMono
with type t := T.t
with type elm := T.elm
(** Efficiently convert a generic sequence with known length to an array. *)
module MakePoly (T : SeqIntf.IPolyDef) : SPoly
with type 'a t := 'a T.t
with type 'a elm := 'a T.elm
(** Efficiently convert a reversed generic sequence with known length to an array. *)
module MakePolyRev (T : SeqIntf.IPolyDef) : SPoly
with type 'a t := 'a T.t
with type 'a elm := 'a T.elm
(** Efficiently convert a generic sequence with known length to an array. *)
module MakePoly2 (T : SeqIntf.IPoly2Def) : SPoly2
with type ('a, 'cmp) t := ('a, 'cmp) T.t
with type 'a elm := 'a T.elm
(** Efficiently convert a generic sequence with known length to an array. *)
module MakePoly3 (T : SeqIntf.IPoly3Def) : SPoly3
with type ('k, 'v, 'cmp) t := ('k, 'v, 'cmp) T.t
with type 'k key := 'k T.key
with type 'v value := 'v T.value
end
module Slice : sig
include SliceIntf.SPolyIndex
with type 'a container := 'a t
with type 'a cursor := 'a Cursor.t
with type 'a elm := 'a
include ContainerIntf.SPolyIter with type 'a t := 'a t
include FormattableIntf.SPoly with type 'a t := 'a t
val fmt: ?alt:bool -> ?width:uns -> ('a -> (module Fmt.Formatter) -> (module Fmt.Formatter))
-> 'a t -> (module Fmt.Formatter) -> (module Fmt.Formatter)
* [ fmt ~alt ~width fmt_a t ] formats uses the element formatter [ fmt_a ] to format a syntactically
valid array representation of [ t ] . If [ ~alt = true ] , the output is broken across multiple lines
with outermost indentation [ ~width ] ( elements are indented to [ ~width + 4 ] ) .
valid array representation of [t]. If [~alt=true], the output is broken across multiple lines
with outermost indentation [~width] (elements are indented to [~width + 4]). *)
val is_empty: 'a t -> bool
(** Return [true] if slice length is 0; [false] otherwise. *)
val get: uns -> 'a t -> 'a
(** Get slice element. [get i t] returns the element at index [i]. *)
val set_inplace: uns -> 'a -> 'a t -> unit
(** Set slice element in place (mutate). [set_inplace i elm t] sets the element at index [i] to
[elm]. *)
val set: uns -> 'a -> 'a t -> 'a t
* Create a new slice based on input slice , differing in one element . [ set i elm t ] creates a
slice equal to [ t ] , except that element [ i ] is initialized to [ elm ] .
slice equal to [t], except that element [i] is initialized to [elm]. *)
val copy: 'a t -> 'a t
(** Create a copy of a slice. *)
val pare: range -> 'a t -> 'a t
(** Create a slice with contents initialized to equal the specified [range] of the input slice. *)
val join: ?sep:'a t -> 'a t list -> 'a t
* a list of slices , with optional separator .
val concat: 'a t -> 'a t -> 'a t
* two slices .
val append: 'a -> 'a t -> 'a t
(** Create a slice that is the concatenation of the input slice and the input element. *)
val prepend: 'a -> 'a t -> 'a t
(** Create a slice that is the concatenation of the input element and the input slice. *)
val insert: uns -> 'a -> 'a t -> 'a t
(** Create a slice that is the concatenation of the bipartition of the input slice at specified
index, with the input element interposed. *)
val remove: uns -> 'a t -> 'a t
* Create a slice that is the concatenation of the first and third components of the tripartition
about the element at specified index .
about the element at specified index. *)
val reduce: f:('a -> 'a -> 'a) -> 'a t -> 'a option
(** Reduce the slice to a single value, or return [None] if the slice is empty. The reduction
function is assumed to be associative and commutative; thus reduction order is unspecified. *)
val reduce_hlt: f:('a -> 'a -> 'a) -> 'a t -> 'a
(** Reduce the slice to a single value, or halt if the slice is empty. The reduction function is
assumed to be associative and commutative; thus reduction order is unspecified. *)
val swap_inplace: uns -> uns -> 'a t -> unit
(** Swap elements at given indices in place (mutate). *)
val swap: uns -> uns -> 'a t -> 'a t
(** Create a slice based on the input slice, but with elements at given indices swapped. *)
val rev_inplace: 'a t -> unit
(** Reverse slice in place (mutate). *)
val rev: 'a t -> 'a t
(** Create a slice with contents reversed relative to the input slice. *)
val blit: 'a t -> 'a t -> unit
(** Set elements in place (mutate). [blit t0 t1] sets the elements of [t1] to equal the elements
of [t0]. Overlapping slices are supported, but in all cases [t0] and [t1] must have equal
length. *)
val is_sorted: ?strict:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> bool
(** Return true if slice is sorted (strictly if [?strict] is [true]) according to the comparison
function. *)
val sort_inplace: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> unit
(** Sort the slice in place (mutate) according to the comparison function. Preserve order of
equivalent elements if [?stable] is [true]. *)
val sort: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> 'a t
(** Create a slice with sorted contents of the input slice according to the comparison function.
Preserve order of equivalent elements if [?stable] is [true]. *)
val psearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in slice , selecting the leftmost match , and falling back to the nearest
present predecessor in the case of no match .
@return { ul
{ - No predecessor : [ Some ( Cmp . Lt , 0 ) ] }
{ - Leftmost match : [ Some ( Cmp . , index ) ] }
{ - Predecessor : [ Some ( Cmp . Gt , index ) ] }
{ - Empty slice : [ None ] }
}
present predecessor in the case of no match.
@return {ul
{- No predecessor: [Some (Cmp.Lt, 0)]}
{- Leftmost match: [Some (Cmp.Eq, index)]}
{- Predecessor: [Some (Cmp.Gt, index)]}
{- Empty slice: [None]}
} *)
val search: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> uns option
* Binary search for key in slice . If key is found , return [ ( Some index ) ] , otherwise return
[ None ] . Note that if more than one element matches key , an arbitrary match is returned .
[None]. Note that if more than one element matches key, an arbitrary match is returned. *)
val nsearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in slice , selecting the rightmost match , and falling back to the nearest
present successor in the case of no match .
@return { ul
{ - Successor : [ Some ( Cmp . Lt , index ) ] }
{ - Rightmost match : [ Some ( Cmp . , index ) ] }
{ - No successor : [ Some ( Cmp . Gt , ( Uns.pred ( length t ) ) ) ] }
{ - Empty slice : [ None ] }
}
present successor in the case of no match.
@return {ul
{- Successor: [Some (Cmp.Lt, index)]}
{- Rightmost match: [Some (Cmp.Eq, index)]}
{- No successor: [Some (Cmp.Gt, (Uns.pred (length t)))]}
{- Empty slice: [None]}
} *)
val map: f:('a -> 'b) -> 'a t -> 'b t
(** Create a slice with elements mapped from the input slice, according to the element mapping
function. *)
val mapi: f:(uns -> 'a -> 'b) -> 'a t -> 'b t
(** Create a slice with elements mapped from the input slice, according to the indexed element
mapping function. *)
val fold_map: init:'accum -> f:('accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
(** Create a slice and accumulated result with elements mapped from the input slice, according to
the folding mapping function. *)
val foldi_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
(** Create a slice and accumulated result with elements mapped from the input slice, according to
the indexed folding mapping function. *)
val filter: f:('a -> bool) -> 'a t -> 'a t
(** Create a slice with contents filtered by the given function. Only elements for which the
filter function returns [true] are incorporated into the result. *)
val filteri: f:(uns -> 'a -> bool) -> 'a t -> 'a t
(** Create a slice with contents filtered by the given function. Only elements for which the
indexed filter function returns [true] are incorporated into the result. *)
val fold2_until: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two slices , calling the element
folding function in increasing index order , and terminate folding early if the folding
function returns true .
folding function in increasing index order, and terminate folding early if the folding
function returns true. *)
val foldi2_until: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t
-> 'accum
* Create an accumulated result for the paired elements of two slices , calling the indexed
element folding function in increasing index order , and terminate folding early if the folding
function returns true .
element folding function in increasing index order, and terminate folding early if the folding
function returns true. *)
val fold2: init:'accum -> f:('accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the element
folding function in increasing index order .
folding function in increasing index order. *)
val foldi2: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the indexed
element folding function in increasing index order .
element folding function in increasing index order. *)
val iter2: f:('a -> 'b -> unit) -> 'a t -> 'b t -> unit
* Iterate over the paired elements of two slices , calling the element visiting function in
increasing index order .
increasing index order. *)
val iteri2: f:(uns -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
* Iterate over the paired elements of two slices , calling the indexed element visiting function
in increasing index order .
in increasing index order. *)
val map2: f:('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
* Create a slice with elements mapped by the element mapping function from the paired elements
of two input slices .
of two input slices. *)
val mapi2: f:(uns -> 'a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
* Create a slice with elements mapped by the indexed element mapping function from the paired
elements of two input slices .
elements of two input slices. *)
val fold2_map: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t
-> 'accum * 'c t
* Create a slice and accumulated result based on the paired elements of two slices , calling the
element folding / mapping function in increasing index order .
element folding/mapping function in increasing index order. *)
val foldi2_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t
-> 'accum * 'c t
* Create a slice and accumulated result based on the paired elements of two slices , calling the
indexed element folding / mapping function in increasing index order .
indexed element folding/mapping function in increasing index order. *)
val zip: 'a t -> 'b t -> ('a * 'b) t
* Create a slice with the paired elements of two slices .
val unzip: ('a * 'b) t -> 'a t * 'b t
* Create two slices with the unpaired elements of the input pair slice .
end
val init: range -> f:(uns -> 'a) -> 'a t
(** Initialize array. [init range ~f:(fun i -> ...)] creates an array of length
[Range.Uns.length_hlt range] using [~f] to map range elements to array elements. *)
val of_list: ?length:uns -> 'a list -> 'a t
* Initialize array using contents of list . If specified , [ ? length ] must equal [ ( List.length
list ) ] .
list)]. *)
val of_list_rev: ?length:uns -> 'a list -> 'a t
* Initialize array using reversed contents of list . If specified , [ ? length ] must equal
[ ( list ) ] .
[(List.length list)]. *)
val of_stream: ?length:uns -> 'a Stream.t -> 'a t
* Initialize array using contents of stream . If specified , [ ? length ] must equal [ ( Stream.length
stream ) ] .
stream)]. *)
val of_stream_rev: ?length:uns -> 'a Stream.t -> 'a t
* Initialize array using reversed contents of stream . If specified , [ ? length ] must equal
[ ( Stream.length stream ) ] .
[(Stream.length stream)]. *)
val length: 'a t -> uns
(** Return array length. *)
val range: 'a t -> range
(** Return the range of indices contained in the array. *)
val is_empty: 'a t -> bool
(** Return [true] if array length is 0; [false] otherwise. *)
val get: uns -> 'a t -> 'a
(** Get array element. [get i t] returns the element at index [i]. *)
val set_inplace: uns -> 'a -> 'a t -> unit
(** Set array element in place (mutate). [set_inplace i elm t] sets the element at index [i] to
[elm]. *)
val set: uns -> 'a -> 'a t -> 'a t
* Create a new array based on input array , differing in one element . [ set i elm t ] creates an
array equal to [ t ] , except that element [ i ] is initialized to [ elm ] .
array equal to [t], except that element [i] is initialized to [elm]. *)
val copy: 'a t -> 'a t
(** Create a copy of an array. *)
val pare: range -> 'a t -> 'a t
(** Create an array with contents initialized to equal the specified [range] of the input array. *)
val join: ?sep:'a t -> 'a t list -> 'a t
* a list of arrays , with optional separator .
val concat: 'a t -> 'a t -> 'a t
* two arrays .
val append: 'a -> 'a t -> 'a t
(** Create an array that is the concatenation of the input array and the input element. *)
val prepend: 'a -> 'a t -> 'a t
(** Create an array that is the concatenation of the input element and the input array. *)
val insert: uns -> 'a -> 'a t -> 'a t
(** Create an array that is the concatenation of the bipartition of the input array at specified
index, with the input element interposed. *)
val remove: uns -> 'a t -> 'a t
* Create an array that is the concatenation of the first and third components of the tripartition
about the element at specified index .
about the element at specified index. *)
val reduce: f:('a -> 'a -> 'a) -> 'a t -> 'a option
(** Reduce the array to a single value, or return [None] if the array is empty. The reduction
function is assumed to be associative and commutative; thus reduction order is unspecified. *)
val reduce_hlt: f:('a -> 'a -> 'a) -> 'a t -> 'a
(** Reduce the array to a single value, or halt if the array is empty. The reduction function is
assumed to be associative and commutative; thus reduction order is unspecified. *)
val swap_inplace: uns -> uns -> 'a t -> unit
(** Swap elements at given indices in place (mutate). *)
val swap: uns -> uns -> 'a t -> 'a t
(** Create an array based on the input array, but with elements at given indices swapped. *)
val rev_inplace: 'a t -> unit
(** Reverse array in place (mutate). *)
val rev: 'a t -> 'a t
(** Create an array with contents reversed relative to the input array. *)
val is_sorted: ?strict:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> bool
(** Return true if array is sorted (strictly if [?strict] is [true]) according to the comparison
function. *)
val sort_inplace: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> unit
(** Sort the array in place (mutate) according to the comparison function. Preserve order of
equivalent elements if [?stable] is [true]. *)
val sort: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> 'a t
(** Create an array with sorted contents of the input array according to the comparison function.
Preserve order of equivalent elements if [?stable] is [true]. *)
val psearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in array , selecting the leftmost match , and falling back to the nearest
present predecessor in the case of no match .
@return { ul
{ - No predecessor : [ Some ( Cmp . Lt , 0 ) ] }
{ - Leftmost match : [ Some ( Cmp . , index ) ] }
{ - Predecessor : [ Some ( Cmp . Gt , index ) ] }
{ - Empty array : [ None ] }
}
present predecessor in the case of no match.
@return {ul
{- No predecessor: [Some (Cmp.Lt, 0)]}
{- Leftmost match: [Some (Cmp.Eq, index)]}
{- Predecessor: [Some (Cmp.Gt, index)]}
{- Empty array: [None]}
} *)
val search: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> uns option
* Binary search for key in array . If key is found , return [ ( Some index ) ] , otherwise return [ None ] .
Note that if more than one element matches key , an arbitrary match is returned .
Note that if more than one element matches key, an arbitrary match is returned. *)
val nsearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in array , selecting the rightmost match , and falling back to the nearest
present successor in the case of no match .
@return { ul
{ - Successor : [ Some ( Cmp . Lt , index ) ] }
{ - Rightmost match : [ Some ( Cmp . , index ) ] }
{ - No successor : [ Some ( Cmp . Gt , ( Uns.pred ( length t ) ) ) ] }
{ - Empty array : [ None ] }
}
present successor in the case of no match.
@return {ul
{- Successor: [Some (Cmp.Lt, index)]}
{- Rightmost match: [Some (Cmp.Eq, index)]}
{- No successor: [Some (Cmp.Gt, (Uns.pred (length t)))]}
{- Empty array: [None]}
} *)
val map: f:('a -> 'b) -> 'a t -> 'b t
(** Create an array with elements mapped from the input array, according to the element mapping
function. *)
val mapi: f:(uns -> 'a -> 'b) -> 'a t -> 'b t
(** Create an array with elements mapped from the input array, according to the indexed element
mapping function. *)
val fold_map: init:'accum -> f:('accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
(** Create an array and accumulated result with elements mapped from the input array, according to
the folding mapping function. *)
val foldi_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
(** Create an array and accumulated result with elements mapped from the input array, according to
the indexed folding mapping function. *)
val filter: f:('a -> bool) -> 'a t -> 'a t
(** Create an array with contents filtered by the given function. Only elements for which the filter
function returns [true] are incorporated into the result. *)
val filteri: f:(uns -> 'a -> bool) -> 'a t -> 'a t
(** Create an array with contents filtered by the given function. Only elements for which the
indexed filter function returns [true] are incorporated into the result. *)
val fold2_until: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t -> 'accum
(** Create an accumulated result for the paired elements of two arrays, calling the element folding
    function in increasing index order, and terminate folding early if the folding function returns
    true. *)
val foldi2_until: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t
-> 'accum
(** Create an accumulated result for the paired elements of two arrays, calling the indexed element
    folding function in increasing index order, and terminate folding early if the folding function
    returns true. *)
val fold2: init:'accum -> f:('accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
(** Create an accumulated result for the paired elements of two arrays, calling the element folding
    function in increasing index order. *)
val foldi2: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
(** Create an accumulated result for the paired elements of two arrays, calling the indexed element
    folding function in increasing index order. *)
val iter2: f:('a -> 'b -> unit) -> 'a t -> 'b t -> unit
(** Iterate over the paired elements of two arrays, calling the element visiting function in
    increasing index order. *)
val iteri2: f:(uns -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
(** Iterate over the paired elements of two arrays, calling the indexed element visiting function in
    increasing index order. *)
val map2: f:('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
(** Create an array with elements mapped by the element mapping function from the paired elements of
    two input arrays. *)
val mapi2: f:(uns -> 'a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
(** Create an array with elements mapped by the indexed element mapping function from the paired
    elements of two input arrays. *)
val fold2_map: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t -> 'accum * 'c t
(** Create an array and accumulated result based on the paired elements of two arrays, calling the
    element folding/mapping function in increasing index order. *)
val foldi2_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t
-> 'accum * 'c t
(** Create an array and accumulated result based on the paired elements of two arrays, calling the
    indexed element folding/mapping function in increasing index order. *)
val zip: 'a t -> 'b t -> ('a * 'b) t
(** Create an array with the paired elements of two arrays. *)
val unzip: ('a * 'b) t -> 'a t * 'b t
(** Create two arrays with the unpaired elements of the input pair array. *)
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/ed397cf3294ca397024e69eb3b1ed5f1db773db6/bootstrap/src/basis/array.mli | ocaml | Seeming excess verbosity is necessary for destructive type substitution.
* [hash_fold hash_fold_a t state] incorporates the hash of [t] into [state] and returns the
resulting state. Array elements are sequentially hash-folded into the resulting state via
[hash_fold_a].
* Efficiently convert a sequence of fixed element type with known length to an array.
* Efficiently convert a reversed sequence of fixed element type with known length to an array.
* Efficiently convert a generic sequence with known length to an array.
* Efficiently convert a reversed generic sequence with known length to an array.
* Efficiently convert a generic sequence with known length to an array.
* Efficiently convert a generic sequence with known length to an array.
* Return [true] if slice length is 0; [false] otherwise.
* Get slice element. [get i t] returns the element at index [i].
* Set slice element in place (mutate). [set_inplace i elm t] sets the element at index [i] to
[elm].
* Create a copy of a slice.
* Create a slice with contents initialized to equal the specified [range] of the input slice.
* Create a slice that is the concatenation of the input slice and the input element.
* Create a slice that is the concatenation of the input element and the input slice.
* Create a slice that is the concatenation of the bipartition of the input slice at specified
index, with the input element interposed.
* Reduce the slice to a single value, or return [None] if the slice is empty. The reduction
function is assumed to be associative and commutative; thus reduction order is unspecified.
* Reduce the slice to a single value, or halt if the slice is empty. The reduction function is
assumed to be associative and commutative; thus reduction order is unspecified.
* Swap elements at given indices in place (mutate).
* Create a slice based on the input slice, but with elements at given indices swapped.
* Reverse slice in place (mutate).
* Create a slice with contents reversed relative to the input slice.
* Set elements in place (mutate). [blit t0 t1] sets the elements of [t1] to equal the elements
of [t0]. Overlapping slices are supported, but in all cases [t0] and [t1] must have equal
length.
* Return true if slice is sorted (strictly if [?strict] is [true]) according to the comparison
function.
* Sort the slice in place (mutate) according to the comparison function. Preserve order of
equivalent elements if [?stable] is [true].
* Create a slice with sorted contents of the input slice according to the comparison function.
Preserve order of equivalent elements if [?stable] is [true].
* Create a slice with elements mapped from the input slice, according to the element mapping
function.
* Create a slice with elements mapped from the input slice, according to the indexed element
mapping function.
* Create a slice and accumulated result with elements mapped from the input slice, according to
the folding mapping function.
* Create a slice and accumulated result with elements mapped from the input slice, according to
the indexed folding mapping function.
* Create a slice with contents filtered by the given function. Only elements for which the
filter function returns [true] are incorporated into the result.
* Create a slice with contents filtered by the given function. Only elements for which the
indexed filter function returns [true] are incorporated into the result.
* Initialize array. [init range ~f:(fun i -> ...)] creates an array of length
[Range.Uns.length_hlt range] using [~f] to map range elements to array elements.
* Return array length.
* Return the range of indices contained in the array.
* Return [true] if array length is 0; [false] otherwise.
* Get array element. [get i t] returns the element at index [i].
* Set array element in place (mutate). [set_inplace i elm t] sets the element at index [i] to
[elm].
* Create a copy of an array.
* Create an array with contents initialized to equal the specified [range] of the input array.
* Create an array that is the concatenation of the input array and the input element.
* Create an array that is the concatenation of the input element and the input array.
* Create an array that is the concatenation of the bipartition of the input array at specified
index, with the input element interposed.
* Reduce the array to a single value, or return [None] if the array is empty. The reduction
function is assumed to be associative and commutative; thus reduction order is unspecified.
* Reduce the array to a single value, or halt if the array is empty. The reduction function is
assumed to be associative and commutative; thus reduction order is unspecified.
* Swap elements at given indices in place (mutate).
* Create an array based on the input array, but with elements at given indices swapped.
* Reverse array in place (mutate).
* Create an array with contents reversed relative to the input array.
* Return true if array is sorted (strictly if [?strict] is [true]) according to the comparison
function.
* Sort the array in place (mutate) according to the comparison function. Preserve order of
equivalent elements if [?stable] is [true].
* Create an array with sorted contents of the input array according to the comparison function.
Preserve order of equivalent elements if [?stable] is [true].
* Create an array with elements mapped from the input array, according to the element mapping
function.
* Create an array with elements mapped from the input array, according to the indexed element
mapping function.
* Create an array and accumulated result with elements mapped from the input array, according to
the folding mapping function.
* Create an array and accumulated result with elements mapped from the input array, according to
the indexed folding mapping function.
* Create an array with contents filtered by the given function. Only elements for which the filter
function returns [true] are incorporated into the result.
* Create an array with contents filtered by the given function. Only elements for which the
indexed filter function returns [true] are incorporated into the result. | open Rudiments0
type 'a t = 'a array
include ContainerIntf.SPolyIter with type 'a t := 'a t
include FormattableIntf.SPoly with type 'a t := 'a t
val fmt: ?alt:bool -> ?width:uns -> ('a -> (module Fmt.Formatter) -> (module Fmt.Formatter))
-> 'a t -> (module Fmt.Formatter) -> (module Fmt.Formatter)
* [ fmt ~alt ~width fmt_a t ] uses the element formatter [ fmt_a ] to format a syntactically valid
array representation of [ t ] . If [ ~alt = true ] , the output is broken across multiple lines with
outermost indentation [ ~width ] ( elements are indented to [ ~width + 4 ] ) .
array representation of [t]. If [~alt=true], the output is broken across multiple lines with
outermost indentation [~width] (elements are indented to [~width + 4]). *)
* Cursor that supports arbitrary array element access . All operations are O(1 ) .
module Cursor : sig
include CursorIntf.SPolyIndex
with type 'a container := 'a t
with type 'a elm := 'a
end
val hash_fold: ('a -> Hash.State.t -> Hash.State.t) -> 'a t -> Hash.State.t -> Hash.State.t
val cmp: ('a -> 'a -> Cmp.t) -> 'a t -> 'a t -> Cmp.t
* Compare two arrays given the element comparison function . The array lengths may differ .
module Seq : sig
type 'a outer = 'a t
module type SMono = sig
type t
type elm
val to_array: t -> elm outer
end
module type SPoly = sig
type 'a t
type 'a elm
val to_array: 'a t -> 'a elm outer
end
module type SPoly2 = sig
type ('a, 'cmp) t
type 'a elm
val to_array: ('a, 'cmp) t -> 'a elm outer
end
module type SPoly3 = sig
type ('k, 'v, 'cmp) t
type 'k key
type 'v value
val to_array: ('k, 'v, 'cmp) t -> ('k key * 'v value) outer
end
module MakeMono (T : SeqIntf.IMonoDef) : SMono
with type t := T.t
with type elm := T.elm
module MakeMonoRev (T : SeqIntf.IMonoDef) : SMono
with type t := T.t
with type elm := T.elm
module MakePoly (T : SeqIntf.IPolyDef) : SPoly
with type 'a t := 'a T.t
with type 'a elm := 'a T.elm
module MakePolyRev (T : SeqIntf.IPolyDef) : SPoly
with type 'a t := 'a T.t
with type 'a elm := 'a T.elm
module MakePoly2 (T : SeqIntf.IPoly2Def) : SPoly2
with type ('a, 'cmp) t := ('a, 'cmp) T.t
with type 'a elm := 'a T.elm
module MakePoly3 (T : SeqIntf.IPoly3Def) : SPoly3
with type ('k, 'v, 'cmp) t := ('k, 'v, 'cmp) T.t
with type 'k key := 'k T.key
with type 'v value := 'v T.value
end
module Slice : sig
include SliceIntf.SPolyIndex
with type 'a container := 'a t
with type 'a cursor := 'a Cursor.t
with type 'a elm := 'a
include ContainerIntf.SPolyIter with type 'a t := 'a t
include FormattableIntf.SPoly with type 'a t := 'a t
val fmt: ?alt:bool -> ?width:uns -> ('a -> (module Fmt.Formatter) -> (module Fmt.Formatter))
-> 'a t -> (module Fmt.Formatter) -> (module Fmt.Formatter)
* [ fmt ~alt ~width fmt_a t ] formats uses the element formatter [ fmt_a ] to format a syntactically
valid array representation of [ t ] . If [ ~alt = true ] , the output is broken across multiple lines
with outermost indentation [ ~width ] ( elements are indented to [ ~width + 4 ] ) .
valid array representation of [t]. If [~alt=true], the output is broken across multiple lines
with outermost indentation [~width] (elements are indented to [~width + 4]). *)
val is_empty: 'a t -> bool
val get: uns -> 'a t -> 'a
val set_inplace: uns -> 'a -> 'a t -> unit
val set: uns -> 'a -> 'a t -> 'a t
* Create a new slice based on input slice , differing in one element . [ set i elm t ] creates a
slice equal to [ t ] , except that element [ i ] is initialized to [ elm ] .
slice equal to [t], except that element [i] is initialized to [elm]. *)
val copy: 'a t -> 'a t
val pare: range -> 'a t -> 'a t
val join: ?sep:'a t -> 'a t list -> 'a t
* a list of slices , with optional separator .
val concat: 'a t -> 'a t -> 'a t
* two slices .
val append: 'a -> 'a t -> 'a t
val prepend: 'a -> 'a t -> 'a t
val insert: uns -> 'a -> 'a t -> 'a t
val remove: uns -> 'a t -> 'a t
* Create a slice that is the concatenation of the first and third components of the tripartition
about the element at specified index .
about the element at specified index. *)
val reduce: f:('a -> 'a -> 'a) -> 'a t -> 'a option
val reduce_hlt: f:('a -> 'a -> 'a) -> 'a t -> 'a
val swap_inplace: uns -> uns -> 'a t -> unit
val swap: uns -> uns -> 'a t -> 'a t
val rev_inplace: 'a t -> unit
val rev: 'a t -> 'a t
val blit: 'a t -> 'a t -> unit
val is_sorted: ?strict:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> bool
val sort_inplace: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> unit
val sort: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> 'a t
val psearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in slice , selecting the leftmost match , and falling back to the nearest
present predecessor in the case of no match .
@return { ul
{ - No predecessor : [ Some ( Cmp . Lt , 0 ) ] }
{ - Leftmost match : [ Some ( Cmp . , index ) ] }
{ - Predecessor : [ Some ( Cmp . Gt , index ) ] }
{ - Empty slice : [ None ] }
}
present predecessor in the case of no match.
@return {ul
{- No predecessor: [Some (Cmp.Lt, 0)]}
{- Leftmost match: [Some (Cmp.Eq, index)]}
{- Predecessor: [Some (Cmp.Gt, index)]}
{- Empty slice: [None]}
} *)
val search: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> uns option
* Binary search for key in slice . If key is found , return [ ( Some index ) ] , otherwise return
[ None ] . Note that if more than one element matches key , an arbitrary match is returned .
[None]. Note that if more than one element matches key, an arbitrary match is returned. *)
val nsearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in slice , selecting the rightmost match , and falling back to the nearest
present successor in the case of no match .
@return { ul
{ - Successor : [ Some ( Cmp . Lt , index ) ] }
{ - Rightmost match : [ Some ( Cmp . , index ) ] }
{ - No successor : [ Some ( Cmp . Gt , ( Uns.pred ( length t ) ) ) ] }
{ - Empty slice : [ None ] }
}
present successor in the case of no match.
@return {ul
{- Successor: [Some (Cmp.Lt, index)]}
{- Rightmost match: [Some (Cmp.Eq, index)]}
{- No successor: [Some (Cmp.Gt, (Uns.pred (length t)))]}
{- Empty slice: [None]}
} *)
val map: f:('a -> 'b) -> 'a t -> 'b t
val mapi: f:(uns -> 'a -> 'b) -> 'a t -> 'b t
val fold_map: init:'accum -> f:('accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
val foldi_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
val filter: f:('a -> bool) -> 'a t -> 'a t
val filteri: f:(uns -> 'a -> bool) -> 'a t -> 'a t
val fold2_until: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two slices , calling the element
folding function in increasing index order , and terminate folding early if the folding
function returns true .
folding function in increasing index order, and terminate folding early if the folding
function returns true. *)
val foldi2_until: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t
-> 'accum
* Create an accumulated result for the paired elements of two slices , calling the indexed
element folding function in increasing index order , and terminate folding early if the folding
function returns true .
element folding function in increasing index order, and terminate folding early if the folding
function returns true. *)
val fold2: init:'accum -> f:('accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the element
folding function in increasing index order .
folding function in increasing index order. *)
val foldi2: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the indexed
element folding function in increasing index order .
element folding function in increasing index order. *)
val iter2: f:('a -> 'b -> unit) -> 'a t -> 'b t -> unit
* Iterate over the paired elements of two slices , calling the element visiting function in
increasing index order .
increasing index order. *)
val iteri2: f:(uns -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
* Iterate over the paired elements of two slices , calling the indexed element visiting function
in increasing index order .
in increasing index order. *)
val map2: f:('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
* Create a slice with elements mapped by the element mapping function from the paired elements
of two input slices .
of two input slices. *)
val mapi2: f:(uns -> 'a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
* Create a slice with elements mapped by the indexed element mapping function from the paired
elements of two input slices .
elements of two input slices. *)
val fold2_map: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t
-> 'accum * 'c t
* Create a slice and accumulated result based on the paired elements of two slices , calling the
element folding / mapping function in increasing index order .
element folding/mapping function in increasing index order. *)
val foldi2_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t
-> 'accum * 'c t
* Create a slice and accumulated result based on the paired elements of two slices , calling the
indexed element folding / mapping function in increasing index order .
indexed element folding/mapping function in increasing index order. *)
val zip: 'a t -> 'b t -> ('a * 'b) t
* Create a slice with the paired elements of two slices .
val unzip: ('a * 'b) t -> 'a t * 'b t
* Create two slices with the unpaired elements of the input pair slice .
end
val init: range -> f:(uns -> 'a) -> 'a t
val of_list: ?length:uns -> 'a list -> 'a t
* Initialize array using contents of list . If specified , [ ? length ] must equal [ ( List.length
list ) ] .
list)]. *)
val of_list_rev: ?length:uns -> 'a list -> 'a t
* Initialize array using reversed contents of list . If specified , [ ? length ] must equal
[ ( list ) ] .
[(List.length list)]. *)
val of_stream: ?length:uns -> 'a Stream.t -> 'a t
* Initialize array using contents of stream . If specified , [ ? length ] must equal [ ( Stream.length
stream ) ] .
stream)]. *)
val of_stream_rev: ?length:uns -> 'a Stream.t -> 'a t
* Initialize array using reversed contents of stream . If specified , [ ? length ] must equal
[ ( Stream.length stream ) ] .
[(Stream.length stream)]. *)
val length: 'a t -> uns
val range: 'a t -> range
val is_empty: 'a t -> bool
val get: uns -> 'a t -> 'a
val set_inplace: uns -> 'a -> 'a t -> unit
val set: uns -> 'a -> 'a t -> 'a t
* Create a new array based on input array , differing in one element . [ set i elm t ] creates an
array equal to [ t ] , except that element [ i ] is initialized to [ elm ] .
array equal to [t], except that element [i] is initialized to [elm]. *)
val copy: 'a t -> 'a t
val pare: range -> 'a t -> 'a t
val join: ?sep:'a t -> 'a t list -> 'a t
* a list of arrays , with optional separator .
val concat: 'a t -> 'a t -> 'a t
* two arrays .
val append: 'a -> 'a t -> 'a t
val prepend: 'a -> 'a t -> 'a t
val insert: uns -> 'a -> 'a t -> 'a t
val remove: uns -> 'a t -> 'a t
* Create an array that is the concatenation of the first and third components of the tripartition
about the element at specified index .
about the element at specified index. *)
val reduce: f:('a -> 'a -> 'a) -> 'a t -> 'a option
val reduce_hlt: f:('a -> 'a -> 'a) -> 'a t -> 'a
val swap_inplace: uns -> uns -> 'a t -> unit
val swap: uns -> uns -> 'a t -> 'a t
val rev_inplace: 'a t -> unit
val rev: 'a t -> 'a t
val is_sorted: ?strict:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> bool
val sort_inplace: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> unit
val sort: ?stable:bool -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> 'a t
val psearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in array , selecting the leftmost match , and falling back to the nearest
present predecessor in the case of no match .
@return { ul
{ - No predecessor : [ Some ( Cmp . Lt , 0 ) ] }
{ - Leftmost match : [ Some ( Cmp . , index ) ] }
{ - Predecessor : [ Some ( Cmp . Gt , index ) ] }
{ - Empty array : [ None ] }
}
present predecessor in the case of no match.
@return {ul
{- No predecessor: [Some (Cmp.Lt, 0)]}
{- Leftmost match: [Some (Cmp.Eq, index)]}
{- Predecessor: [Some (Cmp.Gt, index)]}
{- Empty array: [None]}
} *)
val search: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> uns option
* Binary search for key in array . If key is found , return [ ( Some index ) ] , otherwise return [ None ] .
Note that if more than one element matches key , an arbitrary match is returned .
Note that if more than one element matches key, an arbitrary match is returned. *)
val nsearch: 'a -> cmp:('a -> 'a -> Cmp.t) -> 'a t -> (Cmp.t * uns) option
* Binary search for key in array , selecting the rightmost match , and falling back to the nearest
present successor in the case of no match .
@return { ul
{ - Successor : [ Some ( Cmp . Lt , index ) ] }
{ - Rightmost match : [ Some ( Cmp . , index ) ] }
{ - No successor : [ Some ( Cmp . Gt , ( Uns.pred ( length t ) ) ) ] }
{ - Empty array : [ None ] }
}
present successor in the case of no match.
@return {ul
{- Successor: [Some (Cmp.Lt, index)]}
{- Rightmost match: [Some (Cmp.Eq, index)]}
{- No successor: [Some (Cmp.Gt, (Uns.pred (length t)))]}
{- Empty array: [None]}
} *)
val map: f:('a -> 'b) -> 'a t -> 'b t
val mapi: f:(uns -> 'a -> 'b) -> 'a t -> 'b t
val fold_map: init:'accum -> f:('accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
val foldi_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'accum * 'b) -> 'a t -> 'accum * 'b t
val filter: f:('a -> bool) -> 'a t -> 'a t
val filteri: f:(uns -> 'a -> bool) -> 'a t -> 'a t
val fold2_until: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the element folding
function in increasing index order , and terminate folding early if the folding function returns
true .
function in increasing index order, and terminate folding early if the folding function returns
true. *)
val foldi2_until: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * bool) -> 'a t -> 'b t
-> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the indexed element
folding function in increasing index order , and terminate folding early if the folding function
returns true .
folding function in increasing index order, and terminate folding early if the folding function
returns true. *)
val fold2: init:'accum -> f:('accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the element folding
function in increasing index order .
function in increasing index order. *)
val foldi2: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum) -> 'a t -> 'b t -> 'accum
* Create an accumulated result for the paired elements of two arrays , calling the indexed element
folding function in increasing index order .
folding function in increasing index order. *)
val iter2: f:('a -> 'b -> unit) -> 'a t -> 'b t -> unit
* Iterate over the paired elements of two arrays , calling the element visiting function in
increasing index order .
increasing index order. *)
val iteri2: f:(uns -> 'a -> 'b -> unit) -> 'a t -> 'b t -> unit
* Iterate over the paired elements of two arrays , calling the indexed element visiting function in
increasing index order .
increasing index order. *)
val map2: f:('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
* Create an array with elements mapped by the element mapping function from the paired elements of
two input arrays .
two input arrays. *)
val mapi2: f:(uns -> 'a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t
* Create an array with elements mapped by the indexed element mapping function from the paired
elements of two input arrays .
elements of two input arrays. *)
val fold2_map: init:'accum -> f:('accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t -> 'accum * 'c t
* Create an array and accumulated result based on the paired elements of two arrays , calling the
element folding / mapping function in increasing index order .
element folding/mapping function in increasing index order. *)
val foldi2_map: init:'accum -> f:(uns -> 'accum -> 'a -> 'b -> 'accum * 'c) -> 'a t -> 'b t
-> 'accum * 'c t
* Create an array and accumulated result based on the paired elements of two arrays , calling the
indexed element folding / mapping function in increasing index order .
indexed element folding/mapping function in increasing index order. *)
val zip: 'a t -> 'b t -> ('a * 'b) t
* Create an array with the paired elements of two arrays .
val unzip: ('a * 'b) t -> 'a t * 'b t
* Create two arrays with the unpaired elements of the input pair array .
|
6a726f79c1e0af5455084e043a040df30292f77ed6427378c28ef5d528795ef4 | MassD/99 | p62.ml |
 Collect the internal nodes of a binary tree in a list. (easy)
 An internal node of a binary tree has either one or two non-empty successors.
 Write a function internals to collect them in a list.
*)
(* A binary tree: either empty, or a node carrying a value and two subtrees. *)
type 'a bt = Empty | Node of 'a * 'a bt * 'a bt

(* Collect the values of all internal nodes (nodes with at least one non-empty
   child).  Empty trees and leaves contribute nothing. *)
let rec internals tree =
  match tree with
  | Empty -> []
  | Node (_, Empty, Empty) -> []
  | Node (value, left, right) ->
    value :: List.rev_append (internals left) (internals right)
(* Collect internal-node values using an explicit worklist, so the traversal
   is fully tail-recursive. *)
let internals' btree =
  let rec walk acc tree pending =
    match tree with
    | Empty | Node (_, Empty, Empty) ->
      (* Empty tree or leaf: record nothing, resume with the next pending
         subtree (if any). *)
      (match pending with
       | [] -> acc
       | next :: rest -> walk acc next rest)
    | Node (x, l, r) ->
      (* Internal node: record it, descend left, defer the right subtree. *)
      walk (x :: acc) l (r :: pending)
  in
  walk [] btree []
| null | https://raw.githubusercontent.com/MassD/99/1d3019eb55b0d621ed1df4132315673dd812b1e1/55-69-binary-tress/p62.ml | ocaml |
Collect the internal nodes of a binary tree in a list . ( easy )
An internal node of a binary tree has either one or two non - empty successors . Write a function internals to collect them in a list .
Collect the internal nodes of a binary tree in a list. (easy)
An internal node of a binary tree has either one or two non-empty successors. Write a function internals to collect them in a list.
*)
type 'a bt = Empty | Node of 'a * 'a bt * 'a bt
let rec internals = function
| Empty | Node (_,Empty,Empty) -> []
| Node (x,l,r) -> x::(List.rev_append (internals l) (internals r))
let internals' btree =
let rec collect acc = function
| Empty, [] | Node (_,Empty,Empty), [] -> acc
| Empty, hd::tl | Node (_,Empty,Empty), hd::tl -> collect acc (hd,tl)
| Node (x,l,r), wl -> collect (x::acc) (l, r::wl)
in
collect [] (btree,[])
| |
e23f1159abcf1c9570199780aa74491cead29eede5d477d4916428d806c7b5d1 | mirage/mirage-channel | test_channel.ml | open Lwt.Infix
module F = Mirage_flow_combinators.F
(* Format a message and abort the current Alcotest case with it. *)
let fail fmt = Fmt.kstr Alcotest.fail fmt
(* this is a very small set of tests for the channel interface,
intended to ensure that EOF conditions on the underlying flow are
handled properly *)
module Channel = Mirage_channel.Make(F)
(* Assert that a [Channel.read_char] result on a flow expected to be empty is
   [`Eof]; receiving data or an error is a test failure. *)
let check_eof = function
  | Ok (`Data ch) ->
    fail "character %c was returned from Channel.read_char on an empty flow" ch
  | Ok `Eof -> Lwt.return ()
  | Error e -> fail "unexpected error: %a" Channel.pp_error e
(* Fail a test that expected an exception but observed none. *)
let err_no_exception () = fail "no exception"
(* Fail a test that caught an exception of an unexpected kind. *)
let err_wrong_exception e = fail "wrong exception: %s" (Printexc.to_string e)
(* Reading a character from an empty flow must report [`Eof]. *)
let test_read_char_eof () =
  let f = F.make () in
  let c = Channel.create f in
  Channel.read_char c >>=
  check_eof
(* A line terminated only by end-of-input is returned in full. *)
let test_read_line () =
  let input = "I am the very model of a modern major general" in
  let f = F.make ~input:(F.input_string input) () in
  let c = Channel.create f in
  Channel.read_line c >|= function
  | Ok (`Data buf) -> Alcotest.(check string) "read line" input (Cstruct.copyv buf)
  | Ok `Eof -> fail "eof"
  | Error e -> fail "error: %a" Channel.pp_error e
(* The line is longer than the [len] limit: [read_line ~len] must report an
   error rather than return data or [`Eof]. *)
let test_read_line_len () =
  let input = "I am the very model of a modern major general" in
  let f = F.make ~input:(F.input_string input) () in
  let c = Channel.create f in
  Channel.read_line ~len:5 c >|= function
  | Ok (`Data _) -> fail "read a line which was too big"
  | Ok `Eof -> fail "eof"
  | Error _ -> ()
(* The line is shorter than the limit and bounded by \r\n: only the text
   before the terminator is returned. *)
let test_read_line_len2 () =
  let input = "I\r\n am the very model of a modern major general" in
  let f = F.make ~input:(F.input_string input) () in
  let c = Channel.create f in
  Channel.read_line ~len:5 c >|= function
  | Ok (`Data buf) -> Alcotest.(check string) "read line" "I" (Cstruct.copyv buf)
  | Ok `Eof -> fail "eof"
  | Error e -> fail "error: %a" Channel.pp_error e
(* The line is shorter than the limit and bounded by EOF *)
let test_read_line_len3 () =
  (* The limit exceeds the line length, so the whole EOF-bounded line fits. *)
  let input = "I am the very model of a modern major general" in
  let f = F.make ~input:(F.input_string input) () in
  let c = Channel.create f in
  Channel.read_line ~len:50 c >|= function
  | Ok (`Data buf) -> Alcotest.(check string) "read line" input (Cstruct.copyv buf)
  | Ok `Eof -> fail "eof"
  | Error e -> fail "error: %a" Channel.pp_error e
(* Existential package pairing a channel implementation with a live channel
   value, so helpers can range over different underlying flows.  The error
   type must include [`Line_too_long] so limit failures can be matched. *)
type channel = V : (module Mirage_channel.S with type t = 'a and type error = [> `Line_too_long ]) * 'a -> channel

(* Build a channel over a one-shot flow that yields [s] once, then [`Eof]. *)
let channel_from_raw_string s =
  let consumed = ref false in
  let module Flow = struct
    type flow = unit
    type error = |
    type write_error = Mirage_flow.write_error
    let pp_error : error Fmt.t = fun _ -> function _ -> .
    let pp_write_error : Mirage_flow.write_error Fmt.t =
      fun ppf `Closed -> Fmt.string ppf "Flow closed"
    (* First read hands back the whole payload; every later read is EOF. *)
    let read () =
      if not !consumed
      then ( consumed := true
           ; Lwt.return_ok (`Data (Cstruct.of_string s)) )
      else Lwt.return_ok `Eof
    (* These tests never write through this flow. *)
    let write _ _ = assert false
    let writev _ _ = assert false
    let close _ = Lwt.return ()
  end in
  let module Channel = Mirage_channel.Make(Flow) in
  V ((module Channel), Channel.create ())
let test_read_line_len4 () =
let V ((module Channel), c) = channel_from_raw_string "foo" in
Channel.read_line ~len:3 c >|= function
| Ok (`Data bufs) -> fail "Unexpected data: %S"
Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> match e with
| `Line_too_long -> ()
| e -> fail "Unexpected error: %a" Channel.pp_error e
let test_read_line_len5 () =
let V ((module Channel), c) = channel_from_raw_string "foo\r" in
Channel.read_line ~len:3 c >|= function
| Ok (`Data bufs) -> fail "Unexpected data: %S"
Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> match e with
| `Line_too_long -> ()
| e -> fail "Unexpected error: %a" Channel.pp_error e
let test_read_line_len6 () =
let V ((module Channel), c) = channel_from_raw_string "foo\r\n" in
Channel.read_line ~len:3 c >|= function
| Ok (`Data bufs) -> fail "Unexpected data: %S"
Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> match e with
| `Line_too_long -> ()
| e -> fail "Unexpected error: %a" Channel.pp_error e
(* [len] of 4 still cannot accommodate "foo" plus its "\r\n" terminator,
   so the read must fail with [`Line_too_long]. *)
let test_read_line_len7 () =
  let V ((module Channel), ch) = channel_from_raw_string "foo\r\n" in
  Channel.read_line ~len:4 ch >|= fun result ->
  match result with
  | Ok (`Data parts) ->
    fail "Unexpected data: %S" Cstruct.(to_string (concat parts))
  | Ok `Eof -> fail "eof"
  | Error `Line_too_long -> ()
  | Error err -> fail "Unexpected error: %a" Channel.pp_error err
(* With [len] of 5 the whole "foo\r\n" fits, so the line is returned
   with its terminator stripped. *)
let test_read_line_len8 () =
  let V ((module Channel), ch) = channel_from_raw_string "foo\r\n" in
  Channel.read_line ~len:5 ch >|= fun result ->
  match result with
  | Ok (`Data parts) ->
    Alcotest.(check string) "read line" "foo" Cstruct.(to_string (concat parts))
  | Ok `Eof -> fail "eof"
  | Error err -> fail "Unexpected error: %a" Channel.pp_error err
(* [read_exactly ~len:4] must hand back buffers totalling exactly four
   bytes when the underlying flow has more data available. *)
let test_read_exactly () =
  let input = "I am the very model of a modern major general" in
  let flow = F.make ~input:(F.input_string input) () in
  let ch = Channel.create flow in
  Channel.read_exactly ~len:4 ch >|= fun result ->
  match result with
  | Ok (`Data parts) ->
    Alcotest.(check int) "wrong length" 4 Cstruct.(length (concat parts))
  | Ok `Eof -> fail "eof"
  | Error err -> fail "error: %a" Channel.pp_error err
(* Reading a flow to EOF must not close its write side: after draining
   the input, writing a line and flushing must still succeed.
   The [output] callback asserts no write reaches the flow after [close]
   has run. *)
let test_read_until_eof_then_write () =
  let str = "I am the very model of a modern major general" in
  let closed = ref false in
  let output _buf _off len =
    if !closed
    then Alcotest.fail "attempted to write after the flow was closed"
    else Lwt.return len in
  let close () =
    closed := true;
    Lwt.return_unit in
  let input = F.input_string str in
  let f = F.make ~close ~input ~output () in
  let c = Channel.create f in
  (* Should read to EOF: *)
  Channel.read_line c >>= fun _ ->
  Channel.write_line c "Even though I've read to EOF, I should be able to write";
  Channel.flush c >|= function
  | Ok () -> ()
  | Error `Closed -> fail "error: closed"
  | Error e -> fail "error: %a" Channel.pp_write_error e
(* Alcotest cases: EOF handling plus the read_line length-limit matrix. *)
let suite = [
  "read_char + EOF" , `Quick, test_read_char_eof;
  "read_line" , `Quick, test_read_line;
  "read_exactly" , `Quick, test_read_exactly;
  "write after read EOF", `Quick, test_read_until_eof_then_write;
  "read_line_len" , `Quick, test_read_line_len;
  "read_line_len2" , `Quick, test_read_line_len2;
  "read_line_len3" , `Quick, test_read_line_len3;
  "read_line_len4" , `Quick, test_read_line_len4;
  "read_line_len5" , `Quick, test_read_line_len5;
  "read_line_len6" , `Quick, test_read_line_len6;
  "read_line_len7" , `Quick, test_read_line_len7;
  "read_line_len8" , `Quick, test_read_line_len8;
]
| null | https://raw.githubusercontent.com/mirage/mirage-channel/7e6729e3e06de402526f7a27be18279e5449257f/test/test_channel.ml | ocaml | this is a very small set of tests for the channel interface,
intended to ensure that EOF conditions on the underlying flow are
handled properly
The line is longer than the limit
The line is shorter than the limit and bounded by \r\n | open Lwt.Infix
module F = Mirage_flow_combinators.F
let fail fmt = Fmt.kstr (fun s -> Alcotest.fail s) fmt
module Channel = Mirage_channel.Make(F)
let check_eof = function
| Ok (`Data ch) ->
fail "character %c was returned from Channel.read_char on an empty flow" ch
| Ok `Eof -> Lwt.return ()
| Error e -> fail "unexpected error: %a" Channel.pp_error e
let err_no_exception () = fail "no exception"
let err_wrong_exception e = fail "wrong exception: %s" (Printexc.to_string e)
let test_read_char_eof () =
let f = F.make () in
let c = Channel.create f in
Channel.read_char c >>=
check_eof
let test_read_line () =
let input = "I am the very model of a modern major general" in
let f = F.make ~input:(F.input_string input) () in
let c = Channel.create f in
Channel.read_line c >|= function
| Ok (`Data buf) -> Alcotest.(check string) "read line" input (Cstruct.copyv buf)
| Ok `Eof -> fail "eof"
| Error e -> fail "error: %a" Channel.pp_error e
let test_read_line_len () =
let input = "I am the very model of a modern major general" in
let f = F.make ~input:(F.input_string input) () in
let c = Channel.create f in
Channel.read_line ~len:5 c >|= function
| Ok (`Data _) -> fail "read a line which was too big"
| Ok `Eof -> fail "eof"
| Error _ -> ()
let test_read_line_len2 () =
let input = "I\r\n am the very model of a modern major general" in
let f = F.make ~input:(F.input_string input) () in
let c = Channel.create f in
Channel.read_line ~len:5 c >|= function
| Ok (`Data buf) -> Alcotest.(check string) "read line" "I" (Cstruct.copyv buf)
| Ok `Eof -> fail "eof"
| Error e -> fail "error: %a" Channel.pp_error e
The line is shorter than the limit and bounded by EOF
let test_read_line_len3 () =
let input = "I am the very model of a modern major general" in
let f = F.make ~input:(F.input_string input) () in
let c = Channel.create f in
Channel.read_line ~len:50 c >|= function
| Ok (`Data buf) -> Alcotest.(check string) "read line" input (Cstruct.copyv buf)
| Ok `Eof -> fail "eof"
| Error e -> fail "error: %a" Channel.pp_error e
type channel = V : (module Mirage_channel.S with type t = 'a and type error = [> `Line_too_long ]) * 'a -> channel
let channel_from_raw_string s =
let consumed = ref false in
let module Flow = struct
type flow = unit
type error = |
type write_error = Mirage_flow.write_error
let pp_error : error Fmt.t = fun _ -> function _ -> .
let pp_write_error : Mirage_flow.write_error Fmt.t =
fun ppf `Closed -> Fmt.string ppf "Flow closed"
let read () =
if not !consumed
then ( consumed := true
; Lwt.return_ok (`Data (Cstruct.of_string s)) )
else Lwt.return_ok `Eof
let write _ _ = assert false
let writev _ _ = assert false
let close _ = Lwt.return ()
end in
let module Channel = Mirage_channel.Make(Flow) in
V ((module Channel), Channel.create ())
let test_read_line_len4 () =
let V ((module Channel), c) = channel_from_raw_string "foo" in
Channel.read_line ~len:3 c >|= function
| Ok (`Data bufs) -> fail "Unexpected data: %S"
Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> match e with
| `Line_too_long -> ()
| e -> fail "Unexpected error: %a" Channel.pp_error e
let test_read_line_len5 () =
let V ((module Channel), c) = channel_from_raw_string "foo\r" in
Channel.read_line ~len:3 c >|= function
| Ok (`Data bufs) -> fail "Unexpected data: %S"
Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> match e with
| `Line_too_long -> ()
| e -> fail "Unexpected error: %a" Channel.pp_error e
let test_read_line_len6 () =
let V ((module Channel), c) = channel_from_raw_string "foo\r\n" in
Channel.read_line ~len:3 c >|= function
| Ok (`Data bufs) -> fail "Unexpected data: %S"
Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> match e with
| `Line_too_long -> ()
| e -> fail "Unexpected error: %a" Channel.pp_error e
let test_read_line_len7 () =
let V ((module Channel), c) = channel_from_raw_string "foo\r\n" in
Channel.read_line ~len:4 c >|= function
| Ok (`Data bufs) -> fail "Unexpected data: %S"
Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> match e with
| `Line_too_long -> ()
| e -> fail "Unexpected error: %a" Channel.pp_error e
let test_read_line_len8 () =
let V ((module Channel), c) = channel_from_raw_string "foo\r\n" in
Channel.read_line ~len:5 c >|= function
| Ok (`Data bufs) ->
Alcotest.(check string) "read line" "foo" Cstruct.(to_string (concat bufs))
| Ok `Eof -> fail "eof"
| Error e -> fail "Unexpected error: %a" Channel.pp_error e
let test_read_exactly () =
let input = "I am the very model of a modern major general" in
let f = F.make ~input:(F.input_string input) () in
let c = Channel.create f in
Channel.read_exactly ~len:4 c >|= function
| Ok (`Data bufs) ->
Alcotest.(check int) "wrong length" 4 (Cstruct.(length (concat bufs)))
| Ok `Eof -> fail "eof"
| Error e -> fail "error: %a" Channel.pp_error e
let test_read_until_eof_then_write () =
let str = "I am the very model of a modern major general" in
let closed = ref false in
let output _buf _off len =
if !closed
then Alcotest.fail "attempted to write after the flow was closed"
else Lwt.return len in
let close () =
closed := true;
Lwt.return_unit in
let input = F.input_string str in
let f = F.make ~close ~input ~output () in
let c = Channel.create f in
Should read to EOF :
Channel.read_line c >>= fun _ ->
Channel.write_line c "Even though I've read to EOF, I should be able to write";
Channel.flush c >|= function
| Ok () -> ()
| Error `Closed -> fail "error: closed"
| Error e -> fail "error: %a" Channel.pp_write_error e
let suite = [
"read_char + EOF" , `Quick, test_read_char_eof;
"read_line" , `Quick, test_read_line;
"read_exactly" , `Quick, test_read_exactly;
"write after read EOF", `Quick, test_read_until_eof_then_write;
"read_line_len" , `Quick, test_read_line_len;
"read_line_len2" , `Quick, test_read_line_len2;
"read_line_len3" , `Quick, test_read_line_len3;
"read_line_len4" , `Quick, test_read_line_len4;
"read_line_len5" , `Quick, test_read_line_len5;
"read_line_len6" , `Quick, test_read_line_len6;
"read_line_len7" , `Quick, test_read_line_len7;
"read_line_len8" , `Quick, test_read_line_len8;
]
|
fd2c347ca26f119c493a5e7b0978c6746b6532d08b7d68b55082a4b19e50d7da | clash-lang/clash-compiler | Unsigned.hs | |
Copyright : ( C ) 2021 - 2022 , QBayLogic B.V.
License : BSD2 ( see the file LICENSE )
Maintainer : QBayLogic B.V. < >
Random generation of Unsigned numbers .
Copyright : (C) 2021-2022, QBayLogic B.V.
License : BSD2 (see the file LICENSE)
Maintainer : QBayLogic B.V. <>
Random generation of Unsigned numbers.
-}
# OPTIONS_GHC -fplugin = GHC.TypeLits . KnownNat . Solver #
# LANGUAGE CPP #
{-# LANGUAGE GADTs #-}
module Clash.Hedgehog.Sized.Unsigned
( genUnsigned
, SomeUnsigned(..)
, genSomeUnsigned
) where
#if !MIN_VERSION_base(4,16,0)
import GHC.Natural (Natural)
#endif
import GHC.TypeNats
import Hedgehog (MonadGen, Range)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Clash.Promoted.Nat
import Clash.Sized.Internal.Unsigned
genUnsigned :: (MonadGen m, KnownNat n) => Range (Unsigned n) -> m (Unsigned n)
genUnsigned range =
Gen.frequency
[ (70, Gen.integral range)
, (30, Gen.constant (Range.upperBound 99 range))
]
data SomeUnsigned atLeast where
SomeUnsigned :: SNat n -> Unsigned (atLeast + n) -> SomeUnsigned atLeast
instance KnownNat atLeast => Show (SomeUnsigned atLeast) where
show (SomeUnsigned SNat x) = show x
genSomeUnsigned
:: (MonadGen m, KnownNat atLeast)
=> Range Natural
-> m (SomeUnsigned atLeast)
genSomeUnsigned rangeUnsigned = do
numExtra <- Gen.integral rangeUnsigned
case someNatVal numExtra of
SomeNat proxy -> SomeUnsigned (snatProxy proxy) <$> genUnsigned Range.linearBounded
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/ba4765139ea0728546bf934005d2d9b77e48d8c7/clash-prelude-hedgehog/src/Clash/Hedgehog/Sized/Unsigned.hs | haskell | # LANGUAGE GADTs # | |
Copyright : ( C ) 2021 - 2022 , QBayLogic B.V.
License : BSD2 ( see the file LICENSE )
Maintainer : QBayLogic B.V. < >
Random generation of Unsigned numbers .
Copyright : (C) 2021-2022, QBayLogic B.V.
License : BSD2 (see the file LICENSE)
Maintainer : QBayLogic B.V. <>
Random generation of Unsigned numbers.
-}
# OPTIONS_GHC -fplugin = GHC.TypeLits . KnownNat . Solver #
# LANGUAGE CPP #
module Clash.Hedgehog.Sized.Unsigned
( genUnsigned
, SomeUnsigned(..)
, genSomeUnsigned
) where
#if !MIN_VERSION_base(4,16,0)
import GHC.Natural (Natural)
#endif
import GHC.TypeNats
import Hedgehog (MonadGen, Range)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Clash.Promoted.Nat
import Clash.Sized.Internal.Unsigned
genUnsigned :: (MonadGen m, KnownNat n) => Range (Unsigned n) -> m (Unsigned n)
genUnsigned range =
Gen.frequency
[ (70, Gen.integral range)
, (30, Gen.constant (Range.upperBound 99 range))
]
data SomeUnsigned atLeast where
SomeUnsigned :: SNat n -> Unsigned (atLeast + n) -> SomeUnsigned atLeast
instance KnownNat atLeast => Show (SomeUnsigned atLeast) where
show (SomeUnsigned SNat x) = show x
genSomeUnsigned
:: (MonadGen m, KnownNat atLeast)
=> Range Natural
-> m (SomeUnsigned atLeast)
genSomeUnsigned rangeUnsigned = do
numExtra <- Gen.integral rangeUnsigned
case someNatVal numExtra of
SomeNat proxy -> SomeUnsigned (snatProxy proxy) <$> genUnsigned Range.linearBounded
|
f58c46a8d88340c26b9bf78ec4755bc72e30244ccfbfdf44f0cf9f0981f3bfe4 | discus-lang/ddc | LetPrivate.hs | {-# OPTIONS_HADDOCK hide #-}
module DDC.Core.Check.Judge.Type.LetPrivate
(checkLetPrivate)
where
import DDC.Core.Check.Judge.Kind
import DDC.Core.Check.Judge.EqT
import DDC.Core.Check.Judge.Type.Base
import qualified DDC.Core.Env.EnvT as EnvT
import qualified DDC.Type.Sum as Sum
import qualified DDC.Type.Env as Env
import qualified Data.Set as Set
import Data.List as L
checkLetPrivate :: Checker a n
-- private --------------------------------------
checkLetPrivate !table !ctx mode demand
xx@(XLet a (LPrivate bsRgn mtParent bsWit) x)
= case takeSubstBoundsOfBinds bsRgn of
[] -> tableCheckExp table table ctx Recon demand x
us -> do
let config = tableConfig table
let depth = length $ map isBAnon bsRgn
ctrace $ vcat
[ text "*> Let Private"
, text " mode =" %% ppr mode
, text " demand =" %% string (show demand)
, text " in region binds =" %% ppr bsRgn
, text " in parent bind =" %% string (show mtParent)
, text " in witness binds =" %% ppr bsWit
, empty ]
-- Check the kinds of the region binders.
-- These must already set to kind Region.
(bsRgn', _, _)
<- liftM unzip3
$ mapM (\b -> checkBindM config ctx UniverseKind b Recon) bsRgn
let ksRgn = map typeOfBind bsRgn'
-- The binders must have region kind.
when (any (not . isRegionKind) ksRgn)
$ throw $ ErrorPrivateNotRegion a xx bsRgn ksRgn
-- We can't shadow region binders because we might have witnesses
-- in the environment that conflict with the ones created here.
let rebounds = filter (flip memberKindBind ctx) bsRgn'
when (not $ null rebounds)
$ throw $ ErrorPrivateRebound a xx rebounds
-- Check the witness binders.
-- These must have full type annotations, as we don't infer
-- the types of introduced witnesses.
let (ctx', pos1) = markContext ctx
let ctx1 = pushKinds [(b, RoleConcrete) | b <- bsRgn] ctx'
let ctx2 = liftTypes depth ctx1
(bsWit', _, _)
<- liftM unzip3
$ mapM (\b -> checkBindM config ctx2 UniverseSpec b Recon)
bsWit
-- Check that the witnesses bound here are for the region,
-- and they don't conflict with each other.
checkWitnessBindsM config a ctx xx us bsWit'
-- Check the body expression.
-- We always want to do this in 'Synth' mode as the expected
-- type uses the region names visible from outside, and will
-- not mention local regions are introduced by the 'private'
-- construct.
let ctx3 = pushTypes bsWit' ctx2
(xBody3, tBody3, effs3, ctx4)
<- tableCheckExp table table ctx3 (Synth []) demand x
-- The body type must have data kind.
(tBody4, kBody4, ctx5)
<- checkTypeM config ctx4 UniverseSpec tBody3
$ case mode of
Recon -> Recon
_ -> Check kData
tBody5 <- applyContext ctx5 tBody4
kBody5 <- applyContext ctx5 kBody4
TODO : cleanup mess introduced in GHC MonadFail transition .
effs5 <- applyContext ctx5 (TSum effs3)
>>= \case TSum effs5 -> return effs5
_ -> error "not a type sum"
when (not $ isDataKind kBody5)
$ throw $ ErrorMismatch a kBody5 kData xx
-- The final body type.
tBody_final
<- case mtParent of
-- If the bound region variables are children of some parent
-- region then they are merged into the parent when the
-- private/extend construct ends.
Just tParent
-> do return $ foldl (\t b -> substituteTX b tParent t)
tBody5 bsRgn
-- If the bound region variables have no parent then they are
-- deallocated when the private construct ends.
-- The bound region variables cannot be free in the body type.
_
-> do let fvsT = freeT Env.empty tBody5
when (any (flip Set.member fvsT) us)
$ throw $ ErrorPrivateEscape a xx bsRgn tBody5
return $ lowerT depth tBody5
-- Check that the result matches any expected type.
ctx6 <- case mode of
Check tExpected
-> do makeEqT config ctx5 tExpected tBody_final
$ ErrorMismatch a tExpected tBody_final xx
_ -> return ctx5
tBody_final' <- applyContext ctx6 tBody_final
-- Delete effects on the bound region from the result.
let delEff es u = Sum.delete (tRead (TVar u))
$ Sum.delete (tWrite (TVar u))
$ Sum.delete (tAlloc (TVar u))
$ es
-- The final effect type.
effs_cut
<- case mtParent of
-- If the bound region variables are children of some parent
-- region then the overall effect is to allocate into
-- the parent.
Just tParent
-> return $ (lowerT depth $ foldl delEff effs5 us)
`Sum.union` (Sum.singleton kEffect (tAlloc tParent))
-- If the bound region variables have no parent then they
-- are deallocated when the private construct ends and no
-- effect on these regions is visible.
_ -> return $ lowerT depth
$ foldl delEff effs5 us
-- Cut stack back to the length we started with,
-- remembering to lower to undo the lift we applied previously.
let ctx_cut = lowerTypes depth
$ popToPos pos1 ctx6
returnX a
(\z -> XLet z (LPrivate bsRgn mtParent bsWit) xBody3)
tBody_final' effs_cut ctx_cut
checkLetPrivate _ _ _ _ _
= error "ddc-core.checkLetPrivate: no match"
-------------------------------------------------------------------------------
-- | Check the set of witness bindings bound in a letregion for conflicts.
checkWitnessBindsM
:: (Show n, Ord n)
=> Config n -- ^ Type checker config.
-> a -- ^ Annotation for error messages.
-> Context n -- ^ Context
-> Exp a n -- ^ The whole expression, for error messages.
-> [Bound n] -- ^ Region variables bound in the letregion.
-> [Bind n] -- ^ Other witness bindings in the same set.
-> CheckM a n ()
checkWitnessBindsM !config !a !ctx !xx !uRegions !bsWit
= mapM_ checkWitnessBindM bsWit
where
-- Check if some type variable or constructor is already in the
-- environment. NOTE: The constructor case is for region handles
-- when using the Eval fragment.
inEnv tt
= case tt of
TVar u'
| EnvT.member u' (contextEnvT ctx) -> True
| memberKind u' ctx -> True
TCon (TyConBound n')
| EnvT.member (UName n') (contextEnvT ctx) -> True
| memberKind (UName n') ctx -> True
_ -> False
-- Check the argument of a witness type is for the region we're
-- introducing here.
checkWitnessArg bWit t2
= case t2 of
TVar u'
| all (/= u') uRegions
-> throw $ ErrorPrivateWitnessOther a xx uRegions bWit
| otherwise -> return ()
TCon (TyConBound n')
| all (/= UName n') uRegions
-> throw $ ErrorPrivateWitnessOther a xx uRegions bWit
| otherwise -> return ()
-- The parser should ensure the right of a witness is a
-- constructor or variable.
_ -> throw $ ErrorPrivateWitnessInvalid a xx bWit
-- Associate each witness binder with its type.
btsWit = [(typeOfBind b, b) | b <- bsWit]
-- Check a single witness binder for conflicts with other witnesses.
checkWitnessBindM bWit
= case typeOfBind bWit of
TApp (TCon (TyConWitness TwConConst)) t2
| Just bConflict <- L.lookup (tMutable t2) btsWit
-> throw $ ErrorPrivateWitnessConflict a xx bWit bConflict
| otherwise -> checkWitnessArg bWit t2
TApp (TCon (TyConWitness TwConMutable)) t2
| Just bConflict <- L.lookup (tConst t2) btsWit
-> throw $ ErrorPrivateWitnessConflict a xx bWit bConflict
| otherwise -> checkWitnessArg bWit t2
(takeTyConApps -> Just (TyConWitness (TwConDistinct 2), [t1, t2]))
| inEnv t1 -> checkWitnessArg bWit t2
| inEnv t2 -> checkWitnessArg bWit t1
| t1 /= t2 -> mapM_ (checkWitnessArg bWit) [t1, t2]
| otherwise -> throw $ ErrorPrivateWitnessInvalid a xx bWit
(takeTyConApps -> Just (TyConWitness (TwConDistinct _), ts))
-> mapM_ (checkWitnessArg bWit) ts
TApp (TCon (TyConSpec TcConRead)) t2
| configEffectCapabilities config
-> checkWitnessArg bWit t2
TApp (TCon (TyConSpec TcConWrite)) t2
| configEffectCapabilities config
-> checkWitnessArg bWit t2
TApp (TCon (TyConSpec TcConAlloc)) t2
| configEffectCapabilities config
-> checkWitnessArg bWit t2
_ -> throw $ ErrorPrivateWitnessInvalid a xx bWit
| null | https://raw.githubusercontent.com/discus-lang/ddc/2baa1b4e2d43b6b02135257677671a83cb7384ac/src/s1/ddc-core/DDC/Core/Check/Judge/Type/LetPrivate.hs | haskell | # OPTIONS_HADDOCK hide #
private --------------------------------------
Check the kinds of the region binders.
These must already set to kind Region.
The binders must have region kind.
We can't shadow region binders because we might have witnesses
in the environment that conflict with the ones created here.
Check the witness binders.
These must have full type annotations, as we don't infer
the types of introduced witnesses.
Check that the witnesses bound here are for the region,
and they don't conflict with each other.
Check the body expression.
We always want to do this in 'Synth' mode as the expected
type uses the region names visible from outside, and will
not mention local regions are introduced by the 'private'
construct.
The body type must have data kind.
The final body type.
If the bound region variables are children of some parent
region then they are merged into the parent when the
private/extend construct ends.
If the bound region variables have no parent then they are
deallocated when the private construct ends.
The bound region variables cannot be free in the body type.
Check that the result matches any expected type.
Delete effects on the bound region from the result.
The final effect type.
If the bound region variables are children of some parent
region then the overall effect is to allocate into
the parent.
If the bound region variables have no parent then they
are deallocated when the private construct ends and no
effect on these regions is visible.
Cut stack back to the length we started with,
remembering to lower to undo the lift we applied previously.
-----------------------------------------------------------------------------
| Check the set of witness bindings bound in a letregion for conflicts.
^ Type checker config.
^ Annotation for error messages.
^ Context
^ The whole expression, for error messages.
^ Region variables bound in the letregion.
^ Other witness bindings in the same set.
Check if some type variable or constructor is already in the
environment. NOTE: The constructor case is for region handles
when using the Eval fragment.
Check the argument of a witness type is for the region we're
introducing here.
The parser should ensure the right of a witness is a
constructor or variable.
Associate each witness binder with its type.
Check a single witness binder for conflicts with other witnesses. | module DDC.Core.Check.Judge.Type.LetPrivate
(checkLetPrivate)
where
import DDC.Core.Check.Judge.Kind
import DDC.Core.Check.Judge.EqT
import DDC.Core.Check.Judge.Type.Base
import qualified DDC.Core.Env.EnvT as EnvT
import qualified DDC.Type.Sum as Sum
import qualified DDC.Type.Env as Env
import qualified Data.Set as Set
import Data.List as L
checkLetPrivate :: Checker a n
checkLetPrivate !table !ctx mode demand
xx@(XLet a (LPrivate bsRgn mtParent bsWit) x)
= case takeSubstBoundsOfBinds bsRgn of
[] -> tableCheckExp table table ctx Recon demand x
us -> do
let config = tableConfig table
let depth = length $ map isBAnon bsRgn
ctrace $ vcat
[ text "*> Let Private"
, text " mode =" %% ppr mode
, text " demand =" %% string (show demand)
, text " in region binds =" %% ppr bsRgn
, text " in parent bind =" %% string (show mtParent)
, text " in witness binds =" %% ppr bsWit
, empty ]
(bsRgn', _, _)
<- liftM unzip3
$ mapM (\b -> checkBindM config ctx UniverseKind b Recon) bsRgn
let ksRgn = map typeOfBind bsRgn'
when (any (not . isRegionKind) ksRgn)
$ throw $ ErrorPrivateNotRegion a xx bsRgn ksRgn
let rebounds = filter (flip memberKindBind ctx) bsRgn'
when (not $ null rebounds)
$ throw $ ErrorPrivateRebound a xx rebounds
let (ctx', pos1) = markContext ctx
let ctx1 = pushKinds [(b, RoleConcrete) | b <- bsRgn] ctx'
let ctx2 = liftTypes depth ctx1
(bsWit', _, _)
<- liftM unzip3
$ mapM (\b -> checkBindM config ctx2 UniverseSpec b Recon)
bsWit
checkWitnessBindsM config a ctx xx us bsWit'
let ctx3 = pushTypes bsWit' ctx2
(xBody3, tBody3, effs3, ctx4)
<- tableCheckExp table table ctx3 (Synth []) demand x
(tBody4, kBody4, ctx5)
<- checkTypeM config ctx4 UniverseSpec tBody3
$ case mode of
Recon -> Recon
_ -> Check kData
tBody5 <- applyContext ctx5 tBody4
kBody5 <- applyContext ctx5 kBody4
TODO : cleanup mess introduced in GHC MonadFail transition .
effs5 <- applyContext ctx5 (TSum effs3)
>>= \case TSum effs5 -> return effs5
_ -> error "not a type sum"
when (not $ isDataKind kBody5)
$ throw $ ErrorMismatch a kBody5 kData xx
tBody_final
<- case mtParent of
Just tParent
-> do return $ foldl (\t b -> substituteTX b tParent t)
tBody5 bsRgn
_
-> do let fvsT = freeT Env.empty tBody5
when (any (flip Set.member fvsT) us)
$ throw $ ErrorPrivateEscape a xx bsRgn tBody5
return $ lowerT depth tBody5
ctx6 <- case mode of
Check tExpected
-> do makeEqT config ctx5 tExpected tBody_final
$ ErrorMismatch a tExpected tBody_final xx
_ -> return ctx5
tBody_final' <- applyContext ctx6 tBody_final
let delEff es u = Sum.delete (tRead (TVar u))
$ Sum.delete (tWrite (TVar u))
$ Sum.delete (tAlloc (TVar u))
$ es
effs_cut
<- case mtParent of
Just tParent
-> return $ (lowerT depth $ foldl delEff effs5 us)
`Sum.union` (Sum.singleton kEffect (tAlloc tParent))
_ -> return $ lowerT depth
$ foldl delEff effs5 us
let ctx_cut = lowerTypes depth
$ popToPos pos1 ctx6
returnX a
(\z -> XLet z (LPrivate bsRgn mtParent bsWit) xBody3)
tBody_final' effs_cut ctx_cut
checkLetPrivate _ _ _ _ _
= error "ddc-core.checkLetPrivate: no match"
checkWitnessBindsM
:: (Show n, Ord n)
-> CheckM a n ()
checkWitnessBindsM !config !a !ctx !xx !uRegions !bsWit
= mapM_ checkWitnessBindM bsWit
where
inEnv tt
= case tt of
TVar u'
| EnvT.member u' (contextEnvT ctx) -> True
| memberKind u' ctx -> True
TCon (TyConBound n')
| EnvT.member (UName n') (contextEnvT ctx) -> True
| memberKind (UName n') ctx -> True
_ -> False
checkWitnessArg bWit t2
= case t2 of
TVar u'
| all (/= u') uRegions
-> throw $ ErrorPrivateWitnessOther a xx uRegions bWit
| otherwise -> return ()
TCon (TyConBound n')
| all (/= UName n') uRegions
-> throw $ ErrorPrivateWitnessOther a xx uRegions bWit
| otherwise -> return ()
_ -> throw $ ErrorPrivateWitnessInvalid a xx bWit
btsWit = [(typeOfBind b, b) | b <- bsWit]
checkWitnessBindM bWit
= case typeOfBind bWit of
TApp (TCon (TyConWitness TwConConst)) t2
| Just bConflict <- L.lookup (tMutable t2) btsWit
-> throw $ ErrorPrivateWitnessConflict a xx bWit bConflict
| otherwise -> checkWitnessArg bWit t2
TApp (TCon (TyConWitness TwConMutable)) t2
| Just bConflict <- L.lookup (tConst t2) btsWit
-> throw $ ErrorPrivateWitnessConflict a xx bWit bConflict
| otherwise -> checkWitnessArg bWit t2
(takeTyConApps -> Just (TyConWitness (TwConDistinct 2), [t1, t2]))
| inEnv t1 -> checkWitnessArg bWit t2
| inEnv t2 -> checkWitnessArg bWit t1
| t1 /= t2 -> mapM_ (checkWitnessArg bWit) [t1, t2]
| otherwise -> throw $ ErrorPrivateWitnessInvalid a xx bWit
(takeTyConApps -> Just (TyConWitness (TwConDistinct _), ts))
-> mapM_ (checkWitnessArg bWit) ts
TApp (TCon (TyConSpec TcConRead)) t2
| configEffectCapabilities config
-> checkWitnessArg bWit t2
TApp (TCon (TyConSpec TcConWrite)) t2
| configEffectCapabilities config
-> checkWitnessArg bWit t2
TApp (TCon (TyConSpec TcConAlloc)) t2
| configEffectCapabilities config
-> checkWitnessArg bWit t2
_ -> throw $ ErrorPrivateWitnessInvalid a xx bWit
|
a15c776ecf2ea1926840cf3938d47462df93288300166218bd8caddba2a0196a | wdebeaum/step | utilize.lisp | ;;;;
;;;; W::utilize
;;;;
(define-words :pos W::v
:words (
(W::utilize
(SENSES
((LF-PARENT ont::use)
(example "a battery utilises a chemical reaction to maintain voltage")
(meta-data :origin beetle2 :entry-date 20080218 :change-date nil :comments pilot1)
(TEMPL AGENT-AFFECTED-XP-NP-TEMPL)
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/utilize.lisp | lisp |
W::utilize
|
(define-words :pos W::v
:words (
(W::utilize
(SENSES
((LF-PARENT ont::use)
(example "a battery utilises a chemical reaction to maintain voltage")
(meta-data :origin beetle2 :entry-date 20080218 :change-date nil :comments pilot1)
(TEMPL AGENT-AFFECTED-XP-NP-TEMPL)
)
)
)
))
|
30ce169b8da6fbb1d6a06961f289829dec30fa41d3d7096522c27b6e0004590f | input-output-hk/plutus-apps | Types.hs | # LANGUAGE DataKinds #
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DerivingVia #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
| Defines a number of types that are used in Wallet . XXX modules
module Wallet.Types(
ContractInstanceId(..)
, contractInstanceIDs
, randomID
, ContractActivityStatus(..)
, parseContractActivityStatus
, Notification(..)
, NotificationError(..)
, EndpointDescription(..)
, EndpointValue(..)
) where
import Control.Lens.TH (makeClassyPrisms)
import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey)
import Data.Aeson qualified as Aeson
import Data.Aeson.Encode.Pretty qualified as JSON
import Data.ByteString.Lazy.Char8 qualified as BSL8
import Data.String (IsString (..))
import Data.Text (Text)
import Data.Text qualified as T
import Data.UUID (UUID)
import Data.UUID.Extras qualified as UUID
import Data.UUID.V4 qualified as UUID
import GHC.Generics (Generic)
import Language.Haskell.TH.Syntax qualified as TH
import Prettyprinter (Pretty (..), colon, hang, viaShow, vsep, (<+>))
import Prettyprinter.Extras (PrettyShow (..), Tagged (..))
-- | Unique ID for contract instance
newtype ContractInstanceId = ContractInstanceId { unContractInstanceId :: UUID }
deriving (Eq, Ord, Show, Generic)
deriving newtype (FromJSONKey, ToJSONKey)
deriving anyclass (FromJSON, ToJSON)
deriving Pretty via (PrettyShow UUID)
-- | A pure list of all 'ContractInstanceId' values. To be used in testing.
contractInstanceIDs :: [ContractInstanceId]
contractInstanceIDs = ContractInstanceId <$> UUID.mockUUIDs
randomID :: IO ContractInstanceId
randomID = ContractInstanceId <$> UUID.nextRandom
data ContractActivityStatus = Active | Stopped | Done deriving (Eq, Show, Generic, ToJSON, FromJSON)
parseContractActivityStatus :: Text -> Maybe ContractActivityStatus
parseContractActivityStatus t = case T.toLower t of
"active" -> Just Active
"stopped" -> Just Stopped
"done" -> Just Done
_ -> Nothing
newtype EndpointDescription = EndpointDescription { getEndpointDescription :: String }
deriving stock (Eq, Ord, Generic, Show, TH.Lift)
deriving newtype (IsString, Pretty)
deriving anyclass (ToJSON, FromJSON)
newtype EndpointValue a = EndpointValue { unEndpointValue :: a }
deriving stock (Eq, Ord, Generic, Show)
deriving anyclass (ToJSON, FromJSON)
deriving via (Tagged "EndpointValue:" (PrettyShow a)) instance (Show a => Pretty (EndpointValue a))
data Notification =
Notification
{ notificationContractID :: ContractInstanceId
, notificationContractEndpoint :: EndpointDescription
, notificationContractArg :: Aeson.Value
}
deriving stock (Eq, Show, Generic)
deriving anyclass (ToJSON, FromJSON)
instance Pretty Notification where
pretty Notification{notificationContractID,notificationContractEndpoint,notificationContractArg} =
hang 2 $ vsep
[ "Instance:" <+> pretty notificationContractID
, "Endpoint:" <+> pretty notificationContractEndpoint
, "Argument:" <+> viaShow notificationContractArg
]
data NotificationError =
EndpointNotAvailable ContractInstanceId EndpointDescription
| MoreThanOneEndpointAvailable ContractInstanceId EndpointDescription
| InstanceDoesNotExist ContractInstanceId
| NotificationJSONDecodeError EndpointDescription Aeson.Value String
-- ^ Indicates that the target contract does not have the expected schema
--
TODO : SCP-2137
-- Not currently used. As endpoint parameter decoding happends inside the Contract and
-- a throwError is used is decoding failed.
However , still valuable to be used by the PAB to throw an error is an endpoint
-- could not be decoded.
deriving stock (Eq, Show, Generic)
deriving anyclass (ToJSON, FromJSON)
instance Pretty NotificationError where
pretty = \case
EndpointNotAvailable i ep -> "Endpoint" <+> pretty ep <+> "not available on" <+> pretty i
MoreThanOneEndpointAvailable i ep -> "Endpoint" <+> pretty ep <+> "is exposed more than once on" <+> pretty i
InstanceDoesNotExist i -> "Instance does not exist:" <+> pretty i
NotificationJSONDecodeError ep vv e ->
"Notification JSON decoding error:"
<+> pretty e
<> colon
<+> pretty (BSL8.unpack (JSON.encodePretty vv))
<+> pretty ep
makeClassyPrisms ''NotificationError
| null | https://raw.githubusercontent.com/input-output-hk/plutus-apps/0d35e44c615b57c8cce48d4e2d38f33e03e6f7cf/plutus-contract/src/Wallet/Types.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DerivingVia #
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedStrings #
| Unique ID for contract instance
| A pure list of all 'ContractInstanceId' values. To be used in testing.
^ Indicates that the target contract does not have the expected schema
Not currently used. As endpoint parameter decoding happends inside the Contract and
a throwError is used is decoding failed.
could not be decoded. | # LANGUAGE DataKinds #
# LANGUAGE LambdaCase #
# LANGUAGE TemplateHaskell #
| Defines a number of types that are used in Wallet . XXX modules
module Wallet.Types(
ContractInstanceId(..)
, contractInstanceIDs
, randomID
, ContractActivityStatus(..)
, parseContractActivityStatus
, Notification(..)
, NotificationError(..)
, EndpointDescription(..)
, EndpointValue(..)
) where
import Control.Lens.TH (makeClassyPrisms)
import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey)
import Data.Aeson qualified as Aeson
import Data.Aeson.Encode.Pretty qualified as JSON
import Data.ByteString.Lazy.Char8 qualified as BSL8
import Data.String (IsString (..))
import Data.Text (Text)
import Data.Text qualified as T
import Data.UUID (UUID)
import Data.UUID.Extras qualified as UUID
import Data.UUID.V4 qualified as UUID
import GHC.Generics (Generic)
import Language.Haskell.TH.Syntax qualified as TH
import Prettyprinter (Pretty (..), colon, hang, viaShow, vsep, (<+>))
import Prettyprinter.Extras (PrettyShow (..), Tagged (..))
newtype ContractInstanceId = ContractInstanceId { unContractInstanceId :: UUID }
deriving (Eq, Ord, Show, Generic)
deriving newtype (FromJSONKey, ToJSONKey)
deriving anyclass (FromJSON, ToJSON)
deriving Pretty via (PrettyShow UUID)
contractInstanceIDs :: [ContractInstanceId]
contractInstanceIDs = ContractInstanceId <$> UUID.mockUUIDs
randomID :: IO ContractInstanceId
randomID = ContractInstanceId <$> UUID.nextRandom
data ContractActivityStatus = Active | Stopped | Done deriving (Eq, Show, Generic, ToJSON, FromJSON)
parseContractActivityStatus :: Text -> Maybe ContractActivityStatus
parseContractActivityStatus t = case T.toLower t of
"active" -> Just Active
"stopped" -> Just Stopped
"done" -> Just Done
_ -> Nothing
newtype EndpointDescription = EndpointDescription { getEndpointDescription :: String }
deriving stock (Eq, Ord, Generic, Show, TH.Lift)
deriving newtype (IsString, Pretty)
deriving anyclass (ToJSON, FromJSON)
newtype EndpointValue a = EndpointValue { unEndpointValue :: a }
deriving stock (Eq, Ord, Generic, Show)
deriving anyclass (ToJSON, FromJSON)
deriving via (Tagged "EndpointValue:" (PrettyShow a)) instance (Show a => Pretty (EndpointValue a))
data Notification =
Notification
{ notificationContractID :: ContractInstanceId
, notificationContractEndpoint :: EndpointDescription
, notificationContractArg :: Aeson.Value
}
deriving stock (Eq, Show, Generic)
deriving anyclass (ToJSON, FromJSON)
instance Pretty Notification where
pretty Notification{notificationContractID,notificationContractEndpoint,notificationContractArg} =
hang 2 $ vsep
[ "Instance:" <+> pretty notificationContractID
, "Endpoint:" <+> pretty notificationContractEndpoint
, "Argument:" <+> viaShow notificationContractArg
]
data NotificationError =
EndpointNotAvailable ContractInstanceId EndpointDescription
| MoreThanOneEndpointAvailable ContractInstanceId EndpointDescription
| InstanceDoesNotExist ContractInstanceId
| NotificationJSONDecodeError EndpointDescription Aeson.Value String
TODO : SCP-2137
However , still valuable to be used by the PAB to throw an error is an endpoint
deriving stock (Eq, Show, Generic)
deriving anyclass (ToJSON, FromJSON)
instance Pretty NotificationError where
pretty = \case
EndpointNotAvailable i ep -> "Endpoint" <+> pretty ep <+> "not available on" <+> pretty i
MoreThanOneEndpointAvailable i ep -> "Endpoint" <+> pretty ep <+> "is exposed more than once on" <+> pretty i
InstanceDoesNotExist i -> "Instance does not exist:" <+> pretty i
NotificationJSONDecodeError ep vv e ->
"Notification JSON decoding error:"
<+> pretty e
<> colon
<+> pretty (BSL8.unpack (JSON.encodePretty vv))
<+> pretty ep
makeClassyPrisms ''NotificationError
|
0224607220eb5faa8f6fca20213df9b0182fc615c675f6deb70dae221845b993 | souenzzo/eql-as | alpha.cljc | (ns br.com.souenzzo.eql-as.alpha
(:refer-clojure :exclude [reverse])
(:require [edn-query-language.core :as eql]
[br.com.souenzzo.eql-as.ast :as ast]
[clojure.spec.alpha :as s]))
(defn as-query
[{::keys [as-map as-key]}]
(-> (ast/as-query {::ast/as-map as-map
::ast/as-key as-key})
(eql/ast->query)))
(defn ident-query
[{::keys [as-map as-key]}]
(-> (ast/ident-query {::ast/as-map as-map
::ast/as-key as-key})
(eql/ast->query)))
(defn reverse
[as-map]
(into (empty as-map)
(map (fn [[k v]]
(if (vector? v)
[(first v) [k (reverse (last v))]]
[v k])))
as-map))
| null | https://raw.githubusercontent.com/souenzzo/eql-as/d33de8d1b4428d99c25e0d95b008fe4e3e842f24/src/main/br/com/souenzzo/eql_as/alpha.cljc | clojure | (ns br.com.souenzzo.eql-as.alpha
(:refer-clojure :exclude [reverse])
(:require [edn-query-language.core :as eql]
[br.com.souenzzo.eql-as.ast :as ast]
[clojure.spec.alpha :as s]))
(defn as-query
[{::keys [as-map as-key]}]
(-> (ast/as-query {::ast/as-map as-map
::ast/as-key as-key})
(eql/ast->query)))
(defn ident-query
[{::keys [as-map as-key]}]
(-> (ast/ident-query {::ast/as-map as-map
::ast/as-key as-key})
(eql/ast->query)))
(defn reverse
[as-map]
(into (empty as-map)
(map (fn [[k v]]
(if (vector? v)
[(first v) [k (reverse (last v))]]
[v k])))
as-map))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.